Vertex shader not compiling due to a non-ASCII character?

巧了我就是萌 提交于 2019-12-02 05:00:14

问题


So I started using OpenGL with glew and GLFW to create a game engine, and I almost immediately ran into a problem when starting working with shaders:

They are not being used or have no effect whatsoever if they are being used.

I have been checking my code with plenty of other examples, and they all match up, nothing looks out of place, and I am starting to run out of ideas and patience (I have been trying to figure out why for nearly a month now) with this.

My main core code is here:

  #include "headers/Default.hpp"

  //Window width and height variables (logical window size in screen coordinates)
  int windowWidth = 800;
  int windowHeight = 600;
  //Window aspect ratio (width / height), kept in sync by UpdateWindowSize()
  float Aspect = (float)windowWidth / (float)windowHeight;

  //Buffer width and buffer height (framebuffer size in pixels, set by
  //UpdateFrameBufferSize — may differ from the window size on high-DPI displays)
  int bufferWidth;
  int bufferHeight;

  //Frame-timing state used by CalculateDelta()
  double deltaTime;
  double currentTime;
  double newTime;

  //Updates the frame-timing globals: samples the clock into newTime, stores
  //the seconds elapsed since the previous call in deltaTime, and advances
  //currentTime so the next call measures the next frame.
  void CalculateDelta()
  {
     newTime = glfwGetTime();
     deltaTime = newTime - currentTime;
     currentTime = newTime;
  }

  //GLFW window-size callback: caches the new window dimensions and keeps the
  //aspect ratio in sync.
  //@param window  the window that was resized (unused)
  //@param width   new window width in screen coordinates
  //@param height  new window height in screen coordinates
  void UpdateWindowSize(GLFWwindow* window, int width, int height)
  {
     windowWidth = width;
     windowHeight = height;

     //Guard against a zero height (e.g. a minimized window): dividing by
     //zero would make Aspect inf and break the orthographic projection.
     if (windowHeight != 0)
     {
        Aspect = (float)windowWidth / (float)windowHeight;
     }
  }

  //GLFW framebuffer-size callback: caches the drawable surface size in pixels.
  void UpdateFrameBufferSize(GLFWwindow* window, int width, int height)
  {
     bufferHeight = height;
     bufferWidth = width;
  }

  //Starts on startup and creates an window context and starts the rendering loop
  int main()
  {
     //Creates an engine startup log to keep
     CreateStartupLog();

     if (!glewInit())
     {
        WriteStartupLog("ERROR: GLEW failed to start\n");
        return 1;
     }
     else
     {
        WriteStartupLog("INFO: GLEW initiated!\n");
     }

     //If glfw is not initiated for whatever reason we return an error
     if (!glfwInit())
     {
        WriteStartupLog("ERROR: GLFW failed to start\n");
        return 1;
     }
     else
     {
        WriteStartupLog("INFO: GLFW initiated!\n");
     }

     ////////////////////////////////////////////////////////////////
     //                      Window Section                        //
     ////////////////////////////////////////////////////////////////
     //glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
     //glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
     //glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
     glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);

     //Gets the primary monitor of the PC and tells OpenGL to use that monitor
     GLFWmonitor* monitor = glfwGetPrimaryMonitor();
     const GLFWvidmode* videoMode = glfwGetVideoMode(monitor);

     //Creates a GLFW window context that we can work with
     GLFWwindow* gameWindow = glfwCreateWindow(windowWidth/*videoMode->width*/, windowHeight/*videoMode->height*/, "FireTech Engine", NULL/*monitor*/, NULL);

     //If the game window is not able to be created, prints an error and terminates the program
     if (!gameWindow)
     {
        WriteStartupLog("ERROR: GLFW could not create a window\n");
        glfwTerminate();
        return 1;
     }
     else
     {
        WriteStartupLog("INFO: GLFW created a window!\n\n");
     }

     //Makes the current context
     glfwMakeContextCurrent(gameWindow);

     //Sets the window callback function for size
     glfwSetWindowSizeCallback(gameWindow, UpdateWindowSize);
     glfwSetFramebufferSizeCallback(gameWindow, UpdateFrameBufferSize);

     //Initiate GLEW
     glewExperimental = GL_TRUE;
     glewInit();

     ////////////////////////////////////////////////////////////////
     //  Functions to set up various systems of the game engine    //
     ////////////////////////////////////////////////////////////////

     //Calls function to create a log file for the game engine
     CreateEngineLog();
     //Calls the function to compile the default shaders
     CompileDefaultShader();
     //Calls the function to get and print out hardware and OpenGL version
     //PrintHardwareInfo();

     ////////////////////////////////////////////////////////////////
     //                        Game Code                           //
     ////////////////////////////////////////////////////////////////
     Sprite testSprite;

     //Rendering loop
     while (!glfwWindowShouldClose(gameWindow))
     {
        CalculateDelta();
        glClearColor(0.3, 0.6, 1.0, 0);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        //Viewport and ortho settings
        glViewport(0, 0, windowWidth, windowHeight);
        glOrtho(-1, 1, -1 / Aspect, 1 / Aspect, 0, 1);

        //Draw a sprite
        if (GLFW_PRESS == glfwGetKey(gameWindow, GLFW_KEY_F2))
        {
           testSprite.DebugDraw();
        }
        else
        {
           testSprite.Draw();
        }

        //Draws the stuff we just rendered
        glfwSwapBuffers(gameWindow);
        glLoadIdentity();

        //Polls different events, like input for example
        glfwPollEvents();

        if (GLFW_PRESS == glfwGetKey(gameWindow, GLFW_KEY_F1))
        {
           int fps = GetFPS();
           printf("FPS: ");
           printf("%d\n", fps);

           printf("Frequency: ");
           printf("%f\n", 1/double(fps));
        }

        if (GLFW_PRESS == glfwGetKey(gameWindow, GLFW_KEY_ESCAPE))
        {
           glfwSetWindowShouldClose(gameWindow, 1);
        }
     }

     glfwTerminate();
     WriteEngineLog("PROGRAM EXITED: Window closed");
     return 0;
  }

Here is the shader.cpp code:

        #include "../headers/Default.hpp"

  //Reads the whole file at 'path' and returns its contents as a string.
  //Returns an empty string (and logs an error) if the file cannot be opened.
  string ReadShaderFile(char* path)
  {
     ifstream shaderFile;
     shaderFile.open(path, std::ifstream::in);
     string output;

     if (shaderFile.is_open())
     {
        printf("Opened shader file located at: \"%s\"\n", path);

        //Read character by character, testing the STREAM rather than eof().
        //The old loop `while (!eof()) output += get();` appended the EOF
        //sentinel (int -1, i.e. char 0xFF) to the source — exactly the
        //"non-ASCII character at line 0" the GLSL compiler complained about.
        char c;
        while (shaderFile.get(c))
        {
           output += c;
        }

        printf("Successfully read shader file located at: \"%s\"\n", path);
     }
     else
     {
        WriteEngineLog("ERROR: Could not read shader file!\n");
     }

     shaderFile.close();
     return output;
  }

  //Default constructor: no GLSL file paths were supplied, so no program is
  //built — only a warning is written to the engine log.
  Shader::Shader()
  {
     WriteEngineLog("WARNING: There was no path to any GLSL Shader files\n");
  }

  //Compiles and links a GL program from the given vertex and fragment GLSL
  //source files.  On any compile or link failure the partially-built objects
  //are deleted, the driver's info log is printed, and the constructor
  //returns early (this->Program is then not a usable program).
  Shader::Shader(char* VertexShaderPathIn, char* FragmentShaderPathIn)
  {
     string vertexShaderString = ReadShaderFile(VertexShaderPathIn);
     string fragmentShaderString = ReadShaderFile(FragmentShaderPathIn);

     //Prints out the string to show the shader's code
     printf("\n%s\n", vertexShaderString.c_str());
     printf("\n%s\n", fragmentShaderString.c_str());

     //Creates the GLchars needed to input the shader code
     //(the backing strings stay alive for the whole constructor, so the
     //c_str() pointers remain valid)
     const GLchar* vertex_shader = vertexShaderString.c_str();
     const GLchar* fragment_shader = fragmentShaderString.c_str();

     //Creates a vertex shader and compiles it
     GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
     WriteEngineLog("Blank vertex shader created\n");
     glShaderSource(vertexShader, 1, &vertex_shader, NULL);
     WriteEngineLog("Vertex shader given source\n");
     glCompileShader(vertexShader);

     //Compilation error checking begins here
     GLint isVertexCompiled = 0;
     glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &isVertexCompiled);
     if (isVertexCompiled == GL_FALSE)
     {
        //Gets the length of the log
        GLint maxLength = 0;
        glGetShaderiv(vertexShader, GL_INFO_LOG_LENGTH, &maxLength);

        //Creates and writes the log to the errorLog
        GLchar* errorLog = (GLchar*)malloc(maxLength);
        glGetShaderInfoLog(vertexShader, maxLength, &maxLength, &errorLog[0]);

        //Writes to the engine log with the shader error
        WriteEngineLog("ERROR: Vertex shader failed to compile!\n");
        printf("%s\n", (char*)errorLog);

        //Frees the errorLog allocation
        free(errorLog);

        //Deletes the shader so it doesn't leak
        glDeleteShader(vertexShader);

        WriteEngineLog("ERROR: Aborting shader creation.\n");
        return;
     }
     //Writes in the engine log to report successful compilation
     WriteEngineLog("Vertex shader successfully compiled!\n");

     //Creates a fragment shader
     GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
     WriteEngineLog("Blank fragment shader created\n");
     glShaderSource(fragmentShader, 1, &fragment_shader, NULL);
     WriteEngineLog("Fragment shader given source\n");
     glCompileShader(fragmentShader);

     //Compilation error checking begins here
     GLint isFragmentCompiled = 0;
     glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &isFragmentCompiled);
     if (isFragmentCompiled == GL_FALSE)
     {
        //Gets the length of the log
        //BUGFIX: this block previously queried vertexShader for the log
        //length and info log; it must query fragmentShader.
        GLint maxLength = 0;
        glGetShaderiv(fragmentShader, GL_INFO_LOG_LENGTH, &maxLength);

        //Creates and writes the log to the errorLog
        GLchar* errorLog = (GLchar*)malloc(maxLength);
        glGetShaderInfoLog(fragmentShader, maxLength, &maxLength, &errorLog[0]);

        WriteEngineLog("ERROR: Fragment shader failed to compile\n");
        printf("%s\n", (char*)errorLog);

        //Frees the errorLog allocation
        free(errorLog);

        //Deletes both shaders so they don't leak
        glDeleteShader(vertexShader);
        glDeleteShader(fragmentShader);

        WriteEngineLog("ERROR: Aborting shader creation.\n");
        return;
     }
     //Writes in the engine log to report successful compilation
     WriteEngineLog("Fragment shader successfully compiled!\n");

     //Creates the final shader product
     this->Program = glCreateProgram();
     WriteEngineLog("Blank shader created\n");
     glAttachShader(this->Program, vertexShader);
     WriteEngineLog("Attatched Vertex shader to the shader\n");
     glAttachShader(this->Program, fragmentShader);
     WriteEngineLog("Attatched Fragment shader to the shader\n");
     glLinkProgram(this->Program);

     //Verify the link succeeded before declaring the program usable.
     //(This replaces a commented-out version that referenced an undefined
     //'ShaderOut' and a misspelled glGetProgramInfolog.)
     GLint isLinked = 0;
     glGetProgramiv(this->Program, GL_LINK_STATUS, &isLinked);
     if (isLinked == GL_FALSE)
     {
        //Gets the length of the program info log
        GLint maxLength = 0;
        glGetProgramiv(this->Program, GL_INFO_LOG_LENGTH, &maxLength);

        //Creates and writes the log to the errorLog
        GLchar* errorLog = (GLchar*)malloc(maxLength);
        glGetProgramInfoLog(this->Program, maxLength, &maxLength, &errorLog[0]);

        WriteEngineLog("ERROR: Shader program failed to link\n");
        printf("%s\n", (char*)errorLog);

        //Frees the errorLog allocation
        free(errorLog);

        //Deletes the program and shaders so they don't leak
        glDeleteShader(vertexShader);
        glDeleteShader(fragmentShader);
        glDeleteProgram(this->Program);

        WriteEngineLog("ERROR: Aborting shader creation.\n");
        return;
     }

     WriteEngineLog("Shader linked!\n\n");

     WriteEngineLog("INFO: Shader created!\n");

     //The shader objects now live inside the linked program; flag the
     //intermediate objects for deletion.
     glDeleteShader(vertexShader);
     glDeleteShader(fragmentShader);
  }

  void Shader::Use()
  {
     glUseProgram(this->Program);
  }

Here is the quad.cpp code:

  #include "../headers/Default.hpp"

  //Default quad: unit scale, centred on the origin, with its own VAO.
  Quad::Quad()
  {
     scale.x = 1;
     scale.y = 1;
     position.x = 0;
     position.y = 0;

     VertexArray = CreateVertexArray();
  }

  //Quad constructor with one arg: position only.
  //NOTE(review): unlike the default constructor, 'scale' is not set here —
  //presumably Vector2 default-constructs to a usable value; confirm.
  Quad::Quad(Vector2 Position)
  {
     position = Position;
     VertexArray = CreateVertexArray();
  }

  //Quad constructor with two args: explicit position and scale.
  Quad::Quad(Vector2 Position, Vector2 Scale)
  {
     scale = Scale;
     position = Position;
     VertexArray = CreateVertexArray();
  }

  //Builds a VAO holding one triangle with interleaved position + colour data
  //and returns its handle.  Texture coordinates are present in the comments
  //but not yet enabled.
  GLuint Quad::CreateVertexArray()
  {
     //Interleaved layout: 3 position floats then 3 colour floats per vertex.
     GLfloat vertexData[] =
     {
         0.5f,  0.5f, 0.0f,   0.0f, 0.0f, 0.0f,   //1.0f, 1.0f, //top-right vertex
         0.5f, -0.5f, 0.0f,   0.0f, 1.0f, 0.0f,   //1.0f, 0.0f, //bottom-right vertex
        -0.5f, -0.5f, 0.0f,   0.0f, 0.0f, 1.0f//,   0.0f, 0.0f //bottom-left vertex
     };

     GLuint vertexArrayObject;
     GLuint vertexBufferObject;
     glGenVertexArrays(1, &vertexArrayObject);
     glGenBuffers(1, &vertexBufferObject);

     //Record buffer + attribute state into the VAO
     glBindVertexArray(vertexArrayObject);

     //Upload the vertex data
     glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
     glBufferData(GL_ARRAY_BUFFER, sizeof(vertexData), vertexData, GL_STATIC_DRAW);

     //Attribute 0: position — first 3 floats of each 6-float vertex
     glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)0);
     glEnableVertexAttribArray(0);
     //Attribute 1: colour — next 3 floats
     glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)(3 * sizeof(GLfloat)));
     glEnableVertexAttribArray(1);

     //Done recording state into the VAO
     glBindVertexArray(0);

     return vertexArrayObject;
  }

  //Quad debug drawing function: renders the triangle as a wireframe using
  //the default shader, then restores filled rendering.
  void Quad::DebugDraw()
  {
     DefaultShader.Use();

     //Wireframe on, draw, wireframe off
     glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
     glBindVertexArray(VertexArray);

     //Draw vertices 0-2 from the bound VAO with the active shader
     glDrawArrays(GL_TRIANGLES, 0, 3);
     //glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0); //CAUSING A CRASH AT THE MOMENT

     glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);

     glBindVertexArray(0);
  }

Here is the sprite.cpp code:

  #include "../headers/Default.hpp"

  //Default sprite located at the origin; no texture is assigned.
  Sprite::Sprite()
  {
     position.y = 0;
     position.x = 0;
  }

  //Texture-only constructor: the texture serves as both the default and the
  //currently displayed one.
  Sprite::Sprite(Texture tex)
  {
     currentTexture = tex;
     defaultTexture = tex;
  }

  //Texture + position constructor.
  Sprite::Sprite(Texture tex, Vector2 pos)
  {
     position = pos;
     currentTexture = tex;
     defaultTexture = tex;
  }

  //Texture + position + scale constructor.
  Sprite::Sprite(Texture tex, Vector2 pos, Vector2 Scale)
  {
     scale = Scale;
     position = pos;
     currentTexture = tex;
     defaultTexture = tex;
  }

  //Renders the sprite's triangle with the default shader and the sprite's VAO.
  void Sprite::Draw()
  {
     //Bind the geometry, then activate the default shader
     glBindVertexArray(VertexArray);
     DefaultShader.Use();

     //Draw vertices 0-2 from the bound VAO with the active shader
     glDrawArrays(GL_TRIANGLES, 0, 3);

     //Leave no VAO bound
     glBindVertexArray(0);
  }

Here is my vertex shader and fragment shader code (In order):

  //Vertex Shader: passes the vertex position straight through and forwards
  //the per-vertex colour to the fragment stage.
  #version 330 core

  layout (location = 0) in vec3 position; // The position variable has attribute position 0
  layout (location = 1) in vec3 color;    // Per-vertex colour at attribute 1

  out vec3 ourColor; // Interpolated and consumed by the fragment shader

  void main()
  {
      gl_Position = vec4(position, 1.0f); // See how we directly give a vec3 to vec4's constructor
      ourColor = color;
  }

  //Fragment shader: outputs the interpolated vertex colour.
  #version 330 core

  in vec3 ourColor;  // Interpolated colour from the vertex shader
  out vec4 color;    // Final fragment colour

  void main()
  {
      // BUGFIX: a vec3 cannot be assigned to a vec4 in GLSL — the original
      // `color = ourColor;` is a compile error.  Widen with an explicit alpha.
      color = vec4(ourColor, 1.0f);
  }

And I'm getting a warning that my shader did not compile — the error is that there is a non-ASCII character at line zero of the vertex shader.


回答1:


I had exactly the same error. This is almost certainly due to Unicode Byte Order Marks, or similar unprinted characters generated by text editors.

These are common in the first characters of a unicode file, but can occur anywhere.

You can programmatically strip these from your shader source strings before compiling, but this could be costly if you are compiling many shaders. See the Unicode byte-order-mark reference tables for the exact byte sequences to strip if you go this route.

An alternative is simply to keep the files in ANSI/ASCII format. I am sure most text editors have the facility to set/convert formats, but I will give Notepad++ as an example since it's what I use to edit GLSL:

  1. Open the GLSL file.
  2. Encoding -> Convert to ANSI. (Note that merely hitting "Encode in ANSI" will not strip the characters)
  3. Save the file.

The above should also strip other characters prone to confusing GLSL parsers (and C/C++ in general).

You could inform the user(/developer) the files are in an incorrect format on load in debug builds.



来源:https://stackoverflow.com/questions/36241598/vertex-shader-not-compiling-due-to-a-non-ascii-character

易学教程内所有资源均来自网络或用户发布的内容,如有违反法律规定的内容欢迎反馈
该文章没有解决你所遇到的问题?点击提问,说说你的问题,让更多的人一起探讨吧!