GLSL Shaders do not compile even if they have no obvious bugs

I have been trying to texture a cube I created, but I cannot see the texture; all I see is a blank cube rendering. I also tried skipping the texture and giving it a single color, but that hasn't worked either. I have looked over the code and I don't see any problems with it, but I am new to OpenGL, so maybe someone else can spot what is wrong.



This is my texture code inside the vertex_array constructor:



vertex_array::vertex_array(float* vertex_buffer, int num_of_floats, const std::string& texture_file)
{
    glGenVertexArrays(1, &va_ID);
    glBindVertexArray(va_ID);

    glGenBuffers(1, &vb_ID);
    glBindBuffer(GL_ARRAY_BUFFER, vb_ID);
    glBufferData(GL_ARRAY_BUFFER, num_of_floats * sizeof(float), vertex_buffer, GL_STATIC_DRAW);
    // attribute 0: position (3 floats), attribute 1: texture coordinates (2 floats), stride 5 floats
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)(0 * sizeof(float)));
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)(3 * sizeof(float)));
    glEnableVertexAttribArray(1);

    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    int width, height, nrChanells;
    stbi_set_flip_vertically_on_load(true);
    unsigned char* data = stbi_load(texture_file.c_str(), &width, &height, &nrChanells, 0);
    if (data)
    {
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
        glGenerateMipmap(GL_TEXTURE_2D);
    }
    else { std::cout << "failed to load texture" << std::endl; }
    stbi_image_free(data);

    glGenBuffers(1, &ib_ID);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ib_ID);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(index_buffer), index_buffer, GL_STATIC_DRAW);
    glBindVertexArray(0);
}
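
One thing to watch in the load above (and what Ripi2's comment below is getting at): calling stbi_load with a last argument of 0 returns the pixel data in however many channels the file actually has, yet glTexImage2D is always told the data is GL_RGBA. If blocks.png happens to be a 3-channel RGB image, the upload reads the wrong amount of data per pixel. A minimal sketch of one way to make the two agree is to force stb_image to always emit 4 channels:

    // Force stb_image to convert whatever the file contains to 4 channels (RGBA),
    // so the GL_RGBA format passed to glTexImage2D is always correct.
    int width, height, nrChanells;
    unsigned char* data = stbi_load(texture_file.c_str(), &width, &height, &nrChanells, 4);
    if (data)
    {
        glPixelStorei(GL_UNPACK_ALIGNMENT, 1); // safe default for tightly packed rows
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
        glGenerateMipmap(GL_TEXTURE_2D);
    }

The alternative is to inspect nrChanells after loading and pick GL_RGB or GL_RGBA accordingly; forcing 4 channels is simply less branching.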


This is my shader file (both stages live in one file, separated by the #shader marker lines that the loader below looks for):



#shader vertex
#version 330 core

layout(location = 0) in vec4 position;
layout(location = 1) in vec2 texCoord;

out vec2 v_TexCoord;

uniform mat4 view;
uniform mat4 projection;

void main()
{
    v_TexCoord = texCoord;
    gl_Position = projection * view * position;
}

#shader fragment
#version 330 core

layout(location = 0) out vec4 color;

in vec2 v_TexCoord;

uniform sampler2D u_Texture;

void main()
{
    vec4 texColor = texture(u_Texture, v_TexCoord);
    color = texColor;
    //color = vec4(0.0, 0.7, 0.4, 1.0);
}


This is the shader class:



#include "shader.h"
#include <fstream>
#include <string>
#include <sstream>

shader::shader(const std::string& shader_file)
{
std::ifstream file(shader_file);
std::string line;
std::stringstream shaders[2];
std::string shader_type;

while (getline(file, line))
{
if (line.find("#shader") != std::string::npos)
{
if (line.find("vertex") != std::string::npos)
shader_type = "vertex";
else if (line.find("fragment") != std::string::npos)
shader_type = "fragment";
}

else
{
if (shader_type == "vertex")
{
shaders[0] << line << "n";
//std::cout << line << "n";
}
else if (shader_type == "fragment")
{
shaders[1] << line << "n";
//std::cout << line << "n";
}
}
}

s_ID = glCreateProgram();
unsigned int vs_ID = glCreateShader(GL_VERTEX_SHADER);
unsigned int fs_ID = glCreateShader(GL_FRAGMENT_SHADER);
const char* vertex_shader = shaders[0].str().c_str();
const char* fragment_shader = shaders[1].str().c_str();
glShaderSource(vs_ID, 1, &vertex_shader, nullptr);
glShaderSource(fs_ID, 1, &fragment_shader, nullptr);
glCompileShader(vs_ID);
glCompileShader(fs_ID);
glAttachShader(s_ID, vs_ID);
glAttachShader(s_ID, fs_ID);
glLinkProgram(s_ID);
glValidateProgram(s_ID);
glDeleteShader(vs_ID);
glDeleteShader(fs_ID);
}

void shader::bind()
{
glUseProgram(s_ID);
}

void shader::unbind()
{
glUseProgram(0);
}
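
A side note on the two c_str() lines above, since it may well explain the 0xdd byte in the error log below: shaders[0].str() returns a temporary std::string that is destroyed at the end of that full statement, so vertex_shader is left pointing at freed memory by the time glShaderSource reads it. On MSVC debug builds, freed heap memory is filled with the byte 0xDD, which is, plausibly not by coincidence, exactly the "illegal extended ASCII character (0xdd)" the compiler reports. A minimal lifetime-safe sketch, keeping the same names:

    // Keep the sources alive in named std::string objects for as long as
    // the const char* pointers are used; c_str() on a temporary dangles.
    std::string vertex_src   = shaders[0].str();
    std::string fragment_src = shaders[1].str();
    const char* vertex_shader   = vertex_src.c_str();
    const char* fragment_shader = fragment_src.c_str();
    glShaderSource(vs_ID, 1, &vertex_shader, nullptr);
    glShaderSource(fs_ID, 1, &fragment_shader, nullptr);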


And this is my main application code:



vertex_array va_1(cube1, 40, "resources/blocks.png");
shader shader_1("src/shader1.shader");
va_1.bind();
shader_1.bind();
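
One more thing worth flagging in this fragment: the vertex shader multiplies by view and projection, but nothing shown here ever uploads those uniforms. After linking, uniform values default to all zeros, so gl_Position would collapse to the zero vector and nothing visible gets drawn even with a working texture. A hedged sketch of what the upload could look like; the id() accessor and the view_matrix/projection_matrix variables are hypothetical placeholders, not part of the original code:

    // Hypothetical example: upload the matrices after binding the program.
    shader_1.bind(); // calls glUseProgram(s_ID)
    int viewLoc = glGetUniformLocation(shader_1.id(), "view");       // id(): assumed accessor for s_ID
    int projLoc = glGetUniformLocation(shader_1.id(), "projection");
    glUniformMatrix4fv(viewLoc, 1, GL_FALSE, &view_matrix[0][0]);       // e.g. glm::mat4 built elsewhere
    glUniformMatrix4fv(projLoc, 1, GL_FALSE, &projection_matrix[0][0]);
    glUniform1i(glGetUniformLocation(shader_1.id(), "u_Texture"), 0);   // sampler reads texture unit 0

(The sampler uniform defaults to 0, which happens to match GL_TEXTURE0, but setting it explicitly is good practice.)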


[Edit by Spektre]



After peeking into the GLSL shader logs, the problem is:



ERROR: 0:1: '' : syntax error: illegal extended ASCII character (0xdd)


which means a wrong encoding somewhere along the way.










c++ opengl

asked Nov 22 '18 at 18:48 by Ameen Izhac
edited Dec 19 '18 at 9:16 by Spektre

  • It seems the shader program is not used. Did you forget to install the shader program before drawing the geometry (glUseProgram)? Anyway, this example is not Minimal, Complete, and Verifiable.

    – Rabbid76
    Nov 22 '18 at 19:04

  • Which is the value of nrChanells? It affects the GL_RGB or GL_RGBA parameters to glTexImage2D.

    – Ripi2
    Nov 22 '18 at 19:19

  • Sorry Ripi2, I didn't fully understand what you said; I can see it has to do with the textures, but I tried doing it without textures and it still output a single colour, so there is a problem that isn't to do with the textures. That doesn't mean there isn't a problem with the textures as well, so could you clarify what you meant?

    – Ameen Izhac
    Nov 22 '18 at 19:23

  • Rabbid76, yes, the shader is used, because I call shader_1.bind() in the main file, which calls glUseProgram().

    – Ameen Izhac
    Nov 22 '18 at 19:24

  • I will edit so the shader class is included.

    – Ameen Izhac
    Nov 22 '18 at 19:24

1 Answer
No colors can mean a problem with lighting/normals, or simply shaders that never compiled/linked, in which case the fixed-function pipeline is used instead, and it interprets the input locations differently, hence no color/texture etc.



First of all, check the GLSL logs; that is absolutely necessary. Even a small bug like a missing ; or a wrong character can stop the shader from compiling, and without checking the logs you simply do not know what works and what does not. Take a look here:





  • complete GL+GLSL+VAO/VBO C++ example



    And look at how glGetShaderInfoLog and glGetProgramInfoLog are used. Save or print their results to check for errors; a minimal log-checking sketch follows this list.
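
For concreteness, here is roughly what such a check could look like; a minimal sketch (the helper name check_shader is made up for this sketch, and an OpenGL loader header such as GLEW/GLAD is assumed to be included already):

    #include <iostream>
    #include <vector>

    // Print the compile log of one shader object; returns true on success.
    bool check_shader(unsigned int shader_id)
    {
        int status = 0;
        glGetShaderiv(shader_id, GL_COMPILE_STATUS, &status);
        if (status == GL_FALSE)
        {
            int length = 0;
            glGetShaderiv(shader_id, GL_INFO_LOG_LENGTH, &length);
            std::vector<char> log(length > 1 ? length : 1);
            glGetShaderInfoLog(shader_id, (int)log.size(), nullptr, log.data());
            std::cout << "shader compile error:\n" << log.data() << std::endl;
        }
        return status != GL_FALSE;
    }

Called right after each glCompileShader (with the analogous glGetProgramiv / glGetProgramInfoLog pair after glLinkProgram), this immediately surfaces errors like the 0xdd one below.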




After you have done this, it looks like you have a problem with the encoding of the shaders:



ERROR: 0:1: '' : syntax error: illegal extended ASCII character (0xdd)


There is more than one thing that could cause this:





  1. error in the shader file

     If you edit your shader with UNICODE tools, type on a national keyboard, or copy-paste from the web, the resulting file might contain "special" characters, or the whole file could even be encoded as UNICODE (UTF8, UTF16). That is wrong, as the gfx driver's GLSL compiler cannot handle such input.

     So check the files in a hex view (or write a small script that checks for the presence of non-ASCII codes above 127; see the checker sketch after this list).

     The UTF encodings are easy to detect. Here is a hex view of the start of a UTF8 SVG file:

     EF BB BF 3C 3F 78 6D 6C 20 76 65 72 73 69 6F 6E | <?xml version

     Here is another SVG, but UTF16 this time:

     FF FE 3C 00 3F 00 78 00 6D 00 6C 00 20 00 76 00 | ÿþ< ? x m l   v 

     As you can see, the starting 2 BYTEs are the UTF magic number (BOM) identifying the UTF encoding. In UTF8 a character can be 1 or more bytes; in UTF16 each character takes 2 bytes. Both are easily detectable. But if your file merely contains a single non-ASCII character (code > 127), it can be very hard to spot by eye, and a script that detects it is more reliable.

     However, the error message suggests your problem is in the first line, so you do not need to search the whole file to spot it... If you use Total Commander, hit [F3] on your shader file and then hit [3] to see the hex view, or use any other hex view tool at your disposal.




  2. error in loading the file into your program

     I do not know which compiler/IDE/platform you have, but if your compiler is UNICODE-friendly, it is possible you are loading your shader file into some string variable that enforces UNICODE. In that case you will not find any bug under #1, but the GLSL compiler still throws an illegal character.

     To confirm this, after you load your shader into your string, save it back out to some file and check whether its encoding is OK (do #1 on it).

     If that is the case, simply stop using the string type that does this and use an array of 8-bit data types instead. Beware: if you want to use char, you must be sure it really is char and not a UNICODE wide char!!! Do not ask me how; search the documentation for your compiler/IDE instead.

     Once this is resolved (the saved file is standard ASCII text), the GLSL compilation should proceed without errors...




  3. enforcing ASCII for shader code inside the C++ code

     Some programs do not use shader files at all; instead they store the source code as const strings in the application source. That approach is susceptible to the same problems as #2. To resolve this, some compilers provide a macro that tells the compiler a string literal is ASCII, not UNICODE, and should be handled as such. I do not know exactly what the macros are called, but I have seen them used here on SE/SO in different GLSL-related questions. IIRC it was something like:

     const char *txt=TEXT('bla bla vla');

     But I am not sure whether it was really just TEXT, or Text, or something slightly different. So search the Q&As for your error code or for enforcing ASCII, or look into your compiler/IDE documentation for such things.
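
As promised in #1, a tiny checker of the kind described there; a minimal sketch, assuming the shader file path is passed on the command line:

    #include <cstdio>

    // Scan a file byte by byte and report anything outside the 7-bit ASCII
    // range, which is exactly what trips up GLSL compilers fed UNICODE text.
    int main(int argc, char** argv)
    {
        if (argc < 2) { std::printf("usage: asciicheck <file>\n"); return 1; }
        std::FILE* f = std::fopen(argv[1], "rb");
        if (!f) { std::printf("cannot open %s\n", argv[1]); return 1; }
        long offset = 0;
        int c;
        while ((c = std::fgetc(f)) != EOF)
        {
            if (c > 127)
                std::printf("non-ASCII byte 0x%02X at offset %ld\n", c, offset);
            ++offset;
        }
        std::fclose(f);
        return 0;
    }

Note that checking the file on disk only covers cause #1; for cause #2 you would run the same scan over the bytes your program actually hands to glShaderSource.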




Also, a common rookie problem is to build the entire application and only then start to compile and test it. THAT IS WRONG, because after that they spend an eternity debugging...



Apps should be built incrementally: start small and, once that works, move to the next step. This way, when a bug reveals itself, 99% of the time it is in the code just added, so there is no difficult searching...



The new GL api, however, works against this approach, since you need a valid GL context, VBO/VAO, a mesh loader/generator, a view, shaders, and a compatible gfx driver before anything starts showing at all. That is a huge number of possible problems at once. That is why I advise starting with the old api and then replacing it part by part with the new one... or using some working starter template instead.






answered Dec 19 '18 at 8:49, edited Dec 19 '18 at 9:04 by Spektre

  • 1. So from your first point I understood that your concern is that my shader code contains non-ASCII characters, and you suggested making a program that checks for them. So I made a C++ program that casts each character of a string to an int and outputs it if its value is bigger than 126, and when inputting the entire shader code there was no output, so I assume that's not the issue.

    – Ameen Izhac
    Dec 23 '18 at 17:14

  • 2. Surely if the string is enforcing UNICODE on ASCII characters, that is not a problem, because I'm pretty sure the first 128 characters of UNICODE are the same as the ASCII characters, so enforcing UNICODE on characters already checked to be ASCII should just leave the encoding the same, shouldn't it? Also, after reading the file into the string, I printed the string and it printed like normal.

    – Ameen Izhac
    Dec 23 '18 at 17:14

  • 3. If I cannot manage to read in the shader from the file, I will probably use a string and the macro.

    – Ameen Izhac
    Dec 23 '18 at 17:14

  • @AmeenIzhac your #2 assumption is wrong: enforced UNICODE like UTF16 is not the same as ASCII, as there will be a lot of zero bytes in amongst the ASCII bytes, which will lead to wrong compilation after the first character of code...

    – Spektre
    Dec 27 '18 at 16:54

  • Ok, so I opened the file in Notepad++, went to Encoding, and found it was set to "Encode in UTF-8". I selected "Convert to ANSI" and saved it, then reopened it, and it is still on UTF-8 no matter how many times I repeat that.

    – Ameen Izhac
    Dec 28 '18 at 15:39