I am just trying to draw a full-screen .png into OpenGL as a texture, but I am met with a black screen. My code works fine with JPEGs, so I can only assume it's an issue with transparency.
Here is an example PNG that isn't working: http://cl.ly/e5x4 (although it is opaque, it still doesn't render; and no, it needs to be a .png).
Here is my code:
glView.m
struct vertex {
float position[3];
float color[4];
float texCoord[2];
};
typedef struct vertex vertex;
const vertex vertices[] = {
{{1, -1, 0}, {0, 167.0/255.0, 253.0/255.0, 1}, {1, 0}}, // BR (0)
{{1, 1, 0}, {0, 222.0/255.0, 1.0, 1}, {1, 1}}, // TR (1)
{{-1, 1, 0}, {0, 222.0/255.0, 1.0, 1}, {0, 1}}, // TL (2)
{{-1, -1, 0}, {0, 167.0/255.0, 253.0/255.0, 1}, {0, 0}}, // BL (3)
};
const GLubyte indicies[] = {
0, 1, 2,
0, 3, 2
};
@implementation glView {
EAGLContext* context;
GLuint positionSlot, colorSlot, textureCoordSlot;
GLuint texture, textureUniform;
GLuint vertexBuffer, indexBuffer;
}
-(GLuint) compileShader:(NSString*)shaderName withType:(GLenum)shaderType {
NSString* shaderPath = [[NSBundle mainBundle] pathForResource:shaderName ofType:@"glsl"];
NSError* err;
NSString* shaderString = [NSString stringWithContentsOfFile:shaderPath encoding:NSUTF8StringEncoding error:&err];
NSAssert(shaderString, @"Failed to load shader string: %@", err.localizedDescription);
GLuint shaderHandle = glCreateShader(shaderType);
const char* shaderStringUTF8 = [shaderString UTF8String];
int shaderStringLength = (int)[shaderString length];
glShaderSource(shaderHandle, 1, &shaderStringUTF8, &shaderStringLength);
glCompileShader(shaderHandle);
GLint compileSuccess;
glGetShaderiv(shaderHandle, GL_COMPILE_STATUS, &compileSuccess);
if (compileSuccess == GL_FALSE) {
GLchar messages[256];
glGetShaderInfoLog(shaderHandle, sizeof(messages), 0, &messages[0]);
NSString* messageString = [NSString stringWithUTF8String:messages];
NSLog(@"%@", messageString);
@throw NSInternalInconsistencyException;
}
return shaderHandle;
}
-(void) complileShaders {
GLuint vertexShader = [self compileShader:@"vertexShader" withType:GL_VERTEX_SHADER];
GLuint fragmentShader = [self compileShader:@"fragmentShader" withType:GL_FRAGMENT_SHADER];
GLuint programHandle = glCreateProgram();
glAttachShader(programHandle, vertexShader);
glAttachShader(programHandle, fragmentShader);
glLinkProgram(programHandle);
GLint linkSuccess;
glGetProgramiv(programHandle, GL_LINK_STATUS, &linkSuccess);
if (linkSuccess == GL_FALSE) {
GLchar messages[256];
glGetProgramInfoLog(programHandle, sizeof(messages), 0, &messages[0]);
NSString* messageString = [NSString stringWithUTF8String:messages];
NSLog(@"%@", messageString);
@throw NSInternalInconsistencyException;
}
glUseProgram(programHandle);
positionSlot = glGetAttribLocation(programHandle, "position");
colorSlot = glGetAttribLocation(programHandle, "sourceColor");
textureCoordSlot = glGetAttribLocation(programHandle, "texCoordIn");
glEnableVertexAttribArray(positionSlot);
glEnableVertexAttribArray(colorSlot);
glEnableVertexAttribArray(textureCoordSlot);
textureUniform = glGetUniformLocation(programHandle, "tex");
}
-(instancetype) initWithFrame:(CGRect)frame {
if (self = [super initWithFrame:frame]) {
self.layer.opaque = YES;
// Setup context
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
NSAssert(context, @"Failed to initialise context.");
NSAssert([EAGLContext setCurrentContext:context], @"Failed to set the current context.");
// Setup render buffer
GLuint colorBuffer;
glGenRenderbuffers(1, &colorBuffer);
glBindRenderbuffer(GL_RENDERBUFFER, colorBuffer);
[context renderbufferStorage:GL_RENDERBUFFER fromDrawable:self.layer];
// Setup frame buffer
GLuint frameBuffer;
glGenFramebuffers(1, &frameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colorBuffer);
// Setup vertex buffer
glGenBuffers(1, &vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
// Setup index buffer
glGenBuffers(1, &indexBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indicies), indicies, GL_STATIC_DRAW);
[self complileShaders];
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_SRC_COLOR);
texture = [self loadTexture:@"justapng.png"];
[self render];
}
return self;
}
-(GLuint) loadTexture:(NSString*)fileName {
UIImage* textureImage = [UIImage imageNamed:fileName];
NSAssert1(textureImage, @"Unable to load texture %@.", fileName);
return [self loadTextureFromImage:textureImage];
}
-(GLuint) loadTextureFromImage:(UIImage*)image {
CGImageRef textureImage = image.CGImage;
size_t width = CGImageGetWidth(textureImage);
size_t height = CGImageGetHeight(textureImage);
GLubyte* spriteData = (GLubyte*) malloc(width*height*4);
CGColorSpaceRef cs = CGImageGetColorSpace(textureImage);
CGContextRef c = CGBitmapContextCreate(spriteData, width, height, 8, width*4, cs, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
CGColorSpaceRelease(cs);
CGContextDrawImage(c, (CGRect){CGPointZero, {width, height}}, textureImage);
CGContextRelease(c);
GLuint glTex;
glGenTextures(1, &glTex);
glBindTexture(GL_TEXTURE_2D, glTex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)width, (int)height, 0, GL_RGBA, GL_UNSIGNED_BYTE, spriteData);
free(spriteData);
return glTex;
}
-(void) render {
glClear(GL_COLOR_BUFFER_BIT);
glViewport(0, 0, self.frame.size.width, self.frame.size.height);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexBuffer);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);
glUniform1i(textureUniform, 0);
glVertexAttribPointer(positionSlot, 3, GL_FLOAT, GL_FALSE, sizeof(vertex), 0);
glVertexAttribPointer(colorSlot, 4, GL_FLOAT, GL_FALSE, sizeof(vertex), (GLvoid*)(sizeof(float)*3));
glVertexAttribPointer(textureCoordSlot, 2, GL_FLOAT, GL_FALSE, sizeof(vertex), (GLvoid*)(sizeof(float)*7));
glDrawElements(GL_TRIANGLES, sizeof(indicies)/sizeof(indicies[0]), GL_UNSIGNED_BYTE, 0);
[context presentRenderbuffer:GL_RENDERBUFFER];
}
@end
vertexShader.glsl
attribute vec4 position;
attribute vec4 sourceColor;
varying vec4 destinationColor;
attribute vec2 texCoordIn;
varying vec2 texCoordOut;
void main() {
destinationColor = sourceColor;
gl_Position = position;
texCoordOut = texCoordIn;
}
fragmentShader.glsl
varying lowp vec4 destinationColor;
varying lowp vec2 texCoordOut;
uniform sampler2D tex;
void main() {
gl_FragColor = destinationColor*texture2D(tex, texCoordOut);
}
Sorry for dumping all this code, but I am unsure where the problem originates. Any ideas as to what I am doing wrong?
This appears to be the classic non-power-of-two issue (e.g. see Android OpenGL2.0 showing black textures). The texture you linked is 1005x335.
The OpenGL ES 2.0 specification only treats an NPOT texture as 'complete' when both GL_TEXTURE_WRAP_S and GL_TEXTURE_WRAP_T are set to GL_CLAMP_TO_EDGE (via glTexParameteri) and the minification filter does not use mipmaps. Various extensions allow full NPOT texturing; however, your device doesn't necessarily support them, and likely doesn't if you're getting black textures. If a texture isn't complete, it is sampled as black per the spec.
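A minimal sketch of texture setup that keeps an NPOT texture complete on stock ES 2.0 (this would slot into your loadTextureFromImage: in place of the existing parameter calls, assuming no NPOT extension is available):
// Wrap must be clamp-to-edge and the min filter must not use mipmaps,
// otherwise an NPOT texture is incomplete and samples as black.
glBindTexture(GL_TEXTURE_2D, glTex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); // no GL_*_MIPMAP_* filters
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)width, (int)height, 0, GL_RGBA, GL_UNSIGNED_BYTE, spriteData);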
For the life of me, I can't render an image to the iPhone simulator screen. I've simplified my code as much as possible.
The following code is in ViewController.m, a class that subclasses GLKViewController and also conforms to GLKViewDelegate.
- (void)viewDidLoad {
[super viewDidLoad];
/*Setup EAGLContext*/
self.context = [self createBestEAGLContext];
[EAGLContext setCurrentContext:self.context];
/*Setup View*/
GLKView *view = [[GLKView alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
view.context = self.context;
view.delegate = self;
view.drawableDepthFormat = GLKViewDrawableDepthFormat24;
self.view = view;
}
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
/*Setup GLK effect*/
self.effect = [[GLKBaseEffect alloc] init];
self.effect.transform.projectionMatrix = GLKMatrix4MakeOrtho(0, 320, 480, 0, -1, 1);
glClearColor(0.5, 1, 1, 0.0);
glClear(GL_COLOR_BUFFER_BIT);
NSDictionary * options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES],
GLKTextureLoaderOriginBottomLeft,
nil];
NSError * error;
NSString *path = [[NSBundle mainBundle] pathForResource:@"soccerball" ofType:@"jpg"];
GLKTextureInfo * textureInfo = [GLKTextureLoader textureWithContentsOfFile:path options:options error:&error];
if (textureInfo == nil) {
NSLog(@"Error loading file: %@", [error localizedDescription]);
}
TexturedQuad newQuad;
newQuad.bl.geometryVertex = CGPointMake(0, 0);
newQuad.br.geometryVertex = CGPointMake(textureInfo.width, 0);
newQuad.tl.geometryVertex = CGPointMake(0, textureInfo.height);
newQuad.tr.geometryVertex = CGPointMake(textureInfo.width, textureInfo.height);
newQuad.bl.textureVertex = CGPointMake(0, 0);
newQuad.br.textureVertex = CGPointMake(1, 0);
newQuad.tl.textureVertex = CGPointMake(0, 1);
newQuad.tr.textureVertex = CGPointMake(1, 1);
self.effect.texture2d0.name = textureInfo.name;
self.effect.texture2d0.enabled = YES;
GLKMatrix4 modelMatrix = GLKMatrix4Identity;
modelMatrix = GLKMatrix4Translate(modelMatrix, 100, 200, 0);
self.effect.transform.modelviewMatrix = modelMatrix;
[self.effect prepareToDraw];
long offset = (long)&(newQuad);
glEnableVertexAttribArray(GLKVertexAttribPosition);
glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
glVertexAttribPointer(GLKVertexAttribPosition, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *) (offset + offsetof(TexturedVertex, geometryVertex)));
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *) (offset + offsetof(TexturedVertex, textureVertex)));
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
and some of the structs used...
typedef struct {
CGPoint geometryVertex;
CGPoint textureVertex;
} TexturedVertex;
typedef struct {
TexturedVertex bl;
TexturedVertex br;
TexturedVertex tl;
TexturedVertex tr;
} TexturedQuad;
Right now the only thing that is working is
glClearColor(0.5, 1, 1, 0.0);
glClear(GL_COLOR_BUFFER_BIT);
which does adjust the background colour. There is no 'soccerball' image.
Any help is greatly appreciated.
EDIT - The TexturedVertex CGPoints were incorrect, so I fixed them. The problem still persists.
Solution:
The TexturedVertex struct must not use CGPoint, but rather GLKVector2.
This is because of a precision mismatch in the values stored in those points. GLKit expects single-precision float values, but CGPoint stores CGFloat, which is double precision on 64-bit hardware, so the vertex layout no longer matches and things get weird. This problem only shows up on 64-bit devices running iOS 7.0 and later.
Refer to this question for more detail on the issue: OpenGL ES Shaders and 64-bit iPhone 5S.
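For reference, a minimal sketch of the corrected struct and quad setup (same fields as above, just with single-precision GLKVector2):
typedef struct {
    GLKVector2 geometryVertex;   // plain floats, matching the GL_FLOAT attribute pointers
    GLKVector2 textureVertex;
} TexturedVertex;
// ...and the quad is then built with GLKVector2Make instead of CGPointMake, e.g.:
newQuad.bl.geometryVertex = GLKVector2Make(0, 0);
newQuad.br.geometryVertex = GLKVector2Make(textureInfo.width, 0);
newQuad.bl.textureVertex = GLKVector2Make(0, 0);
newQuad.br.textureVertex = GLKVector2Make(1, 0);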
I'm using GLKView to render some sprites in an iOS app.
My question is, how can I remove/draw only parts of one image? For example, I have a background, and on top of it an image (both sprites). I want to take some random rectangles out of the image on top, so the background will be visible in those rectangles. Is that possible?
I'm creating my textures like this:
- (id)initWithFile:(NSString *)fileName effect:(GLKBaseEffect *)effect position:(GLKVector2)position{
if ((self = [super init])) {
self.effect = effect;
NSDictionary * options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES],
GLKTextureLoaderOriginBottomLeft,
nil];
NSError * error;
NSString *path = [[NSBundle mainBundle] pathForResource:fileName ofType:nil];
self.textureInfo = [GLKTextureLoader textureWithContentsOfFile:path options:options error:&error];
self.contentSize = CGSizeMake(self.textureInfo.width, self.textureInfo.height);
TexturedQuad newQuad;
newQuad.bl.geometryVertex = CGPointMake(0, 0);
newQuad.br.geometryVertex = CGPointMake(self.textureInfo.width, 0);
newQuad.tl.geometryVertex = CGPointMake(0, self.textureInfo.height);
newQuad.tr.geometryVertex = CGPointMake(self.textureInfo.width, self.textureInfo.height);
newQuad.bl.textureVertex = CGPointMake(0, 0);
newQuad.br.textureVertex = CGPointMake(1, 0);
newQuad.tl.textureVertex = CGPointMake(0, 1);
newQuad.tr.textureVertex = CGPointMake(1, 1);
self.quad = newQuad;
self.position = position;
self.frameHeight = self.textureInfo.height;
}
return self;
}
And then render them like this
- (void)render {
self.effect.texture2d0.name = self.textureInfo.name;
self.effect.texture2d0.enabled = YES;
self.effect.transform.modelviewMatrix = self.modelMatrix;
[self.effect prepareToDraw];
long offset = (long)&_quad;
glEnableVertexAttribArray(GLKVertexAttribPosition);
glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
glVertexAttribPointer(GLKVertexAttribPosition, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *) (offset + offsetof(TexturedVertex, geometryVertex)));
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *) (offset + offsetof(TexturedVertex, textureVertex)));
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
Typically this is done with a second texture that is an alpha map: it has regions that are fully opaque and other regions that are fully transparent. In the shader, the alpha channel of the mask texture is multiplied by the image texture's alpha to get the final color.
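Since the code above uses GLKBaseEffect rather than a custom shader, here is a hedged sketch of the same multiply using the effect's second texture unit; maskInfo is an assumed GLKTextureInfo loaded the same way as self.textureInfo, not something from the code above:
// Unit 0: the sprite image; unit 1: the alpha mask, multiplied in (modulate semantics).
self.effect.texture2d0.name = self.textureInfo.name;
self.effect.texture2d0.enabled = YES;
self.effect.texture2d1.name = maskInfo.name;                 // hypothetical mask texture
self.effect.texture2d1.envMode = GLKTextureEnvModeModulate;
self.effect.texture2d1.enabled = YES;
[self.effect prepareToDraw];
// The mask needs UVs on the second texture-coordinate attribute (here reusing the sprite's UVs):
glEnableVertexAttribArray(GLKVertexAttribTexCoord1);
glVertexAttribPointer(GLKVertexAttribTexCoord1, 2, GL_FLOAT, GL_FALSE, sizeof(TexturedVertex), (void *) (offset + offsetof(TexturedVertex, textureVertex)));
// With glEnable(GL_BLEND) and glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA),
// fragments where the mask alpha is 0 let the background show through.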
I am displaying 3 objects with the help of GLKit. However, when I am applying textures to these objects, only one texture is being used for all three.
The code I am using is as follows:
- (void)setUpGL{
NSLog(@"i : %d, %d, %d",i,j,k);
firstPlayerScore = 0;
secondPlayerScore = 0;
staticBall = YES;
isSecondPlayer = NO;
self.boxPhysicsObjects = [NSMutableArray array];
self.spherePhysicsObjects = [NSMutableArray array];
self.immovableBoxPhysicsObjects = [NSMutableArray array];
self.cylinderPhysicsObjects = [NSMutableArray array];
self.secondPlayerCylinderPhysicsObjects = [NSMutableArray array];
self.sphereArray = [NSMutableArray array];
GLKView *view = (GLKView *)self.view;
NSAssert([view isKindOfClass:[GLKView class]],@"View controller's view is not a GLKView");
view.drawableDepthFormat = GLKViewDrawableDepthFormat16;
view.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
[EAGLContext setCurrentContext:view.context];
self.baseEffect = [[GLKBaseEffect alloc] init];
glEnable(GL_CULL_FACE);
glEnable(GL_DEPTH_TEST);
//glGenBuffers(1, &_vertexBuffer);
//glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
//glBufferData(GL_ARRAY_BUFFER, (i+j)*sizeof(float), sphereVerts, GL_STATIC_DRAW);
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
self.baseEffect.light0.enabled = GL_TRUE;
self.baseEffect.light0.ambientColor = GLKVector4Make(0.7f, 0.7f, 0.7f, 1.0f);
[self addImmovableBoxPhysicsObjects];
[self addRandomPhysicsSphereObject];
//[self addFirstPlayerCylinderObject];
//[self addSecondPlayerCylinderObject];
//[self scheduleAddRandomPhysicsSphereObject:nil];
}
- (void)addRandomPhysicsObject{
if(random() % 2 == 0)
{
[self addRandomPhysicsBoxObject];
}
else
{
[self addRandomPhysicsSphereObject];
}
}
- (void)setUpBox{
CGImageRef image = [[UIImage imageNamed:@"outUV2.PNG"] CGImage];
textureInfo1 = [GLKTextureLoader textureWithCGImage:image options:nil error:NULL];
self.baseEffect.texture2d0.name = textureInfo1.name;
self.baseEffect.texture2d0.enabled = YES;
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer( GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), final_meshVerts);
glEnableVertexAttribArray(GLKVertexAttribNormal);
glVertexAttribPointer(GLKVertexAttribNormal, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), final_meshNormals);
glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, 2*sizeof(float), final_meshTexCoords);
//glDisableVertexAttribArray(GLKVertexAttribTexCoord0);
}
- (void)drawPhysicsBoxObjects{
//self.baseEffect.texture2d0.target = textureInfo1.target;
PAppDelegate *appDelegate = [[UIApplication sharedApplication] delegate];
GLKMatrix4 savedModelviewMatrix = self.baseEffect.transform.modelviewMatrix;
for(PPhysicsObject *currentObject in self.boxPhysicsObjects){
self.baseEffect.transform.modelviewMatrix =
GLKMatrix4Multiply(savedModelviewMatrix,[appDelegate physicsTransformForObject:currentObject]);
[self.baseEffect prepareToDraw];
glDrawArrays(GL_TRIANGLES, 0, final_meshNumVerts);
}
self.baseEffect.light0.diffuseColor = GLKVector4Make(1.0f, 1.0f, 1.0f, 1.0f);// Alpha
for(PPhysicsObject *currentObject in self.immovableBoxPhysicsObjects){
self.baseEffect.transform.modelviewMatrix = GLKMatrix4Multiply(savedModelviewMatrix, [appDelegate physicsTransformForObject:currentObject]);
[self.baseEffect prepareToDraw];
glDrawArrays(GL_TRIANGLES,0, final_meshNumVerts);
}
self.baseEffect.transform.modelviewMatrix = savedModelviewMatrix;
}
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect{
static float a = 0;
a = a+0.1;
//NSLog(@"a : %f",a);
self.baseEffect.transform.modelviewMatrix = GLKMatrix4MakeLookAt(
0, 9.8, 10.0, // Eye position
0.0, 1.0, 0.0, // Look-at position
0.0, 1.0, 0.0); // Up direction
const GLfloat aspectRatio = (GLfloat)view.drawableWidth / (GLfloat)view.drawableHeight;
self.baseEffect.transform.projectionMatrix =
GLKMatrix4MakePerspective(GLKMathDegreesToRadians(35.0f),aspectRatio,0.2f,200.0f); // Far arbitrarily far enough to contain scene
self.baseEffect.light0.position = GLKVector4Make(0.6f, 1.0f, 0.4f, 0.0f);
[self.baseEffect prepareToDraw];
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
[self drawPhysicsSphereObjects];
[self drawPhysicsBoxObjects];
//[self drawPhysicsCylinderObjects];
}
- (void)addRandomPhysicsSphereObject{
PAppDelegate *appDelegate = [[UIApplication sharedApplication] delegate];
PPhysicsObject *anObject = nil;
if([self.spherePhysicsObjects count] < PMAX_NUMBER_BLOCKS)
{
NSLog(@"if");
anObject = [[PPhysicsObject alloc] init];
}
else
{
NSLog(@"else");
anObject = [self.spherePhysicsObjects objectAtIndex:0];
[self.spherePhysicsObjects removeObjectAtIndex:0];
}
[self.spherePhysicsObjects addObject:anObject];
[appDelegate physicsRegisterSphereObject:anObject
position:GLKVector3Make(0,3.5,-2)
mass:0.0f];
[self setUpSphere];
/*[appDelegate physicsSetVelocity:GLKVector3Make(
random() / (float)RAND_MAX * 2.0f - 1.0f,
0.0f,
random() /(float)RAND_MAX * 2.0f - 1.0f)
forObject:anObject];*/
}
- (void)setUpSphere{
CGImageRef image = [[UIImage imageNamed:@"basketball.png"] CGImage];
textureInfo = [GLKTextureLoader textureWithCGImage:image options:nil error:NULL];
self.baseEffect.texture2d0.name = textureInfo.name;
self.baseEffect.texture2d0.enabled = YES;
glEnableVertexAttribArray( GLKVertexAttribPosition);
glVertexAttribPointer( GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), newbasketballVerts);
glEnableVertexAttribArray(GLKVertexAttribNormal);
glVertexAttribPointer(GLKVertexAttribNormal, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), newbasketballNormals);
glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, 2*sizeof(float), newbasketballTexCoords);
//glDisableVertexAttribArray(GLKVertexAttribTexCoord0);
}
- (void)drawPhysicsSphereObjects{
NSLog(@"draw");
/*static int x = 1;
if (x>20) {
x=20;
}
matrix = GLKMatrix4Identity;
matrix = GLKMatrix4MakeTranslation(0.1 * (x++), 0.0, 0.0);*/
//self.baseEffect.texture2d0.target = textureInfo2.target;
PAppDelegate *appDelegate = [[UIApplication sharedApplication] delegate];
GLKMatrix4 savedModelviewMatrix = self.baseEffect.transform.modelviewMatrix;
/*CGImageRef image = [[UIImage imageNamed:@"basketball.png"] CGImage];
GLKTextureInfo *textureInfo = [GLKTextureLoader textureWithCGImage:image options:nil error:NULL];
self.baseEffect.texture2d0.name = textureInfo.name;
self.baseEffect.texture2d0.target = textureInfo.target;*/
self.baseEffect.light0.diffuseColor = GLKVector4Make(1.0f, 1.0f, 1.0f, 1.0f);
//glVertexPointer(3, GL_FLOAT, 0, sphereVerts);
//glNormalPointer(GL_FLOAT, 0, sphereNormals);
//glTexCoordPointer(2, GL_FLOAT, 0, final_meshTexCoords);
/*glGenBuffers(1, &ballVertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, ballVertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(MeshVertexData), MeshVertexData, GL_STATIC_DRAW);
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, sizeof(arrowVertexData), 0);
glEnableVertexAttribArray(GLKVertexAttribNormal);
glVertexAttribPointer(GLKVertexAttribNormal, 3, GL_FLOAT, GL_TRUE, sizeof(arrowVertexData), (void *)offsetof(arrowVertexData, normal));
glBindVertexArrayOES(arrowVertexArray);*/
//glEnableVertexAttribArray(GLKVertexAttribTexCoord0);
//glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, 2*sizeof(float), newbasketballTexCoords);
if (!isSecondPlayer) {
for(PPhysicsObject *currentObject in self.spherePhysicsObjects)
{NSLog(@"first");
self.baseEffect.transform.modelviewMatrix =
GLKMatrix4Multiply(savedModelviewMatrix, [appDelegate physicsTransformForObject:currentObject]);
[self.baseEffect prepareToDraw];
glDrawArrays(GL_TRIANGLES, 0, newbasketballNumVerts);
//glDrawArrays(GL_TRIANGLES, 0, sizeof(MeshVertexData) / sizeof(arrowVertexData));
}
}
else{
for(PPhysicsObject *currentObject in self.secondSpherePhysicsObjects)
{
self.baseEffect.transform.modelviewMatrix =
GLKMatrix4Multiply(savedModelviewMatrix, [appDelegate physicsTransformForObject:currentObject]);
[self.baseEffect prepareToDraw];
glDrawArrays(GL_TRIANGLES, 0, newbasketballNumVerts);
//glDrawArrays(GL_TRIANGLES, 0, sizeof(MeshVertexData) / sizeof(arrowVertexData));
}
}
//glBindBuffer(GL_ARRAY_BUFFER, 0);
//glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
//glDisableVertexAttribArray(GLKVertexAttribTexCoord0);
self.baseEffect.transform.modelviewMatrix = savedModelviewMatrix;
}
Why is this only using one texture for all three, and not three different textures, one for each object? How can I fix this?
I built a scene with the Moon moving around the Earth, using different textures for the Earth and the Moon. Under GLKit, the code looks like this:
-(void)viewDidLoad
{
//......
// Setup Earth texture
CGImageRef earthImageRef =
[[UIImage imageNamed:@"Earth512x256.jpg"] CGImage];
earthTextureInfo = [GLKTextureLoader
textureWithCGImage:earthImageRef
options:[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES],
GLKTextureLoaderOriginBottomLeft, nil]
error:NULL];
// Setup Moon texture
CGImageRef moonImageRef =
[[UIImage imageNamed:@"Moon256x128.png"] CGImage];
moonTextureInfo = [GLKTextureLoader
textureWithCGImage:moonImageRef
options:[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES],
GLKTextureLoaderOriginBottomLeft, nil]
error:NULL];
//......
}
Then draw the Earth and the Moon:
- (void)drawEarth
{
//setup texture
self.baseEffect.texture2d0.name = earthTextureInfo.name;
self.baseEffect.texture2d0.target = earthTextureInfo.target;
//
GLKMatrixStackPush(self.modelviewMatrixStack);
GLKMatrixStackRotate( // Rotate (tilt Earth's axis)
self.modelviewMatrixStack,
GLKMathDegreesToRadians(SceneEarthAxialTiltDeg),
1.0, 0.0, 0.0);
GLKMatrixStackRotate( // Rotate about Earth's axis
self.modelviewMatrixStack,
GLKMathDegreesToRadians(earthRotationAngleDegrees),
0.0, 1.0, 0.0);
self.baseEffect.transform.modelviewMatrix =
GLKMatrixStackGetMatrix4(self.modelviewMatrixStack);
//draw earth
[self.baseEffect prepareToDraw];
glBindVertexArrayOES(_vertexArray);
glDrawArrays(GL_TRIANGLES, 0, sphereNumVerts);
//pop
GLKMatrixStackPop(self.modelviewMatrixStack);
self.baseEffect.transform.modelviewMatrix =
GLKMatrixStackGetMatrix4(self.modelviewMatrixStack);
}
- (void)drawMoon
{
self.baseEffect.texture2d0.name = moonTextureInfo.name;
self.baseEffect.texture2d0.target = moonTextureInfo.target;
GLKMatrixStackPush(self.modelviewMatrixStack);
GLKMatrixStackRotate( // Rotate to position in orbit
self.modelviewMatrixStack,
GLKMathDegreesToRadians(moonRotationAngleDegrees),
0.0, 1.0, 0.0);
GLKMatrixStackTranslate(// Translate to distance from Earth
self.modelviewMatrixStack,
0.0, 0.0, SceneMoonDistanceFromEarth);
GLKMatrixStackScale( // Scale to size of Moon
self.modelviewMatrixStack,
SceneMoonRadiusFractionOfEarth,
SceneMoonRadiusFractionOfEarth,
SceneMoonRadiusFractionOfEarth);
GLKMatrixStackRotate( // Rotate Moon on its own axis
self.modelviewMatrixStack,
GLKMathDegreesToRadians(moonRotationAngleDegrees),
0.0, 1.0, 0.0);
//
self.baseEffect.transform.modelviewMatrix =
GLKMatrixStackGetMatrix4(self.modelviewMatrixStack);
//draw moon
[self.baseEffect prepareToDraw];
glBindVertexArrayOES(_vertexArray);
glDrawArrays(GL_TRIANGLES, 0, sphereNumVerts);
GLKMatrixStackPop(self.modelviewMatrixStack);
self.baseEffect.transform.modelviewMatrix =
GLKMatrixStackGetMatrix4(self.modelviewMatrixStack);
}
To do multiple textures you will need to do:
effect.texture2d0.name = firstTexture.name;
[effect prepareToDraw];
[self renderFirstObject];
effect.texture2d0.name = secondTexture.name;
[effect prepareToDraw];
[self renderSecondObject];
or something similar. If you have lots of objects, I recommend using texture atlases and then doing batch rendering using:
glDrawElements(GL_TRIANGLES, totalIndicies, GL_UNSIGNED_SHORT, indices);
I tried to use glDrawArrays for every single object and the framerate of my app dipped to around 10fps.
In your code, the reason it was using one texture for all objects is that you never changed effect.texture2d0.name to the texture you need before drawing each object. If I were to change your code, it would be:
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect{
static float a = 0;
a = a+0.1;
//NSLog(@"a : %f",a);
self.baseEffect.transform.modelviewMatrix = GLKMatrix4MakeLookAt(
0, 9.8, 10.0, // Eye position
0.0, 1.0, 0.0, // Look-at position
0.0, 1.0, 0.0); // Up direction
const GLfloat aspectRatio = (GLfloat)view.drawableWidth / (GLfloat)view.drawableHeight;
self.baseEffect.transform.projectionMatrix =
GLKMatrix4MakePerspective(GLKMathDegreesToRadians(35.0f),aspectRatio,0.2f,200.0f); // Far arbitrarily far enough to contain scene
self.baseEffect.light0.position = GLKVector4Make(0.6f, 1.0f, 0.4f, 0.0f);
[self.baseEffect prepareToDraw];
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
self.baseEffect.texture2d0.name = textureInfo.name;
[self.baseEffect prepareToDraw];
[self drawPhysicsSphereObjects];
self.baseEffect.texture2d0.name = textureInfo1.name;
[self.baseEffect prepareToDraw];
[self drawPhysicsBoxObjects];
//[self drawPhysicsCylinderObjects];
}
Of course this simplifies things, and it leaves out the per-object vertex attribute array setup.
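For completeness, a hedged sketch of that per-object setup, reusing the names from the question above (it assumes the mesh arrays and texture infos are still in scope, and that the attribute arrays are already enabled):
// Point the attribute arrays at the sphere data and use the basketball texture...
glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), newbasketballVerts);
glVertexAttribPointer(GLKVertexAttribNormal, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), newbasketballNormals);
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), newbasketballTexCoords);
self.baseEffect.texture2d0.name = textureInfo.name;
[self drawPhysicsSphereObjects];
// ...then re-point them at the box mesh and switch to the box texture.
glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), final_meshVerts);
glVertexAttribPointer(GLKVertexAttribNormal, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), final_meshNormals);
glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), final_meshTexCoords);
self.baseEffect.texture2d0.name = textureInfo1.name;
[self drawPhysicsBoxObjects];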
One thing I did for this problem is that I made one single image with all the textures in it (a texture atlas), so now I give only one texture to my GLKBaseEffect object.
But if anyone has an answer for multiple objects with multiple textures with the help of GLKit, please let me know...
Thank you.
One solution would be to separate your drawing calls so that first you draw all objects that use texture A, then all objects that use texture B and so on.
There is also the texture atlas alternative described here: https://stackoverflow.com/a/8230592/64167.
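If you go the atlas route, the per-object work is just remapping each object's 0..1 UVs into that object's cell of the atlas. A minimal sketch follows; the cell values and destination buffer names are hypothetical, not from the code above:
// A cell is the sub-rectangle of the atlas this object's image occupies, in 0..1 atlas space.
typedef struct { float u0, v0, u1, v1; } AtlasCell;
static void RemapUVsIntoAtlas(const float *srcUV, float *dstUV, int vertexCount, AtlasCell cell) {
    for (int i = 0; i < vertexCount; i++) {
        dstUV[2 * i] = cell.u0 + srcUV[2 * i] * (cell.u1 - cell.u0);
        dstUV[2 * i + 1] = cell.v0 + srcUV[2 * i + 1] * (cell.v1 - cell.v0);
    }
}
// e.g. RemapUVsIntoAtlas(final_meshTexCoords, boxAtlasUVs, final_meshNumVerts, boxCell);
// then bind the single atlas texture once and draw every object without switching textures.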
I am playing around with learning more OpenGL ES and I may have a way to do this.
In my case I have N quads, each with an individual texture. In drawInRect:, for each quad I want to draw, I set the new texture properties on the baseEffect, call prepareToDraw on the base effect and on the quad, and then render the quad.
Here is some pseudocode for what I mean:
for (int i = 0; i < quads.count; i++) {
baseEffect.texture2d0.name = textureInfo[i].name;
baseEffect.texture2d0.target = textureInfo[i].target;
[baseEffect prepareToDraw];
[quads[i] prepareToDraw];
glDrawArrays(GL_TRIANGLES, 0, 4);
}
This is working ok for me so far.
I'm trying to hack the generic Xcode iOS OpenGL Game template to draw two vertex buffer objects and render them with different GLSL shaders.
I think I'm rendering the two VBOs correctly, because I see them both when running both VBOs through the first shader program. However, my second shader does not appear to be rendering my second object at all.
Here is the vertex data for the two squares:
GLfloat gCubeVertexData[36] =
{
// Data layout for each line below is:
// positionX, positionY, positionZ, normalX, normalY, normalZ,
0.5f, 0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
-0.5f, 0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
0.5f, -0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
0.5f, -0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
-0.5f, 0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
-0.5f, -0.5f, 0.5f, 0.0f, 0.0f, 1.0f
};
GLfloat fooVertexData[36] =
{
// Data layout for each line below is:
// positionX, positionY, positionZ, normalX, normalY, normalZ
0.5f, 0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
-0.5f, 0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
0.5f, -0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
0.5f, -0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
-0.5f, 0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
-0.5f, -0.5f, 0.5f, 0.0f, 0.0f, 1.0f
};
Here is where I am trying to generate two VBOs and bind them to the data. I'm not sure what the purpose of the glBindVertexArrayOES(0) at the end is, though:
- (void)setupGL
{
[EAGLContext setCurrentContext:self.context];
[self loadShaders];
//---- First Vertex Array Object --------
glGenVertexArraysOES(1, &_vertexArray1);
glGenBuffers(1, &_vertexBuffer1);
glBindVertexArrayOES(_vertexArray1);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer1);
glBufferData(GL_ARRAY_BUFFER, sizeof(gCubeVertexData), gCubeVertexData, GL_STATIC_DRAW);
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, 24, BUFFER_OFFSET(0));
glEnableVertexAttribArray(GLKVertexAttribNormal);
glVertexAttribPointer(GLKVertexAttribNormal, 3, GL_FLOAT, GL_FALSE, 24, BUFFER_OFFSET(12));
//----- Second Vertex Array Object ----------
glGenVertexArraysOES(1, &_vertexArray2);
glGenBuffers(1, &_vertexBuffer2);
glBindVertexArrayOES(_vertexArray2);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer2);
glBufferData(GL_ARRAY_BUFFER, sizeof(fooVertexData), fooVertexData, GL_STATIC_DRAW);
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, 24, BUFFER_OFFSET(0));
glEnableVertexAttribArray(GLKVertexAttribNormal);
glVertexAttribPointer(GLKVertexAttribNormal, 3, GL_FLOAT, GL_FALSE, 24, BUFFER_OFFSET(12));
glBindBuffer(GL_ARRAY_BUFFER,0);
glBindVertexArrayOES(0);
}
I'm using this update code to animate the model-view-projection matrices:
- (void)update
{
_rotation += self.timeSinceLastUpdate * 0.2f;
float aspect = fabsf(self.view.bounds.size.width / self.view.bounds.size.height);
GLKMatrix4 projectionMatrix = GLKMatrix4MakeOrtho(-1.0f, 1.0f, -1.0f / aspect, 1.0f / aspect, -10.0f, 10.0f);
GLKMatrix4 modelViewMatrix = GLKMatrix4MakeTranslation(0.5f, 0.0f, 0.0f);
modelViewMatrix = GLKMatrix4Multiply(modelViewMatrix, GLKMatrix4MakeZRotation(0.0 - _rotation));
_modelViewProjectionMatrix = GLKMatrix4Multiply(projectionMatrix, modelViewMatrix);
GLKMatrix4 modelViewMatrix2 = GLKMatrix4MakeTranslation(-0.5f, 0.0f, 0.0f);
modelViewMatrix2 = GLKMatrix4Multiply(modelViewMatrix2, GLKMatrix4MakeZRotation(_rotation));
_modelViewProjectionMatrix2 = GLKMatrix4Multiply(projectionMatrix, modelViewMatrix2);
}
When I call the '_program2' shader I don't see the second square:
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
{
glClearColor(0.65f, 0.65f, 0.65f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArrayOES(_vertexArray1);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer1);
glUseProgram(_program);
glUniformMatrix4fv(uniforms[UNIFORM_MODELVIEWPROJECTION_MATRIX], 1, 0, _modelViewProjectionMatrix.m);
glDrawArrays(GL_TRIANGLES, 0, 6);
///////// second object and shader program:
glBindVertexArrayOES(_vertexArray2);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer2);
glUseProgram(_program2);
glUniformMatrix4fv(uniforms[UNIFORM_MODELVIEWPROJECTION_MATRIX2], 1, 0, _modelViewProjectionMatrix2.m);
glDrawArrays(GL_TRIANGLES, 0, 6);
}
I've basically tried duplicating the code that loads the first shader in order to load the second. I suspect I may be doing something wrong here, but I'm not sure what:
- (BOOL)loadShaders
{
GLuint vertShader, fragShader, vertShader2, fragShader2;
NSString *vertShaderPathname, *fragShaderPathname, *vertShaderPathname2, *fragShaderPathname2;
// Create shader program.
_program = glCreateProgram();
// Create and compile vertex shader.
vertShaderPathname = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"vsh"];
if (![self compileShader:&vertShader type:GL_VERTEX_SHADER file:vertShaderPathname]) {
NSLog(@"Failed to compile vertex shader");
return NO;
}
// Create and compile fragment shader.
fragShaderPathname = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"fsh"];
if (![self compileShader:&fragShader type:GL_FRAGMENT_SHADER file:fragShaderPathname]) {
NSLog(@"Failed to compile fragment shader");
return NO;
}
// Attach vertex shader to program.
glAttachShader(_program, vertShader);
// Attach fragment shader to program.
glAttachShader(_program, fragShader);
// Bind attribute locations.
// This needs to be done prior to linking.
glBindAttribLocation(_program, ATTRIB_VERTEX, "position");
// Link program.
if (![self linkProgram:_program]) {
NSLog(@"Failed to link program: %d", _program);
if (vertShader) {
glDeleteShader(vertShader);
vertShader = 0;
}
if (fragShader) {
glDeleteShader(fragShader);
fragShader = 0;
}
if (_program) {
glDeleteProgram(_program);
_program = 0;
}
return NO;
}
// Get uniform locations.
uniforms[UNIFORM_MODELVIEWPROJECTION_MATRIX] = glGetUniformLocation(_program, "modelViewProjectionMatrix");
// Release vertex and fragment shaders.
if (vertShader) {
glDetachShader(_program, vertShader);
glDeleteShader(vertShader);
}
if (fragShader) {
glDetachShader(_program, fragShader);
glDeleteShader(fragShader);
}
///////////////// the second shader:
_program2 = glCreateProgram();
vertShaderPathname2 = [[NSBundle mainBundle] pathForResource:@"Shader2" ofType:@"vsh"];
if (![self compileShader:&vertShader2 type:GL_VERTEX_SHADER file:vertShaderPathname2]) {
NSLog(@"Failed to compile vertex shader2");
return NO;
}
fragShaderPathname2 = [[NSBundle mainBundle] pathForResource:@"Shader2" ofType:@"fsh"];
if (![self compileShader:&fragShader2 type:GL_FRAGMENT_SHADER file:fragShaderPathname2]) {
NSLog(@"Failed to compile fragment shader2");
return NO;
}
glAttachShader(_program2, vertShader2);
glAttachShader(_program2, fragShader2);
glBindAttribLocation(_program2, ATTRIB_VERTEX2, "position2");
if (![self linkProgram:_program2]) {
NSLog(@"Failed to link program: %d", _program2);
if (vertShader2) {
glDeleteShader(vertShader2);
vertShader2 = 0;
}
if (fragShader2) {
glDeleteShader(fragShader2);
fragShader2 = 0;
}
if (_program2) {
glDeleteProgram(_program2);
_program2 = 0;
}
return NO;
}
uniforms[UNIFORM_MODELVIEWPROJECTION_MATRIX2] = glGetUniformLocation(_program2, "modelViewProjectionMatrix2");
if (vertShader2) {
glDetachShader(_program2, vertShader2);
glDeleteShader(vertShader2);
}
if (fragShader2) {
glDetachShader(_program2, fragShader2);
glDeleteShader(fragShader2);
}
return YES;
}
- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type file:(NSString *)file
{
GLint status;
const GLchar *source;
source = (GLchar *)[[NSString stringWithContentsOfFile:file encoding:NSUTF8StringEncoding error:nil] UTF8String];
if (!source) {
NSLog(@"Failed to load vertex shader");
return NO;
}
*shader = glCreateShader(type);
glShaderSource(*shader, 1, &source, NULL);
glCompileShader(*shader);
#if defined(DEBUG)
GLint logLength;
glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar *log = (GLchar *)malloc(logLength);
glGetShaderInfoLog(*shader, logLength, &logLength, log);
NSLog(@"Shader compile log:\n%s", log);
free(log);
}
#endif
glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
if (status == 0) {
glDeleteShader(*shader);
return NO;
}
return YES;
}
- (BOOL)linkProgram:(GLuint)prog
{
GLint status;
glLinkProgram(prog);
#if defined(DEBUG)
GLint logLength;
glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar *log = (GLchar *)malloc(logLength);
glGetProgramInfoLog(prog, logLength, &logLength, log);
NSLog(@"Program link log:\n%s", log);
free(log);
}
#endif
glGetProgramiv(prog, GL_LINK_STATUS, &status);
if (status == 0) {
return NO;
}
return YES;
}
- (BOOL)validateProgram:(GLuint)prog
{
GLint logLength, status;
glValidateProgram(prog);
glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar *log = (GLchar *)malloc(logLength);
glGetProgramInfoLog(prog, logLength, &logLength, log);
NSLog(@"Program validate log:\n%s", log);
free(log);
}
glGetProgramiv(prog, GL_VALIDATE_STATUS, &status);
if (status == 0) {
return NO;
}
return YES;
}
My vert and fragment shaders are simple:
// vert shader1:
attribute vec4 position;
uniform mat4 modelViewProjectionMatrix;
void main()
{
gl_Position = modelViewProjectionMatrix * position;
}
// vert shader2:
attribute vec4 position2;
uniform mat4 modelViewProjectionMatrix2;
void main()
{
gl_Position = modelViewProjectionMatrix2 * position2;
}
// frag shader(s):
void main()
{
gl_FragColor = vec4(0.12,0.32,0.54,1.0);
}
The most important thing to remember with OpenGL ES is that it is a state machine driven through a procedural C API, even though you're calling it from an OOP language.
Only one vertex array can be bound at a time. If you bind two vertex arrays one after the other and then apply transformations, only the last vertex array bound is affected.
In your main loop you have to iterate through your vertex arrays: bind each one in turn, then carry out any transformations and issue its draw call.
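In practice that per-object loop looks something like this; a sketch only, where vertexArrays, programs, mvpUniforms, mvpMatrices and vertexCounts are assumed names rather than variables from the code above:
for (int i = 0; i < objectCount; i++) {
    glBindVertexArrayOES(vertexArrays[i]);                      // this object's VAO (attribute setup)
    glUseProgram(programs[i]);                                  // this object's shader program
    glUniformMatrix4fv(mvpUniforms[i], 1, 0, mvpMatrices[i].m); // its model-view-projection matrix
    glDrawArrays(GL_TRIANGLES, 0, vertexCounts[i]);
}
glBindVertexArrayOES(0);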
The main problem was where I bound the 'position' attribute location for the second vertex shader. I had been using a 'separate' ATTRIB_VERTEX2 in my enum. Once I bound the 'position' attribute location to ATTRIB_VERTEX I was able to see the second VBO with the other shader program applied... Here is the code tidied up for anyone with the same problem/question:
// Uniform index.
enum
{
UNIFORM_MODELVIEWPROJECTION_MATRIX,
UNIFORM_MODELVIEWPROJECTION_MATRIX2,
NUM_UNIFORMS
};
GLint uniforms[NUM_UNIFORMS];
// Attribute index.
enum
{
ATTRIB_VERTEX,
NUM_ATTRIBUTES
};
GLfloat square1Data[18] =
{
// Data layout for each line below is:
// positionX, positionY, positionZ
0.5f, 0.5f, 0.5f,
-0.5f, 0.5f, 0.5f,
0.5f, -0.5f, 0.5f,
0.5f, -0.5f, 0.5f,
-0.5f, 0.5f, 0.5f,
-0.5f, -0.5f, 0.5f
};
GLfloat square2Data[18] =
{
// Data layout for each line below is:
// positionX, positionY, positionZ
0.5f, 0.5f, 0.5f,
-0.5f, 0.5f, 0.5f,
0.5f, -0.5f, 0.5f,
0.5f, -0.5f, 0.5f,
-0.5f, 0.5f, 0.5f,
-0.5f, -0.5f, 0.5f
};
- (void)setupGL
{
[EAGLContext setCurrentContext:self.context];
[self loadShaders];
//---- First Vertex Array Object --------
glGenVertexArraysOES(1, &_vertexArray1);
glGenBuffers(1, &_vertexBuffer1);
glBindVertexArrayOES(_vertexArray1);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer1);
glBufferData(GL_ARRAY_BUFFER, sizeof(square1Data), square1Data, GL_STATIC_DRAW);
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, 12, BUFFER_OFFSET(0));
// glEnableVertexAttribArray(GLKVertexAttribNormal);
// glVertexAttribPointer(GLKVertexAttribNormal, 3, GL_FLOAT, GL_FALSE, 24, BUFFER_OFFSET(12));
//----- Second Vertex Array Object ----------
glGenBuffers(1, &_vertexBuffer2);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer2);
glBufferData(GL_ARRAY_BUFFER, sizeof(square2Data), square2Data, GL_STATIC_DRAW);
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, 12, BUFFER_OFFSET(0));
// glEnableVertexAttribArray(GLKVertexAttribNormal);
// glVertexAttribPointer(GLKVertexAttribNormal, 3, GL_FLOAT, GL_FALSE, 24, BUFFER_OFFSET(12));
glBindBuffer(GL_ARRAY_BUFFER,0);
glBindVertexArrayOES(0);
}
- (void)update
{
_rotation += self.timeSinceLastUpdate * 0.2f;
float aspect = fabsf(self.view.bounds.size.width / self.view.bounds.size.height);
GLKMatrix4 projectionMatrix = GLKMatrix4MakeOrtho(-1.0f, 1.0f, -1.0f / aspect, 1.0f / aspect, -10.0f, 10.0f);
GLKMatrix4 modelViewMatrix = GLKMatrix4MakeTranslation(0.5f, 0.0f, 0.0f);
modelViewMatrix = GLKMatrix4Multiply(modelViewMatrix, GLKMatrix4MakeZRotation(0.0 - _rotation));
_modelViewProjectionMatrix = GLKMatrix4Multiply(projectionMatrix, modelViewMatrix);
GLKMatrix4 modelViewMatrix2 = GLKMatrix4MakeTranslation(-0.5f, 0.0f, 0.0f);
modelViewMatrix2 = GLKMatrix4Multiply(modelViewMatrix2, GLKMatrix4MakeZRotation(_rotation));
_modelViewProjectionMatrix2 = GLKMatrix4Multiply(projectionMatrix, modelViewMatrix2);
}
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
{
glClearColor(0.65f, 0.65f, 0.65f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArrayOES(_vertexArray1);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer1);
glUseProgram(_program);
glUniformMatrix4fv(uniforms[UNIFORM_MODELVIEWPROJECTION_MATRIX], 1, 0, _modelViewProjectionMatrix.m);
glDrawArrays(GL_TRIANGLES, 0, 6);
///////// second VBO and shader program:
glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer2);
glUseProgram(_program2);
glUniformMatrix4fv(uniforms[UNIFORM_MODELVIEWPROJECTION_MATRIX2], 1, 0, _modelViewProjectionMatrix2.m);
glDrawArrays(GL_TRIANGLES, 0, 6);
}
- (BOOL)loadShaders
{
GLuint vertShader, fragShader, vertShader2, fragShader2;
NSString *vertShaderPathname, *fragShaderPathname, *vertShaderPathname2, *fragShaderPathname2;
// Create shader program.
_program = glCreateProgram();
_program2 = glCreateProgram();
// Create and compile vertex shader.
vertShaderPathname = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"vsh"];
if (![self compileShader:&vertShader type:GL_VERTEX_SHADER file:vertShaderPathname]) {
NSLog(@"Failed to compile vertex shader");
return NO;
}
// Create and compile fragment shader.
fragShaderPathname = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"fsh"];
if (![self compileShader:&fragShader type:GL_FRAGMENT_SHADER file:fragShaderPathname]) {
NSLog(@"Failed to compile fragment shader");
return NO;
}
// Create and compile vertex shader.
vertShaderPathname2 = [[NSBundle mainBundle] pathForResource:@"Shader2" ofType:@"vsh"];
if (![self compileShader:&vertShader2 type:GL_VERTEX_SHADER file:vertShaderPathname2]) {
NSLog(@"Failed to compile vertex shader");
return NO;
}
// Create and compile fragment shader.
fragShaderPathname2 = [[NSBundle mainBundle] pathForResource:@"Shader2" ofType:@"fsh"];
if (![self compileShader:&fragShader2 type:GL_FRAGMENT_SHADER file:fragShaderPathname2]) {
NSLog(@"Failed to compile fragment shader");
return NO;
}
// Attach vertex shader to program.
glAttachShader(_program, vertShader);
glAttachShader(_program2, vertShader2);
// Attach fragment shader to program.
glAttachShader(_program, fragShader);
glAttachShader(_program2, fragShader2);
// Bind attribute locations.
// This needs to be done prior to linking.
glBindAttribLocation(_program, ATTRIB_VERTEX, "position");
glBindAttribLocation(_program2, ATTRIB_VERTEX, "position");
// Link program.
if (![self linkProgram:_program]) {
NSLog(@"Failed to link program: %d", _program);
if (vertShader) {
glDeleteShader(vertShader);
vertShader = 0;
}
if (fragShader) {
glDeleteShader(fragShader);
fragShader = 0;
}
if (_program) {
glDeleteProgram(_program);
_program = 0;
}
return NO;
}
if (![self linkProgram:_program2]) {
NSLog(@"Failed to link program: %d", _program2);
if (vertShader2) {
glDeleteShader(vertShader2);
vertShader2 = 0;
}
if (fragShader2) {
glDeleteShader(fragShader2);
fragShader2 = 0;
}
if (_program2) {
glDeleteProgram(_program2);
_program2 = 0;
}
return NO;
}
// Get uniform locations.
uniforms[UNIFORM_MODELVIEWPROJECTION_MATRIX] = glGetUniformLocation(_program, "modelViewProjectionMatrix");
uniforms[UNIFORM_MODELVIEWPROJECTION_MATRIX2] = glGetUniformLocation(_program2, "modelViewProjectionMatrix2");
// Release vertex and fragment shaders.
if (vertShader) {
glDetachShader(_program, vertShader);
glDeleteShader(vertShader);
}
if (fragShader) {
glDetachShader(_program, fragShader);
glDeleteShader(fragShader);
}
if (vertShader2) {
glDetachShader(_program2, vertShader2);
glDeleteShader(vertShader2);
}
if (fragShader2) {
glDetachShader(_program2, fragShader2);
glDeleteShader(fragShader2);
}
return YES;
}