I'm learning OpenGL on iOS by following this guide, and I want to implement everything in Swift. Here is the code where I'm getting a crash:
Memory structures:
private struct Vertex {
    var Position: (GLfloat, GLfloat, GLfloat)
    var Color: (GLfloat, GLfloat, GLfloat, GLfloat)
}

private static var Vertices = [
    Vertex(Position: (1, -1, 0) , Color: (1, 0, 0, 1)),
    Vertex(Position: (1, 1, 0) , Color: (0, 1, 0, 1)),
    Vertex(Position: (-1, 1, 0) , Color: (0, 0, 1, 1)),
    Vertex(Position: (-1, -1, 0), Color: (0, 0, 0, 1))
]

private static var Indices: [GLubyte] = [
    0, 1, 2,
    2, 3, 0
]
Create vertex buffers:
var vertexBuffer = GLuint()
glGenBuffers(1, &vertexBuffer)
glBindBuffer(GLenum(GL_ARRAY_BUFFER), vertexBuffer)
glBufferData(GLenum(GL_ARRAY_BUFFER), Vertices.size, Vertices, GLenum(GL_STATIC_DRAW))
var indexBuffer = GLuint()
glGenBuffers(1, &indexBuffer)
glBindBuffer(GLenum(GL_ELEMENT_ARRAY_BUFFER), indexBuffer)
glBufferData(GLenum(GL_ELEMENT_ARRAY_BUFFER), Indices.size, Indices, GLenum(GL_STATIC_DRAW))
Set up memory offsets:
var positionPtr = 0
glVertexAttribPointer(GLuint(positionSlot), 3, GLenum(GL_FLOAT), GLboolean(GL_FALSE), GLsizei(strideofValue(Vertex)), &positionPtr)
var colorPtr = strideof(GLfloat) * 3
glVertexAttribPointer(GLuint(colorSlot), 4, GLenum(GL_FLOAT), GLboolean(GL_FALSE), GLsizei(strideofValue(Vertex)), &colorPtr)
Crash (Trying to draw):
var startPtr = 0
// EXC_BAD_ACCESS code=1 here!
glDrawElements(GLenum(GL_TRIANGLES), GLsizei(Indices.count / 3), GLenum(GL_UNSIGNED_BYTE), &startPtr)
All shaders compile without errors and glClear() draws fine, so I suppose my problem is related to the VBOs.
And here is how I calculate the size of the arrays:
extension Array
{
    var size: Int {
        get { return self.count * strideof(Element) }
    }
}
UPD: I'm using OpenGLES 2.0.
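For comparison, here is a minimal sketch (not from the original post) of how the same buffer uploads, attribute setup, and draw call are commonly written in current Swift syntax, where MemoryLayout and UnsafeRawPointer replace strideof and raw variable addresses. When a VBO and index buffer are bound, the last argument of glVertexAttribPointer and glDrawElements is a byte offset into the bound buffer rather than the address of a Swift variable, and glDrawElements takes the number of indices rather than Indices.count / 3. The names positionSlot and colorSlot are assumed to be the attribute locations from the post.

// Sketch in current Swift syntax; Vertex, Vertices, Indices as defined above.
var vertexBuffer: GLuint = 0
glGenBuffers(1, &vertexBuffer)
glBindBuffer(GLenum(GL_ARRAY_BUFFER), vertexBuffer)
glBufferData(GLenum(GL_ARRAY_BUFFER), Vertices.count * MemoryLayout<Vertex>.stride, Vertices, GLenum(GL_STATIC_DRAW))

var indexBuffer: GLuint = 0
glGenBuffers(1, &indexBuffer)
glBindBuffer(GLenum(GL_ELEMENT_ARRAY_BUFFER), indexBuffer)
glBufferData(GLenum(GL_ELEMENT_ARRAY_BUFFER), Indices.count * MemoryLayout<GLubyte>.stride, Indices, GLenum(GL_STATIC_DRAW))

let stride = GLsizei(MemoryLayout<Vertex>.stride)
glEnableVertexAttribArray(GLuint(positionSlot))
// Offset 0 into the bound VBO: pass nil, not a pointer to a Swift variable.
glVertexAttribPointer(GLuint(positionSlot), 3, GLenum(GL_FLOAT), GLboolean(GL_FALSE), stride, nil)
glEnableVertexAttribArray(GLuint(colorSlot))
// Color starts after the 3 position floats, i.e. at byte offset 12.
glVertexAttribPointer(GLuint(colorSlot), 4, GLenum(GL_FLOAT), GLboolean(GL_FALSE), stride,
                      UnsafeRawPointer(bitPattern: MemoryLayout<GLfloat>.stride * 3))

// One count per index, and nil means "byte offset 0 into the bound index buffer".
glDrawElements(GLenum(GL_TRIANGLES), GLsizei(Indices.count), GLenum(GL_UNSIGNED_BYTE), nil)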
I learned from your guide about 4 months ago. I converted it from Objective-C to Swift up to the point of drawing the rectangle shown in the picture below.
Now I have run it again and converted it to Swift 2.1. It still works and shows the same image below.
Here is my code (the setupVBOs and render methods, and the struct):
// Keeps track of all our per-vertex information (currently just color and position)
struct Vertex {
    var Position: (CFloat, CFloat, CFloat)
    var Color: (CFloat, CFloat, CFloat, CFloat)
}

// Array with all the info for each vertex
var Vertices = [
    Vertex(Position: (1, -1, 0) , Color: (1, 0, 0, 1)),
    Vertex(Position: (1, 1, 0) , Color: (0, 1, 0, 1)),
    Vertex(Position: (-1, 1, 0) , Color: (0, 0, 1, 1)),
    Vertex(Position: (-1, -1, 0), Color: (0, 0, 0, 1))
]

// Array that gives a list of triangles to create, by specifying the 3 vertices that make up each triangle
var Indices: [GLubyte] = [
    0, 1, 2,
    2, 3, 0
]

// helper extensions to pass arguments to GL land
extension Array {
    func size () -> Int {
        return self.count * sizeofValue(self[0])
    }
}
// The best way to send data to OpenGL is through something called Vertex Buffer Objects.
func setupVBOs() { // VBO : Vertex Buffer Objects.
    // There are two types of vertex buffer objects – one to keep track of the per-vertex data (like we have in the Vertices array), and one to keep track of the indices that make up triangles (like we have in the Indices array).
    glGenBuffers(1, &vertexBuffer)
    glBindBuffer(GLenum(GL_ARRAY_BUFFER), vertexBuffer)
    glBufferData(GLenum(GL_ARRAY_BUFFER), Vertices.count * sizeofValue(Vertices[0]), Vertices, GLenum(GL_STATIC_DRAW)) // send the data over to OpenGL-land.

    glGenBuffers(1, &indexBuffer)
    glBindBuffer(GLenum(GL_ELEMENT_ARRAY_BUFFER), indexBuffer)
    glBufferData(GLenum(GL_ELEMENT_ARRAY_BUFFER), Indices.count * sizeofValue(Indices[0]), Indices, GLenum(GL_STATIC_DRAW))
}
func render() {
    glClearColor(0, 104.0/255.0, 55.0/255.0, 1.0)
    glClear(GLbitfield(GL_COLOR_BUFFER_BIT))

    //glViewport(0, 0, GLint(frame.size.width), GLint(frame.size.height))
    glViewport(0, GLint(frame.size.height/2)/2, GLint(frame.size.width), GLint(frame.size.height/2))

    // feed the correct values to the two input variables for the vertex shader – the Position and SourceColor attributes.
    glVertexAttribPointer(positionSlot, 3, GLenum(GL_FLOAT), GLboolean(UInt8(GL_FALSE)), GLsizei(sizeof(Vertex)), nil)
    glVertexAttribPointer(colorSlot, 4, GLenum(GL_FLOAT), GLboolean(UInt8(GL_FALSE)), GLsizei(sizeof(Vertex)), UnsafePointer<Int>(bitPattern: sizeof(Float) * 3))

    // This actually ends up calling your vertex shader for every vertex you pass in, and then the fragment shader on each pixel to display on the screen.
    glDrawElements(GLenum(GL_TRIANGLES), GLsizei(Indices.count), GLenum(GL_UNSIGNED_BYTE), nil)

    _context.presentRenderbuffer(Int(GL_RENDERBUFFER))
}
Related
I'm trying to read an int/short from data received over a WebSocket, but something is wrong: when I convert from Uint8List -> ByteData -> Uint8List, two extra uint8 values appear in my array. Can anyone suggest the correct way to get an int from a byte array? (It's big-endian; my Swift code and the Dart code that writes the data are both correct.) Thanks to anyone reading this.
I am using 'dart:typed_data' and get the data from a WebSocket (dart:io).
print(responseData); // UInt8List: [0, 1, 0, 1, 0, 1, 49]
var byteData = responseData.buffer.asByteData();
var array = byteData.buffer.asUint8List();
print(array); // UInt8List: [130, 7, 0, 1, 0, 1, 0, 1, 49]
var shortValue = responseData.buffer.asByteData().getInt16(0);
print(shortValue); // -32249 ( 2 first byte: [0 ,1] so it must be 1 )
There's something else going on, because your code does not add any extra bytes - and actually, it doesn't use array.
This code:
import 'dart:typed_data';
void main() {
  Uint8List responseData = Uint8List.fromList([0, 1, 0, 1, 0, 1, 49]);
  print(responseData); // UInt8List: [0, 1, 0, 1, 0, 1, 49]

  var byteData = responseData.buffer.asByteData();
  //var array = byteData.buffer.asUint8List();
  //print(array); // UInt8List: [130, 7, 0, 1, 0, 1, 0, 1, 49]

  var shortValue = responseData.buffer.asByteData().getInt16(0);
  print(shortValue); // -32249 ( 2 first byte: [0 ,1] so it must be 1 )
}
prints (as expected)
[0, 1, 0, 1, 0, 1, 49]
1
EDIT - as suggested in the comment, the Uint8List you have is in fact a view on a ByteBuffer with a non-zero offset. So, responseData.buffer is that underlying buffer, which includes additional bytes. The simplest solution is to make a copy of the view.
import 'dart:typed_data';
void main() {
  Uint8List original = Uint8List.fromList([130, 7, 0, 1, 0, 1, 0, 1, 49]);
  print(original);

  Uint8List view = Uint8List.view(original.buffer, 2);
  print(view);
  print(view.buffer.lengthInBytes); // prints 9
  print(view.buffer.asByteData().getUint16(0)); // unexpected result

  Uint8List copy = Uint8List.fromList(view);
  print(copy.buffer.lengthInBytes); // prints 7
  print(copy.buffer.asByteData().getUint16(0)); // expected result
}
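Since the question mentions checking the values on the Swift side as well, here is a minimal sketch (mine, not from the thread) of reading a big-endian 16-bit value from a plain byte array in Swift:

// Read a big-endian Int16 from the first two bytes of a byte array.
func int16BigEndian(_ bytes: [UInt8]) -> Int16 {
    let raw = (UInt16(bytes[0]) << 8) | UInt16(bytes[1])
    return Int16(bitPattern: raw)
}

let payload: [UInt8] = [0, 1, 0, 1, 0, 1, 49]
print(int16BigEndian(payload)) // 1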
I'm working with OpenGL ES 2.0 and Swift. I finally got a texture to display, but only one pixel of it is shown!
Can anyone help with this?
My simple vertex shader:
attribute vec2 TexCoordIn;
varying vec2 TexCoordOut;

void main(void) {
    gl_Position = Position;
    TexCoordOut = TexCoordIn;
}
And fragment shader:
varying lowp vec2 TexCoordOut;
uniform sampler2D Texture;

void main(void) {
    gl_FragColor = texture2D(Texture, TexCoordOut);
}
My view controller:
// My Vertex, Vertices and Indices
struct Vertex {
    var Position: (CFloat, CFloat, CFloat)
    var Color: (CFloat, CFloat, CFloat, CFloat)
    var TexCoord: (CFloat, CFloat)
}

var Vertices = [
    Vertex(Position: (1, -1, 0) , Color: (1, 0, 0, 1), TexCoord: (1, 0)),
    Vertex(Position: (1, 1, 0) , Color: (0, 1, 0, 1), TexCoord: (1, 1)),
    Vertex(Position: (-1, 1, 0) , Color: (0, 0, 1, 1), TexCoord: (0, 1)),
    Vertex(Position: (-1, -1, 0), Color: (0, 0, 0, 1), TexCoord: (0, 0))
]

var Indices: [GLubyte] = [
    0, 1, 2,
    2, 3, 0
]

func compileShaders() {
    var vertexShader: GLuint = self.compileShader("Vertex", shaderType: GLenum(GL_VERTEX_SHADER))
    var fragmentShader: GLuint = self.compileShader("Fragment", shaderType: GLenum(GL_FRAGMENT_SHADER))

    var programHandle: GLuint = glCreateProgram()
    glAttachShader(programHandle, vertexShader)
    glAttachShader(programHandle, fragmentShader)
    glLinkProgram(programHandle)
    glUseProgram(programHandle)

    glEnableVertexAttribArray(self.positionSlot)
    glEnableVertexAttribArray(self.colorSlot)

    texCoordSlot = GLuint(glGetAttribLocation(programHandle, "TexCoordIn"))
    glEnableVertexAttribArray(texCoordSlot)
    let pointer = UnsafePointer<Int>(bitPattern: sizeof(Float) * 7)
    glVertexAttribPointer(texCoordSlot, 2, GLenum(GL_FLOAT), GLboolean(UInt8(GL_FALSE)), GLsizei(sizeof(Vertex)), pointer)

    textureUniform = GLuint(glGetUniformLocation(programHandle, "Texture"))
}

func setupVBOs() {
    glGenVertexArraysOES(1, &VAO);
    glBindVertexArrayOES(VAO);

    glGenBuffers(1, &vertexBuffer)
    glBindBuffer(GLenum(GL_ARRAY_BUFFER), vertexBuffer)
    glBufferData(GLenum(GL_ARRAY_BUFFER), Vertices.size(), Vertices, GLenum(GL_STATIC_DRAW))

    glEnableVertexAttribArray(positionSlot)
    glVertexAttribPointer(positionSlot, 3, GLenum(GL_FLOAT), GLboolean(UInt8(GL_FALSE)), GLsizei(sizeof(Vertex)), nil)

    glGenBuffers(1, &indexBuffer)
    glBindBuffer(GLenum(GL_ELEMENT_ARRAY_BUFFER), indexBuffer)
    glBufferData(GLenum(GL_ELEMENT_ARRAY_BUFFER), Indices.size(), Indices, GLenum(GL_STATIC_DRAW))

    glBindBuffer(GLenum(GL_ARRAY_BUFFER), 0)
    glBindVertexArrayOES(0)
}

func render() {
    glBindVertexArrayOES(VAO);

    glActiveTexture(GLenum(GL_TEXTURE0))
    glBindTexture(GLenum(GL_TEXTURE_2D), floorTexture)
    glUniform1i(GLint(textureUniform), GLint(0))

    glDrawElements(GLenum(GL_TRIANGLES), GLsizei(Indices.count), GLenum(GL_UNSIGNED_BYTE), nil)

    glContext!.presentRenderbuffer(Int(GL_RENDERBUFFER))
    glBindVertexArrayOES(0)
}
I learned the foundations from the great Ray Wenderlich tutorial, but I could not make it work in Swift.
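One detail worth checking (a guess based on the code above, not a confirmed fix): the TexCoordIn attribute pointer is set in compileShaders(), before any vertex buffer is bound and outside the VAO recorded in setupVBOs(), so the texture coordinates may not be sourced from the VBO at all. A sketch of setupVBOs() with all attribute pointers configured while the VAO and GL_ARRAY_BUFFER are still bound, reusing the post's own calls (the color pointer, which the posted code enables but never points at the buffer, is included too):

func setupVBOs() {
    glGenVertexArraysOES(1, &VAO);
    glBindVertexArrayOES(VAO);

    glGenBuffers(1, &vertexBuffer)
    glBindBuffer(GLenum(GL_ARRAY_BUFFER), vertexBuffer)
    glBufferData(GLenum(GL_ARRAY_BUFFER), Vertices.size(), Vertices, GLenum(GL_STATIC_DRAW))

    // Position: 3 floats at byte offset 0.
    glEnableVertexAttribArray(positionSlot)
    glVertexAttribPointer(positionSlot, 3, GLenum(GL_FLOAT), GLboolean(UInt8(GL_FALSE)), GLsizei(sizeof(Vertex)), nil)

    // Color: 4 floats, starting right after the 3 position floats.
    glEnableVertexAttribArray(colorSlot)
    glVertexAttribPointer(colorSlot, 4, GLenum(GL_FLOAT), GLboolean(UInt8(GL_FALSE)), GLsizei(sizeof(Vertex)), UnsafePointer<Int>(bitPattern: sizeof(Float) * 3))

    // TexCoord: 2 floats after 3 position + 4 color floats, recorded while the VAO/VBO are bound.
    glEnableVertexAttribArray(texCoordSlot)
    glVertexAttribPointer(texCoordSlot, 2, GLenum(GL_FLOAT), GLboolean(UInt8(GL_FALSE)), GLsizei(sizeof(Vertex)), UnsafePointer<Int>(bitPattern: sizeof(Float) * 7))

    glGenBuffers(1, &indexBuffer)
    glBindBuffer(GLenum(GL_ELEMENT_ARRAY_BUFFER), indexBuffer)
    glBufferData(GLenum(GL_ELEMENT_ARRAY_BUFFER), Indices.size(), Indices, GLenum(GL_STATIC_DRAW))

    glBindBuffer(GLenum(GL_ARRAY_BUFFER), 0)
    glBindVertexArrayOES(0)
}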
I am trying to get my context (EAGLContext) to render in Swift, but for days I haven't been able to get the function glDrawElements to work.
I have read a couple of similar questions here on Stack Overflow, but to no avail.
My glDrawElements is as follows:
glDrawElements(GLenum(GL_TRIANGLES), GLsizei(Indices.count), GLenum(GL_UNSIGNED_BYTE), &offset)
I am having a problem with the last parameter, offset, which expects an UnsafePointer<Void>.
I have tried the following:
let offset: CConstVoidPointer = COpaquePointer(UnsafePointer<Int>(0))
The above no longer works, because CConstVoidPointer doesn't seem to be available anymore in Swift 1.2.
And:
var offset = UnsafePointer<Int>(other: 0)
The above gives a warning that I should use bitPattern: instead.
Although I don't believe bitPattern: should be used here (because the parameter expects a Word), I decided to give it a try as suggested:
var offset = UnsafePointer<Int>(bitPattern: 0)
glDrawElements(GLenum(GL_TRIANGLES), GLsizei(Indices.count), GLenum(GL_UNSIGNED_BYTE), &offset)
I would get an EXC_BAD_ACCESS (code=EXC_I386_GPFLT) error.
In vain, I also tried something as simple as the following:
var offsetZero : Int = 0
and subsequently feeding it to the last parameter of glDrawElements like so:
glDrawElements(GLenum(GL_TRIANGLES), GLsizei(Indices.count), GLenum(GL_UNSIGNED_BYTE), &offsetZero)
I am getting the same EXC_BAD_ACCESS as in the case above.
How can I properly form a type suitable for the last parameter of glDrawElements that expects the type UnsafePointer<Void>?
@RobMayoff
Update (adding code for the VBO set-up and variable declarations and definitions):
struct vertex
{
    var position: (CFloat, CFloat, CFloat)
    var color: (CFloat, CFloat, CFloat, CFloat)
}

var vertices =
[
    vertex(position: (-1, -1, 0) , color: (1, 1, 0, 1)),
    vertex(position: (1, -1, 0) , color: (1, 1, 1, 1)),
    vertex(position: (-1, 0, 1) , color: (0, 1, 0, 1)),
    vertex(position: (-1, 1, 1), color: (1, 0, 0, 1))
]

let indices: [UInt8] = [ 0, 2, 3, 3, 1, 0 ]

class mainGLView : UIView
{
    var layer : CAEAGLLayer!
    var context : EAGLContext!
    var cBuffer : GLuint = GLuint()
    var pos : GLuint = GLuint()
    var color : GLuint = GLuint()
    var iBuffer : GLuint = GLuint()
    var vBuffer : GLuint = GLuint()
    var vao : GLuint = GLuint()

    override class func layerClass() -> AnyClass
    {
        return CAEAGLLayer.self
    }

    required init(coder aDecoder: NSCoder)
    {
        super.init(coder: aDecoder)

        //setting up context, buffers, shaders, etc.

        self.configureVBO()
        self.setupRendering()
    }

    func configureVBO()
    {
        glGenVertexArraysOES(1, &vao);
        glBindVertexArrayOES(vao);

        glGenBuffers(GLsizei(1), &vBuffer)
        glBindBuffer(GLenum(GL_ARRAY_BUFFER), vBuffer)
        glBufferData(GLenum(GL_ARRAY_BUFFER), vertices.size(), vertices, GLenum(GL_STATIC_DRAW))

        glEnableVertexAttribArray(pos)
        var ptr = COpaquePointer(UnsafePointer<Int>(bitPattern: 0))
        glVertexAttribPointer(GLuint(pos), GLint(3), GLenum(GL_FLOAT), GLboolean(GL_FALSE), GLsizei(sizeof(vertex)), &ptr)

        glEnableVertexAttribArray(color)
        let fColor = UnsafePointer<Int>(bitPattern: sizeof(Float) * 3)
        glVertexAttribPointer(GLuint(color), 4, GLenum(GL_FLOAT), GLboolean(GL_FALSE), GLsizei(sizeof(vertex)), fColor)

        glGenBuffers(1, &iBuffer)
        glBindBuffer(GLenum(GL_ELEMENT_ARRAY_BUFFER), iBuffer)
        glBufferData(GLenum(GL_ELEMENT_ARRAY_BUFFER), indices.size(), indices, GLenum(GL_STATIC_DRAW))

        glBindBuffer(GLenum(GL_ARRAY_BUFFER), 0)
        glBindVertexArrayOES(0)
    }

    func setupRendering()
    {
        glBindVertexArrayOES(vao);
        glViewport(0, 0, GLint(self.frame.size.width), GLint(self.frame.size.height));

        indices.withUnsafeBufferPointer
        {
            (pointer : UnsafeBufferPointer<UInt8>) -> Void in
            glDrawElements(GLenum(GL_TRIANGLES), GLsizei(indices.count), GLenum(GL_UNSIGNED_BYTE), UnsafePointer<Void>(pointer.baseAddress))
            Void()
        }

        self.context.presentRenderbuffer(Int(GL_RENDERBUFFER))
        glBindVertexArrayOES(0)
    }
}
Because of your choices for the count and type arguments, the indices argument (the last argument) needs to be a pointer to the first element of an array, of (at least) length Indices.count, of unsigned bytes. This pointer needs to be converted to UnsafePointer<Void> in Swift.
You didn't show the declaration of your Indices. Let's assume it's declared like this:
let indices: [UInt8] = [ 0, 1, 2, 3, 4, 5, 6, 7, 8 ]
Then you can call glDrawElements by jumping through these hoops:
indices.withUnsafeBufferPointer { (pointer: UnsafeBufferPointer<UInt8>) -> Void in
    glDrawElements(GLenum(GL_TRIANGLES), GLsizei(indices.count),
        GLenum(GL_UNSIGNED_BYTE),
        UnsafePointer<Void>(pointer.baseAddress))
    Void()
}
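A side note on the updated code above (my observation, not part of the answer): configureVBO() leaves iBuffer bound to GL_ELEMENT_ARRAY_BUFFER, and that binding is recorded in the VAO. When an element array buffer is bound, glDrawElements treats its last argument as a byte offset into that buffer rather than a client-side pointer, so drawing from the buffered indices could instead be written as:

// Indices come from the bound GL_ELEMENT_ARRAY_BUFFER; nil means "start at byte offset 0".
glDrawElements(GLenum(GL_TRIANGLES), GLsizei(indices.count), GLenum(GL_UNSIGNED_BYTE), nil)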
I am learning OpenGL ES 2.0.
I am trying to apply an orthographic projection in OpenGL ES 2.0.
I draw a square, but I don't actually get a square shape on the screen, and I am not sure which part I am missing.
Thank you for your help!
There are some methods in setupRenderingEnv that I did not post, but those methods just set up the frames and work fine, and m_program is created fine.
Again, thank you for your help.
// my vertex shader
attribute vec4 Position;
attribute vec4 SourceColor;

uniform mat4 Projection;

varying vec4 DestinationColor;

void main(void)
{
    DestinationColor = SourceColor;
    gl_Position = Projection * Position;
}
// my drawing file
typedef struct
{
    float Position[3];
    float Color[4];
} Vertex;

const Vertex Vertices[] =
{
    {{100, -100, 0}, {1, 0, 0, 1}},
    {{100, 100, 0}, {0, 1, 0, 1}},
    {{-100, 100, 0}, {0, 0, 1, 1}},
    {{-100, -100, 0}, {0, 0, 0, 1}}
};

const GLubyte Indices[] =
{
    0, 1, 2,
    2, 3, 0
};
- (void)setupRenderingEnv
{
    [super setupRenderingEnv];

    [self setupVertexBufferObjects];
    [self setupRunLoop];
    [self applyOrthoWithX:self.frame.size.width andY:self.frame.size.height];
    glViewport(0, 0, self.frame.size.width, self.frame.size.height);
}

//-- used for applying ortho in opengl es 2.0
- (void)applyOrthoWithX:(float)maxX andY:(float)maxY
{
    float a = 1.0f / maxX;
    float b = 1.0f / maxY;
    float ortho[16] =
    {
        a, 0, 0, 0,
        0, b, 0, 0,
        0, 0, -1, 0,
        0, 0, 0, 1
    };

    GLint projectionUniform = glGetUniformLocation(super.m_programHandle, "Projection");
    glUniformMatrix4fv(projectionUniform, 1, 0, &ortho[0]);
}

//-- overriding drawCandle. it render image, draw candle
- (void)drawCandle
{
    glClearColor(0, 104.0/255, 55.0/255, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);

    GLuint positionSlot = glGetAttribLocation(super.m_programHandle, "Position");
    GLuint colorSlot = glGetAttribLocation(super.m_programHandle, "SourceColor");

    glEnableVertexAttribArray(positionSlot);
    glEnableVertexAttribArray(colorSlot);

    glVertexAttribPointer(positionSlot, 3, GL_FLOAT, GL_FALSE,
                          sizeof(Vertex), 0);
    glVertexAttribPointer(colorSlot, 4, GL_FLOAT, GL_FALSE,
                          sizeof(Vertex), (GLvoid *)(sizeof(float) * 3));

    glDrawElements(GL_TRIANGLES, sizeof(Indices)/sizeof(Indices[0]), GL_UNSIGNED_BYTE, 0);

    glDisableVertexAttribArray(positionSlot);
    glDisableVertexAttribArray(colorSlot);

    [super drawCandle];
}
What shape is your viewport? If it's not square then that's the problem. The matrix you're creating - scaling by inverse width and inverse height - is going to make the width always be 1 unit wide and the height 1 unit tall. If the width and height aren't the same number of pixels, then squares won't draw square. You need to account for the aspect ratio of your viewport. Off the top of my head, I think it would be something more like this:
float ortho [ 16 ] = {
    a / b, 0, 0, 0,
    0, b, 0, 0,
    0, 0, -1, 0,
    0, 0, 0, 1
};
(I might have a/b inverted - can't remember)
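For reference (an addition, not part of the answer above), a full glOrtho-style matrix makes the aspect handling explicit: with left/right set to ±width/2 and bottom/top to ±height/2, one world unit maps to one pixel on both axes, so squares stay square. A sketch in Swift (the language used elsewhere on this page), column-major as glUniformMatrix4fv expects when transpose is GL_FALSE:

// Column-major orthographic projection, equivalent to glOrtho(l, r, b, t, n, f).
func orthoMatrix(left l: Float, right r: Float, bottom b: Float, top t: Float,
                 near n: Float, far f: Float) -> [Float] {
    return [
        2 / (r - l), 0, 0, 0,
        0, 2 / (t - b), 0, 0,
        0, 0, -2 / (f - n), 0,
        -(r + l) / (r - l), -(t + b) / (t - b), -(f + n) / (f - n), 1
    ]
}

// e.g. an ortho that maps one unit to one pixel for a viewport of maxX x maxY:
// let ortho = orthoMatrix(left: -maxX / 2, right: maxX / 2,
//                         bottom: -maxY / 2, top: maxY / 2, near: -1, far: 1)
// glUniformMatrix4fv(projectionUniform, 1, GLboolean(GL_FALSE), ortho)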
I'm simply trying to repeat a texture, loaded using the iOS 5.1 GLKit improvements, that I've drawn onto a simple quad.
Here are the vertices and texCoords:
#define ROAD_TEX_COORD_MAX 10
const Vertex roadVertices[] =
{
    { { 90, -90, 0 } , { 1, 1, 1, 1 }, { ROAD_TEX_COORD_MAX, 0 } },                 // Bot R - 0
    { { 90, 90, 0 } , { 1, 1, 1, 1 }, { ROAD_TEX_COORD_MAX, ROAD_TEX_COORD_MAX } }, // Top R - 1
    { { -90, 90, 0 } , { 1, 1, 1, 1 }, { 0 , ROAD_TEX_COORD_MAX } },                // Top L - 2
    { { -90, -90, 0 } , { 1, 1, 1, 1 }, { 0 , 0 } }                                 // Bot L - 3
};

const GLubyte roadIndices[] =
{
    0, 1, 2,
    2, 3, 0
};
Here is the render routine:
- ( void ) render
{
    [self.baseEffect prepareToDraw];

    glBindBuffer( GL_ARRAY_BUFFER, vertexBuffer );
    glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, indexBuffer );

    glEnableVertexAttribArray( GLKVertexAttribPosition );
    glVertexAttribPointer( GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, sizeof( Vertex ), ( const GLvoid * ) offsetof( Vertex, Position ) );

    glEnableVertexAttribArray( GLKVertexAttribColor );
    glVertexAttribPointer( GLKVertexAttribColor, 4, GL_FLOAT, GL_FALSE, sizeof( Vertex ), ( const GLvoid * ) offsetof( Vertex, Color ) );

    glEnableVertexAttribArray( GLKVertexAttribTexCoord0 );
    glVertexAttribPointer( GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof( Vertex ), ( const GLvoid * ) offsetof( Vertex, TexCoord ) );

    glDrawElements( GL_TRIANGLES, sizeof( roadIndices ) / sizeof( roadIndices[0] ), GL_UNSIGNED_BYTE, 0 );
}
However, this is what I'm ending up with:
Now I've read that by default the EAGL setup uses the REPEAT texture setting and that you have to manually set it to CLAMP_TO_EDGE. However, that's not the case here: I'm not setting either of these (although I have tried disabling CLAMP and enabling REPEAT, and all that did was make the quad black, with no texture visible).
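An aside that may be relevant here (my note, not from the original post): in OpenGL ES 2.0, GL_REPEAT only works for textures with power-of-two dimensions, and wrapping is per-texture state set with glTexParameteri while the texture is bound. A minimal sketch in Swift (the language used elsewhere on this page; textureName stands for whatever name GLKTextureLoader returned, e.g. textureInfo.name):

// Wrap modes are per-texture state; set them once with the texture bound.
// GL_REPEAT requires power-of-two texture dimensions in ES 2.0; otherwise only CLAMP_TO_EDGE works.
glBindTexture(GLenum(GL_TEXTURE_2D), textureName)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_REPEAT)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_REPEAT)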