Failed to compile shader in Swift
The code for the CameraViewController.swift class is shown below.
//
// CameraViewController.swift
// iOSSwiftOpenGLCamera
//
// Created by Bradley Griffith on 7/3/14.
// Copyright (c) 2014 Bradley Griffith. All rights reserved.
//
import UIKit
import CoreMedia
import AVFoundation
class CameraViewController: UIViewController, CameraSessionControllerDelegate {

    var cameraSessionController: CameraSessionController!
    @IBOutlet var openGLView: OpenGLView!
    @IBOutlet var togglerSwitch: UISwitch!

    /* Lifecycle
    ------------------------------------------*/

    override func viewDidLoad() {
        super.viewDidLoad()
        cameraSessionController = CameraSessionController()
        cameraSessionController.sessionDelegate = self
    }

    override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)
        cameraSessionController.startCamera()
    }

    override func viewWillDisappear(animated: Bool) {
        super.viewWillDisappear(animated)
        cameraSessionController.teardownCamera()
    }

    /* Instance Methods
    ------------------------------------------*/

    @IBAction func toggleShader(sender: AnyObject) {
        openGLView.shouldShowShader(togglerSwitch.on)
    }

    func cameraSessionDidOutputSampleBuffer(sampleBuffer: CMSampleBuffer!) {
        openGLView.updateUsingSampleBuffer(sampleBuffer)
    }
}
The code for my OpenGLView.swift UIView subclass is shown below.
//
// OpenGLView.swift
// iOSSwiftOpenGLCamera
//
// Created by Bradley Griffith on 7/1/14.
// Copyright (c) 2014 Bradley Griffith. All rights reserved.
//
import UIKit
import CoreMedia
import Foundation
import QuartzCore
import OpenGLES
import GLKit
import AVFoundation
struct Vertex {
    var Position: (CFloat, CFloat, CFloat)
    var TexCoord: (CFloat, CFloat)
}

var Vertices: (Vertex, Vertex, Vertex, Vertex) = (
    Vertex(Position: (1, -1, 0),  TexCoord: (1, 1)),
    Vertex(Position: (1, 1, 0),   TexCoord: (1, 0)),
    Vertex(Position: (-1, 1, 0),  TexCoord: (0, 0)),
    Vertex(Position: (-1, -1, 0), TexCoord: (0, 1))
)

var Indices: (GLubyte, GLubyte, GLubyte, GLubyte, GLubyte, GLubyte) = (
    0, 1, 2,
    2, 3, 0
)
class OpenGLView: UIView {

    var eaglLayer: CAEAGLLayer!
    var context: EAGLContext!
    var colorRenderBuffer: GLuint = GLuint()
    var positionSlot: GLuint = GLuint()
    var texCoordSlot: GLuint = GLuint()
    var textureUniform: GLuint = GLuint()
    var timeUniform: GLuint = GLuint()
    var showShaderBoolUniform: GLuint = GLuint()
    var indexBuffer: GLuint = GLuint()
    var vertexBuffer: GLuint = GLuint()
    var unmanagedVideoTexture: Unmanaged<CVOpenGLESTexture>?
    var videoTexture: CVOpenGLESTextureRef?
    var videoTextureID: GLuint?
    var unmanagedCoreVideoTextureCache: Unmanaged<CVOpenGLESTextureCache>?
    var coreVideoTextureCache: CVOpenGLESTextureCacheRef?
    var textureWidth: UInt?
    var textureHeight: UInt?
    var time: GLfloat = 0.0
    var showShader: GLfloat = 1.0
    var frameTimestamp: Double = 0.0

    /* Class Methods
    ------------------------------------------*/

    override class func layerClass() -> AnyClass {
        // In order for our view to display OpenGL content, we need to set its
        // default layer to be a CAEAGLLayer.
        return CAEAGLLayer.self
    }
    /* Lifecycle
    ------------------------------------------*/

    required init(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)

        setupLayer()
        setupContext()
        setupRenderBuffer()
        setupFrameBuffer()
        compileShaders()
        setupVBOs()
        setupDisplayLink()

        self.contentScaleFactor = UIScreen.mainScreen().scale
    }
    /* Setup Methods
    ------------------------------------------*/

    func setupLayer() {
        // CALayers are, by default, non-opaque, which is 'bad for performance with OpenGL',
        // so let's set our CAEAGLLayer layer to be opaque.
        eaglLayer = layer as CAEAGLLayer
        eaglLayer.opaque = true
    }

    func setupContext() {
        // Just like with CoreGraphics, in order to do much with OpenGL, we need a context.
        // Here we create a new context with the version of the rendering API we want and
        // tell OpenGL that when we draw, we want to do so within this context.
        let api: EAGLRenderingAPI = EAGLRenderingAPI.OpenGLES2
        context = EAGLContext(API: api)

        if let contextValue = self.context as EAGLContext? {
            let err: CVReturn = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, nil, contextValue, nil, &unmanagedCoreVideoTextureCache)
            coreVideoTextureCache = unmanagedCoreVideoTextureCache!.takeUnretainedValue()
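            // Minimal sketch, not in the original code: err is never inspected,
            // so a texture-cache creation failure would go unnoticed here.
            if err != kCVReturnSuccess {
                println("Error at CVOpenGLESTextureCacheCreate: \(err)")
            }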
        }
        else {
            println("Failed to initialize OpenGLES 2.0 context!")
            exit(1)
        }
    }
    func setupRenderBuffer() {
        // A render buffer is an OpenGL object that stores the rendered image to present to the screen.
        // OpenGL will create a unique identifier for a render buffer and store it in a GLuint.
        // So we call the glGenRenderbuffers function and pass it a reference to our colorRenderBuffer.
        glGenRenderbuffers(1, &colorRenderBuffer)

        // Then we tell OpenGL that whenever we refer to GL_RENDERBUFFER, it should treat that as our colorRenderBuffer.
        glBindRenderbuffer(GLenum(GL_RENDERBUFFER), colorRenderBuffer)

        // Finally, we tell our context that the render buffer for our layer is our colorRenderBuffer.
        context.renderbufferStorage(Int(GL_RENDERBUFFER), fromDrawable: eaglLayer)
    }

    func setupFrameBuffer() {
        // A frame buffer is an OpenGL object for storage of a render buffer... amongst other things (tm).
        // OpenGL will create a unique identifier for a frame buffer and store it in a GLuint. So we
        // make a GLuint and pass it to the glGenFramebuffers function to keep this identifier.
        var frameBuffer: GLuint = GLuint()
        glGenFramebuffers(1, &frameBuffer)

        // Then we tell OpenGL that whenever we refer to GL_FRAMEBUFFER, it should treat that as our frameBuffer.
        glBindFramebuffer(GLenum(GL_FRAMEBUFFER), frameBuffer)

        // Finally, we tell the frame buffer that its GL_COLOR_ATTACHMENT0 is our colorRenderBuffer.
        glFramebufferRenderbuffer(GLenum(GL_FRAMEBUFFER), GLenum(GL_COLOR_ATTACHMENT0), GLenum(GL_RENDERBUFFER), colorRenderBuffer)
    }
    func compileShader(shaderName: NSString, shaderType: GLenum) -> GLuint {
        // Get an NSString with the contents of our shader file.
        let shaderPath: NSString = NSBundle.mainBundle().pathForResource(shaderName, ofType: "glsl")!
        var shaderString: NSString? = NSString.stringWithContentsOfFile(shaderPath, encoding: NSUTF8StringEncoding, error: nil)

        // Tell OpenGL to create an OpenGL object to represent the shader, indicating whether it is a vertex or a fragment shader.
        let shaderHandle: GLuint = glCreateShader(shaderType)

        if let shaderStringValue = shaderString as NSString? {
            // Convert the shader string to a CString and call glShaderSource to give OpenGL the source for the shader.
            var shaderStringUTF8 = shaderStringValue.UTF8String
            var shaderStringLength: GLint = GLint(shaderStringValue.length)
            glShaderSource(shaderHandle, 1, &shaderStringUTF8, &shaderStringLength)

            // Tell OpenGL to compile the shader.
            glCompileShader(shaderHandle)

            // But compiling can fail! If we have errors in our GLSL code, we can check here and output any errors.
            var compileSuccess: GLint = GLint()
            glGetShaderiv(shaderHandle, GLenum(GL_COMPILE_STATUS), &compileSuccess)
            if (compileSuccess == GL_FALSE) {
                var value: GLint = 0
                glGetShaderiv(shaderHandle, GLenum(GL_INFO_LOG_LENGTH), &value)
                var infoLog: [GLchar] = [GLchar](count: Int(value), repeatedValue: 0)
                var infoLogLength: GLsizei = 0
                glGetShaderInfoLog(shaderHandle, value, &infoLogLength, &infoLog)
                var messageString = NSString(bytes: infoLog, length: Int(infoLogLength), encoding: NSASCIIStringEncoding)

                println("Failed to compile shader!")
                println(messageString)
                exit(1)
            }
        }
        else {
            println("Failed to read the contents of the shader file!")
        }

        return shaderHandle
    }
    func compileShaders() {
        // Compile our vertex and fragment shaders.
        let vertexShader: GLuint = compileShader("SimpleVertex", shaderType: GLenum(GL_VERTEX_SHADER))
        let fragmentShader: GLuint = compileShader("SimpleFragment", shaderType: GLenum(GL_FRAGMENT_SHADER))

        // Call glCreateProgram, glAttachShader, and glLinkProgram to link the vertex and fragment shaders into a complete program.
        var programHandle: GLuint = glCreateProgram()
        glAttachShader(programHandle, vertexShader)
        glAttachShader(programHandle, fragmentShader)
        glLinkProgram(programHandle)

        // Check for any errors.
        var linkSuccess: GLint = GLint()
        glGetProgramiv(programHandle, GLenum(GL_LINK_STATUS), &linkSuccess)
        if (linkSuccess == GL_FALSE) {
            println("Failed to create shader program!")
            // TODO: Actually output the error that we can get from the glGetProgramInfoLog function.
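            // Minimal sketch for the TODO above, mirroring the shader info-log
            // code in compileShader():
            var linkLogLength: GLint = 0
            glGetProgramiv(programHandle, GLenum(GL_INFO_LOG_LENGTH), &linkLogLength)
            var linkLog: [GLchar] = [GLchar](count: Int(linkLogLength), repeatedValue: 0)
            var linkLogWritten: GLsizei = 0
            glGetProgramInfoLog(programHandle, linkLogLength, &linkLogWritten, &linkLog)
            println(NSString(bytes: linkLog, length: Int(linkLogWritten), encoding: NSASCIIStringEncoding))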
            exit(1)
        }

        // Call glUseProgram to tell OpenGL to actually use this program when given vertex info.
        glUseProgram(programHandle)

        // Finally, call glGetAttribLocation to get a pointer to the input values for the vertex shader, so we
        // can set them in code. Also call glEnableVertexAttribArray to enable use of these arrays (they are disabled by default).
        positionSlot = GLuint(glGetAttribLocation(programHandle, "Position"))
        glEnableVertexAttribArray(positionSlot)

        texCoordSlot = GLuint(glGetAttribLocation(programHandle, "TexCoordIn"))
        glEnableVertexAttribArray(texCoordSlot)

        textureUniform = GLuint(glGetUniformLocation(programHandle, "Texture"))
        timeUniform = GLuint(glGetUniformLocation(programHandle, "time"))
        showShaderBoolUniform = GLuint(glGetUniformLocation(programHandle, "showShader"))
    }
    // Setup Vertex Buffer Objects
    func setupVBOs() {
        // Hand our Vertices and Indices tuples to OpenGL as a vertex buffer
        // and an index buffer, both marked as static draw data.
        glGenBuffers(1, &vertexBuffer)
        glBindBuffer(GLenum(GL_ARRAY_BUFFER), vertexBuffer)
        glBufferData(GLenum(GL_ARRAY_BUFFER), Int(sizeofValue(Vertices)), &Vertices, GLenum(GL_STATIC_DRAW))

        glGenBuffers(1, &indexBuffer)
        glBindBuffer(GLenum(GL_ELEMENT_ARRAY_BUFFER), indexBuffer)
        glBufferData(GLenum(GL_ELEMENT_ARRAY_BUFFER), Int(sizeofValue(Indices)), &Indices, GLenum(GL_STATIC_DRAW))
    }

    func setupDisplayLink() {
        // Call render(_:) every time the screen refreshes.
        let displayLink: CADisplayLink = CADisplayLink(target: self, selector: "render:")
        displayLink.addToRunLoop(NSRunLoop.currentRunLoop(), forMode: NSDefaultRunLoopMode)
    }
    /* Helper Methods
    ------------------------------------------*/

//    func getTextureFromImageWithName(fileName: NSString) -> GLuint {
//
//        var spriteImage: CGImageRef? = UIImage(named: fileName).CGImage
//
//        var texName: GLuint = GLuint()
//
//        if let spriteImageValue = spriteImage as CGImageRef? {
//            let width: UInt = CGImageGetWidth(spriteImageValue)
//            let height: UInt = CGImageGetHeight(spriteImageValue)
//
//            let spriteData = UnsafePointer<GLubyte>(calloc(UInt(CGFloat(width) * CGFloat(height) * 4), UInt(sizeof(GLubyte))))
//
//            let bitmapInfo = CGBitmapInfo.fromRaw(CGImageAlphaInfo.PremultipliedLast.toRaw())!
//            let spriteContext: CGContextRef = CGBitmapContextCreate(spriteData, width, height, 8, width * 4, CGImageGetColorSpace(spriteImageValue), bitmapInfo)
//
//            CGContextDrawImage(spriteContext, CGRectMake(0, 0, CGFloat(width), CGFloat(height)), spriteImageValue)
//            CGContextRelease(spriteContext)
//
//            glGenTextures(1, &texName)
//            glBindTexture(GL_TEXTURE_2D.asUnsigned(), texName)
//
//            glTexParameteri(GL_TEXTURE_2D.asUnsigned(), GL_TEXTURE_MIN_FILTER.asUnsigned(), GL_NEAREST)
//            glTexImage2D(GL_TEXTURE_2D.asUnsigned(), 0, GL_RGBA, GLsizei(width), GLsizei(height), 0, GL_RGBA.asUnsigned(), UInt32(GL_UNSIGNED_BYTE), spriteData)
//
//            free(spriteData)
//        }
//        else {
//            println("Failed to load image!")
//            exit(1)
//        }
//
//        return texName
//    }
    func cleanupVideoTextures() {
        if videoTexture != nil {
            videoTexture = nil
        }
        CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0)
    }
    func getTextureFromSampleBuffer(sampleBuffer: CMSampleBuffer!) -> GLuint {
        cleanupVideoTextures()

        var unmanagedImageBuffer: CVImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
        var imageBuffer = unmanagedImageBuffer
        var opaqueImageBuffer = unmanagedImageBuffer
        var cameraFrame: CVPixelBuffer = opaqueImageBuffer

        textureWidth = CVPixelBufferGetWidth(cameraFrame)
        textureHeight = CVPixelBufferGetHeight(cameraFrame)

        CVPixelBufferLockBaseAddress(cameraFrame, 0)

        var err: CVReturn = CVOpenGLESTextureCacheCreateTextureFromImage(
            kCFAllocatorDefault,
            coreVideoTextureCache,
            imageBuffer,
            nil,
            GLenum(GL_TEXTURE_2D),
            GL_RGBA,
            GLsizei(textureWidth!),
            GLsizei(textureHeight!),
            GLenum(GL_BGRA),
            UInt32(GL_UNSIGNED_BYTE),
            0,
            &unmanagedVideoTexture
        )
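        // Minimal sketch, not in the original code: err is never inspected here;
        // printing it helps diagnose a Core Video failure, which would otherwise
        // just crash on the force-unwrap below.
        if err != kCVReturnSuccess {
            println("Error at CVOpenGLESTextureCacheCreateTextureFromImage: \(err)")
        }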
        videoTexture = unmanagedVideoTexture!.takeUnretainedValue()

        var textureID: GLuint = GLuint()
        textureID = CVOpenGLESTextureGetName(videoTexture)

        glBindTexture(GLenum(GL_TEXTURE_2D), textureID)
        glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR)
        glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MAG_FILTER), GL_LINEAR)
        glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_CLAMP_TO_EDGE)
        glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE)

        CVPixelBufferUnlockBaseAddress(cameraFrame, 0)

        return textureID
    }
    func updateUsingSampleBuffer(sampleBuffer: CMSampleBuffer!) {
        dispatch_async(dispatch_get_main_queue(), {
            self.videoTextureID = self.getTextureFromSampleBuffer(sampleBuffer)
        })
    }

    func shouldShowShader(show: Bool) {
        showShader = show ? 1.0 : 0.0
    }
    func render(displayLink: CADisplayLink) {
        if let textureWidthValue = textureWidth as UInt? {
            if let textureHeightValue = textureHeight as UInt? {
                var ratio = CGFloat(frame.size.height) / CGFloat(textureHeightValue)
                glViewport(0, 0, GLint(CGFloat(textureWidthValue) * ratio), GLint(CGFloat(textureHeightValue) * ratio))
            }
        }
        else {
            glViewport(0, 0, GLint(self.frame.size.width), GLint(self.frame.size.height))
        }

        let positionSlotFirstComponent = UnsafePointer<Int>(bitPattern: 0)
        glVertexAttribPointer(positionSlot, 3, GLenum(GL_FLOAT), GLboolean(GL_FALSE), Int32(sizeof(Vertex)), positionSlotFirstComponent)

        let texCoordFirstComponent = UnsafePointer<Int>(bitPattern: sizeof(Float) * 3)
        glVertexAttribPointer(texCoordSlot, 2, GLenum(GL_FLOAT), GLboolean(GL_FALSE), Int32(sizeof(Vertex)), texCoordFirstComponent)

        glActiveTexture(UInt32(GL_TEXTURE0))
        if let videoTextureIDValue = videoTextureID as GLuint? {
            glBindTexture(GLenum(GL_TEXTURE_2D), videoTextureIDValue)
            glUniform1i(GLint(textureUniform), 0)
        }

        // Increment and pass time to the shader. This is experimental; be sure to fully test any use of this variable.
        time += Float(displayLink.duration)
        glUniform1f(GLint(timeUniform), time)

        glUniform1f(GLint(showShaderBoolUniform), showShader)

        let vertexBufferOffset = UnsafePointer<Int>(bitPattern: 0)
        glDrawElements(GLenum(GL_TRIANGLES), Int32(GLfloat(sizeofValue(Indices)) / GLfloat(sizeofValue(Indices.0))), GLenum(GL_UNSIGNED_BYTE), vertexBufferOffset)

        context.presentRenderbuffer(Int(GL_RENDERBUFFER))
    }
}
I am using AVFoundation to capture camera frames and pass the sample buffer to the OpenGLView.swift UIView class. My shader files are in "glsl" format. When I run the program, I get the error
"Failed to compile shader!" I don't know why; if anyone knows about this, please let me know. Thanks.
I have tried multiple shaders; after hitting this error, I tried the simple shaders given below.
Fragment Shader (.fsh)
void main(void) {
    gl_FragColor = vec4(1.9, 1.9, 0.7, 1.3);
}
Vertex Shader (.vsh)
attribute vec2 aPosition;

void main(void) {
    gl_Position = vec4(aPosition, 0., 1.);
}
1 Answer
This code:
positionSlot = GLuint(glGetAttribLocation(programHandle, "Position"))
glEnableVertexAttribArray(positionSlot)
texCoordSlot = GLuint(glGetAttribLocation(programHandle, "TexCoordIn"))
glEnableVertexAttribArray(texCoordSlot)
textureUniform = GLuint(glGetUniformLocation(programHandle, "Texture"))
timeUniform = GLuint(glGetUniformLocation(programHandle, "time"))
showShaderBoolUniform = GLuint(glGetUniformLocation(programHandle, "showShader"))
looks up attribute and uniform names that are not present in the compiled shaders; glGetAttribLocation and glGetUniformLocation return -1 for any name the linked program does not expose. Change:
positionSlot = GLuint(glGetAttribLocation(programHandle, "Position"))
glEnableVertexAttribArray(positionSlot)
to
positionSlot = GLuint(glGetAttribLocation(programHandle, "aPosition"))
glEnableVertexAttribArray(positionSlot)
(note the rename from "Position" to "aPosition") to match the name of the attribute declared in
attribute vec2 aPosition;

void main(void) {
    gl_Position = vec4(aPosition, 0., 1.);
}
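As a guard against this class of bug, you could fail fast whenever a name is missing from the linked program. Below is a minimal sketch, assuming the Swift 1.x OpenGL ES bindings; the attributeSlot helper name is hypothetical and not part of the original project:
// Hypothetical helper: glGetAttribLocation returns -1 for a missing name,
// so trap that case before converting the result to a GLuint.
func attributeSlot(programHandle: GLuint, name: String) -> GLuint {
    let slot = glGetAttribLocation(programHandle, name)
    if slot < 0 {
        println("Attribute '\(name)' not found in the linked program!")
        exit(1)
    }
    return GLuint(slot)
}

// Usage:
// positionSlot = attributeSlot(programHandle, "aPosition")
The same kind of check would also flag "TexCoordIn", "Texture", "time", and "showShader", none of which the simple shaders above declare.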