Nice your source still works fine.
But Learning Webgl 2 with colors
And it doesn’t show ( black screen only )
I am trying to add but no triangle and no squad
http://learningwebgl.com/blog/?p=134
But it can’t work because openglview sees black…
How do I fix
in function getFragmentShader
Add “varying vec4 vColor;” before 0
"void …"
Replace to “gl_FragColor = vColor;”
in function getVertexShader
Add
attribute vec4 aVertexColor;
varying vec4 vColor;
from void main…
vColor = aVertexColor;
and Format and change to haxe format
than it is correctly
But openglview doesn’t show color… and nothing shapes…
I tried it full-code:
package;
import nme.geom.Point;
import nme.geom.Vector3D;
import nme.errors.Error;
import nme.display.OpenGLView;
import nme.display.Sprite;
import nme.geom.Matrix3D;
import nme.geom.Rectangle;
import nme.gl.GL;
import nme.gl.GLBuffer;
import nme.gl.GLProgram;
import nme.gl.GLTexture;
import nme.gl.GLUniformLocation;
import nme.utils.Float32Array;
import nme.utils.UInt8Array;
import nme.Assets;
class Main extends Sprite {
private var view:OpenGLView;
private var shaderProgram:GLProgram;
private var vertexAttribute:Int;
private var modelViewMatrixUniform:GLUniformLocation;
private var projectionMatrixUniform:GLUniformLocation;
private var triangleVertexPositionBuffer:GLBuffer;
// Lesson 2
private var triangleVertexColorBuffer:GLBuffer;
private var squareVertexPositionBuffer:GLBuffer;
// Lesson 2
private var squareVertexColorBuffer:GLBuffer;
public function new () {
super ();
if (!OpenGLView.isSupported) {
throw new Error("Could not initialise OpenGL, sorry :-(");
return;
}
// var canvas = document.getElementById("the-canvas");
view = new OpenGLView ();
initShaders();
initBuffers();
// This is too early to clear in OpenFL
//gl.clearColor(0.0, 0.0, 0.0, 1.0);
GL.clearColor (0.0, 0.0, 0.0, 1.0);
//gl.enable(gl.DEPTH_TEST);
GL.enable (GL.DEPTH_TEST);
//drawScene();
view.render = renderView;
addChild(view);
}
private function initShaders():Void {
var fragmentShader = getFragmentShader();
var vertexShader = getVertexShader();
//shaderProgram = gl.createProgram();
//gl.attachShader(shaderProgram, vertexShader);
//gl.attachShader(shaderProgram, fragmentShader);
//gl.linkProgram(shaderProgram);
shaderProgram = GL.createProgram ();
GL.attachShader (shaderProgram, vertexShader);
GL.attachShader (shaderProgram, fragmentShader);
GL.linkProgram (shaderProgram);
//if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
// alert("Could not initialise shaders");
//}
if (GL.getProgramParameter (shaderProgram, GL.LINK_STATUS) == 0) {
throw "Could not initialise shaders.";
}
//gl.useProgram(shaderProgram);
GL.useProgram (shaderProgram);
//shaderProgram.vertexPositionAttribute = gl.getAttribLocation(shaderProgram, "aVertexPosition");
vertexAttribute = GL.getAttribLocation (shaderProgram, "aVertexPosition");
// Lesson 2:
// shaderProgram.vertexColorAttribute = gl.getAttribLocation(shaderProgram, "aVertexColor");
vertexAttribute = GL.getAttribLocation (shaderProgram, "aVertexColor");
//gl.enableVertexAttribArray(shaderProgram.vertexPositionAttribute);
GL.enableVertexAttribArray (vertexAttribute);
//shaderProgram.pMatrixUniform = gl.getUniformLocation(shaderProgram, "uPMatrix");
//shaderProgram.mvMatrixUniform = gl.getUniformLocation(shaderProgram, "uMVMatrix");
projectionMatrixUniform = GL.getUniformLocation (shaderProgram, "uProjectionMatrix");
modelViewMatrixUniform = GL.getUniformLocation (shaderProgram, "uModelViewMatrix");
}
private function getFragmentShader() {
var fragmentShaderSource =
#if !desktop
"precision mediump float;" +
#end
// Lesson 2
"varying vec4 vColor;
void main(void) {
// Lesson 2
gl_FragColor = vColor;
}";
var fragmentShader = GL.createShader (GL.FRAGMENT_SHADER);
GL.shaderSource (fragmentShader, fragmentShaderSource);
GL.compileShader (fragmentShader);
if (GL.getShaderParameter (fragmentShader, GL.COMPILE_STATUS) == 0) {
throw "Error compiling fragment shader";
}
return fragmentShader;
}
private function getVertexShader() {
var vertexShaderSource =
"attribute vec3 aVertexPosition;
// Lesson 2
attribute vec4 aVertexColor;
uniform mat4 uModelViewMatrix;
uniform mat4 uProjectionMatrix;
// Lesson 2
varying vec4 vColor;
void main(void) {
gl_Position = uProjectionMatrix * uModelViewMatrix * vec4 (aVertexPosition, 1.0);
// Lesson 2
vColor = aVertexColor;
}";
var vertexShader = GL.createShader (GL.VERTEX_SHADER);
GL.shaderSource (vertexShader, vertexShaderSource);
GL.compileShader (vertexShader);
if (GL.getShaderParameter (vertexShader, GL.COMPILE_STATUS) == 0) {
throw "Error compiling vertex shader";
}
return vertexShader;
}
private function initBuffers():Void {
//triangleVertexPositionBuffer = gl.createBuffer();
triangleVertexPositionBuffer = GL.createBuffer ();
//gl.bindBuffer(gl.ARRAY_BUFFER, triangleVertexPositionBuffer);
GL.bindBuffer (GL.ARRAY_BUFFER, triangleVertexPositionBuffer);
var vertices = [
0.0, 1.0, 0.0, -1.0, -1.0, 0.0,
1.0, -1.0, 0.0
];
//gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
GL.bufferData (GL.ARRAY_BUFFER, new Float32Array (vertices), GL.STATIC_DRAW);
// Note: These are dynamic object work-arounds in WebGL example
// they are now hard coded in this OpenFL exampled
//triangleVertexPositionBuffer.itemSize = 3;
//triangleVertexPositionBuffer.numItems = 3;
// Lesson 2:
//triangleVertexColorBuffer = gl.createBuffer();
//gl.bindBuffer(gl.ARRAY_BUFFER, triangleVertexColorBuffer);
triangleVertexColorBuffer = GL.createBuffer();
GL.bindBuffer(GL.ARRAY_BUFFER, triangleVertexColorBuffer);
var colors = [
1.0, 0.0, 0.0, 1.0,
0.0, 1.0, 0.0, 1.0,
0.0, 0.0, 1.0, 1.0
];
//gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(colors), gl.STATIC_DRAW);
GL.bufferData(GL.ARRAY_BUFFER, new Float32Array(colors), GL.STATIC_DRAW);
//triangleVertexColorBuffer.itemSize = 4;
//triangleVertexColorBuffer.numItems = 3;
// Todo(Hays) Why? is this needed, it is in OpenFL example but not in WebGL
GL.bindBuffer (GL.ARRAY_BUFFER, null);
//squareVertexPositionBuffer = gl.createBuffer();
squareVertexPositionBuffer = GL.createBuffer ();
//gl.bindBuffer(gl.ARRAY_BUFFER, squareVertexPositionBuffer);
GL.bindBuffer (GL.ARRAY_BUFFER, squareVertexPositionBuffer);
vertices = [
1.0, 1.0, 0.0, -1.0, 1.0, 0.0,
1.0, -1.0, 0.0, -1.0, -1.0, 0.0
];
//gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
GL.bufferData (GL.ARRAY_BUFFER, new Float32Array (vertices), GL.STATIC_DRAW);
// Note: These are dynamic object work-arounds in WebGL example
// they are now hard coded in this OpenFL exampled
//squareVertexPositionBuffer.itemSize = 3;
//squareVertexPositionBuffer.numItems = 4;
//squareVertexColorBuffer = gl.createBuffer();
//gl.bindBuffer(gl.ARRAY_BUFFER, squareVertexColorBuffer);
squareVertexColorBuffer = GL.createBuffer();
GL.bindBuffer(GL.ARRAY_BUFFER, squareVertexColorBuffer);
colors = [];
/*for (var i=0; i < 4; i++) {
colors = colors.concat([0.5, 0.5, 1.0, 1.0]);
}*/
for (i in 0...4) {
colors = colors.concat([0.5, 0.5, 1.0, 1.0]);
}
//gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(colors), gl.STATIC_DRAW);
GL.bufferData(GL.ARRAY_BUFFER, new Float32Array(colors), GL.STATIC_DRAW);
//squareVertexColorBuffer.itemSize = 4;
//squareVertexColorBuffer.numItems = 4;
// Todo(Hays) Why? is this needed, it is in OpenFL example but not in WebGL
GL.bindBuffer (GL.ARRAY_BUFFER, null);
}
//var mvMatrix = mat4.create();
//var pMatrix = mat4.create();
private function renderView (rect:Rectangle):Void {
GL.useProgram (shaderProgram);
GL.enableVertexAttribArray (vertexAttribute);
GL.clearColor (0.0, 0.0, 0.0, 1.0);
//GL.enable (GL.DEPTH_TEST);
var modelViewMatrix = Matrix3D.create2D (0, 0, 1, 0);
//gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
GL.viewport (Std.int (rect.x), Std.int (rect.y), Std.int (rect.width), Std.int (rect.height));
//gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
GL.clear (GL.COLOR_BUFFER_BIT | GL.DEPTH_BUFFER_BIT);
//mat4.perspective(45, gl.viewportWidth / gl.viewportHeight, 0.1, 100.0, pMatrix);
var projectionMatrix = makePerspective(45, (rect.width/rect.height), 0.1, 100.0);
//mat4.identity(mvMatrix);
// move all items to center of viewport
//mat4.translate(mvMatrix, [-1.5, 0.0, -7.0]);
modelViewMatrix.position = modelViewMatrix.position.add(new Vector3D(-1.5, 0.0, -7.0));
//gl.bindBuffer(gl.ARRAY_BUFFER, triangleVertexPositionBuffer);
//gl.vertexAttribPointer(shaderProgram.vertexPositionAttribute, triangleVertexPositionBuffer.itemSize, gl.FLOAT, false, 0, 0);
GL.bindBuffer (GL.ARRAY_BUFFER, triangleVertexPositionBuffer);
GL.vertexAttribPointer (vertexAttribute, 3, GL.FLOAT, false, 0, 0);
// Lesson 2
//gl.bindBuffer(gl.ARRAY_BUFFER, triangleVertexColorBuffer);
//gl.vertexAttribPointer(shaderProgram.vertexColorAttribute, triangleVertexColorBuffer.itemSize, gl.FLOAT, false, 0, 0);
GL.bindBuffer(GL.ARRAY_BUFFER, triangleVertexColorBuffer);
GL.vertexAttribPointer(vertexAttribute, 4, GL.FLOAT, false, 0, 0);
//setMatrixUniforms();
//gl.uniformMatrix4fv(shaderProgram.pMatrixUniform, false, pMatrix);
//gl.uniformMatrix4fv(shaderProgram.mvMatrixUniform, false, mvMatrix);
GL.uniformMatrix4fv (projectionMatrixUniform, false, new Float32Array (projectionMatrix.rawData));
GL.uniformMatrix4fv (modelViewMatrixUniform, false, new Float32Array (modelViewMatrix.rawData));
//gl.drawArrays(gl.TRIANGLES, 0, triangleVertexPositionBuffer.numItems);
GL.drawArrays (GL.TRIANGLE_STRIP, 0, 3);
//mat4.translate(mvMatrix, [3.0, 0.0, 0.0]);
modelViewMatrix.position = modelViewMatrix.position.add(new Vector3D(3, 0.0, 0.0));
//gl.bindBuffer(gl.ARRAY_BUFFER, squareVertexPositionBuffer);
//gl.vertexAttribPointer(shaderProgram.vertexPositionAttribute, squareVertexPositionBuffer.itemSize, gl.FLOAT, false, 0, 0);
GL.bindBuffer (GL.ARRAY_BUFFER, squareVertexPositionBuffer);
GL.vertexAttribPointer (vertexAttribute, 3, GL.FLOAT, false, 0, 0);
// Lesson 2
//gl.bindBuffer(gl.ARRAY_BUFFER, squareVertexColorBuffer);
//gl.vertexAttribPointer(shaderProgram.vertexColorAttribute, squareVertexColorBuffer.itemSize, gl.FLOAT, false, 0, 0);
GL.bindBuffer(GL.ARRAY_BUFFER, squareVertexColorBuffer);
GL.vertexAttribPointer(vertexAttribute, 4, GL.FLOAT, false, 0, 0);
//setMatrixUniforms();
//gl.uniformMatrix4fv(shaderProgram.pMatrixUniform, false, pMatrix);
//gl.uniformMatrix4fv(shaderProgram.mvMatrixUniform, false, mvMatrix);
GL.uniformMatrix4fv (projectionMatrixUniform, false, new Float32Array (projectionMatrix.rawData));
GL.uniformMatrix4fv (modelViewMatrixUniform, false, new Float32Array (modelViewMatrix.rawData));
//gl.drawArrays(gl.TRIANGLE_STRIP, 0, squareVertexPositionBuffer.numItems);
GL.drawArrays (GL.TRIANGLE_STRIP, 0, 4);
//
GL.bindBuffer (GL.ARRAY_BUFFER, null);
GL.disableVertexAttribArray (vertexAttribute);
GL.useProgram (null);
}
private function makePerspective(fieldOfViewInRadians:Float, aspect:Float, near:Float, far:Float) {
var f = Math.tan(Math.PI * 0.5 - 0.5 * fieldOfViewInRadians);
var rangeInv = 1.0 / (near - far);
return new Matrix3D([
f / aspect, 0.0, 0.0, 0.0,
0.0, f, 0.0, 0.0,
0.0, 0.0, (near + far) * rangeInv, -1.0,
0.0, 0.0, near * far * rangeInv * 2, 0.0]
);
}
}
I use for NME ( last version of NME )
But I can not understand how do I use Lime because it sees different code’s constructor. Openfl = Nme and Lime are very different.
How do I format into Lime version?