OpenGL and Processing 2.0

The existing OpenGL code for Processing does not work in the 2.0 alpha. Here is an example code segment I modified to use the new PGL class.

import processing.opengl.*;
import javax.media.opengl.*;
 
GL2 gl;          // raw JOGL GL2 interface, obtained once in setup() and reused in draw()
float t, s, c;   // t: animation clock; s/c: sin(t)/cos(t) cached for the current frame
 
// Grabs the raw JOGL GL2 interface from Processing 2.0's new PGL wrapper
// and stores it in the global for use in draw().
void setup() {
  size(400, 400, OPENGL);
  background(0);
  // The default renderer g is the OpenGL renderer because of OPENGL above.
  PGraphicsOpenGL pg = (PGraphicsOpenGL) g;
  PGL pgl = pg.beginPGL();
  // Unwrap PGL -> JOGL GL -> GL2 profile.
  gl = pgl.gl.getGL().getGL2();
  pg.endPGL();
  // NOTE(review): gl is later used in draw() without a beginPGL()/endPGL()
  // bracket — confirm the cached handle stays valid outside it.
  t = 0.0f;
  s = 0.0f;
  c = 0.0f;
}
 
// Animates a single RGB-shaded triangle with immediate-mode GL2 calls,
// using sin/cos of a slowly advancing clock for the vertex positions.
void draw() {
  t += 0.01;
  s = sin(t);
  c = cos(t);
 
  gl.glClear(GL.GL_COLOR_BUFFER_BIT);
  gl.glBegin(GL.GL_TRIANGLES);
  // One color per vertex; GL interpolates across the face.
  gl.glColor3f(1, 0, 0);
  gl.glVertex3f(-c, -c, s);
  gl.glColor3f(0, 1, 0);
  gl.glVertex3f(0, c, 0);
  gl.glColor3f(0, 0, 1);
  gl.glVertex3f(s, -s, c);
  gl.glEnd();
}

DirectShow for Processing – OpenGL

I have worked out another version of the DirectShow for Processing classes from the last post. In this version, I write the movie data directly to an OpenGL texture object. Below is the modified version of the DMovie class. The DCapture class can be modified in the same way.
 
The modified DMovie class

import de.humatic.dsj.*;
import java.awt.image.BufferedImage;
import com.sun.opengl.util.texture.*;
 
// Wraps dsj's DSMovie and mirrors each decoded frame into a JOGL Texture
// so the movie can be drawn directly as an OpenGL texture.
class DMovie implements java.beans.PropertyChangeListener {
 
  private DSMovie movie;
  // Native display dimensions reported by the DirectShow filter graph.
  public int width, height;
  // GL texture holding the most recently uploaded frame.
  public Texture tex;
 
  // _s: movie file name, resolved relative to the sketch's data folder.
  DMovie(String _s) {
    movie = new DSMovie(dataPath(_s), DSFiltergraph.DD7, this);
    movie.setVolume(1.0);
    movie.setLoop(false);
    movie.play();
    width = movie.getDisplaySize().width;
    height = movie.getDisplaySize().height;
    // Seed the texture from the first available frame (no mipmaps).
    tex = TextureIO.newTexture(movie.getImage(), false);
  }
 
  // Pushes the current movie frame into the GL texture.
  // NOTE(review): allocates a fresh TextureData every call — presumably
  // acceptable, but confirm GC pressure at full frame rate.
  public void updateImage() {
    BufferedImage bimg = movie.getImage();
    TextureData td = TextureIO.newTextureData(bimg, false);
    tex.updateImage(td);
  }
 
  // Restarts playback with looping enabled.
  public void loop() {
    movie.setLoop(true);
    movie.play();
  }
 
  // Plays once from the current position (looping state unchanged).
  public void play() {
    movie.play();
  }
 
  // DirectShow event callback; events are intentionally ignored here.
  public void propertyChange(java.beans.PropertyChangeEvent e) {
    switch (DSJUtils.getEventType(e)) {
    }
  }
}

 
Sample code that uses the new DMovie class

import processing.opengl.*;
import javax.media.opengl.*;
 
DMovie mov;           // movie wrapper that owns the GL texture
PGraphicsOpenGL pgl;  // cached OpenGL renderer for beginGL()/endGL()
 
// Creates the movie wrapper inside the raw GL context so its texture is
// allocated against the sketch's OpenGL context, then binds it once.
// Fixes: removed a stray empty statement (";") and an unused local
// "GL gl" whose value was never read.
void setup()
{
  size(1280, 692, OPENGL);
  pgl = (PGraphicsOpenGL) g;
  // Enter the raw GL context; the return value is not needed here.
  pgl.beginGL();
  background(0);
  mov = new DMovie("Hugo.mp4");
  mov.loop();
  mov.tex.bind();
  pgl.endGL();
}
 
// Uploads the newest movie frame and draws it as one full-window quad.
// NOTE(review): the quad uses the sketch's width/height, so the movie is
// stretched when its aspect ratio differs from the window's — confirm
// this is intended.
void draw()
{
  GL gl = pgl.beginGL();
  mov.updateImage();
  mov.tex.enable();
  gl.glBegin(GL.GL_QUADS);
  // Texture coordinates map the full frame onto the full window.
  gl.glTexCoord2f(0, 0); 
  gl.glVertex2f(0, 0);
  gl.glTexCoord2f(1, 0); 
  gl.glVertex2f(width, 0);
  gl.glTexCoord2f(1, 1); 
  gl.glVertex2f(width, height);
  gl.glTexCoord2f(0, 1); 
  gl.glVertex2f(0, height);
  gl.glEnd();  
 
  mov.tex.disable();
  pgl.endGL();
}

Pixel Buffer Object in Processing

People are switching from Processing to OpenFrameworks and other more serious development platforms due to performance considerations. I have done a few searches and found that there are a number of libraries using different Java bindings of OpenCL, Vertex Buffer Objects, Pixel Buffer Objects, and even DirectShow. I wonder whether it is feasible to use Processing in a production environment where performance is important.

I have done a test comparing a live webcam video stream rendered with the traditional texture method against one using a pixel buffer object. The performance difference is noticeable and significant on my MacBook Pro. I did not record the videos, as recording may distort the real-time performance.

This is the ‘traditional’ method.

import processing.video.*;
import processing.opengl.*;
 
float a;      // rotation angle in degrees, wraps at 360
 
Capture cap;  // live webcam stream
PImage img;   // static background image
 
// Standard Processing setup: webcam capture textured onto a quad,
// with a static background image behind it.
void setup()
{
  // Print the available capture devices for reference.
  println(Capture.list());
  size(640, 480, OPENGL);
  hint(ENABLE_OPENGL_2X_SMOOTH);
  hint(DISABLE_DEPTH_TEST);
  a = 0;
 
  img = loadImage("tron.jpg");
  frameRate(30);
  cap = new Capture(this, width, height, 30);
  // Prime the capture with one frame so texture(cap) has data in draw().
  cap.read();
  // Texture coordinates in [0,1] rather than pixel units.
  textureMode(NORMALIZED);
}
 
// Draws the live webcam frame as a quad rotating about both the X and Y
// axes from the centre of the sketch, over a static background image.
void draw()
{
  background(0);
  image(img, 0, 0);

  // Rotate around the sketch centre; convert degrees to radians once.
  translate(width/2, height/2, 0);
  float rad = a*PI/180.0;
  rotateY(rad);
  rotateX(rad);

  // One textured quad, corners in normalized texture coordinates.
  beginShape(QUADS);
  texture(cap);
  vertex(-320, -240, 0, 0, 0);
  vertex( 320, -240, 0, 1, 0);
  vertex( 320,  240, 0, 1, 1);
  vertex(-320,  240, 0, 0, 1);
  endShape();

  // Advance one degree per frame, wrapping at a full turn.
  a = (a + 1) % 360;
}
 
// Processing callback fired when a new camera frame is available;
// read it so cap.pixels / texture(cap) reflect the latest frame.
void captureEvent(Capture cam)
{
  cam.read();
}

 

 
This is the PBO method.

import processing.video.*;
import processing.opengl.*;
import javax.media.opengl.*;
import java.nio.IntBuffer;
 
float a;              // rotation angle in degrees, wraps at 360
PGraphicsOpenGL pgl;  // cached OpenGL renderer for beginGL()/endGL()
GL gl;                // raw JOGL interface
PImage img;           // static background image
 
int [] tex = new int[1];  // GL texture object name
int [] pbo = new int[1];  // GL pixel buffer object name
 
Capture cap;          // live webcam stream
 
// Creates a pixel buffer object sized for one BGRA frame and an empty
// RGBA texture; draw() streams webcam pixels through the PBO into the
// texture each frame.
void setup()
{
  println(Capture.list());
  size(640, 480, OPENGL);
  hint(ENABLE_OPENGL_2X_SMOOTH);
  hint(DISABLE_DEPTH_TEST);
  a = 0;
 
  img = loadImage("tron.jpg");
  frameRate(30);
  pgl = (PGraphicsOpenGL) g;
  cap = new Capture(this, width, height, 30);
  cap.read();
 
  gl = pgl.gl;
 
  // Allocate the unpack buffer: 4 bytes per pixel (BGRA), data pointer
  // null so the store is created uninitialized; STREAM_DRAW because it
  // is rewritten every frame.
  gl.glGenBuffers(1, pbo, 0);
  gl.glBindBuffer(GL.GL_PIXEL_UNPACK_BUFFER, pbo[0]);  
  gl.glBufferData(GL.GL_PIXEL_UNPACK_BUFFER, 4*cap.width*cap.height, null, GL.GL_STREAM_DRAW);
  gl.glBindBuffer(GL.GL_PIXEL_UNPACK_BUFFER, 0);
 
  // Create the destination texture the PBO will feed.
  gl.glGenTextures(1, tex, 0);
  gl.glBindTexture(GL.GL_TEXTURE_2D, tex[0]);
 
  gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_NEAREST);
  gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_NEAREST);
  gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP);
  gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP);
 
  // Reserve texture storage only (pixels == null); real pixels arrive
  // via glTexSubImage2D from the bound PBO in draw().
  gl.glTexImage2D(GL.GL_TEXTURE_2D, 0, GL.GL_RGBA, cap.width, cap.height, 0, GL.GL_BGRA, GL.GL_UNSIGNED_BYTE, null);
  gl.glBindTexture(GL.GL_TEXTURE_2D, 0);
}
 
// Streams webcam pixels texture-ward through the PBO, then draws a
// rotating textured quad. Upload happens BEFORE the new pixels are
// written, so the screen lags the camera by one frame — the standard
// asynchronous streaming pattern.
void draw()
{
  background(0);
  image(img, 0, 0);
 
  gl = pgl.beginGL();
  gl.glColor3f( 1.0f, 1.0f, 1.0f);	
 
  gl.glEnable(GL.GL_TEXTURE_2D);
 
  gl.glBindTexture(GL.GL_TEXTURE_2D, tex[0]);
  gl.glBindBuffer(GL.GL_PIXEL_UNPACK_BUFFER, pbo[0]);
 
  // With a PBO bound to GL_PIXEL_UNPACK_BUFFER the final argument is a
  // byte offset into the buffer (0), not a client memory pointer.
  gl.glTexSubImage2D(GL.GL_TEXTURE_2D, 0, 0, 0, cap.width, cap.height, GL.GL_BGRA, GL.GL_UNSIGNED_BYTE, 
  0);
 
  // Re-specify the buffer store ("orphaning") so the map below does not
  // stall waiting for the in-flight texture transfer to finish.
  gl.glBufferData(GL.GL_PIXEL_UNPACK_BUFFER, 4*cap.width*cap.height, null, GL.GL_STREAM_DRAW);
 
  // Map the fresh store and copy this frame's pixels for the NEXT upload.
  IntBuffer tmp1 = gl.glMapBuffer(GL.GL_PIXEL_UNPACK_BUFFER, GL.GL_WRITE_ONLY).asIntBuffer();
  tmp1.put(cap.pixels);
 
  gl.glUnmapBuffer(GL.GL_PIXEL_UNPACK_BUFFER);
  gl.glBindBuffer(GL.GL_PIXEL_UNPACK_BUFFER, 0);
 
  gl.glTranslatef(width/2, height/2, 0);
  gl.glRotatef(a, 1, 1, 0);
 
  gl.glBegin(GL.GL_QUADS);	
  gl.glTexCoord2f(0.0f, 0.0f);			
  gl.glVertex3f(-320, -240, 0);
  gl.glTexCoord2f(1.0f, 0.0f);
  gl.glVertex3f( 320, -240, 0);
  gl.glTexCoord2f(1.0f, 1.0f);
  gl.glVertex3f( 320, 240, 0);
  gl.glTexCoord2f(0.0f, 1.0f);
  gl.glVertex3f(-320, 240, 0);
  gl.glEnd();
  // NOTE(review): GL_TEXTURE_2D is enabled but never disabled before
  // endGL() — confirm this does not interfere with Processing's own
  // 2D drawing (background/image) on the next frame.
  gl.glBindTexture(GL.GL_TEXTURE_2D, 0);
  pgl.endGL();
  a += 1.0;
  a %= 360;
}
 
// Processing callback fired when a new camera frame is available;
// read it so cap.pixels holds the latest frame for the PBO copy.
void captureEvent(Capture cam)
{
  cam.read();
}

 

Cocoa Capture and Display

The last example directly previews the camera image. In this example, I store the frame image and re-display it using an OpenGL view. There is no QTCaptureView in the window.

The major reference is the open source Cocoa based OpenCV experiment – CVOCV.