Stereoscopic 3D as Post Processor

Just out of curiosity, is it possible to implement stereoscopic 3d as a post processor in jME3?

Yes


Hey guys,



do you have any hints on how to implement stereoscopy as a SceneProcessor or PostFilter? We have the hardware for quad buffering, but iZ3D doesn't work with jME3. In jME2 there was something like HardwareStereo, AnaglyphStereo and SideBySideStereo. I tried to reimplement that stuff for jME3, but it doesn't work.



This is the version from jME2:



[java]/**
 * Stereo pass with asymmetric frustum projection
 * and 3 stereo presentation modes (anaglyph, side by side, OpenGL stereo buffers).
 *
 * To use stereo buffer mode, the display must be initialized with a
 * stereo setting (requires dedicated hardware).
 * Before passing GameSettings to the JmeContext,
 * set "GameStereo" to "true".
 *
 * @author Momoko_Fan
 */

    public class StereoRenderPass extends RenderPass {

    private static final long serialVersionUID = 1L;



    protected float focalLength = 15f;



    /**
     * Intraocular distance.
     */

    protected float IOD = 1f;



    protected StereoMode mode = StereoMode.SIDE_BY_SIDE;

    protected ProjectionMode pMode = ProjectionMode.ASYMMETRIC_FRUSTUM;



    protected Vector3f savedCamLoc = new Vector3f(),

    savedNodeLoc = new Vector3f();



    protected Vector3f temp = new Vector3f(),

    temp2 = new Vector3f();



    private IntBuffer offscreenBuffer = null;

    private int width = 0;

    private int height = 0;

    private File offscreenFile = null;



    protected enum CameraSide {

    LEFT,

    RIGHT

    }



    public enum StereoMode {

    NO_STEREO,

    ANAGLYPH,

    STEREO_BUFFER,

    SIDE_BY_SIDE

    }



    public enum ProjectionMode{

    SIMPLE_OFFSET,

    ASYMMETRIC_FRUSTUM

    }



    public StereoRenderPass(){

    super();

    }



    public void setMode(StereoMode mode){

    this.mode = mode;

    }



    public StereoMode getMode() {

    return this.mode;

    }



    public void setProjection(ProjectionMode mode){

    pMode = mode;

    }



    public ProjectionMode getProjection() {

    return this.pMode;

    }



    /**
     * Set the distance to where the two eyes focus.
     */

    public void setFocusDistance(float dist){

    focalLength = dist;

    }



    public float getFocusDistance(){

    return focalLength;

    }



    /**
     * Set the distance between the stereo cameras.
     */

    public void setEyeDistance(float dist){

    IOD = dist;

    }



    public float getEyeDistance(){

    return IOD;

    }



    protected void setFrustum(Renderer r, CameraSide side){

    Camera cam = r.getCamera();



    float aspectratio = 0;

    float aperture;



    float near = 0;

    float ndfl = 0;



    float widthdiv2 = 0;



    cam.getDirection().cross(cam.getUp(), temp);



    if (pMode == ProjectionMode.ASYMMETRIC_FRUSTUM){

    // Divide by 2 for side-by-side stereo

    aspectratio = (float)r.getWidth() / (float)r.getHeight();

    aperture = 45.0f;



    near = cam.getFrustumNear();

    ndfl = near / focalLength;



    // aperture in radians

    widthdiv2 = near * FastMath.tan((FastMath.DEG_TO_RAD * aperture) / 2.0f);



    temp.multLocal(IOD / 2.0f);

    }else{

    temp.multLocal(IOD * 4.0f);

    }



    if (side == CameraSide.RIGHT){

    if (pMode == ProjectionMode.ASYMMETRIC_FRUSTUM){

    float top = widthdiv2;

    float bottom = -widthdiv2;

    float left = (-aspectratio * widthdiv2) - (0.5f * IOD * ndfl);

    float right = (aspectratio * widthdiv2) - (0.5f * IOD * ndfl);



    cam.setFrustum(near, cam.getFrustumFar(), left, right, top, bottom);

    }



    cam.getLocation().addLocal(temp);

    }else{

    if (pMode == ProjectionMode.ASYMMETRIC_FRUSTUM){

    float top = widthdiv2;

    float bottom = -widthdiv2;

    float left = (-aspectratio * widthdiv2) + (0.5f * IOD * ndfl);

    float right = (aspectratio * widthdiv2) + (0.5f * IOD * ndfl);



    cam.setFrustum(near, cam.getFrustumFar(), left, right, top, bottom);

    }



    cam.getLocation().subtractLocal(temp);

    }

    }



    @Override

    public void doRender(Renderer r) {

    if (mode == StereoMode.ANAGLYPH) {

    doAnaglyphRender(r);

    } else if (mode == StereoMode.STEREO_BUFFER) {

    doHardwareRender(r);

    } else if (mode == StereoMode.SIDE_BY_SIDE) {

    doSideBySideRender(r);

    } else {

    doNoStereoRender(r);

    }

    doOffscreenRender(r);

    }



    private void doNoStereoRender(Renderer renderer) {

    renderer.clearBuffers();

    super.doRender(renderer);

    renderer.displayBackBuffer();

    renderer.getCamera().update();

    }



    private void doAnaglyphRender(Renderer renderer) {

    renderer.setPolygonOffset(zFactor, zOffset);

    renderer.clearBuffers();

    Camera cam = renderer.getCamera();



    // save the original camera location

    savedCamLoc.set(cam.getLocation());



    // LEFT EYE

    setFrustum(renderer, CameraSide.LEFT);

    GL11.glColorMask(true, false, false, true);

    cam.update();

    super.doRender(renderer);



    renderer.clearZBuffer();



    // RIGHT EYE

    setFrustum(renderer, CameraSide.RIGHT);

    GL11.glColorMask(false, true, true, true);

    cam.update();

    super.doRender(renderer);



    cam.getLocation().set(savedCamLoc);

    GL11.glColorMask(true, true, true, true);

    }



    private void doHardwareRender(Renderer renderer) {

    renderer.setPolygonOffset(zFactor, zOffset);

    GL11.glDrawBuffer(GL11.GL_BACK_LEFT);

    renderer.clearBuffers();

    GL11.glDrawBuffer(GL11.GL_BACK_RIGHT);

    renderer.clearBuffers();

    Camera cam = renderer.getCamera();



    // save the original camera location

    savedCamLoc.set(cam.getLocation());



    // LEFT EYE

    setFrustum(renderer, CameraSide.LEFT);

    GL11.glDrawBuffer(GL11.GL_BACK_LEFT);

    cam.update();

    super.doRender(renderer);



    renderer.clearZBuffer();



    // RIGHT EYE

    setFrustum(renderer, CameraSide.RIGHT);

    GL11.glDrawBuffer(GL11.GL_BACK_RIGHT);

    cam.update();

    super.doRender(renderer);



    cam.getLocation().set(savedCamLoc);

    GL11.glDrawBuffer(GL11.GL_BACK);

    }



    private void doSideBySideRender(Renderer renderer) {

    renderer.setPolygonOffset(zFactor, zOffset);

    renderer.clearBuffers();

    Camera cam = renderer.getCamera();



    // save the original camera location

    savedCamLoc.set(cam.getLocation());



    // LEFT EYE

    setFrustum(renderer, CameraSide.LEFT);

    cam.setViewPort(0f, 0.5f, 0f, 1f);

    cam.update();

    super.doRender(renderer);



    renderer.clearZBuffer();



    // RIGHT EYE

    setFrustum(renderer, CameraSide.RIGHT);

    cam.setViewPort(0.5f, 1.0f, 0f, 1f);

    cam.update();

    super.doRender(renderer);



    cam.getLocation().set(savedCamLoc);

    cam.setViewPort(0f, 1f, 0f, 1f);

    }



    private void doOffscreenRender(Renderer r) {

    if (offscreenBuffer != null) {

    GL11.glReadPixels(0, 0, width, height, GL12.GL_BGRA,

    GL11.GL_UNSIGNED_BYTE, offscreenBuffer);

    int[] bufArray = new int[offscreenBuffer.capacity()];

    offscreenBuffer.get(bufArray);

    flipArray(bufArray, width, height);

    DataBuffer db = new DataBufferInt(bufArray, width * height);

    int[] mask= {0x00ff0000, 0x0000ff00, 0x000000ff};

    ColorModel colorModel = new DirectColorModel(32, mask[0], mask[1], mask[2]);

    WritableRaster raster = Raster.createPackedRaster(db, width, height, width, mask, null);

    BufferedImage image = new BufferedImage(colorModel, raster, false, null);

    try {

    if (offscreenFile != null) {

    ImageIO.write(image, "png", offscreenFile);

    }

    } catch (IOException e) {

    Logger.getLogger("").severe("Unable to save image");

    e.printStackTrace();

    }

    offscreenBuffer = null;

    }

    }



    public void createOffscreenImage(int pWidth, int pHeight, File file) {

    this.offscreenBuffer = ByteBuffer.allocateDirect(pWidth * pHeight * 4)

    .order(ByteOrder.LITTLE_ENDIAN).asIntBuffer();

    this.width = pWidth;

    this.height = pHeight;

    this.offscreenFile = file;

    }



    private static void flipArray(int[] src, int width, int height) {

    List<int[]> blocks = new ArrayList<int[]>(height);

    for (int i = 0; i < src.length; i += width) {

    int[] widthStore = new int[width];

    for (int j = 0; j < width; j++) {

    widthStore[j] = src[i + j];

    }

    blocks.add(widthStore);

    }

    Collections.reverse(blocks);

    int index = 0;

    for (int[] block : blocks) {

    for (int i = 0; i < block.length; i++) {

    src[index++] = block[i];

    }

    }

    }

    }[/java]



    Any ideas?



    Regards

    Moe

I did a quick test case that works (I guess).

Apparently anaglyphs use the red channel of the left picture and the green and blue channels of the right one.

So what I did was set up two viewports, render them to offscreen framebuffers, and then render a fullscreen quad with a material whose shader is fed these two textures.

Here is the code. It's just a quick example, but it might point you toward a more "industrial" solution.



[java]

import com.jme3.app.SimpleApplication;

import com.jme3.light.DirectionalLight;

import com.jme3.material.Material;

import com.jme3.math.ColorRGBA;

import com.jme3.math.Vector3f;


import com.jme3.renderer.Camera;

import com.jme3.renderer.RenderManager;

import com.jme3.renderer.ViewPort;

import com.jme3.scene.Geometry;

import com.jme3.texture.FrameBuffer;

import com.jme3.texture.Image.Format;

import com.jme3.texture.Texture2D;

import com.jme3.ui.Picture;

import com.jme3.util.SkyFactory;



public class TestStereoscopy extends SimpleApplication {



public static void main(String[] args) {

TestStereoscopy app = new TestStereoscopy();

app.start();

}

FrameBuffer fbView1, fbView2;

Texture2D tex1, tex2;

Material matStereo;

Picture quadStereo;



public void simpleInitApp() {

createScene();



// Setup first view

cam.setLocation(new Vector3f(0.6612741f, 1.4273317f, 10.18577f));

fbView1 = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);

tex1 = new Texture2D(cam.getWidth(), cam.getHeight(), Format.RGBA8);

fbView1.setColorTexture(tex1);

fbView1.setDepthBuffer(Format.Depth);

viewPort.setOutputFrameBuffer(fbView1);



// Setup second view

Camera cam2 = cam.clone();

cam2.setLocation(new Vector3f(0.7612741f, 1.4273317f, 10.18577f));

final ViewPort view2 = renderManager.createMainView("Bottom Left", cam2);

view2.setClearFlags(true, true, true);

view2.attachScene(rootNode);

fbView2 = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);

tex2 = new Texture2D(cam.getWidth(), cam.getHeight(), Format.RGBA8);

fbView2.setColorTexture(tex2);

fbView2.setDepthBuffer(Format.Depth);

view2.setOutputFrameBuffer(fbView2);



quadStereo = new Picture("result");

matStereo = new Material(assetManager, "Common/MatDefs/Post/Stereoscopy.j3md");

matStereo.setTexture("Texture1", tex1);

matStereo.setTexture("Texture2", tex2);

quadStereo.setMaterial(matStereo);



flyCam.setEnabled(false);

}



@Override

public void simpleRender(RenderManager rm) {



quadStereo.setWidth(cam.getWidth());

quadStereo.setHeight(cam.getHeight());

quadStereo.setPosition(0, 0);

quadStereo.updateGeometricState();



rm.setCamera(cam, true);

rm.getRenderer().setFrameBuffer(null);

rm.renderGeometry(quadStereo);

}



private void createScene() {

// create the geometry and attach it

Geometry teaGeom = (Geometry) assetManager.loadModel("Models/Teapot/Teapot.obj");

teaGeom.scale(3);

teaGeom.getMaterial().setColor("GlowColor", ColorRGBA.Green);



DirectionalLight dl = new DirectionalLight();

dl.setColor(ColorRGBA.White);

dl.setDirection(Vector3f.UNIT_XYZ.negate());

rootNode.addLight(dl);

rootNode.attachChild(teaGeom);

rootNode.attachChild(SkyFactory.createSky(assetManager, "Textures/Sky/Bright/BrightSky.dds", false));

}

}

[/java]



J3md:



MaterialDef Stereoscopy {



MaterialParameters {

Texture2D Texture1

Texture2D Texture2

}





Technique {

VertexShader GLSL100: Common/MatDefs/Post/Post.vert

FragmentShader GLSL100: Common/MatDefs/Post/Stereoscopy.frag



WorldParameters {

WorldViewProjectionMatrix

}

}



Technique FixedFunc {

}



}





Shader:



uniform sampler2D m_Texture1;

uniform sampler2D m_Texture2;

varying vec2 texCoord;





void main() {

vec4 texVal1 = texture2D(m_Texture1, texCoord);

vec4 texVal2 = texture2D(m_Texture2, texCoord);



gl_FragColor = vec4(texVal1.r, texVal2.g, texVal2.b, 1.0);



}





And a screenshot of the result:

http://i.imgur.com/Hm17D.jpg


Hey, that's great. But I'm more interested in the quad buffering stuff. We have two projectors, one with a polarization filter, and both are plugged into the outputs of one graphics card. To use this kind of polarization stereo, we need to be able to render into two different framebuffers. In the example from Momoko_Fan it's the method doHardwareRender that does the trick.
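
For reference, quad buffering just means the GL context exposes separate left and right back buffers that the driver maps to the two outputs. A minimal sketch (mine, not from the code above) of the core calls, including a probe for support up front:

[java]import org.lwjgl.opengl.GL11;

// Probe for quad-buffer support before touching GL_BACK_LEFT/GL_BACK_RIGHT;
// a context created without a stereo pixel format raises GL_INVALID_OPERATION
// when those buffers are selected.
if (GL11.glGetBoolean(GL11.GL_STEREO)) {
    GL11.glDrawBuffer(GL11.GL_BACK_LEFT);
    // ... render the left-eye pass ...
    GL11.glDrawBuffer(GL11.GL_BACK_RIGHT);
    // ... render the right-eye pass ...
    GL11.glDrawBuffer(GL11.GL_BACK); // restore the default draw buffer
}[/java]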



Regards

Moe

That will require direct OpenGL calls … those are not provided directly by the jME renderer.

Yes, I thought so. OK, the OpenGL calls are not the problem, but how can I manually render the scene, like the example above does with super.doRender(…)?
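
One approach that should work is to take over the render loop and call RenderManager.render once per eye yourself. A minimal sketch under that assumption (the scene-update step is only hinted at in comments, and the per-eye state switching is elided):

[java]import com.jme3.app.Application;

// Sketch: drive rendering yourself instead of letting SimpleApplication's
// update() call RenderManager.render() once per frame. Extending Application
// directly means per-frame scene updates are our own responsibility.
public class ManualRenderApp extends Application {

    @Override
    public void update() {
        super.update();                 // Application.update(): timer + input
        float tpf = timer.getTimePerFrame();

        stateManager.update(tpf);
        // update your scene graph here (updateLogicalState/updateGeometricState)

        // per eye: set the frustum / camera offset / GL draw buffer,
        // then let jME3 render all viewports again
        renderManager.render(tpf);      // left-eye pass
        renderManager.render(tpf);      // right-eye pass (after switching eye state)

        stateManager.postRender();
    }
}[/java]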

Well, I tested a little and I think I got it running. Side by side and anaglyph work now, but the quad buffering still doesn't. If I directly call GL11.glDrawBuffer(GL11.GL_BACK_RIGHT), the following exception is thrown:



[java]org.lwjgl.opengl.OpenGLException: Invalid operation (1282)

at org.lwjgl.opengl.Util.checkGLError(Util.java:59)

at org.lwjgl.opengl.GL11.glDrawBuffer(GL11.java:1172)

at framework.jme3.modules.processing.StereoModule.doHardwareRender(StereoModule.java:130)

at framework.jme3.modules.processing.StereoModule.render(StereoModule.java:97)

at framework.jme3.modules.ModuleManager.render(ModuleManager.java:102)

at framework.jme3.CityApplication.update(CityApplication.java:465)

at com.jme3.system.lwjgl.LwjglAbstractDisplay.runLoop(LwjglAbstractDisplay.java:144)

at com.jme3.system.lwjgl.LwjglCanvas.runLoop(LwjglCanvas.java:199)

at com.jme3.system.lwjgl.LwjglAbstractDisplay.run(LwjglAbstractDisplay.java:218)

at java.lang.Thread.run(Thread.java:662)[/java]



Are there any flags I have to set to use quad buffering?

Tested on graphics card: NVIDIA Quadro FX 4600.

Finally I got it working. I added the following line in LwjglCanvas:



[java]protected PixelFormat acquirePixelFormat(){

if (pixelFormat == null){

pixelFormat = new PixelFormat(settings.getBitsPerPixel(),

0,

settings.getDepthBits(),

settings.getStencilBits(),

settings.getSamples());

pixelFormat = pixelFormat.withStereo(true); // withStereo returns a copy, so keep the result

}

return pixelFormat;

}[/java]



Could you guys maybe provide a method to set up the stereo flag? It could also be integrated into the settings class.
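
Until something official exists, here is a hedged sketch of what such a hook could look like; the "Stereo3D" key is purely hypothetical, not an existing jME3 setting:

[java]import com.jme3.app.SimpleApplication;
import com.jme3.system.AppSettings;

// Hypothetical sketch: route the stereo flag through AppSettings' generic
// key/value store. The "Stereo3D" key is an assumption; the engine would
// have to read it inside acquirePixelFormat().
public class StereoSettingsExample {
    public static void main(String[] args) {
        AppSettings settings = new AppSettings(true);
        settings.putBoolean("Stereo3D", true); // hypothetical key

        SimpleApplication app = new TestStereoscopy(); // from the example above
        app.setSettings(settings);
        app.start();
    }
}

// ...and inside LwjglCanvas.acquirePixelFormat() (sketch):
// if (settings.getBoolean("Stereo3D")) {
//     pixelFormat = pixelFormat.withStereo(true);
// }[/java]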



Regards

Moe

If you do get it to run: I have an nVidia 3D Vision kit here, so I could test things out.

@madjack: I already did get it to run. But to use the GL_BACK_RIGHT buffer I needed to set the pixel format the right way :wink:



For all who are interested, here's the final version:



[java]public class StereoModule extends SimpleModule {



public enum StereoMode {

Anaglyph, SideBySide, Hardware

}



protected enum CameraSide {

LEFT,

RIGHT

}



public enum ProjectionMode{

SIMPLE_OFFSET,

ASYMMETRIC_FRUSTUM

}



protected float focalLength = 15f;

protected float iod = 1f;



protected StereoMode mode = StereoMode.SideBySide;

protected ProjectionMode pMode = ProjectionMode.ASYMMETRIC_FRUSTUM;



private Vector3f savedCamLoc = new Vector3f();

private Vector3f temp = new Vector3f();



private RenderManager rm = null;

private Camera cam = null;



public StereoModule(boolean pEnabled, String pDescription) {

super(pEnabled, pDescription);

}



@Override

protected void simpleInit(CityApplication pCity) {

rm = pCity.getRenderManager();

cam = pCity.getCamera();

}



@Override

protected boolean simpleUpdate(CityApplication pCityApp) {

return false;

}



public void render(float tpf) {

if (isEnabled()) {

switch (mode) {

case Anaglyph:

doAnaglyphRender(tpf);

break;

case SideBySide:

doSideBySideRender(tpf);

break;

case Hardware:

doHardwareRender(tpf);

break;

}

} else {

rm.render(tpf);

}

}



private void doAnaglyphRender(float tpf) {

rm.getRenderer().clearBuffers(true, true, true);

savedCamLoc.set(cam.getLocation());

setFrustum(cam, CameraSide.LEFT);

GL11.glColorMask(true, false, false, true);

cam.update();

rm.render(tpf);

rm.getRenderer().clearBuffers(false, true, false);

setFrustum(cam, CameraSide.RIGHT);

GL11.glColorMask(false, true, true, true);

cam.update();

rm.render(tpf);

cam.getLocation().set(savedCamLoc);

GL11.glColorMask(true, true, true, true);

}



private void doHardwareRender(float tpf) {

try {

GL11.glDrawBuffer(GL11.GL_BACK_LEFT);

rm.getRenderer().clearBuffers(true, true, true);

GL11.glDrawBuffer(GL11.GL_BACK_RIGHT);

rm.getRenderer().clearBuffers(true, true, true);

savedCamLoc.set(cam.getLocation());



// LEFT EYE

setFrustum(cam, CameraSide.LEFT);

GL11.glDrawBuffer(GL11.GL_BACK_LEFT);

cam.update();

rm.render(tpf); // super.doRender(renderer);

rm.getRenderer().clearBuffers(false, true, false); // renderer.clearZBuffer();



// RIGHT EYE

setFrustum(cam, CameraSide.RIGHT);

GL11.glDrawBuffer(GL11.GL_BACK_RIGHT);

cam.update();

rm.render(tpf); // super.doRender(renderer);

cam.getLocation().set(savedCamLoc);

GL11.glDrawBuffer(GL11.GL_BACK);

} catch (OpenGLException e) {

e.printStackTrace();

System.err.println("Quad buffering not supported by the hardware. Falling back to anaglyph mode.");

mode = StereoMode.Anaglyph;

render(tpf);

}

}



private void doSideBySideRender(float tpf) {

// renderer.setPolygonOffset(zFactor, zOffset);



rm.getRenderer().clearBuffers(true, true, true);

savedCamLoc.set(cam.getLocation());



// LEFT EYE

setFrustum(cam, CameraSide.LEFT);

cam.setViewPort(0f, 0.5f, 0f, 1f);

cam.update();

rm.render(tpf); // super.doRender(renderer);

rm.getRenderer().clearBuffers(false, true, false);



// RIGHT EYE

setFrustum(cam, CameraSide.RIGHT);

cam.setViewPort(0.5f, 1.0f, 0f, 1f);

cam.update();

rm.render(tpf); // super.doRender(renderer);

cam.getLocation().set(savedCamLoc);

cam.setViewPort(0f, 1f, 0f, 1f);

}



protected void setFrustum(Camera cam, CameraSide side){

float aspectratio = 0;

float aperture;

float near = 0;

float ndfl = 0;

float widthdiv2 = 0;

cam.getDirection().cross(cam.getUp(), temp);

if (pMode == ProjectionMode.ASYMMETRIC_FRUSTUM){

// Divide by 2 for side-by-side stereo

aspectratio = (float) cam.getWidth() / (float) cam.getHeight();

aperture = 45.0f;

near = cam.getFrustumNear();

ndfl = near / focalLength;

// aperture in radians

widthdiv2 = near * FastMath.tan((FastMath.DEG_TO_RAD * aperture) / 2.0f);

temp.multLocal(iod / 2.0f);

} else {

temp.multLocal(iod * 4.0f);

}

if (side == CameraSide.RIGHT){

if (pMode == ProjectionMode.ASYMMETRIC_FRUSTUM){

float top = widthdiv2;

float bottom = -widthdiv2;

float left = (-aspectratio * widthdiv2) - (0.5f * iod * ndfl);

float right = (aspectratio * widthdiv2) - (0.5f * iod * ndfl);

cam.setFrustum(near, cam.getFrustumFar(), left, right, top, bottom);

}

cam.getLocation().addLocal(temp);

} else {

if (pMode == ProjectionMode.ASYMMETRIC_FRUSTUM){

float top = widthdiv2;

float bottom = -widthdiv2;

float left = (-aspectratio * widthdiv2) + (0.5f * iod * ndfl);

float right = (aspectratio * widthdiv2) + (0.5f * iod * ndfl);

cam.setFrustum(near, cam.getFrustumFar(), left, right, top, bottom);

}

cam.getLocation().subtractLocal(temp);

}

}



public void setMode(StereoMode mode){

this.mode = mode;

}



public StereoMode getStereoMode() {

return this.mode;

}



public void setProjection(ProjectionMode mode){

pMode = mode;

}



public ProjectionMode getProjection() {

return this.pMode;

}



public void setFocusDistance(float dist){

focalLength = dist;

}



public float getFocusDistance(){

return focalLength;

}



public void setEyeDistance(float dist){

iod = dist;

}

public float getEyeDistance(){

return iod;

}



}[/java]



Make sure to have lwjgl.jar on the classpath.
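
Usage looks roughly like this (SimpleModule/ModuleManager are classes from our own framework, so adapt as needed; the distances are just example values):

[java]// Example wiring; SimpleModule/ModuleManager are framework-specific classes
// from this thread's stack trace, and the distances are illustrative only.
StereoModule stereo = new StereoModule(true, "quad-buffered stereo");
stereo.setMode(StereoModule.StereoMode.Hardware);
stereo.setProjection(StereoModule.ProjectionMode.ASYMMETRIC_FRUSTUM);
stereo.setEyeDistance(1f);    // IOD in world units
stereo.setFocusDistance(15f); // convergence distance

// then, once per frame instead of the default render call:
// stereo.render(tpf);[/java]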

Hi,



could you please add a method to set up the stereo mode with

[java]pixelFormat.withStereo(true);[/java]



I cannot call this method from outside LwjglCanvas, because acquirePixelFormat is protected, and I need such a possibility to use quad buffering.
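
In the meantime, a possible workaround is to subclass the canvas; just a sketch, and it assumes the returned format is what the canvas actually uses when creating the GL context (this may differ between jME3 versions):

[java]import org.lwjgl.opengl.PixelFormat;

import com.jme3.system.lwjgl.LwjglCanvas;

// Workaround sketch: override the protected factory method so the context
// is created with a stereo-capable pixel format.
public class StereoLwjglCanvas extends LwjglCanvas {
    @Override
    protected PixelFormat acquirePixelFormat() {
        // withStereo returns a copy, so the result must be returned
        return super.acquirePixelFormat().withStereo(true);
    }
}[/java]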



Regards

Moe