VideoRecorderAppState scrambled video lines

Hello all.

Short Version:
My picture-in-picture video has a scrambled red line through it and I don’t know why.

Long Version:
I have written a program in which two cameras view a scene from different positions. One camera’s ViewPort is the main ViewPort and the second is a picture-in-picture (pip) ViewPort, like so:

I then use two slightly modified VideoRecorderAppStates to record the main ViewPort and the pip ViewPort into separate video files.

I have this working; however, the pip video has a scrambled red line through the center, as shown:

Any ideas on what may be causing this and how I might correct it?

I am using jME3 (not sure which version, but it was installed in September 2014) on a Windows 7 64-bit system with an NVIDIA Quadro 4000 video card.

I hate posting a wall of code, but in an attempt to provide full information, what follows should be working code that, when run, records video for 12 seconds and saves the two videos in the user’s home directory.

Main.java


import java.awt.Dimension;
import java.io.File;

import com.jme3.app.SimpleApplication;
import com.jme3.asset.TextureKey;
import com.jme3.input.MouseInput;
import com.jme3.input.controls.ActionListener;
import com.jme3.input.controls.AnalogListener;
import com.jme3.input.controls.MouseAxisTrigger;
import com.jme3.input.controls.MouseButtonTrigger;
import com.jme3.input.controls.Trigger;
import com.jme3.light.AmbientLight;
import com.jme3.light.DirectionalLight;
import com.jme3.material.Material;
import com.jme3.math.ColorRGBA;
import com.jme3.math.FastMath;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import com.jme3.post.FilterPostProcessor;
import com.jme3.post.filters.BloomFilter;
import com.jme3.post.filters.LightScatteringFilter;
import com.jme3.renderer.Camera;
import com.jme3.renderer.ViewPort;
import com.jme3.renderer.queue.RenderQueue.ShadowMode;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial;
import com.jme3.scene.shape.Box;
import com.jme3.system.AppSettings;
import com.jme3.texture.Texture;
import com.jme3.util.SkyFactory;

public class Main extends SimpleApplication {

	private String NAME_BOX_1 = "Box1";
	private String NAME_BOX_2 = "Box2";
	private String NAME_BOX_3 = "Box3";
	private String MATERIAL_LIGHTING = "Common/MatDefs/Light/Lighting.j3md";
	private String TEXTURE_STONE_WALL = "Textures/Terrain/BrickWall/BrickWall.jpg";
	private String TEXTURE_STONE_WALL_NORMAL = "Textures/Terrain/BrickWall/BrickWall_normal.jpg";
	private String TEXTURE_FLOOR = "Textures/Terrain/Pond/Pond.jpg";
	private String TEXTURE_FLOOR_NORMAL = "Textures/Terrain/Pond/Pond_normal.png";
	private String TEXTURE_SKY = "Textures/Sky/Lagoon/";
	private String TEXTURE_MISSING = "";

	private float DEGREES_45 = FastMath.DEG_TO_RAD * 45f;
	private Node boxNode;

	private final static Trigger TRIGGER_GRAB = new MouseButtonTrigger(
			MouseInput.BUTTON_LEFT);
	private final static Trigger TRIGGER_ROTATE_X = new MouseAxisTrigger(
			MouseInput.AXIS_X, true);
	private final static Trigger TRIGGER_ROTATE_Y = new MouseAxisTrigger(
			MouseInput.AXIS_Y, true);
	private final static Trigger TRIGGER_ROTATE_NEGATIVE_X = new MouseAxisTrigger(
			MouseInput.AXIS_X, false);
	private final static Trigger TRIGGER_ROTATE_NEGATIVE_Y = new MouseAxisTrigger(
			MouseInput.AXIS_Y, false);
	private final static Trigger TRIGGER_ZOOM_OUT = new MouseAxisTrigger(
			MouseInput.AXIS_WHEEL, false);
	private final static Trigger TRIGGER_ZOOM_IN = new MouseAxisTrigger(
			MouseInput.AXIS_WHEEL, true);

	private final static String MAPPING_GRAB = "Grab";
	private final static String MAPPING_ROTATE_ABOUT_Y = "Rotate Y";
	private final static String MAPPING_ROTATE_ABOUT_X = "Rotate X";
	private final static String MAPPING_ROTATE_ABOUT_NEGATIVE_Y = "Rotate negative Y";
	private final static String MAPPING_ROTATE_ABOUT_NEGATIVE_X = "Rotate negative X";
	private final static String MAPPING_ZOOM_OUT = "Zoom out";
	private final static String MAPPING_ZOOM_IN = "Zoom in";

	private Vector3f LIGHT_DIRECTION_VECTOR = new Vector3f(5f, -2f, 5f);
	private final static float CAMERA_SPEED = 100f;

	private DirectionalLight sun;

	private TestVideoAppState vidAppState;
	private TestVideoAppState pipVidAppState;
	private float count;
	
	private ViewPort pipViewPort;
	private Camera pipCam;

	@Override
	public void simpleInitApp() {
		flyCam.setEnabled(false);
		setUpKeyMappings();
		Geometry box1Geom = makeTexturedBox(NAME_BOX_1, ColorRGBA.Gray,
				TEXTURE_STONE_WALL, TEXTURE_MISSING,
				TEXTURE_STONE_WALL_NORMAL, TEXTURE_MISSING, false);
		Geometry box2Geom = makeTexturedBox(NAME_BOX_2, ColorRGBA.Gray,
				TEXTURE_STONE_WALL, TEXTURE_MISSING,
				TEXTURE_STONE_WALL_NORMAL, TEXTURE_MISSING, false);
		box2Geom.setLocalTranslation(new Vector3f(4, 3, -4));
		box2Geom.rotate(DEGREES_45, 0.0f, 0.0f);
		box2Geom.rotate(0.0f, DEGREES_45, 0.0f);
		Geometry box3Geom = makeTexturedBox(NAME_BOX_3, ColorRGBA.Gray,
				TEXTURE_STONE_WALL, TEXTURE_MISSING,
				TEXTURE_STONE_WALL_NORMAL, TEXTURE_MISSING, false);
		box3Geom.setLocalTranslation(new Vector3f(-4, 3, 4));
		Geometry floor = makeFloor();
		floor.setLocalTranslation(new Vector3f(0, -1.05f, 0));
		// make the object appear in the scene
		boxNode = new Node();
		boxNode.attachChild(box1Geom);
		boxNode.attachChild(box2Geom);
		boxNode.attachChild(box3Geom);
		boxNode.attachChild(floor);
		rootNode.attachChild(boxNode);
		setUpLight();

		rootNode.setShadowMode(ShadowMode.Off);
		floor.setShadowMode(ShadowMode.Receive);
		box1Geom.setShadowMode(ShadowMode.CastAndReceive);
		box2Geom.setShadowMode(ShadowMode.CastAndReceive);
		box3Geom.setShadowMode(ShadowMode.CastAndReceive);
		cam.setLocation(cam.getLocation().add(new Vector3f(5, 4, 2)));
		cam.lookAt(Vector3f.ZERO, Vector3f.UNIT_Y);

		FilterPostProcessor fpp = new FilterPostProcessor(assetManager);
		viewPort.addProcessor(fpp);
		viewPort.setBackgroundColor(new ColorRGBA());

		BloomFilter bloom = new BloomFilter();
		bloom.setExposurePower(55);
		bloom.setBloomIntensity(1.0f);
		fpp.addFilter(bloom);
		// note: multLocal() modifies LIGHT_DIRECTION_VECTOR in place
		Vector3f lightPos = LIGHT_DIRECTION_VECTOR.multLocal(-100);
		LightScatteringFilter filter = new LightScatteringFilter(lightPos);

		fpp.addFilter(filter);

		makeSky();
		
		// Picture in Picture 
		pipCam = cam.clone();
		pipCam.setName("pipCam");
		pipViewPort = renderManager.createMainView(pipCam.getName(), pipCam);
		ViewPortPosition vpp = ViewPortPosition.TOP_RIGHT_STANDARD_MARGIN;
		pipCam.setViewPort(vpp.getLeft(),
				vpp.getRight(), vpp.getBottom(),
				vpp.getTop());
		pipCam.setLocation(boxNode.getWorldTranslation().subtract(LIGHT_DIRECTION_VECTOR).normalize().mult(15));
		pipCam.setLocation(pipCam.getLocation().add(Vector3f.UNIT_Y.mult(5)));
		pipCam.lookAt(boxNode.getWorldTranslation(), Vector3f.UNIT_Y);
		pipViewPort.setClearFlags(true, true, true);
		pipViewPort.attachScene(rootNode);
		
		FilterPostProcessor pipfpp = new FilterPostProcessor(assetManager);
		pipViewPort.addProcessor(pipfpp);
		pipViewPort.setBackgroundColor(new ColorRGBA());

		BloomFilter pipbloom = new BloomFilter();
		pipbloom.setExposurePower(55);
		pipbloom.setBloomIntensity(1.0f);
		pipfpp.addFilter(pipbloom);
		LightScatteringFilter pipfilter = new LightScatteringFilter(lightPos);

		pipfpp.addFilter(pipfilter);
		
		// Video Recording
		pipVidAppState = new TestVideoAppState(new File(System.getProperty("user.home") + File.separator + "jMonkey-pip" + System.currentTimeMillis() / 1000 + ".avi"));
		pipVidAppState.setViewPort(pipViewPort);
		stateManager.attach(pipVidAppState);
		vidAppState = new TestVideoAppState(new File(System.getProperty("user.home") + File.separator + "jMonkey-" + System.currentTimeMillis() / 1000 + ".avi"));
		stateManager.attach(vidAppState);
		count = 0f;
	}

	@Override
	public void simpleUpdate(float tpf) {
		// Video Recording
		count += tpf;
		if (count > 12) {
			if (count<12.5) {
				System.out.println("Video Done!!");
			}
			stateManager.detach(vidAppState);
			stateManager.detach(pipVidAppState);
		}
		Geometry box2 = (Geometry) boxNode.getChild(NAME_BOX_2);
		Quaternion currentRotation = box2.getWorldRotation();
		Vector3f axis = currentRotation.toRotationMatrix().getRow(1);
		Quaternion rotation = new Quaternion();
		rotation.fromAngleAxis(tpf, axis);
		box2.rotate(rotation);

		Geometry woodBox = (Geometry) boxNode.getChild(NAME_BOX_3);
		woodBox.rotate(0, -tpf / 2.0f, 0);
		
		Vector3f pipCamLeft = pipCam.getLeft();
		pipCam.setLocation(pipCam.getLocation().add(pipCamLeft.normalize().mult(tpf*CAMERA_SPEED/10.0f)));
		pipCam.lookAt(boxNode.getWorldTranslation(), Vector3f.UNIT_Y);
		pipCam.setLocation(boxNode.getWorldTranslation().subtract(pipCam.getDirection().normalize().mult(15)));
	}

	/* makes a simple pebbled floor */
	private Geometry makeFloor() {
		Box box = new Box(10, 0.1f, 10);
		Geometry boxGeom = new Geometry("floor", box);
		Material mat = new Material(assetManager, MATERIAL_LIGHTING);
		mat.setColor("Ambient", ColorRGBA.Gray);
		TextureKey diffuse = new TextureKey(TEXTURE_FLOOR, false);
		mat.setTexture("DiffuseMap", assetManager.loadTexture(diffuse));
		TextureKey normal = new TextureKey(TEXTURE_FLOOR_NORMAL, false);
		mat.setTexture("NormalMap", assetManager.loadTexture(normal));
		boxGeom.setMaterial(mat);
		return boxGeom;
	}

	/* Creates a directional "sun" and ambient light */
	private void setUpLight() {
		sun = new DirectionalLight();
		sun.setDirection(LIGHT_DIRECTION_VECTOR);
		sun.setColor(ColorRGBA.White);
		rootNode.addLight(sun);
		AmbientLight ambient = new AmbientLight();
		ambient.setColor(ColorRGBA.White);
		rootNode.addLight(ambient);
	}

	/* Makes the sky box from jME3-testdata.jar files */
	private void makeSky() {
		Texture north = assetManager.loadTexture(TEXTURE_SKY
				+ "lagoon_north.jpg");
		Texture south = assetManager.loadTexture(TEXTURE_SKY
				+ "lagoon_south.jpg");
		Texture east = assetManager
				.loadTexture(TEXTURE_SKY + "lagoon_east.jpg");
		Texture west = assetManager
				.loadTexture(TEXTURE_SKY + "lagoon_west.jpg");
		Texture up = assetManager.loadTexture(TEXTURE_SKY + "lagoon_up.jpg");
		Texture down = assetManager
				.loadTexture(TEXTURE_SKY + "lagoon_down.jpg");
		Spatial skyBox = SkyFactory.createSky(assetManager, west, east, north,
				south, up, down);
		boxNode.attachChild(skyBox);
	}
	
	/* Makes a geometry of a box with textures */
	private Geometry makeTexturedBox(String name, ColorRGBA color,
			String textureMap, String heightMap, String normalMap,
			String specularMap, boolean invert) {
		Box box = new Box(1, 1, 1);
		Geometry boxGeom = new Geometry(name, box);
		Material mat = new Material(assetManager, MATERIAL_LIGHTING);
		mat.setBoolean("UseMaterialColors", true);
		mat.setColor("Diffuse", color);
		mat.setColor("Ambient", ColorRGBA.DarkGray);
		mat.setColor("Specular", ColorRGBA.White);
		mat.setFloat("Shininess", 128f); // [1,128]
		if (!textureMap.equals("")) {
			TextureKey diffuse = new TextureKey(textureMap, invert);
			mat.setTexture("DiffuseMap", assetManager.loadTexture(diffuse));
		}
		if (!normalMap.equals("")) {
			TextureKey normal = new TextureKey(normalMap, invert);
			mat.setTexture("NormalMap", assetManager.loadTexture(normal));
		}
		if (!heightMap.equals("")) {
			TextureKey parallax = new TextureKey(heightMap, invert);
			mat.setTexture("ParallaxMap", assetManager.loadTexture(parallax));
		}
		if (!specularMap.equals("")) {
			TextureKey specular = new TextureKey(specularMap, invert);
			mat.setTexture("SpecularMap", assetManager.loadTexture(specular));
		}
		boxGeom.setMaterial(mat);
		return boxGeom;
	}

	/** Start the jMonkeyEngine application */
	public static void main(String[] args) {
		float screenScaleFactor = 0.9f;
		AppSettings settings = new AppSettings(true);
		settings.setTitle("jME3 Test"); // specify your settings here
		Dimension screenDimensions = java.awt.Toolkit.getDefaultToolkit()
				.getScreenSize();
		settings.setResolution(
				(int) (screenDimensions.width * screenScaleFactor),
				(int) ((screenDimensions.height * screenScaleFactor) - 50));
		Main app = new Main();
		app.setSettings(settings); // apply settings to app
		app.setPauseOnLostFocus(false);
		app.setShowSettings(false);
		app.start(); // use settings and run
	}
	
	/* Listener for mouse clicks */
	private boolean grabbed = false;
	private ActionListener actionListener = new ActionListener() {
		public void onAction(String name, boolean isPressed, float tpf) {
			if (name.equals(MAPPING_GRAB) && !isPressed) {
				grabbed = false;
			} else {
				grabbed = true;
			}
		}
	};
	
	/* Listener for mouse movements */
	private AnalogListener analogListener = new AnalogListener() {
		public void onAnalog(String name, float intensity, float tpf) {
			if (name.equals(MAPPING_ZOOM_OUT)) {
				Vector3f camDirection = cam.getDirection();
				cam.setLocation(cam.getLocation().subtract(
						camDirection.normalize().mult(CAMERA_SPEED * tpf)));
			} else if (name.equals(MAPPING_ZOOM_IN)) {
				Vector3f camDirection = cam.getDirection();
				cam.setLocation(cam.getLocation().add(
						camDirection.normalize().mult(CAMERA_SPEED * tpf)));
			} else if (grabbed) {
				if (name.equals(MAPPING_ROTATE_ABOUT_Y)) {
					Vector3f camLeft = cam.getLeft();
					cam.setLocation(cam.getLocation().add(
							camLeft.normalize().mult(CAMERA_SPEED * intensity)));
					cam.lookAt(boxNode.getWorldTranslation(), Vector3f.UNIT_Y);
				} else if (name.equals(MAPPING_ROTATE_ABOUT_X)) {
					Vector3f camUp = cam.getUp();
					cam.setLocation(cam.getLocation().add(
							camUp.normalize().mult(CAMERA_SPEED * intensity)));
					cam.lookAt(boxNode.getWorldTranslation(), Vector3f.UNIT_Y);
				} else if (name.equals(MAPPING_ROTATE_ABOUT_NEGATIVE_Y)) {
					Vector3f camLeft = cam.getLeft();
					cam.setLocation(cam.getLocation().subtract(
							camLeft.normalize().mult(CAMERA_SPEED * intensity)));
					cam.lookAt(boxNode.getWorldTranslation(), Vector3f.UNIT_Y);
				} else if (name.equals(MAPPING_ROTATE_ABOUT_NEGATIVE_X)) {
					Vector3f camUp = cam.getUp();
					cam.setLocation(cam.getLocation().subtract(
							camUp.normalize().mult(CAMERA_SPEED * intensity)));
					cam.lookAt(boxNode.getWorldTranslation(), Vector3f.UNIT_Y);
				}
			}
		}
	};

	/* Makes mouse movements rotate and zoom the scene */
	private void setUpKeyMappings() {
		inputManager.addMapping(MAPPING_GRAB, TRIGGER_GRAB);
		inputManager.addMapping(MAPPING_ROTATE_ABOUT_Y, TRIGGER_ROTATE_X);
		inputManager.addMapping(MAPPING_ROTATE_ABOUT_X, TRIGGER_ROTATE_Y);
		inputManager.addMapping(MAPPING_ROTATE_ABOUT_NEGATIVE_Y,
				TRIGGER_ROTATE_NEGATIVE_X);
		inputManager.addMapping(MAPPING_ROTATE_ABOUT_NEGATIVE_X,
				TRIGGER_ROTATE_NEGATIVE_Y);
		inputManager.addMapping(MAPPING_ZOOM_OUT, TRIGGER_ZOOM_OUT);
		inputManager.addMapping(MAPPING_ZOOM_IN, TRIGGER_ZOOM_IN);

		inputManager.addListener(actionListener, new String[] { MAPPING_GRAB });
		inputManager.addListener(analogListener, new String[] {
				MAPPING_ROTATE_ABOUT_Y, MAPPING_ROTATE_ABOUT_X,
				MAPPING_ROTATE_ABOUT_NEGATIVE_Y,
				MAPPING_ROTATE_ABOUT_NEGATIVE_X, MAPPING_ZOOM_OUT,
				MAPPING_ZOOM_IN });
	}
}

TestVideoAppState.java
Nearly the same as VideoRecorderAppState.java, with the addition of being able to set the ViewPort that is recorded.


import com.jme3.app.Application;
import com.jme3.app.state.AbstractAppState;
import com.jme3.app.state.AppStateManager;
import com.jme3.app.state.MjpegFileWriter;
import com.jme3.post.SceneProcessor;
import com.jme3.renderer.Camera;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.Renderer;
import com.jme3.renderer.ViewPort;
import com.jme3.renderer.queue.RenderQueue;
import com.jme3.system.Timer;
import com.jme3.texture.FrameBuffer;
import com.jme3.util.BufferUtils;
import com.jme3.util.Screenshots;

import java.awt.image.BufferedImage;
import java.io.File;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.*;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * A Video recording AppState that records the screen output into an AVI file
 * with M-JPEG content. The file should be playable on any OS in any video
 * player.<br/>
 * The video recording starts when the state is attached and stops when it is
 * detached or the application is quit. You can set the fileName of the file to
 * be written when the state is detached, else the old file will be overwritten.
 * If you specify no file the AppState will attempt to write a file into the
 * user home directory, made unique by a timestamp.
 * 
 * @author normenhansen, Robert McIntyre, entrusC
 */
public class TestVideoAppState extends AbstractAppState {

	private int framerate = 30;
	private VideoProcessor processor;
	private File file;
	private Application app;
	private ExecutorService executor = Executors
			.newCachedThreadPool(new ThreadFactory() {

				public Thread newThread(Runnable r) {
					Thread th = new Thread(r);
					th.setName("jME Video Processing Thread");
					th.setDaemon(true);
					return th;
				}
			});
	private int numCpus = Runtime.getRuntime().availableProcessors();
	private ViewPort lastViewPort;
	private float quality;
	private Timer oldTimer;

	/**
	 * Using this constructor the video files will be written sequentially to
	 * the user's home directory with a quality of 0.8 and a framerate of 30fps.
	 */
	public TestVideoAppState() {
		this(null, 0.8f);
	}

	/**
	 * Using this constructor the video files will be written sequentially to
	 * the user's home directory.
	 * 
	 * @param quality
	 *            the quality of the jpegs in the video stream (0.0 smallest
	 *            file - 1.0 largest file)
	 */
	public TestVideoAppState(float quality) {
		this(null, quality);
	}

	/**
	 * Using this constructor the video files will be written sequentially to
	 * the user's home directory.
	 * 
	 * @param quality
	 *            the quality of the jpegs in the video stream (0.0 smallest
	 *            file - 1.0 largest file)
	 * @param framerate
	 *            the frame rate of the resulting video, the application will be
	 *            locked to this framerate
	 */
	public TestVideoAppState(float quality, int framerate) {
		this(null, quality, framerate);
	}

	/**
	 * This constructor allows you to specify the output file of the video. The
	 * quality is set to 0.8 and framerate to 30 fps.
	 * 
	 * @param file
	 *            the video file
	 */
	public TestVideoAppState(File file) {
		this(file, 0.8f);
	}

	/**
	 * This constructor allows you to specify the output file of the video as
	 * well as the quality. The framerate is set to 30 fps.
	 * 
	 * @param file
	 *            the video file
	 * @param quality
	 *            the quality of the jpegs in the video stream (0.0 smallest
	 *            file - 1.0 largest file)
	 */
	public TestVideoAppState(File file, float quality) {
		this.file = file;
		this.quality = quality;
		this.lastViewPort = null;
		Logger.getLogger(this.getClass().getName()).log(Level.INFO,
				"JME3 VideoRecorder running on {0} CPU's", numCpus);
	}

	/**
	 * This constructor allows you to specify the output file of the video as
	 * well as the quality and framerate.
	 * 
	 * @param file
	 *            the video file
	 * @param quality
	 *            the quality of the jpegs in the video stream (0.0 smallest
	 *            file - 1.0 largest file)
	 * @param framerate
	 *            the frame rate of the resulting video; the application will be
	 *            locked to this framerate
	 */
	public TestVideoAppState(File file, float quality, int framerate) {
		this.file = file;
		this.quality = quality;
		this.framerate = framerate;
		this.lastViewPort = null;
		Logger.getLogger(this.getClass().getName()).log(Level.INFO,
				"JME3 VideoRecorder running on {0} CPU's", numCpus);
	}

	public File getFile() {
		return file;
	}

	public void setFile(File file) {
		if (isInitialized()) {
			throw new IllegalStateException("Cannot set file while attached!");
		}
		this.file = file;
	}

	public void setViewPort(ViewPort viewPort) {
		this.lastViewPort = viewPort;
	}

	/**
	 * Get the quality used to compress the video images.
	 * 
	 * @return the quality of the jpegs in the video stream (0.0 smallest file -
	 *         1.0 largest file)
	 */
	public float getQuality() {
		return quality;
	}

	/**
	 * Set the video image quality from 0 (worst/smallest) to 1 (best/largest).
	 * 
	 * @param quality
	 *            the quality of the jpegs in the video stream (0.0 smallest
	 *            file - 1.0 largest file)
	 */
	public void setQuality(float quality) {
		this.quality = quality;
	}

	@Override
	public void initialize(AppStateManager stateManager, Application app) {
		super.initialize(stateManager, app);
		this.app = app;
		this.oldTimer = app.getTimer();
		app.setTimer(new IsoTimer(framerate));
		if (file == null) {
			String filename = System.getProperty("user.home") + File.separator
					+ "jMonkey-" + System.currentTimeMillis() / 1000 + ".avi";
			file = new File(filename);
		}
		processor = new VideoProcessor();
		if (lastViewPort == null) {
			List<ViewPort> vps = app.getRenderManager().getPostViews();
			for (int i = vps.size() - 1; i >= 0; i--) {
				lastViewPort = vps.get(i);
				if (lastViewPort.isEnabled()) {
					break;
				}
			}
		}
		lastViewPort.addProcessor(processor);
	}

	@Override
	public void cleanup() {
		lastViewPort.removeProcessor(processor);
		app.setTimer(oldTimer);
		initialized = false;
		file = null;
		super.cleanup();
	}

	private class WorkItem {

		ByteBuffer buffer;
		BufferedImage image;
		byte[] data;

		public WorkItem(int width, int height) {
			image = new BufferedImage(width, height,
					BufferedImage.TYPE_4BYTE_ABGR);
			buffer = BufferUtils.createByteBuffer(width * height * 4);
		}
	}

	private class VideoProcessor implements SceneProcessor {

		private Camera camera;
		private int width;
		private int height;
		private RenderManager renderManager;
		private boolean isInitilized = false;
		private LinkedBlockingQueue<WorkItem> freeItems;
		private LinkedBlockingQueue<WorkItem> usedItems = new LinkedBlockingQueue<WorkItem>();
		private MjpegFileWriter writer;

		public void addImage(Renderer renderer, FrameBuffer out) {
			if (freeItems == null) {
				return;
			}
			try {
				final WorkItem item = freeItems.take();
				usedItems.add(item);
				item.buffer.clear();
				renderer.readFrameBuffer(out, item.buffer);
				executor.submit(new Callable<Void>() {

					public Void call() throws Exception {
						Screenshots.convertScreenShot(item.buffer, item.image);
						item.data = writer.writeImageToBytes(item.image,
								quality);
						while (usedItems.peek() != item) {
							Thread.sleep(1);
						}
						writer.addImage(item.data);
						usedItems.poll();
						freeItems.add(item);
						return null;
					}
				});
			} catch (InterruptedException ex) {
				Logger.getLogger(TestVideoAppState.class.getName()).log(
						Level.SEVERE, null, ex);
			}
		}

		public void initialize(RenderManager rm, ViewPort viewPort) {
			this.camera = viewPort.getCamera();
			this.width = camera.getWidth();
			this.height = camera.getHeight();
			this.renderManager = rm;
			this.isInitilized = true;
			if (freeItems == null) {
				freeItems = new LinkedBlockingQueue<WorkItem>();
				for (int i = 0; i < numCpus; i++) {
					freeItems.add(new WorkItem(width, height));
				}
			}
		}

		public void reshape(ViewPort vp, int w, int h) {
		}

		public boolean isInitialized() {
			return this.isInitilized;
		}

		public void preFrame(float tpf) {
			if (null == writer) {
				try {
					writer = new MjpegFileWriter(file, width, height, framerate);
				} catch (Exception ex) {
					Logger.getLogger(TestVideoAppState.class.getName())
							.log(Level.SEVERE,
									"Error creating file writer: {0}", ex);
				}
			}
		}

		public void postQueue(RenderQueue rq) {
		}

		public void postFrame(FrameBuffer out) {
			addImage(renderManager.getRenderer(), out);
		}

		public void cleanup() {
			try {
				while (freeItems.size() < numCpus) {
					Thread.sleep(10);
				}
				writer.finishAVI();
			} catch (Exception ex) {
				Logger.getLogger(TestVideoAppState.class.getName()).log(
						Level.SEVERE, "Error closing video: {0}", ex);
			}
			writer = null;
		}
	}

	public static final class IsoTimer extends com.jme3.system.Timer {

		private float framerate;
		private int ticks;
		private long lastTime = 0;

		public IsoTimer(float framerate) {
			this.framerate = framerate;
			this.ticks = 0;
		}

		public long getTime() {
			return (long) (this.ticks * (1.0f / this.framerate) * 1000f);
		}

		public long getResolution() {
			return 1000L;
		}

		public float getFrameRate() {
			return this.framerate;
		}

		public float getTimePerFrame() {
			return (float) (1.0f / this.framerate);
		}

		public void update() {
			long time = System.currentTimeMillis();
			long difference = time - lastTime;
			lastTime = time;
			if (difference < (1.0f / this.framerate) * 1000.0f) {
				try {
					Thread.sleep(difference);
				} catch (InterruptedException ex) {
				}
			}
			this.ticks++;
		}

		public void reset() {
			this.ticks = 0;
		}
	}
}

ViewPortPosition.java
Just helps size and position the pip correctly.


public class ViewPortPosition {

    private final float scale;       // pip width/height as a fraction of the screen
    private final float topMargin;   // gap from the top edge, as a fraction of the screen
    private final float rightMargin; // gap from the right edge, as a fraction of the screen

    public ViewPortPosition(ViewPortPosition viewPortPosition) {
        this.scale = viewPortPosition.scale;
        this.topMargin = viewPortPosition.topMargin;
        this.rightMargin = viewPortPosition.rightMargin;
    }
    public ViewPortPosition(float scale, float topMargin, float rightMargin) {
        this.scale = scale;
        this.topMargin = topMargin;
        this.rightMargin = rightMargin;
    }
    public static final ViewPortPosition TOP_RIGHT_STANDARD_MARGIN = new ViewPortPosition(
            0.4f, 0.0666667f, 0.03f);
    public float getScale() {
        return scale;
    }
    public float getBottom() {
        return 1 - topMargin - scale;
    }
    public float getLeft() {
        return 1 - rightMargin - scale;
    }
    public float getTop() {
        return 1 - topMargin;
    }
    public float getRight() {
        return 1 - rightMargin;
    }
}

Well, the code worked before WordPress replaced all of the quotation marks with &quot; and I’m not sure how to fix that…

What a mess. If you want working code, someone tell me how to stop the auto-formatting and I can try posting again.

Thanks

Is your pip viewport an even number of pixels?

Thank you for the quick reply. I apologize for the delay in mine.

@Empire Phoenix said: Is your pip viewport an even number of pixels?

No, it was not, and when I adjust the values to give an even pixel count, the problem is fixed.
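
For anyone who hits the same problem: the pip size in pixels is just the viewport fraction times the window size, so a quick check might look something like this (pipCam and cam are from the code above):

	// pip size in pixels, derived from the camera's fractional viewport
	int pipWidth  = (int) ((pipCam.getViewPortRight() - pipCam.getViewPortLeft()) * cam.getWidth());
	int pipHeight = (int) ((pipCam.getViewPortTop() - pipCam.getViewPortBottom()) * cam.getHeight());
	System.out.println("pip: " + pipWidth + " x " + pipHeight
			+ ((pipWidth % 2 == 0) ? " (even width)" : " (odd width!)"));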

I only see the Camera.setViewPort() method for setting the ViewPort size, and it takes left, right, bottom, and top values as fractions of the screen size. If I am letting the user select the screen size, then I will have to check that the pip works out to an even number of pixels and adjust the values if it does not, something like the sketch below. Is this the best way, or am I missing something easier?
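
Something along these lines is what I have in mind (just an untested sketch; vpp, pipCam, and cam are from the code above):

	// Round the desired pip rectangle to whole pixels, then trim a pixel
	// where needed so the width and height both come out even.
	int screenW = cam.getWidth();
	int screenH = cam.getHeight();

	int leftPx   = Math.round(vpp.getLeft()   * screenW);
	int rightPx  = Math.round(vpp.getRight()  * screenW);
	int bottomPx = Math.round(vpp.getBottom() * screenH);
	int topPx    = Math.round(vpp.getTop()    * screenH);

	if ((rightPx - leftPx) % 2 != 0)  rightPx--;   // force even width
	if ((topPx - bottomPx) % 2 != 0)  topPx--;     // force even height

	pipCam.setViewPort((float) leftPx / screenW, (float) rightPx / screenW,
			(float) bottomPx / screenH, (float) topPx / screenH);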

Thanks again for your help!!