Hello everyone.
I’ve been trying to create a virtual reality app for a college project. Basically, I need to draw two video streams to two textures and overlay them properly. One stream comes from the phone’s camera, the other is a webcam stream provided by my PI. The idea is to draw the camera feed on a non-moving plane, then draw the webcam feed on a second plane that is rotated and scaled according to the VR input. This way we get the feeling that there’s a portal, more or less. The whole project should be very basic overall, but I got stuck at the first step: drawing the camera feed on a texture.
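To make that concrete, here is roughly the scene I have in mind. Just a sketch, assuming it lives in a SimpleApplication; cameraTex and webcamTex are placeholders standing in for the two streams:

```java
import com.jme3.material.Material;
import com.jme3.scene.Geometry;
import com.jme3.scene.shape.Quad;
import com.jme3.texture.Texture2D;

// Sketch only: cameraTex and webcamTex are placeholder Texture2Ds that would be
// fed by the phone camera and the webcam stream respectively.
private void buildPortalScene(Texture2D cameraTex, Texture2D webcamTex) {
    // Fixed background plane showing the phone's camera feed.
    Geometry background = new Geometry("background", new Quad(16, 9));
    Material bgMat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
    bgMat.setTexture("ColorMap", cameraTex);
    background.setMaterial(bgMat);
    rootNode.attachChild(background);

    // The "portal" plane showing the webcam feed. VR input would drive
    // portal.setLocalTranslation(...), setLocalRotation(...) and setLocalScale(...)
    // every frame to keep up the portal illusion.
    Geometry portal = new Geometry("portal", new Quad(4, 3));
    Material portalMat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
    portalMat.setTexture("ColorMap", webcamTex);
    portal.setMaterial(portalMat);
    rootNode.attachChild(portal);
}
```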
I’ve tried Processing with NyARToolkit, but alas, I own a Nexus 5 running Android 6.0, and so far the libraries offer no support for its camera.
As I’ve coded with Ogre and jME before, jME was my next step. I’ve got the camera preview working, and the jME libraries are set up. If I comment out the setTexture call in the onImageAvailable function, I get a blank plane at 60 fps. Likewise, if I instead comment out the texture.setImage part inside setTexture itself, the fps also stays around 60. With everything active, I still get 60 fps as long as I don’t look at the plane directly; as soon as the texture actually has to be drawn (when I look at it), the fps drops to 1. Any idea why that is, and how I could go about making it faster?
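My only real lead so far: every frame currently goes through a JPEG decode plus a brand-new jME Image object. I was wondering whether something along these lines would be the faster path; an untested sketch that allocates one buffer and one Image up front and only rewrites the pixel data per frame (how the camera frame gets converted to RGB bytes is a separate problem I’m hand-waving here):

```java
import com.jme3.texture.Image;
import com.jme3.texture.Image.Format;
import com.jme3.texture.Texture2D;
import com.jme3.util.BufferUtils;
import java.nio.ByteBuffer;
import java.util.concurrent.Callable;

// Untested sketch: assumes a SimpleApplication with a `material` field, and that
// `rgbBytes` already holds one frame as tightly packed RGB (width * height * 3 bytes).
private ByteBuffer pixels;
private Image cpuImage;

private void initVideoTexture(int width, int height) {
    pixels = BufferUtils.createByteBuffer(width * height * 3);
    cpuImage = new Image(Format.RGB8, width, height, pixels);
    material.setTexture("ColorMap", new Texture2D(cpuImage)); // texture created once
}

// Called from the camera thread once per frame.
private void updateVideoTexture(final byte[] rgbBytes) {
    pixels.clear();
    pixels.put(rgbBytes);
    pixels.flip();
    enqueue(new Callable<Void>() {
        @Override
        public Void call() {
            // Re-upload the same buffer on the render thread; no new Image, no decode.
            cpuImage.setUpdateNeeded();
            return null;
        }
    });
}
```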
Here is the actual code as it stands now.
MainActivity.java:

```java
package com.mkozelj.VRPortals;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.Point;
import android.graphics.SurfaceTexture;
import android.os.Bundle;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import android.widget.Toast;
import com.jme3.app.AndroidHarness;
import com.jme3.system.android.AndroidConfigChooser.ConfigType;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.LogManager;
public class MainActivity extends AndroidHarness {
    public MainActivity() {
        // Set the application class to run
        appClass = "com.mkozelj.VRPortals.FrontEnd";
        // Try ConfigType.FASTEST; or ConfigType.LEGACY if you have problems
        eglConfigType = ConfigType.FASTEST;
        // Exit Dialog title & message
        exitDialogTitle = "Exit?";
        exitDialogMessage = "Press Yes";
        // Choose screen orientation
        screenOrientation = ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;
        // Enable MouseEvents being generated from TouchEvents (default = true)
        mouseEventsEnabled = true;
        // Set the default logging level (default=Level.INFO, Level.ALL=All Debug Info)
        LogManager.getLogManager().getLogger("").setLevel(Level.INFO);
    }
    private static final String TAG = "VRP";
    private static final int STATE_PREVIEW = 0;
    private static final int STATE_WAITING_LOCK = 1;
    private static final int STATE_WAITING_PRECAPTURE = 2;
    private static final int STATE_WAITING_NON_PRECAPTURE = 3;
    private static final int STATE_PICTURE_TAKEN = 4;
    private static final int MAX_PREVIEW_WIDTH = 1920;
    private static final int MAX_PREVIEW_HEIGHT = 1080;

    private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(CameraDevice cameraDevice) {
            if (cameraDevice == null) {
                return;
            }
            // This method is called when the camera is opened. We start camera preview here.
            mCameraDevice = cameraDevice;
            createCameraPreviewSession();
        }

        @Override
        public void onDisconnected(CameraDevice cameraDevice) {
            if (cameraDevice == null) {
                return;
            }
            cameraDevice.close();
            mCameraDevice = null;
        }

        @Override
        public void onError(CameraDevice cameraDevice, int error) {
            if (cameraDevice == null) {
                return;
            }
            cameraDevice.close();
            mCameraDevice = null;
        }
    };

    private HandlerThread mBackgroundThread;
    private Handler mBackgroundHandler;
    private ImageReader mImageReader;
    private long lastTicks = 0;
    private long deltaTicks = 0;
    //private byte[] imageBuffer = new byte[6555];
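    // Runs on mBackgroundHandler's thread every time the camera delivers a new frame.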
    private final ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            //deltaTicks = 500000001; // java.lang.System.nanoTime() - lastTicks;
            //if (deltaTicks > 500000000) {
            // Handle new image here
            Image mImage = reader.acquireNextImage();
            ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
            int w = mImage.getWidth();
            int h = mImage.getHeight();
            //int test = buffer.remaining();
            //byte[] imageBuffer = new byte[buffer.remaining()];
            //buffer.get(imageBuffer);
            getFrontEnd().setTexture(reader.getImageFormat(), w, h, buffer);
            mImage.close();
            //mBackgroundHandler.post(new ImageHandler(reader.acquireNextImage(), reader.getImageFormat()));
            //lastTicks = java.lang.System.nanoTime();
            //} else {
            //    reader.acquireLatestImage().close();
            //}
        }
    };
    private FrontEnd mFrontEnd = null;
    private CaptureRequest.Builder mPreviewRequestBuilder;
    private CaptureRequest mPreviewRequest;
    private int mState = 0;

    private CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {
        private void process(CaptureResult result) {
        }

        @Override
        public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request, CaptureResult partialResult) {
        }

        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
        }
    };

    private String mCameraId;
    private CameraCaptureSession mCaptureSession;
    private CameraDevice mCameraDevice;
    private Size mPreviewSize;

    private void showToast(final String text) {
        final Activity activity = this;
        if (activity != null) {
            activity.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
                }
            });
        }
    }

    private static Size chooseOptimalSize(Size[] choices, int outputWidth,
            int outputHeight, int maxWidth, int maxHeight, Size aspectRatio) {
        // Collect the supported resolutions that are at least as big as the preview Surface
        List<Size> bigEnough = new ArrayList<>();
        // Collect the supported resolutions that are smaller than the preview Surface
        List<Size> notBigEnough = new ArrayList<>();
        int w = aspectRatio.getWidth();
        int h = aspectRatio.getHeight();
        for (Size option : choices) {
            Log.i("Screen Sizes", option.getWidth() + " " + option.getHeight());
            if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight
                    && option.getHeight() == option.getWidth() * h / w) {
                if (option.getWidth() >= outputWidth
                        && option.getHeight() >= outputHeight) {
                    bigEnough.add(option);
                } else {
                    notBigEnough.add(option);
                }
            }
        }
        // Pick the smallest of those big enough. If there is no one big enough, pick the
        // largest of those not big enough.
        if (bigEnough.size() > 0) {
            return Collections.min(bigEnough, new CompareSizesByArea());
        } else if (notBigEnough.size() > 0) {
            return Collections.max(notBigEnough, new CompareSizesByArea());
        } else {
            Log.e(TAG, "Couldn't find any suitable preview size");
            return choices[0];
        }
    }

    static class CompareSizesByArea implements Comparator<Size> {
        @Override
        public int compare(Size lhs, Size rhs) {
            // We cast here to ensure the multiplications won't overflow
            return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                    - (long) rhs.getWidth() * rhs.getHeight());
        }
    }

    private int outputHeight = 480;
    private int outputWidth = 720;
    private int mSensorOrientation;

    private void setUpCameraOutputs(int width, int height) {
        Activity activity = this;
        CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
        try {
            for (String cameraId : manager.getCameraIdList()) {
                CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
                // We don't use a front facing camera in this sample.
                Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
                if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                    continue;
                }
                StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                if (map == null) {
                    continue;
                }
                // For still image captures, we use the largest available size.
                Size largest = chooseOptimalSize(map.getOutputSizes(ImageFormat.JPEG),
                        outputWidth, outputHeight, outputWidth, outputHeight,
                        new Size(outputWidth, outputHeight));
                mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG, /*maxImages*/ 2);
                mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);
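                // Note: with ImageFormat.JPEG the camera compresses every frame, and each
                // one has to be decoded again in software before it can become a texture.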
                // Find out if we need to swap dimension to get the preview size relative to sensor
                // coordinate.
                int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
                //noinspection ConstantConditions
                mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
                boolean swappedDimensions = false;
                switch (displayRotation) {
                    case Surface.ROTATION_0:
                    case Surface.ROTATION_180:
                        if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                            swappedDimensions = true;
                        }
                        break;
                    case Surface.ROTATION_90:
                    case Surface.ROTATION_270:
                        if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                            swappedDimensions = true;
                        }
                        break;
                    default:
                        Log.e(TAG, "Display rotation is invalid: " + displayRotation);
                }
                Point displaySize = new Point();
                activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
                int rotatedPreviewWidth = width;
                int rotatedPreviewHeight = height;
                int maxPreviewWidth = displaySize.x;
                int maxPreviewHeight = displaySize.y;
                if (swappedDimensions) {
                    rotatedPreviewWidth = height;
                    rotatedPreviewHeight = width;
                    maxPreviewWidth = displaySize.y;
                    maxPreviewHeight = displaySize.x;
                }
                if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                    maxPreviewWidth = MAX_PREVIEW_WIDTH;
                }
                if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                    maxPreviewHeight = MAX_PREVIEW_HEIGHT;
                }
                // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
                // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
                // garbage capture data.
                mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                        rotatedPreviewWidth, rotatedPreviewHeight, outputWidth,
                        outputHeight, largest);
                // We fit the aspect ratio of TextureView to the size of preview we picked.
                int orientation = getResources().getConfiguration().orientation;
                if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                    // handle texture orientation here
                } else {
                }
                // Check if the flash is supported.
                Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
                mFlashSupported = available == null ? false : available;
                mCameraId = cameraId;
                return;
            }
        } catch (CameraAccessException e) {
            Log.d(TAG, e.toString());
        } catch (NullPointerException e) {
            // Currently an NPE is thrown when the Camera2 API is used but not supported on the
            // device this code runs on.
        }
    }

    private boolean mFlashSupported;

    private void openCamera(int width, int height) {
        setUpCameraOutputs(width, height);
        Activity activity = this;
        CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
        try {
            manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
        } catch (CameraAccessException e) {
            Log.d(TAG, e.toString());
        }
    }

    private void closeCamera() {
        if (null != mCaptureSession) {
            mCaptureSession.close();
            mCaptureSession = null;
        }
        if (null != mCameraDevice) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
        if (null != mImageReader) {
            mImageReader.close();
            mImageReader = null;
        }
    }

    private void startBackgroundThread() {
        mBackgroundThread = new HandlerThread("CameraBackground");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    private void stopBackgroundThread() {
        mBackgroundThread.quitSafely();
        try {
            mBackgroundThread.join();
            mBackgroundThread = null;
            mBackgroundHandler = null;
        } catch (InterruptedException e) {
            Log.d(TAG, e.toString());
        }
    }

    private class ImageHandler implements Runnable {
        /**
         * The JPEG image
         */
        private final Image mImage;
        private final int imageFormat;

        public ImageHandler(Image image, int imageFormat) {
            mImage = image;
            this.imageFormat = imageFormat;
        }

        @Override
        public void run() {
            //ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
            //int w = mImage.getWidth();
            //int h = mImage.getHeight();
            //byte[] b = new byte[buffer.remaining()];
            //buffer.get(b);
            //getFrontEnd().setTexture(imageFormat, w, h, b);
            //mImage.close();
        }
    }

    private void createCameraPreviewSession() {
        try {
            mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mPreviewRequestBuilder.addTarget(mImageReader.getSurface());
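            // The ImageReader is the only output surface, so every preview frame is
            // delivered to onImageAvailable as a JPEG rather than to an on-screen preview.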
            // Here, we create a CameraCaptureSession for camera preview.
            mCameraDevice.createCaptureSession(Arrays.asList(mImageReader.getSurface()),
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured(CameraCaptureSession cameraCaptureSession) {
                            // The camera is already closed
                            if (null == mCameraDevice) {
                                return;
                            }
                            // When the session is ready, we start displaying the preview.
                            mCaptureSession = cameraCaptureSession;
                            try {
                                // Auto focus should be continuous for camera preview.
                                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                                        CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                                // Flash is automatically enabled when necessary.
                                setAutoFlash(mPreviewRequestBuilder);
                                // Finally, we start displaying the camera preview.
                                mPreviewRequest = mPreviewRequestBuilder.build();
                                mCaptureSession.setRepeatingRequest(mPreviewRequest,
                                        mCaptureCallback, mBackgroundHandler);
                            } catch (CameraAccessException e) {
                                Log.d(TAG, e.toString());
                            }
                        }

                        @Override
                        public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
                            showToast("Failed");
                        }
                    }, null);
        } catch (CameraAccessException e) {
            Log.d(TAG, e.toString());
        }
    }

    private void setAutoFlash(CaptureRequest.Builder requestBuilder) {
        if (mFlashSupported) {
            requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                    CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
        }
    }

    @Override
    public void onCreate(Bundle bundle) {
        super.onCreate(bundle);
        // The background thread and camera are started in onResume(), which always runs
        // after onCreate(); starting them here as well would open the camera twice.
        if (view != null) {
            this.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    logger.log(Level.INFO, "View already created, adjusting to fixed resolution.");
                    // settings are adjusted in onSurfaceChanged in OGLESContext
                    view.getHolder().setFixedSize(720, 480);
                }
            });
        }
        Log.e("JME3", "On Create Finished");
    }

    @Override
    public void onPause() {
        super.onPause();
        stopBackgroundThread();
        closeCamera();
        Log.e("JME3", "On Pause Finished");
    }

    @Override
    public void onResume() {
        super.onResume();
        startBackgroundThread();
        openCamera(outputWidth, outputHeight);
        Log.e("JME3", "On Resume Finished");
    }

    private FrontEnd getFrontEnd() {
        return (FrontEnd) getJmeApplication();
    }
}
```
FrontEnd.java:

```java
package com.mkozelj.VRPortals;
/**
 * @author MatijaKozelj
 */
import android.util.Log;
import com.jme3.app.SimpleApplication;
import com.jme3.asset.AssetInfo;
import com.jme3.asset.TextureKey;
import com.jme3.material.Material;
import com.jme3.math.Vector3f;
import com.jme3.scene.Geometry;
import com.jme3.scene.shape.Box;
import com.jme3.texture.Image;
import com.jme3.texture.Texture2D;
import com.jme3.texture.plugins.AndroidImageLoader;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.ByteBuffer;
public class FrontEnd extends SimpleApplication {
    private Box box;
    private Material material;
    private boolean sceneInitialized = false;
    private AndroidImageLoader mImageLoader;
    private Texture2D cameraTexture;

    public FrontEnd() {
        mImageLoader = new AndroidImageLoader();
    }

    @Override
    public void simpleInitApp() {
        rootNode.detachAllChildren();
        box = new Box(Vector3f.ZERO, 3, 2, 1);
        Geometry geometry = new Geometry("box", box);
        material = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        //material.setColor("Color", ColorRGBA.Blue);
        geometry.setMaterial(material);
        rootNode.attachChild(geometry);
        sceneInitialized = true;
    }

    private ByteArrayInfo backgroundBuffer = new ByteArrayInfo(new byte[0]);
    private boolean isFirstTime = true;
    private Image texImage = null;
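    // Called from the camera's background thread each time a new frame arrives.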
    public void setTexture(int format, int width, int height, ByteBuffer data) {
        // Only proceed if the scene has already been set up
        if (!sceneInitialized) {
            return;
        }
        try {
            if (isFirstTime) {
                byte[] b = new byte[data.remaining()];
                data.get(b);
                texImage = (Image) mImageLoader.load(new ByteArrayInfo(b));
                cameraTexture = new Texture2D(texImage);
                material.setTexture("ColorMap", cameraTexture);
                isFirstTime = false;
            } else {
                byte[] b = new byte[data.remaining()];
                data.get(b);
                texImage = (Image) mImageLoader.load(new ByteArrayInfo(b));
                texImage.setUpdateNeeded();
                cameraTexture.setImage(texImage);
            }
        } catch (Exception e) {
            Log.e("Image renderer", e.toString());
        }
        //material.setTexture("ColorMap", cameraTexture);
    }
    private class ByteArrayInfo extends AssetInfo {

        public byte[] data;

        public ByteArrayInfo(byte[] data) {
            super(assetManager, new TextureKey("ByteArray", true));
            this.data = data;
        }

        @Override
        public InputStream openStream() {
            return new ByteArrayInputStream(data);
        }
    }
}
```
Thank you for your help.
Regards,
Matija