I am new to Android development and I am trying to use the camera with a SurfaceTexture. The onFrameAvailable() callback is never invoked... Please suggest a solution. The code is below.

What is missing here? I am not sure whether setOnFrameAvailableListener() is being called correctly.

package com.example.cameratest;

import com.example.test.R;

import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.View;

import android.graphics.SurfaceTexture;
import android.graphics.SurfaceTexture.OnFrameAvailableListener;
import android.hardware.Camera;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.opengl.*;

import android.util.Log;
import android.view.Surface;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.concurrent.locks.ReentrantLock;

public class MainActivity extends Activity implements OnFrameAvailableListener {
    private static final String TAG = "CameraToMpegTest";
    private static final boolean VERBOSE = true;           // lots of logging
    // where to put the output file (note: /sdcard requires WRITE_EXTERNAL_STORAGE permission)
    private static final long DURATION_SEC = 8;
    // camera state
    private Camera mCamera;
    private static SurfaceTexture mSurfaceTexture;
    private int[] mGlTextures = null;
    private Object mFrameSyncObject = new Object();
    private boolean mFrameAvailable = false;

    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
    }

    public void startCamera(View v) {
        try {
            this.initCamera(0);
            this.StartCamera();
        } catch (Throwable throwable) {
            throwable.printStackTrace();
        }
    }

    private void StartCamera() {
        try {
            mCamera.startPreview();

            long startWhen = System.nanoTime();
            long desiredEnd = startWhen + DURATION_SEC * 1000000000L;

            int frameCount = 0;

            while (System.nanoTime() < desiredEnd) {
                // Feed any pending encoder output into the muxer.
                awaitNewImage();
            }
        } finally {
            // release everything we grabbed
            releaseCamera();
        }
    }

    /**
     * Stops camera preview, and releases the camera to the system.
     */
    private void releaseCamera() {
        if (VERBOSE) Log.d(TAG, "releasing camera");
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    private void initCamera(int cameraId) {
        mCamera = Camera.open(cameraId);
        if (mCamera == null) {
            Log.d(TAG, "No front-facing camera found; opening default");
            mCamera = Camera.open();    // opens first back-facing camera
        }
        if (mCamera == null) {
            throw new RuntimeException("Unable to open camera");
        }

        Camera.Parameters parms = mCamera.getParameters();
        parms.setPreviewSize(640, 480);

        mGlTextures = new int[1];
        GLES20.glGenTextures(1, mGlTextures, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);

        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_NEAREST);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                GLES20.GL_CLAMP_TO_EDGE);

        mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
        try {
            mCamera.setPreviewTexture(mSurfaceTexture);
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        mSurfaceTexture.setOnFrameAvailableListener(MainActivity.this);
    }

    public void awaitNewImage() {
        final int TIMEOUT_MS = 4500;
        synchronized (mFrameSyncObject) {
            while (!mFrameAvailable) {
                try {
                    // Wait for onFrameAvailable() to signal us.  Use a timeout to avoid
                    // stalling the test if it doesn't arrive.
                    if (VERBOSE) Log.i(TAG, "Waiting for Frame in Thread");
                    mFrameSyncObject.wait(TIMEOUT_MS);
                    if (!mFrameAvailable) {
                        // TODO: if "spurious wakeup", continue while loop
                        throw new RuntimeException("Camera frame wait timed out");
                    }
                } catch (InterruptedException ie) {
                    // shouldn't happen
                    throw new RuntimeException(ie);
                }
            }
            mFrameAvailable = false;
        }
    }

    @Override
    public void onFrameAvailable(SurfaceTexture st) {
        if (VERBOSE) Log.d(TAG, "new frame available");
        synchronized (mFrameSyncObject) {
            if (mFrameAvailable) {
                throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
            }
            mFrameAvailable = true;
            mFrameSyncObject.notifyAll();
        }
    }
}

Best answer

I think you have to call SurfaceTexture.updateTexImage() after the onFrameAvailable() callback, to tell the camera "I'm done with your last frame, give me another one."

(Sorry, but my English does not allow me to give a better explanation.)
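For example, here is a minimal sketch of what the consuming side could look like. It reuses the identifiers from the question and assumes an EGL context owning the OES texture is current on the thread that calls awaitNewImage(), which the posted code does not yet set up:

public void awaitNewImage() {
    final int TIMEOUT_MS = 4500;
    synchronized (mFrameSyncObject) {
        while (!mFrameAvailable) {
            try {
                // Wait for onFrameAvailable() to signal us, with a timeout.
                mFrameSyncObject.wait(TIMEOUT_MS);
                if (!mFrameAvailable) {
                    throw new RuntimeException("Camera frame wait timed out");
                }
            } catch (InterruptedException ie) {
                throw new RuntimeException(ie);
            }
        }
        mFrameAvailable = false;
    }
    // Latch the newest camera frame into the GL_TEXTURE_EXTERNAL_OES texture.
    // This hands the previous buffer back to the camera so that the next
    // onFrameAvailable() callback can be delivered.
    mSurfaceTexture.updateTexImage();
}

Note that updateTexImage() may only be called while an OpenGL ES context is current on the calling thread, so it belongs on the GL thread rather than inside onFrameAvailable() itself.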
