我正在使用移动视觉 API（Mobile Vision API）扫描条形码，并用 TextureView 渲染相机预览。我读了这个 GitHub issue：https://github.com/googlesamples/android-vision/issues/15 ，看来移动视觉 API 与 TextureView 不兼容。
我发现 CameraSource 是为 SurfaceView 设计的，与 TextureView 不兼容，因为它没有任何方法可以把预览帧渲染到 TextureView 上：https://developers.google.com/android/reference/com/google/android/gms/vision/CameraSource.html#start(android.view.SurfaceHolder)
我尝试了下面的代码，并确认 receiveDetections 从未被调用。有谁知道如何把移动视觉 API 与 TextureView 和 Camera 集成？
MainActivity.java
@RuntimePermissions
public class ScanBarcodeActivity extends BaseActivity {
private TextureView textureView;
private BarcodeDetector barcodeDetector;
private Camera camera;
private String TAG = LogUtils.makeLogTag(ScanBarcodeActivity.class);
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ActivityScanBarcodeBinding activityScanBarcodeBinding = setContentView(this, R.layout.activity_scan_barcode);
textureView = activityScanBarcodeBinding.textureView;
barcodeDetector = new BarcodeDetector.Builder(this).build();
ScanBarcodeActivityPermissionsDispatcher.requestCameraPermissionWithCheck(this);
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
// NOTE: delegate the permission handling to generated method
ScanBarcodeActivityPermissionsDispatcher.onRequestPermissionsResult(this, requestCode, grantResults);
if(permissions[0].equals(Manifest.permission.CAMERA)) {
if(grantResults[0] == PackageManager.PERMISSION_GRANTED) {
initializeCamera(); }
}
}
@NeedsPermission(Manifest.permission.CAMERA)
void requestCameraPermission() {
initializeCamera();
}
void initializeCamera() {
textureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
camera = Camera.open();
/* Set Auto focus */
Camera.Parameters parameters = camera.getParameters();
List<String> focusModes = parameters.getSupportedFocusModes();
if(focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)){
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
} else
if(focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)){
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
}
camera.setParameters(parameters);
try {
camera.setPreviewTexture(surface);
} catch (IOException io) {
LogUtils.LOGD(TAG, io.getMessage());
}
camera.startPreview();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
try {
camera.stopPreview();
camera.release();
} catch (Exception e) {
LogUtils.LOGD(TAG, e.getMessage());
}
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
});
barcodeDetector.setProcessor(new Detector.Processor<Barcode>() {
@Override
public void release() {
}
@Override
public void receiveDetections(Detector.Detections<Barcode> detections) {
final SparseArray<Barcode> barcodes = detections.getDetectedItems();
if (barcodes.size() != 0) {
LogUtils.LOGD(TAG, barcodes.valueAt(0).displayValue);
}
}
});
}
@OnShowRationale(Manifest.permission.CAMERA)
void showRationaleForCamera(final PermissionRequest request) {
new AlertDialog.Builder(this)
.setMessage(R.string.permission_camera_rationale)
.setPositiveButton(R.string.button_allow, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
request.proceed();
}
})
.setNegativeButton(R.string.button_deny, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
request.cancel();
}
})
.show();
}
@OnPermissionDenied(Manifest.permission.CAMERA)
void showPermissionDeniedForCamera() {
Toast.makeText(this, R.string.permission_camera_denied, Toast.LENGTH_SHORT).show();
}
@OnNeverAskAgain(Manifest.permission.CAMERA)
void showNeverAskAgainForCamera() {
Toast.makeText(this, R.string.permission_camera_never_ask_again, Toast.LENGTH_SHORT).show();
}
}
最佳答案
BarcodeReader Vision 示例默认（default）使用 SurfaceView 的原因很简单：兼容性。SurfaceView 从 API 级别 1 开始可用，而 TextureView 从 API 级别 14 才可用。
幸运的是,可以创建同时支持SurfaceView和TextureView的BarcodeReader,而不会完全失去兼容性。
我不记得确切出处，但 Google 创建了一个基于 TextureView 的类，增强了其测量行为以防止图像被拉伸。它叫做“AutoFitTextureView”，我为您整理如下：
public class AutoFitTextureView extends TextureView {

    /** Relative width of the desired aspect ratio; 0 means no ratio configured. */
    private int aspectWidth = 0;
    /** Relative height of the desired aspect ratio; 0 means no ratio configured. */
    private int aspectHeight = 0;

    public AutoFitTextureView(Context context) {
        this(context, null);
    }

    public AutoFitTextureView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    /**
     * Sets the aspect ratio used when measuring this view. Only the ratio of the
     * two values matters: setAspectRatio(2, 3) and setAspectRatio(4, 6) produce
     * the same result.
     *
     * @param width  relative horizontal size
     * @param height relative vertical size
     * @throws IllegalArgumentException if either value is negative
     */
    public void setAspectRatio(int width, int height) {
        if (width < 0 || height < 0) {
            throw new IllegalArgumentException("Size cannot be negative.");
        }
        aspectWidth = width;
        aspectHeight = height;
        requestLayout();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        final int measuredWidth = MeasureSpec.getSize(widthMeasureSpec);
        final int measuredHeight = MeasureSpec.getSize(heightMeasureSpec);
        // No ratio configured yet: accept the sizes the parent proposed.
        if (aspectWidth == 0 || aspectHeight == 0) {
            setMeasuredDimension(measuredWidth, measuredHeight);
            return;
        }
        // Fit inside the proposed bounds while preserving aspectWidth:aspectHeight.
        if (measuredWidth < measuredHeight * aspectWidth / aspectHeight) {
            setMeasuredDimension(measuredWidth, measuredWidth * aspectHeight / aspectWidth);
        } else {
            setMeasuredDimension(measuredHeight * aspectWidth / aspectHeight, measuredHeight);
        }
    }
}
现在,您可以在CameraSourcePreview类上使用该新类而不是SurfaceView:
查看用于SurfaceView的注释行
public class CameraSourcePreview extends ViewGroup {
    private static final String TAG = "CameraSourcePreview";

    private Context mContext;
    //private SurfaceView mSurfaceView;
    /** TextureView-based preview surface replacing the original SurfaceView. */
    private AutoFitTextureView mAutoFitTextureView;
    /** True after start(...) was called but before the camera actually started. */
    private boolean mStartRequested;
    /** True while the TextureView's SurfaceTexture is available for rendering. */
    private boolean mSurfaceAvailable;
    private CameraSource mCameraSource;
    /** Optional overlay for detection graphics; may legitimately be null. */
    private GraphicOverlay mOverlay;

    public CameraSourcePreview(Context context, AttributeSet attrs) {
        super(context, attrs);
        mContext = context;
        mStartRequested = false;
        mSurfaceAvailable = false;
        //mSurfaceView = new SurfaceView(context);
        //mSurfaceView.getHolder().addCallback(new SurfaceCallback());
        //addView(mSurfaceView);
        mAutoFitTextureView = new AutoFitTextureView(context);
        mAutoFitTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
        addView(mAutoFitTextureView);
    }

    /**
     * Requests the given camera source to start as soon as the surface is ready.
     * A null source stops any running preview instead.
     */
    @RequiresPermission(Manifest.permission.CAMERA)
    public void start(CameraSource cameraSource) throws IOException, SecurityException {
        if (cameraSource == null) {
            stop();
        }
        mCameraSource = cameraSource;
        if (mCameraSource != null) {
            mStartRequested = true;
            startIfReady();
        }
    }

    /** Same as {@link #start(CameraSource)} but also attaches a graphic overlay. */
    @RequiresPermission(Manifest.permission.CAMERA)
    public void start(CameraSource cameraSource, GraphicOverlay overlay) throws IOException, SecurityException {
        mOverlay = overlay;
        start(cameraSource);
    }

    public void stop() {
        if (mCameraSource != null) {
            mCameraSource.stop();
        }
    }

    public void release() {
        if (mCameraSource != null) {
            mCameraSource.release();
            mCameraSource = null;
        }
    }

    /**
     * Starts the camera source once both a start request is pending and the
     * surface is available; also sizes the overlay to the preview.
     */
    @RequiresPermission(Manifest.permission.CAMERA)
    private void startIfReady() throws IOException, SecurityException {
        if (mStartRequested && mSurfaceAvailable) {
            //mCameraSource.start(mSurfaceView.getHolder());
            mCameraSource.start(mAutoFitTextureView);
            if (mOverlay != null) {
                Size size = mCameraSource.getPreviewSize();
                int min = Math.min(size.getWidth(), size.getHeight());
                int max = Math.max(size.getWidth(), size.getHeight());
                if (isPortraitMode()) {
                    // Swap width and height sizes when in portrait, since it will be rotated by
                    // 90 degrees
                    mOverlay.setCameraInfo(min, max, mCameraSource.getCameraFacing());
                } else {
                    mOverlay.setCameraInfo(max, min, mCameraSource.getCameraFacing());
                }
                mOverlay.clear();
            }
            mStartRequested = false;
        }
    }

    private final TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
            mSurfaceAvailable = true;
            // BUG FIX: mOverlay is null when the one-arg start(CameraSource)
            // overload is used; the unconditional bringToFront() call crashed
            // with an NPE as soon as the surface became available.
            if (mOverlay != null) {
                mOverlay.bringToFront();
            }
            try {startIfReady();} catch (IOException e) {Log.e(TAG, "Could not start camera source.", e);}
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {}

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
            mSurfaceAvailable = false;
            return true;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture texture) {}
    };

    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        // Default preview size used until the camera source reports a real one.
        int width = 320;
        int height = 240;
        if (mCameraSource != null) {
            Size size = mCameraSource.getPreviewSize();
            if (size != null) {
                width = size.getWidth();
                height = size.getHeight();
            }
        }
        // Swap width and height sizes when in portrait, since it will be rotated 90 degrees
        if (isPortraitMode()) {
            int tmp = width;
            //noinspection SuspiciousNameCombination
            width = height;
            height = tmp;
        }
        final int layoutWidth = right - left;
        final int layoutHeight = bottom - top;
        // Computes height and width for potentially doing fit width.
        int childWidth = layoutWidth;
        int childHeight = (int)(((float) layoutWidth / (float) width) * height);
        // If height is too tall using fit width, does fit height instead.
        if (childHeight > layoutHeight) {
            childHeight = layoutHeight;
            childWidth = (int)(((float) layoutHeight / (float) height) * width);
        }
        for (int i = 0; i < getChildCount(); ++i) {
            getChildAt(i).layout(0, 0, childWidth, childHeight);
        }
        try {
            startIfReady();
        } catch (SecurityException se) {
            Log.e(TAG,"Do not have permission to start the camera", se);
        } catch (IOException e) {
            Log.e(TAG, "Could not start camera source.", e);
        }
    }

    private boolean isPortraitMode() {
        int orientation = mContext.getResources().getConfiguration().orientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
            return false;
        }
        if (orientation == Configuration.ORIENTATION_PORTRAIT) {
            return true;
        }
        Log.d(TAG, "isPortraitMode returning false by default");
        return false;
    }
}
最后,您应该向CameraSource类添加一个新的开始方法:
请注意,此新方法是从CameraSourcePreview类调用的
/**
 * Starts the camera and renders preview frames onto the given TextureView's
 * SurfaceTexture — the TextureView counterpart of the SurfaceHolder-based
 * start(...) overload. No-op (returns immediately) if the camera is already
 * running.
 *
 * @param textureView view whose SurfaceTexture receives the preview
 * @return this instance, for call chaining
 * @throws IOException if the preview texture cannot be attached to the camera
 */
public CameraSource start(AutoFitTextureView textureView) throws IOException {
    synchronized (mCameraLock) {
        // Already started: the existing private createCamera()/frame-processor
        // state is live, so do nothing.
        if(mCamera != null) {
            return this;
        }
        // createCamera() is the class's existing private factory; presumably it
        // also configures preview size and frame callbacks — confirm against the
        // surrounding CameraSource implementation.
        mCamera = createCamera();
        // Direct the preview to the TextureView instead of a SurfaceHolder.
        mCamera.setPreviewTexture(textureView.getSurfaceTexture());
        mCamera.startPreview();
        // Spin up the worker thread that pumps camera frames into the detector.
        mProcessingThread = new Thread(mFrameProcessor);
        mFrameProcessor.setActive(true);
        mProcessingThread.start();
    }
    return this;
}
现在,您可以将自己的BarcodeReader与TextureView一起使用。
我已经测试了以上全部代码，在 Galaxy S4（Android 5.0 Lollipop）和 Nexus 5（Android 6.0 Marshmallow）上均可正常工作。
希望对您有所帮助!
关于android - 如何将移动视觉API与TextureView和Camera结合使用,我们在Stack Overflow上找到一个类似的问题:https://stackoverflow.com/questions/43159280/