2

我是新來的Android應用程序,我想用相機使用SurfaceTextureOnFrameAvailable()的回叫沒有被稱爲...請給我一個解決方案。代碼如下。OnFrameAvailable回調不來了

這裏缺少什麼?我不確定是否對setOnFrameListener()進行了正確的呼叫。

package com.example.cameratest; 
import com.example.test.R; 

import android.app.Activity; 
import android.content.Intent; 
import android.os.Bundle; 
import android.view.Menu; 
import android.view.View; 


import android.graphics.SurfaceTexture; 
import android.graphics.SurfaceTexture.OnFrameAvailableListener; 
import android.hardware.Camera; 
import android.media.MediaCodec; 
import android.media.MediaCodecInfo; 
import android.media.MediaFormat; 
import android.media.MediaMuxer; 
import android.opengl.*; 

import android.util.Log; 
import android.view.Surface; 


import java.io.IOException; 
import java.nio.ByteBuffer; 
import java.nio.ByteOrder; 
import java.nio.FloatBuffer; 
import java.util.concurrent.locks.ReentrantLock; 

public class MainActivity extends Activity implements OnFrameAvailableListener { 
private static final String TAG = "CameraToMpegTest"; 
private static final boolean VERBOSE = true;   // lots of logging 
// where to put the output file (note: /sdcard requires WRITE_EXTERNAL_STORAGE permission) 
private static final long DURATION_SEC = 8;  
// camera state 
private Camera mCamera; 
private static SurfaceTexture mSurfaceTexture; 
private int[] mGlTextures = null; 
private Object mFrameSyncObject = new Object(); 
private boolean mFrameAvailable = false; 

protected void onCreate (Bundle savedInstanceState){ 
super.onCreate(savedInstanceState); 
setContentView(R.layout.activity_main); 
} 

public void startCamera(View v){ 
try { 

this.initCamera(0); 
this.StartCamera(); 


} catch (Throwable throwable) { 
throwable.printStackTrace(); 
} 
} 

private void StartCamera() { 

try { 

mCamera.startPreview(); 

long startWhen = System.nanoTime(); 
long desiredEnd = startWhen + DURATION_SEC * 1000000000L; 

int frameCount = 0; 

while (System.nanoTime() < desiredEnd) { 
// Feed any pending encoder output into the muxer. 

awaitNewImage(); 
} 
} 
finally { 
// release everything we grabbed 
releaseCamera(); 

} 
} 

/** 
* Stops camera preview, and releases the camera to the system. 
*/ 
private void releaseCamera() { 
if (VERBOSE) Log.d(TAG, "releasing camera"); 
if (mCamera != null) { 
mCamera.stopPreview(); 
mCamera.release(); 
mCamera = null; 
} 
} 

private void initCamera(int cameraId) { 

mCamera = Camera.open(cameraId); 
if (mCamera == null) { 
Log.d(TAG, "No front-facing camera found; opening default"); 
mCamera = Camera.open(); // opens first back-facing camera 
} 
if (mCamera == null) { 
throw new RuntimeException("Unable to open camera"); 
} 

Camera.Parameters parms =mCamera.getParameters(); 
parms.setPreviewSize(640, 480); 
mGlTextures = new int[1]; 
GLES20.glGenTextures(1, mGlTextures, 0); 


GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,mGlTextures[0]); 


GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, 
GLES20.GL_NEAREST); 
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, 
GLES20.GL_LINEAR); 
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, 
GLES20.GL_CLAMP_TO_EDGE); 
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, 
GLES20.GL_CLAMP_TO_EDGE); 
mSurfaceTexture = new SurfaceTexture(mGlTextures[0]); 
try { 
mCamera.setPreviewTexture(mSurfaceTexture); 
} catch (IOException e) { 
// TODO Auto-generated catch block 
e.printStackTrace(); 
} 
mSurfaceTexture.setOnFrameAvailableListener(MainActivity.this); 


} 
public void awaitNewImage() { 
final int TIMEOUT_MS = 4500; 
synchronized (mFrameSyncObject) { 
while (!mFrameAvailable) { 
try { 
// Wait for onFrameAvailable() to signal us. Use a timeout to avoid 
// stalling the test if it doesn't arrive. 
if(VERBOSE) Log.i(TAG, "Waiting for Frame in Thread"); 
mFrameSyncObject.wait(TIMEOUT_MS); 
if (!mFrameAvailable) { 
// TODO: if "spurious wakeup", continue while loop 
throw new RuntimeException("Camera frame wait timed out"); 
} 
} catch (InterruptedException ie) { 
// shouldn't happen 
throw new RuntimeException(ie); 
} 
} 
mFrameAvailable = false; 
} 


} 

@Override 
public void onFrameAvailable(SurfaceTexture st) { 
if (VERBOSE) Log.d(TAG, "new frame available"); 
synchronized (mFrameSyncObject) { 
if (mFrameAvailable) { 
throw new RuntimeException("mFrameAvailable already set, frame could be dropped"); 
} 
mFrameAvailable = true; 
mFrameSyncObject.notifyAll(); 
} 
} 
} 
+0

我也面臨着同樣的問題。你偶然找到解決方案嗎? – Subhransu

+0

在我的理解onFrameAvailable應與線程一起使用。因此,我不面臨問題 –

回答

-1

在我的理解onFrameAvailable應與線程一起使用。隨着我不是面臨的問題,並確保updatetextImage被稱爲接收幀

0

我認爲你必須調用SurfaceTeture.updateTextImage()OnFrameAvailable()回調後告訴相機在「我用你的最後一幀,再給我一次一」。

(很抱歉,但我的英語能不能提供一個更好的解釋)

-1
@Override 
    public void onFrameAvailable(SurfaceTexture surfaceTexture) { 
     ... 
     surfaceTexture.updateTexImage(); 
    } 

有同樣的問題,好像我忘了打電話的updateTexImage()

+0

除了OpenGL線程外(例如在渲染期間),您不應該調用updateTexImage()。 https://developer.android.com/reference/android/graphics/Sur​​faceTexture.html –

+0

@RupertRawnsley'onFrameAvailable()'在OpenGL線程上運行。一旦你讓Camera監聽SurfaceTexture的改變,你就可以在OpenGL線程中接收到這些改變,你可以安全地調用'surfaceTexture.updateTexImage();'並在你的EglSurfaces上繪製任何東西。 查看[google/grafika /] [1]的例子。 [1]:https://github.com/google/grafika –

+0

[文檔](https://developer.android.com/reference/android/hardware/Camera.html#setPreviewTexture \(android.graphics。 SurfaceTexture \))沒有提到這一點。此外,grafika示例中onFrameAvailable的三次調用均未使用GL線程,實際上對[此實現]使用了註釋(https://github.com/google/grafika/blob/bc29f6715f25ff841f96f6faa9ccd12d89d9ef97/app/src/main/java /com/android/grafika/CameraCaptureActivity.java#L397)表示您需要在曲面所有者線程(本例中爲GL)上調用updateTexImage。 –