0
我正在開發一個將視頻保存爲 Android 設備上的 .mjpeg 文件的應用程序,也就是把 Android 攝像頭的視頻保存爲本地 .mjpeg 文件。我一直在參考 GitHub 上 vanevery 的 MJPEG 項目,只取得了部分成功(https://github.com/vanevery/Android-MJPEG-Video-Capture-FFMPEG/blob/master/src/com/mobvcasting/mjpegffmpeg/MJPEGFFMPEGTest.java)。
這是到目前爲止我的代碼:
/**
 * Records the camera preview as a local MJPEG file: each preview frame (NV21)
 * is converted to JPEG and written to the output stream behind a multipart
 * boundary header. Tapping the record button (or the preview surface)
 * toggles recording on/off.
 */
public class VideoCapture extends Activity implements OnClickListener, SurfaceHolder.Callback, Camera.PreviewCallback {

    public static final String LOGTAG = "VIDEOCAPTURE";

    // Multipart MJPEG framing written before each JPEG frame. The fixed
    // Delta-time of 110 ms assumes roughly 9 fps; players that honor it will
    // use this as the inter-frame delay.
    String szBoundaryStart = "\r\n\r\n--myboundary\r\nContent-Type: image/jpeg\r\nContent-Length: ";
    String szBoundaryDeltaTime = "\r\nDelta-time: 110";
    String szBoundaryEnd = "\r\n\r\n";

    // FIX: the original hard-coded a quality of 5 (scale 0-100), which yields
    // nearly unusable frames and is the most likely cause of bad output.
    private static final int JPEG_QUALITY = 75;

    private SurfaceHolder holder;
    private Camera camera;
    private CamcorderProfile camcorderProfile;

    boolean bRecording = false;
    boolean bPreviewRunning = false;

    byte[] previewCallbackBuffer;

    File mjpegFile;
    FileOutputStream fos;
    BufferedOutputStream bos;

    Button btnRecord;
    Camera.Parameters p;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        Date now = new Date();
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
        String szFileName = "videocapture-" + sdf.format(now) + "-";
        try {
            mjpegFile = File.createTempFile(szFileName, ".mjpeg", Environment.getExternalStorageDirectory());
        } catch (Exception e) {
            // FIX: e.getMessage() can be null (which makes Log.v throw), and
            // finish() does not stop execution -- the original fell through
            // and kept initializing the UI of an already-finished Activity.
            Log.v(LOGTAG, "Could not create output file", e);
            finish();
            return;
        }

        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        setContentView(R.layout.main);

        btnRecord = (Button) this.findViewById(R.id.RecordButton);
        btnRecord.setOnClickListener(this);

        camcorderProfile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);

        SurfaceView cameraView = (SurfaceView) findViewById(R.id.CameraView);
        holder = cameraView.getHolder();
        holder.addCallback(this);
        holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);

        cameraView.setClickable(true);
        cameraView.setOnClickListener(this);
    }

    /** Toggles recording: closes the output stream when stopping, opens it when starting. */
    public void onClick(View v) {
        if (bRecording) {
            // Flip the flag first so onPreviewFrame stops writing immediately.
            bRecording = false;
            try {
                bos.flush();
                bos.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            Log.v(LOGTAG, "Recording Stopped");
        } else {
            try {
                fos = new FileOutputStream(mjpegFile);
                bos = new BufferedOutputStream(fos);
                bRecording = true;
                Log.v(LOGTAG, "Recording Started");
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            }
        }
    }

    public void surfaceCreated(SurfaceHolder holder) {
        Log.v(LOGTAG, "surfaceCreated");
        camera = Camera.open();
    }

    /** (Re)configures and starts the camera preview unless a recording is in progress. */
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        Log.v(LOGTAG, "surfaceChanged");
        if (!bRecording) {
            if (bPreviewRunning) {
                camera.stopPreview();
            }
            try {
                p = camera.getParameters();
                // Match the preview to the device's QUALITY_HIGH camcorder profile.
                p.setPreviewSize(camcorderProfile.videoFrameWidth, camcorderProfile.videoFrameHeight);
                p.setPreviewFrameRate(camcorderProfile.videoFrameRate);
                camera.setParameters(p);
                camera.setPreviewDisplay(holder);
                camera.setPreviewCallback(this);
                Log.v(LOGTAG, "startPreview");
                camera.startPreview();
                bPreviewRunning = true;
            } catch (IOException e) {
                Log.e(LOGTAG, e.getMessage());
                e.printStackTrace();
            }
        }
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.v(LOGTAG, "surfaceDestroyed");
        if (bRecording) {
            bRecording = false;
            try {
                bos.flush();
                bos.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        // FIX: detach the preview callback and stop the preview before
        // releasing the camera; otherwise a late onPreviewFrame can run
        // against a released Camera and crash.
        bPreviewRunning = false;
        camera.setPreviewCallback(null);
        camera.stopPreview();
        camera.release();
        finish();
    }

    /**
     * Converts each NV21 preview frame to JPEG and appends it to the MJPEG
     * stream, preceded by its multipart boundary header.
     */
    public void onPreviewFrame(byte[] b, Camera c) {
        if (bRecording) {
            if (p.getPreviewFormat() == ImageFormat.NV21) {
                Log.v(LOGTAG, "Started Writing Frame");
                try {
                    ByteArrayOutputStream jpegByteArrayOutputStream = new ByteArrayOutputStream();
                    YuvImage im = new YuvImage(b, ImageFormat.NV21,
                            p.getPreviewSize().width, p.getPreviewSize().height, null);
                    Rect r = new Rect(0, 0, p.getPreviewSize().width, p.getPreviewSize().height);
                    im.compressToJpeg(r, JPEG_QUALITY, jpegByteArrayOutputStream);

                    byte[] jpegByteArray = jpegByteArrayOutputStream.toByteArray();
                    // The boundary header must carry the exact byte length of
                    // the JPEG that follows.
                    byte[] boundaryBytes =
                            (szBoundaryStart + jpegByteArray.length + szBoundaryDeltaTime + szBoundaryEnd).getBytes();

                    bos.write(boundaryBytes);
                    bos.write(jpegByteArray);
                    bos.flush();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                Log.v(LOGTAG, "Finished Writing Frame");
            } else {
                Log.v(LOGTAG, "NOT THE RIGHT FORMAT");
            }
        }
    }

    @Override
    public void onConfigurationChanged(Configuration conf) {
        super.onConfigurationChanged(conf);
    }
}
我懷疑問題可能出在 onPreviewFrame 中的 JPEG 轉換(NV21 壓縮爲 JPEG)上。任何幫助或建議將不勝感激。提前致謝。
如果你不說明這段代碼具體哪裡不能正常工作,我們就很難去逐一猜測問題可能出在哪裡。 –
嘿,我也在嘗試實現同樣的功能,但不知道該如何使用你的代碼。你能分享你的 GitHub 項目嗎? –