2016-01-25 108 views
2

我的任務描述: 我正在開發Android上的視頻播放器(API> = 17)。它必須與HLSmulticast視頻一起工作。另外,它必須支持多個音軌。從AVPacket提供字節數據的MediaCodec:輸出緩衝區問題

爲什麼我決定使用ffmpeg

  • 在某些設備上MediaPlayer不支持multicast - 視頻
  • MediaExtractor無法處理HLS(getTrackCount()返回0)
  • ffmpeg可以處理HLS multicast視頻

我的想法: 我在一個循環中使用ffmpeg解串一個流。我使用videoStream->codec->extradata獲得CSD,然後正確配置MediaFormat。在每次迭代中,當我有新的視頻AVPacket可用時,我使用av_bitstream_filter_inith264_mp4toannexb過濾它的緩衝區。然後我調用java方法onNewVideoData,其中我得到了AVPacket字節數組。我清除了可用的輸入緩衝區,然後填入新的數據。我也得到了pts。由於我有一個沒有開始的流,另外,我通過從以下所有pts'減去第一個AVPacketpts'來計算新的pts''。第一個pts我分配給0.然後我打電話queueInputBuffer發送緩衝區到解碼器。

我使用兩個線程:一個用於獲取和提交數據到輸入緩衝區,另一個用於將其發送到Surface

完整的播放器的C代碼:

#include <jni.h> 
#include <android/log.h> 
#include <stddef.h> 

#include <libavformat/avformat.h> 
#include <libavcodec/avcodec.h> 
#include <libavutil/buffer.h> 

#define TAG "ffmpegPlayer" 

/*
 * Shared player state, populated once by initNative() and read by the
 * other JNI entry points.
 * NOTE(review): this global is written by the demux thread (startNative)
 * and read by Java-called getters without any synchronization — confirm
 * the intended threading contract.
 */
struct 
{ 
    const char* url; 
    jint width; 
    jint height; 
    jfloat aspectRatio; 
    jint streamsCount; 
    AVFormatContext* formatContext; 
    AVStream* videoStream; 
} context; 

/* Most recently demuxed packet: filled by startNative()'s read loop and
 * consumed by getVideoDataNative()/getPtsNative() during the synchronous
 * onNewVideoData() callback into Java. */
AVPacket packet; 
/* h264_mp4toannexb bitstream filter, created in initNative(). */
AVBitStreamFilterContext* avBitStreamFilterContext; 

/*
 * Returns the video stream's codec-specific data (H.264 SPS/PPS
 * "extradata") as a Java byte[]; the Java side wraps it as "csd-0" in the
 * MediaFormat handed to MediaCodec.
 * Returns NULL (with a pending exception where applicable) if no
 * extradata is available or the array cannot be allocated.
 */
JNIEXPORT jbyteArray JNICALL Java_com_example_app_FfmpegPlayer_getCsdNative(JNIEnv* env, jobject x)
{
    AVCodecContext* codec = context.videoStream->codec;
    /* BUG FIX: the original dereferenced extradata unconditionally; for some
     * inputs it can be NULL / zero-sized, which crashed or produced an
     * empty csd-0. */
    if (codec->extradata == NULL || codec->extradata_size <= 0) {
        __android_log_print(ANDROID_LOG_ERROR, TAG, "No extradata available");
        return NULL;
    }

    jbyteArray arr = (*env)->NewByteArray(env, codec->extradata_size);
    if (arr == NULL) {
        return NULL; /* OutOfMemoryError already pending in the JVM */
    }
    (*env)->SetByteArrayRegion(env, arr, 0, codec->extradata_size, (jbyte*)codec->extradata);
    return arr;
}

/* Returns the decoded video width in pixels, as cached by initNative(). */
JNIEXPORT jint JNICALL Java_com_example_app_FfmpegPlayer_getWidthNative(JNIEnv* env, jobject x)
{
    const jint frameWidth = context.width;
    return frameWidth;
}

/* Returns the decoded video height in pixels, as cached by initNative(). */
JNIEXPORT jint JNICALL Java_com_example_app_FfmpegPlayer_getHeightNative(JNIEnv* env, jobject x)
{
    const jint frameHeight = context.height;
    return frameHeight;
}

/* Returns the sample aspect ratio computed in initNative(). */
JNIEXPORT jfloat JNICALL Java_com_example_app_FfmpegPlayer_getAspectRatioNative(JNIEnv* env, jobject x)
{
    const jfloat sar = context.aspectRatio;
    return sar;
}

/* Returns the number of streams (video + audio + other) in the input,
 * as cached by initNative() from formatContext->nb_streams.
 * NOTE(review): the return type is jfloat although context.streamsCount
 * is a jint and a stream count is integral — this looks like a copy-paste
 * of getAspectRatioNative's signature. Check the Java-side native
 * declaration (not visible here) before changing the ABI. */
JNIEXPORT jfloat JNICALL Java_com_example_app_FfmpegPlayer_getStreamsCountNative(JNIEnv* env, jobject x) 
{ 
    return context.streamsCount; 
} 

/*
 * Returns the current packet's presentation timestamp converted from the
 * video stream's time_base to microseconds (the unit MediaCodec expects
 * in queueInputBuffer).
 * BUG FIX: the original multiplied packet.pts blindly; when pts is
 * AV_NOPTS_VALUE the result was a garbage timestamp. Fall back to dts,
 * then to 0.
 */
JNIEXPORT jlong JNICALL Java_com_example_app_FfmpegPlayer_getPtsNative(JNIEnv* env, jobject obj)
{
    int64_t ts = packet.pts;
    if (ts == AV_NOPTS_VALUE) {
        ts = packet.dts; /* some TS packets carry only a dts */
    }
    if (ts == AV_NOPTS_VALUE) {
        return 0;
    }
    return (jlong)(ts * av_q2d(context.videoStream->time_base) * 1000000.0);
}

/*
 * Initialises the player for the given URL: registers ffmpeg, creates the
 * h264_mp4toannexb bitstream filter, opens the input, finds the first
 * video stream, and caches dimensions / aspect ratio / stream count in
 * the global context.
 * Returns JNI_TRUE on success, JNI_FALSE on any failure.
 */
JNIEXPORT jboolean JNICALL Java_com_example_app_FfmpegPlayer_initNative(JNIEnv* env, jobject obj, const jstring u)
{
    av_register_all();
    avBitStreamFilterContext = av_bitstream_filter_init("h264_mp4toannexb");

    const char* url = (*env)->GetStringUTFChars(env, u, NULL);
    if (url == NULL) {
        return JNI_FALSE; /* OutOfMemoryError already pending in the JVM */
    }
    __android_log_print(ANDROID_LOG_DEBUG, TAG, "Init: %s", url);

    AVFormatContext* formatContext = NULL;
    int openResult = avformat_open_input(&formatContext, url, NULL, NULL);
    /* BUG FIX: GetStringUTFChars was never balanced with a release, leaking
     * the UTF copy on every init. */
    (*env)->ReleaseStringUTFChars(env, u, url);
    if (openResult < 0) {
        __android_log_print(ANDROID_LOG_ERROR, TAG, "Unable to open input");
        return JNI_FALSE;
    }

    if (avformat_find_stream_info(formatContext, NULL) < 0) {
        __android_log_print(ANDROID_LOG_ERROR, TAG, "Unable to find stream info");
        avformat_close_input(&formatContext); /* BUG FIX: context leaked on this path */
        return JNI_FALSE;
    }

    __android_log_print(ANDROID_LOG_DEBUG, TAG, "format: %s", formatContext->iformat->name);

    context.streamsCount = formatContext->nb_streams;
    __android_log_print(ANDROID_LOG_DEBUG, TAG, "Streams count: %d", formatContext->nb_streams);

    /* Pick the first video stream; log the language tag of each audio stream. */
    AVStream* videoStream = NULL;
    for (int i = 0; i < (int)formatContext->nb_streams; i++) {
        int codecType = formatContext->streams[i]->codec->codec_type;
        if (videoStream == NULL && codecType == AVMEDIA_TYPE_VIDEO) {
            videoStream = formatContext->streams[i];
        }
        else if (codecType == AVMEDIA_TYPE_AUDIO) {
            AVDictionaryEntry* lang = av_dict_get(formatContext->streams[i]->metadata, "language", NULL, 0);
            if (lang != NULL) {
                __android_log_print(ANDROID_LOG_DEBUG, TAG, "Audio stream %d: %s", i, lang->value);
            }
        }
    }
    if (videoStream == NULL) {
        __android_log_print(ANDROID_LOG_ERROR, TAG, "Unable to find video stream");
        avformat_close_input(&formatContext); /* BUG FIX: context leaked on this path */
        return JNI_FALSE;
    }
    context.videoStream = videoStream;
    __android_log_print(ANDROID_LOG_DEBUG, TAG, "Video stream: %d", videoStream->index);

    AVCodecContext* codecContext = videoStream->codec;

    __android_log_print(ANDROID_LOG_DEBUG, TAG, "width: %d, height: %d", codecContext->width, codecContext->height);
    context.width = codecContext->width;
    context.height = codecContext->height;

    AVRational aspectRatio = codecContext->sample_aspect_ratio;
    __android_log_print(ANDROID_LOG_DEBUG, TAG, "aspect ratio: %d/%d", aspectRatio.num, aspectRatio.den);
    /* BUG FIX: `aspectRatio.num/aspectRatio.den` was *integer* division
     * (e.g. 16/11 -> 1, 0/1 -> 0) before being stored in the jfloat.
     * Compute in floating point and guard against an unset denominator. */
    context.aspectRatio = (aspectRatio.den != 0)
        ? (jfloat)aspectRatio.num / (jfloat)aspectRatio.den
        : 0.0f;

    context.formatContext = formatContext;

    return JNI_TRUE;
}

void filterPacket() 
{ 
    av_bitstream_filter_filter(avBitStreamFilterContext, context.videoStream->codec, NULL, &packet.data, &packet.size, packet.data, packet.size, packet.flags); 
} 

JNIEXPORT void JNICALL Java_com_example_app_FfmpegPlayer_startNative(JNIEnv* env, jobject obj) 
{ 
    jclass cl = (*env)->GetObjectClass(env, obj); 
    jmethodID updateMethodId = (*env)->GetMethodID(env, cl, "onNewVideoData", "()V"); 

    while (av_read_frame(context.formatContext, &packet) >= 0) { 
     if (context.formatContext == NULL) { 
      return; 
     } 
     if (packet.stream_index == context.videoStream->index) { 
      filterPacket(); 
      (*env)->CallVoidMethod(env, obj, updateMethodId); 
     } 
    } 
} 

/*
 * Copies the current packet's payload into a new Java byte[] for the
 * MediaCodec input buffer.
 * BUG FIX: the original read packet.buf->data / packet.buf->size — the
 * *pre-filter* reference-counted buffer. After h264_mp4toannexb has run,
 * the filtered output lives in packet.data / packet.size, so that is what
 * must be handed to the decoder. Returns NULL when no payload is present.
 */
JNIEXPORT jbyteArray JNICALL Java_com_example_app_FfmpegPlayer_getVideoDataNative(JNIEnv* env, jobject obj)
{
    if (packet.data == NULL || packet.size <= 0) {
        return NULL;
    }

    jbyteArray arr = (*env)->NewByteArray(env, packet.size);
    if (arr == NULL) {
        return NULL; /* OutOfMemoryError already pending */
    }
    (*env)->SetByteArrayRegion(env, arr, 0, packet.size, (jbyte*)packet.data);
    return arr;
}

完整的Java代碼:

package com.example.app; 


import android.media.MediaCodec; 
import android.media.MediaFormat; 
import android.view.Surface; 

import java.nio.ByteBuffer; 

public class FfmpegPlayer { 

    static { 
     System.loadLibrary("avutil-54"); 
     System.loadLibrary("swscale-3"); 
     System.loadLibrary("swresample-1"); 
     System.loadLibrary("avcodec-56"); 
     System.loadLibrary("avformat-56"); 
     System.loadLibrary("avfilter-5"); 
     System.loadLibrary("ffmpeg-player"); 
    } 

    private native boolean initNative(String url); 
    private native boolean startNative(); 
    private native int getWidthNative(); 
    private native int getHeightNative(); 
    private native float getAspectRatioNative(); 
    private native byte[] getVideoDataNative(); 
    private native long getPtsNative(); 
    private native byte[] getCsdNative(); 

    private String source; 
    private PlayerThread playerThread; 
    private int width; 
    private int height; 
    private MediaCodec decoder; 
    private ByteBuffer[] inputBuffers; 
    private Surface surface; 
    private long firstPtsTime; 

    public PlanetaPlayer(Surface surface) { 
     this.surface = surface; 
    } 

    public void setDataSource(String source) { 
     if (!initNative(source)) { 
      return; 
     } 
     width = getWidthNative(); 
     height = getHeightNative(); 
     MediaFormat format = MediaFormat.createVideoFormat("video/avc", width, height); 
     format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, width * height); 
     format.setByteBuffer("csd-0", ByteBuffer.wrap(getCsdNative())); 
     LogUtils.log("CSD: "); 
     outputAsHex(getCsdNative()); 
     try { 
      decoder = MediaCodec.createDecoderByType("video/avc"); 
      decoder.configure(format, surface, null, 0); 
      decoder.start(); 

      playerThread = new PlayerThread(); 
      playerThread.start(); 

      new OutputThread().run(); 
     } 
     catch (Exception e) { 
      e.printStackTrace(); 
     } 
    } 

    public void onNewVideoData() { 
     int index = decoder.dequeueInputBuffer(0); 
     if (index >= 0) { 
      byte[] data = getVideoDataNative(); 
      ByteBuffer byteBuffer = decoder.getInputBuffers()[index]; 
      byteBuffer.clear(); 
      byteBuffer.put(data); 
      long pts = getPtsNative(); 

      LogUtils.log("Input AVPacket pts: " + pts); 
      LogUtils.log("Input AVPacket data length: " + data.length); 
      LogUtils.log("Input AVPacket data: "); 
      outputAsHex(data); 

      if (firstPtsTime == 0) { 
       firstPtsTime = pts; 
       pts = 0; 
      } 
      else { 
       pts -= firstPtsTime; 
      } 
      decoder.queueInputBuffer(index, 0, data.length, pts, 0); 
     } 
    } 

    private void outputAsHex(byte[] data) { 
     String[] test = new String[data.length]; 
     for (int i = 0; i < data.length; i++) { 
      test[i] = String.format("%02x", data[i]); 
     } 
     LogUtils.log(test); 
    } 

    private class PlayerThread extends Thread { 
     @Override 
     public void run() { 
      super.run(); 

      startNative(); 
     } 
    } 

    private class OutputThread extends Thread { 

     @Override 
     public void run() { 
      super.run(); 
      MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); 
      while (true) { 
       int index = decoder.dequeueOutputBuffer(info, 0); 
       if (index >= 0) { 
        ByteBuffer buffer = decoder.getOutputBuffers()[index]; 
        buffer.position(info.offset); 
        buffer.limit(info.offset + info.size); 
        byte[] test = new byte[info.size]; 
        for (int i = 0; i < info.size; i++) { 
         test[i] = buffer.get(i); 
        } 
        LogUtils.log("Output info: size=" + info.size + ", presentationTimeUs=" + info.presentationTimeUs + ",offset=" + info.offset + ",flags=" + info.flags); 
        LogUtils.log("Output data: "); 
        outputAsHex(test); 
        decoder.releaseOutputBuffer(index, true); 
       } 
      } 
     } 
    } 
} 

問題: 因爲我使用的TS文件與測試以下視頻流:

Codec: H264 - MPEG-4 AVC (part 10) (h264) 
Resolution: 720x578 
Frame rate: 25 
Decoded format: Planar 4:2:0 YUV 

的CSD如下:

[00, 00, 00, 01, 09, 10, 00, 00, 00, 01, 27, 4d, 40, 1e, 9a, 62, 01, 68, 48, b0, 44, 20, a0, a0, a8, 00, 00, 03, 00, 08, 00, 00, 03, 01, 94, a0, 00, 00, 00, 01, 28, ee, 3c, 80] 

在不同設備上我有不同的結果。但我無法在Surface上顯示視頻。

輸入:

Input AVPacket pts: 351519222 
Input AVPacket data length: 54941 
Input AVPacket data: [00, 00, 00, 01, 09, 10, 00, 00, 00, 01, 27, 4d, 40, 1e, 9a, 62, 01, 68, 48, b0, 44, 20, a0, a0, a8, 00, 00, 03, 00, 08, 00, 00, 03, 01, 94, a0, 00, 00, 00, 01,...] 
------------------------------------ 
Input AVPacket pts: 351539222 
Input AVPacket data length: 9605 
Input AVPacket data: [00, 00, 00, 01, 09, 30, 00, 00, 00, 01, 06, 01, 01, 24, 80, 00, 00, 00, 01, 21, e3, bd, da, e4, 46, c5, 8b, 6b, 7d, 07, 59, 23, 6f, 92, e9, fb, 3b, b9, 4d, f9,...] 
------------------------------------ 
Input AVPacket pts: 351439222 
Input AVPacket data length: 1985 
Input AVPacket data: [00, 00, 00, 01, 09, 50, 00, 00, 00, 01, 06, 01, 01, 14, 80, 00, 00, 00, 01, 21, a8, f2, 74, 69, 14, 54, 4d, c5, 8b, e8, 42, 52, ac, 80, 53, b4, 4d, 24, 1f, 6c,...] 
------------------------------------ 
Input AVPacket pts: 351459222 
Input AVPacket data length: 2121 
Input AVPacket data: [00, 00, 00, 01, 09, 50, 00, 00, 00, 01, 06, 01, 01, 24, 80, 00, 00, 00, 01, 21, a8, f3, 74, e9, 0b, 8b, 17, e8, 43, f8, 10, 88, ca, 2b, 11, 53, c8, 31, f0, 0b,...] 
... on and on 

華碩Zenfone(Android 5.0.2)輸出線程的結果(奇怪:解碼後得到25個輸出緩衝區,每個只有8個字節的數據):

Output info: size=8, presentationTimeUs=-80001,offset=0,flags=0 
Output data: 
[01, 00, 00, 00, 90, c5, 99, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=0,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, 78, ea, 86, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=720000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, e8, 86, b6, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=780000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, c0, cb, 93, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=840000,offset=0,flags=0 
Output data: 
[01, 00, 00, 00, 80, 87, 93, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=960000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, e0, 3f, 8b, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=1040000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, f8, 76, 85, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=1180000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, e0, 87, 93, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=1260000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, e8, b5, d2, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=1800000,offset=0,flags=0 
Output data: 
[01, 00, 00, 00, 90, c5, 99, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=1860000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, e0, c0, 84, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=2080000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, c0, cb, 93, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=3440000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, 80, 87, 93, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=3520000,offset=0,flags=0 
Output data: 
[01, 00, 00, 00, 78, ea, 86, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=4160000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, e8, 86, b6, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=4300000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, e0, 3f, 8b, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=4400000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, 90, c5, 99, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=4480000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, f8, 76, 85, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=4680000,offset=0,flags=0 
Output data: 
[01, 00, 00, 00, c0, cb, 93, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=4720000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, e0, c0, 84, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=4760000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, e0, 87, 93, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=4800000,offset=0,flags=0 
Output data: 
[01, 00, 00, 00, 58, 54, 83, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=5040000,offset=0,flags=0 
Output data: 
[01, 00, 00, 00, e8, b5, d2, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=5100000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, 80, 87, 93, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=5320000,offset=0,flags=0 
Output data: 
[01, 00, 00, 00, 78, ea, 86, ac] 
--------------------------- 
Output info: size=8, presentationTimeUs=5380000,offset=0,flags=1 
Output data: 
[01, 00, 00, 00, e8, 86, b6, ac] 

其他華碩Zenfone日誌:

01-25 17:11:36.859 4851-4934/com.example.app I/OMXClient: Using client-side OMX mux. 
01-25 17:11:36.865 317-1075/? I/OMX-VDEC-1080P: component_init: OMX.qcom.video.decoder.avc : fd=43 
01-25 17:11:36.867 317-1075/? I/OMX-VDEC-1080P: Capabilities: driver_name = msm_vidc_driver, card = msm_vdec_8974, bus_info = , version = 1, capabilities = 4003000 
01-25 17:11:36.881 317-1075/? I/OMX-VDEC-1080P: omx_vdec::component_init() success : fd=43 
01-25 17:11:36.885 4851-4934/com.example.app I/ACodec: [OMX.qcom.video.decoder.avc] DRC Mode: Dynamic Buffer Mode 
01-25 17:11:36.893 317-20612/? E/C2DColorConvert: unknown format passed for luma alignment number 
01-25 17:11:36.933 317-12269/? E/C2DColorConvert: unknown format passed for luma alignment number 
01-25 17:11:36.933 317-12269/? E/C2DColorConvert: unknown format passed for luma alignment number 
01-25 17:11:36.935 317-5559/? E/C2DColorConvert: unknown format passed for luma alignment number 
01-25 17:11:36.957 317-5559/? E/C2DColorConvert: unknown format passed for luma alignment number 
01-25 17:11:36.957 4851-4934/com.example.app I/ExtendedCodec: Decoder will be in frame by frame mode 
01-25 17:11:36.963 317-1075/? E/C2DColorConvert: unknown format passed for luma alignment number 
01-25 17:11:36.963 317-1075/? E/C2DColorConvert: unknown format passed for luma alignment number 
01-25 17:11:36.964 317-20612/? E/OMX-VDEC-1080P: Extension: OMX.google.android.index.describeColorFormat not implemented 
01-25 17:11:37.072 317-20612/? E/OMX-VDEC-1080P: Extension: OMX.google.android.index.describeColorFormat not implemented 
01-25 17:11:37.072 4851-4934/com.example.app W/ACodec: do not know color format 0x7fa30c04 = 2141391876 

華碩Nexus 7(Android 6.0.1)崩潰:

01-25 17:23:06.921 11602-11695/com.example.app I/OMXClient: Using client-side OMX mux. 
01-25 17:23:06.952 11602-11694/com.example.app I/MediaCodec: [OMX.qcom.video.decoder.avc] setting surface generation to 11880449 
01-25 17:23:06.954 194-194/? E/OMX-VDEC-1080P: Extension: OMX.google.android.index.storeANWBufferInMetadata not implemented 
01-25 17:23:06.954 194-194/? E/OMX-VDEC-1080P: Extension: OMX.google.android.index.storeMetaDataInBuffers not implemented 
01-25 17:23:06.954 194-194/? E/OMXNodeInstance: getExtensionIndex(45:qcom.decoder.avc, OMX.google.android.index.storeMetaDataInBuffers) ERROR: NotImplemented(0x80001006) 
01-25 17:23:06.954 11602-11695/com.example.app E/ACodec: [OMX.qcom.video.decoder.avc] storeMetaDataInBuffers failed w/ err -2147483648 
01-25 17:23:06.963 11602-11695/com.example.app D/SurfaceUtils: set up nativeWindow 0xa0b7a108 for 720x576, color 0x7fa30c03, rotation 0, usage 0x42002900 
01-25 17:23:06.967 194-604/? E/OMX-VDEC-1080P: GET_MV_BUFFER_SIZE returned: Size: 122880 and alignment: 8192 
01-25 17:23:07.203 11602-11695/com.example.app W/AHierarchicalStateMachine: Warning message AMessage(what = 'omxI') = { 
                     int32_t type = 0 
                     int32_t event = 2130706432 
                     int32_t data1 = 1 
                     int32_t data2 = 0 
                     } unhandled in root state. 
01-25 17:23:07.232 11602-11695/com.example.app D/SurfaceUtils: set up nativeWindow 0xa0b7a108 for 720x576, color 0x7fa30c03, rotation 0, usage 0x42002900 
01-25 17:23:07.241 194-194/? E/OMX-VDEC-1080P: GET_MV_BUFFER_SIZE returned: Size: 122880 and alignment: 8192 
01-25 17:23:07.242 194-194/? E/OMX-VDEC-1080P: Insufficient sized buffer given for playback, expected 671744, got 663552 
01-25 17:23:07.242 194-194/? E/OMXNodeInstance: useBuffer(45:qcom.decoder.avc, Output:1 [email protected]) ERROR: BadParameter(0x80001005) 
01-25 17:23:07.243 11602-11695/com.example.app E/ACodec: registering GraphicBuffer 0 with OMX IL component failed: -2147483648 
01-25 17:23:07.243 11602-11695/com.example.app E/ACodec: Failed to allocate output port buffers after port reconfiguration: (-2147483648) 
01-25 17:23:07.243 11602-11695/com.example.app E/ACodec: signalError(omxError 0x80001001, internalError -2147483648) 
01-25 17:23:07.243 11602-11694/com.example.app E/MediaCodec: Codec reported err 0x80001001, actionCode 0, while in state 6 
01-25 17:23:07.245 11602-11602/com.example.app W/System.err: java.lang.IllegalStateException 
01-25 17:23:07.245 11602-11602/com.example.app W/System.err:  at android.media.MediaCodec.native_dequeueOutputBuffer(Native Method) 
01-25 17:23:07.245 11602-11602/com.example.app W/System.err:  at android.media.MediaCodec.dequeueOutputBuffer(MediaCodec.java:2379) 
01-25 17:23:07.245 11602-11602/com.example.app W/System.err:  at com.example.app.FfmpegPlayer$OutputThread.run(FfmpegPlayer.java:122) 
01-25 17:23:07.245 11602-11602/com.example.app W/System.err:  at com.example.app.FfmpegPlayer.setDataSource(FfmpegPlayer.java:66) 
01-25 17:23:07.245 11602-11602/com.example.app W/System.err:  at com.example.app.activities.TestActivity$2.surfaceCreated(TestActivity.java:151) 
01-25 17:23:07.245 11602-11602/com.example.app W/System.err:  at android.view.SurfaceView.updateWindow(SurfaceView.java:583) 
01-25 17:23:07.245 11602-11602/com.example.app W/System.err:  at android.view.SurfaceView$3.onPreDraw(SurfaceView.java:177) 
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:  at android.view.ViewTreeObserver.dispatchOnPreDraw(ViewTreeObserver.java:944) 
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:  at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2055) 
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:  at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1107) 
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:  at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:6013) 
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:  at android.view.Choreographer$CallbackRecord.run(Choreographer.java:858) 
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:  at android.view.Choreographer.doCallbacks(Choreographer.java:670) 
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:  at android.view.Choreographer.doFrame(Choreographer.java:606) 
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:  at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:844) 
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:  at android.os.Handler.handleCallback(Handler.java:739) 
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:  at android.os.Handler.dispatchMessage(Handler.java:95) 
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:  at android.os.Looper.loop(Looper.java:148) 
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:  at android.app.ActivityThread.main(ActivityThread.java:5417) 
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:  at java.lang.reflect.Method.invoke(Native Method) 
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:  at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:726) 
01-25 17:23:07.246 11602-11602/com.example.app W/System.err:  at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:616) 

另一個設備上輸出緩衝區總是空的:dequeueOutputBuffer返回的索引從來不會 >= 0;

我在做什麼錯?

回答

0

兩件事情:

首先,視頻流和播放你應該使用ExoPlayer

ExoPlayer支持HLS開箱即用。 ExoPlayer是高度模塊化的,應該直接爲多播視頻創建一個自定義SampleSource,可能會重複使用ffmpeg demux。

,看着你的代碼,我看到

01-25 17:23:07.242 194-194 /? E/OMX-VDEC-1080P:用於回放的大小不足的緩衝區,預計爲671744,得到663552

也許KEY_MAX_INPUT_SIZE未被正確設置。看看如何ExoPlayer sets KEY_MAX_INPUT_SIZE,即

// Round up width/height to an integer number of macroblocks. 
maxPixels = ((maxWidth + 15)/16) * ((maxHeight + 15)/16) * 16 * 16; 
minCompressionRatio = 2; 
// ... 
int maxInputSize = (maxPixels * 3)/(2 * minCompressionRatio); 
format.setInteger(android.media.MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);