Enabling RTSP in ijkplayer, with recording and snapshot support

 * author:lebonbill
 * E-mail:lebonbillwu@gmail.com

The ijkplayer framework

ijkplayer is Bilibili's open-source player framework for video and live streaming. It is built on FFmpeg and supports both Android and iOS. Repository: https://github.com/Bilibili/ijkplayer.git

Before compiling

I first tried building with Cygwin and ran into all kinds of problems; building on Ubuntu is much easier, so I build inside an Ubuntu VM (VMware on Windows 8). First clone the ijkplayer sources with git, then configure the Android SDK and Android NDK environment variables:
# add these lines to your ~/.bash_profile or ~/.profile
# export ANDROID_SDK=<your sdk path>
# export ANDROID_NDK=<your ndk path>

Enabling RTSP support

RTSP is not enabled by default. To enable it, add the corresponding protocol, demuxer and decoder entries to module-lite.sh (found in the config directory):
export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-protocol=rtp"
export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-demuxer=rtsp"
export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-decoder=mjpeg"
export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-demuxer=mjpeg"
Then make the build use module-lite.sh:
cd config
rm module.sh
ln -s module-lite.sh module.sh
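Many RTSP sources (IP cameras in particular) behave better over TCP interleaved transport than over the default UDP. FFmpeg exposes this as the rtsp_transport format option; a minimal sketch, assuming the option is added to ffp->format_opts, the dictionary that ff_ffplay hands to avformat_open_input():

// Sketch (assumption): force RTSP over TCP before the stream is opened,
// e.g. right before avformat_open_input() in ff_ffplay.c's read_thread().
av_dict_set(&ffp->format_opts, "rtsp_transport", "tcp", 0); // default is UDP

The same option can also be passed in from the application through ijkplayer's normal per-player option mechanism instead of hard-coding it.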

Adding recording and snapshot support

If you want recording and snapshots, read this part before compiling; otherwise skip it. Since everything sits on top of FFmpeg, startRecord (start recording), stopRecord (stop recording) and the snapshot all require changes at the FFmpeg/ffplay layer of the player, so open ff_ffplay.h under ijkplayer-android/ijkmedia/ijkplayer and declare the following functions:
int       ffp_start_record(FFPlayer *ffp, const char *file_name);
int       ffp_stop_record(FFPlayer *ffp);
int       ffp_record_file(FFPlayer *ffp, AVPacket *packet);
void      ffp_get_current_frame_l(FFPlayer *ffp, uint8_t *frame_buf);
Modify ff_ffplay.c
* Start recording: file_name is the output path
int ffp_start_record(FFPlayer *ffp, const char *file_name)
{
    assert(ffp);
    
    VideoState *is = ffp->is;
    
    ffp->m_ofmt_ctx = NULL;
    ffp->m_ofmt = NULL;
    ffp->is_record = 0;
    ffp->record_error = 0;
    
    if (!file_name || !strlen(file_name)) { // no output path was given
        av_log(ffp, AV_LOG_ERROR, "filename is invalid\n");
        goto end;
    }
    
    if (!is || !is->ic || is->paused || is->abort_request) { // no input context, or playback is paused/aborted
        av_log(ffp, AV_LOG_ERROR, "is, is->ic, is->paused is invalid\n");
        goto end;
    }
    
    if (ffp->is_record) { // already recording
        av_log(ffp, AV_LOG_ERROR, "recording has already started\n");
        goto end;
    }
    
    // Allocate an AVFormatContext for the output (MP4) file
    avformat_alloc_output_context2(&ffp->m_ofmt_ctx, NULL, "mp4", file_name);
    if (!ffp->m_ofmt_ctx) {
        av_log(ffp, AV_LOG_ERROR, "Could not create output context filename is %s\n", file_name);
        goto end;
    }
    ffp->m_ofmt = ffp->m_ofmt_ctx->oformat;
    
    for (int i = 0; i < is->ic->nb_streams; i++) {
        // Create an output stream for every input stream
        AVStream *in_stream = is->ic->streams[i];
        AVStream *out_stream = avformat_new_stream(ffp->m_ofmt_ctx, in_stream->codec->codec);
        if (!out_stream) {
            av_log(ffp, AV_LOG_ERROR, "Failed allocating output stream\n");
            goto end;
        }
        
        // Copy the input stream's codec parameters into the output stream's AVCodecContext
        av_log(ffp, AV_LOG_DEBUG, "in_stream->codec: %p\n", (void *)in_stream->codec);
        if (avcodec_copy_context(out_stream->codec, in_stream->codec) < 0) {
            av_log(ffp, AV_LOG_ERROR, "Failed to copy context from input to output stream codec context\n");
            goto end;
        }
        
        out_stream->codec->codec_tag = 0;
        if (ffp->m_ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
            out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
        }
    }
    
    av_dump_format(ffp->m_ofmt_ctx, 0, file_name, 1);
    
    // Open the output file
    if (!(ffp->m_ofmt->flags & AVFMT_NOFILE)) {
        if (avio_open(&ffp->m_ofmt_ctx->pb, file_name, AVIO_FLAG_WRITE) < 0) {
            av_log(ffp, AV_LOG_ERROR, "Could not open output file '%s'", file_name);
            goto end;
        }
    }
    
    // Write the container header
    if (avformat_write_header(ffp->m_ofmt_ctx, NULL) < 0) {
        av_log(ffp, AV_LOG_ERROR, "Error occurred when opening output file\n");
        goto end;
    }
    
    ffp->is_record = 1;
    ffp->record_error = 0;
    pthread_mutex_init(&ffp->record_mutex, NULL);
    
    return 0;
end:
    ffp->record_error = 1;
    return -1;
}
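Note that avcodec_copy_context() and the per-stream AVStream.codec field are deprecated in newer FFmpeg releases; the code above matches the older FFmpeg bundled with ijkplayer. If you build against a newer FFmpeg, the stream-copy part of the loop can be written with codec parameters instead — a sketch under that assumption:

// Sketch (assumption): AVCodecParameters-based stream copy for newer FFmpeg,
// replacing the deprecated avcodec_copy_context()/AVStream.codec calls above.
AVStream *out_stream = avformat_new_stream(ffp->m_ofmt_ctx, NULL);
if (!out_stream)
    goto end;
if (avcodec_parameters_copy(out_stream->codecpar, in_stream->codecpar) < 0) {
    av_log(ffp, AV_LOG_ERROR, "Failed to copy codec parameters\n");
    goto end;
}
out_stream->codecpar->codec_tag = 0;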
* Stop recording

int ffp_stop_record(FFPlayer *ffp)
{
    assert(ffp);
    if (ffp->is_record) {
        ffp->is_record = 0;
        pthread_mutex_lock(&ffp->record_mutex);
        if (ffp->m_ofmt_ctx != NULL) {
            av_write_trailer(ffp->m_ofmt_ctx);
            if (ffp->m_ofmt_ctx && !(ffp->m_ofmt->flags & AVFMT_NOFILE)) {
                avio_close(ffp->m_ofmt_ctx->pb);
            }
            avformat_free_context(ffp->m_ofmt_ctx);
            ffp->m_ofmt_ctx = NULL;
            ffp->is_first = 0;
        }
        pthread_mutex_unlock(&ffp->record_mutex);
        pthread_mutex_destroy(&ffp->record_mutex);
        av_log(ffp, AV_LOG_DEBUG, "stopRecord ok\n");
    } else {
        av_log(ffp, AV_LOG_ERROR, "don't need stopRecord\n");
    }
    return 0;
}
* Save file (write each packet)

ffp_record_file() is the routine that actually writes demuxed packets into the recording: it runs for every AVPacket read from the input while is_record is set, and it must hold record_mutex while it touches m_ofmt_ctx.
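A sketch of a typical implementation, assuming the is_first/start_pts/start_dts fields added to FFPlayer below (an outline on my part, not verbatim code from the original source):

// Sketch (assumption): write one demuxed packet into the recording, shifting
// timestamps so that the recorded file starts at zero.
int ffp_record_file(FFPlayer *ffp, AVPacket *packet)
{
    assert(ffp);
    VideoState *is = ffp->is;
    int ret = 0;

    if (!ffp->is_record || !ffp->m_ofmt_ctx)
        return 0;
    if (!packet) {
        ffp->record_error = 1;
        return -1;
    }

    AVPacket pkt = { 0 };
    if (av_packet_ref(&pkt, packet) < 0)
        return -1;

    pthread_mutex_lock(&ffp->record_mutex);

    // Shift timestamps so the recording starts at zero
    // (real code should also handle AV_NOPTS_VALUE here)
    if (!ffp->is_first) {
        ffp->is_first = 1;
        ffp->start_pts = packet->pts;
        ffp->start_dts = packet->dts;
        pkt.pts = 0;
        pkt.dts = 0;
    } else {
        pkt.pts = packet->pts - ffp->start_pts;
        pkt.dts = packet->dts - ffp->start_dts;
    }

    // Rescale from the input stream's time base to the output stream's
    AVStream *in_stream  = is->ic->streams[pkt.stream_index];
    AVStream *out_stream = ffp->m_ofmt_ctx->streams[pkt.stream_index];
    pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base,
                               AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
    pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base,
                               AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
    pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
    pkt.pos = -1;

    // av_interleaved_write_frame() takes ownership of the packet reference
    ret = av_interleaved_write_frame(ffp->m_ofmt_ctx, &pkt);
    if (ret < 0)
        av_log(ffp, AV_LOG_ERROR, "Error muxing packet\n");

    pthread_mutex_unlock(&ffp->record_mutex);
    return ret;
}

Recording only happens if ffp_record_file() is actually called for every packet pulled off the network; in ff_ffplay.c this goes in read_thread(), right after av_read_frame() succeeds — something along these lines (again a sketch):

// read_thread(): after a successful av_read_frame(ic, pkt)
if (ffp->is_record) {
    if (ffp_record_file(ffp, pkt) != 0)
        ffp->record_error = 1;
}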
* Snapshot
void ffp_get_current_frame_l(FFPlayer *ffp, uint8_t *frame_buf)
{
  ALOGD("=============>start snapshot\n");

  VideoState *is = ffp->is;
  Frame *vp;
  int i = 0, linesize = 0, pixels = 0;
  uint8_t *src;

  vp = &is->pictq.queue[is->pictq.rindex];
  int height = vp->bmp->h;
  int width = vp->bmp->w;

  ALOGD("=============>%d X %d === %d\n", width, height, vp->bmp->pitches[0]);

  // copy data to bitmap in java code
  linesize = vp->bmp->pitches[0];
  src = vp->bmp->pixels[0];
  pixels = width * 4;
  for (i = 0; i < height; i++) {
      memcpy(frame_buf + i * pixels, src + i * linesize, pixels);
  }
  
  ALOGD("=============>end snapshot\n");
}
Modify ff_ffplay_def.h and add the fields the recording code needs to the FFPlayer struct:
    AVFormatContext *m_ofmt_ctx;        // output AVFormatContext for the recording
    AVOutputFormat *m_ofmt;
    pthread_mutex_t record_mutex;       // lock protecting the recording state
    int is_record;                      // currently recording?
    int record_error;
    
    int is_first;                       // has the first packet been written?
    int64_t start_pts;                  // pts when recording started
    int64_t start_dts;                  // dts when recording started
Modify ijkplayer.c and add the corresponding recording wrappers:
int ijkmp_start_record(IjkMediaPlayer *mp,const char *file_name)
{
    assert(mp);
    MPTRACE("ijkmp_startRecord()\n");
    pthread_mutex_lock(&mp->mutex);
    int retval = ffp_start_record(mp->ffplayer,file_name);
    pthread_mutex_unlock(&mp->mutex);
    MPTRACE("ijkmp_startRecord()=%d\n", retval);
    return retval;
}

int ijkmp_stop_record(IjkMediaPlayer *mp)
{
    assert(mp);
    MPTRACE("ijkmp_stopRecord()\n");
    pthread_mutex_lock(&mp->mutex);
    int retval = ffp_stop_record(mp->ffplayer);
    pthread_mutex_unlock(&mp->mutex);
    MPTRACE("ijkmp_stopRecord()=%d\n", retval);
    return retval;
}
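These wrappers also need matching declarations so the JNI layer can call them; a sketch of the additions to ijkplayer.h (the snapshot wrapper itself is shown next):

/* ijkplayer.h additions (sketch) */
int  ijkmp_start_record(IjkMediaPlayer *mp, const char *file_name);
int  ijkmp_stop_record(IjkMediaPlayer *mp);
void ijkmp_get_current_frame(IjkMediaPlayer *mp, uint8_t *frame_buf);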

Implement the snapshot entry point in ijkplayer.c as well: the JNI layer calls ijkmp_get_current_frame(), which only has to lock the player mutex and delegate to the ffp_get_current_frame_l() implemented above.
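A sketch of that wrapper, following the same pattern as the recording wrappers (my assumption, not verbatim from the original source):

// Sketch (assumption): thin ijkplayer.c wrapper called by the JNI layer
void ijkmp_get_current_frame(IjkMediaPlayer *mp, uint8_t *frame_buf)
{
    assert(mp);
    MPTRACE("ijkmp_get_current_frame()\n");
    pthread_mutex_lock(&mp->mutex);
    ffp_get_current_frame_l(mp->ffplayer, frame_buf);
    pthread_mutex_unlock(&mp->mutex);
    MPTRACE("ijkmp_get_current_frame()=done\n");
}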
You also need to edit the JNI glue under ijkmedia/ijkplayer/android (ijkplayer_jni.c) and add the corresponding native methods:
static jboolean
IjkMediaPlayer_getCurrentFrame(JNIEnv *env, jobject thiz, jobject bitmap)
{
    jboolean retval = JNI_TRUE;
    IjkMediaPlayer *mp = jni_get_media_player(env, thiz);
    JNI_CHECK_GOTO(mp, env, NULL, "mpjni: getCurrentFrame: null mp", LABEL_RETURN);

    uint8_t *frame_buffer = NULL;

    if (0 > AndroidBitmap_lockPixels(env, bitmap, (void **)&frame_buffer)) {
        (*env)->ThrowNew(env, (*env)->FindClass(env, "java/io/IOException"), "Unable to lock pixels.");
        retval = JNI_FALSE;
        goto LABEL_RETURN;
    }

    ijkmp_get_current_frame(mp, frame_buffer);

    if (0 > AndroidBitmap_unlockPixels(env, bitmap)) {
        (*env)->ThrowNew(env, (*env)->FindClass(env, "java/io/IOException"), "Unable to unlock pixels.");
        retval = JNI_FALSE;
        goto LABEL_RETURN;
    }

LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
    return retval;
}


static jint
IjkMediaPlayer_startRecord(JNIEnv *env, jobject thiz, jstring file)
{
    jint retval = 0;
    IjkMediaPlayer *mp = jni_get_media_player(env, thiz);
    JNI_CHECK_GOTO(mp, env, NULL, "mpjni: startRecord: null mp", LABEL_RETURN);

    const char *nativeString = (*env)->GetStringUTFChars(env, file, 0);
    retval = ijkmp_start_record(mp, nativeString);
    (*env)->ReleaseStringUTFChars(env, file, nativeString);

LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
    return retval;
}

static jint
IjkMediaPlayer_stopRecord(JNIEnv *env, jobject thiz)
{
    jint retval = 0;
    IjkMediaPlayer *mp = jni_get_media_player(env, thiz);
    JNI_CHECK_GOTO(mp, env, NULL, "mpjni: stopRecord: null mp", LABEL_RETURN);

    retval = ijkmp_stop_record(mp);

LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
    return retval;
}
Add the corresponding entries to the JNINativeMethod g_methods[] table:
static JNINativeMethod g_methods[] = {
    {
        "_setDataSource",
        "(Ljava/lang/String;[Ljava/lang/String;[Ljava/lang/String;)V",
        (void *) IjkMediaPlayer_setDataSourceAndHeaders
    },
    { "_setDataSourceFd",       "(I)V",     (void *) IjkMediaPlayer_setDataSourceFd },
    { "_setDataSource",         "(Ltv/danmaku/ijk/media/player/misc/IMediaDataSource;)V", (void *)IjkMediaPlayer_setDataSourceCallback },
    { "_setAndroidIOCallback",  "(Ltv/danmaku/ijk/media/player/misc/IAndroidIO;)V", (void *)IjkMediaPlayer_setAndroidIOCallback },

    { "_setVideoSurface",       "(Landroid/view/Surface;)V", (void *) IjkMediaPlayer_setVideoSurface },
    { "_prepareAsync",          "()V",      (void *) IjkMediaPlayer_prepareAsync },
    { "_start",                 "()V",      (void *) IjkMediaPlayer_start },
    { "_stop",                  "()V",      (void *) IjkMediaPlayer_stop },
    { "seekTo",                 "(J)V",     (void *) IjkMediaPlayer_seekTo },
    { "_pause",                 "()V",      (void *) IjkMediaPlayer_pause },
    { "isPlaying",              "()Z",      (void *) IjkMediaPlayer_isPlaying },
    { "getCurrentPosition",     "()J",      (void *) IjkMediaPlayer_getCurrentPosition },
    { "getDuration",            "()J",      (void *) IjkMediaPlayer_getDurations },
    { "startRecord",            "(Ljava/lang/String;)I",      (void *) IjkMediaPlayer_startRecord },
    { "stopRecord",             "()I",      (void *) IjkMediaPlayer_stopRecord },
……
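The table above is truncated; if it is not already covered in the elided part, the snapshot method has to be registered too. A sketch of that entry (assumption):

    { "getCurrentFrame",        "(Landroid/graphics/Bitmap;)Z", (void *) IjkMediaPlayer_getCurrentFrame },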

Now build; this is the Android version:

cd android/contrib
sh compile-ffmpeg.sh clean
sh compile-ffmpeg.sh all
cd ..
sh compile-ijk.sh all

After the build you will find the .so files for each CPU ABI under the android directory. These are the ones I built:
Download from Baidu cloud disk (see the links at the end of this post); password: mc0l

How to use it

Add the .so files above to your own Android project, copy the ijkplayer-java module, and add a few native methods to the IjkMediaPlayer class:
    @Override
    public native int startRecord(String file);

    @Override
    public native int stopRecord();

    @Override
    public native boolean getCurrentFrame(Bitmap bitmap);
In my project I started from the open-source example, so I modified IjkVideoView to expose the methods above, like this:
    // Snapshot
    public boolean snapshotPicture() {
        int width = getVideoWidth();
        int height = getVideoHeight();
        Bitmap srcBitmap = Bitmap.createBitmap(width,
                height, Bitmap.Config.ARGB_8888);
        boolean flag = getCurrentFrame(srcBitmap);
        if (flag) {
            // Save the bitmap
            String path = getInnerSDCardPath() + "/ijkplayer/snapshot";
            File screenshotsDirectory = new File(path);
            if (!screenshotsDirectory.exists()) {
                screenshotsDirectory.mkdirs();
            }

            File savePath = new File(
                    screenshotsDirectory.getPath()
                            + "/"
                            + new SimpleDateFormat("yyyyMMddHHmmss")
                            .format(new Date()) + ".jpg");
            ImageUtils.saveBitmap(savePath.getPath(), srcBitmap);
        }
        return flag;
    }

    // Start recording
    public void startRecord() {
        if (mMediaPlayer != null && mIjkPlayer != null) {
            String path = getInnerSDCardPath() + "/"
                    + new SimpleDateFormat("yyyyMMddHHmmss")
                    .format(new Date()) + ".mp4";
            mMediaPlayer.startRecord(path);
           
        }
    }

    // Stop recording
    public void stopRecord() {
        if (mMediaPlayer != null && mIjkPlayer != null) {
            mMediaPlayer.stopRecord();

        }
    }
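Two practical notes: writing to the SD-card paths above requires the WRITE_EXTERNAL_STORAGE permission (requested at runtime on Android 6.0+), and the snapshot path assumes a 4-bytes-per-pixel overlay — the Bitmap is created as ARGB_8888 precisely because ffp_get_current_frame_l() copies width * 4 bytes per row, so if the player renders a YUV overlay the copied buffer will not be a valid RGBA image.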

Final words

It took several days to get every requested feature working. Thanks first of all to the iOS team for their ideas and approach; I finally found time to write this up and share it.

Baidu cloud link for the .so files:
https://pan.baidu.com/s/1CuYUsHPA-NwZuz27nKXyEA
Baidu cloud link for the core ijkmedia code:
https://pan.baidu.com/s/1ZNz-8oEC5-LT9arjEMqsfg
