FFmpeg一共包含8個庫:
1)avcodec:編解碼(最重要的庫)
2)avformat:封裝格式處理
4)avfilter:濾鏡特效處理
4)avdevice:各種設備的輸入輸出
5)avutil:工具庫(大部分庫都需要這個庫的支持)
6)postproc:後期處理
7)swresample:音頻采樣數據格式轉換
8)swscale:視頻像素格式轉換
FFmpeg解碼流程圖如下:
20170120092743479.png
FFmpeg解碼函數簡介:
1)av_register_all():注冊所有組件
2)avformat_open_input():打開視頻文件
3)avformat_find_stream_info():獲取視頻文件信息
4)avcodec_find_decoder():查找解碼器
5)avcodec_open2():打開解碼器
6)av_read_frame():從輸入文件讀取一幀壓縮數據
7)avcodec_decode_video2():解壓一幀壓縮數據
8)avcodec_close():關閉解碼器
9)avformat_close_input():關閉輸入視頻文件
FFmpeg使用
step1:
按照我上篇文章編譯FFmpeg for Android獲取到的.so庫和include文件夾。然后在Android studio工程的app/libs目錄下新建文件夾armeabi和include,再將.so庫文件復制到app/libs/armeabi目錄下,然后將include里的頭文件復制到app/libs/include目錄下:
step2:
定義本地方法(native方法):類似于抽象方法,該方法沒有方法體,由c代碼實現。
public native void video_decode(String input, String output); 按“Alt+Enter”提示,點擊提示即可在c代碼中自動生成一個對應的空方法。
public class MainActivity extends Activity {

    // The JNI bridge library plus the FFmpeg shared libraries it depends on.
    // All of them must be present under jniLibs before any native call.
    private static final String[] NATIVE_LIBS = {
            "native-lib",
            "avcodec-58",
            "avdevice-58",
            "avfilter-7",
            "avformat-58",
            "avutil-56",
            "postproc-55",
            "swresample-3",
            "swscale-5",
    };

    static {
        for (String lib : NATIVE_LIBS) {
            System.loadLibrary(lib);
        }
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Tapping the sample TextView decodes /sdcard/video.mp4 into raw
        // YUV frames at /sdcard/video.yuv via the native library.
        TextView sampleText = (TextView) findViewById(R.id.sample_text);
        sampleText.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                String basePath = Environment.getExternalStorageDirectory().getAbsolutePath();
                String inputPath = basePath + "/video.mp4";
                String outputPath = basePath + "/video.yuv";
                video_decode(inputPath, outputPath);
            }
        });
    }

    /**
     * Decodes the video file at {@code input} and writes its frames as raw
     * YUV420P data to {@code output}. Implemented by the 'native-lib' native
     * library, which is packaged with this application.
     */
    public native void video_decode(String input, String output);
}
step3:
修改CMakeLists.txt文件:
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html
# Minimum CMake version required to build the native library.
cmake_minimum_required(VERSION 3.4.1)
# Convenience variables: FFmpeg header directory and the per-ABI
# directory holding the prebuilt .so files.
set(INC_DIR ${PROJECT_SOURCE_DIR}/libs/include)
set(LINK_DIR ${PROJECT_SOURCE_DIR}/libs/${ANDROID_ABI})
# Make the FFmpeg headers visible to every target in this file.
include_directories(${INC_DIR})
# Declare each prebuilt FFmpeg library as an IMPORTED shared library
# (IMPORTED = supplied as a third-party binary, not built here) ...
add_library( avcodec-58 SHARED IMPORTED)
# ... and point each one at its prebuilt binary for the current ABI.
set_target_properties( avcodec-58 PROPERTIES IMPORTED_LOCATION ${LINK_DIR}/libavcodec-58.so)
add_library( avfilter-7 SHARED IMPORTED)
set_target_properties( avfilter-7 PROPERTIES IMPORTED_LOCATION ${LINK_DIR}/libavfilter-7.so)
add_library( avformat-58 SHARED IMPORTED)
set_target_properties( avformat-58 PROPERTIES IMPORTED_LOCATION ${LINK_DIR}/libavformat-58.so)
add_library( avutil-56 SHARED IMPORTED)
set_target_properties( avutil-56 PROPERTIES IMPORTED_LOCATION ${LINK_DIR}/libavutil-56.so)
add_library( swresample-3 SHARED IMPORTED)
set_target_properties( swresample-3 PROPERTIES IMPORTED_LOCATION ${LINK_DIR}/libswresample-3.so)
add_library( swscale-5 SHARED IMPORTED)
set_target_properties( swscale-5 PROPERTIES IMPORTED_LOCATION ${LINK_DIR}/libswscale-5.so)
# Our own C source, built as the shared library loaded by MainActivity.
add_library( native-lib
SHARED
src/main/cpp/native-lib.c )
# Locate the NDK's log library (for __android_log_print).
find_library( log-lib
log )
# Link native-lib against the imported FFmpeg libraries and the NDK log lib.
target_link_libraries( native-lib
avcodec-58
avfilter-7
avformat-58
avutil-56
swresample-3
swscale-5
${log-lib} )
step4:
開始在native-lib.c里面寫解析代碼了:
#include <jni.h>
#include <android/log.h>

// Android logcat helpers (tag "jason").
// BUG FIX: the originals ended with a ';' inside the macro, so a normal call
// like `LOGE("x");` expanded to two statements — which silently breaks an
// unbraced if/else. Wrapping in do{...}while(0) makes each call exactly one
// statement regardless of context.
#define LOGI(FORMAT, ...) do { __android_log_print(ANDROID_LOG_INFO, "jason", FORMAT, ##__VA_ARGS__); } while (0)
#define LOGE(FORMAT, ...) do { __android_log_print(ANDROID_LOG_ERROR, "jason", FORMAT, ##__VA_ARGS__); } while (0)

// Container (de)muxing
#include "libavformat/avformat.h"
// Decoding
#include "libavcodec/avcodec.h"
// Scaling / pixel-format conversion
#include "libswscale/swscale.h"
JNIEXPORT void JNICALL
Java_com_ffmpeg_1study_MainActivity_video_1decode(JNIEnv *env, jobject instance, jstring input_,
jstring output_) {
const char *input_cstr = (*env)->GetStringUTFChars(env, input_, 0);
const char *output_cstr = (*env)->GetStringUTFChars(env, output_, 0);
//1.注冊組件
av_register_all();
//封裝格式上下文
AVFormatContext *pFormatCtx = avformat_alloc_context();
//2.打開輸入視頻文件
if(avformat_open_input(&pFormatCtx,input_cstr,NULL,NULL) != 0){
LOGE("%s","打開輸入視頻文件失敗");
return;
}
//3.獲取視頻信息
if(avformat_find_stream_info(pFormatCtx,NULL) < 0){
LOGE("%s","獲取視頻信息失敗");
return;
}
//視頻解碼,需要找到視頻對應的AVStream所在pFormatCtx->streams的索引位置
int video_stream_idx = -1;
int i = 0;
for(; i < pFormatCtx->nb_streams;i++){
//根據類型判斷,是否是視頻流
if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO){
video_stream_idx = i;
break;
}
}
//4.獲取視頻解碼器
AVCodecContext *pCodeCtx = pFormatCtx->streams[video_stream_idx]->codec;
AVCodec *pCodec = avcodec_find_decoder(pCodeCtx->codec_id);
if(pCodec == NULL){
LOGE("%s","無法解碼");
return;
}
//5.打開解碼器
if(avcodec_open2(pCodeCtx,pCodec,NULL) < 0){
LOGE("%s","解碼器無法打開");
return;
}
//編碼數據
AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
//像素數據(解碼數據)
AVFrame *frame = av_frame_alloc();
AVFrame *yuvFrame = av_frame_alloc();
//只有指定了AVFrame的像素格式、畫面大小才能真正分配內存
//緩沖區分配內存
uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodeCtx->width, pCodeCtx->height));
//初始化緩沖區
avpicture_fill((AVPicture *)yuvFrame, out_buffer, AV_PIX_FMT_YUV420P, pCodeCtx->width, pCodeCtx->height);
//輸出文件
FILE* fp_yuv = fopen(output_cstr,"wb");
//用于像素格式轉換或者縮放
struct SwsContext *sws_ctx = sws_getContext(
pCodeCtx->width, pCodeCtx->height, pCodeCtx->pix_fmt,
pCodeCtx->width, pCodeCtx->height, AV_PIX_FMT_YUV420P,
SWS_BILINEAR, NULL, NULL, NULL);
int len ,got_frame, framecount = 0;
//6.一陣一陣讀取壓縮的視頻數據AVPacket
while(av_read_frame(pFormatCtx,packet) >= 0){
//解碼AVPacket->AVFrame
len = avcodec_decode_video2(pCodeCtx, frame, &got_frame, packet);
//Zero if no frame could be decompressed
//非零,正在解碼
if(got_frame){
//frame->yuvFrame (YUV420P)
//轉為指定的YUV420P像素幀
sws_scale(sws_ctx,
frame->data,frame->linesize, 0, frame->height,
yuvFrame->data, yuvFrame->linesize);
//向YUV文件保存解碼之后的幀數據
//AVFrame->YUV
//一個像素包含一個Y
int y_size = pCodeCtx->width * pCodeCtx->height;
fwrite(yuvFrame->data[0], 1, y_size, fp_yuv);
fwrite(yuvFrame->data[1], 1, y_size/4, fp_yuv);
fwrite(yuvFrame->data[2], 1, y_size/4, fp_yuv);
LOGI("解碼%d幀",framecount++);
}
av_free_packet(packet);
}
fclose(fp_yuv);
av_frame_free(&frame);
avcodec_close(pCodeCtx);
avformat_free_context(pFormatCtx);
(*env)->ReleaseStringUTFChars(env, input_, input_cstr);
(*env)->ReleaseStringUTFChars(env, output_, output_cstr);
}
step5:
在android studio面板的Gradle Script -> build.gradle(Module:app)
修改build.gradle文件,在android {……}的段里面,加上這段
sourceSets {
main {
jniLibs.srcDirs = ['libs']
}
}
在defaultConfig {……}的段里面,加上這段
ndk {
abiFilters 'armeabi'
}
這樣就可以編譯運行了。