步驟
1.Android端寫surfaceView,設定視訊格式ARGB,callback,holder,不多說
2.解碼視訊資料,得到一幀一幀的YUV資料(了解視訊解碼流程)
3.通過surfaceView擷取nativewindow
4.lock Window
5.使用第三方庫libyuv將YUV轉換為RGBA
6.set buffer
7.unlock Window
實作代碼
#include <jni.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>                    /* usleep */

#include <android/log.h>
#include <android/native_window.h>     /* ANativeWindow_* rendering API */
#include <android/native_window_jni.h> /* ANativeWindow_fromSurface */

/* FFmpeg: codec (decode) */
#include "libavcodec/avcodec.h"
/* FFmpeg: container (de)muxing */
#include "libavformat/avformat.h"
/* FFmpeg: pixel processing */
#include "include/libswscale/swscale.h"
/* libyuv: YUV -> RGBA conversion (I420ToARGB) */
#include "libyuv.h"
/* Generated JNI header */
#include "com_example_ffmpeg_FfmpegUtil.h"
#define LOGI(FORMAT,...) __android_log_print(ANDROID_LOG_INFO,"render",FORMAT,##__VA_ARGS__);
#define LOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR,"render",FORMAT,##__VA_ARGS__);
/**
 * Decode the video stream of the given media file with FFmpeg, convert each
 * decoded YUV420 frame to RGBA with libyuv, and post it to the Java Surface
 * through an ANativeWindow.
 *
 * env        JNI environment.
 * jclzz      Calling class (unused).
 * input_jstr Path/URL of the input media file.
 * surface    android.view.Surface to render into.
 *
 * No return value; failures are logged, resources are released, and the
 * function returns early.
 *
 * NOTE(review): the included JNI header is com_example_ffmpeg_FfmpegUtil.h but
 * this symbol is com.example.rendor.SurfaceUtil.render — confirm the generated
 * header matches the Java class actually declaring the native method.
 */
JNIEXPORT void JNICALL Java_com_example_rendor_SurfaceUtil_render(JNIEnv *env,
        jclass jclzz, jstring input_jstr, jobject surface) {
    const char *input_cstr = (*env)->GetStringUTFChars(env, input_jstr, NULL);
    if (input_cstr == NULL) {
        return; /* JNI OOM; an exception is already pending in Java. */
    }

    /* Register all demuxers/decoders (required by the old FFmpeg API). */
    av_register_all();

    AVFormatContext *aVFormatContext = avformat_alloc_context();
    AVCodecContext *aVCodecContext = NULL;
    AVPacket *aVPacket = NULL;
    AVFrame *aVFrame = NULL;
    AVFrame *rgbFrame = NULL;
    ANativeWindow *native_window = NULL;
    int codec_opened = 0;

    /* Open the input file and read the container header. */
    if (avformat_open_input(&aVFormatContext, input_cstr, NULL, NULL) < 0) {
        LOGE("%s", "打開視訊檔案失敗");
        goto cleanup;
    }
    LOGE("%s", "avformat_open_input");

    if (avformat_find_stream_info(aVFormatContext, NULL) < 0) {
        LOGE("%s", "stream_info失敗");
        goto cleanup;
    }
    /* Was a copy-paste of the previous log message ("avformat_open_input"). */
    LOGE("%s", "avformat_find_stream_info");

    /* Locate the first video stream. */
    int index = -1;
    for (unsigned int i = 0; i < aVFormatContext->nb_streams; i++) {
        if (aVFormatContext->streams[i]->codec->codec_type
                == AVMEDIA_TYPE_VIDEO) {
            index = (int) i;
            break;
        }
    }
    if (index == -1) {
        LOGE("%s", "沒有找到視訊流");
        goto cleanup;
    }
    /* Owned by the format context — close but never free it ourselves. */
    aVCodecContext = aVFormatContext->streams[index]->codec;

    /* Find and open the matching decoder. */
    AVCodec *avcodec = avcodec_find_decoder(aVCodecContext->codec_id);
    if (avcodec == NULL) {
        LOGE("%s", "沒有找到解碼器");
        goto cleanup;
    }
    LOGE("%s", "avcodec_find_decoder");
    if (avcodec_open2(aVCodecContext, avcodec, NULL) < 0) {
        LOGE("%s", "打開解碼器失敗");
        goto cleanup;
    }
    codec_opened = 1;
    LOGE("%s", "avcodec_open2");

    /* One reusable packet for compressed data, one frame for decoded YUV,
     * one frame whose planes will alias the window's RGBA buffer. */
    aVPacket = (AVPacket *) av_malloc(sizeof(AVPacket));
    aVFrame = av_frame_alloc();
    rgbFrame = av_frame_alloc();
    if (aVPacket == NULL || aVFrame == NULL || rgbFrame == NULL) {
        goto cleanup;
    }

    /* Obtain the native window from the Java Surface; must be released. */
    native_window = ANativeWindow_fromSurface(env, surface);
    if (native_window == NULL) {
        goto cleanup;
    }
    ANativeWindow_Buffer outBuffer;
    LOGE("%s", "初始化緩沖區");

    /* The compositor scales the buffer to the screen, so size it once to the
     * decoded video dimensions — this is loop-invariant. */
    ANativeWindow_setBuffersGeometry(native_window, aVCodecContext->width,
            aVCodecContext->height, WINDOW_FORMAT_RGBA_8888);

    int got_picture_ptr = 0;
    while (av_read_frame(aVFormatContext, aVPacket) >= 0) {
        LOGE("%s", "av_read_frame");
        avcodec_decode_video2(aVCodecContext, aVFrame, &got_picture_ptr,
                aVPacket);
        if (got_picture_ptr) {
            /* Lock the window's next drawing surface for writing. */
            if (ANativeWindow_lock(native_window, &outBuffer, NULL) < 0) {
                av_free_packet(aVPacket);
                continue; /* Surface not ready; drop this frame. */
            }
            /* Point rgbFrame's data/linesize at the window buffer so libyuv
             * writes straight into it (4 bytes per pixel either way). */
            avpicture_fill((AVPicture *) rgbFrame, outBuffer.bits,
                    PIX_FMT_RGB32, aVCodecContext->width,
                    aVCodecContext->height);
            /* Convert the YUV420P planes (Y=data[0], U=data[1], V=data[2]) to
             * 32-bit output.  NOTE(review): libyuv "ARGB" is B,G,R,A in
             * memory — if colors look swapped on the RGBA_8888 surface,
             * switch to I420ToABGR. */
            I420ToARGB(aVFrame->data[0], aVFrame->linesize[0],
                    aVFrame->data[1], aVFrame->linesize[1],
                    aVFrame->data[2], aVFrame->linesize[2],
                    rgbFrame->data[0], rgbFrame->linesize[0],
                    aVCodecContext->width, aVCodecContext->height);
            /* Unlock and post the new buffer to the display. */
            ANativeWindow_unlockAndPost(native_window);
            /* ~60 fps pacing; TODO: honor the stream's real frame timestamps. */
            usleep(16 * 1000);
        }
        av_free_packet(aVPacket);
    }

cleanup:
    /* Single exit point: every path releases exactly what it acquired. */
    if (native_window != NULL) {
        ANativeWindow_release(native_window);
    }
    av_frame_free(&rgbFrame); /* NULL-safe */
    av_frame_free(&aVFrame);
    av_free(aVPacket);
    if (codec_opened) {
        /* The context belongs to the format context: close, don't free. */
        avcodec_close(aVCodecContext);
    }
    /* Closes the demuxer and frees the format context (NULL-safe). */
    avformat_close_input(&aVFormatContext);
    (*env)->ReleaseStringUTFChars(env, input_jstr, input_cstr);
}
Android.mk
依賴FFmpeg的8個so(實際上並非全部都用得到),以及第三方的libyuv(負責YUV轉RGB)
LOCAL_PATH := $(call my-dir)

# Prebuilt FFmpeg shared libraries.
# NOTE: the version suffixes below match an FFmpeg 2.6-era build (the era of
# avcodec_decode_video2/avpicture_fill used by ffmpeg.c); adjust each
# LOCAL_SRC_FILES to the exact file names of your prebuilt .so files.
include $(CLEAR_VARS)
LOCAL_MODULE := avcodec
LOCAL_SRC_FILES := libavcodec-56.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avdevice
LOCAL_SRC_FILES := libavdevice-56.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avfilter
LOCAL_SRC_FILES := libavfilter-5.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avformat
LOCAL_SRC_FILES := libavformat-56.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avutil
LOCAL_SRC_FILES := libavutil-54.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := postproc
LOCAL_SRC_FILES := libpostproc-53.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swresample
LOCAL_SRC_FILES := libswresample-1.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swscale
LOCAL_SRC_FILES := libswscale-3.so
include $(PREBUILT_SHARED_LIBRARY)

# Third-party libyuv (YUV -> RGB conversion).
include $(CLEAR_VARS)
LOCAL_MODULE := yuv
LOCAL_SRC_FILES := libyuv.so
include $(PREBUILT_SHARED_LIBRARY)

# The JNI module itself.
include $(CLEAR_VARS)
LOCAL_MODULE := ffmpeg
LOCAL_SRC_FILES := ffmpeg.c
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include/ffmpeg
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include/libyuv
# -landroid is required for the native window API (<android/native_window.h>);
# without it the link (and the header lookup in some setups) fails.
LOCAL_LDLIBS := -llog -landroid
LOCAL_SHARED_LIBRARIES := yuv avcodec avdevice avfilter avformat avutil postproc swresample swscale
include $(BUILD_SHARED_LIBRARY)
Application.mk
# Target ABI(s) to build for.  ('#' is the make comment character; '//' would
# be parsed as a rule and break the build.)
APP_ABI := armeabi
# android-9 or later is required: earlier platforms ship without
# <android/native_window.h> / <android/native_window_jni.h>.
APP_PLATFORM := android-9