
Encoding H.264 with MediaCodec in the NDK

《Decoding with MediaCodec in the NDK》

《Android MediaCodec encoding demo (Java)》

《Encoding H.264 with MediaCodec in the NDK》

《Rendering YUV420p and NV12 with OpenGL in the Android native layer》

《Rendering RGB video with NativeWindow on Android》

《Overlaying text with OpenGL》

《Building FreeType with Android Studio》

《The most primitive way to overlay text on a YUV image: manipulating pixels by hand》

Using MediaCodec from native C code in the NDK (the demo is built directly into an executable with the NDK; push it to a device and it runs there):

NdkMediaCodec is Android's framework-level wrapper around the native MediaCodec API; its source lives at frameworks/av/media/ndk/NdkMediaCodec.cpp.

The corresponding library is /system/lib64/libmediandk.so.
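
Before the full demo, the shape of the API is worth summarizing: create an encoder by MIME type, describe the stream with an AMediaFormat, configure and start the codec, then loop feeding input buffers and draining output buffers. A minimal sketch of that lifecycle is below (error checks and the buffer loop are omitted; the stop/delete teardown at the end is not in the demo that follows and is added here for completeness):

//Sketch: minimal AMediaCodec H.264 encoder lifecycle
#include "media/NdkMediaCodec.h"

static void encoderLifecycleSketch() {
    AMediaCodec *codec = AMediaCodec_createEncoderByType("video/avc"); // H.264
    AMediaFormat *fmt = AMediaFormat_new();
    AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, "video/avc");
    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_WIDTH, 1920);
    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_HEIGHT, 1080);
    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_COLOR_FORMAT, 19); // OMX_COLOR_FormatYUV420Planar
    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_BIT_RATE, 4000 * 1000);
    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_FRAME_RATE, 25);
    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, 5);

    AMediaCodec_configure(codec, fmt, NULL, NULL, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
    AMediaCodec_start(codec);

    // ... queue YUV input buffers and drain H.264 output buffers here (full demo below) ...

    AMediaCodec_stop(codec);
    AMediaCodec_delete(codec);
    AMediaFormat_delete(fmt);
}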

//canok 20210316 
//NdkMediacodec.cpp
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <pthread.h>
#include <unistd.h>
#include <sys/time.h>   // gettimeofday
#include "media/NdkMediaCodec.h"

#define LOGD printf
bool bRun = true;
AMediaCodec* pMediaCodec;
AMediaFormat *format ;
FILE *fp_in = NULL;
FILE *fp_out = NULL;
int mW = 1920;
int mH=1080;
int64_t getNowUs(){
    timeval tv;
    gettimeofday(&tv, 0);
    return (int64_t)tv.tv_sec * 1000000 + (int64_t)tv.tv_usec;
}

void *run(void*pram){

    // Raw YUV420p input; the file is replayed from the start when it runs out
    fp_in = fopen("/storage/emulated/0/canok/1080p60.yuv", "rb");
    if (NULL == fp_in) {
        LOGD("[%s][%d]fopen error, no input file!\n", __FUNCTION__, __LINE__);
        exit(-1);
    }
    fp_out = fopen("/storage/emulated/0/canok/out_1080p60.h264", "wb");
    if (NULL == fp_out) {
        LOGD("[%s][%d]fopen error\n", __FUNCTION__, __LINE__);
        exit(-1);
    }

    // Reference: https://github.com/android/ndk-samples/blob/main/native-codec/app/src/main/cpp/native-codec-jni.cpp
    // Create the encoder by MIME type: "video/avc" is H.264
    pMediaCodec = AMediaCodec_createEncoderByType("video/avc");
    if (pMediaCodec == NULL) {
        LOGD("createEncoder error[%s%d]\n", __FUNCTION__, __LINE__);
    }
    format = AMediaFormat_new();

    AMediaFormat_setString(format, "mime", "video/avc");
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, mW);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, mH);
    // Color format constants are defined in OMX_IVCommon.h:
    // https://www.androidos.net.cn/android/9.0.0_r8/xref/frameworks/native/headers/media_plugin/media/openmax/OMX_IVCommon.h
    //AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, OMX_COLOR_FormatYUV420Planar);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, 19); // 19 == OMX_COLOR_FormatYUV420Planar
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_FRAME_RATE, 25);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, 4000 * 1000);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, 5);

    LOGD("[%s][%s][%d]format:%s\n", __FUNCTION__, __DATE__, __LINE__, AMediaFormat_toString(format));

    // Configure with the format: pass AMEDIACODEC_CONFIGURE_FLAG_ENCODE when encoding, 0 when decoding
    media_status_t status = AMediaCodec_configure(pMediaCodec, format, NULL, NULL, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
    if (status != 0) {
        LOGD("config error %d\n", status);
        return NULL;
    }
    status = AMediaCodec_start(pMediaCodec);
    if (status != 0) {
        LOGD("start error %d\n", status);
        return NULL;
    }
    int in_frame=0;
    int out_frame=0;
    int64_t lastOuttime=0;
    while(bRun){
        // Dequeue an input buffer (0 timeout: non-blocking)
        ssize_t bufidx = AMediaCodec_dequeueInputBuffer(pMediaCodec, 0);
        //LOGD("input buffer %zd\n", bufidx);
        if (bufidx >= 0) {
            size_t bufsize;
            int64_t pts = getNowUs();
            uint8_t *buf = AMediaCodec_getInputBuffer(pMediaCodec, bufidx, &bufsize);
            // Fill it with one frame of YUV420p data; rewind the input file when it runs out
            size_t frameLenYuv = (size_t)mW * mH * 3 / 2;
            if (fread(buf, 1, frameLenYuv, fp_in) < frameLenYuv) {
                fseek(fp_in, 0, SEEK_SET);
                fread(buf, 1, frameLenYuv, fp_in);
            }
            // Queue the filled buffer back to the encoder
            LOGD("in[%d] pts:%lld\n", in_frame++, (long long)pts);
            AMediaCodec_queueInputBuffer(pMediaCodec, bufidx, 0, frameLenYuv, pts, 0);
        }

        AMediaCodecBufferInfo info;
        // Dequeue an output buffer (0 timeout: non-blocking)
        auto outindex = AMediaCodec_dequeueOutputBuffer(pMediaCodec, &info, 0);
        if (outindex >= 0) {
            // Take the encoded data here, then release the buffer back to the encoder
            size_t outsize;
            uint8_t *buf = AMediaCodec_getOutputBuffer(pMediaCodec, outindex, &outsize);
            fwrite(buf, 1, info.size, fp_out);
            if (1) {
                AMediaFormat *format2 = AMediaCodec_getOutputFormat(pMediaCodec);
                int32_t frameRate, w, h;
                AMediaFormat_getInt32(format2, AMEDIAFORMAT_KEY_FRAME_RATE, &frameRate);
                AMediaFormat_getInt32(format2, AMEDIAFORMAT_KEY_WIDTH, &w);
                AMediaFormat_getInt32(format2, AMEDIAFORMAT_KEY_HEIGHT, &h);
                LOGD("out[%d] pts %lld %dx%d@%d %d\n", out_frame++, (long long)info.presentationTimeUs, w, h, frameRate, info.size);
                int64_t nowtime = getNowUs();
                LOGD("frame stay time:%lld, out_gap:%lld\n", (long long)(nowtime - info.presentationTimeUs), (long long)(nowtime - lastOuttime));
                lastOuttime = nowtime;
                AMediaFormat_delete(format2); // getOutputFormat returns a copy owned by the caller
            }
            AMediaCodec_releaseOutputBuffer(pMediaCodec, outindex, false);
        }
    }
    return NULL;
}
int main(int argc, const char*argv[]){
    int ret =0;
    pthread_t pid;
    if((ret=pthread_create(&pid,NULL,run,NULL)) !=0 ){
        LOGD("thread_create err\n");
        return -1;
    }
    while(1){
        usleep(1000*1000);
    }
}
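
One thing the demo glosses over: AMediaCodec_dequeueOutputBuffer can also return status codes instead of a buffer index, and the first output buffer of an H.264 encode usually carries the SPS/PPS headers (flagged with AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG) rather than a picture. A sketch of a more defensive drain step is shown below; the extra branches are additions for illustration, not part of the original demo, and the sketch assumes it lives in the same file so LOGD and the headers above are available.

//Sketch: one drain pass with the dequeueOutputBuffer status codes handled
void drainOnce(AMediaCodec *codec, FILE *fp) {
    AMediaCodecBufferInfo info;
    ssize_t idx = AMediaCodec_dequeueOutputBuffer(codec, &info, 0);
    if (idx >= 0) {
        size_t outsize;
        uint8_t *buf = AMediaCodec_getOutputBuffer(codec, idx, &outsize);
        if (info.flags & AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG) {
            // SPS/PPS config data; for a raw .h264 stream it can simply be written out as well
        }
        fwrite(buf + info.offset, 1, info.size, fp);
        AMediaCodec_releaseOutputBuffer(codec, idx, false);
    } else if (idx == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
        AMediaFormat *fmt = AMediaCodec_getOutputFormat(codec);
        LOGD("output format changed: %s\n", AMediaFormat_toString(fmt));
        AMediaFormat_delete(fmt);
    } else if (idx == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) {
        // Only meaningful for the legacy buffer API; nothing to do here
    } else if (idx == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
        // No output ready yet
    }
}

The Android.mk that builds the demo into a standalone executable: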
#Android.mk
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE:= codec_demo
LOCAL_SRC_FILES := NdkMediacodec.cpp
#LOCAL_SHARED_LIBRARIES := libandroid libmediandk liblog
LOCAL_LDLIBS := -lmediandk
LOCAL_LDFLAGS += -pie -fPIE
include $(BUILD_EXECUTABLE)

The executable produced by the Android Studio NDK build ends up at /app/build/intermediates/ndkBuild/debug/obj/local/arm64-v8a/codec_demo.
