《NDK中使用MediaCodec解码》
《android mediacodec 编码demo(java)》
《NDK中使用mediacodec编码h264》
《Android native 层使用opengl渲染YUV420p和NV12》
《android 使用NativeWindow渲染RGB视频》
《opengl 叠加显示文字》
《android studio 编译freeType》
《最原始的yuv图像叠加文字的实现--手动操作像素》
常见的做法是在 Java 层使用 MediaCodec 解码，给 MediaCodec 绑定 Surface 直接解码并渲染。为了配合 ffmpeg，现在想在 native 层用 NDK 的 MediaCodec 接口做硬解码，解码后直接输出 YUV 数据——只是单纯地需要一个解码器，不绑定 Surface。网上搜出来的文章很少有浅显易懂、直接可用的，往往上来就讲那张 MediaCodec 官方架构图。我想，既然是 demo，就应该让新手直接可用，没有过多的架构设计，最好一个 main 函数到底。自己折腾了半天终于弄出一个可用的 demo：在 Android Studio 上直接 build 出可执行程序，生成的文件在 app/build/intermediates/ndkBuild/debug/obj/local/arm64-v8a/codec_demo。该程序从一个 H.264 裸流文件中循环读取 H.264 帧，调用 MediaCodec 解码得到 NV21 的 YUV 输出，然后写到输出文件中。记录如下：
android studio上使用Android.mk : 需要在app::build.gradle 中的 android->defaultconfig中添加
ndk{
abiFilters "arm64-v8a"
}
android->下添加:
externalNativeBuild{
ndkBuild{
path file("src/main/jni/Android.mk") //里面是我们的Android.mk文件路径
}
}
用ndk可以把以下三个文件拷贝到一个名称为jni的目录下面,然后ndk-build(为什么要放到jni目录下,这个就得问ndk-build工具了),可以得到 codec_demo 可执行程序,push到设备上可以直接运行(前提,要放置好 h264源文件,没有纯粹的h264裸文件,可以使用ffmpeg 命令从MP4文件中抽取),部分源代码借鉴于 谷歌官方demo https://github.com/android/ndk-samples/blob/main/native-codec/app/src/main/cpp/native-codec-jni.cpp
在真机上运行 该demo 程序:
放上原代码:
//canok 20210123
//NdkMediacodec.cpp
// NOTE(review): the four bare "#include" lines below lost their header names
// in the HTML export; reconstructed from the APIs used in this file
// (printf, fopen, pthread_create, usleep, clock_gettime) — TODO confirm
// against the original post.
#include <stdio.h>
#include <stdlib.h>
#include <pthread.h>
#include <unistd.h>
#include <time.h>
#include "media/NdkMediaCodec.h"
#include "media/NdkMediaFormat.h"
// Including the .cpp directly pulls init()/getOneNal() into this translation
// unit — quick-and-dirty for a single-file demo; a header would be cleaner.
#include "geth264Frame.cpp"

#define LOGD printf
// --- demo-wide state (single worker thread, so plain globals suffice) ---
bool bRun = true;           // decode loop keeps running while true
AMediaCodec* pMediaCodec;   // NDK decoder instance
AMediaFormat *format;       // input format handed to the decoder
FILE *fp_out = NULL;        // destination file for the raw YUV output

// Monotonic wall-clock in nanoseconds (CLOCK_MONOTONIC — immune to NTP jumps).
int64_t getTimeNsec() {
    struct timespec ts;
    clock_gettime(CLOCK_MONOTONIC, &ts);
    return (int64_t)ts.tv_sec * 1000 * 1000 * 1000 + ts.tv_nsec;
}
// Monotonic clock, whole seconds.
int64_t getTimeSec() {
    struct timespec ts;
    clock_gettime(CLOCK_MONOTONIC, &ts);
    return (int64_t)ts.tv_sec;
}
// Monotonic clock in milliseconds.
// BUG FIX: widen tv_sec to 64 bits *before* the multiply — the original
// computed now.tv_sec*1000 in time_t width, which overflows where time_t
// is 32-bit (CLOCK_MONOTONIC counts from boot, so ~24.8 days of uptime).
int64_t getTimeMsec() {
    struct timespec ts;
    clock_gettime(CLOCK_MONOTONIC, &ts);
    return (int64_t)ts.tv_sec * 1000 + ts.tv_nsec / (1000 * 1000);
}
// Monotonic clock in microseconds (used as a PTS tag by the decoder loop).
// BUG FIX: widen tv_sec to 64 bits *before* the multiply — the original
// computed now.tv_sec*1000*1000 in time_t width, which overflows a 32-bit
// time_t after roughly 35 minutes of uptime.
int64_t getTimeUsec() {
    struct timespec ts;
    clock_gettime(CLOCK_MONOTONIC, &ts);
    return (int64_t)ts.tv_sec * 1000 * 1000 + ts.tv_nsec / 1000;
}
int firstFrames =0;
void *run(void*pram){if(fp_out &#61;&#61;NULL){fp_out &#61; fopen("/storage/emulated/0/canok/yuv.data","w&#43;");if(fp_out&#61;&#61;NULL){LOGD("fopen erro!\n");return NULL;}}init("/storage/emulated/0/canok/test.h264");//https://github.com/android/ndk-samples/blob/main/native-codec/app/src/main/cpp/native-codec-jni.cpp//decode//这里设定名称// pMediaCodec &#61; AMediaCodec_createCodecByName("video/avc");//h264pMediaCodec &#61; AMediaCodec_createDecoderByType("video/avc");//h264format &#61; AMediaFormat_new();AMediaFormat_setString(format, "mime", "video/avc");AMediaFormat_setInt32(format,AMEDIAFORMAT_KEY_WIDTH,672);AMediaFormat_setInt32(format,AMEDIAFORMAT_KEY_HEIGHT,378);LOGD("[%d%s%s]\n",__LINE__,__FUNCTION__,__DATE__);//这里配置media_status_t status &#61; AMediaCodec_configure(pMediaCodec,format,NULL,/*可以在这制定native surface, 直接渲染*/NULL,0);//解码&#xff0c;flags 给0&#xff0c;编码给AMEDIACODEC_CONFIGURE_FLAG_ENCODEif(status!&#61;0){LOGD("erro config %d\n",status);}//启动AMediaCodec_start(pMediaCodec);int outFramecount &#61; 0;int inFramecount &#61;0;while(bRun){//无法做到理想的入一帧&#xff0c;就解码输出该帧。 解码需要参考。现在是多次输入&#xff0c;每一次输入成功都把当前所有已经解码完的取出。//1.0 取空buffer&#xff0c;填充数据&#xff0c;入队ssize_t bufidx &#61; AMediaCodec_dequeueInputBuffer(pMediaCodec,2000);//如果配置错误&#xff0c;比如format中的格式和宽高没有配置&#xff0c;这里get 会出错&#xff08;格式都不知道&#xff0c;怎么知道分配多大空间&#xff1f;&#xff09;&#xff0c;返回错误码&#xff0c;错误码的定义在&#xff1f;&#xff1f;&#xff1f;&#xff1f;//LOGD("input bufidx %d \n",bufidx);if(bufidx>&#61;0){ //当取不到空buffer的时候&#xff0c;有可能是解码慢跟不上输入速度&#xff0c;导致buffer不够用&#xff0c;所以还需要在后面继续取解码后的数据。size_t bufsize;uint8_t *buf&#61; AMediaCodec_getInputBuffer(pMediaCodec,bufidx,&bufsize);//get h264 frame: 并填充到 buf,//LOGD("bufsize %d\n",bufsize);int h264FrameLen &#61; getOneNal(buf,bufsize);if(h264FrameLen<&#61;0){//需要销毁。。。。。。LOGD("get over!!!!!\n");break;}// presentationTimeUs 就是 PTS 如果不要求渲染&#xff0c;这里可以随便。 也可以更具这一个值&#xff0c;来确定每一帧的身份&#xff0c;解码完后的数据里也有这个值。//注意当前例子中&#xff0c;上面 getOneNal 并不是获取到一帧完整数据&#xff0c;有可能是 sps pps, 这种情况就不会有对应的 一帧输出。uint64_t presentationTimeUs 
&#61; getTimeUsec();LOGD("in framecount %d :%lld\n",inFramecount&#43;&#43;,presentationTimeUs);//入队列 给到解码器AMediaCodec_queueInputBuffer(pMediaCodec,bufidx,0,h264FrameLen,presentationTimeUs,0);}//2.0 取输出&#xff0c;拿走数据&#xff0c;归还buffersize_t bufsize;uint8_t *buf&#61;NULL;AMediaCodecBufferInfo info;do{bufidx &#61; AMediaCodec_dequeueOutputBuffer(pMediaCodec, &info, 2000);//取数据&#xff0c;一直到取到解码后的数据//LOGD("out bufidx %d \n",bufidx);if (bufidx >&#61; 0) {int framelen &#61; 0;{int mWidth, mHeight;auto format &#61; AMediaCodec_getOutputFormat(pMediaCodec);AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_WIDTH, &mWidth);AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HEIGHT, &mHeight);int32_t localColorFMT;AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT,&localColorFMT);//framelen &#61; mWidth * mHeight * 1.5; //这里干脆自己算大小了&#xff0c;是不是也应该有一个 键值存储可以直接来获取&#xff1f;framelen &#61; info.size;LOGD("out: outFramecount %d %lld ", outFramecount,info.presentationTimeUs);LOGD("out:[%d]X[%d]%d,%d ", mWidth, mHeight, localColorFMT,framelen); //21 &#61;&#61; nv21格式 具体的定义在哪里&#xff1f;&#xff1f;&#xff1f;}//在这里取走解码后的数据&#xff0c;//然后释放buffer给解码器。buf &#61; AMediaCodec_getOutputBuffer(pMediaCodec, bufidx, &bufsize);LOGD("%d[%ld:%ld]out data:%d \n", outFramecount&#43;&#43;, getTimeSec(), getTimeMsec(),bufsize);//bufsize 并不是有效数据的大小。//fwrite(buf,1,bufsize,fp_out);fwrite(buf, 1, framelen, fp_out);AMediaCodec_releaseOutputBuffer(pMediaCodec, bufidx, false);} else if (bufidx &#61;&#61; AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {// 解码输出的格式发生变化int mWidth, mHeight;auto format &#61; AMediaCodec_getOutputFormat(pMediaCodec);AMediaFormat_getInt32(format, "width", &mWidth);AMediaFormat_getInt32(format, "height", &mHeight);int32_t localColorFMT;AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT,&localColorFMT);}else if(bufidx &#61;&#61; AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) {}else {}}while(bufidx>0);// 一直取到没数据&#xff0c;把之前解码完的都取出来}
}
int main(int argc, const char*argv[]){//if(argc !&#61; 4){// LOGD("usage:filename,width,height\n");// return -1;// }int ret &#61;0;pthread_t pid;if((ret&#61;pthread_create(&pid,NULL,run,NULL)) !&#61;0 ){LOGD("thread_create err\n");return -1;}while(1){usleep(1000*1000);}}
/******20190828 canok*** output: complete frames**/
//geth264Frame.cpp 用来从h264文件中循环读取 帧数据
// NOTE(review): header names were stripped by the HTML export; reconstructed
// from usage below (printf/fopen, malloc/free, memcmp, uint8_t) — TODO
// confirm against the original post.
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
// H.264 NAL unit types (Rec. ITU-T H.264, Table 7-1).
// BUG FIX: NALU_TYPE_SLICE (non-IDR coded slice, value 1) was missing even
// though checkNal() below switches on it — the file could not compile.
#define NALU_TYPE_SLICE 1
#define NALU_TYPE_DPA 2
#define NALU_TYPE_DPB 3
#define NALU_TYPE_DPC 4
#define NALU_TYPE_IDR 5
#define NALU_TYPE_SEI 6
#define NALU_TYPE_SPS 7
#define NALU_TYPE_PPS 8
#define NALU_TYPE_AUD 9
#define NALU_TYPE_EOSEQ 10
#define NALU_TYPE_EOSTREAM 11
#define NALU_TYPE_FILL 12

// Read-ahead cache size. Must exceed the largest NAL in the stream —
// an oversized frame wraps around and gets partially overwritten.
#define CACH_LEN (1024*1000)
// Double-buffered read cache over the input file, plus parse-cursor state.
static uint8_t *g_cach[2] = {NULL, NULL};  // two CACH_LEN-byte buffers
static FILE *fp_inH264 = NULL;             // source .h264 elementary stream
static int icach = 0;                      // index of the active cache buffer
static int ioffset = 0;                    // parse offset inside the active buffer
static int bLoop = 1;                      // rewind the file at EOF when non-zero
static bool bInit = false;                 // one-shot init guard
// One-shot initialisation: allocates the two CACH_LEN read buffers and opens
// the hard-coded input stream /storage/emulated/0/canok/test.h264.
// Returns 0 on success (or if already initialised), -1 if the file can't open.
// NOTE(review): this listing is garbled by the HTML export ('=' appears as
// &#61;) and the final fread(...) is truncated mid-expression — the tail of
// the function is missing from this copy. Recover it from the original post
// before compiling.
static int init()
{if(bInit){return 0;}else{bInit &#61; true;}if(g_cach[0] &#61;&#61; NULL){g_cach[0] &#61; (uint8_t*)malloc(CACH_LEN);}if(g_cach[1] &#61;&#61; NULL){g_cach[1] &#61; (uint8_t*)malloc(CACH_LEN);}if(fp_inH264 &#61;&#61; NULL){//fp_inH264 &#61; fopen("./live555.video","r");fp_inH264 &#61; fopen("/storage/emulated/0/canok/test.h264","r");if(fp_inH264 &#61;&#61; NULL){printf("fope erro [%d%s]\n",__LINE__,__FUNCTION__);return -1;}}if(fread(g_cach[icach], 1,CACH_LEN,fp_inH264 )
// Release everything init() acquired. Safe to call more than once.
// (Note: bInit is intentionally left untouched, matching the original.)
static int deinit()
{
    free(g_cach[0]);        // free(NULL) is a no-op, so no guard needed
    g_cach[0] = NULL;
    free(g_cach[1]);
    g_cach[1] = NULL;
    if (fp_inH264) {        // fclose(NULL) would be UB — keep the guard
        fclose(fp_inH264);
        fp_inH264 = NULL;
    }
    return 0;
}
static int I_count &#61;0;
static int PB_count &#61; 0;
static int All_count &#61; 0;
static int SPS_count &#61;0;
static int PPS_count &#61;0;
static int AUD_count &#61;0;//分隔符
static int checkNal(uint8_t nalHeader)
{All_count &#43;&#43;;char type &#61; nalHeader & ((1<<5)-1);switch(type){case NALU_TYPE_SPS:PPS_count &#43;&#43;;printf("sps\n");break;case NALU_TYPE_PPS:SPS_count &#43;&#43;;printf("pps\n");break;case NALU_TYPE_IDR:I_count &#43;&#43;;printf("I slice !!!!!!!!!!!!!!\n");break;case NALU_TYPE_SLICE:PB_count &#43;&#43;;printf("B/P slice\n");break;case NALU_TYPE_AUD:// 结束符&#xff0c;没有实际数据AUD_count &#43;&#43;;printf("Delimiter&#61;&#61;&#61;&#61;&#61;&#61;&#61;&#61;&#61;&#61;\n");break;default:printf("type :%d\n",type);}return type;
}
// Return non-zero iff a 4-byte Annex-B start code (00 00 00 01) begins at
// buffer[offset]. Caller guarantees at least 4 readable bytes at the offset.
static int checkFlag(uint8_t *buffer, int offset)
{
    static uint8_t startCode[4] = {0x00, 0x00, 0x00, 0x01};
    return memcmp(buffer + offset, startCode, sizeof(startCode)) == 0;
}
// Fetch one NAL unit into buf; bufLen is the capacity of the caller's buffer.
// Returns the actual byte length of the NAL (start code included).
// Scans the active cache buffer for the next 4-byte start code to find the
// end of the NAL that begins at ioffset.
// NOTE(review): the body below is cut off mid-statement ("if(bufLen") in this
// copy — the remainder (bounds check, copy into buf, cache refill/rotation)
// is missing and must be restored from the original post. '=' also appears
// garbled as &#61; throughout.
static int getOneNal(uint8_t *buf, int bufLen)
{if(!bInit){init();}int i &#61;0;int startpoint &#61; ioffset;int endpoint &#61; ioffset;for (i &#61; ioffset&#43;4; i <&#61; CACH_LEN - 4; i&#43;&#43;) {if (checkFlag(g_cach[icach], i)){startpoint &#61; ioffset;endpoint &#61; i;break;}}if(endpoint - startpoint > 0){int dataLen &#61; endpoint -startpoint;if(bufLen
// NOTE(review): this brace-block is the stand-alone test driver of
// geth264Frame.cpp — it reads every NAL, logs its type, re-writes the stream
// to out.h264 and prints the per-type counters. Its opening line (presumably
// an "#if"-guarded "int main()" — the dangling "#endif" below closes that
// missing conditional) was lost in the export; restore it before compiling.
{if(init()){return -1;}uint8_t *buffer &#61; (uint8_t*)malloc(CACH_LEN);int len &#61;0;FILE *fp_out &#61; fopen("out.h264","w&#43;");while((len &#61; getOneNal(buffer,CACH_LEN) )> 0){printf("get a Nal len:%8d-----",len);checkNal(buffer[4]);fwrite(buffer,1,len,fp_out);}fclose(fp_out);free(buffer);deinit();printf("All_count %d\n",All_count);printf("I_count %d\n",I_count);printf("PB_count %d\n",PB_count);printf("AUD_count %d\n",AUD_count);printf("SPS_count %d\n",SPS_count);printf("PPS_count %d\n",PPS_count);
}
#endif
//Android.mk
LOCAL_PATH :&#61; $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE:&#61; codec_demo
LOCAL_SRC_FILES :&#61; NdkMediacodec.cpp
#LOCAL_SHARED_LIBRARIES :&#61; libandroid libmediandk liblog //android studio 上这样不行&#xff1f;&#xff1f;&#xff1f;提示没有定义的模块&#xff1f; 垃圾&#xff01;&#xff01;&#xff01;&#xff01;&#xff01; 听说要指定为 platform 21.................
LOCAL_LDLIBS :&#61; -lmediandk
LOCAL_LDFLAGS &#43;&#61; -pie -fPIE
include $(BUILD_EXECUTABLE)