
Android FFmpeg H.264 encoding: screen recording on Android with FFmpeg + H.264 + FLV, built on macOS (Part 2)


Continuing from the previous article.

activity_flv.xml

<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context=".FlvActivity">

    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:orientation="horizontal">

        <Button
            android:id="@+id/take_button"
            android:layout_width="wrap_content"
            android:layout_height="match_parent"
            android:text="open"/>

        <SurfaceView
            android:id="@+id/surfaceView1"
            android:layout_width="0dp"
            android:layout_height="match_parent"
            android:layout_weight="1"/>

    </LinearLayout>

</RelativeLayout>
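The layout's tools:context points at FlvActivity, and the JNI symbols in the native file further down fix the Java-side package, class and method signatures. Here is a minimal sketch of those declarations; the package name is derived from the JNI symbols, while the library name passed to System.loadLibrary() is an assumption and must match whatever name the NDK build actually produces:

package csupport.lyjq.com.csupport;

import android.app.Activity;

public class FlvActivity extends Activity {

    static {
        // Assumption: the native module is named "csupport"; substitute the real NDK module name.
        System.loadLibrary("csupport");
    }

    // Signatures implied by the JNI exports in the native source below
    public native int initial(int width, int height); // set up H.264 encoder + FLV muxer, write header
    public native int encode(byte[] yuv);             // encode one NV21 frame and mux it
    public native int flush();                        // drain delayed frames, write the FLV trailer
    public native int close();                        // release the encoder and output context
}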

Finally, to keep it short, here is just a screenshot of build.gradle:

[screenshot: build.gradle]

I forgot to include the JNI file earlier, so here it is:

#include <stdio.h>
#include <string.h>
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/time.h"

#ifdef ANDROID
#include <jni.h>
#include <android/log.h>
#define LOGE(format, ...)  __android_log_print(ANDROID_LOG_ERROR, "(>_<)", format, ##__VA_ARGS__)
#define LOGI(format, ...)  __android_log_print(ANDROID_LOG_INFO, "(=_=)", format, ##__VA_ARGS__)
#else
#define LOGE(format, ...)  printf("(>_<) " format "\n", ##__VA_ARGS__)
#define LOGI(format, ...)  printf("(^_^) " format "\n", ##__VA_ARGS__)
#endif

AVFormatContext *ofmt_ctx;
AVStream *video_st;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVPacket enc_pkt;
AVFrame *pFrameYUV;

int framecnt = 0;
int yuv_width;
int yuv_height;
int y_length;
int uv_length;
int64_t start_time;

//Output FFmpeg's av_log()
void custom_log(void *ptr, int level, const char *fmt, va_list vl){
    FILE *fp = fopen("/storage/emulated/0/av_log.txt", "a+");
    if(fp){
        vfprintf(fp, fmt, vl);
        fflush(fp);
        fclose(fp);
    }
}

JNIEXPORT jint JNICALL Java_csupport_lyjq_com_csupport_FlvActivity_initial
  (JNIEnv *env, jobject obj, jint width, jint height)
{
    const char *out_path = "/storage/emulated/0/testffmpeg.flv";

    yuv_width = width;
    yuv_height = height;
    y_length = width * height;
    uv_length = width * height / 4;

    //FFmpeg av_log() callback
    av_log_set_callback(custom_log);

    av_register_all();

    //output initialize
    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
    //output encoder initialize
    pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if(!pCodec){
        LOGE("Can not find encoder!\n");
        return -1;
    }
    pCodecCtx = avcodec_alloc_context3(pCodec);
    pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
    pCodecCtx->width = width;
    pCodecCtx->height = height;
    pCodecCtx->time_base.num = 1;
    pCodecCtx->time_base.den = 30;
    pCodecCtx->bit_rate = 800000;
    pCodecCtx->gop_size = 300;
    /* Some formats want stream headers to be separate. */
    if(ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
        pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;

    //H264 codec param
    //pCodecCtx->me_range = 16;
    //pCodecCtx->max_qdiff = 4;
    //pCodecCtx->qcompress = 0.6;
    pCodecCtx->qmin = 10;
    pCodecCtx->qmax = 51;
    //Optional Param
    pCodecCtx->max_b_frames = 3;

    // Set H264 preset and tune
    AVDictionary *param = 0;
    av_dict_set(&param, "preset", "ultrafast", 0);
    av_dict_set(&param, "tune", "zerolatency", 0);

    if(avcodec_open2(pCodecCtx, pCodec, &param) < 0){
        LOGE("Failed to open encoder!\n");
        return -1;
    }

    //Add a new stream to output, should be called by the user before avformat_write_header() for muxing
    video_st = avformat_new_stream(ofmt_ctx, pCodec);
    if(video_st == NULL){
        return -1;
    }
    video_st->time_base.num = 1;
    video_st->time_base.den = 30;
    video_st->codec = pCodecCtx;

    //Open output URL, set before avformat_write_header() for muxing
    if(avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE) < 0){
        LOGE("Failed to open output file!\n");
        return -1;
    }

    //Write File Header
    avformat_write_header(ofmt_ctx, NULL);

    start_time = av_gettime();
    return 0;
}

JNIEXPORT jint JNICALL Java_csupport_lyjq_com_csupport_FlvActivity_encode
  (JNIEnv *env, jobject obj, jbyteArray yuv)
{
    int ret;
    int enc_got_frame = 0;
    int i = 0;

    pFrameYUV = av_frame_alloc();
    uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
    avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);

    //Android camera frames arrive as NV21; convert to YUV420P here
    jbyte *in = (jbyte *)(*env)->GetByteArrayElements(env, yuv, 0);
    memcpy(pFrameYUV->data[0], in, y_length);
    for(i = 0; i < uv_length; i++)
    {
        *(pFrameYUV->data[2] + i) = *(in + y_length + i * 2);
        *(pFrameYUV->data[1] + i) = *(in + y_length + i * 2 + 1);
    }

    pFrameYUV->format = AV_PIX_FMT_YUV420P;
    pFrameYUV->width = yuv_width;
    pFrameYUV->height = yuv_height;

    enc_pkt.data = NULL;
    enc_pkt.size = 0;
    av_init_packet(&enc_pkt);
    ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
    av_frame_free(&pFrameYUV);

    if(enc_got_frame == 1){
        LOGI("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);
        framecnt++;
        enc_pkt.stream_index = video_st->index;

        //Write PTS
        AVRational time_base = ofmt_ctx->streams[0]->time_base; //{ 1, 1000 };
        AVRational r_framerate1 = {60, 2}; //{ 50, 2 };
        AVRational time_base_q = {1, AV_TIME_BASE};
        //Duration between 2 frames (us)
        int64_t calc_duration = (double)(AV_TIME_BASE) * (1 / av_q2d(r_framerate1)); //internal timestamp
        //Parameters
        //enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
        enc_pkt.pts = av_rescale_q(framecnt * calc_duration, time_base_q, time_base);
        enc_pkt.dts = enc_pkt.pts;
        enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
        enc_pkt.pos = -1;

        //Delay
        int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
        int64_t now_time = av_gettime() - start_time;
        if(pts_time > now_time)
            av_usleep(pts_time - now_time);

        ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
        av_free_packet(&enc_pkt);
    }
    return 0;
}

JNIEXPORT jint JNICALL Java_csupport_lyjq_com_csupport_FlvActivity_flush
  (JNIEnv *env, jobject obj)
{
    int ret;
    int got_frame;
    AVPacket enc_pkt;
    if(!(ofmt_ctx->streams[0]->codec->codec->capabilities & CODEC_CAP_DELAY))
        return 0;
    while(1) {
        enc_pkt.data = NULL;
        enc_pkt.size = 0;
        av_init_packet(&enc_pkt);
        ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt, NULL, &got_frame);
        if(ret < 0)
            break;
        if(!got_frame){
            ret = 0;
            break;
        }
        LOGI("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);

        //Write PTS
        AVRational time_base = ofmt_ctx->streams[0]->time_base; //{ 1, 1000 };
        AVRational r_framerate1 = {60, 2};
        AVRational time_base_q = {1, AV_TIME_BASE};
        //Duration between 2 frames (us)
        int64_t calc_duration = (double)(AV_TIME_BASE) * (1 / av_q2d(r_framerate1)); //internal timestamp
        //Parameters
        enc_pkt.pts = av_rescale_q(framecnt * calc_duration, time_base_q, time_base);
        enc_pkt.dts = enc_pkt.pts;
        enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
        //Convert PTS/DTS
        enc_pkt.pos = -1;
        framecnt++;
        ofmt_ctx->duration = enc_pkt.duration * framecnt;

        /* mux encoded frame */
        ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
        if(ret < 0)
            break;
    }
    //Write file trailer
    av_write_trailer(ofmt_ctx);
    return 0;
}

JNIEXPORT jint JNICALL Java_csupport_lyjq_com_csupport_FlvActivity_close
  (JNIEnv *env, jobject obj)
{
    if(video_st)
        avcodec_close(video_st->codec);
    avio_close(ofmt_ctx->pb);
    avformat_free_context(ofmt_ctx);
    return 0;
}
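The call order matters: initial() must run once before the first frame, every captured frame goes through encode(), and flush() has to run before close() so that delayed frames and the FLV trailer actually get written. A rough usage sketch, assuming frames arrive as NV21 byte arrays (for example from a Camera.PreviewCallback, which matches the NV21-to-YUV420P conversion inside encode()); mCamera, previewWidth and previewHeight are placeholders:

// Hypothetical Java-side wiring; not part of the original article
initial(previewWidth, previewHeight);      // once, before the first frame

mCamera.setPreviewCallback(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        encode(data);                      // one NV21 frame per callback
    }
});

// when recording stops:
flush();                                   // drain the encoder, write the FLV trailer
close();                                   // free codec and muxer contexts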

Result: the generated FLV file ends up in the root directory of the SD card.

[screenshot: playback of the generated FLV file]

Happy ending


