/*** ref. 雷霄骅 Lei Xiaohua*/
#include <stdio.h>
#include <string.h>
#include <winsock2.h>
#pragma comment(lib,"ws2_32.lib")
#define __STDC_CONSTANT_MACROS
extern "C"
{
#include "include/libavcodec/avcodec.h"
#include "include/libavformat/avformat.h"
#include "include/libswscale/swscale.h"
#include "include/libavdevice/avdevice.h"
#include "include/libavutil/imgutils.h"
#include "include/libavutil/opt.h"
#include "include/libavutil/imgutils.h"
//#include "includes/SDL.h"
};  /* end extern "C" */

/*
 * Placeholder send hook. The capture loop in main() calls sendto() directly,
 * so this stub simply reports success without touching `buf`/`size`.
 */
int Send(uint8_t *buf, int size)
{
    return 0;
}
struct sockaddr_in G_Servaddr;int main(int argc, char* argv[])
{AVFormatContext *pFormatContext &#61; NULL;int i &#61; 0, videoindex;AVCodecContext *pCodecContext &#61; NULL;AVCodec *pCodec &#61; NULL;FILE *fp_out;fp_out &#61; fopen("ds.h264", "wb");AVPacket pPacket; int y_size;FILE *fp_yuv &#61; fopen("output.yuv", "wb&#43;");struct SwsContext *img_convert_ctx;AVFormatContext *pFormatContextEncod &#61; NULL;AVCodecContext *pCodecContextEncod &#61; NULL;AVCodec *pCodecEncod &#61; NULL;AVCodecID codec_id &#61; AV_CODEC_ID_H264;int in_w &#61; 1920, in_h &#61; 1080;int framenum &#61; 100;int ret, got_output, xy_size, got_picture;/*socket--------------------------------*/WSADATA wsa; if (WSAStartup(MAKEWORD(2, 2), &wsa) !&#61; 0){printf("WSAStartup failed!\n");return 1;}int connfd;//socklen_t addrlen(0);SOCKET ServerS &#61; socket(AF_INET, SOCK_DGRAM, 0);int WSAStartup(WORD wVersionRequested,LPWSADATA lpWSAData);SOCKADDR_IN DistAddr;DistAddr.sin_family &#61; AF_INET;DistAddr.sin_port &#61; htons(8800);DistAddr.sin_addr.s_addr &#61; inet_addr("127.0.0.1"); //inet_addr("192.168.23.232");if (DistAddr.sin_addr.s_addr &#61;&#61; INADDR_NONE){printf("不可用地址!\n");return -1;}int time_out &#61; 2000;//ret &#61; setsockopt(ServerS, SOL_SOCKET, SO_RCVTIMEO, (char*)&time_out, sizeof(time_out));char buf[150000]; //1500 bytes/*socket--------------------------------*/av_register_all();avformat_network_init();pFormatContext &#61; avformat_alloc_context();avdevice_register_all();AVDictionary* options &#61; NULL;av_dict_set(&options, "video_size", "1920*1080", 0);//设定捕捉范围av_dict_set(&options, "framerate", "25", 0);AVInputFormat *ifmt &#61; av_find_input_format("gdigrab");if (avformat_open_input(&pFormatContext, "desktop", ifmt, &options) !&#61; 0){ printf("Couldn&#39;t open input stream.\n"); return -1; }if (avformat_find_stream_info(pFormatContext, NULL) <0)//找到流{ printf("Couldn&#39;t find stream information.\n"); return -1; }videoindex &#61; -1;for (i &#61; 0; i nb_streams; i&#43;&#43;){if (pFormatContext->streams[i]->codec->codec_type &#61;&#61; 
AVMEDIA_TYPE_VIDEO)//流的数目计数{videoindex &#61; i; break;}}if (videoindex &#61;&#61; -1){ printf("Didn&#39;t find a video stream.\n"); return -1; }av_dump_format(pFormatContext, 0, 0, 0);pCodecContext &#61; pFormatContext->streams[videoindex]->codec;//编码器参数直接取数据流的参数&#xff08;桌面流pCodec &#61; avcodec_find_decoder(pCodecContext->codec_id);//解码器 BMP解码if (pCodec &#61;&#61; NULL){printf("Codec not found.\n");return -1;}if (avcodec_open2(pCodecContext, pCodec, NULL) <0)//在环境中打开这个编码器{printf("Could not open codec.\n");return -1;}AVFrame *pFrame, *pFrameYUV;pFrame &#61; av_frame_alloc();pFrameYUV &#61; av_frame_alloc();uint8_t *out_buffer;AVPacket *packet;out_buffer &#61; (unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecContext->width, pCodecContext->height, 1));av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, out_buffer, AV_PIX_FMT_YUV420P, pCodecContext->width, pCodecContext->height, 1);packet &#61; (AVPacket *)av_malloc(sizeof(AVPacket));printf("--------------- File Information ----------------\n");//手工调试函数&#xff0c;输出tbn、tbc、tbr、PAR、DAR的含义av_dump_format(pFormatContext, 0, 0, 0);初始化输出流上下文。//AVFormatContext * output_format_context_ &#61; NULL;//avformat_alloc_output_context2(&output_format_context_, NULL, "rtsp", "rtsp://127.0.0.1:8800/");//AVDictionary *format_opts &#61; NULL;//av_dict_set(&format_opts, "stimeout", "2000000", 0);//av_dict_set(&format_opts, "rtsp_transport", "tcp", 0);//avformat_write_header(output_format_context_, &format_opts);// encode-----------------------------------pCodecEncod &#61; avcodec_find_encoder(codec_id);if (!pCodecEncod) {printf("Codec not found\n");return -1;}pCodecContextEncod &#61; avcodec_alloc_context3(pCodecEncod);if (!pCodecContextEncod) {printf("Could not allocate video codec context\n");return -1;}pCodecContextEncod->bit_rate &#61; 1000000;/*比特率越高&#xff0c;传送的数据越大,越清晰*/pCodecContextEncod->width &#61; in_w;pCodecContextEncod->height &#61; in_h;pCodecContextEncod->time_base.num &#61; 
1;pCodecContextEncod->time_base.den &#61; 25;pCodecContextEncod->gop_size &#61; 50;pCodecContextEncod->max_b_frames &#61; 1;pCodecContextEncod->pix_fmt &#61; AV_PIX_FMT_YUV420P;/* 关键帧的周期&#xff0c;也就是两个IDR帧之间的距离&#xff0c;一个帧组的最大帧数&#xff0c;一般而言&#xff0c;每一秒视频至少需要使用 1 个关键帧。增加关键帧个数可改善质量&#xff0c;但是同时增加带宽和网络负载。*/av_opt_set(pCodecContextEncod->priv_data, "preset", "slow", 0);if (avcodec_open2(pCodecContextEncod, pCodecEncod, NULL) <0) {printf("Could not open codec\n");return -1;}xy_size &#61; pCodecContextEncod->width * pCodecContextEncod->height;// encode-----------------------------------//&packet &#61; (AVPacket *)av_malloc(sizeof(AVPacket));img_convert_ctx &#61; sws_getContext(pCodecContext->width, pCodecContext->height, pCodecContext->pix_fmt,pCodecContext->width, pCodecContext->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);i &#61; 0;//读取数据while (av_read_frame(pFormatContext, packet) >&#61; 0 && i <200){if (packet->stream_index &#61;&#61; videoindex){av_init_packet(&pPacket);pPacket.data &#61; NULL;pPacket.size &#61; 0;pFrame->pts &#61; i;ret &#61; avcodec_decode_video2(pCodecContext, pFrame, &got_picture, packet);if (ret <0){printf("Decode Error.\n");return -1;}if (got_picture >&#61; 1){//成功解码一帧sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecContext->height,pFrameYUV->data, pFrameYUV->linesize);//转换图像格式y_size &#61; pCodecContext->width*pCodecContext->height;fwrite(pFrameYUV->data[0], 1, y_size, fp_yuv); //Y fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_yuv); //U Cb fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_yuv); //V Cr //printf("Succeed to decode-scale-fwrite 1 frame!\n");pFrameYUV->width &#61; 1920;pFrameYUV->height &#61; 1080;pFrameYUV->format &#61; AV_PIX_FMT_YUV420P;/*encode-----------------------*/ret &#61; avcodec_encode_video2(pCodecContextEncod, &pPacket, pFrameYUV, &got_output);if (ret <0) {printf("Error encoding frame\n");return -1;}if (got_output) {// printf("Succeed to encode frame: 
%5d\tsize:%5d\n", framenum, pPacket.size);framenum&#43;&#43;;fwrite(pPacket.data, 1, pPacket.size, fp_out);/*sendudp----------------------*/memset(buf,0, sizeof(buf));//int a;memcpy(buf, &pPacket.data, pPacket.size);int result &#61; sendto(ServerS, buf, sizeof(pPacket.size)&#43;1, 0, (SOCKADDR *)&DistAddr, sizeof(DistAddr));
// int ret1 &#61; av_interleaved_write_frame(output_format_context_, &pPacket);av_free_packet(&pPacket);}/*sendudp----------------------*//*encode-----------------------*/pFrameYUV->pts&#43;&#43;;i&#43;&#43;;}else{//未解码到一帧&#xff0c;可能时结尾B帧或延迟帧&#xff0c;在后面做flush decoder处理}}av_free_packet(packet);}av_dump_format(pFormatContext, 0, 0, 0);//Flush Decoderwhile (true){if (!(pCodec->capabilities & AV_CODEC_CAP_DELAY))break;ret &#61; avcodec_decode_video2(pCodecContext, pFrame, &got_picture, NULL);if (ret <0){break;}if (!got_picture){break;}sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecContext->height,pFrameYUV->data, pFrameYUV->linesize);y_size &#61; pCodecContext->width*pCodecContext->height;fwrite(pFrameYUV->data[0], 1, y_size, fp_yuv); //Y fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_yuv); //Ufwrite(pFrameYUV->data[2], 1, y_size / 4, fp_yuv); //Vprintf("Flush Decoder: Succeed to decode 1 frame!\n");}//Flush Encoderfor (got_output &#61; 1; got_output; i&#43;&#43;) {ret &#61; avcodec_encode_video2(pCodecContextEncod, &pPacket, NULL, &got_output);if (ret <0) {printf("Error encoding frame\n");return -1;}if (got_output) {printf("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", pPacket.size);fwrite(pPacket.data, 1, pPacket.size, fp_out);//av_write_frame(pFormatCtx, &pkt);av_free_packet(&pPacket);}}closesocket(ServerS);WSACleanup();sws_freeContext(img_convert_ctx);av_frame_free(&pFrameYUV);av_frame_free(&pFrame);avcodec_close(pCodecContext);avformat_close_input(&pFormatContext);fclose(fp_yuv);fclose(fp_out);avcodec_close(pCodecContextEncod);av_free(pCodecEncod);//av_freep(&pFrame->data[0]);//av_frame_free(&pFrame);//std:cin>>i>>endl;scanf("%d",&i);return 0;}