<ruby id="bdb3f"></ruby>

    <p id="bdb3f"><cite id="bdb3f"></cite></p>

      <p id="bdb3f"><cite id="bdb3f"><th id="bdb3f"></th></cite></p><p id="bdb3f"></p>
        <p id="bdb3f"><cite id="bdb3f"></cite></p>

          <pre id="bdb3f"></pre>
          <pre id="bdb3f"><del id="bdb3f"><thead id="bdb3f"></thead></del></pre>

          <ruby id="bdb3f"><mark id="bdb3f"></mark></ruby><ruby id="bdb3f"></ruby>
          <pre id="bdb3f"><pre id="bdb3f"><mark id="bdb3f"></mark></pre></pre><output id="bdb3f"></output><p id="bdb3f"></p><p id="bdb3f"></p>

          <pre id="bdb3f"><del id="bdb3f"><progress id="bdb3f"></progress></del></pre>

                <ruby id="bdb3f"></ruby>

                企業??AI智能體構建引擎,智能編排和調試,一鍵部署,支持知識庫和私有化部署方案 廣告
                [TOC] ### 原理 ***** ``` 原理流程: 采集–>處理–>壓縮編碼–>封裝–>推流–>分發–>流媒體觀看 ``` ### 示例代碼 ***** ``` extern "C" { #include "libavcodec/avcodec.h" #include "libavformat/avformat.h" #include "libswscale/swscale.h" #include "libavdevice/avdevice.h" #include "libavutil/time.h" }; #include <stddef.h> #include <stdint.h> void show_dshow_device() { AVFormatContext *pFmtCtx = avformat_alloc_context(); AVDeviceInfoList *device_info = NULL; AVDictionary* options = NULL; av_dict_set(&options, "list_devices", "true", 0); AVInputFormat *iformat = av_find_input_format("dshow"); printf("Device Info=============\n"); avformat_open_input(&pFmtCtx, "video=dummy", iformat, &options); printf("========================\n"); } int main () { avdevice_register_all(); avformat_network_init(); av_register_all(); //Register Device avdevice_register_all(); avformat_network_init(); //Show Dshow Device show_dshow_device(); char capture_name[100] = {0},device_name[100] ={0}; printf("\nChoose capture device: "); if (gets(capture_name) == 0) { printf("Error in gets()\n"); return -1; } sprintf(device_name, "video=%s", capture_name); AVInputFormat* ifmt = av_find_input_format("dshow");//根據名稱查找鏈表當中的AVInputFormat AVFormatContext * ifmt_ctx = avformat_alloc_context(); //================= //1.avformat_open_input()該函數用于打開多媒體數據 //set own video device name //2.avcodec_open2初始化一個視音頻編解碼器的AVCodecContext //3.vcodec_find_encoder()和avcodec_find_decoder()。 //avcodec_find_encoder()用于查找FFmpeg的編碼器,avcodec_find_decoder()用于查找FFmpeg的解碼器 //================= if (avformat_open_input(&ifmt_ctx,device_name,ifmt,NULL) != 0) { printf("Couldn't open input stream.(無法打開輸入流)\n"); return -1; } //input initialize if (avformat_find_stream_info(ifmt_ctx,NULL) < 0) { //獲取流信息 printf("Couldn't find stream information.(無法獲取流信息)\n"); return -1; } int videoindex = -1; for (int i =0; i < ifmt_ctx->nb_streams; ++i) { if (ifmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) { videoindex = i; break; } } if (videoindex == -1) { printf("Couldn't find a video stream.(沒有找到視頻流)\n"); return -1; } if (avcodec_open2(ifmt_ctx->streams[videoindex]->codec,avcodec_find_decoder(ifmt_ctx->streams[videoindex]->codec->codec_id),NULL) < 0) { printf("Could not open codec.(無法打開解碼器)\n"); return -1; } //============================== //output initialize /*ffmpeg將網絡協議和文件同等看待,同時因為使用RTMP協議進行傳輸,這里我們指定輸出為flv格式,編碼器使用H.264*/ //============================== AVFormatContext* ofmt_ctx = avformat_alloc_context(); AVOutputFormat *ofmt = NULL; const char* out_path = "rtmp://192.168.1.22/live/test"; avformat_alloc_output_context2(&ofmt_ctx,NULL,"flv",out_path); //output encode initialize AVCodec* pCodec = avcodec_find_encoder(AV_CODEC_ID_H264); if (!pCodec) { printf("Can not find encoder! (沒有找到合適的編碼器!)\n"); return -1; } AVCodecContext * pCodecCtx = avcodec_alloc_context3(pCodec); //初始化 pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420Pw; pCodecCtx->width = ifmt_ctx->streams[videoindex]->codec->width; pCodecCtx->height = ifmt_ctx->streams[videoindex]->codec->height; pCodecCtx->time_base.num = 1; pCodecCtx->time_base.den = 25; pCodecCtx->bit_rate = 40000; pCodecCtx->gop_size = 250; ofmt = ofmt_ctx->oformat; if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) { pCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; } pCodecCtx->qmin = 10; pCodecCtx->qmax = 51; pCodecCtx->max_b_frames = 3; AVDictionary * param = 0; av_dict_set(&param,"preset", "fast", 0);//Set the given entry in *pm, overwriting an existing entry(覆蓋現有輸入). av_dict_set(&param,"tune","zerolatency",0); if (avcodec_open2(pCodecCtx,pCodec,&param) < 0) { printf("Failed to open encoder! (編碼器打開失敗!)\n"); return -1; } AVStream* video_st = avformat_new_stream(ofmt_ctx,pCodec);//將新流添加到媒體文件。 if (video_st == NULL) { return -1; } video_st->time_base.num = 1; video_st->time_base.den = 25; video_st->codec = pCodecCtx; if (avio_open(&ofmt_ctx->pb,out_path,AVIO_FLAG_READ_WRITE) < 0) { //創建并初始化AVIOContext以訪問url指示的資源。 printf("Failed to open output file! (輸出文件打開失敗!)\n"); return -1; } int ret; av_dump_format(ofmt_ctx,0,out_path,1); ret = avformat_write_header(ofmt_ctx,NULL); if (ret < 0) { printf( "Error occurred when opening output URL\n"); goto end; } //start push AVPacket pkt; int frame_index=0; int64_t start_time=av_gettime(); while(1) { AVStream *in_stream, *out_stream; //Get an AVPacket ret = av_read_frame(ifmt_ctx, &pkt); if (ret < 0) break; //FIX:No PTS (Example: Raw H.264) //Simple Write PTS if(pkt.pts==AV_NOPTS_VALUE){ //Write PTS AVRational time_base1=ifmt_ctx->streams[videoindex]->time_base; //Duration between 2 frames (us) int64_t calc_duration=(double)AV_TIME_BASE/av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate); //Parameters pkt.pts=(double)(frame_index*calc_duration)/(double)(av_q2d(time_base1)*AV_TIME_BASE); pkt.dts=pkt.pts; pkt.duration=(double)calc_duration/(double)(av_q2d(time_base1)*AV_TIME_BASE); } //Important:Delay if(pkt.stream_index==videoindex){ AVRational time_base=ifmt_ctx->streams[videoindex]->time_base; AVRational time_base_q={1,AV_TIME_BASE}; int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q); int64_t now_time = av_gettime() - start_time; if (pts_time > now_time) av_usleep(pts_time - now_time); }in_stream = ifmt_ctx->streams[pkt.stream_index]; out_stream = ofmt_ctx->streams[pkt.stream_index]; /* copy packet */ //Convert PTS/DTS pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX)); pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX)); pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base); pkt.pos = -1; //Print to Screen if(pkt.stream_index==videoindex){ printf("Send %8d video frames to output URL\n",frame_index); frame_index++; } //ret = av_write_frame(ofmt_ctx, &pkt); ret = av_interleaved_write_frame(ofmt_ctx, &pkt); if (ret < 0) { printf( "Error muxing packet\n"); break; } av_free_packet(&pkt); } //Write file trailer av_write_trailer(ofmt_ctx); end: avformat_close_input(&ifmt_ctx); /* close output */ if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE)) avio_close(ofmt_ctx->pb); avformat_free_context(ofmt_ctx); if (ret < 0 && ret != AVERROR_EOF) { printf( "Error occurred.\n"); return -1; } return 0; } ```
                  <ruby id="bdb3f"></ruby>

                  <p id="bdb3f"><cite id="bdb3f"></cite></p>

                    <p id="bdb3f"><cite id="bdb3f"><th id="bdb3f"></th></cite></p><p id="bdb3f"></p>
                      <p id="bdb3f"><cite id="bdb3f"></cite></p>

                        <pre id="bdb3f"></pre>
                        <pre id="bdb3f"><del id="bdb3f"><thead id="bdb3f"></thead></del></pre>

                        <ruby id="bdb3f"><mark id="bdb3f"></mark></ruby><ruby id="bdb3f"></ruby>
                        <pre id="bdb3f"><pre id="bdb3f"><mark id="bdb3f"></mark></pre></pre><output id="bdb3f"></output><p id="bdb3f"></p><p id="bdb3f"></p>

                        <pre id="bdb3f"><del id="bdb3f"><progress id="bdb3f"></progress></del></pre>

                              <ruby id="bdb3f"></ruby>

                              哎呀哎呀视频在线观看