2017-11-30 48 views
1

以下がコードです。これはコンソール アプリケーションのメイン処理の内容です。コードはコンパイルして実行でき、ビデオはキャプチャされますが、オーディオはキャプチャされません。FFmpeg.AutoGen を使用して IP カメラからビデオはキャプチャできるのに、オーディオがキャプチャされないのです。

 FFmpegBinariesHelper.RegisterFFmpegBinaries(); 

     ffmpeg.av_register_all(); 
     ffmpeg.avcodec_register_all(); 
     ffmpeg.avformat_network_init(); 
     AVFormatContext* context = ffmpeg.avformat_alloc_context(); 
     int video_stream_index = 0; 

     ffmpeg.av_register_all(); 
     ffmpeg.avcodec_register_all(); 
     ffmpeg.avformat_network_init(); 

     //open rtsp 
     if (ffmpeg.avformat_open_input(&context, "rtsp://user:[email protected]/axis-media/media.amp?", null, null) != 0) 
     { 
      return ; 
     } 

     if (ffmpeg.avformat_find_stream_info(context, null) < 0) 
     { 
      return; 
     } 

     //search video stream 
     for (int i = 0; i < context->nb_streams; i++) 
     { 
      if (context->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO) 
       video_stream_index = i; 
     } 

     AVPacket packet; 
     ffmpeg.av_init_packet(&packet); 

     //open output file 



     AVOutputFormat* fmt = ffmpeg.av_guess_format("mp4", null, null); 
     // AVFormatContext* oc = ffmpeg.avformat_alloc_context(); 
     AVFormatContext* oc = null; 
     ffmpeg.avformat_alloc_output_context2(&oc, fmt, null, null); 
     oc->oformat = fmt; 


     ffmpeg.avio_open2(&oc->pb, "test.mp4", ffmpeg.AVIO_FLAG_WRITE, null, null); 

     AVStream* stream = null; 
     int cnt = 0; 
     //start reading packets from stream and write them to file 

     ffmpeg.av_read_play(context);//play RTSP 
     while (ffmpeg.av_read_frame(context, &packet) >= 0 && cnt < 1000) 
     {//read 100 frames 
      if (packet.stream_index == video_stream_index) 
      {//packet is video    
       if (stream == null) 
       {//create stream in file 
        stream = ffmpeg.avformat_new_stream(oc, context->streams[video_stream_index]->codec->codec); 
        ffmpeg.avcodec_copy_context(stream->codec, context->streams[video_stream_index]->codec); 
        stream->sample_aspect_ratio = context->streams[video_stream_index]->codec->sample_aspect_ratio; 
        ffmpeg.avformat_write_header(oc, null); 
       } 
       packet.stream_index = stream->id; 

       ffmpeg.av_interleaved_write_frame(oc, &packet); 
       cnt++; 
      } 
      ffmpeg.av_free_packet(&packet); 
      ffmpeg.av_init_packet(&packet); 
     } 
     ffmpeg.av_read_pause(context); 
     ffmpeg.av_write_trailer(oc); 
     ffmpeg.avio_close(oc->pb); 
     ffmpeg.avformat_free_context(oc); 

回答

0

オーディオを追加する方法を見つけました。オーディオはコピーされ、ビデオと同期します。使用している SetPacketProperties の実装は以下のとおりです。

 AVFormatContext* ifcx = null; 

     AVCodecContext* v_iccx = null; 
     AVCodec* v_icodec = null; 
     AVStream* v_ist = null; 
     int v_index; 


     AVCodecContext* a_iccx = null; 
     AVCodec* a_icodec = null; 
     AVStream* a_ist = null; 
     int a_index; 
     DateTime timenow, timestart; 

     AVFormatContext* ofcx; 
     AVOutputFormat* ofmt; 

     AVStream* ost;  
     AVPacket packet; 

     string sFileInput; 
     string sFileOutput; 



     sFileInput = rtspUrl; 

     var startNumber = 0; 
     var filePrefix = "camera" + cameraId; 

     // create folder if not exist 
     if (!Directory.Exists(destinationFolder)) 
     { 
      Directory.CreateDirectory(destinationFolder); 
     } 

     var files = Directory.GetFiles(destinationFolder, "*" + filePrefix + "*"); 
     if (files.Any()) 
     { 
      var lastFile = files.Last(); 
      var temp = lastFile.Substring(lastFile.Length - 7, 3); 
      if (int.TryParse(temp, out startNumber)) 
      { 
       startNumber++; 

      } 

     } 

     string NextFile = string.Format("{0}\\{1}-{2:000}.mp4", destinationFolder, filePrefix, startNumber); 
     //EventLog.WriteEntry(sSource, "Capturing " + NextFile); 
     sFileOutput = NextFile; 

     FFmpegBinariesHelper.RegisterFFmpegBinaries(); 
     // Initialize library 
     ffmpeg.av_log_set_level(ffmpeg.AV_LOG_DEBUG); 
     ffmpeg.av_register_all(); 
     ffmpeg.avcodec_register_all(); 
     ffmpeg.avformat_network_init(); 

     // 
     // Input 
     // 
     AVFormatContext** tmpIfcx = &ifcx; 
     var ts = new CancellationTokenSource(); 
     CancellationToken ct = ts.Token; 

     var task = new Task<int>(() => Avformat_open_input_async(tmpIfcx, sFileInput),ct); 

     task.Start();    

     task.Wait(2000);    

     if (!task.IsCompleted) 
     { 
      ts.Cancel(); 
      //EventLog.WriteEntry(sSource, "Waiting on task Avformat_open_input_async ", EventLogEntryType.Warning); 
      task.Wait(2000); 
      //EventLog.WriteEntry(sSource, "Timeout callling " + sFileInput, EventLogEntryType.Error);     
      return; 
     } 

     var result = task.Result; 
     //open rtsp 

     // ifcx = tmpIfcx; 
     if (result != 0) 
     {     
      EventLog.WriteEntry(sSource, "ERROR: Cannot open input file " + sFileInput, EventLogEntryType.Error); 
      return; 
     } 

     if (ffmpeg.avformat_find_stream_info(ifcx, null) < 0) 
     {    
      EventLog.WriteEntry(sSource, "ERROR: Cannot find stream info\n", EventLogEntryType.Error); 
      ffmpeg.avformat_close_input(&ifcx); 
      return; 
     } 

     //search video stream 
     v_index = -1; 
     a_index = -1; 
     for (int ix = 0; ix < ifcx->nb_streams; ix++) 
     { 

      if (ifcx->streams[ix]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO) 
      { 
       v_ist = ifcx->streams[ix]; 
       v_icodec = ifcx->video_codec; 
       v_index = ix; 
       v_iccx = ifcx->streams[ix]->codec; 

      } 
      if (ifcx->streams[ix]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO) 
      { 
       a_ist = ifcx->streams[ix]; 
       a_icodec = ifcx->video_codec; 
       a_index = ix; 
       a_iccx = ifcx->streams[ix]->codec; 

      } 
     } 
     if (v_index < 0) 
     { 
      EventLog.WriteEntry(sSource, "ERROR: Cannot find input video stream\n",EventLogEntryType.Error); 
      ffmpeg.avformat_close_input(&ifcx); 
      return; 
     } 

     // 
     // Output 
     // 

     //open output file 
     ofmt = ffmpeg.av_guess_format(null, sFileOutput, "mp4"); 

     // ffmpeg.format 
     ofcx = ffmpeg.avformat_alloc_context(); 
     ofcx->oformat = ofmt; 

     ffmpeg.avio_open(&ofcx->pb, sFileOutput, ffmpeg.AVIO_FLAG_WRITE); 

     // Create output stream 
     ost = ffmpeg.avformat_new_stream(ofcx, (AVCodec *) v_iccx->codec); 
     AVStream* a_ost = ffmpeg.avformat_new_stream(ofcx, (AVCodec*)a_iccx->codec); 
     //ost = ffmpeg.avformat_new_stream(ofcx, ifcx->video_codec); 

     ffmpeg.avcodec_copy_context(ost->codec, v_iccx); 
     ffmpeg.avcodec_copy_context(a_ost->codec, a_iccx); 

     ffmpeg.avcodec_open2(v_iccx, v_icodec, null); 
     ffmpeg.avcodec_open2(a_iccx, a_icodec, null); 


     // Assume r_frame_rate is accurate 
     var avRational = new AVRational(); 
     avRational.den = ost->r_frame_rate.den * 2; 
     avRational.num = ost->r_frame_rate.num ; 

     var aaRational = new AVRational(); 
     aaRational.den = a_ost->r_frame_rate.den ; 
     aaRational.num = a_ost->r_frame_rate.num ; 

     ost->r_frame_rate = avRational; 
     ost->avg_frame_rate = ost->r_frame_rate; 
     ost->time_base = av_inv_q(ost->r_frame_rate); 
     ost->codec->time_base = ost->time_base; 

     a_ost->r_frame_rate = aaRational; 
     a_ost->avg_frame_rate = a_ost->r_frame_rate; 
     a_ost->time_base = av_inv_q(a_ost->r_frame_rate); 
     a_ost->codec->time_base = a_ost->time_base; 

     ffmpeg.avformat_write_header(ofcx, null); 

     //start reading packets from stream and write them to file 

     ffmpeg.av_dump_format(ifcx, 0, ifcx->filename.ToString(), 0); 
     ffmpeg.av_dump_format(ofcx, 0, ofcx->filename.ToString(), 1); 

     timestart = timenow = DateTime.Now; 

     ffmpeg.av_init_packet(&packet); 
     if (segmentLength == 0) 
      segmentLength = 15; 
     var dateToEnd = DateTime.Now.AddMinutes(segmentLength); 
     //EventLog.WriteEntry(sSource, "date to end capture " + dateToEnd.ToString()); 
     while ((dateToEnd - DateTime.Now).TotalMinutes > 0 && IsCapturing) 
     { 
      if (endDateTime.HasValue && DateTime.Compare(DateTime.Now, endDateTime.Value) >= 0) 
      { 
       ffmpeg.av_packet_unref(&packet); 
       ffmpeg.av_init_packet(&packet); 
       IsCapturing = false;      
       break; 
      } 
      int readFrame = -1; 
      try 
      { 
       readFrame = ffmpeg.av_read_frame(ifcx, &packet); 

      } 
      catch(Exception ex) 
      { 
       EventLog.WriteEntry(sSource, $"Error av_read_frame {ex.ToString()}", EventLogEntryType.Error); 
       break; 
      } 

      if (readFrame < 0) 
      { 
       EventLog.WriteEntry(sSource, "reafFrame < 0 " + NextFile, EventLogEntryType.Error); 
       ffmpeg.av_packet_unref(&packet); 
       ffmpeg.av_init_packet(&packet); 
       break; 
      } 

      if (packet.stream_index == v_index) 
      { //packet is video 
       packet.stream_index = v_ist->index; 
       ffmpeg.av_interleaved_write_frame(ofcx, &packet); 

      } 
      if (packet.stream_index == a_index) 
      { //packet is audio    

       SetPacketProperties(&packet, a_iccx, a_ist); 
       ffmpeg.av_interleaved_write_frame(ofcx, &packet); 

      } 
      ffmpeg.av_packet_unref(&packet); 
      ffmpeg.av_init_packet(&packet); 

     } 
     ffmpeg.av_read_pause(ifcx); 
     ffmpeg.av_write_trailer(ofcx); 
     ffmpeg.avio_close(ofcx->pb); 
     ffmpeg.avformat_free_context(ofcx); 

     ffmpeg.avformat_network_deinit(); 
+0

コード:こちらがそのコードです –

0
/// <summary>
/// Rescales a copied packet's timestamps from the codec context's time base
/// to the destination stream's time base and retargets the packet at that
/// stream, so remuxed audio stays in sync with the video.
/// NOTE(review): for a pure stream copy the source time base is usually the
/// *input stream's* time_base rather than the codec context's — confirm the
/// context passed by the caller carries the input stream's time base.
/// </summary>
private unsafe void SetPacketProperties(AVPacket* packet, AVCodecContext* codecContext, AVStream* stream)
{
    // BUGFIX: AV_NOPTS_VALUE is a sentinel, not a timestamp — rescaling it
    // produces garbage pts/dts that break interleaving. Leave it untouched.
    if (packet->pts != ffmpeg.AV_NOPTS_VALUE)
        packet->pts = ffmpeg.av_rescale_q_rnd(packet->pts, codecContext->time_base, stream->time_base, AVRounding.AV_ROUND_NEAR_INF | AVRounding.AV_ROUND_PASS_MINMAX);
    if (packet->dts != ffmpeg.AV_NOPTS_VALUE)
        packet->dts = ffmpeg.av_rescale_q_rnd(packet->dts, codecContext->time_base, stream->time_base, AVRounding.AV_ROUND_NEAR_INF | AVRounding.AV_ROUND_PASS_MINMAX);
    packet->duration = (int)ffmpeg.av_rescale_q(packet->duration, codecContext->time_base, stream->time_base);
    packet->stream_index = stream->index;
}
関連する問題