Streaming programmatically from a Linux video device with the ffmpeg libraries

I am able to stream a static mp4 file encoded with h264 from my own code.

But when I use the Linux video device /dev/video0 as the input in my program, it fails with this error:

failed to open file `udp://xxx.xxx.xxx.xxx:28000` or configure filtergraph.

I am not using an encoder in my program yet, and I don't understand why I can't stream straight from the camera.

The static mp4 file is already h264-encoded, and my code streams it as-is; but when I try to stream the camera through video4linux2 the result won't play.

Do I need to encode the camera's output in some format first?

#include <libavutil/timestamp.h>
#include <libavformat/avformat.h>
#include <libavdevice/avdevice.h>   /* for avdevice_register_all() */
static void log_packet(const AVFormatContext *fmt_ctx,const AVPacket *pkt,const char *tag)
{
    AVRational *time_base = &fmt_ctx->streams[pkt->stream_index]->time_base;
    printf("%s: pts:%s pts_time:%s dts:%s dts_time:%s duration:%s duration_time:%s stream_index:%d\n",
           tag,
           av_ts2str(pkt->pts), av_ts2timestr(pkt->pts, time_base),
           av_ts2str(pkt->dts), av_ts2timestr(pkt->dts, time_base),
           av_ts2str(pkt->duration), av_ts2timestr(pkt->duration, time_base),
           pkt->stream_index);
}
int main(int argc,char **argv)
{
    AVOutputFormat *ofmt = NULL;
    AVFormatContext *ifmt_ctx = NULL,*ofmt_ctx = NULL;
    AVPacket pkt;
    const char *in_filename,*out_filename;
    AVInputFormat *inputFormat =av_find_input_format("video4linux2");

    int ret,i;
    /*if (argc < 3) {
        printf("usage: %s input output\n"
               "API example program to remux a media file with libavformat and libavcodec.\n"
               "The output format is guessed according to the file extension.\n"
               "\n",argv[0]);
        return 1;
    }
    //in_filename  = argv[1];
   // out_filename = argv[2];*/

    in_filename  = "/dev/video0";
    out_filename = "udp://xxx.xxx.xxx.xxx:28000";

    avdevice_register_all();
    avcodec_register_all();
    av_register_all();
    //Network
    avformat_network_init();


    if ((ret = avformat_open_input(&ifmt_ctx,in_filename,inputFormat,0)) < 0) {
        fprintf(stderr,"Could not open input file '%s'",in_filename);
        goto end;
    }

    if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0) {
        fprintf(stderr, "Failed to retrieve input stream information");
        goto end;
    }

    av_dump_format(ifmt_ctx, 0, in_filename, 0);
    avformat_alloc_output_context2(&ofmt_ctx,NULL,"mpegts",out_filename);
    if (!ofmt_ctx) {
        fprintf(stderr,"Could not create output context\n");
        ret = AVERROR_UNKNOWN;
        goto end;
    }
    ofmt = ofmt_ctx->oformat;
    for (i = 0; i < ifmt_ctx->nb_streams; i++) {
        AVStream *in_stream = ifmt_ctx->streams[i];
        AVStream *out_stream = avformat_new_stream(ofmt_ctx,in_stream->codec->codec);
        if (!out_stream) {
            fprintf(stderr,"Failed allocating output stream\n");
            ret = AVERROR_UNKNOWN;
            goto end;
        }
        ret = avcodec_copy_context(out_stream->codec,in_stream->codec);
        if (ret < 0) {
            fprintf(stderr,"Failed to copy context from input to output stream codec context\n");
            goto end;
        }
        out_stream->codec->codec_tag = 0;
        if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
            out_stream->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    }
    av_dump_format(ofmt_ctx, 0, out_filename, 1);
    if (!(ofmt->flags & AVFMT_NOFILE)) {
        ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
        if (ret < 0) {
            fprintf(stderr,"Could not open output file '%s'",out_filename);
            goto end;
        }
    }
    ret = avformat_write_header(ofmt_ctx,NULL);
    if (ret < 0) {
        fprintf(stderr,"Error occurred when opening output file\n");
        goto end;
    }
    while (1) {
        AVStream *in_stream,*out_stream;
        ret = av_read_frame(ifmt_ctx,&pkt);
        if (ret < 0)
            break;
        in_stream  = ifmt_ctx->streams[pkt.stream_index];
        out_stream = ofmt_ctx->streams[pkt.stream_index];
        log_packet(ifmt_ctx,&pkt,"in");
        /* copy packet */
        pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX);
        pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX);
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;
        log_packet(ofmt_ctx, &pkt, "out");
        ret = av_interleaved_write_frame(ofmt_ctx,&pkt);
        if (ret < 0) {
            fprintf(stderr,"Error muxing packet\n");
            break;
        }
        av_free_packet(&pkt);
    }
    av_write_trailer(ofmt_ctx);
end:
    avformat_close_input(&ifmt_ctx);
    /* close output */
    if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
        avio_closep(&ofmt_ctx->pb);
    avformat_free_context(ofmt_ctx);
    if (ret < 0 && ret != AVERROR_EOF) {
        fprintf(stderr,"Error occurred: %s\n",av_err2str(ret));
        return 1;
    }
    return 0;
}

The output is:

pi@raspberrypi:~/source_code $ ./deneme_4
Input #0, video4linux2,v4l2, from '/dev/video0':
  Duration: N/A, start: 74626.180310, bitrate: 235929 kb/s
    Stream #0:0: Video: rawvideo (I420 / 0x30323449), yuv420p, 1024x768, 235929 kb/s, 25 fps, 25 tbr, 1000k tbn, 1000k tbc
Output #0, mpegts, to 'udp://xxx.xxx.xxx.xxx:28000':
    Stream #0:0: Unknown: none
[mpegts @ 0x1d6a5c0] Using AVStream.codec to pass codec parameters to muxers is deprecated, use AVStream.codecpar instead.
in: pts:74626180310 pts_time:74626.2 dts:74626180310 dts_time:74626.2 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716356228 pts_time:74626.2 dts:6716356228 dts_time:74626.2 duration:3600 duration_time:0.04 stream_index:0
in: pts:74626408490 pts_time:74626.4 dts:74626408490 dts_time:74626.4 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716376764 pts_time:74626.4 dts:6716376764 dts_time:74626.4 duration:3600 duration_time:0.04 stream_index:0
in: pts:74626665690 pts_time:74626.7 dts:74626665690 dts_time:74626.7 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716399912 pts_time:74626.7 dts:6716399912 dts_time:74626.7 duration:3600 duration_time:0.04 stream_index:0
in: pts:74626923514 pts_time:74626.9 dts:74626923514 dts_time:74626.9 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716423116 pts_time:74626.9 dts:6716423116 dts_time:74626.9 duration:3600 duration_time:0.04 stream_index:0
in: pts:74627180935 pts_time:74627.2 dts:74627180935 dts_time:74627.2 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716446284 pts_time:74627.2 dts:6716446284 dts_time:74627.2 duration:3600 duration_time:0.04 stream_index:0
in: pts:74627439174 pts_time:74627.4 dts:74627439174 dts_time:74627.4 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716469526 pts_time:74627.4 dts:6716469526 dts_time:74627.4 duration:3600 duration_time:0.04 stream_index:0
in: pts:74627695789 pts_time:74627.7 dts:74627695789 dts_time:74627.7 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716492621 pts_time:74627.7 dts:6716492621 dts_time:74627.7 duration:3600 duration_time:0.04 stream_index:0
in: pts:74627951834 pts_time:74628 dts:74627951834 dts_time:74628 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716515665 pts_time:74628 dts:6716515665 dts_time:74628 duration:3600 duration_time:0.04 stream_index:0
in: pts:74628208064 pts_time:74628.2 dts:74628208064 dts_time:74628.2 duration:40000 duration_time:0.04 stream_index:0
out: pts:6716538726 pts_time:74628.2 dts:6716538726 dts_time:74628.2 duration:3600 duration_time:0.04 stream_index:0
^C
Answer from fxbxx:

/dev/video0 delivers raw pixel data, and as far as I know no streaming format will carry that unmodified. You have to encode/compress each frame before writing it out. Your mp4 file works because the data inside it is already encoded.
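
A minimal sketch of the missing encode step follows, assuming your FFmpeg build includes libx264 and that the camera really delivers yuv420p at 1024x768 / 25 fps as in your dump. The helper names open_h264_encoder and encode_and_write are made up for illustration, not part of any FFmpeg API:

#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/opt.h>

/* Open an H.264 encoder matching the raw camera frames. */
static AVCodecContext *open_h264_encoder(int width, int height, int fps)
{
    AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!codec)
        return NULL;

    AVCodecContext *enc = avcodec_alloc_context3(codec);
    if (!enc)
        return NULL;

    enc->width     = width;
    enc->height    = height;
    enc->pix_fmt   = AV_PIX_FMT_YUV420P;          /* matches the camera's I420 output */
    enc->time_base = (AVRational){1, fps};
    enc->framerate = (AVRational){fps, 1};
    enc->gop_size  = fps;                         /* roughly one keyframe per second */
    av_opt_set(enc->priv_data, "preset", "ultrafast", 0);   /* libx264-specific options */
    av_opt_set(enc->priv_data, "tune", "zerolatency", 0);

    if (avcodec_open2(enc, codec, NULL) < 0) {
        avcodec_free_context(&enc);
        return NULL;
    }
    return enc;
}

/* Encode one raw frame (NULL flushes the encoder) and write every packet
 * the encoder hands back to the muxer. */
static int encode_and_write(AVCodecContext *enc, AVFormatContext *ofmt_ctx,
                            AVStream *out_stream, AVFrame *frame)
{
    int ret = avcodec_send_frame(enc, frame);
    if (ret < 0)
        return ret;

    AVPacket *pkt = av_packet_alloc();
    if (!pkt)
        return AVERROR(ENOMEM);

    while ((ret = avcodec_receive_packet(enc, pkt)) == 0) {
        av_packet_rescale_ts(pkt, enc->time_base, out_stream->time_base);
        pkt->stream_index = out_stream->index;
        ret = av_interleaved_write_frame(ofmt_ctx, pkt);
        if (ret < 0)
            break;
    }
    av_packet_free(&pkt);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        ret = 0;
    return ret;
}

With something like this in place you would create the output stream from the encoder (avcodec_parameters_from_context(out_stream->codecpar, enc)) instead of copying the camera's rawvideo parameters, turn each packet from av_read_frame() into an AVFrame whose pts counts in enc->time_base units (for example by pushing the packet through the rawvideo decoder), hand that frame to encode_and_write(), and flush the encoder with a NULL frame before av_write_trailer().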
