How to display an H264 TCP stream on the web

Problem description

I have a set of tiny cameras that stream H264-encoded video over TCP. I need to connect to them on demand, driven by user actions in the browser, and display the live streams to the users.

I have been searching the internet for how to achieve this, without success. The closest I have come is a small program that uses libav (C++) to connect to the video streams, save them as Motion JPEG, and then uses mjpg_streamer to serve the result as a live stream. But this solution is overly complex, and my program crashes with the following errors:

Failed to decode av_out_packet: Operation Now in progress

Failed to read av_frame

Here is the piece of code I use to decode the streams:

void decode_stream(const char * address,int threadIdx,const char * output_dir) {
    std::cout << "Started decoding thread ID: " << std::this_thread::get_id()  << "  TID: " << threadIdx << std::endl;

    AVFormatContext *av_format_ctx = avformat_alloc_context();

    // register timeout callback
    auto * ith = new ffmpeg_interrupt_handler(default_timeout * 10);
    av_format_ctx->interrupt_callback.opaque = (void *)ith;
    av_format_ctx->interrupt_callback.callback = &ffmpeg_interrupt_handler::check_interrupt;

    AVInputFormat *av_input_fmt = av_find_input_format("h264");

    if (avformat_open_input(&av_format_ctx,address,av_input_fmt,nullptr) != 0) {
        avformat_close_input(&av_format_ctx);
        perror("Could not open input context");
        exit(EXIT_FAILURE);
    }

    int video_stream_index = -1;

    AVCodec* av_codec;
    AVCodecParameters * av_codec_params;

    //find valid video stream
    for (int i = 0; i < av_format_ctx->nb_streams; ++i) {
        av_codec_params = av_format_ctx->streams[i]->codecpar;
        av_codec = avcodec_find_decoder(av_codec_params->codec_id);

        if (!av_codec) {
            perror("Could not find decoder for codec");
            continue;
        }

        if (av_codec_params->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_index = i;
            break;
        }
    }

    if (video_stream_index == -1) {
        perror("Could not find a valid video stream.");
        exit(EXIT_FAILURE);
    }

    //allocate codec context
    AVCodecContext * av_codec_ctx = avcodec_alloc_context3(av_codec);
    if (!av_codec_ctx) {
        perror("Could not create AVCodec Context\n");
        exit(EXIT_FAILURE);
    }

    if (avcodec_parameters_to_context(av_codec_ctx,av_codec_params) < 0) {
        perror("Could not initialize AVCodec Context\n");
        exit(EXIT_FAILURE);
    }

    if (avcodec_open2(av_codec_ctx,av_codec,nullptr) < 0) {
        perror("Could not open AVCodec\n");
        exit(EXIT_FAILURE);
    }

    AVFrame* av_frame = av_frame_alloc();

    if (!av_frame) {
        perror("Could not allocate AVFrame");
        exit(EXIT_FAILURE);
    }

    AVPacket *av_packet = av_packet_alloc();
    if (!av_packet) {
        perror("Could not allocate AVPacket");
        exit(EXIT_FAILURE);
    }

    AVCodec *av_out_codec = avcodec_find_encoder(AV_CODEC_ID_MJPEG);
    if (!av_out_codec) {
        perror("Could not find MJPEG codec");
        exit(EXIT_FAILURE);
    }

    AVCodecContext *av_out_codec_ctx = avcodec_alloc_context3(av_out_codec);
    if (!av_out_codec_ctx) {
        perror("Could not allocate output context");
        exit(EXIT_FAILURE);
    }

    av_out_codec_ctx->width = 1280;
    av_out_codec_ctx->height = 720;
    av_out_codec_ctx->pix_fmt = AV_PIX_FMT_YUVJ420P;
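    // note: AVFMT_VARIABLE_FPS is an AVOutputFormat flag, not a frame rate;
    // an encoder time base is normally 1/fps, e.g. (AVRational){1, 5}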
    av_out_codec_ctx->time_base = (AVRational){5,AVFMT_VARIABLE_FPS};

    if (avcodec_open2(av_out_codec_ctx,av_out_codec,nullptr) < 0) {
        perror("Could not open output codec");
        exit(EXIT_FAILURE);
    }

    AVPacket *av_out_packet = av_packet_alloc();

    std::string output_filename = output_dir;

    if (! fs::exists(output_dir)) {
        fs::create_directory(output_dir);
    } else if ( fs::exists(output_dir) && ! fs::is_directory(output_dir)) {
        perror("Target output is not a directory!");
        exit(EXIT_FAILURE);
    }

    std::string output_final_dir = output_dir;
    output_final_dir += "stream_" + std::to_string(threadIdx);

    if (! fs::exists(output_final_dir)) {
        fs::create_directory(output_final_dir);
    }

    output_filename += "stream_" + std::to_string(threadIdx) + "/stream_" + std::to_string(threadIdx) + ".jpg";

    int response;
    FILE *JPEGFile = nullptr;

    ith->reset(default_timeout);
    while (av_read_frame(av_format_ctx,av_packet) >= 0) {
        if (av_packet->stream_index == video_stream_index) {
            response = avcodec_send_packet(av_codec_ctx,av_packet);

            if (response < 0) {
                perror("Failed to decode av_out_packet");
                exit(EXIT_FAILURE);
            }

            response = avcodec_receive_frame(av_codec_ctx,av_frame);
            if (response == AVERROR(EAGAIN) || response == AVERROR_EOF) {
                continue;
            } else if (response < 0) {
                perror("Failed to decode av_out_packet");
                exit(EXIT_FAILURE);
            }

            if (av_frame->format != AV_PIX_FMT_YUV420P) {
                printf("Generated file may not be grayscale\n");
            }

            // send frame to encode into out format
            avcodec_send_frame(av_out_codec_ctx,av_frame);

            // receive encoded out data
            avcodec_receive_packet(av_out_codec_ctx,av_out_packet);
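            // note: the return values of avcodec_send_frame()/avcodec_receive_packet() above are
            // not checked; on AVERROR(EAGAIN) av_out_packet stays empty and an empty JPEG is written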

            // open output
            JPEGFile = fopen(output_filename.c_str(),"wb");
            if (JPEGFile == nullptr) {
                perror("Could not open output file");
                break;
            }
            // write to output
            fwrite(av_out_packet->data,1,av_out_packet->size,JPEGFile);

            // close output
            if (! fclose(JPEGFile)) {
                JPEGFile = nullptr;
            }

            // unref out packet
            av_packet_unref(av_out_packet);

            av_packet_unref(av_packet);
            // reset packet timeout
            ith->reset(default_timeout);
        }
    }


    if (JPEGFile != nullptr) {
        fclose(JPEGFile);
        JPEGFile = nullptr;
    }
    std::cout << "Exiting thread: " << threadIdx << std::endl;
    should_stop_thread[threadIdx] = true;
    av_packet_free(&av_out_packet);
    avcodec_free_context(&av_out_codec_ctx);

    av_frame_free(&av_frame);
    av_packet_free(&av_packet);
    // avformat_close_input() also frees the format context and nulls the pointer
    avformat_close_input(&av_format_ctx);
    avcodec_free_context(&av_codec_ctx);
}
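
A side note on the error messages quoted above: perror() prints the text for the current errno, not for the negative AVERROR code returned by the libav calls, so "Operation Now in progress" may be unrelated to the actual decoder failure. A small helper built on av_strerror() would report the real reason avcodec_send_packet() or avcodec_receive_frame() fails (just a sketch; the helper name av_error_text is illustrative, not part of my program):

extern "C" {
#include <libavutil/error.h>
}
#include <string>

// Convert a libav return code into human-readable text.
// av_strerror() fills the buffer with the message for an AVERROR value.
static std::string av_error_text(int errnum) {
    char buf[AV_ERROR_MAX_STRING_SIZE] = {0};
    av_strerror(errnum, buf, sizeof(buf));
    return std::string(buf);
}

// Usage inside the decode loop, in place of perror():
//   response = avcodec_send_packet(av_codec_ctx, av_packet);
//   if (response < 0) {
//       std::cerr << "avcodec_send_packet failed: " << av_error_text(response) << std::endl;
//   }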

In any case, if there is a simpler solution I am missing, I am open to it. The latency between the real stream and the displayed video matters to me and must not exceed 1 second.

Solution

No working solution to this problem has been found yet.
