如何在C++中将OpenCv Mat转换为webrtc::VideoFrame(I420Buffer)

zzlelutf  于 2022-12-13  发布在  其他
关注(0)|答案(1)|浏览(600)

通过这个link,我学会了如何将webrtc::VideoFrame转换为OpenCv Mat。修改了部分数据后,我想通过WebRTC以videoFrame(webrtc::I420Buffer)的形式发送回去。如何将OpenCV Mat转换为webrtc::VideoFrame?

// Convert an OpenCV Mat (assumed 8-bit BGR or BGRA, buf_width x buf_height —
// TODO confirm against the caller; the commented-out attempts used BGRA) into
// a freshly allocated webrtc::I420Buffer.
//
// The original version allocated the buffer but left its Y/U/V planes
// uninitialized (every conversion attempt was commented out), so the caller
// received garbage video data. This version fills the planes with an integer
// BT.601 RGB->YUV conversion, using only the APIs already in scope.
rtc::scoped_refptr<webrtc::I420Buffer> MatToI420(cv::Mat& cvMat, const int buf_width, const int buf_height){

    rtc::scoped_refptr<webrtc::I420Buffer> buffer;
    buffer = webrtc::I420Buffer::Create(buf_width, buf_height, buf_width, (buf_width + 1) / 2, (buf_width + 1) / 2);

    const int channels = cvMat.channels();   // 3 = BGR, 4 = BGRA (alpha ignored)
    uint8_t* const y_plane = buffer->MutableDataY();
    uint8_t* const u_plane = buffer->MutableDataU();
    uint8_t* const v_plane = buffer->MutableDataV();

    for (int row = 0; row < buf_height; ++row) {
        const uint8_t* src = cvMat.ptr<uint8_t>(row);
        for (int col = 0; col < buf_width; ++col) {
            // OpenCV stores pixels as B, G, R [, A].
            const int b = src[col * channels + 0];
            const int g = src[col * channels + 1];
            const int r = src[col * channels + 2];

            // BT.601 studio-range luma (same coefficients libyuv uses).
            y_plane[row * buffer->StrideY() + col] =
                static_cast<uint8_t>(((66 * r + 129 * g + 25 * b + 128) >> 8) + 16);

            // 4:2:0 chroma: sample once per 2x2 block (top-left pixel).
            if ((row & 1) == 0 && (col & 1) == 0) {
                u_plane[(row / 2) * buffer->StrideU() + col / 2] =
                    static_cast<uint8_t>(((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128);
                v_plane[(row / 2) * buffer->StrideV() + col / 2] =
                    static_cast<uint8_t>(((112 * r - 94 * g - 18 * b + 128) >> 8) + 128);
            }
        }
    }

    return buffer;
}
4urapxun

4urapxun1#

这是一个有效的例子。试试看。

// ConsoleApplication1.cpp : This file contains the 'main' function. Program execution begins and ends there.
//

#include <iostream>
#include "opencv2/opencv.hpp"
#include "opencv2/imgproc.hpp"
#include "opencv2/highgui.hpp"

extern "C" {
    //Required for using sws_scale
#include <libavutil/frame.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
}

#include "api/scoped_refptr.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_frame_buffer.h"
#include "api/video/video_rotation.h"
#include "modules/video_capture/video_capture_factory.h"

//https://stackoverflow.com/questions/71607268/how-to-convert-webrtcvideoframe-to-opencv-mat-in-c
//https://stackoverflow.com/questions/71607268/how-to-convert-webrtcvideoframe-to-opencv-mat-in-c
// Convert an I420 (YUV 4:2:0 planar) buffer to a BGR cv::Mat via sws_scale.
// Returns an empty Mat on any failure.
//
// Fixed vs. the original: the early-return paths leaked sws_ctx and
// pBGRFrame (sws_getContext / av_frame_alloc results were never released
// when av_frame_get_buffer or sws_scale failed), and the av_frame_alloc
// result was never null-checked. All resources are now released on every
// path through the single exit at the bottom.
cv::Mat I420ToMat(webrtc::I420Buffer* inputBuffer) {
    cv::Mat result;
    struct SwsContext* sws_ctx = sws_getContext(inputBuffer->width(), inputBuffer->height(), AV_PIX_FMT_YUV420P,
        inputBuffer->width(), inputBuffer->height(),
        AV_PIX_FMT_BGR24, SWS_FAST_BILINEAR,
        NULL, NULL, NULL);
    if (sws_ctx == nullptr) { return result; }

    // AVFrame is more convenient than allocating data buffers/linesizes by hand.
    AVFrame* pBGRFrame = av_frame_alloc();
    if (pBGRFrame == nullptr) {
        sws_freeContext(sws_ctx);
        return result;
    }
    pBGRFrame->format = AV_PIX_FMT_BGR24;
    pBGRFrame->width = inputBuffer->width();
    pBGRFrame->height = inputBuffer->height();

    int sts = av_frame_get_buffer(pBGRFrame, 0);    // Buffer allocation
    if (sts >= 0) {
        const uint8_t* const src_data[] = { inputBuffer->DataY(), inputBuffer->DataU(), inputBuffer->DataV() };
        const int src_stride[] = { inputBuffer->StrideY(), inputBuffer->StrideU(), inputBuffer->StrideV() };
        sts = sws_scale(sws_ctx, src_data, src_stride, 0, inputBuffer->height(), pBGRFrame->data, pBGRFrame->linesize);
        if (sts == inputBuffer->height()) {
            // clone() so the Mat owns its pixels after the AVFrame is freed below.
            result = cv::Mat(pBGRFrame->height, pBGRFrame->width, CV_8UC3, pBGRFrame->data[0], pBGRFrame->linesize[0]).clone();
        }
    }

    // Single cleanup point — reached on success and on every failure above.
    sws_freeContext(sws_ctx);
    av_frame_free(&pBGRFrame);
    return result;
}

//https://stackoverflow.com/questions/70291740/sws-scale-generates-malformed-video
//https://stackoverflow.com/questions/70291740/sws-scale-generates-malformed-video
// Convert a BGR24 cv::Mat (width x height — TODO confirm 8UC3 against the
// caller) to a webrtc::I420Buffer via sws_scale. Returns nullptr-holding
// scoped_refptr on failure. cvMat is taken by value, but cv::Mat copies are
// shallow (ref-counted header), so this is cheap.
//
// Fixed vs. the original: the av_image_alloc'd planes and the SwsContext
// were leaked on the early-return paths, and I420Buffer::Copy hard-coded
// width / width/2 strides instead of the out_linesize values that
// av_image_alloc actually produced.
rtc::scoped_refptr<webrtc::I420Buffer> MatToI420(cv::Mat cvMat, const int width, const int height)
{
    rtc::scoped_refptr<webrtc::I420Buffer> result;

    int out_linesize[4] = { 0, 0, 0, 0 };
    uint8_t* out_planes[4] = { nullptr, nullptr, nullptr, nullptr };
    // align=1: tightly packed planes (linesize[0] == width for YUV420P).
    if (av_image_alloc(out_planes, out_linesize, width, height, AV_PIX_FMT_YUV420P, 1) < 0) {
        return result;
    }

    struct SwsContext* sws_context = sws_getCachedContext(nullptr, width, height, AV_PIX_FMT_BGR24,
        width, height, AV_PIX_FMT_YUV420P,
        SWS_BILINEAR, nullptr, nullptr, nullptr);
    if (sws_context != nullptr) {
        // BGR24 is a single interleaved plane: 3 bytes per pixel.
        const int in_linesize[3] = { 3 * width, 0, 0 };
        const uint8_t* in_planes[1] = { cvMat.data };
        const int response = sws_scale(sws_context, in_planes, in_linesize, 0, height, out_planes, out_linesize);
        if (response >= 0) {
            // Copy() duplicates the planes, so out_planes can be freed below.
            result = webrtc::I420Buffer::Copy(
                width, height
               ,out_planes[0], out_linesize[0]
               ,out_planes[1], out_linesize[1]
               ,out_planes[2], out_linesize[2]
            );
        }
        sws_freeContext(sws_context);
    }

    av_freep(out_planes);   // frees the single buffer anchored at out_planes[0]
    return result;
}

int main()
{
    //Create raw video frame in I420 format using FFmpeg (for testing):
    //ffmpeg -f lavfi -i testsrc=size=640x480:duration=1:rate=1 -pix_fmt yuv420p -f rawvideo I420.yuv
    int width = 640;
    int height = 480;
    int stride_y = width;
    int stride_u = width / 2;
    int stride_v = width / 2;
    std::unique_ptr<uint8_t> data_y_ (new uint8_t[width * height]);
    std::unique_ptr<uint8_t> data_u_ (new uint8_t[width * height / 4]);
    std::unique_ptr<uint8_t> data_v_ (new uint8_t[width * height / 4]);
    FILE* f;
    fopen_s(&f, "I420.yuv", "rb");
    fread(data_y_.get(), 1, width * height, f);  //Read Y color channel.
    fread(data_u_.get(), 1, width * height / 4, f);  //Read U color channel.
    fread(data_v_.get(), 1, width * height / 4, f);  //Read V color channel.
    fclose(f);

    // [i420 -> cv::Mat] -> i420 -> cv::Mat

    //make a (webrtc::I420Buffer) buffer  
    rtc::scoped_refptr<webrtc::I420Buffer> i420buffer = webrtc::I420Buffer::Create(width, height, stride_y, stride_u, stride_v);
    i420buffer = webrtc::I420Buffer::Copy(width, height, data_y_.get(), stride_y, data_u_.get(), stride_u, data_v_.get(), stride_v);
    cv::Mat matImg = I420ToMat(i420buffer.get());

    //Use OpenCV for showing the image
    cv::imshow("cvImg", matImg);
    cv::waitKey();

    //Save the inage in PNG format using OpenCV
    cv::imwrite("rgb1.png", matImg);

    // i420 -> [cv::Mat -> i420] -> cv::Mat

    rtc::scoped_refptr<webrtc::I420Buffer> new_buffer = webrtc::I420Buffer::Create(width, height, stride_y, stride_u, stride_v);
    new_buffer = MatToI420(matImg, width, height);

    // i420 -> cv::Mat -> [i420 -> cv::Mat]

    cv::Mat matImg2 = I420ToMat(new_buffer.get());

    cv::imshow("cvImg2", matImg2);
    cv::waitKey();

    //Save the inage in PNG format using OpenCV
    cv::imwrite("rgb2.png", matImg2);
    cv::waitKey();
}

相关问题