1. First, build FFmpeg. Source download: Download FFmpeg

tar -xvf ffmpeg-4.2.2.tar.bz2
cd ffmpeg-4.2.2
./configure --prefix=/home/z/Desktop/ffmpeg-3.4.7/build --enable-cross-compile --arch=aarch64 --target-os=linux --host-os=linux --cross-prefix=/opt/gcc-aarch64-linux-gnu/bin/aarch64-linux-gnu- --enable-gpl --enable-shared --enable-libx264 --enable-avresample --disable-asm --extra-cflags=-I/home/z/Desktop/ffmpeg-3.4.7/build/include --extra-ldflags=-L/home/z/Desktop/ffmpeg-3.4.7/build/lib

The ./configure options are all on one line with no line breaks (adjust the directories in the options above to match your actual setup).

2. Build MPP. GitHub download: mpp download

After downloading, go into mpp/build/linux/aarch64 and run

./make-Makefiles.bash

to complete the build.

3. Build the RGA library. Download: rga download

Preparation is now complete.

The overall approach is as follows (a rough sketch of the resulting loop appears after this list):

        1. FFmpeg pulls the stream.

        2. The stream is handed to MPP for decoding.

        3. After MPP decoding, RGA handles pixel-format conversion, cropping and similar work.

        4. The result is handed to Qt for display.
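
Put together, the per-frame flow looks roughly like the loop below. This is only a sketch: the thread/loop wrapper, the m_stop flag and the readLoop() name are assumptions, while MyFFmpegInit(), decode_simple() and convertdata() are the real functions shown in the sections that follow.

void MyFFmpeg::readLoop()
{
    while (!m_stop) {                                            // m_stop is an assumed stop flag
        // 1. FFmpeg pulls one packet from the RTSP stream
        int ret = av_read_frame(m_AVFormatContext, m_AVPacket);
        if (ret < 0)
            break;

        if (m_AVPacket->stream_index == m_videoIndex) {
            // 2./3. MPP decodes the packet; inside decode_simple() the decoded
            //       frame is converted to RGB888 by RGA (convertdata()) and
            //       emitted as a QImage via MySigSendMppImg().
            myMppDeCode.decode_simple(&Mppdata, m_AVPacket);
        }

        // 4. Qt displays the QImage in the connected slot (slotGetMppImg)
        av_packet_unref(m_AVPacket);
    }
}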

1. Initialization

1.1 Initialize FFmpeg

int MyFFmpeg::MyFFmpegInit()
{
    int i;
    int ret = -1;

    // Get the video playback URL
    QByteArray byteRtspUrl =m_rtspUrl.toLocal8Bit();
    char *pRtspUrl = byteRtspUrl.data();

    // Register all components; demuxers and codecs can only be used after this call
    av_register_all();

    // Initialize the network library
    avformat_network_init();

    // Allocate the AVFormatContext, the structure FFmpeg uses for demuxing (flv, mp4, rmvb, avi);
    // see https://blog.csdn.net/leixiaohua1020/article/details/14214705 for details
    m_AVFormatContext = avformat_alloc_context();

    // Set options
    AVDictionary *options = nullptr;
    // Use TCP as the RTSP transport protocol
    av_dict_set(&options, "rtsp_transport", "tcp", 0);

    // Maximum demux/decode delay, in microseconds
    av_dict_set(&options, "max_delay", "100000", 0);

    // "buffer_size": receive buffer size in bytes
    av_dict_set(&options, "buffer_size", "102400", 0);

    // Socket timeout for avformat_open_input, in microseconds (1000000 = 1 second)
    av_dict_set(&options, "stimeout", "1000000", 0);

    av_dict_set(&options, "threads", "4", 0);

    // Open the network or file stream
    ret = avformat_open_input(&m_AVFormatContext, pRtspUrl, nullptr, &options);
    if (ret != 0)
    {
        qDebug("Couldn't open input stream, ret=%d\n", ret);
        return ret;
    }

    // Read packets from the stream to get stream information
    if (avformat_find_stream_info(m_AVFormatContext, nullptr) < 0)
    {
        qDebug("Couldn't find stream information.\n");
        return -1;
    }

    // Find the video stream
    for (i = 0; i < m_AVFormatContext->nb_streams; i++)
    {
        if (m_AVFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            m_videoIndex = i;
            break;
        }
    }

    if (m_videoIndex == -1)
    {
        qDebug("Didn't find a video stream.\n");
        return -1;
    }
    // Get the image width and height
    srcWidth = m_AVFormatContext->streams[m_videoIndex]->codec->width;
    srcHight = m_AVFormatContext->streams[m_videoIndex]->codec->height;

    qDebug()<<"srcWidth"<<m_AVFormatContext->streams[m_videoIndex]->codec->width;
    qDebug()<<"srcHight"<<m_AVFormatContext->streams[m_videoIndex]->codec->height;

    m_AVPacket = (AVPacket *) malloc(sizeof(AVPacket)); // allocate a packet
    av_new_packet(m_AVPacket, srcWidth*srcHight); // allocate the packet's payload



    qDebug("============== MyFFmpegInit ok! ====================== ");

    return 0;
}
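
For reference, the member variables MyFFmpegInit() relies on might be declared roughly as below. This is only a sketch to make the snippet readable; the actual class header is not part of this post.

#include <QObject>
#include <QString>
extern "C" {
#include <libavformat/avformat.h>
}

class MyFFmpeg : public QObject
{
    Q_OBJECT
public:
    int MyFFmpegInit();

private:
    QString          m_rtspUrl;                    // RTSP URL to pull from
    AVFormatContext *m_AVFormatContext = nullptr;  // demuxer context
    AVPacket        *m_AVPacket        = nullptr;  // packet reused by av_read_frame
    int              m_videoIndex      = -1;       // index of the video stream
    int              srcWidth          = 0;        // stream width reported by FFmpeg
    int              srcHight          = 0;        // stream height reported by FFmpeg
};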

1.2 Initialize MPP

    MPP_RET ret         = MPP_OK;

    // base flow context
    MppCtx ctx          = nullptr;
    MppApi *mpi         = nullptr;

    // input / output
    MppPacket packet    = nullptr;
    MppFrame  frame     = nullptr;

    MpiCmd mpi_cmd      = MPP_CMD_BASE;
    MppParam param      = nullptr;
    RK_U32 need_split   = 1;

    // parameters for resource allocation
    // srcWidth / srcHight are the width and height obtained from FFmpeg
    RK_U32 width        = static_cast<RK_U32>(srcWidth);
    RK_U32 height       = static_cast<RK_U32>(srcHight);
    MppCodingType type  = MPP_VIDEO_CodingAVC;
    //MppCodingType type  = MPP_VIDEO_CodingHEVC;


    // resources
    char *buf           = nullptr;
    size_t packet_size  = 8*1024;
    MppBuffer pkt_buf   = nullptr;
    MppBuffer frm_buf   = nullptr;

    qDebug("mpi_dec_test start\n");
    memset(&Mppdata, 0, sizeof(Mppdata));


    qDebug("mpi_dec_test decoder test start w %d h %d type %d\n", width, height, type);

    // decoder demo
    ret = mpp_create(&ctx, &mpi);

    if (MPP_OK != ret) {

        qErrnoWarning("mpp_create failed\n");
        MyFFmpegMPPDeInit(&packet, &frame, ctx, buf, Mppdata);
    }

    // NOTE: decoder split mode needs to be set before mpp_init
    // have MPP split the input bitstream into frames internally
    mpi_cmd = MPP_DEC_SET_PARSER_SPLIT_MODE;
    param = &need_split;
    ret = mpi->control(ctx, mpi_cmd, param);
    if (MPP_OK != ret) {
        qErrnoWarning("mpi->control failed\n");
        MyFFmpegMPPDeInit(&packet, &frame, ctx, buf, Mppdata);
    }
    // blocking (queued) input
    mpi_cmd = MPP_SET_INPUT_BLOCK;
    param = &need_split;
    ret = mpi->control(ctx, mpi_cmd, param);
    if (MPP_OK != ret) {
        qErrnoWarning("mpi->control failed\n");
        MyFFmpegMPPDeInit(&packet, &frame, ctx, buf, Mppdata);
    }

    ret = mpp_init(ctx, MPP_CTX_DEC, type);
    if (MPP_OK != ret) {
        qErrnoWarning("mpp_init failed\n");
        MyFFmpegMPPDeInit(&packet, &frame, ctx, buf, Mppdata);
    }

    Mppdata.ctx            = ctx;
    Mppdata.mpi            = mpi;
    Mppdata.eos            = 0;
    Mppdata.packet_size    = packet_size;
    Mppdata.frame          = frame;
    Mppdata.frame_count    = 0;

    myMppDeCode.setWidthHight(srcWidth,srcHight);
    connect(&myMppDeCode, SIGNAL(MySigSendMppImg(QImage)), this, SLOT(slotGetMppImg(QImage)));
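
The error paths above call MyFFmpegMPPDeInit(), which is not listed in this post. A cleanup helper matching those call sites might look like the sketch below; it assumes the MpiDecLoopData layout used by decode_simple() further down and is not the author's original implementation.

void MyFFmpeg::MyFFmpegMPPDeInit(MppPacket *packet, MppFrame *frame, MppCtx ctx,
                                 char *buf, MppDecode::MpiDecLoopData &data)
{
    if (packet && *packet) {
        mpp_packet_deinit(packet);            // release the input packet wrapper
        *packet = nullptr;
    }
    if (frame && *frame) {
        mpp_frame_deinit(frame);              // release the output frame
        *frame = nullptr;
    }
    if (data.frm_grp) {
        mpp_buffer_group_put(data.frm_grp);   // return the external frame buffer group
        data.frm_grp = nullptr;
    }
    if (ctx)
        mpp_destroy(ctx);                     // destroy the decoder instance
    if (buf)
        free(buf);                            // free the temporary bitstream buffer
}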

2. MPP decoding and RGA format conversion

2.1 MPP decoding

MppPacket: holds encoded data, e.g. H.264/H.265 bitstream
MppFrame: holds decoded data, e.g. YUV or RGB frames

Decoder interface:

decode_put_packet:

MPP_RET decode_put_packet(MppCtx ctx,MppPacket packet)

ctx: the MPP decoder instance;

packet: the bitstream data to feed in;

Input modes: framed and non-framed. A raw bitstream can be fed in two ways. In the first, the data is already segmented into frames: every packet passed to decode_put_packet contains exactly one complete frame, no more and no less, so MPP can process the stream packet by packet. In the second, data is read by length and there is no way to tell whether a packet holds a complete frame, so MPP has to split frames internally; for this mode the need_split flag must be enabled before mpp_init via the MPP_DEC_SET_PARSER_SPLIT_MODE command of the control interface. The framed mode is more efficient but requires parsing and splitting the stream before feeding it; the non-framed mode is simpler to use at some cost in efficiency. The official decode demo uses the non-framed mode, and so does the code in this post.
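
For reference, enabling the split mode this paragraph describes is the same control call already used in the initialization code above:

// Enable internal frame splitting; this must happen before mpp_init().
RK_U32 need_split = 1;
ret = mpi->control(ctx, MPP_DEC_SET_PARSER_SPLIT_MODE, &need_split);
if (MPP_OK != ret)
    qErrnoWarning("failed to enable split mode, ret %d\n", ret);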

decode_get_frame:

MPP_RET decode_get_frame(MppCtx ctx,MppFrame *frame)

ctx: the MPP decoder instance;

frame: pointer used to return the MppFrame instance;

A complete decode pass combines the two functions above.

int MppDecode::decode_simple(MppDecode::MpiDecLoopData *data, AVPacket *av_packet)
{
    RK_U32 pkt_done = 0;
    RK_U32 pkt_eos  = 0;
    RK_U32 err_info = 0;
    MPP_RET ret = MPP_OK;
    MppCtx ctx  = data->ctx;
    MppApi *mpi = data->mpi;
    MppPacket packet = nullptr;
    MppFrame  frame  = nullptr;



    ret = mpp_packet_init(&packet, av_packet->data, av_packet->size);

    if(ret < 0)
    {
        return -1;
    }
    mpp_packet_set_pts(packet, av_packet->pts);


    //qDebug()<<"av_packet->data:"<<av_packet->data;
    do {
        RK_S32 times = 5;
        // send the packet first if packet is not done
        if (!pkt_done) {
            ret = mpi->decode_put_packet(ctx, packet);
            if (MPP_OK == ret)
                pkt_done = 1;
        }

        // then get all available frame and release
        do {
            RK_S32 get_frm = 0;
            RK_U32 frm_eos = 0;

try_again:
            ret = mpi->decode_get_frame(ctx, &frame);
            if (MPP_ERR_TIMEOUT == ret) {
                if (times > 0) {
                    times--;
                    mmsleep(2);
                    goto try_again;
                }
                mpp_err("decode_get_frame failed too much time\n");
            }

            //qDebug()<< "mpp_log" <<"get MPP_OK"<<MPP_OK;
            if (MPP_OK != ret) {
                mpp_err("decode_get_frame failed ret %d\n", ret);
                break;
            }


            if (frame)
            {
                if (mpp_frame_get_info_change(frame)) {
                    RK_U32 width = mpp_frame_get_width(frame);
                    RK_U32 height = mpp_frame_get_height(frame);
                    RK_U32 hor_stride = mpp_frame_get_hor_stride(frame);
                    RK_U32 ver_stride = mpp_frame_get_ver_stride(frame);
                    RK_U32 buf_size = mpp_frame_get_buf_size(frame);

                    qDebug()<<"mpp_log  :"<<"decode_get_frame get info changed found";
                    qDebug("mpp_log :decoder require buffer w:h [%d:%d] stride [%d:%d] buf_size %d",
                           width, height, hor_stride, ver_stride, buf_size);

                    ret = mpp_buffer_group_get_internal(&data->frm_grp, MPP_BUFFER_TYPE_ION);
                    if (ret) {
                        mpp_err("get mpp buffer group  failed ret %d\n", ret);
                        break;
                    }
                    mpi->control(ctx, MPP_DEC_SET_EXT_BUF_GROUP, data->frm_grp);

                    mpi->control(ctx, MPP_DEC_SET_INFO_CHANGE_READY, nullptr);
                } else {
                    err_info = mpp_frame_get_errinfo(frame) | mpp_frame_get_discard(frame);
                    if (err_info) {
                        qDebug("decoder_get_frame get err info:%d discard:%d.\n",
                               mpp_frame_get_errinfo(frame), mpp_frame_get_discard(frame));
                    }
                    data->frame_count++;
                    //qDebug("decode_get_frame get frame %d\n", data->frame_count);


                    if (!err_info){
                        //qDebug("no err_info");
                        //qDebug("frame:%p",frame);

                        MppBuffer buff = mpp_frame_get_buffer(frame);


                        if(dst_buf == nullptr)
                        {
                            dst_buf = (char*)malloc(srcWidth*srcHight*get_bpp_from_format(DST_FORMAT));
                        }

                        convertdata((char *)mpp_buffer_get_ptr(buff),dst_buf);
                        QImage qimg((uchar *)dst_buf,srcWidth,srcHight,QImage::Format_RGB888);
                        //*tempimg = qimg.copy();
                        emit MySigSendMppImg(qimg.copy());
                        //qDebug() << "yi zhen emit";

                    }
                }
                frm_eos = mpp_frame_get_eos(frame);
                mpp_frame_deinit(&frame);

                frame = nullptr;
                get_frm = 1;
            }

            // try get runtime frame memory usage
            if (data->frm_grp) {
                size_t usage = mpp_buffer_group_usage(data->frm_grp);
                if (usage > data->max_usage)
                    data->max_usage = usage;
            }

            // if last packet is send but last frame is not found continue
            if (pkt_eos && pkt_done && !frm_eos) {
                mmsleep(10);
                continue;
            }

            if (frm_eos) {
                qDebug("mpp_log :found last frame\n");
                break;
            }

            if (data->frame_num > 0 && data->frame_count >= data->frame_num) {
                data->eos = 1;
                break;
            }

            if (get_frm)
                continue;
            break;
        } while (1);

        if (data->frame_num > 0 && data->frame_count >= data->frame_num) {
            data->eos = 1;
            qDebug("mpp_log_reach max frame number %d\n", data->frame_count);
            break;
        }

        if (pkt_done)
            break;

        /*
         * Why sleep here:
         * mpi->decode_put_packet fails when the internal packet queue is full,
         * so wait until a packet has been consumed. Hardware decoding of one
         * 1080p frame usually takes about 2 ms, so sleeping 3 ms here is enough.
         */
        mmsleep(3);
    } while (1);
    mpp_packet_deinit(&packet);

    return ret;
}
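
decode_simple() uses mmsleep(), a small millisecond-sleep helper that is not shown in the post; under Qt it can simply wrap QThread::msleep(), for example:

#include <QThread>

// Hypothetical helper (the original is not shown): sleep for 'ms' milliseconds.
static inline void mmsleep(int ms)
{
    QThread::msleep(static_cast<unsigned long>(ms));
}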

2.2 RGA format conversion

int MppDecode::convertdata(char *srcdata, char *dst_buf)
{
    // rga
    im_rect 		src_rect;
    im_rect 		dst_rect;
    rga_buffer_t 	src;
    rga_buffer_t 	dst;
    IM_STATUS 		STATUS;

    int ret = 0;

    memset(&src_rect, 0, sizeof(src_rect));
    memset(&dst_rect, 0, sizeof(dst_rect));
    memset(&src, 0, sizeof(src));
    memset(&dst, 0, sizeof(dst));


    memset(dst_buf,0x00,srcWidth*srcHight*get_bpp_from_format(DST_FORMAT));

    src = wrapbuffer_virtualaddr(srcdata, srcWidth, srcHight, SRC_FORMAT);
    dst = wrapbuffer_virtualaddr(dst_buf, srcWidth, srcHight, DST_FORMAT);


    if(src.width == 0 || dst.width == 0) {
        printf("%s, %s\n", __FUNCTION__, imStrError());
        return -1;
    }
    src.format = SRC_FORMAT;
    dst.format = DST_FORMAT;

    ret = imcheck(src, dst, src_rect, dst_rect);
    if (IM_STATUS_NOERROR != ret) {
        printf("%d, check error! %s", __LINE__, imStrError((IM_STATUS)ret));
        return -1;
    }
    STATUS = imcvtcolor(src, dst, src.format, dst.format);
    //qDebug("resizing .... %s\n", imStrError(STATUS));

    return (STATUS == IM_STATUS_SUCCESS) ? 0 : -1;
}
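
The decoded frames are emitted as QImage objects through MySigSendMppImg() and received by slotGetMppImg(), which is not shown above. A minimal sketch of such a slot, assuming the image is displayed on a QLabel member named m_label (an assumption, not the author's code):

void MyFFmpeg::slotGetMppImg(QImage img)
{
    if (img.isNull())
        return;

    // Scale the RGB888 frame to the label size and show it.
    m_label->setPixmap(QPixmap::fromImage(img)
                           .scaled(m_label->size(), Qt::KeepAspectRatio));
}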

Testing

Without RGA doing the pixel-format conversion, most of the RK3399's CPU time is eaten by the conversion: decoding a 2K stream costs roughly 60% CPU.

With RGA handling the conversion, 2K decoding costs only about 30% CPU.

Notes

Although this approach does use MPP hardware decoding together with RGA, it is not the recommended way to decode when Qt is available.

Qt can decode directly with QMediaPlayer, which is able to invoke MPP hardware decoding by itself; see this article for details:

RK3399: playing an RTSP stream with QMediaPlayer, without FFmpeg
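
As a quick illustration of that alternative, a minimal QMediaPlayer setup for an RTSP URL might look like the sketch below (the RTSP address is a placeholder; whether hardware decoding is actually used depends on the platform's multimedia backend):

#include <QApplication>
#include <QMediaPlayer>
#include <QVideoWidget>
#include <QUrl>

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);

    QVideoWidget videoWidget;                 // render target
    QMediaPlayer player;
    player.setVideoOutput(&videoWidget);
    // Placeholder RTSP URL; replace with the camera's actual address.
    player.setMedia(QUrl("rtsp://192.168.1.100:554/stream"));

    videoWidget.show();
    player.play();

    return app.exec();
}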

Green-screen and similar problems are mostly caused by incorrect RGA image width/height configuration; see this article for details:

RK platform MPP and RGA: fixing green screen and image corruption when decoding H.265

Appendix

1. The author's own build of the ffmpeg+rga+mpp libraries, download link

2. A small demo that uses Qt with ffmpeg+rga+mpp to display a camera feed, download link
