WYY
2025-02-27 aeea0feedc9e25294962be52c5e1dc7e34fdb006
Client/¶­Åìèº/code/qffmpeg.cpp
@@ -24,7 +24,6 @@
    if (pSwsContext) {
        sws_freeContext(pSwsContext);
    }
    avpicture_free(&pAVPicture);
}
bool QFFmpeg::Init()
@@ -46,9 +45,6 @@
    // find the video stream index
    videoStreamIndex = -1;
//    qDebug()<<"nb:"<<pAVFormatContext->nb_streams;
//    qDebug()<<"type:"<<pAVFormatContext->streams[0]->codec->codec_type;
//    qDebug()<<"AVMEDIA_TYPE_VIDEO:"<<AVMEDIA_TYPE_VIDEO;
    for (uint i = 0; i < pAVFormatContext->nb_streams; i++) {
        if (pAVFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStreamIndex = i;
@@ -67,8 +63,6 @@
    videoWidth=pAVCodecContext->width;
    videoHeight=pAVCodecContext->height;
    avpicture_alloc(&pAVPicture,AV_PIX_FMT_RGB24,videoWidth,videoHeight);
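    // pAVPicture holds the RGB24 destination buffer; it is released in the destructor via avpicture_free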
    AVCodec *pAVCodec;
    // find the decoder for the video stream
@@ -79,7 +73,6 @@
    result=avcodec_open2(pAVCodecContext,pAVCodec,NULL);
    if (result<0){
        qDebug()<<"打开解码器失败";
        avpicture_free(&pAVPicture);
        sws_freeContext(pSwsContext);
        avformat_close_input(&pAVFormatContext);
        return false;
@@ -91,23 +84,82 @@
void QFFmpeg::Play()
{
    // read the video frame by frame
    int frameFinished = 0;
    while (av_read_frame(pAVFormatContext, &pAVPacket) >= 0) {
        if (pAVPacket.stream_index == videoStreamIndex) {
            qDebug() << "开始解码" << QDateTime::currentDateTime().toString("yyyy-MM-dd HH:mm:ss");
            avcodec_decode_video2(pAVCodecContext, pAVFrame, &frameFinished, &pAVPacket);
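            // legacy decode API: frameFinished becomes non-zero once a complete frame is available in pAVFrame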
            if (frameFinished) {
                mutex.lock();
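                // the lock presumably guards the shared pSwsContext/pAVPicture members while they are read here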
                sws_scale(pSwsContext, (const uint8_t* const *)pAVFrame->data, pAVFrame->linesize, 0, videoHeight, pAVPicture.data, pAVPicture.linesize);
                QImage image(pAVPicture.data[0], videoWidth, videoHeight, QImage::Format_RGB888);
                QImage copyImage = image.copy(); // deep copy
                emit GetImage(copyImage, this->index);
                qDebug() << "解码成功,发送图像信号";
                mutex.unlock();
            }
        }
        av_packet_unref(&pAVPacket);
    }
    AVPacket packet;
    int videoStreamIndex = -1;
    // open the video file
    if (avformat_open_input(&pAVFormatContext, url.toStdString().c_str(), NULL, NULL) != 0)
        return;
    // èŽ·å–æµä¿¡æ¯
    avformat_find_stream_info(pAVFormatContext, NULL);
    // locate the video stream index
    for (uint i = 0; i < pAVFormatContext->nb_streams; i++) {
        if (pAVFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStreamIndex = i;
            break;
        }
    }
    // èŽ·å–è§£ç å™¨
    AVCodec *codec = avcodec_find_decoder(pAVFormatContext->streams[videoStreamIndex]->codec->codec_id);
    if (!codec) {
        qDebug() << "未找到解码器";
        return;
    }
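    // pAVCodecContext is assumed to be the class member codec context prepared in Init()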
    if (avcodec_open2(pAVCodecContext, codec, NULL) < 0) {
        qDebug() << "打开解码器失败";
        return;
    }
    // åˆå§‹åŒ–帧
    pAVFrame = av_frame_alloc();
    AVFrame *pFrameRGB = av_frame_alloc();
    uint8_t *buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_RGB24, pAVCodecContext->width, pAVCodecContext->height));
    avpicture_fill((AVPicture *)pFrameRGB, buffer, AV_PIX_FMT_RGB24, pAVCodecContext->width, pAVCodecContext->height);
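    // buffer now backs pFrameRGB's data/linesize planes for the RGB24 output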
    // åˆå§‹åŒ–缩放上下文
    struct SwsContext *sws_ctx = sws_getContext(pAVCodecContext->width,
                                                pAVCodecContext->height,
                                                pAVCodecContext->pix_fmt,
                                                pAVCodecContext->width,
                                                pAVCodecContext->height,
                                                AV_PIX_FMT_RGB24,
                                                SWS_BILINEAR,
                                                NULL,
                                                NULL,
                                                NULL);
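    // sws_ctx converts frames from the decoder's native pixel format to RGB24 at the same resolution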
    while (av_read_frame(pAVFormatContext, &packet) >= 0) {
        if (packet.stream_index == videoStreamIndex) {
            avcodec_send_packet(pAVCodecContext, &packet);
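            // send/receive decode API: one packet in, zero or more frames out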
            while (avcodec_receive_frame(pAVCodecContext, pAVFrame) == 0) {
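                // convert the decoded frame to RGB24 for the QImage below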
                sws_scale(sws_ctx,
                        (const uint8_t **)pAVFrame->data,
                        pAVFrame->linesize,
                        0,
                        pAVCodecContext->height,
                        pFrameRGB->data,
                        pFrameRGB->linesize);
                QImage img(pFrameRGB->data[0],
                        pAVCodecContext->width,
                        pAVCodecContext->height,
                        QImage::Format_RGB888);
                // deep copy: pFrameRGB's buffer is overwritten by the next frame and freed below
                emit GetImage(img.copy(), this->index);
            }
        }
        av_packet_unref(&packet);
    }
    avformat_close_input(&pAVFormatContext);
    av_frame_free(&pAVFrame);
    av_frame_free(&pFrameRGB);
    av_free(buffer);
    sws_freeContext(sws_ctx);
}