
Using Qt with FFMPEG on Linux to read one frame from a camera and display it on a label widget

The following code calls the FFMPEG library to read one frame from the camera, converts it to RGB888, loads it into a QImage, and displays it on a label widget.

Development environment:

Operating system: Ubuntu 18.04 64-bit

Qt version: Qt 5.12

The code package has been uploaded to CSDN and can be downloaded if needed:

https://download.csdn.net/download/xiaolong1126626497/12233526

mainwindow.cpp file code:

#include "mainwindow.h"
#include "ui_mainwindow.h"
 
MainWindow::MainWindow(QWidget *parent)
    : QMainWindow(parent)
    , ui(new Ui::MainWindow)
{
    ui->setupUi(this);
    FFMPEG_Init_Config();
}
 
 
MainWindow::~MainWindow()
{
    delete ui;
}
 
int MainWindow::FFMPEG_Init_Config()
{
    AVInputFormat   *ifmt;
    AVFormatContext *pFormatCtx;
    AVCodecContext  *pCodecCtx;
    AVCodec         *pCodec;
    AVDictionary    *options=nullptr;
    AVPacket        *packet;
    AVFrame         *pFrame,*pFrameYUV;
 
    int videoindex;
    int i,ret,got_picture;
    /*1. FFMPEG initialization*/
    av_register_all();
    avcodec_register_all();
    avdevice_register_all(); //register the libavdevice components used to talk to multimedia devices
    /*2. Find the input format for the capture device*/
    ifmt=av_find_input_format("video4linux2");
    pFormatCtx=avformat_alloc_context();
    av_dict_set(&options,"video_size","640x480",0); //set the resolution delivered by the camera
    //av_dict_set(&options,"framerate","30",0);     //set the camera frame rate in frames per second, here 30 fps
    //the frame rate usually does not need to be set; it defaults to the maximum and depends on the output image size
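    //av_dict_set(&options,"input_format","mjpeg",0); //optional: request MJPEG output from the v4l2 device (assumption: the camera supports it)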
    if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,&options)!=0)
    {
        qDebug("輸入裝置打開失敗: /dev/video0\n");
        return -1;
    }
    if(avformat_find_stream_info(pFormatCtx,nullptr)<0)
    {
        qDebug("查找輸入流失敗.\n");
        return -2;
    }
    videoindex=-1;
    for(i=0;i<pFormatCtx->nb_streams;i++)
    {
        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
        {
            videoindex=i;
            break;
        }
    }
 
    if(videoindex==-1)
    {
        qDebug("視訊流查找失敗.\n");
        return -3;
    }
    pCodecCtx=pFormatCtx->streams[videoindex]->codec;
    qDebug("攝像頭尺寸(WxH): %d x %d \n",pCodecCtx->width, pCodecCtx->height);
    pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
    if(pCodec==nullptr)
    {
        qDebug("找不到編解碼器。\n");
        return -4;
    }
    if(avcodec_open2(pCodecCtx, pCodec,nullptr)<0)
    {
        qDebug("無法打開編解碼器。\n");
        return -5;
    }
 
    packet=(AVPacket *)av_malloc(sizeof(AVPacket));
    av_init_packet(packet); //initialize the packet fields before it is used
    pFrame=av_frame_alloc();
    pFrameYUV=av_frame_alloc();
    unsigned char *out_buffer=(unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height,16)); //buffer for one YUV420P frame (replaces the deprecated avpicture_get_size)
 
    av_image_fill_arrays(pFrameYUV->data,pFrameYUV->linesize,out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height,16);
 
    struct SwsContext *img_convert_ctx;
    img_convert_ctx=sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
 
    //read one frame of data
    if(av_read_frame(pFormatCtx, packet)>=0)
    {
        //size of the packet that was read
        qDebug("packet size=%d\n",packet->size);
        //check whether the packet belongs to the video stream
        if(packet->stream_index==videoindex)
        {
            //decode the data obtained from the camera into the pFrame structure
            ret=avcodec_decode_video2(pCodecCtx, pFrame,&got_picture,packet);
            if(ret<0)
            {
                qDebug("解碼Error.\n");
                return -6;
            }
            if(got_picture)
            {
                 size_t y_size=pCodecCtx->width*pCodecCtx->height;
                 sws_scale(img_convert_ctx,(const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);  //convert/scale the decoded frame to YUV420P using the parameters configured above
 
                 unsigned char *p=new unsigned char[y_size*3/2]; //buffer for the packed YUV420P data (Y plane plus the quarter-size U and V planes)
                 unsigned char *rgb24_p=new unsigned char[pCodecCtx->width*pCodecCtx->height*3];
 
                 //copy the YUV planes into one contiguous buffer
                 memcpy(p,pFrameYUV->data[0],y_size);
                 memcpy(p+y_size,pFrameYUV->data[1],y_size/4);
                 memcpy(p+y_size+y_size/4,pFrameYUV->data[2],y_size/4);
                 //convert the YUV data to RGB
                 YUV420P_to_RGB24(p,rgb24_p,pCodecCtx->width,pCodecCtx->height);
                 //load into a QImage and display it on the Qt label widget
                 QImage image(rgb24_p,pCodecCtx->width,pCodecCtx->height,QImage::Format_RGB888);
                 QPixmap my_pixmap;
                 my_pixmap.convertFromImage(image);
                 ui->label_DisplayImage->setPixmap(my_pixmap);
 
                 delete[] p; //free buffer
                 delete[] rgb24_p; //free buffer
            }
        }
    }
    sws_freeContext(img_convert_ctx);
    av_packet_unref(packet);   //release the data filled in by av_read_frame
    av_free(packet);
    av_frame_free(&pFrame);
    av_frame_free(&pFrameYUV);
    av_free(out_buffer);
    avcodec_close(pCodecCtx); //close the decoder
    avformat_close_input(&pFormatCtx); //close the input device
    return 0;
}
 
/**
 * Convert YUV420P to RGB24
 * @param data   packed YUV420P input (Y plane followed by the U and V planes)
 * @param rgb    output buffer, width*height*3 bytes
 * @param width  image width in pixels
 * @param height image height in pixels
 */
void MainWindow::YUV420P_to_RGB24(unsigned char *data, unsigned char *rgb, int width, int height)
{
    int index = 0;
    unsigned char *ybase = data;
    unsigned char *ubase = &data[width * height];
    unsigned char *vbase = &data[width * height * 5 / 4];
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            //Y is sampled per pixel, U and V are shared by each 2x2 block of pixels (YUV420P layout)
            u_char Y = ybase[x + y * width];
            u_char U = ubase[y / 2 * width / 2 + (x / 2)];
            u_char V = vbase[y / 2 * width / 2 + (x / 2)];
            int R = Y + 1.402 * (V - 128);
            int G = Y - 0.34413 * (U - 128) - 0.71414 * (V - 128);
            int B = Y + 1.772 * (U - 128);
            //clamp to [0,255]; otherwise out-of-range values wrap around and cause color artifacts
            rgb[index++] = qBound(0, R, 255); //R
            rgb[index++] = qBound(0, G, 255); //G
            rgb[index++] = qBound(0, B, 255); //B
        }
    }
}
 
 
//Entry point of the FFMPEG video thread (currently a placeholder: the capture loop is not implemented here)
void Thread_ffmpgVideo::run()
{
    while(1)
    {
        sleep(1);
    }
}
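
Note: the manual YUV420P-to-RGB24 conversion above can be skipped entirely by asking libswscale to convert the decoded frame straight to RGB24. The following is a minimal sketch of that variant (not part of the original project); it assumes the same pCodecCtx, pFrame and ui->label_DisplayImage used in FFMPEG_Init_Config():

struct SwsContext *rgb_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                                            pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB24,
                                            SWS_BICUBIC, NULL, NULL, NULL);
AVFrame *pFrameRGB = av_frame_alloc();
//alignment 1 keeps the RGB buffer tightly packed, so linesize[0] == width*3
int rgb_size = av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1);
unsigned char *rgb_buffer = (unsigned char *)av_malloc(rgb_size);
av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, rgb_buffer,
                     AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1);
//convert the decoded frame directly to RGB24
sws_scale(rgb_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize,
          0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
//copy() detaches the QImage from rgb_buffer before the buffer is freed
QImage image = QImage(rgb_buffer, pCodecCtx->width, pCodecCtx->height,
                      pFrameRGB->linesize[0], QImage::Format_RGB888).copy();
ui->label_DisplayImage->setPixmap(QPixmap::fromImage(image));
av_free(rgb_buffer);
av_frame_free(&pFrameRGB);
sws_freeContext(rgb_ctx);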
       

mainwindow.h file code:

#ifndef MAINWINDOW_H
#define MAINWINDOW_H
 
#include <QMainWindow>
#include <QImage>
#include <QThread>
 
extern "C"{
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
 
#include <libavutil/avassert.h>
#include <libavutil/channel_layout.h>
#include <libavutil/opt.h>
#include <libavutil/mathematics.h>
#include <libavutil/timestamp.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
#include "libavdevice/avdevice.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libavutil/imgutils.h"
/*
FFmpeg library for interacting with multimedia devices: libavdevice.
With this library the program can read data from the multimedia devices of the computer (or of other hardware), or output data to a specified multimedia device.
*/
}
 
class MainWindow;
//subclassing QThread for the FFMPEG worker thread
class Thread_ffmpgVideo : public QThread
{
public:
    MainWindow *mianwindow;  //pointer to the main window widget
protected:
    void run();  //function executed when the thread runs
};
 
QT_BEGIN_NAMESPACE
namespace Ui { class MainWindow; }
QT_END_NAMESPACE
 
class MainWindow : public QMainWindow
{
    Q_OBJECT
 
public:
    MainWindow(QWidget *parent = nullptr);
    ~MainWindow();
    int FFMPEG_Init_Config();
    void YUV420P_to_RGB24(unsigned char *data, unsigned char *rgb, int width, int height);
private:
    Ui::MainWindow *ui;
};
#endif // MAINWINDOW_H      
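
The Thread_ffmpgVideo class above is only declared: run() just sleeps and MainWindow never starts the thread. A sketch of how it could become a continuous-capture worker (an assumption, not part of the original code) is to give the class the Q_OBJECT macro, emit each converted frame as a QImage, and let a queued signal/slot connection update the label so that all widget access stays in the GUI thread. The grab_one_frame() helper is hypothetical and stands for the read/decode/convert steps shown in FFMPEG_Init_Config():

class Thread_ffmpgVideo : public QThread
{
    Q_OBJECT
signals:
    void frameReady(const QImage &image);     //emitted from the worker thread for every converted frame
protected:
    void run() override
    {
        while (!isInterruptionRequested())
        {
            QImage frame = grab_one_frame();  //hypothetical helper: read + decode + convert one camera frame
            if (!frame.isNull())
                emit frameReady(frame);
        }
    }
};

//in MainWindow, the connection would look like this:
//connect(&thread, &Thread_ffmpgVideo::frameReady, this, [this](const QImage &img){
//    ui->label_DisplayImage->setPixmap(QPixmap::fromImage(img));
//});
//thread.start();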

main.cpp:

#include "mainwindow.h"
#include <QApplication>
 
int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    MainWindow w;
    w.show();
    return a.exec();
}      

.pro project file:

QT       += core gui
 
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
 
CONFIG += c++11
 
# The following define makes your compiler emit warnings if you use
# any Qt feature that has been marked deprecated (the exact warnings
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
 
# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000    # disables all the APIs deprecated before Qt 6.0.0
 
SOURCES += \
    main.cpp \
    mainwindow.cpp
 
HEADERS += \
    mainwindow.h
 
FORMS += \
    mainwindow.ui
 
# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target
 
#specify the library search path
unix:LIBS += -L$$PWD/ffmpeg_x264_lib/lib -lavcodec
unix:LIBS += -L$$PWD/ffmpeg_x264_lib/lib -lavfilter
unix:LIBS += -L$$PWD/ffmpeg_x264_lib/lib -lavutil
unix:LIBS += -L$$PWD/ffmpeg_x264_lib/lib -lavdevice
unix:LIBS += -L$$PWD/ffmpeg_x264_lib/lib -lavformat
unix:LIBS += -L$$PWD/ffmpeg_x264_lib/lib -lpostproc
unix:LIBS += -L$$PWD/ffmpeg_x264_lib/lib -lswscale
unix:LIBS += -L$$PWD/ffmpeg_x264_lib/lib -lswresample
unix:LIBS += -L$$PWD/ffmpeg_x264_lib/lib -lx264
 
#specify the header include path
INCLUDEPATH+=$$PWD/ffmpeg_x264_lib/include      
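
If the FFmpeg development packages are installed system-wide (for example from the distribution repositories) instead of in the local ffmpeg_x264_lib directory, one possible alternative (an assumption about the setup, not part of the original project) is to let qmake resolve the compile and link flags through pkg-config, replacing the LIBS and INCLUDEPATH lines above:

CONFIG += link_pkgconfig
PKGCONFIG += libavcodec libavformat libavutil libavdevice libswscale libswresample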