11. QT - Implementing an audio player with ffmpeg + QAudioOutput
Posted by 阿新 on 2020-09-11
1. Preface

QAudioOutput only accepts raw PCM data as input, so files in formats such as MP3, WAV, or AAC have to be demuxed and decoded before they can be played.

Qt does provide the QMediaPlayer class, which can demux and decode, but its codec support is platform-dependent: if the platform itself cannot play a given format, neither can QMediaPlayer. Interested readers can try this for themselves.

So in this post we will use ffmpeg + QAudioOutput to implement a simple audio player.
Before starting, you should already have read:
- 2. AVFormatContext and AVInputFormat
- 3. Using AVPacket
- 4. FFMPEG-AVFrame
- 5. AVStream and AVCodecParameters
- 6. AVCodecContext and AVCodec
- 7. Using SwrContext for audio resampling
- 8. ffmpeg - common basic knowledge
- 9. Downloading ffmpeg and building both 32-bit and 64-bit targets in QT
- 10. QT - Using the QAudioOutput class
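To make the point from the preface concrete, that QAudioOutput only consumes raw PCM, here is a minimal sketch (not from the original post) that plays one second of a 440 Hz sine wave generated in memory. It assumes Qt 5 with `QT += multimedia`; the tone, duration, and buffer handling are arbitrary illustration choices.

```cpp
// Minimal sketch: QAudioOutput playing raw PCM generated in memory (Qt 5 QtMultimedia).
#include <QCoreApplication>
#include <QAudioOutput>
#include <QAudioFormat>
#include <QBuffer>
#include <QtMath>

int main(int argc, char *argv[])
{
    QCoreApplication a(argc, argv);

    QAudioFormat format;
    format.setSampleRate(44100);
    format.setChannelCount(2);
    format.setSampleSize(16);
    format.setCodec("audio/pcm");
    format.setByteOrder(QAudioFormat::LittleEndian);
    format.setSampleType(QAudioFormat::SignedInt);

    // One second of a 440 Hz sine wave, interleaved stereo, signed 16-bit little-endian.
    QByteArray pcm;
    for (int i = 0; i < 44100; i++) {
        qint16 sample = qint16(qSin(2.0 * M_PI * 440.0 * i / 44100.0) * 30000);
        for (int ch = 0; ch < 2; ch++) {
            pcm.append(char(sample & 0xff));
            pcm.append(char((sample >> 8) & 0xff));
        }
    }
    QBuffer *buffer = new QBuffer(&a);
    buffer->setData(pcm);
    buffer->open(QIODevice::ReadOnly);

    QAudioOutput *audio = new QAudioOutput(format, &a);
    audio->start(buffer);   // QAudioOutput pulls raw PCM straight from the QIODevice

    QObject::connect(audio, &QAudioOutput::stateChanged, [&](QAudio::State s){
        if (s == QAudio::IdleState)   // finished draining the buffer
            a.quit();
    });
    return a.exec();
}
```

In the player that follows, the decoded and resampled output of FFmpeg takes the place of this in-memory buffer.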
#include "playthread.h" playthread::playthread() { audio=NULL; type = control_none; } bool playthread::initAudio(int SampleRate) { QAudioFormat format; if(audio!=NULL) return true; format.setSampleRate(SampleRate); //設定取樣率 format.setChannelCount(2); //設定通道數 format.setSampleSize(16); //樣本資料16位 format.setCodec("audio/pcm"); //播出格式為pcm格式 format.setByteOrder(QAudioFormat::LittleEndian); //預設小端模式 format.setSampleType(QAudioFormat::UnSignedInt); //無符號整形數 QAudioDeviceInfo info(QAudioDeviceInfo::defaultOutputDevice()); //選擇預設輸出裝置 // foreach(int count,info.supportedChannelCounts()) // { // qDebug()<<"輸出裝置支援的通道數:"<<count; // } // foreach(int count,info.supportedSampleRates()) // { // qDebug()<<"輸出裝置支援的取樣率:"<<count; // } // foreach(int count,info.supportedSampleSizes()) // { // qDebug()<<"輸出裝置支援的樣本資料位數:"<<count; // } if (!info.isFormatSupported(format)) { qDebug()<<"輸出裝置不支援該格式,不能播放音訊"; return false; } audio = new QAudioOutput(format, this); audio->setBufferSize(100000); return true; } void playthread::play(QString filePath) { this->filePath = filePath; type = control_play; if(!this->isRunning()) { this->start(); } } void playthread::stop() { if(this->isRunning()) { type = control_stop; } } void playthread::pause() { if(this->isRunning()) { type = control_pause; } } void playthread::resume() { if(this->isRunning()) { type = control_resume; } } void playthread::seek(int value) { if(this->isRunning()) { seekMs = value; type = control_seek; } } void playthread::debugErr(QString prefix, int err) //根據錯誤編號獲取錯誤資訊並列印 { char errbuf[512]={0}; av_strerror(err,errbuf,sizeof(errbuf)); qDebug()<<prefix<<":"<<errbuf; emit ERROR(prefix+":"+errbuf); } bool playthread::runIsBreak() //處理控制,判斷是否需要停止 { bool ret = false; //處理播放暫停 if(type == control_pause) { while(type == control_pause) { audio->suspend(); msleep(500); } if(type == control_resume) { audio->resume(); } } if(type == control_play) //重新播放 { ret = true; if(audio->state()== QAudio::ActiveState) audio->stop(); } if(type == control_stop) //停止 { ret = true; if(audio->state()== QAudio::ActiveState) audio->stop(); } return ret; } void playthread::runPlay() { int ret; int destMs,currentMs; if(audio==NULL) { emit ERROR("輸出裝置不支援該格式,不能播放音訊"); return ; } //初始化網路庫 (可以開啟rtsp rtmp http 協議的流媒體視訊) avformat_network_init(); AVFormatContext *pFmtCtx=NULL; ret = avformat_open_input(&pFmtCtx, this->filePath.toLocal8Bit().data(),NULL, NULL) ; //開啟音視訊檔案並建立AVFormatContext結構體以及初始化. 
if (ret!= 0) { debugErr("avformat_open_input",ret); return ; } ret = avformat_find_stream_info(pFmtCtx, NULL); //初始化流資訊 if (ret!= 0) { debugErr("avformat_find_stream_info",ret); return ; } int audioindex=-1; audioindex = av_find_best_stream(pFmtCtx, AVMEDIA_TYPE_AUDIO, -1, -1, NULL, 0); qDebug()<<"audioindex:"<<audioindex; AVCodec *acodec = avcodec_find_decoder(pFmtCtx->streams[audioindex]->codecpar->codec_id);//獲取codec AVCodecContext *acodecCtx = avcodec_alloc_context3(acodec); //構造AVCodecContext ,並將vcodec填入AVCodecContext中 avcodec_parameters_to_context(acodecCtx, pFmtCtx->streams[audioindex]->codecpar); //初始化AVCodecContext ret = avcodec_open2(acodecCtx, NULL,NULL); //開啟解碼器,由於之前呼叫avcodec_alloc_context3(vcodec)初始化了vc,那麼codec(第2個引數)可以填NULL if (ret!= 0) { debugErr("avcodec_open2",ret); return ; } SwrContext *swrctx =NULL; swrctx=swr_alloc_set_opts(swrctx, av_get_default_channel_layout(2),AV_SAMPLE_FMT_S16,44100, acodecCtx->channel_layout, acodecCtx->sample_fmt,acodecCtx->sample_rate, NULL,NULL); swr_init(swrctx); destMs = av_q2d(pFmtCtx->streams[audioindex]->time_base)*1000*pFmtCtx->streams[audioindex]->duration; qDebug()<<"位元速率:"<<acodecCtx->bit_rate; qDebug()<<"格式:"<<acodecCtx->sample_fmt; qDebug()<<"通道:"<<acodecCtx->channels; qDebug()<<"取樣率:"<<acodecCtx->sample_rate; qDebug()<<"時長:"<<destMs; qDebug()<<"解碼器:"<<acodec->name; AVPacket * packet =av_packet_alloc(); AVFrame *frame =av_frame_alloc(); audio->stop(); QIODevice*io = audio->start(); while(1) { if(runIsBreak()) break; if(type == control_seek) { av_seek_frame(pFmtCtx, audioindex, seekMs/(double)1000/av_q2d(pFmtCtx->streams[audioindex]->time_base),AVSEEK_FLAG_BACKWARD); type = control_none; emit seekOk(); } ret = av_read_frame(pFmtCtx, packet); if (ret!= 0) { debugErr("av_read_frame",ret); emit duration(destMs,destMs); break ; } //解碼一幀資料 ret = avcodec_send_packet(acodecCtx, packet); av_packet_unref(packet); if (ret != 0) { debugErr("avcodec_send_packet",ret); continue ; } if(packet->stream_index==audioindex) { while( avcodec_receive_frame(acodecCtx, frame) == 0) { if(runIsBreak()) break; uint8_t *data[2] = { 0 }; int byteCnt=frame->nb_samples * 2 * 2; unsigned char *pcm = new uint8_t[byteCnt]; //frame->nb_samples*2*2表示分配樣本資料量*兩通道*每通道2位元組大小 data[0] = pcm; //輸出格式為AV_SAMPLE_FMT_S16(packet型別),所以轉換後的LR兩通道都存在data[0]中 ret = swr_convert(swrctx, data, frame->nb_samples, //輸出 (const uint8_t**)frame->data,frame->nb_samples ); //輸入 //將重取樣後的data資料傳送到輸出裝置,進行播放 while (audio->bytesFree() < byteCnt) { if(runIsBreak()) break; msleep(10); } if(!runIsBreak()) io->write((const char *)pcm,byteCnt); currentMs = av_q2d(pFmtCtx->streams[audioindex]->time_base)*1000*frame->pts; //qDebug()<<"時長:"<<destMs<<currentMs; emit duration(currentMs,destMs); delete[] pcm; } } } //釋放記憶體 av_frame_free(&frame); av_packet_free(&packet); swr_free(&swrctx); avcodec_free_context(&acodecCtx); avformat_close_input(&pFmtCtx); } void playthread::run() { if(!initAudio(44100)) { emit ERROR("輸出裝置不支援該格式,不能播放音訊"); } while(1) { switch(type) { case control_none: msleep(100); break; case control_play : type=control_none;runPlay(); break; //播放 default: type=control_none; break; } } }
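The original post does not show playthread.h. The sketch below is reconstructed from the names used in playthread.cpp above (the members, control values, and signals), so details such as the enum's name and the `volatile` qualifier are assumptions rather than the author's actual header.

```cpp
// playthread.h - reconstructed sketch matching playthread.cpp above.
#ifndef PLAYTHREAD_H
#define PLAYTHREAD_H

#include <QThread>
#include <QAudioOutput>
#include <QDebug>

extern "C" {                       // FFmpeg is a C library
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include "libswresample/swresample.h"
}

class playthread : public QThread
{
    Q_OBJECT
public:
    enum ControlType {             // commands polled by run()/runIsBreak()
        control_none,
        control_play,
        control_stop,
        control_pause,
        control_resume,
        control_seek
    };

    playthread();
    bool initAudio(int SampleRate);
    void play(QString filePath);
    void stop();
    void pause();
    void resume();
    void seek(int value);          // seek target in milliseconds

signals:
    void ERROR(QString err);                   // decoding/playback error text
    void duration(int currentMs, int destMs);  // playback progress
    void seekOk();                             // seek request has been handled

protected:
    void run() override;
    void runPlay();
    bool runIsBreak();
    void debugErr(QString prefix, int err);

private:
    QAudioOutput *audio;
    QString filePath;
    volatile ControlType type;     // written by the UI thread, read by run()
    int seekMs;
};

#endif // PLAYTHREAD_H
```

The design keeps all FFmpeg and QAudioOutput calls on the worker thread: the UI thread only writes a command into `type` (plus `seekMs`/`filePath`), and `run()`/`runIsBreak()` poll it. Note that the .pro file also needs `QT += multimedia` and the FFmpeg include and library paths set up as described in article 9.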
4.2 widget UI class
The work on the UI side is much simpler; widget.cpp is shown below:
#include "widget.h" #include "ui_widget.h" #include <QDebug> Widget::Widget(QWidget *parent) : QWidget(parent) , ui(new Ui::Widget) { ui->setupUi(this); this->setAcceptDrops(true); thread = new playthread(); connect(thread,SIGNAL(duration(int,int)),this,SLOT(onDuration(int,int))); connect(thread,SIGNAL(seekOk()),this,SLOT(onSeekOk())); void duration(long currentMs,long destMs); //播放時長 thread->start(); sliderSeeking =false; } Widget::~Widget() { delete ui; thread->stop(); } void Widget::onSeekOk() { sliderSeeking=false; } void Widget::onDuration(int currentMs,int destMs) //時長 { static int currentMs1=-1,destMs1=-1; if(currentMs1==currentMs&&destMs1==destMs) { return; } currentMs1 = currentMs; destMs1 = destMs; qDebug()<<"onDuration:"<<currentMs<<destMs<<sliderSeeking; QString currentTime = QString("%1:%2:%3").arg(currentMs1/360000%60,2,10,QChar('0')).arg(currentMs1/6000%60,2,10,QChar('0')).arg(currentMs1/1000%60,2,10,QChar('0')); QString destTime = QString("%1:%2:%3").arg(destMs1/360000%60,2,10,QChar('0')).arg(destMs1/6000%60,2,10,QChar('0')).arg(destMs1/1000%60,2,10,QChar('0')); ui->label_duration->setText(currentTime+"/"+destTime); if(!sliderSeeking) //未滑動 { ui->slider->setMaximum(destMs); ui->slider->setValue(currentMs); } } void Widget::dragEnterEvent(QDragEnterEvent *event) { if(event->mimeData()->hasUrls()) //判斷拖的型別 { event->acceptProposedAction(); } else { event->ignore(); } } void Widget::dropEvent(QDropEvent *event) { if(event->mimeData()->hasUrls()) //判斷放的型別 { QList<QUrl> List = event->mimeData()->urls(); if(List.length()!=0) { ui->line_audioPath->setText(List[0].toLocalFile()); } } else { event->ignore(); } } void Widget::on_btn_start_clicked() { sliderSeeking=false; thread->play(ui->line_audioPath->text()); } void Widget::on_btn_stop_clicked() { thread->stop(); } void Widget::on_btn_pause_clicked() { thread->pause(); } void Widget::on_btn_resume_clicked() { thread->resume(); } void Widget::on_slider_sliderPressed() { sliderSeeking=true; } void Widget::on_slider_sliderReleased() { thread->seek(ui->slider->value()); }