// qt/LedOK/program/videosplitthread.cpp
#include "videosplitthread.h"
#include <QImage>
#include <QPainter>
#include <QDebug>
extern "C"{
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
}
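// Cleanup callback for QImage: frees the heap pixel buffer once the QImage is destroyed.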
static void imgCleanupHandler(void *info) {
    delete [] (uchar*)info;
}
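// Copies the split parameters and the source file path into members; the thread deletes itself when it finishes.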
VideoSplitThread::VideoSplitThread(int elew, int eleh, int maxLen, int sph, std::vector<int> &widths, bool isVer, QPointF pos, QByteArray file) : mEleW(elew), mEleH(eleh), maxLen(maxLen), mSPH(sph), mWidths(widths), pos(pos), file(file), isVer(isVer) {
    connect(this, &QThread::finished, this, &QThread::deleteLater);
}
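// Decodes the source video, scales each frame to mEleW x mEleH, repaints it as mWidths-sized
// segments laid out side by side, re-encodes the result as H.264 and muxes it into
// "<file>-square.mp4". Progress is reported via emProgress, errors (or an empty string) via emErr.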
void VideoSplitThread::run() {
    AVFormatContext *in_fmt = avformat_alloc_context(), *out_fmt = 0;
    AVCodecContext *de_ctx = 0, *en_ctx = 0;
    QString err;
    char buf[AV_ERROR_MAX_STRING_SIZE];
    int ret;
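    // Inner scope: keeps locals such as outfile out of scope at the free: label,
    // so the early "goto free" jumps do not cross any non-trivial initialization.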
    {
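        // Open the input file and probe its streams.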
        if((ret = avformat_open_input(&in_fmt, file.constData(), 0, 0)) < 0) {
            err = QString("Couldn't open input stream. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
            goto free;
        }
        if((ret = avformat_find_stream_info(in_fmt, nullptr)) < 0) {
            err = QString("Couldn't find stream information. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
            goto free;
        }
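        // Create an MP4 output context; the output path is the input path with "-square.mp4" appended.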
        auto outfile = file+"-square.mp4";
        if((ret = avformat_alloc_output_context2(&out_fmt, 0, "mp4", outfile.constData())) < 0) {
            err = QString("avformat_alloc_output_context2 fail. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
            goto free;
        }
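        // Mirror every input stream (except data streams) in the output. Non-video streams keep
        // their codec parameters; the first video stream gets new parameters further below.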
        int vi_idx = -1;
        AVStream *out_vi_stream = nullptr;
        for(uint ss=0; ss<in_fmt->nb_streams; ss++) {
            AVStream *stream = in_fmt->streams[ss];
            qDebug()<<"codec_type"<<av_get_media_type_string(stream->codecpar->codec_type);
            if(stream->codecpar->codec_type == AVMEDIA_TYPE_DATA) continue;
            AVStream *out_stream = avformat_new_stream(out_fmt, 0);
            if(vi_idx == -1 && stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
                vi_idx = ss;
                out_vi_stream = out_stream;
            } else {
                if((ret = avcodec_parameters_copy(out_stream->codecpar, stream->codecpar)) < 0) {
                    err = QString("avcodec_parameters_copy fail. ") + av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
                    goto free;
                }
            }
            out_stream->time_base = stream->time_base;
            out_stream->start_time = stream->start_time;
            out_stream->duration = stream->duration;
        }
        if(vi_idx == -1) {
            err = "Didn't find a Video Stream";
            goto free;
        }
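        // Find and open a decoder for the selected video stream (4 decoding threads).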
        auto codecpar = in_fmt->streams[vi_idx]->codecpar;
        auto decoder = avcodec_find_decoder(codecpar->codec_id);
        if(decoder==0) {
            err = "Could not find Video Decoder";
            goto free;
        }
        de_ctx = avcodec_alloc_context3(decoder);
        de_ctx->thread_count = 4;
        avcodec_parameters_to_context(de_ctx, codecpar);
        if(avcodec_open2(de_ctx, decoder, 0) < 0) {
            err = "Could not open Video decode Ctx";
            goto free;
        }
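        // Output video parameters: H.264, YUV420P. Depending on the split direction, one edge
        // is maxLen and the other is mSPH per segment times the number of segments.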
        auto outPar = out_vi_stream->codecpar;
        outPar->codec_type = AVMEDIA_TYPE_VIDEO;
        outPar->codec_id = AV_CODEC_ID_H264;
        outPar->format = AV_PIX_FMT_YUV420P;
        if(isVer) {
            outPar->height = maxLen;
            outPar->width = mSPH*mWidths.size();
        } else {
            outPar->width = maxLen;
            outPar->height = mSPH*mWidths.size();
        }
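        // Configure the H.264 encoder: bit rate derived from the output resolution,
        // GOP size taken over from the decoder.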
        auto encoder = avcodec_find_encoder(outPar->codec_id);
        if(encoder==0) {
            err = "Could not find H.264 Encoder";
            goto free;
        }
        en_ctx = avcodec_alloc_context3(encoder);
        en_ctx->thread_count = 4;
        avcodec_parameters_to_context(en_ctx, outPar);
        en_ctx->bit_rate = outPar->width * outPar->height * 6;
        en_ctx->gop_size = de_ctx->gop_size;
        en_ctx->max_b_frames = 3;
        en_ctx->time_base = out_vi_stream->time_base;
        if((ret = avcodec_open2(en_ctx, encoder, 0)) < 0) {
            err = QString("Open video encode ctx failed. ") + av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
            goto free;
        }
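        // Open the output file (unless the muxer needs no file) and write the container header.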
        if(out_fmt->oformat->flags & AVFMT_NOFILE) qDebug()<<"AVFMT_NOFILE";
        else if((ret = avio_open(&out_fmt->pb, outfile.constData(), AVIO_FLAG_WRITE)) < 0) {
            err = QString("avio_open fail. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
            goto free;
        }
        if((ret = avformat_write_header(out_fmt, 0)) < 0) {
            err = QString("avformat_write_header fail. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
            goto free;
        }
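        // Two scalers: one converts decoded frames to an mEleW x mEleH RGB32 tile,
        // the other converts the repainted RGB32 canvas to YUV420P for the encoder.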
        auto sws_ctx = sws_getContext(de_ctx->width, de_ctx->height, de_ctx->pix_fmt, mEleW, mEleH, AV_PIX_FMT_RGB32, SWS_FAST_BILINEAR, 0, 0, 0);
        auto out_sws_ctx = sws_getContext(outPar->width, outPar->height, AV_PIX_FMT_RGB32, outPar->width, outPar->height, AV_PIX_FMT_YUV420P, SWS_FAST_BILINEAR, 0, 0, 0);
        auto packet = av_packet_alloc();
        auto frm = av_frame_alloc();
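        // RGB32 scratch images: img receives each scaled source frame, out_img is the canvas the
        // painter rearranges. Linesizes are rounded up to a multiple of 64 bytes.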
        int img_linesize[4]{(mEleW*4+63)/64*64};
        uint8_t *img_data[4]{new uchar[img_linesize[0] * mEleH]};
        QImage img(img_data[0], mEleW, mEleH, img_linesize[0], QImage::Format_ARGB32, imgCleanupHandler, img_data[0]);
        int out_img_linesize[4]{(outPar->width*4+63)/64*64};
        uint8_t *out_img_data[4]{new uchar[out_img_linesize[0] * outPar->height]};
        QImage out_img(out_img_data[0], outPar->width, outPar->height, out_img_linesize[0], QImage::Format_ARGB32, imgCleanupHandler, out_img_data[0]);
        QPainter painter(&out_img);
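        // Main loop: demux -> decode -> repaint onto the split canvas -> encode -> mux.
        // Non-video packets are copied through unchanged; a NULL packet flushes the decoder at EOF.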
        while(1) {
            if((ret = av_read_frame(in_fmt, packet)) < 0) {
                if(ret!=AVERROR_EOF) {
                    err = QString("Read packet fail: ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
                    break;
                }
                ret = avcodec_send_packet(de_ctx, 0);
            } else {
                if(packet->stream_index != vi_idx) {
                    av_interleaved_write_frame(out_fmt, packet);
                    continue;
                }
                ret = avcodec_send_packet(de_ctx, packet);
            }
            if(ret < 0) {
                err = QString("avcodec_send_packet fail. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
                break;
            }
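            // Pull every frame the decoder has ready; once it reports EOF, flush the encoder instead.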
            while((ret = avcodec_receive_frame(de_ctx, frm)) != AVERROR(EAGAIN)) {
                if(ret < 0) {
                    if(ret!=AVERROR_EOF) {
                        err = QString("Receive frame fail: ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
                        goto free2;
                    }
                    ret = avcodec_send_frame(en_ctx, 0);
                } else {
                    sws_scale(sws_ctx, frm->data, frm->linesize, 0, de_ctx->height, img_data, img_linesize);
                    auto apos = pos;
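                    // Repaint: draw the scaled tile once per segment, shifting the draw position
                    // by the previous segment length along the split axis and by mSPH across it,
                    // and limiting the drawn source rect so each pass covers one segment.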
                    if(isVer) {
                        painter.drawImage(apos, img, QRectF(0, 0, img.width(), mWidths[0]-apos.y()));
                        for(int i=1; i<(int)mWidths.size(); i++) {
                            apos.ry() -= mWidths[i-1];
                            apos.rx() += mSPH;
                            painter.drawImage(apos, img, QRectF(0, 0, img.width(), mWidths[i]-apos.y()));
                        }
                    } else {
                        painter.drawImage(apos, img, QRectF(0, 0, mWidths[0]-apos.x(), img.height()));
                        for(int i=1; i<(int)mWidths.size(); i++) {
                            apos.rx() -= mWidths[i-1];
                            apos.ry() += mSPH;
                            painter.drawImage(apos, img, QRectF(0, 0, mWidths[i]-apos.x(), img.height()));
                        }
                    }
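                    // Reuse frm for the encoder: keep pts/duration, give it a YUV420P buffer of the
                    // output size, then convert the painted canvas into it.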
                    auto pts = frm->pts;
                    auto dur = frm->pkt_duration;
                    av_frame_unref(frm);
                    frm->pts = pts;
                    frm->pkt_duration = dur;
                    frm->format = AV_PIX_FMT_YUV420P;
                    frm->width = outPar->width;
                    frm->height = outPar->height;
                    if((ret = av_frame_get_buffer(frm, 0)) < 0) {
                        err = QString("av_frame_get_buffer fail. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
                        goto free2;
                    }
                    sws_scale(out_sws_ctx, out_img_data, out_img_linesize, 0, outPar->height, frm->data, frm->linesize);
                    ret = avcodec_send_frame(en_ctx, frm);
                    int pro = frm->pts*100/out_vi_stream->duration;
                    if(pro > lastPro) {
                        lastPro = pro;
                        emit emProgress(pro);
                    }
                }
                if(ret < 0) {
                    err = QString("avcodec_send_frame fail. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
                    goto free2;
                }
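                // Drain packets from the encoder; on encoder EOF (after the flush) finish
                // interleaving, write the MP4 trailer and report 100%.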
                while((ret = avcodec_receive_packet(en_ctx, packet)) != AVERROR(EAGAIN)) {
                    if(ret < 0) {
                        if(ret!=AVERROR_EOF) err = QString("Receive packet fail: ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
                        else {
                            av_interleaved_write_frame(out_fmt, 0);
                            av_write_trailer(out_fmt);
                            emit emProgress(100);
                        }
                        goto free2;
                    } else {
                        av_interleaved_write_frame(out_fmt, packet);
                    }
                }
            }
        }
        free2:
        av_frame_free(&frm);
        av_packet_free(&packet);
        sws_freeContext(sws_ctx);
        sws_freeContext(out_sws_ctx);
    }
    free:
    avcodec_free_context(&de_ctx);
    avcodec_free_context(&en_ctx);
    avformat_close_input(&in_fmt);
    if(out_fmt) {
        avio_closep(&out_fmt->pb);
        avformat_free_context(out_fmt);
    }
    emit emErr(err);
}