// videosplitthread.cpp — ~244 lines, ~12 KiB, C++
#include "videosplitthread.h"
|
|
#include <QImage>
|
|
#include <QPainter>
|
|
#include <QDebug>
|
|
#include <queue>
|
|
extern "C"{
|
|
#include <libavformat/avformat.h>
|
|
#include <libavcodec/avcodec.h>
|
|
#include <libswscale/swscale.h>
|
|
}
|
|
|
|
// QImage cleanup callback: frees the pixel buffer that was allocated with
// `new uchar[...]` and handed to the QImage constructor as cleanup info.
static void imgCleanupHandler(void *info) {
    auto *pixels = static_cast<unsigned char *>(info);
    delete[] pixels;
}
|
|
|
|
// Worker thread that renders a video into a "split" tiled layout (see run()).
//
// elew/eleh  : size in pixels each decoded frame is scaled to before painting.
// maxLen     : length of the long edge of the output canvas.
// sph        : per-strip width (vertical mode) or height (horizontal mode).
// widths     : per-strip extents used when painting the element across strips.
//              NOTE(review): stored into mWidths — if mWidths is declared as a
//              reference member, the caller's vector must outlive this thread;
//              verify against the header.
// isVer      : true = vertical strip layout, false = horizontal.
// pos        : initial paint offset of the element on the canvas.
// file       : path (as raw bytes) of the input video file.
VideoSplitThread::VideoSplitThread(int elew, int eleh, int maxLen, int sph, std::vector<int> &widths, bool isVer, QPointF pos, QByteArray file) : mEleW(elew), mEleH(eleh), maxLen(maxLen), mSPH(sph), mWidths(widths), pos(pos), file(file), isVer(isVer) {
// Self-destructing thread: once run() returns and the thread finishes, the
// object schedules its own deletion on the event loop.
connect(this, &QThread::finished, this, &QThread::deleteLater);
}
|
|
|
|
void VideoSplitThread::run() {
|
|
AVFormatContext *fmt_in = avformat_alloc_context(), *fmt_out = 0;
|
|
AVCodecContext *de_ctx = 0, *en_ctx = 0;
|
|
QString err;
|
|
char buf[AV_ERROR_MAX_STRING_SIZE];
|
|
int ret;
|
|
{
|
|
if((ret = avformat_open_input(&fmt_in, file.constData(), 0, 0)) < 0) {
|
|
err = QString("Couldn't open input stream. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
goto free;
|
|
}
|
|
if((ret = avformat_find_stream_info(fmt_in, 0)) < 0) {
|
|
err = QString("Couldn't find stream information. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
goto free;
|
|
}
|
|
auto outfile = file+"-square.mp4";
|
|
if((ret = avformat_alloc_output_context2(&fmt_out, 0, "mp4", outfile.constData())) < 0) {
|
|
err = QString("avformat_alloc_output_context2 fail. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
goto free;
|
|
}
|
|
int video_idx = -1;
|
|
AVStream *stream_out_video;
|
|
for(uint ss=0; ss<fmt_in->nb_streams; ss++) {
|
|
auto streamIn = fmt_in->streams[ss];
|
|
qDebug() << streamIn->index << av_get_media_type_string(streamIn->codecpar->codec_type);
|
|
if(streamIn->codecpar->codec_type == AVMEDIA_TYPE_DATA) continue;
|
|
auto streamOut = avformat_new_stream(fmt_out, 0);
|
|
if((ret = avcodec_parameters_copy(streamOut->codecpar, streamIn->codecpar)) < 0) {
|
|
err = QString("avcodec_parameters_copy fail. ") + av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
goto free;
|
|
}
|
|
streamOut->time_base = streamIn->time_base;
|
|
streamOut->start_time = streamIn->start_time;
|
|
streamOut->duration = streamIn->duration;
|
|
if(video_idx == -1 && streamIn->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
|
|
video_idx = ss;
|
|
stream_out_video = streamOut;
|
|
}
|
|
}
|
|
if(video_idx == -1) {
|
|
err = "Didn't find a Video Stream";
|
|
goto free;
|
|
}
|
|
|
|
auto par_in_video = fmt_in->streams[video_idx]->codecpar;
|
|
|
|
auto decoder = avcodec_find_decoder(par_in_video->codec_id);
|
|
if(decoder==0) {
|
|
err = "Could not found Video Decoder";
|
|
goto free;
|
|
}
|
|
de_ctx = avcodec_alloc_context3(decoder);
|
|
de_ctx->thread_count = 4;
|
|
avcodec_parameters_to_context(de_ctx, par_in_video);
|
|
if(avcodec_open2(de_ctx, decoder, 0) < 0) {
|
|
err = "Could not open Video decode Ctx";
|
|
goto free;
|
|
}
|
|
|
|
auto par_out_video = stream_out_video->codecpar;
|
|
par_out_video->codec_type = AVMEDIA_TYPE_VIDEO;
|
|
par_out_video->codec_id = AV_CODEC_ID_H264;
|
|
par_out_video->format = AV_PIX_FMT_YUV420P;
|
|
par_out_video->profile = 77;
|
|
par_out_video->level = 42;
|
|
if(isVer) {
|
|
par_out_video->height = maxLen;
|
|
par_out_video->width = mSPH * (int)mWidths.size();
|
|
} else {
|
|
par_out_video->width = maxLen;
|
|
par_out_video->height = mSPH * (int)mWidths.size();
|
|
}
|
|
qDebug().nospace()<<"out "<<par_out_video->width<<" x "<<par_out_video->height;
|
|
|
|
auto encoder = avcodec_find_encoder(par_out_video->codec_id);
|
|
if(encoder==0) {
|
|
fprintf(stderr, "Codec not found\n");
|
|
goto free;
|
|
}
|
|
en_ctx = avcodec_alloc_context3(encoder);
|
|
en_ctx->thread_count = 4;
|
|
avcodec_parameters_to_context(en_ctx, par_out_video);
|
|
en_ctx->bit_rate = par_out_video->width * par_out_video->height * 6;
|
|
en_ctx->gop_size = de_ctx->gop_size;
|
|
en_ctx->max_b_frames = 0;
|
|
en_ctx->time_base = stream_out_video->time_base;
|
|
|
|
if((ret = avcodec_open2(en_ctx, encoder, 0)) < 0) {
|
|
err = QString("Open video encode ctx failed. ") + av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
goto free;
|
|
}
|
|
if(fmt_out->flags & AVFMT_NOFILE) qDebug()<<"AVFMT_NOFILE";
|
|
else if((ret = avio_open(&fmt_out->pb, outfile.constData(), AVIO_FLAG_WRITE)) < 0) {
|
|
err = QString("avio_open fail. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
goto free;
|
|
}
|
|
if((ret = avformat_write_header(fmt_out, 0)) < 0) {
|
|
err = QString("avformat_write_header fail. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
goto free;
|
|
}
|
|
auto sws_ctx = sws_getContext(de_ctx->width, de_ctx->height, de_ctx->pix_fmt, mEleW, mEleH, AV_PIX_FMT_RGB32, SWS_FAST_BILINEAR, 0, 0, 0);
|
|
auto out_sws_ctx = sws_getContext(par_out_video->width, par_out_video->height, AV_PIX_FMT_RGB32, par_out_video->width, par_out_video->height, AV_PIX_FMT_YUV420P, SWS_FAST_BILINEAR, 0, 0, 0);
|
|
auto packet = av_packet_alloc();
|
|
auto frm = av_frame_alloc();
|
|
|
|
int img_linesize[4]{(mEleW*4+63)/64*64};
|
|
uint8_t *img_data[4]{new uchar[img_linesize[0] * mEleH]};
|
|
QImage img(img_data[0], mEleW, mEleH, img_linesize[0], QImage::Format_ARGB32, imgCleanupHandler, img_data[0]);
|
|
|
|
int out_img_linesize[4]{(par_out_video->width*4+63)/64*64};
|
|
uint8_t *out_img_data[4]{new uchar[out_img_linesize[0] * par_out_video->height]};
|
|
QImage out_img(out_img_data[0], par_out_video->width, par_out_video->height, out_img_linesize[0], QImage::Format_ARGB32, imgCleanupHandler, out_img_data[0]);
|
|
QPainter painter(&out_img);
|
|
while(1) {
|
|
if((ret = av_read_frame(fmt_in, packet)) < 0) {
|
|
if(ret!=AVERROR_EOF) {
|
|
err = QString("Read packet fail: ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
break;
|
|
}
|
|
ret = avcodec_send_packet(de_ctx, 0);
|
|
} else {
|
|
if(packet->stream_index != video_idx) {
|
|
ret = av_interleaved_write_frame(fmt_out, packet);
|
|
if(ret < 0) {
|
|
err = QString("write_frame(A) failed. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
goto free2;
|
|
}
|
|
continue;
|
|
}
|
|
ret = avcodec_send_packet(de_ctx, packet);
|
|
}
|
|
if(ret < 0) {
|
|
err = QString("avcodec_send_packet fail. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
break;
|
|
}
|
|
while((ret = avcodec_receive_frame(de_ctx, frm)) != AVERROR(EAGAIN)) {
|
|
if(ret < 0) {
|
|
if(ret!=AVERROR_EOF) {
|
|
err = QString("Receive frame fail: ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
goto free2;
|
|
}
|
|
ret = avcodec_send_frame(en_ctx, 0);
|
|
} else {
|
|
sws_scale(sws_ctx, frm->data, frm->linesize, 0, de_ctx->height, img_data, img_linesize);
|
|
auto apos = pos;
|
|
if(isVer) {
|
|
painter.drawImage(apos, img, QRectF(0, 0, img.width(), mWidths[0]-apos.y()));
|
|
for(int i=1; i<(int)mWidths.size(); i++) {
|
|
apos.ry() -= mWidths[i-1];
|
|
apos.rx() += mSPH;
|
|
painter.drawImage(apos, img, QRectF(0, 0, img.width(), mWidths[i]-apos.y()));
|
|
}
|
|
} else {
|
|
painter.drawImage(apos, img, QRectF(0, 0, mWidths[0]-apos.x(), img.height()));
|
|
for(int i=1; i<(int)mWidths.size(); i++) {
|
|
apos.rx() -= mWidths[i-1];
|
|
apos.ry() += mSPH;
|
|
painter.drawImage(apos, img, QRectF(0, 0, mWidths[i]-apos.x(), img.height()));
|
|
}
|
|
}
|
|
auto pts = frm->pts;
|
|
auto pkt_dts = frm->pkt_dts;
|
|
auto dur = frm->pkt_duration;
|
|
av_frame_unref(frm);
|
|
frm->pts = pts;
|
|
frm->pkt_dts = pkt_dts;
|
|
frm->pkt_duration = dur;
|
|
frm->format = AV_PIX_FMT_YUV420P;
|
|
frm->width = par_out_video->width;
|
|
frm->height = par_out_video->height;
|
|
if((ret = av_frame_get_buffer(frm, 0)) < 0) {
|
|
err = QString("av_frame_get_buffer fail. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
goto free2;
|
|
}
|
|
sws_scale(out_sws_ctx, out_img_data, out_img_linesize, 0, par_out_video->height, frm->data, frm->linesize);
|
|
ret = avcodec_send_frame(en_ctx, frm);
|
|
int progress = frm->pts * 100 / stream_out_video->duration;
|
|
if(progress > lastProgress) {
|
|
lastProgress = progress;
|
|
emit emProgress(progress);
|
|
}
|
|
}
|
|
if(ret < 0) {
|
|
err = QString("avcodec_send_frame failed. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
goto free2;
|
|
}
|
|
while((ret = avcodec_receive_packet(en_ctx, packet)) != AVERROR(EAGAIN)) {
|
|
if(ret < 0) {
|
|
if(ret!=AVERROR_EOF) err = QString("Receive frame fail: ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
else {
|
|
ret = av_interleaved_write_frame(fmt_out, 0);
|
|
if(ret < 0) {
|
|
err = QString("write_frame(0) failed. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
goto free2;
|
|
}
|
|
av_write_trailer(fmt_out);
|
|
emit emProgress(100);
|
|
}
|
|
goto free2;
|
|
} else {
|
|
packet->stream_index = video_idx;
|
|
ret = av_interleaved_write_frame(fmt_out, packet);
|
|
if(ret < 0) {
|
|
err = QString("write_frame(V) failed. ")+av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, ret);
|
|
goto free2;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
free2:
|
|
av_frame_free(&frm);
|
|
av_packet_free(&packet);
|
|
sws_freeContext(sws_ctx);
|
|
sws_freeContext(out_sws_ctx);
|
|
}
|
|
free:
|
|
avcodec_free_context(&de_ctx);
|
|
avcodec_free_context(&en_ctx);
|
|
avformat_close_input(&fmt_in);
|
|
avio_closep(&fmt_out->pb);
|
|
if(fmt_out) avformat_free_context(fmt_out);
|
|
emit emErr(err);
|
|
}
|