OpenBoard commit a2fb735b, authored Apr 28, 2016 by Craig Watson
Podcasts on Linux: working video, no audio yet
Parent: 13a47116
Showing 5 changed files with 560 additions and 2 deletions.
src/core/UBApplication.cpp                     +0   -2
src/podcast/UBPodcastController.cpp            +4   -0
src/podcast/ffmpeg/UBFFmpegVideoEncoder.cpp    +396 -0
src/podcast/ffmpeg/UBFFmpegVideoEncoder.h      +135 -0
src/podcast/podcast.pri                        +25  -0
src/core/UBApplication.cpp
@@ -520,10 +520,8 @@ void UBApplication::decorateActionMenu(QAction* action)
     menu->addSeparator();
 
-#ifndef Q_OS_LINUX // No Podcast on Linux yet
     menu->addAction(mainWindow->actionPodcast);
     mainWindow->actionPodcast->setText(tr("Podcast"));
-#endif
 
     menu->addSeparator();
     menu->addAction(mainWindow->actionQuit);
src/podcast/UBPodcastController.cpp
@@ -64,6 +64,8 @@
 #elif defined(Q_OS_OSX)
     #include "quicktime/UBQuickTimeVideoEncoder.h"
     #include "quicktime/UBAudioQueueRecorder.h"
+#elif defined(Q_OS_LINUX)
+    #include "ffmpeg/UBFFmpegVideoEncoder.h"
 #endif
 
 #include "core/memcheck.h"
@@ -309,6 +311,8 @@ void UBPodcastController::start()
     mVideoEncoder = new UBWindowsMediaVideoEncoder(this); //deleted on stop
 #elif defined(Q_OS_OSX)
     mVideoEncoder = new UBQuickTimeVideoEncoder(this); //deleted on stop
+#elif defined(Q_OS_LINUX)
+    mVideoEncoder = new UBFFmpegVideoEncoder(this);
 #endif
 
     if (mVideoEncoder)
src/podcast/ffmpeg/UBFFmpegVideoEncoder.cpp
new file mode 100644
#include "UBFFmpegVideoEncoder.h"
// Future proofing
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(55,28,1)
#define av_frame_alloc avcodec_alloc_frame
#define av_frame_free avcodec_free_frame
#endif
QString
avErrorToQString
(
int
errnum
)
{
char
error
[
AV_ERROR_MAX_STRING_SIZE
];
av_make_error_string
(
error
,
AV_ERROR_MAX_STRING_SIZE
,
errnum
);
return
QString
(
error
);
}
/**
* @brief Constructor for the ffmpeg video encoder
*
*
* This class provides an interface between the screencast controller and the ffmpeg
* back-end. It initializes the audio and video encoders and frees them when done;
* worker threads handle the actual encoding of frames.
*
*/
UBFFmpegVideoEncoder
::
UBFFmpegVideoEncoder
(
QObject
*
parent
)
:
UBAbstractVideoEncoder
(
parent
)
,
mOutputFormatContext
(
NULL
)
,
mSwsContext
(
NULL
)
,
mFile
(
NULL
)
{
mTimebase
=
100
*
framesPerSecond
();
qDebug
()
<<
"timebase: "
<<
mTimebase
;
mVideoEncoderThread
=
new
QThread
;
mVideoWorker
=
new
UBFFmpegVideoEncoderWorker
(
this
);
mVideoWorker
->
moveToThread
(
mVideoEncoderThread
);
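
    // Note: after moveToThread(), mVideoWorker lives in mVideoEncoderThread, so the
    // connections below become queued connections: runEncoding() executes in the
    // worker thread, while setLastErrorMessage() and finishEncoding() run back in the
    // thread that owns this encoder object.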
    connect(mVideoWorker, SIGNAL(error(QString)),
            this, SLOT(setLastErrorMessage(QString)));

    connect(mVideoEncoderThread, SIGNAL(started()),
            mVideoWorker, SLOT(runEncoding()));

    connect(mVideoWorker, SIGNAL(encodingFinished()),
            mVideoEncoderThread, SLOT(quit()));

    connect(mVideoEncoderThread, SIGNAL(finished()),
            this, SLOT(finishEncoding()));
}

UBFFmpegVideoEncoder::~UBFFmpegVideoEncoder()
{
    if (mVideoWorker)
        delete mVideoWorker;

    if (mVideoEncoderThread)
        delete mVideoEncoderThread;
}

void UBFFmpegVideoEncoder::setLastErrorMessage(const QString& pMessage)
{
    qDebug() << "FFmpeg video encoder:" << pMessage;
    mLastErrorMessage = pMessage;
}

bool UBFFmpegVideoEncoder::start()
{
    bool initialized = init();

    if (initialized)
        mVideoEncoderThread->start();

    return initialized;
}

bool UBFFmpegVideoEncoder::stop()
{
    qDebug() << "Video encoder: stop requested";

    mVideoWorker->stopEncoding();

    return true;
}

bool UBFFmpegVideoEncoder::init()
{
    // Initialize ffmpeg lib
    av_register_all();
    avcodec_register_all();
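
    // These one-time registration calls make libavformat's muxers and libavcodec's
    // encoders available to the calls below; repeating them is harmless.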
    AVDictionary* options = NULL;
    int ret;

    // Output format and context
    // --------------------------------------
    if (avformat_alloc_output_context2(&mOutputFormatContext, NULL, "mp4", NULL) < 0) {
        setLastErrorMessage("Couldn't allocate video format context");
        return false;
    }

    // The default codecs for mp4 are h264 and aac, we use those

    // Video codec and context
    // -------------------------------------
    AVCodec* videoCodec = avcodec_find_encoder(mOutputFormatContext->oformat->video_codec);
    if (!videoCodec) {
        setLastErrorMessage("Video codec not found");
        return false;
    }

    mVideoStream = avformat_new_stream(mOutputFormatContext, 0);
    mVideoStream->time_base = { 1, mTimebase };

    avcodec_get_context_defaults3(mVideoStream->codec, videoCodec);
    AVCodecContext* c = avcodec_alloc_context3(videoCodec);

    c->bit_rate = videoBitsPerSecond();
    c->width = videoSize().width();
    c->height = videoSize().height();
    c->time_base = { 1, mTimebase };
    c->gop_size = 10;
    c->max_b_frames = 0;
    c->pix_fmt = AV_PIX_FMT_YUV420P;

    if (mOutputFormatContext->oformat->flags & AVFMT_GLOBALHEADER)
        c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;

    /*
     * Supported pixel formats for h264 are:
     * AV_PIX_FMT_YUV420P
     * AV_PIX_FMT_YUV422P
     * AV_PIX_FMT_YUV444P
     * AV_PIX_FMT_YUVJ420P
     */

    av_dict_set(&options, "preset", "slow", 0);
    av_dict_set(&options, "crf", "20", 0);
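
    // "preset" and "crf" are libx264 options passed through the dictionary:
    // crf 20 selects constant-quality rate control (lower values mean higher quality),
    // and the "slow" preset trades encoding speed for better compression.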
    ret = avcodec_open2(c, videoCodec, &options);

    if (ret < 0) {
        setLastErrorMessage(QString("Couldn't open video codec: ") + avErrorToQString(ret));
        return false;
    }

    mVideoStream->codec = c;

    // Source images are RGB32, and should be converted to YUV for h264 video
    mSwsContext = sws_getCachedContext(mSwsContext,
                                       c->width, c->height, AV_PIX_FMT_RGB32,
                                       c->width, c->height, c->pix_fmt,
                                       SWS_BICUBIC, 0, 0, 0);

    // Audio codec and context
    // -------------------------------------
    /*
    AVCodec * audioCodec = avcodec_find_encoder(mOutputFormatContext->oformat->audio_codec);
    mAudioStream = avformat_new_stream(mOutputFormatContext, audioCodec);
    */

    // Open the output file
    ret = avio_open(&(mOutputFormatContext->pb), videoFileName().toStdString().c_str(), AVIO_FLAG_WRITE);

    if (ret < 0) {
        setLastErrorMessage(QString("Couldn't open video file for writing: ") + avErrorToQString(ret));
        return false;
    }

    // Write stream header
    ret = avformat_write_header(mOutputFormatContext, NULL);

    if (ret < 0) {
        setLastErrorMessage(QString("Couldn't write header to file: ") + avErrorToQString(ret));
        return false;
    }

    return true;
}
void UBFFmpegVideoEncoder::newPixmap(const QImage& pImage, long timestamp)
{
    static bool isFirstFrame = true;
    if (isFirstFrame) {
        timestamp = 0;
        isFirstFrame = false;
    }

    if (!mVideoWorker->isRunning()) {
        qDebug() << "Encoder worker thread not running. Queuing frame.";
        mPendingFrames.enqueue({pImage, timestamp});
    }

    else {
        // First send any queued frames, then the latest one
        while (!mPendingFrames.isEmpty()) {
            AVFrame* avFrame = convertFrame(mPendingFrames.dequeue());
            if (avFrame)
                mVideoWorker->queueFrame(avFrame);
        }

        // note: if converting the frame turns out to be too slow to do here, it
        // can always be done from the worker thread (in that case,
        // the worker's queue would contain ImageFrames rather than AVFrames)
        AVFrame* avFrame = convertFrame({pImage, timestamp});
        if (avFrame)
            mVideoWorker->queueFrame(avFrame);

        // signal the worker that frames are available
        mVideoWorker->mWaitCondition.wakeAll();
    }
}
/** Convert a frame consisting of a QImage and timestamp to an AVFrame
 * with the right pixel format and PTS
 */
AVFrame* UBFFmpegVideoEncoder::convertFrame(ImageFrame frame)
{
    AVFrame* avFrame = av_frame_alloc();

    avFrame->format = mVideoStream->codec->pix_fmt;
    avFrame->width = mVideoStream->codec->width;
    avFrame->height = mVideoStream->codec->height;
    avFrame->pts = mTimebase * frame.timestamp / 1000;
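
    // The timestamp arrives in milliseconds and the stream timebase is 1/mTimebase,
    // with mTimebase = 100 * framesPerSecond(). E.g. at 10 fps, mTimebase is 1000,
    // so a frame captured at t = 2500 ms gets pts = 1000 * 2500 / 1000 = 2500 ticks,
    // i.e. 2.5 seconds in stream time.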
    const uchar* rgbImage = frame.image.bits();

    const int in_linesize[1] = { frame.image.bytesPerLine() };

    // Allocate the output image
    if (av_image_alloc(avFrame->data, avFrame->linesize,
                       mVideoStream->codec->width, mVideoStream->codec->height,
                       mVideoStream->codec->pix_fmt, 32) < 0)
    {
        setLastErrorMessage("Couldn't allocate image");
        return NULL;
    }

    sws_scale(mSwsContext,
              (const uint8_t* const*)&rgbImage, in_linesize,
              0, mVideoStream->codec->height,
              avFrame->data, avFrame->linesize);

    return avFrame;
}
void UBFFmpegVideoEncoder::finishEncoding()
{
    qDebug() << "VideoEncoder::finishEncoding called";

    // Some frames may not be encoded, so we call avcodec_encode_video2 until they're all done
    int gotOutput;
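
    // Passing NULL instead of a frame below asks the encoder to flush the packets it
    // has buffered internally; the loop keeps draining until gotOutput is 0, meaning
    // no delayed packets remain.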
    do {
        // TODO: get rid of duplicated code (videoWorker does almost exactly this during encoding)
        AVPacket* packet = mVideoWorker->mPacket;

        if (avcodec_encode_video2(mVideoStream->codec, packet, NULL, &gotOutput) < 0) {
            setLastErrorMessage("Couldn't encode frame to video");
            continue;
        }

        if (gotOutput) {
            AVRational codecTimebase = mVideoStream->codec->time_base;
            AVRational streamTimebase = mVideoStream->time_base;

            av_packet_rescale_ts(packet, codecTimebase, streamTimebase);
            packet->stream_index = mVideoStream->index;

            av_interleaved_write_frame(mOutputFormatContext, packet);
            av_packet_unref(packet);
        }
    } while (gotOutput);

    av_write_trailer(mOutputFormatContext);

    avio_close(mOutputFormatContext->pb);
    avcodec_close(mVideoStream->codec);
    sws_freeContext(mSwsContext);
    avformat_free_context(mOutputFormatContext);

    emit encodingFinished(true);
}
//-------------------------------------------------------------------------
// Worker
//-------------------------------------------------------------------------

UBFFmpegVideoEncoderWorker::UBFFmpegVideoEncoderWorker(UBFFmpegVideoEncoder* controller)
    : mController(controller)
{
    mStopRequested = false;
    mIsRunning = false;
    mPacket = new AVPacket();
}

UBFFmpegVideoEncoderWorker::~UBFFmpegVideoEncoderWorker()
{}

void UBFFmpegVideoEncoderWorker::stopEncoding()
{
    qDebug() << "Video worker: stop requested";
    mStopRequested = true;
    mWaitCondition.wakeAll();
}

void UBFFmpegVideoEncoderWorker::queueFrame(AVFrame* frame)
{
    mFrameQueueMutex.lock();
    mFrameQueue.enqueue(frame);
    mFrameQueueMutex.unlock();
}

/**
 * The main encoding function. Takes the queued image frames and
 * assembles them into the video
 */
void UBFFmpegVideoEncoderWorker::runEncoding()
{
    mIsRunning = true;

    while (!mStopRequested) {
        mFrameQueueMutex.lock();
        mWaitCondition.wait(&mFrameQueueMutex);

        while (!mFrameQueue.isEmpty()) {
            writeLatestVideoFrame();
        }

        /*
        while (!mAudioQueue.isEmpty()) {
            writeLatestAudioFrame();
        }
        */

        mFrameQueueMutex.unlock();
    }

    emit encodingFinished();
}

void UBFFmpegVideoEncoderWorker::writeLatestVideoFrame()
{
    AVFrame* frame = mFrameQueue.dequeue();

    int gotOutput;

    av_init_packet(mPacket);
    mPacket->data = NULL;
    mPacket->size = 0;

    // qDebug() << "Encoding frame to video. Pts: " << frame->pts << "/" << mController->mTimebase;

    if (avcodec_encode_video2(mController->mVideoStream->codec, mPacket, frame, &gotOutput) < 0)
        emit error("Error encoding frame to video");

    if (gotOutput) {
        AVRational codecTimebase = mController->mVideoStream->codec->time_base;
        AVRational streamTimebase = mController->mVideoStream->time_base;

        // recalculate the timestamp to match the stream's timebase
        av_packet_rescale_ts(mPacket, codecTimebase, streamTimebase);
        mPacket->stream_index = mController->mVideoStream->index;

        // qDebug() << "Writing encoded packet to file; pts: " << mPacket->pts << "/" << streamTimebase.den;

        av_interleaved_write_frame(mController->mOutputFormatContext, mPacket);
        av_packet_unref(mPacket);
    }

    // Duct-tape solution. I assume there's a better way of doing this, but:
    // some players like VLC show a black screen until the second frame (which
    // can be several seconds after the first one). Simply duplicating the first frame
    // seems to solve this problem, and also allows the thumbnail to be generated.
    static bool firstRun = true;
    if (firstRun) {
        firstRun = false;
        frame->pts += 1;
        mFrameQueue.enqueue(frame); // only works when the queue is empty at this point. todo: clean this up!
    }
    else
        // free the frame
        av_frame_free(&frame);
}
src/podcast/ffmpeg/UBFFmpegVideoEncoder.h
new file mode 100644
#ifndef UBFFMPEGVIDEOENCODER_H
#define UBFFMPEGVIDEOENCODER_H

extern "C" {
    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    #include <libavformat/avio.h>
    #include <libavutil/avutil.h>
    #include <libavutil/imgutils.h>
    #include <libavutil/opt.h>
    #include <libavutil/mathematics.h>
    #include <libswscale/swscale.h>
}

#include <atomic>
#include <stdio.h>

#include <QtCore>
#include <QImage>

#include "podcast/UBAbstractVideoEncoder.h"

class UBFFmpegVideoEncoderWorker;
class UBPodcastController;

class UBFFmpegVideoEncoder : public UBAbstractVideoEncoder
{
    Q_OBJECT

    friend class UBFFmpegVideoEncoderWorker;

public:
    UBFFmpegVideoEncoder(QObject* parent = NULL);
    virtual ~UBFFmpegVideoEncoder();

    bool start();
    bool stop();
    void newPixmap(const QImage& pImage, long timestamp);

    QString videoFileExtension() const { return "mp4"; }

    QString lastErrorMessage() { return mLastErrorMessage; }

    void setRecordAudio(bool pRecordAudio) { mShouldRecordAudio = pRecordAudio; }

signals:
    void encodingFinished(bool ok);

private slots:
    void setLastErrorMessage(const QString& pMessage);
    void finishEncoding();

private:
    struct ImageFrame
    {
        QImage image;
        long timestamp; // unit: ms
    };

    AVFrame* convertFrame(ImageFrame frame);

    bool init();

    // Queue for any pixmap that might be sent before the encoder is ready
    QQueue<ImageFrame> mPendingFrames;

    QString mLastErrorMessage;

    bool mShouldRecordAudio;

    QThread* mVideoEncoderThread;
    UBFFmpegVideoEncoderWorker* mVideoWorker;

    // Muxer
    AVFormatContext* mOutputFormatContext;
    int mTimebase;

    // Video
    AVStream* mVideoStream;
    struct SwsContext* mSwsContext;

    // Audio
    AVStream* mAudioStream;

    FILE* mFile;
};

class UBFFmpegVideoEncoderWorker : public QObject
{
    Q_OBJECT

    friend class UBFFmpegVideoEncoder;

public:
    UBFFmpegVideoEncoderWorker(UBFFmpegVideoEncoder* controller);
    ~UBFFmpegVideoEncoderWorker();

    bool isRunning() { return mIsRunning; }

    void queueFrame(AVFrame* frame);

public slots:
    void runEncoding();
    void stopEncoding();

signals:
    void encodingFinished();
    void error(QString message);

private:
    void writeLatestVideoFrame();

    UBFFmpegVideoEncoder* mController;

    // std::atomic is C++11. This won't work with msvc2010, so a
    // newer compiler must be used if this is to be used on Windows
    std::atomic<bool> mStopRequested;
    std::atomic<bool> mIsRunning;

    QQueue<AVFrame*> mFrameQueue;
    QMutex mFrameQueueMutex;
    QWaitCondition mWaitCondition;

    AVPacket* mPacket;
};

#endif // UBFFMPEGVIDEOENCODER_H
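
A minimal usage sketch, not part of this commit: a caller drives the encoder through the UBAbstractVideoEncoder interface declared above, roughly as UBPodcastController does in the diff further up. The QImage named image and the 40 ms timestamp are placeholders for illustration.

    // Hypothetical caller code (assumes a QObject context and a captured QImage named image)
    UBFFmpegVideoEncoder* encoder = new UBFFmpegVideoEncoder(this);
    encoder->setRecordAudio(false);            // audio capture is not implemented yet

    if (encoder->start()) {                    // init() the ffmpeg contexts and start the worker thread
        encoder->newPixmap(image, 40);         // push an RGB32 frame with its timestamp in ms
        encoder->stop();                       // ask the worker to stop; the file is finalized in finishEncoding()
    }
    else
        qDebug() << encoder->lastErrorMessage();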
src/podcast/podcast.pri
@@ -33,3 +33,28 @@ macx {
     OBJECTIVE_SOURCES += src/podcast/quicktime/UBQuickTimeFile.mm
 }
+
+linux-g++* {
+    HEADERS += src/podcast/ffmpeg/UBFFmpegVideoEncoder.h
+    SOURCES += src/podcast/ffmpeg/UBFFmpegVideoEncoder.cpp
+
+    FFMPEG = /opt/ffmpeg
+
+    INCLUDEPATH += $${FFMPEG}/include
+    DEPENDPATH += /usr/lib/x86_64-linux-gnu
+
+    LIBS += -L $${FFMPEG}/lib -lavformat \
+            -L $${FFMPEG}/lib -lavcodec \
+            -L $${FFMPEG}/lib -lswscale \
+            -L $${FFMPEG}/lib -lavutil \
+            -lva-x11 \
+            -lva \
+            -lxcb-shm \
+            -lxcb-xfixes \
+            -lxcb-render -lxcb-shape -lxcb -lX11 -lasound -lSDL -lx264 -lpthread -lvpx -lvorbisenc -lvorbis -ltheoraenc -ltheoradec -logg -lopus -lmp3lame -lfreetype -lfdk-aac -lass -llzma -lbz2 -lz -ldl -lswresample -lswscale -lavutil -lm
+
+    QMAKE_CXXFLAGS += -std=c++11 # move this to OpenBoard.pro when we can use C++11 on all platforms
+}