/*
 * This is a recording application that captures audio from a microphone and
 * video from either a camera or the X11 screen, encodes them with the theora
 * and speex codecs and saves the result in an ogg file.
 *
 * The intention of this example is to show how simple it is to perform such
 * complex tasks with GStreamer, and how easily the program's functionality
 * can be changed by swapping just one element: with autovideosrc it records
 * from a webcam, while with ximagesrc it produces a screencast.
 */
#include "ui_recorder.h"
#include <QtCore/QDir>
#include <QtGui/QApplication>
#include <QtGui/QDialog>
#include <QtGui/QMessageBox>
#include <QGlib/Error>
#include <QGlib/Connect>
#include <QGst/Init>
#include <QGst/ElementFactory>
#include <QGst/ChildProxy>
#include <QGst/PropertyProbe>
#include <QGst/Pipeline>
#include <QGst/Pad>
#include <QGst/Event>
#include <QGst/Message>
#include <QGst/Bus>
#ifdef Q_WS_X11
# include <QtGui/QX11Info>
#endif
class Recorder : public QDialog
{
Q_OBJECT
public:
Recorder(QWidget *parent = 0);
private:
enum Device { AudioSrc, VideoSrc };
void findDevices(Device device);
void start();
void stop();
private Q_SLOTS:
void on_startStopButton_clicked();
private:
Ui::Recorder m_ui;
};
/*
 * Constructs the dialog: populates the device combo boxes, checks whether
 * screencasting (ximagesrc) is available and suggests a default output file.
 */
Recorder::Recorder(QWidget *parent)
    : QDialog(parent)
{
    m_ui.setupUi(this);

    //populate the audio/video device combo boxes
    findDevices(AudioSrc);
    findDevices(VideoSrc);

    //the screencast option is only usable if the ximagesrc element exists
    QGst::ElementPtr ximagesrc = QGst::ElementFactory::make("ximagesrc");
    if (!ximagesrc) {
        //remove the screencast entry (index 1) from the video source combo box
        m_ui.videoSourceComboBox->removeItem(1);
    } else {
#ifdef Q_WS_X11
        //default to capturing the screen this application is shown on
        m_ui.displayNumSpinBox->setValue(QX11Info::appScreen());
#endif
    }

    //suggest an output file in the current working directory
    m_ui.outputFileEdit->setText(QDir::currentPath() + QDir::separator() + "out.ogv");
}
/*
 * Discovers the capture devices available for the given device class and
 * fills the matching combo box. The auto sources (autoaudiosrc/autovideosrc)
 * create the real source element as a child when brought to the READY state;
 * if that child supports probing its "device" property, the devices are
 * listed, otherwise only a "Default" entry is offered.
 */
void Recorder::findDevices(Device device)
{
    const char *srcElementName = (device == AudioSrc) ? "autoaudiosrc" : "autovideosrc";
    QGst::ElementPtr src = QGst::ElementFactory::make(srcElementName);

    if (!src) {
        QMessageBox::critical(this, tr("Error"),
                              tr("Failed to create element \"%1\". Make sure you have "
                                 "gstreamer-plugins-good installed").arg(srcElementName));
        return;
    }

    //going to READY forces the auto source to instantiate its real child source
    src->setState(QGst::StateReady);

    QGst::ChildProxyPtr childProxy = src.dynamicCast<QGst::ChildProxy>();
    QGst::PropertyProbePtr propertyProbe;
    if (childProxy && childProxy->childrenCount() > 0) {
        //the real source is the first child
        propertyProbe = childProxy->childByIndex(0).dynamicCast<QGst::PropertyProbe>();
    }

    src->setState(QGst::StateNull);

    if (propertyProbe && propertyProbe->propertySupportsProbe("device")) {
        //remember the probe so probeForDevices() can tell which combo box to fill
        ((device == AudioSrc) ? m_audioProbe : m_videoProbe) = propertyProbe;
        probeForDevices(propertyProbe);
    } else {
        //no probing support - just offer the default device
        QComboBox *box = (device == AudioSrc) ? m_ui.audioDeviceComboBox : m_ui.videoDeviceComboBox;
        box->addItem(tr("Default"));
    }
}
{
QComboBox *box = (propertyProbe == m_audioProbe) ?
m_ui.audioDeviceComboBox : m_ui.videoDeviceComboBox;
box->clear();
box->addItem(tr("Default"));
QList<QGlib::Value> devices = propertyProbe->probeAndGetValues("device");
propertyProbe->setProperty("device", device);
QString deviceName = propertyProbe->property("device-name").toString();
box->addItem(QString(
"%1 (%2)").arg(deviceName, device.
toString()),
}
}
{
try {
"audioresample ! audiorate ! speexenc ! queue");
qCritical() << "Failed to create audio source bin:" << error;
}
QVariant device = m_ui.audioDeviceComboBox->itemData(m_ui.audioDeviceComboBox->currentIndex());
if (device.isValid()) {
src->setState(QGst::StateReady);
if (childProxy && childProxy->childrenCount() > 0) {
realSrc->setProperty("device", device.toString());
}
}
return audioBin;
}
{
try {
if (m_ui.videoSourceComboBox->currentIndex() == 0) {
"ffmpegcolorspace ! theoraenc ! queue");
} else {
"ffmpegcolorspace ! theoraenc ! queue");
}
qCritical() << "Failed to create video source bin:" << error;
}
if (m_ui.videoSourceComboBox->currentIndex() == 0) {
QVariant device = m_ui.videoDeviceComboBox->itemData(m_ui.videoDeviceComboBox->currentIndex());
if (device.isValid()) {
src->setState(QGst::StateReady);
if (childProxy && childProxy->childrenCount() > 0) {
realSrc->setProperty("device", device.toString());
}
}
} else {
videoBin->getElementByName("videosrc")->setProperty("screen-num", m_ui.displayNumSpinBox->value());
}
return videoBin;
}
/*
 * Builds the recording pipeline (audio bin + video bin -> oggmux -> filesink),
 * connects the bus watch and starts playing. On element-creation failure an
 * error dialog is shown and nothing is started.
 */
void Recorder::start()
{
    QGst::BinPtr audioSrcBin = createAudioSrcBin();
    QGst::BinPtr videoSrcBin = createVideoSrcBin();
    QGst::ElementPtr mux = QGst::ElementFactory::make("oggmux");
    QGst::ElementPtr sink = QGst::ElementFactory::make("filesink");

    if (!audioSrcBin || !videoSrcBin || !mux || !sink) {
        QMessageBox::critical(this, tr("Error"), tr("One or more elements could not be created. "
                              "Verify that you have all the necessary element plugins installed."));
        return;
    }

    //write the muxed stream to the file chosen in the UI
    sink->setProperty("location", m_ui.outputFileEdit->text());

    m_pipeline = QGst::Pipeline::create();
    m_pipeline->add(audioSrcBin, videoSrcBin, mux, sink);

    //request one sink pad on the muxer per stream and link the source bins
    QGst::PadPtr audioPad = mux->getRequestPad("sink_%d");
    audioSrcBin->getStaticPad("src")->link(audioPad);

    QGst::PadPtr videoPad = mux->getRequestPad("sink_%d");
    videoSrcBin->getStaticPad("src")->link(videoPad);

    mux->link(sink);

    //watch the bus for EOS and error messages
    m_pipeline->bus()->addSignalWatch();
    QGlib::connect(m_pipeline->bus(),
                   "message",
                   this, &Recorder::onBusMessage);

    //go!
    m_pipeline->setState(QGst::StatePlaying);
    m_ui.startStopButton->setText(tr("Stop recording"));
}
/*
 * Tears down the recording: brings the pipeline to the NULL state, drops
 * our reference to it (m_pipeline becoming null also marks "not recording"
 * for on_startStopButton_clicked) and resets the button label.
 */
void Recorder::stop()
{
//the pipeline must reach StateNull before the reference is released
m_pipeline->setState(QGst::StateNull);
m_pipeline.clear();
//allow a new recording to be started
m_ui.startStopButton->setText(tr("Start recording"));
}
{
switch (message->type()) {
case QGst::MessageEos:
stop();
break;
case QGst::MessageError:
if (m_pipeline) {
stop();
}
QMessageBox::critical(this, tr("Pipeline Error"),
break;
default:
break;
}
}
/*
 * Toggles recording. When idle, a new recording is started; when recording,
 * an end-of-stream event is sent so all buffered data is flushed to the file
 * (the pipeline is actually destroyed when the EOS message reaches the bus).
 */
void Recorder::on_startStopButton_clicked()
{
    if (!m_pipeline) {
        start();
    } else {
        //request a clean shutdown; stop() runs from onBusMessage() on EOS
        m_pipeline->sendEvent(QGst::EosEvent::create());
    }
}
int main(int argc, char *argv[])
{
QApplication app(argc, argv);
Recorder r;
r.show();
return app.exec();
}
#include "main.moc"