Intel® INDE Media for Mobile Tutorials - Video Capturing for Qt* Quick Applications on Android*

This tutorial explains how to use Intel® INDE Media for Mobile to add video capturing capability to Qt Quick applications on Android.

Qt Quick Capturing

Prerequisites:

This tutorial is intended for experienced Qt programmers. If you have never built a Qt application for Android, check the official guide, Getting Started with Qt for Android. We will focus only on the key points.

Let’s make a simple QML app. We need some moving content, an FPS counter, and a record button. Create a new Qt Quick 2 Application project and select the Qt Quick 2.4 component set. Qt Creator generates the main.qml file automatically:

import QtQuick 2.4
import QtQuick.Window 2.2

Window {
    ...
}

The Window object creates a new top-level window for a Qt Quick scene. We need to make significant changes to its behavior, but we can't modify the Window sources, so let’s inherit from QQuickWindow:

#ifndef QTCAPTURINGWINDOW_H
#define QTCAPTURINGWINDOW_H

#include <QQuickWindow>
#include <jni.h>

class QOpenGLFramebufferObject;
class QOpenGLShaderProgram;
class QElapsedTimer;

class QtCapturingWindow : public QQuickWindow
{
    Q_OBJECT
    Q_PROPERTY(bool isRunning READ isRunning NOTIFY isRunningChanged)
    Q_PROPERTY(int fps READ fps NOTIFY fpsChanged)

    friend class InitAndStartCapturingJob;

public:
    explicit QtCapturingWindow(QWindow *parent = 0);
    ~QtCapturingWindow();

    bool isRunning() const { return m_isRunning; }
    int fps() const { return m_fps; }

    Q_INVOKABLE void startCapturing(int width, int height, int frameRate, int bitRate, QString fileName);
    Q_INVOKABLE void stopCapturing();

signals:
    void isRunningChanged();
    void fpsChanged();

private:
    jobject m_capturingObject;
    QOpenGLFramebufferObject *m_fbo;
    QOpenGLShaderProgram *m_program;
    QElapsedTimer *m_timer;
    bool m_isRunning;
    bool m_finalizeFrame;
    int m_fps;
    qint64 m_nextCapture;
    qint64 m_startTime;

    int m_videoWidth;
    int m_videoHeight;
    int m_videoFrameRate;
    int m_videoBitRate;
    QString m_videoName;

    QString m_videoDir;

    void drawQuad(int textureID);
    void captureFrame(int textureID);
    void initAndStartCapturing();

private slots:
    void onSceneGraphInitialized();
    void onBeforeRendering();
    void onAfterRendering();
};

#endif // QTCAPTURINGWINDOW_H

As you can see, we now have an FPS counter exposed as a Q_PROPERTY and Q_INVOKABLE start/stop methods for video capturing.

Edit the file main.cpp:

#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include <QtQml>
#include "qtcapturingwindow.h"

int main(int argc, char *argv[])
{
    QGuiApplication app(argc, argv);

    qmlRegisterType<QtCapturingWindow>("INDE.MediaForMobile", 1, 0, "CapturingWindow");

    QQmlApplicationEngine engine;
    engine.load(QUrl(QStringLiteral("qrc:/main.qml")));

    return app.exec();
}

Now we can use the fps property and the startCapturing/stopCapturing methods from QML:

import QtQuick 2.4
import INDE.MediaForMobile 1.0

CapturingWindow {
    id: capturing
    visible: true
    color: "darkgrey"

    property int elapsed: 0
    Timer {
        interval: 1000; running: true; repeat: true
        property int largeInterval: 5
        property int counter: 0
        onTriggered: {
            capturing.elapsed++;
            if (++counter == largeInterval) {
                counter = 0;
                capturing.color = Qt.hsla(Math.random(), 0.5, 0.5, 1.0);
            }
        }
    }

    Text {
        id: timeText
        z: 10
        anchors {
            left: parent.left
            top: parent.top
            leftMargin: 25
            topMargin: 80
        }
        font.pixelSize: 50
        text: "Time: " + capturing.elapsed
    }

    Rectangle {
        radius: 50
        width: (parent.width > parent.height ? parent.width : parent.height) / 3
        height: width
        anchors.centerIn: parent
        gradient: Gradient {
            GradientStop { position: 0.0; color: "red" }
            GradientStop { position: 0.5; color: "yellow" }
            GradientStop { position: 1.0; color: "green" }
        }
        PropertyAnimation on rotation {
            running: Qt.application.state === Qt.ApplicationActive
            loops: Animation.Infinite
            easing.type: Easing.Linear
            from: 0
            to: 360
            duration: 8000
        }
    }

    Connections {
        target: Qt.application
        onStateChanged: {
            switch (Qt.application.state) {
            case Qt.ApplicationSuspended:
            case Qt.ApplicationHidden:
                if (capturing.isRunning)
                    capturing.stopCapturing();
                console.log("ApplicationHidden");
                break
            case Qt.ApplicationActive:
                console.log("ApplicationActive");
                break
            }
        }
    }

    Rectangle {
        z: 10
        id: exitButton
        width: 150
        height: 150
        radius: 75
        anchors {
            right: parent.right
            bottom: parent.bottom
            margins: 50
        }
        color: "black"
        MouseArea {
            anchors.fill: parent
            onClicked: Qt.quit()
        }
    }

    Rectangle {
        z: 10
        id: capturingButton
        width: 150
        height: 150
        radius: 75
        anchors {
            right: parent.right
            top: parent.top
            margins: 50
        }
        color: "green"
        Behavior on color {
            ColorAnimation { duration: 150 }
        }
        MouseArea {
            id: mouseArea
            anchors.fill: parent
            property int videoNumber: 0
            onClicked: {
                if (!capturing.isRunning) {
                    capturing.startCapturing(1280, 720, 25, 3000, "QtCapturing_" + videoNumber + ".mp4");
                    videoNumber++;
                } else {
                    capturing.stopCapturing();
                }
            }
        }
        states: [
            State {
                when: capturing.isRunning
                PropertyChanges {
                    target: capturingButton
                    color: "red"
                }
            }
        ]
    }

    Text {
        anchors {
            left: parent.left
            top: parent.top
            margins: 25
        }
        font.pixelSize: 50
        text: "FPS: " + capturing.fps
    }
}

Download and install Intel INDE from http://intel.com/software/inde. After installing Intel INDE, choose to download and install Media for Mobile. For additional assistance, visit the Intel INDE forum. Go to the libs subfolder of the Media for Mobile installation folder and copy the two jar files (android-<version>.jar and domain-<version>.jar) to your /android-sources/libs/ folder. You will need to create these folders first. The /android-sources/ folder can have any name, but you must specify it in the project file:

ANDROID_PACKAGE_SOURCE_DIR = $$PWD/android-sources

See Deploying an Application on Android if you have additional questions.

Now let’s go to the Java side. It isn't convenient to modify the source code of the main activity; it is much easier to create a separate class and instantiate it at application startup from the C++ side using JNI. Create the folder chain /android-sources/src/org/qtproject/qt5/android/bindings/. Add a Java* file Capturing.java to the last folder with the following code:

package org.qtproject.qt5.android.bindings;

import com.intel.inde.mp.IProgressListener;
import com.intel.inde.mp.domain.Resolution;
import com.intel.inde.mp.android.graphics.FullFrameTexture;
import com.intel.inde.mp.android.graphics.FrameBuffer;
import com.intel.inde.mp.android.graphics.EglUtil;

import android.opengl.GLES20;
import android.os.Environment;
import android.util.Log;
import android.content.Context;

import java.io.IOException;
import java.io.File;

public class Capturing
{
	private static final String TAG = "Capturing";
	
	private static FullFrameTexture texture;
	private FrameBuffer frameBuffer;
	
	private VideoCapture videoCapture;
	private int width = 0;
	private int height = 0;
	
	private int videoWidth = 0;
	private int videoHeight = 0;
	private int videoFrameRate = 0;
	
	private long nextCaptureTime = 0;
	private long startTime = 0;
	
	private static Capturing instance = null;
	
	private SharedContext sharedContext = null;
    private EncodeThread encodeThread = null;
	private boolean finalizeFrame = false;
	private boolean isRunning = false;
	
	private IProgressListener progressListener = new IProgressListener() {
        @Override
        public void onMediaStart() {
        	startTime = System.nanoTime();
        	nextCaptureTime = 0;
        	encodeThread.start();
        	isRunning = true;
        }

        @Override
        public void onMediaProgress(float progress) {
        }

        @Override
        public void onMediaDone() {
        }

        @Override
        public void onMediaPause() {
        }

        @Override
        public void onMediaStop() {
        }

        @Override
        public void onError(Exception exception) {
        }
    };
    
    private class EncodeThread extends Thread
    {
    	private static final String TAG = "EncodeThread";
    	
    	private SharedContext sharedContext;  	
    	private boolean isStopped = false;
    	private boolean newFrameIsAvailable = false;
    	private FrameBuffer encodeFrameBuffer;
    	
    	EncodeThread(SharedContext sharedContext, int width, int height) {
    		super();
    		this.sharedContext = sharedContext;
    		encodeFrameBuffer = new FrameBuffer(EglUtil.getInstance());
    		encodeFrameBuffer.setResolution(new Resolution(width, height));
    	}
    	
		@Override
		public void run() {
			int textureID = encodeFrameBuffer.getTextureId();
			while (!isStopped) {
				if (newFrameIsAvailable) {
					synchronized (videoCapture) {
						sharedContext.makeCurrent();
						videoCapture.beginCaptureFrame();
						GLES20.glViewport(0, 0, videoWidth, videoHeight);
						texture.draw(textureID);
						videoCapture.endCaptureFrame();
						newFrameIsAvailable = false;
						sharedContext.doneCurrent();
					}
				}
			}
			isStopped = false;
			synchronized (videoCapture) {
				videoCapture.stop();
			}
		}
		
		public void queryStop() {
			isStopped = true;
		}

		public void pushFrame(int textureID) {
			// Render to intermediate FBO
			encodeFrameBuffer.bind();
			texture.draw(textureID);
			encodeFrameBuffer.unbind();
			newFrameIsAvailable = true;
		}
    }
	
    public Capturing(Context context, int width, int height)
    {
		videoCapture = new VideoCapture(context, progressListener);
		
	    frameBuffer = new FrameBuffer(EglUtil.getInstance());
		frameBuffer.setResolution(new Resolution(width, height));
		this.width = width;
		this.height = height;
		
		texture = new FullFrameTexture();
		sharedContext = new SharedContext();
		instance = this;
    }
    
    public static Capturing getInstance()
    {
    	return instance;
    }

    public static String getDirectoryDCIM()
    {
        return Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM) + File.separator;
    }
    
    public void initCapturing(int width, int height, int frameRate, int bitRate)
    {
    	videoFrameRate = frameRate;
        VideoCapture.init(width, height, frameRate, bitRate);
        videoWidth = width;
    	videoHeight = height;

    	encodeThread = new EncodeThread(sharedContext, width, height);
    }

    public void startCapturing(final String videoPath)
    {
        if (videoCapture == null) {
            return;
        }

		(new Thread() {
			public void run() {
		        synchronized (videoCapture) {
		            try {
		                videoCapture.start(videoPath);
		            } catch (IOException e) {
		            	Log.e(TAG, "--- startCapturing error");
		            }
		        }
			}
		}).start();
    }
	
	public void beginCaptureFrame()
    {
		long elapsedTime = System.nanoTime() - startTime;
		if (elapsedTime >= nextCaptureTime) {
			finalizeFrame = true;
			frameBuffer.bind();
			nextCaptureTime += 1000000000 / videoFrameRate;
		}
    }
	
	public void captureFrame(int textureID)
	{
		// Submit new frame
		encodeThread.pushFrame(textureID);
		// Restore viewport
		GLES20.glViewport(0, 0, width, height);
	}
	
	public void endCaptureFrame()
    {
		if (!finalizeFrame)
			return;
		
    	frameBuffer.unbind();
		int textureID = frameBuffer.getTextureId();
		texture.draw(textureID);

		captureFrame(textureID);

		finalizeFrame = false;
    }

    public void stopCapturing()
    {
    	isRunning = false;

    	if (finalizeFrame) {
    		finalizeFrame = false;
    	}
        encodeThread.queryStop();
    }
    
    public boolean isRunning()
    {
    	return isRunning;
    }

}

Then create another Java file in the same directory. Name it VideoCapture.java and put the following contents in it:

package org.qtproject.qt5.android.bindings;

import android.content.Context;
import android.util.Log;

import com.intel.inde.mp.*;
import com.intel.inde.mp.android.AndroidMediaObjectFactory;
import com.intel.inde.mp.android.AudioFormatAndroid;
import com.intel.inde.mp.android.VideoFormatAndroid;

import java.io.IOException;

public class VideoCapture
{
    private static final String TAG = "VideoCapture";

    private static final String Codec = "video/avc";
    private static int IFrameInterval = 1;

    private static final Object syncObject = new Object();
    private static volatile VideoCapture videoCapture;

    private static VideoFormat videoFormat;
    private static int videoWidth;
    private static int videoHeight;
    private GLCapture capturer;

    private boolean isConfigured;
    private boolean isStarted;
    private long framesCaptured;
	private Context context;
	private IProgressListener progressListener;

    public VideoCapture(Context context, IProgressListener progressListener)
    {
		this.context = context;
        this.progressListener = progressListener;
    }
    
    public static void init(int width, int height, int frameRate, int bitRate)
    {
    	videoWidth = width;
    	videoHeight = height;
    	
    	videoFormat = new VideoFormatAndroid(Codec, videoWidth, videoHeight);
    	videoFormat.setVideoFrameRate(frameRate);
        videoFormat.setVideoBitRateInKBytes(bitRate);
        videoFormat.setVideoIFrameInterval(IFrameInterval);
    }

    public void start(String videoPath) throws IOException
    {
        if (isStarted())
            throw new IllegalStateException(TAG + " already started!");

        capturer = new GLCapture(new AndroidMediaObjectFactory(context), progressListener);
        capturer.setTargetFile(videoPath);
        capturer.setTargetVideoFormat(videoFormat);

        AudioFormat audioFormat = new AudioFormatAndroid("audio/mp4a-latm", 44100, 2);
        capturer.setTargetAudioFormat(audioFormat);

        capturer.start();

        isStarted = true;
        isConfigured = false;
        framesCaptured = 0;
    }    
    
    public void stop()
    {
        if (!isStarted())
            throw new IllegalStateException(TAG + " not started or already stopped!");

        try {
            capturer.stop();
            isStarted = false;
        } catch (Exception ex) {
        	Log.e(TAG, "--- Exception: GLCapture can't stop");
        }

        capturer = null;
        isConfigured = false;
    }

    private void configure()
    {
        if (isConfigured())
            return;

        try {
            capturer.setSurfaceSize(videoWidth, videoHeight);
            isConfigured = true;
        } catch (Exception ex) {
        }
    }

    public void beginCaptureFrame()
    {
        if (!isStarted())
            return;

        configure();
        if (!isConfigured())
        	return;

        capturer.beginCaptureFrame();
    }

    public void endCaptureFrame()
    {
        if (!isStarted() || !isConfigured())
            return;

        capturer.endCaptureFrame();
        framesCaptured++;
    }

    public boolean isStarted()
    {
        return isStarted;
    }

    public boolean isConfigured()
    {
        return isConfigured;
    }

}

Create one more Java file. Name it SharedContext.java and put the following contents in it: 

package org.qtproject.qt5.android.bindings;

import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;

import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.util.Log;

public class SharedContext
{
	private static final String TAG = "SharedContext";
	
	private EGL10 egl;
	private EGLContext eglContext;
	private EGLDisplay eglDisplay;
	EGLConfig auxConfig;
	private EGLSurface auxSurface = null;
	private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
	private static final int EGL_OPENGL_ES2_BIT = 4;
	private int[] textures = new int[1];
	private SurfaceTexture surfaceTexture;
	
	SharedContext() {
		egl = (EGL10)EGLContext.getEGL();
		
		eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
		if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
			Log.e(TAG, "--- eglGetDisplay failed: " + GLUtils.getEGLErrorString(egl.eglGetError()));
		}
		
		int[] version = new int[2];
        if (!egl.eglInitialize(eglDisplay, version)) {
            Log.e(TAG, "--- eglInitialize failed: " + GLUtils.getEGLErrorString(egl.eglGetError()));
        }
        
        auxConfig = chooseEglConfig();
        if (auxConfig == null) {
            Log.e(TAG, "--- eglConfig not initialized");
        }
		
		int[] contextAttrs = new int[] {
				EGL_CONTEXT_CLIENT_VERSION, 2,
				EGL10.EGL_NONE
		};
		
		// Create a shared context for this thread
		eglContext = egl.eglCreateContext(eglDisplay, auxConfig, egl.eglGetCurrentContext(), contextAttrs);
		if (eglContext != null) {
			Log.d(TAG, "--- eglContext created");
		}
		
		GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
		surfaceTexture = new SurfaceTexture(textures[0]);
		
		auxSurface = egl.eglCreateWindowSurface(eglDisplay, auxConfig, surfaceTexture, null);
		if (auxSurface == null || auxSurface == EGL10.EGL_NO_SURFACE) {
            Log.e(TAG,"--- createWindowSurface returned error: " + GLUtils.getEGLErrorString(egl.eglGetError()));
        }
	}
	
	final int[] auxConfigAttribs = {
		EGL10.EGL_SURFACE_TYPE, EGL10.EGL_WINDOW_BIT,
		EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
		EGL10.EGL_RED_SIZE, 8,
		EGL10.EGL_GREEN_SIZE, 8,
		EGL10.EGL_BLUE_SIZE, 8,
		EGL10.EGL_ALPHA_SIZE, 0,
		EGL10.EGL_DEPTH_SIZE, 0,
		EGL10.EGL_STENCIL_SIZE, 0,
		EGL10.EGL_NONE
	};

	private EGLConfig chooseEglConfig() {
		EGLConfig[] auxConfigs = new EGLConfig[1];
		int[] auxConfigsCount = new int[1];
		Log.d(TAG, "--- chooseEglConfig()");
		if (!egl.eglChooseConfig(eglDisplay, auxConfigAttribs, auxConfigs, 1, auxConfigsCount)) {
			throw new IllegalArgumentException("eglChooseConfig failed " + GLUtils.getEGLErrorString(egl.eglGetError()));
		} else if (auxConfigsCount[0] > 0) {
			return auxConfigs[0];
		}
		return null;
	}

	public void makeCurrent() {
		egl.eglMakeCurrent(eglDisplay, auxSurface, auxSurface, eglContext);
	}

	public void doneCurrent() {
		egl.eglMakeCurrent(eglDisplay, EGL10.EGL_NO_SURFACE,
				EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
	}
}

Now, as for any other Android application, we need to set up a manifest XML file. The manifest tells the system which activity to launch and which permissions the application requires. Go to the Projects tab, switch to the Run settings of your Android kit, expand Deploy configurations, and press the Create AndroidManifest.xml button. Press Finish in the wizard. Adjust the manifest features and permissions:

AndroidManifest.xml

Switch to the XML Source view. Lock the orientation to landscape and hide the status bar by setting an activity theme:

<activity
    ...
    android:screenOrientation="landscape"
    android:theme="@android:style/Theme.NoTitleBar.Fullscreen">
    ...
</activity>
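
Besides the theme and orientation, the manifest also needs permissions, since the application writes the recorded video to the DCIM folder and records an audio track. Here is a minimal sketch, assuming external storage and microphone access are the only extra permissions your configuration requires:

<!-- Assumed permissions: external storage for saving to DCIM, microphone for the audio track -->
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>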

I prefer to add all the necessary files to the project file:

OTHER_FILES += \
    android-sources/libs/android-1.2.2415.jar \
    android-sources/libs/domain-1.2.2415.jar \
    android-sources/src/org/qtproject/qt5/android/bindings/Capturing.java \
    android-sources/src/org/qtproject/qt5/android/bindings/VideoCapture.java \
    android-sources/src/org/qtproject/qt5/android/bindings/SharedContext.java \
    android-sources/AndroidManifest.xml \
    main.qml

So your project structure should look like this:

Project Tree

The core functionality is concentrated in the qtcapturingwindow.cpp file. First of all, we need to connect to the Java side. JNI_OnLoad is a convenient place to look up and cache the class reference and method IDs:

#include "qtcapturingwindow.h"
#include <QDebug>
#include <QOpenGLFramebufferObject>
#include <QOpenGLShaderProgram>
#include <QElapsedTimer>
#include <QtAndroid>

static JavaVM *s_javaVM = 0;
static jclass s_classID = 0;

static jmethodID s_constructorMethodID = 0;
static jmethodID s_initCapturingMethodID = 0;
static jmethodID s_startCapturingMethodID = 0;
static jmethodID s_captureFrameMethodID = 0;
static jmethodID s_stopCapturingMethodID = 0;

static jmethodID s_getDirectoryDCIMMethodID = 0;

// This method is called immediately after the module is loaded
JNIEXPORT jint JNI_OnLoad(JavaVM *vm, void */*reserved*/)
{
    JNIEnv *env;
    if (vm->GetEnv(reinterpret_cast<void **>(&env), JNI_VERSION_1_6) != JNI_OK) {
        qCritical() << "Can't get the environment";
        return -1;
    }

    s_javaVM = vm;
    // Search for our class
    jclass clazz = env->FindClass("org/qtproject/qt5/android/bindings/Capturing");
    if (!clazz) {
        qCritical() << "Can't find Capturing class";
        return -1;
    }
    // Keep a global reference to it
    s_classID = (jclass)env->NewGlobalRef(clazz);

    // Search for its constructor
    s_constructorMethodID = env->GetMethodID(s_classID, "<init>", "(Landroid/content/Context;II)V");
    if (!s_constructorMethodID) {
        qCritical() << "Can't find Capturing class constructor";
        return -1;
    }

    s_initCapturingMethodID = env->GetMethodID(s_classID, "initCapturing", "(IIII)V");
    if (!s_initCapturingMethodID) {
        qCritical() << "Can't find initCapturing() method";
        return -1;
    }

    s_startCapturingMethodID = env->GetMethodID(s_classID, "startCapturing", "(Ljava/lang/String;)V");
    if (!s_startCapturingMethodID) {
        qCritical() << "Can't find startCapturing() method";
        return -1;
    }

    s_captureFrameMethodID = env->GetMethodID(s_classID, "captureFrame", "(I)V");
    if (!s_captureFrameMethodID) {
        qCritical() << "Can't find captureFrame() method";
        return -1;
    }

    s_stopCapturingMethodID = env->GetMethodID(s_classID, "stopCapturing", "()V");
    if (!s_stopCapturingMethodID) {
        qCritical() << "Can't find stopCapturing() method";
        return -1;
    }

    // Look up the static getDirectoryDCIM() method
    s_getDirectoryDCIMMethodID = env->GetStaticMethodID(s_classID, "getDirectoryDCIM", "()Ljava/lang/String;");
    if (!s_getDirectoryDCIMMethodID) {
        qCritical() << "Can't find getDirectoryDCIM() static method";
        return -1;
    }

    return JNI_VERSION_1_6;
}

The QQuickWindow::sceneGraphInitialized() signal is emitted when a new OpenGL context is created for this window. The QQuickWindow::beforeRendering() signal is emitted before the scene starts rendering, and QQuickWindow::afterRendering() is emitted after the scene has completed rendering, before the buffer swap. Connect to these signals with Qt::DirectConnection so the slots are invoked directly on the rendering thread.

QtCapturingWindow::QtCapturingWindow(QWindow *parent)
    : QQuickWindow(parent)
    , m_capturingObject(nullptr)
    , m_fbo(nullptr)
    , m_program(nullptr)
    , m_isRunning(false)
    , m_finalizeFrame(false)
    , m_fps(0)
    , m_nextCapture(0)
{
    connect(this, &QtCapturingWindow::sceneGraphInitialized, this, &QtCapturingWindow::onSceneGraphInitialized, Qt::DirectConnection);
    connect(this, &QtCapturingWindow::beforeRendering, this, &QtCapturingWindow::onBeforeRendering, Qt::DirectConnection);
    connect(this, &QtCapturingWindow::afterRendering, this, &QtCapturingWindow::onAfterRendering, Qt::DirectConnection);

    m_timer = new QElapsedTimer();
    m_timer->start();
}

Now that we have an OpenGL context, we can instantiate our Capturing Java class. The QOpenGLFramebufferObject class encapsulates an OpenGL framebuffer object. Be sure to attach a depth buffer to the framebuffer.

void QtCapturingWindow::onSceneGraphInitialized()
{
    // Create a new instance of Capturing
    JNIEnv *env;
    // Qt runs in a different thread than the Java UI, so the Java VM *MUST* always be attached to the current thread
    if (s_javaVM->AttachCurrentThread(&env, NULL) < 0) {
        qCritical() << "AttachCurrentThread failed";
        return;
    }

    m_capturingObject = env->NewGlobalRef(env->NewObject(s_classID, s_constructorMethodID, QtAndroid::androidActivity().object<jobject>(), width(), height()));
    if (!m_capturingObject) {
        qCritical() << "Can't create the Capturing object";
        return;
    }

    // Get DCIM dir
    jstring value = (jstring)env->CallStaticObjectMethod(s_classID, s_getDirectoryDCIMMethodID);
    const char *res = env->GetStringUTFChars(value, NULL);
    m_videoDir = QString(res);
    env->ReleaseStringUTFChars(value, res);

    // Don't forget to detach from current thread
    s_javaVM->DetachCurrentThread();

    m_fbo = new QOpenGLFramebufferObject(size());
    m_fbo->setAttachment(QOpenGLFramebufferObject::Depth);
}

To link against the Qt Android Extras module, add this line to your project file:

QT += androidextras

Don't forget to properly release all resources:

QtCapturingWindow::~QtCapturingWindow()
{
    if (m_isRunning)
        stopCapturing();

    delete m_fbo;
    delete m_timer;
}

Method QQuickWindow::setRenderTarget() sets the render target for this window. The default is to render to the surface of the window, in which case the render target is 0.

void QtCapturingWindow::onBeforeRendering()
{
    if (m_isRunning) {
        if (renderTarget() == 0) {
            setRenderTarget(m_fbo); // This function can only be called from the thread doing the rendering.
        }
        qint64 elapsedTime = m_timer->elapsed() - m_startTime;
        if (elapsedTime >= m_nextCapture) {
            m_finalizeFrame = true;
            m_nextCapture += 1000 / m_videoFrameRate;
        }
    } else {
        if (renderTarget() != 0) {
            setRenderTarget(0); // This function can only be called from the thread doing the rendering.
        }
    }
}

After that we have a texture with the QML scene rendered into it. Next we need to render this texture both to the display and to the video surface.

void QtCapturingWindow::onAfterRendering()
{
    static qint64 frameCount = 0;
    static qint64 fpsUpdate = 0;
    static const int fpsUpdateRate = 4; // updates per sec

    if (m_isRunning) {
        // Draw fullscreen quad
        QOpenGLFramebufferObject::bindDefault();
        drawQuad(m_fbo->texture());

        if (m_finalizeFrame) {
            // Pass color attachment to java side for actual capturing
            captureFrame(m_fbo->texture());
            m_finalizeFrame = false;
        }
    }

    // Update FPS
    frameCount++;
    if (m_timer->elapsed() > fpsUpdate) {
        fpsUpdate += 1000 / fpsUpdateRate;
        m_fps = frameCount * fpsUpdateRate;
        frameCount = 0;
        emit fpsChanged();
    }
}

This method implements the fullscreen quad renderer:

void QtCapturingWindow::drawQuad(int textureID)
{
    if (!m_program) {
        m_program = new QOpenGLShaderProgram();
        m_program->addShaderFromSourceCode(QOpenGLShader::Vertex,
                                           "attribute highp vec4 vertices;"
                                           "varying highp vec2 coords;"
                                           "void main() {"
                                           "    gl_Position = vertices;"
                                           "    coords = (vertices.xy + 1.0) * 0.5;"
                                           "}");
        m_program->addShaderFromSourceCode(QOpenGLShader::Fragment,
                                           "uniform sampler2D texture;"
                                           "varying highp vec2 coords;"
                                           "void main() {"
                                           "    gl_FragColor = texture2D(texture, coords);"
                                           "}");

        m_program->bindAttributeLocation("vertices", 0);

        if (!m_program->link()) {
            qDebug() << "Link wasn't successful: " << m_program->log();
        }
    }

    m_program->bind();
    m_program->enableAttributeArray(0);

    float values[] = {
        -1, -1,
        1, -1,
        -1,  1,
        1,  1
    };

    m_program->setAttributeArray(0, GL_FLOAT, values, 2);
    glBindTexture(GL_TEXTURE_2D, textureID);
    glViewport(0, 0, size().width(), size().height());
    glDisable(GL_DEPTH_TEST);
    glClearColor(0, 0, 0, 1);
    glClear(GL_COLOR_BUFFER_BIT);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    m_program->disableAttributeArray(0);
    m_program->release();
}

Every time we start capturing, we configure the output video format, so you can change the parameters between recordings.

class InitAndStartCapturingJob : public QRunnable
{
    QtCapturingWindow *m_window;

public:
    InitAndStartCapturingJob(QtCapturingWindow *window) : m_window(window) {}

    void run()
    {
        qDebug() << "--- InitAndStartCapturingJob::run()";
        m_window->initAndStartCapturing();
    }
};

void QtCapturingWindow::startCapturing(int width, int height, int frameRate, int bitRate, QString videoName)
{
    if (!m_capturingObject)
        return;

    m_videoWidth = width;
    m_videoHeight = height;
    m_videoFrameRate = frameRate;
    m_videoBitRate = bitRate;
    m_videoName = videoName;

    scheduleRenderJob(new InitAndStartCapturingJob(this), RenderStage::AfterSynchronizingStage);
}

void QtCapturingWindow::initAndStartCapturing()
{
    JNIEnv *env;
    if (s_javaVM->AttachCurrentThread(&env, NULL) < 0) {
        qCritical() << "AttachCurrentThread failed";
        return;
    }

    // Setup format
    env->CallVoidMethod(m_capturingObject, s_initCapturingMethodID, m_videoWidth, m_videoHeight, m_videoFrameRate, m_videoBitRate);

    // Start capturing
    QString videoPath = m_videoDir + m_videoName;
    jstring string = env->NewString(reinterpret_cast<const jchar *>(videoPath.constData()), videoPath.length());
    env->CallVoidMethod(m_capturingObject, s_startCapturingMethodID, string);
    env->DeleteLocalRef(string);

    s_javaVM->DetachCurrentThread();

    m_startTime = m_timer->elapsed();
    m_nextCapture = 0;
    m_isRunning = true;
    emit isRunningChanged();
}

This is how we pass the texture handle to the captureFrame() method of our Capturing.java class:

void QtCapturingWindow::captureFrame(int textureID)
{
    if (!m_capturingObject)
        return;

    JNIEnv *env;
    if (s_javaVM->AttachCurrentThread(&env, NULL) < 0) {
        qCritical() << "AttachCurrentThread failed";
        return;
    }

    env->CallVoidMethod(m_capturingObject, s_captureFrameMethodID, textureID);

    s_javaVM->DetachCurrentThread();
}

Finally, here is how capturing is stopped:

void QtCapturingWindow::stopCapturing()
{
    if (!m_capturingObject)
        return;

    m_isRunning = false;
    emit isRunningChanged();

    JNIEnv *env;
    if (s_javaVM->AttachCurrentThread(&env, NULL) < 0) {
        qCritical() << "AttachCurrentThread failed";
        return;
    }

    env->CallVoidMethod(m_capturingObject, s_stopCapturingMethodID);

    s_javaVM->DetachCurrentThread();
}

That’s all you need to add video capturing capability to Qt Quick applications. Run your test application; you can find the recorded videos in the /mnt/sdcard/DCIM/ folder of your Android device. Enjoy!