Live video from GigE cameras

Posted: 2021-02-25 09:22:11

Problem description:

I'm having trouble showing a live video stream from two GigE cameras in QML. I tried it before with QLabels and QPixmap and it worked without any problem, but QML labels have no pixmap property that I can feed images into via signals and slots.

Here is my Python code:

import sys
import os
from PySide2.QtGui import QGuiApplication
from PySide2.QtQml import QQmlApplicationEngine
from PySide2.QtGui import QImage, QPixmap
from PySide2.QtCore import Slot, QThread, Signal, Qt, QObject
import cv2
from pypylon import pylon


tlFactory = pylon.TlFactory.GetInstance()
devices = tlFactory.EnumerateDevices()
if len(devices) == 0:
    raise pylon.RuntimeException("No camera present.")

cameras = pylon.InstantCameraArray(min(len(devices), 2))


for i, cam in enumerate(cameras):
    cam.Attach(tlFactory.CreateDevice(devices[i]))


class CamThread(QThread):
    cam1 = Signal(QImage)
    cam2 = Signal(QImage)

    def run(self):
        cameras.StartGrabbing(pylon.GrabStrategy_LatestImageOnly)

        try:

            while cameras.IsGrabbing():
                grabResult1 = cameras[0].RetrieveResult(
                    5000, pylon.TimeoutHandling_ThrowException
                )
                grabResult2 = cameras[1].RetrieveResult(
                    5000, pylon.TimeoutHandling_ThrowException
                )

                if grabResult1.GrabSucceeded() and grabResult2.GrabSucceeded():
                    img1 = grabResult1.GetArray()
                    img2 = grabResult2.GetArray()
                    rgb1 = cv2.cvtColor(img1, cv2.COLOR_YUV2RGB_Y422)
                    rgb2 = cv2.cvtColor(img2, cv2.COLOR_YUV2RGB_Y422)

                    h1, w1, ch1 = rgb1.shape
                    h2, w2, ch2 = rgb2.shape

                    bytesPerLine1 = ch1 * w1
                    bytesPerLine2 = ch2 * w2
                    # Build the QImages from the converted RGB buffers
                    convertToQtFormat1 = QImage(
                        rgb1.data, w1, h1, bytesPerLine1, QImage.Format_RGB888
                    )
                    convertToQtFormat2 = QImage(
                        rgb2.data, w2, h2, bytesPerLine2, QImage.Format_RGB888
                    )

                    p = convertToQtFormat1.scaled(800, 746, Qt.KeepAspectRatio)
                    q = convertToQtFormat2.scaled(800, 746, Qt.KeepAspectRatio)

                    self.cam1.emit(p)
                    self.cam2.emit(q)

        except Exception as error:
            print(error)


class MainWindow(QObject):
    def __init__(self):
        QObject.__init__(self)
        self.CamThread = CamThread()
        self.CamThread.cam1.connect(self.camera1)
        self.CamThread.cam2.connect(self.camera2)
        self.CamThread.start()

    @Slot(QImage)
    def camera1(self, image):
        pass

    @Slot(QImage)
    def camera2(self, image):
        pass


if __name__ == "__main__":
    app = QGuiApplication(sys.argv)
    backend = MainWindow()
    engine = QQmlApplicationEngine()
    engine.rootContext().setContextProperty("backend", backend)
    engine.load(os.path.join(os.path.dirname(__file__), "main.qml"))

    if not engine.rootObjects():
        sys.exit(-1)
    sys.exit(app.exec_())

So how can I display a live video stream using QML/PySide2? I am using Qt Design Studio.

Comments:

I don't know how it works in Python, but normally you would use QQuickImageProvider for this kind of thing.

I'm not sure whether it is the solution for you, but VideoOutput might be useful.

Answer 1:

Although the QQuickImageProvider option could work, it has the drawback that you have to generate a different URL for every frame. A better option is to use VideoOutput; for example, in your case the following implementation should work (untested):

from functools import cached_property
import os
import random
import sys
import threading

import cv2

from PySide2.QtCore import Property, QObject, Qt, QSize, QTimer, Signal, Slot
from PySide2.QtGui import QColor, QGuiApplication, QImage
from PySide2.QtMultimedia import QAbstractVideoSurface, QVideoFrame, QVideoSurfaceFormat
from PySide2.QtQml import QQmlApplicationEngine
import shiboken2

from pypylon import pylon


class CameraProvider(QObject):
    imageChanged = Signal(int, QImage)

    def start(self, cameras):
        threading.Thread(target=self._execute, args=(cameras,), daemon=True).start()

    def _execute(self, cameras):
        while cameras.IsGrabbing():
            for i, camera in enumerate(cameras):
                try:
                    grab_result = cameras[i].RetrieveResult(
                        5000, pylon.TimeoutHandling_ThrowException
                    )
                    if grab_result.GrabSucceeded():
                        img = grab_result.GetArray()
                        # FIXME
                        # convert img to qimage
                        qimage = QImage(800, 746, QImage.Format_RGB888)
                        qimage.fill(QColor(*random.sample(range(0, 255), 3)))
                        if shiboken2.isValid(self):
                            self.imageChanged.emit(i, qimage.copy())
                except Exception as error:
                    print(error)


class CameraService(QObject):
    surfaceChanged = Signal()

    def __init__(self, parent=None):
        super().__init__(parent)
        self._surface = None
        self._format = QVideoSurfaceFormat()
        self._format_is_valid = False

    def get_surface(self):
        return self._surface

    def set_surface(self, surface):
        if self._surface is surface:
            return
        if (
            self._surface is not None
            and self._surface is not surface
            and self._surface.isActive()
        ):
            self._surface.stop()
        self._surface = surface
        self.surfaceChanged.emit()

        if self._surface is not None:
            self._format = self._surface.nearestFormat(self._format)
            self._surface.start(self._format)

    videoSurface = Property(
        QAbstractVideoSurface,
        fget=get_surface,
        fset=set_surface,
        notify=surfaceChanged,
    )

    @Slot(QImage)
    def update_frame(self, qimage):
        if self.videoSurface is None or qimage.isNull():
            return
        if not self._format_is_valid:
            self._set_format(qimage.width(), qimage.height(), QVideoFrame.Format_RGB32)
            self._format_is_valid = True
        qimage.convertTo(
            QVideoFrame.imageFormatFromPixelFormat(QVideoFrame.Format_RGB32)
        )
        self._surface.present(QVideoFrame(qimage))

    def _set_format(self, width, height, pixel_format):
        size = QSize(width, height)
        video_format = QVideoSurfaceFormat(size, pixel_format)
        self._format = video_format
        if self._surface is not None:
            if self._surface.isActive():
                self._surface.stop()
            self._format = self._surface.nearestFormat(self._format)
            self._surface.start(self._format)


class CameraManager(QObject):
    def __init__(self, cameras, parent=None):
        super().__init__(parent)
        self._services = []
        self.provider.imageChanged.connect(self.handle_image_changed)
        self.provider.start(cameras)
        for _ in cameras:
            self._services.append(CameraService())

    @cached_property
    def provider(self):
        return CameraProvider()

    @Slot(int, QImage)
    def handle_image_changed(self, index, qimage):
        self._services[index].update_frame(qimage)

    def get_services(self):
        return self._services

    services = Property("QVariantList", fget=get_services, constant=True)


def main():
    app = QGuiApplication(sys.argv)

    tlFactory = pylon.TlFactory.GetInstance()
    devices = tlFactory.EnumerateDevices()
    if len(devices) == 0:
        raise pylon.RuntimeException("No camera present.")

    cameras = pylon.InstantCameraArray(min(len(devices), 2))

    for i, cam in enumerate(cameras):
        cam.Attach(tlFactory.CreateDevice(devices[i]))

    manager = CameraManager(cameras)

    engine = QQmlApplicationEngine()
    engine.rootContext().setContextProperty("manager", manager)
    engine.load(os.path.join(os.path.dirname(__file__), "main.qml"))
    if not engine.rootObjects():
        sys.exit(-1)

    sys.exit(app.exec_())


if __name__ == "__main__":
    main()

main.qml:

import QtQuick 2.14
import QtQuick.Window 2.14
import QtMultimedia 5.14

Window {
    visible: true
    width: 640
    height: 480
    title: qsTr("Hello World")

    GridView {
        width: 300; height: 200

        model: manager !== null ? manager.services : []
        delegate: VideoOutput {
            width: 100
            height: 100
            fillMode: VideoOutput.PreserveAspectCrop
            source: model.modelData
        }
    }
}
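The answer above deliberately leaves the array-to-QImage conversion as a FIXME. A minimal sketch of that step, assuming the cameras deliver YUV422 data as in the question's cvtColor call (it would replace the random-color placeholder inside _execute):

# assumption: grab_result.GetArray() returns a YUV422 frame, as in the question's code
rgb = cv2.cvtColor(img, cv2.COLOR_YUV2RGB_Y422)
height, width, channels = rgb.shape
qimage = QImage(rgb.data, width, height, channels * width, QImage.Format_RGB888)
# the existing self.imageChanged.emit(i, qimage.copy()) then hands QML its own copy of the buffer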

Comments:

I tried it, but the window closes after 2 seconds without any error or warning?

@user31562 I have no way to test the code since I don't have the hardware. I have been testing by creating QImages with random colors and those work correctly, so I suggest you check the conversion logic, which I have not modified. I also recommend running the script from the console, since that way you may get more information about the error.

Thanks, it works after some changes to the image conversion. When I test with one camera it is fine, but with two cameras the frame rate drops and the video becomes slow. How can I fix this?

Answer 2:

Qt provides several ways to pass images / video streams to QML:

1. Convert the pixmap to a base64 encoding

QByteArray byteArray;
QBuffer buffer(&byteArray);
buffer.open(QIODevice::WriteOnly);
pixmap.save(&buffer,"PNG");
QString data("data:image/png;base64,");
data.append(QString::fromLatin1(byteArray.toBase64().data()));

This base64-encoded image can be passed to Image::source.
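For reference, a rough PySide2 equivalent of the C++ snippet above (a sketch; the helper name pixmap_to_data_url is not from the original answer):

import base64

from PySide2.QtCore import QBuffer, QByteArray, QIODevice


def pixmap_to_data_url(pixmap):
    # Serialize the pixmap to an in-memory PNG and wrap it in a data: URL
    byte_array = QByteArray()
    buffer = QBuffer(byte_array)
    buffer.open(QIODevice.WriteOnly)
    pixmap.save(buffer, "PNG")
    buffer.close()
    return "data:image/png;base64," + base64.b64encode(byte_array.data()).decode("ascii")

The returned string can be assigned to an Image item's source property; for a live stream it has to be re-assigned on every frame, which is the main drawback of this method.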

2. Use QQuickImageProvider

This allows a custom image://... URL to be connected directly to a QPixmap or QImage. See the documentation for more information.
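A minimal PySide2 sketch of this approach, assuming a provider registered under the name "live" (the provider name and the frame-counter trick are illustrative, not part of the original answer):

from PySide2.QtGui import QImage
from PySide2.QtQml import QQmlImageProviderBase
from PySide2.QtQuick import QQuickImageProvider


class LiveImageProvider(QQuickImageProvider):
    def __init__(self):
        super().__init__(QQmlImageProviderBase.Image)
        self.image = QImage()  # updated from the grabbing thread

    def requestImage(self, id, size, requestedSize):
        # Called by QML each time an Image loads an "image://live/..." URL
        return self.image


# Registration: engine.addImageProvider("live", LiveImageProvider())
# QML usage:    Image { source: "image://live/" + frameCounter }

Since QML caches the result per URL, a new URL (for example an incrementing counter in the id) is needed for every frame, which is exactly the drawback mentioned in Answer 1.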

3. Use QtMultimedia

In particular, VideoOutput may be useful (this is the approach used in Answer 1 above).

Comments:
