1

I am using the PyQt framework to create a GUI which will display the feed from two cameras (attached via USB) simultaneously. So far, I am working on displaying just the one camera in the GUI window and my code may be summarised as follows:

from PyQt5 import QtWidgets, QtGui
from PyQt5.QtCore import pyqtSignal, pyqtSlot, Qt, QThread, QRunnable, QThreadPool
import sys
import cv2
import numpy as np

class CameraThread(QRunnable):
    '''
    Thread that will read the output from a given camera and emit signal containing the image data 
    and the camera it comes from
    '''
    ## the int in this signal indicates the camID, once more than one cam is instantiated
    change_pixmap_signal = pyqtSignal(int, np.ndarray)
    
    def __init__(self, camID):
        super().__init__()
        self.camID = camID
        
    def run(self):
        cam = cv2.VideoCapture(self.camID)
        while True:
            ret, cv_img = cam.read()
            if ret:
                self.change_pixmap_signal.emit(self.camID, cv_img)

class MainWindow(QtWidgets.QMainWindow):
    
    def __init__(self):
        super().__init__()
        self.initUI()

    def initUI(self):
        ## general window stuff
        self.setWindowTitle('Cam Feeds')
        self.setGeometry(500, 50, 900, 900)

        self.cam_display_w = 400
        self.cam_display_h = 400
        
        self.camC_lab = QtWidgets.QLabel('camC')
        self.camC_lab.setAlignment(Qt.AlignCenter)
        self.camC_lab.resize(self.cam_display_w, self.cam_display_h)
        self.camC_lab.setStyleSheet('QLabel {background-color: rgb(50, 200, 100)}')

        self.setCentralWidget(self.camC_lab)
        
        self.show()
          
        ## Setting up the camera threads
        self.camC_thread = CameraThread(1)
        self.camC_thread.change_pixmap_signal.connect(self.update_image)
        
        ## Setting up threadpool
        self.threadpool = QThreadPool()
        self.threadpool.start(self.camC_thread)
        
    @pyqtSlot(int, np.ndarray)
    def update_image(self, camID, cv_img):
        '''
        updates the images displayed in each camera label
        '''
        qt_img = self.convert_cv_qt(cv_img)
        self.camC_lab.setPixmap(qt_img)

            
    def convert_cv_qt(self, cv_img):
        ''' convert from opencv image to QPixmap (without saving to a physical .png file') '''
        rgb_image = cv2.cvtColor(cv_img, cv2.COLOR_BGR2RGB)
        h, w, ch = rgb_image.shape
        bytes_per_line = ch * w
        convert_to_Qt_format = QtGui.QImage(rgb_image.data, w, h, bytes_per_line, QtGui.QImage.Format_RGB888)
        p = convert_to_Qt_format.scaled(self.cam_display_w, self.cam_display_h, Qt.KeepAspectRatio)
        return QtGui.QPixmap.fromImage(p)            


def main():
    app = QtWidgets.QApplication(sys.argv)
    a = MainWindow()
    sys.exit(app.exec_())
        
if __name__ == '__main__':
    main()

When I run this code, I get the following error:

File "C:/Users/henry/Desktop/main_take1.py", line 105, in initUI
    self.camC_thread.change_pixmap_signal.connect(self.update_image)

TypeError: CameraThread cannot be converted to PyQt5.QtCore.QObject in this context

Please could someone explain this error message to me!

Ideally, I'd create CameraThread objects which inherit QRunnables', therefore allowing the use of a QThreadPool to manage the execution of parallel threads, but I get the above TypeError...

I've tried switching the parent of the CameraThread class to a QThread, and this works, however I'm anticipating once I connect a second camera (to be displayed simultaneously), I'd need to manage the order/priority of execution of the two CameraThread instances myself, and I'd really rather not.

What would you suggest? Are there any multithreading tools, similar to QThreadPool, which take QThreads'? Or can I just stick with QRunnables? halp plz x

andra_16
  • 21
  • 2

0 Answers0