Problem description
I am building a GUI with the PyQt framework that will display the feeds of two cameras (connected over USB) at the same time. So far I only have a single camera displaying in the GUI window, and my code can be summarised as follows:
from PyQt5 import QtWidgets, QtGui
from PyQt5.QtCore import pyqtSignal, pyqtSlot, Qt, QThread, QRunnable, QThreadPool
import sys
import cv2
import numpy as np


class CameraThread(QRunnable):
    '''
    Thread that reads the output from a given camera and emits a signal containing
    the image data and the camera it comes from.
    '''
    ## the int in this signal indicates the camID, once more than one cam is instantiated
    change_pixmap_signal = pyqtSignal(int, np.ndarray)

    def __init__(self, camID):
        super().__init__()
        self.camID = camID

    def run(self):
        cam = cv2.VideoCapture(self.camID)
        while True:
            ret, cv_img = cam.read()
            if ret:
                self.change_pixmap_signal.emit(self.camID, cv_img)


class MainWindow(QtWidgets.QMainWindow):
    def __init__(self):
        super().__init__()
        self.initUI()

    def initUI(self):
        ## general window stuff
        self.setWindowTitle('Cam Feeds')
        self.setGeometry(500, 50, 900, 900)
        self.cam_display_w = 400
        self.cam_display_h = 400

        ## label that will display the camera feed
        self.camC_lab = QtWidgets.QLabel('camC')
        self.camC_lab.setAlignment(Qt.AlignCenter)
        self.camC_lab.resize(self.cam_display_w, self.cam_display_h)
        self.camC_lab.setStyleSheet('QLabel {background-color: rgb(50,200,100)}')
        self.setCentralWidget(self.camC_lab)
        self.show()

        ## Setting up the camera threads
        self.camC_thread = CameraThread(1)
        self.camC_thread.change_pixmap_signal.connect(self.update_image)

        ## Setting up the threadpool
        self.threadpool = QThreadPool()
        self.threadpool.start(self.camC_thread)

    @pyqtSlot(int, np.ndarray)
    def update_image(self, camID, cv_img):
        '''
        updates the image displayed in each camera label
        '''
        qt_img = self.convert_cv_qt(cv_img)
        self.camC_lab.setPixmap(qt_img)

    def convert_cv_qt(self, cv_img):
        ''' convert from an OpenCV image to a QPixmap (without saving to a physical .png file) '''
        rgb_image = cv2.cvtColor(cv_img, cv2.COLOR_BGR2RGB)
        h, w, ch = rgb_image.shape
        bytes_per_line = ch * w
        convert_to_Qt_format = QtGui.QImage(rgb_image.data, w, h, bytes_per_line, QtGui.QImage.Format_RGB888)
        p = convert_to_Qt_format.scaled(self.cam_display_w, self.cam_display_h, Qt.KeepAspectRatio)
        return QtGui.QPixmap.fromImage(p)


def main():
    app = QtWidgets.QApplication(sys.argv)
    a = MainWindow()
    sys.exit(app.exec_())


if __name__ == '__main__':
    main()
File "C:/Users/henry/Desktop/main_take1.py",line 105,in initUI
self.camC_thread.change_pixmap_signal.connect(self.update_image)
TypeError: CameraThread cannot be converted to PyQt5.QtCore.QObject in this context
Could someone please explain this error message to me?
Ideally I would create CameraThread objects that inherit from QRunnable, so that a QThreadPool can manage the execution of the parallel threads, but then I get the TypeError above...
I tried switching the parent class of CameraThread to QThread and that works fine, but I expect that once the second camera is connected (with both feeds displayed at the same time) I would have to manage the ordering/priority of the two CameraThread instances myself, which I would rather avoid.
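For context, a minimal sketch of what such a QThread-based variant typically looks like (a reconstruction, not the exact code from the project); it works because QThread already inherits QObject, so a class-level pyqtSignal is allowed there:

    from PyQt5.QtCore import QThread, pyqtSignal
    import cv2
    import numpy as np


    class CameraThread(QThread):
        '''QThread-based variant: QThread is a QObject, so the
        class-level signal definition is valid here.'''
        change_pixmap_signal = pyqtSignal(int, np.ndarray)

        def __init__(self, camID):
            super().__init__()
            self.camID = camID

        def run(self):
            cam = cv2.VideoCapture(self.camID)
            while True:
                ret, cv_img = cam.read()
                if ret:
                    self.change_pixmap_signal.emit(self.camID, cv_img)


    ## In MainWindow.initUI the thread would then be started directly,
    ## instead of being handed to a QThreadPool:
    ##   self.camC_thread = CameraThread(1)
    ##   self.camC_thread.change_pixmap_signal.connect(self.update_image)
    ##   self.camC_thread.start()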
Do you have any suggestions? Is there a multithreading tool similar to QThreadPool that works with QThreads? Or is there a way I can stick with QRunnables? Help please!
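Purely as an illustrative sketch (the names CameraSignals and self.signals are hypothetical, not taken from the code above): a class-level pyqtSignal only becomes a usable bound signal on QObject subclasses, and QRunnable does not inherit QObject, which is what the TypeError is complaining about. A commonly used workaround is to keep the QRunnable but move the signal onto a small QObject held by the runnable, so QThreadPool stays in charge of scheduling:

    from PyQt5.QtCore import QObject, QRunnable, pyqtSignal
    import cv2
    import numpy as np


    class CameraSignals(QObject):
        '''Small QObject that owns the signal on behalf of the runnable.'''
        change_pixmap_signal = pyqtSignal(int, np.ndarray)


    class CameraRunnable(QRunnable):
        '''QRunnable that emits frames through a separate signal holder.'''

        def __init__(self, camID):
            super().__init__()
            self.camID = camID
            self.signals = CameraSignals()

        def run(self):
            cam = cv2.VideoCapture(self.camID)
            while True:
                ret, cv_img = cam.read()
                if ret:
                    self.signals.change_pixmap_signal.emit(self.camID, cv_img)


    ## Connections then go through the helper object, and the QThreadPool
    ## still decides when each runnable actually executes:
    ##   self.camC_thread = CameraRunnable(1)
    ##   self.camC_thread.signals.change_pixmap_signal.connect(self.update_image)
    ##   self.threadpool = QThreadPool()
    ##   self.threadpool.start(self.camC_thread)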