Can I use OpenCV YOLO to detect my own objects?
-
Unfortunately I can't use Qt for this because I am working with something that isn't compatible, but OpenCV can be used together with Qt, so I thought I would ask here.
-
Hi,
What is that incompatible something?
OpenCV and Qt can indeed be used together.
Your question is not really clear. -
Unfortunately I can't use Qt for this because I am working with something that isn't compatible, but OpenCV can be used together with Qt, so I thought I would ask here.
@AI_Messiah It looks like your question is not directly related to Qt.
By the way, have you checked some OpenCV YOLO tutorials? Or this one? -
I extended the code with a couple of new functions to handle image output from both the camera and a video file; I also added logging and improved the display of detected objects.
# NOTE(review): this paste is the interior of a Qt widget class (the class
# header is not shown). The statements below belong in __init__; the defs
# below them are methods of that class.

# Wire the buttons to their handlers.
self.BtCamera.clicked.connect(self.Camera)
self.BtVideo.clicked.connect(self.Video)
self.BtDetectObject.clicked.connect(self.toggle_objects)
self.BtDetectPeople.clicked.connect(self.toggle_people)

self.yolo = YOLO('best.pt')       # custom-trained YOLO weights
self.cap = None                   # active cv2.VideoCapture (camera or file), or None
self.videoActive = False
self.cameraActive = False
self.det_objects = False          # YOLO object detection on/off
self.det_was_active = False       # remembers detection state while a person is in the zone
self.people_present = False
self._seen, self._ttl = {}, 2000  # dedup cache: (name, cx//10, cy//10) -> last-seen ms; 2 s TTL

self.frame_timer = QtCore.QTimer(self)   # drives frame grabbing/rendering
self.frame_timer.timeout.connect(self.tick)
self.people_timer = QtCore.QTimer(self)  # polls the person sensor
self.people_timer.timeout.connect(self.tick_people)


def _stop_stream(self, log_msg=None):
    """Stop the frame timer, release the capture, clear both previews.

    Shared cleanup for Camera/Video stop and end-of-file handling
    (previously duplicated in three places). Logs *log_msg* if given.
    """
    if self.frame_timer.isActive():
        self.frame_timer.stop()
    if self.cap:
        try:
            self.cap.release()
        except Exception:  # was a bare except; releasing must never crash the UI
            pass
    self.cap = None
    self.videoActive = self.cameraActive = False
    if hasattr(self, 'LCamera1'):
        self.LCamera1.clear()
    if hasattr(self, 'LCamera2'):
        self.LCamera2.clear()
    if log_msg:
        self.add_log(log_msg)


def Camera(self):
    """Toggle the webcam (device 0): start streaming if idle, stop otherwise."""
    if not self.cameraActive:
        if self.cap:
            try:
                self.cap.release()
            except Exception:
                pass
        self.cap = cv2.VideoCapture(0)
        if not self.cap.isOpened():
            self.add_log('Камера: не удалось открыть 0')
            return
        self.videoActive, self.cameraActive = False, True
        if not self.frame_timer.isActive():
            self.frame_timer.start(33)  # ~30 fps
        self.add_log('Камера: старт')
    else:
        self._stop_stream('Камера: стоп')


def Video(self):
    """Toggle playback of the demo video file."""
    if not self.videoActive:
        src = 'Video2.mp4'
        if self.cap:
            try:
                self.cap.release()
            except Exception:
                pass
        self.cap = cv2.VideoCapture(src)
        if not self.cap.isOpened():
            self.add_log(f'Видео: не удалось открыть {src}')
            return
        self.videoActive, self.cameraActive = True, False
        if not self.frame_timer.isActive():
            self.frame_timer.start(33)
        self.add_log(f'Видео: старт {src}')
    else:
        self._stop_stream('Видео: стоп')


def toggle_objects(self):
    """Flip object detection on/off and reflect the state in the UI."""
    self.det_objects = not self.det_objects
    if hasattr(self, 'BtDetectObject'):
        self.BtDetectObject.setText("Stop detection" if self.det_objects else "Detect objects")
    self.add_log("Детекция объектов: ON" if self.det_objects else "Детекция объектов: OFF")


def toggle_people(self):
    """Start/stop the person-monitoring poll (every 200 ms)."""
    if self.people_timer.isActive():
        self.people_timer.stop()
        self.add_log('Мониторинг человека: OFF')
    else:
        self.people_timer.start(200)
        self.add_log('Мониторинг человека: ON')


def _dedup(self, name, cx, cy):
    """Return True at most once per TTL window for a class near a spot.

    The centre is quantised to a 10 px grid so that small jitter between
    frames still counts as the same physical object.
    """
    k, now = (name, cx // 10, cy // 10), int(time.time() * 1000)
    if now - self._seen.get(k, 0) < self._ttl:
        return False
    self._seen[k] = now
    return True


def _ok(self, name: str) -> bool:
    """True when the checkbox mapped to this class exists and is ticked."""
    name = name.lower()
    mapping = {
        'adjustable_wrench': getattr(self, 'CBBox1', None),
        'gear': getattr(self, 'CBBox2', None),
        'hammer': getattr(self, 'CBBox3', None),
        'screwdriver': getattr(self, 'CBReject', None),
    }
    w = mapping.get(name)
    return bool(w and w.isChecked())


def _to_label(self, label: QtWidgets.QLabel, bgr):
    """Render a BGR frame into a QLabel, keeping the aspect ratio."""
    if label is None:
        return
    rgb = cv2.cvtColor(bgr, cv2.COLOR_BGR2RGB)
    h, w, c = rgb.shape
    # QPixmap.fromImage copies the pixels, so the local rgb buffer may die.
    qimg = QtGui.QImage(rgb.data, w, h, w * c, QtGui.QImage.Format_RGB888)
    label.setPixmap(QtGui.QPixmap.fromImage(qimg).scaled(
        label.width(), label.height(), QtCore.Qt.KeepAspectRatio))


def tick_people(self):
    """Poll the person sensor; pause streaming/detection while the zone is occupied."""
    try:
        if self.smart_camera.getPerson():
            self.lamp.setLamp('0001')  # person present
            if self.det_objects:
                self.det_was_active = True  # so detection can resume automatically
                self.det_objects = False
            if self.frame_timer.isActive():
                self.frame_timer.stop()
            if not self.people_present:
                self.people_present = True
                self.add_log('Человек в рабочей зоне! Детекция остановлена')
        else:
            self.lamp.setLamp('1000')  # zone clear
            if self.people_present:
                self.people_present = False
                self.add_log('Зона свободна')
            if not self.frame_timer.isActive() and (self.videoActive or self.cameraActive) and self.cap:
                self.frame_timer.start(33)
            if self.det_was_active and not self.det_objects:
                self.det_objects = True
                self.det_was_active = False
                self.add_log('Детекция объектов возобновлена')
    except Exception as e:
        self.add_log(f'getPerson() error: {e}')


def tick(self):
    """Grab one frame, optionally run YOLO, and paint both preview labels."""
    if not self.cap:
        return
    ok, frame = self.cap.read()
    if not ok:
        # Camera failure or end of the video file: shut the stream down.
        if self.videoActive:
            self.add_log("Видео: конец файла")
        self._stop_stream()
        return
    img1 = frame.copy()  # annotated view
    img2 = frame         # raw view
    if self.det_objects:
        try:
            r = self.yolo(frame, verbose=False, device='cpu')[0]
            if hasattr(r, 'boxes') and r.boxes is not None:
                keep = [self._ok(str(self.yolo.names[int(b.cls[0])]).lower()) for b in r.boxes]
                import numpy as np
                # Fix: apply the mask unconditionally, so that when nothing
                # passes the checkbox filter no unfiltered boxes are drawn.
                r.boxes = r.boxes[np.array(keep, dtype=bool)]
                img1 = r.plot()  # boxes + class name + confidence
        except Exception as e:
            self.add_log(f"YOLO error: {e}")
    if hasattr(self, 'LCamera1'):
        self._to_label(self.LCamera1, img1)
    if hasattr(self, 'LCamera2'):
        self._to_label(self.LCamera2, img2)
-
I extended the code with a couple of new functions to handle image output from both the camera and a video file; I also added logging and improved the display of detected objects.
# NOTE(review): interior of the widget's __init__ (class header not shown).

self.busy = False            # reserved flag; not read in the visible code
self.moves_enabled = False   # robot pick-and-place toggle
# YOLO class name -> destination category ("Брак" is the reject bin).
self.cls2cat = {
    "adjustable_wrench": "Box1",
    "gear": "Box2",
    "hammer": "Box3",
    "screwdriver": "Брак",
}

# Wire the buttons to their handlers.
self.BtCamera.clicked.connect(self.Camera)
self.BtVideo.clicked.connect(self.Video)
self.BtDetectObject.clicked.connect(self.toggle_objects)
self.BtDetectPeople.clicked.connect(self.toggle_people)
self.BtMoveObjects.clicked.connect(self.toggle_moves)

self.yolo = YOLO('best.pt')       # custom-trained YOLO weights
self.cap = None                   # active cv2.VideoCapture (camera or file), or None
self.videoActive = False
self.cameraActive = False
self.det_objects = False          # YOLO object detection on/off
self.det_was_active = False       # remembers detection state while a person is in the zone
self.people_present = False
self._seen, self._ttl = {}, 2000  # dedup cache: (name, cx//10, cy//10) -> last-seen ms; 2 s TTL

self.frame_timer = QtCore.QTimer(self)   # drives frame grabbing/rendering
self.frame_timer.timeout.connect(self.tick)
self.people_timer = QtCore.QTimer(self)  # polls the person sensor
self.people_timer.timeout.connect(self.tick_people)
def _stop_stream(self, log_msg=None):
    """Stop the frame timer, release the capture, clear both previews.

    Shared cleanup for Camera/Video stop and end-of-file handling
    (previously duplicated in three places). Logs *log_msg* if given.
    """
    if self.frame_timer.isActive():
        self.frame_timer.stop()
    if self.cap:
        try:
            self.cap.release()
        except Exception:  # was a bare except; releasing must never crash the UI
            pass
    self.cap = None
    self.videoActive = self.cameraActive = False
    if hasattr(self, 'LCamera1'):
        self.LCamera1.clear()
    if hasattr(self, 'LCamera2'):
        self.LCamera2.clear()
    if log_msg:
        self.add_log(log_msg)


def Camera(self):
    """Toggle the webcam (device 0): start streaming if idle, stop otherwise."""
    if not self.cameraActive:
        if self.cap:
            try:
                self.cap.release()
            except Exception:
                pass
        self.cap = cv2.VideoCapture(0)
        if not self.cap.isOpened():
            self.add_log('Камера: не удалось открыть 0')
            return
        self.videoActive, self.cameraActive = False, True
        if not self.frame_timer.isActive():
            self.frame_timer.start(33)  # ~30 fps
        self.add_log('Камера: старт')
    else:
        self._stop_stream('Камера: стоп')


def Video(self):
    """Toggle playback of the demo video file."""
    if not self.videoActive:
        src = 'Video2.mp4'
        if self.cap:
            try:
                self.cap.release()
            except Exception:
                pass
        self.cap = cv2.VideoCapture(src)
        if not self.cap.isOpened():
            self.add_log(f'Видео: не удалось открыть {src}')
            return
        self.videoActive, self.cameraActive = True, False
        if not self.frame_timer.isActive():
            self.frame_timer.start(33)
        self.add_log(f'Видео: старт {src}')
    else:
        self._stop_stream('Видео: стоп')


def toggle_objects(self):
    """Flip object detection on/off and reflect the state in the UI."""
    self.det_objects = not self.det_objects
    if hasattr(self, 'BtDetectObject'):
        self.BtDetectObject.setText("Stop detection" if self.det_objects else "Detect objects")
    self.add_log("Детекция объектов: ON" if self.det_objects else "Детекция объектов: OFF")


def toggle_people(self):
    """Start/stop the person-monitoring poll (every 200 ms)."""
    if self.people_timer.isActive():
        self.people_timer.stop()
        self.add_log('Мониторинг человека: OFF')
    else:
        self.people_timer.start(200)
        self.add_log('Мониторинг человека: ON')


def _dedup(self, name, cx, cy):
    """Return True at most once per TTL window for a class near a spot.

    The centre is quantised to a 10 px grid so that small jitter between
    frames still counts as the same physical object.
    """
    k, now = (name, cx // 10, cy // 10), int(time.time() * 1000)
    if now - self._seen.get(k, 0) < self._ttl:
        return False
    self._seen[k] = now
    return True


def _ok(self, name: str) -> bool:
    """True when the checkbox mapped to this class exists and is ticked."""
    name = name.lower()
    mapping = {
        'adjustable_wrench': getattr(self, 'CBBox1', None),
        'gear': getattr(self, 'CBBox2', None),
        'hammer': getattr(self, 'CBBox3', None),
        'screwdriver': getattr(self, 'CBReject', None),
    }
    w = mapping.get(name)
    return bool(w and w.isChecked())


def _to_label(self, label: QtWidgets.QLabel, bgr):
    """Render a BGR frame into a QLabel, keeping the aspect ratio."""
    if label is None:
        return
    rgb = cv2.cvtColor(bgr, cv2.COLOR_BGR2RGB)
    h, w, c = rgb.shape
    # QPixmap.fromImage copies the pixels, so the local rgb buffer may die.
    qimg = QtGui.QImage(rgb.data, w, h, w * c, QtGui.QImage.Format_RGB888)
    label.setPixmap(QtGui.QPixmap.fromImage(qimg).scaled(
        label.width(), label.height(), QtCore.Qt.KeepAspectRatio))


def tick_people(self):
    """Poll the person sensor; pause streaming/detection while the zone is occupied."""
    try:
        if self.smart_camera.getPerson():
            self.lamp.setLamp('0001')  # person present
            if self.det_objects:
                self.det_was_active = True  # so detection can resume automatically
                self.det_objects = False
            if self.frame_timer.isActive():
                self.frame_timer.stop()
            if not self.people_present:
                self.people_present = True
                self.add_log('Человек в рабочей зоне! Детекция остановлена')
        else:
            self.lamp.setLamp('1000')  # zone clear
            if self.people_present:
                self.people_present = False
                self.add_log('Зона свободна')
            if not self.frame_timer.isActive() and (self.videoActive or self.cameraActive) and self.cap:
                self.frame_timer.start(33)
            if self.det_was_active and not self.det_objects:
                self.det_objects = True
                self.det_was_active = False
                self.add_log('Детекция объектов возобновлена')
    except Exception as e:
        self.add_log(f'getPerson() error: {e}')


def tick(self):
    """Grab one frame, run YOLO when enabled, trigger moves, paint previews."""
    if not self.cap:
        return
    ok, frame = self.cap.read()
    if not ok:
        # Camera failure or end of the video file: shut the stream down.
        if self.videoActive:
            self.add_log("Видео: конец файла")
        self._stop_stream()
        return
    img2 = frame         # raw view
    img1 = frame.copy()  # annotated view
    if self.det_objects:
        try:
            r = self.yolo(frame, verbose=False, device='cpu')[0]
            if hasattr(r, 'boxes') and r.boxes is not None:
                keep = [self._ok(str(self.yolo.names[int(b.cls[0])]).lower()) for b in r.boxes]
                if any(keep):
                    import numpy as np
                    r.boxes = r.boxes[np.array(keep, dtype=bool)]
                else:
                    r.boxes = r.boxes[:0]  # empty slice: nothing passed the filter
                for b in r.boxes:
                    cls_id = int(b.cls[0])
                    name = str(self.yolo.names[cls_id]).lower()
                    x1, y1, x2, y2 = map(int, b.xyxy[0])
                    cx, cy = (x1 + x2) // 2, (y1 + y2) // 2
                    # Only react once per object per TTL window.
                    if self._dedup(name, cx, cy):
                        self.add_log(f"Detected {name}")
                        if self.moves_enabled and not self.people_present:
                            self.run_session_yolo(name)
                        else:
                            self.add_log("Перемещение отключено" if not self.moves_enabled
                                         else "Ожидание: человек в зоне")
                img1 = r.plot()
        except Exception as e:
            self.add_log(f"YOLO error: {e}")
    if hasattr(self, 'LCamera1'):
        self._to_label(self.LCamera1, img1)
    if hasattr(self, 'LCamera2'):
        self._to_label(self.LCamera2, img2)


def run_session_yolo(self, name: str):
    """Pick the detected object and place it into its mapped category cell.

    NOTE(review): the wait loop blocks this method while pumping the Qt
    event loop until the robot program finishes — re-entrancy from other
    signals is possible during the wait; confirm that is intended.
    """
    cat = self.cls2cat.get(name.lower())
    if not cat:
        self.add_log(f"Нет маппинга для класса: {name}")
        return
    try:
        if cat == 'Брак':
            self._pick_and_place(self.rejectCell, self.rejectTrack)
        else:
            slot = self._choose_slot(cat)
            if slot is None:
                self.add_log(f"Нет свободных ячеек для {cat}")
                return
            self._pick_and_place(self.cells[slot], self.cellTrack[slot])
        self.robot.play()
        # Busy-wait (with event processing) until the robot program is done.
        while self.robot.getActualStateOut() != InterpreterStates.PROGRAM_IS_DONE.value:
            QtWidgets.QApplication.processEvents()
            time.sleep(0.05)
        self.add_log(f"Перемещено: {name} -> {cat}")
    except Exception as e:
        self.add_log(f"Ошибка перемещения: {e}")
        QtWidgets.QMessageBox.warning(self, "Ошибка", str(e))


def toggle_moves(self):
    """Flip the robot-move permission flag and reflect it in the UI."""
    self.moves_enabled = not self.moves_enabled
    try:
        self.BtMoveObjects.setText("Moves: ON" if self.moves_enabled else "Moves: OFF")
    except Exception:
        pass  # button may not exist in every UI layout
    self.add_log("Перемещения: ON" if self.moves_enabled else "Перемещения: OFF")