import logging
import math
import os

import cv2
from deepface import DeepFace


def emotions_generator(self, video_path):
    """Yield (timestamp_in_seconds, emotion_scores) for sampled frames of a video."""
    video_capture = cv2.VideoCapture(video_path)
    fps = video_capture.get(cv2.CAP_PROP_FPS)
    # Analyze only every N-th frame to match the desired sampling rate.
    frame_interval = math.ceil(fps / self.fps_desired)
    frame_counter = 0

    while video_capture.isOpened():
        ret, frame = video_capture.read()
        if not ret:
            break

        if frame_counter % frame_interval == 0:
            # Write the frame to a temporary JPEG (quality 75) for DeepFace to read.
            img_path = "./temp_frame.jpg"
            cv2.imwrite(img_path, frame, [cv2.IMWRITE_JPEG_QUALITY, 75])

            detections = DeepFace.analyze(
                img_path=img_path,
                actions=['emotion'],
                enforce_detection=False,
                silent=True,
            )
            emotions = detections[0]['emotion']
            # Rename DeepFace's raw emotion labels using the caller's mapping.
            emotions = {self.emotions_mapping.get(emotion, emotion): value
                        for emotion, value in emotions.items()}

            second = frame_counter / fps
            yield second, emotions
            os.remove(img_path)

        frame_counter += 1
        logging.info(f"\n========= Frame processed: {frame_counter} ==========")

    video_capture.release()
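A minimal usage sketch, assuming emotions_generator is a method of a class that defines fps_desired and emotions_mapping; the class name EmotionAnalyzer and its constructor arguments below are illustrative, not taken from the original paste.

# Hypothetical wrapper class; only fps_desired and emotions_mapping are implied by the method above.
analyzer = EmotionAnalyzer(fps_desired=1, emotions_mapping={"happy": "joy", "sad": "sadness"})

# Iterate lazily over the sampled frames and their emotion scores.
for second, emotions in analyzer.emotions_generator("input.mp4"):
    print(f"t={second:.1f}s -> {emotions}")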