I want to constantly read images from an OpenCV camera in Python in a background worker, and have the main program grab the latest image on demand. This is needed because of problematic hardware.
After messing around with threads and getting very low efficiency (duh!), I’d like to switch to multiprocessing.
Here’s the threading version:
from threading import Thread
import cv2

class WebcamStream:
    # initialization method
    def __init__(self, stream_id=0):
        self.stream_id = stream_id  # default is 0 for main camera

        # opening video capture stream
        self.camera = cv2.VideoCapture(self.stream_id)
        self.camera.set(cv2.CAP_PROP_FRAME_WIDTH, 3840)
        self.camera.set(cv2.CAP_PROP_FRAME_HEIGHT, 2880)
        if not self.camera.isOpened():
            print("[Exiting]: Error accessing webcam stream.")
            exit(0)

        # reading a single frame from camera stream for initializing
        _, self.frame = self.camera.read()

        # self.stopped starts as True; start() flips it to False
        self.stopped = True

        # thread instantiation
        self.t = Thread(target=self.update, args=())
        self.t.daemon = True  # daemon threads run in background

    # method to start thread
    def start(self):
        self.stopped = False
        self.t.start()

    # method passed to thread to read next available frame
    def update(self):
        while True:
            if self.stopped:
                break
            _, self.frame = self.camera.read()
        self.camera.release()

    # method to return latest read frame
    def read(self):
        return self.frame

    # method to stop reading frames
    def stop(self):
        self.stopped = True
And –
if __name__ == "__main__":
    main_camera_stream = WebcamStream(stream_id=0)
    main_camera_stream.start()
    frame = main_camera_stream.read()
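For context, the way I use it from the main program is roughly like this (the display loop below is only an illustration of the intended usage, not my real code):

import cv2

if __name__ == "__main__":
    main_camera_stream = WebcamStream(stream_id=0)
    main_camera_stream.start()
    try:
        while True:
            # read() always hands back the most recent frame the background thread grabbed
            frame = main_camera_stream.read()
            cv2.imshow("latest frame", frame)
            if cv2.waitKey(30) & 0xFF == ord("q"):
                break
    finally:
        main_camera_stream.stop()
        cv2.destroyAllWindows()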
Can someone please help me translate this to multiprocessing land?
Thanks!
Answer
I’ve written several solutions to similar problems, but it’s been a little while so here we go:
I would use shared_memory as a buffer to read frames into, which can then be read by another process. My first inclination is to initialize the camera and read frames in the child process, because that seems like it would be a “set it and forget it” kind of thing.
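The key mechanic is that a SharedMemory block is just raw bytes visible to both processes, and numpy can wrap those bytes as an array without copying. Stripped of the camera details, that piece on its own looks roughly like this (a minimal sketch; the 1080p shape and names are just illustrative):

import numpy as np
from multiprocessing.shared_memory import SharedMemory

# in the process that creates the buffer
shm = SharedMemory(create=True, size=1920 * 1080 * 3)  # enough bytes for one 8-bit BGR frame
frame_view = np.ndarray((1080, 1920, 3), dtype=np.uint8, buffer=shm.buf)
frame_view[:] = 0  # writes go straight into the shared memory block

# in the other process, attach by name and build the same view over the same bytes
shm_other = SharedMemory(name=shm.name)  # no copy, same underlying memory
same_frame = np.ndarray((1080, 1920, 3), dtype=np.uint8, buffer=shm_other.buf)

With that in place, the full producer/consumer version looks like this: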
import numpy as np
import cv2
from multiprocessing import Process, Queue
from multiprocessing.shared_memory import SharedMemory

def produce_frames(q):
    # get the first frame to calculate size of buffer
    cap = cv2.VideoCapture(0)
    success, frame = cap.read()
    shm = SharedMemory(create=True, size=frame.nbytes)
    # could also maybe use array.array instead of numpy, but I'm familiar with numpy
    framebuffer = np.ndarray(frame.shape, frame.dtype, buffer=shm.buf)
    framebuffer[:] = frame  # in case you need to send the first frame to the main process
    q.put(shm)          # send the buffer back to main
    q.put(frame.shape)  # send the array details
    q.put(frame.dtype)
    try:
        while True:
            cap.read(framebuffer)
    except KeyboardInterrupt:
        pass
    finally:
        shm.close()   # call this in all processes where the shm exists
        shm.unlink()  # call from only one process

def consume_frames(q):
    shm = q.get()  # get the shared buffer
    shape = q.get()
    dtype = q.get()
    framebuffer = np.ndarray(shape, dtype, buffer=shm.buf)  # reconstruct the array
    try:
        while True:
            cv2.imshow("window title", framebuffer)
            cv2.waitKey(100)
    except KeyboardInterrupt:
        pass
    finally:
        shm.close()

if __name__ == "__main__":
    q = Queue()
    producer = Process(target=produce_frames, args=(q,))
    producer.start()
    consume_frames(q)
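One thing the code above glosses over compared to your threaded stop() method: KeyboardInterrupt is the only way out of the loops. If you want an explicit stop signal from the main process, a multiprocessing.Event works as a cross-process flag. Here is a sketch of that variant; the stop_event argument and the bounded demo loop in the consumer are my own additions, not part of the code above:

import numpy as np
import cv2
from multiprocessing import Process, Queue, Event
from multiprocessing.shared_memory import SharedMemory

def produce_frames(q, stop_event):
    # same setup as before: open the camera, allocate shared memory, share the details
    cap = cv2.VideoCapture(0)
    success, frame = cap.read()
    shm = SharedMemory(create=True, size=frame.nbytes)
    framebuffer = np.ndarray(frame.shape, frame.dtype, buffer=shm.buf)
    framebuffer[:] = frame
    q.put(shm)
    q.put(frame.shape)
    q.put(frame.dtype)
    try:
        # loop until the main process sets the event, instead of waiting for Ctrl-C
        while not stop_event.is_set():
            cap.read(framebuffer)
    finally:
        cap.release()
        shm.close()
        shm.unlink()

def consume_frames(q, n_frames=100):
    shm = q.get()
    shape = q.get()
    dtype = q.get()
    framebuffer = np.ndarray(shape, dtype, buffer=shm.buf)
    try:
        for _ in range(n_frames):  # bounded loop just so the demo ends on its own
            cv2.imshow("window title", framebuffer)
            cv2.waitKey(100)
    finally:
        shm.close()

if __name__ == "__main__":
    q = Queue()
    stop_event = Event()
    producer = Process(target=produce_frames, args=(q, stop_event))
    producer.start()
    try:
        consume_frames(q)
    finally:
        stop_event.set()  # the multiprocess equivalent of your threaded stop()
        producer.join()
        cv2.destroyAllWindows()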