diff --git a/latency_measure.py b/latency_measure.py
index 93f2af7..942d4f5 100644
--- a/latency_measure.py
+++ b/latency_measure.py
@@ -5,6 +5,8 @@ From: https://github.com/perrytsao/Webcam-Latency-Measurement/blob/master/Camera
 """
 
 from collections import deque
+from multiprocessing import Process, Queue
+import queue
 import time
 import timeit
 import numpy as np
@@ -34,10 +36,9 @@ class Source:
 class GigE(Source):
     def __init__(self):
         self.camera = neoapi.Cam()
-        # self.camera.Connect('-B127')
-        self.camera.Connect('-B105')
+        self.camera.Connect()
         # Default buffer mode, streaming, always returns latest frame
-        self.camera.SetImageBufferCount(10)
+        self.camera.SetImageBufferCount(2)
         # neoAPI docs: Setting the neoapi.Cam.SetImageBufferCycleCount()to one ensures that all buffers but one are given back to the neoAPI to be re-cycled and never given to the user by the neoapi.Cam.GetImage() method.
         self.camera.SetImageBufferCycleCount(1)
         if self.camera.IsConnected():
@@ -45,9 +46,18 @@ class GigE(Source):
             self.camera.f.BinningHorizontal.Set(2)
             self.camera.f.BinningVertical.Set(2)
             self.pixfmt = self.camera.f.PixelFormat.Get()
+        self._last_timestamp = None
 
     def recv(self):
-        i = self.camera.GetImage(0)
+        i = self.camera.GetImage(0)
+
+        # check that we're not getting an older image from the buffer
+        # because buffer is LIFO
+        ts = i.GetTimestamp()
+        if self._last_timestamp is not None and self._last_timestamp > ts:
+            return None
+        self._last_timestamp = ts
+
         if i.IsEmpty():
             return None
 
@@ -61,13 +71,32 @@ class GigE(Source):
         img = cv2.convertScaleAbs(img, alpha=(255.0/65535.0))
         return img
 
+def qr_detector(frame_q: Queue, intervals_q: Queue):
+    while True:
+        now, img_for_qr = frame_q.get()
+
+        code,bbox,rectifiedImage = qrDecoder.detectAndDecode(img_for_qr)
+        if len(code) > 0:
+            detected_t = float(code)
+            try:
+                intervals_q.put_nowait(now-detected_t)
+            except queue.Full as e:
+                pass
+            # latencies.append(now-detected_t)
+
+
 source = GigE()
 
+# pass frames to QR detector process
+frame_q = Queue(2)
+# pass detected intervals back from detector process
+intervals_q = Queue(20)
+
 config = pyglet.gl.Config(sample_buffers=1, samples=4)
-display = pyglet.canvas.get_display()
+display = pyglet.display.get_display()
 screen = display.get_screens()[0]
 print(screen)
 window = pyglet.window.Window(width=screen.width, height=screen.height, config=config, fullscreen=True, screen=screen)
@@ -92,20 +121,12 @@ def check_frames(dt: float):
     img = cv2.flip(cv2.cvtColor(img, cv2.COLOR_BGR2RGB), 0)
 
     # TODO: offload to queue and multiprocessing
-    img_for_qr = cv2.resize(img, (0,0), fx=.1, fy=.1)
-    retval, codes,bboxes,rectifiedImages = qrDecoder.detectAndDecodeMulti(img)
+    # img_for_qr = cv2.resize(img, (0,0), fx=.1, fy=.1)
+    try:
+        frame_q.put_nowait((now, img))
+    except queue.Full as e:
+        pass
 
-    if retval:
-        # print(retval, codes)
-        for code in codes:
-            if len(code) == 0:
-                continue
-
-            # TODO)) Handle/filter/sort multiple QRs
-            # I.e. filter out newest, and only go for single most new.
-            # In that case we should verify there's >=2 QRs in sight.
-            detected_t = float(code)
-            latencies.append(now - detected_t)
 
     img_data = pyglet.image.ImageData(img.shape[1], img.shape[0], 'RGB', img.tobytes())
@@ -121,6 +142,12 @@ def on_refresh(dt):
     if qr_sprite is not None:
         qr_sprite.delete() # clear texture from memory
+
+    try:
+        latency = intervals_q.get_nowait()
+        latencies.append(latency)
+    except queue.Empty as e:
+        pass
 
     intervals = [video_fps[i] - video_fps[i-1] for i in range(1, len(video_fps))]
@@ -187,6 +214,7 @@ window.set_handler('on_draw', on_draw)
 try:
     event_loop = pyglet.app.EventLoop()
     pyglet.clock.schedule(check_frames)
+    Process(target=qr_detector, args=(frame_q, intervals_q), daemon=True).start()
    event_loop.run()
 finally:
     window.close()
\ No newline at end of file
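
Note, for reference only (not part of the patch): a minimal, self-contained sketch of the bounded-queue hand-off pattern this change adopts — a producer that never blocks (put_nowait, dropping work when the queue is full) and a daemon worker process that sends results back on a second queue. The worker function, the frame payloads, and the timing printout below are illustrative assumptions, not code from this repository.

    # sketch.py — illustrative only; mirrors the frame_q/intervals_q pattern in the patch
    from multiprocessing import Process, Queue
    import queue
    import time

    def worker(frame_q: Queue, result_q: Queue):
        # Consumer: block until a frame arrives, then report how long it waited in the queue.
        while True:
            sent_at, payload = frame_q.get()
            try:
                result_q.put_nowait(time.time() - sent_at)
            except queue.Full:
                pass  # drop the measurement rather than stall the worker

    if __name__ == "__main__":
        frame_q, result_q = Queue(2), Queue(20)
        Process(target=worker, args=(frame_q, result_q), daemon=True).start()

        for i in range(10):
            try:
                # Producer never blocks: a full queue means the frame is skipped.
                frame_q.put_nowait((time.time(), f"frame-{i}"))
            except queue.Full:
                pass
            time.sleep(0.05)

        time.sleep(0.2)  # give the worker a moment to drain the queue
        while True:
            try:
                print(f"hand-off delay: {result_q.get_nowait():.4f}s")
            except queue.Empty:
                break

The small queue sizes matter: bounding frame_q keeps the producer from accumulating stale frames, and put_nowait/get_nowait keep both the render loop and the worker from ever blocking on each other.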