Offload QR detector to external process to retain good FPS

Ruben van de Ven 2025-02-24 11:23:42 +01:00
parent 1569a488ef
commit 6e126d757b


@@ -5,6 +5,8 @@ From: https://github.com/perrytsao/Webcam-Latency-Measurement/blob/master/Camera
 """
 from collections import deque
+from multiprocessing import Process, Queue
+import queue
 import time
 import timeit
 import numpy as np
@@ -34,10 +36,9 @@ class Source:
 class GigE(Source):
     def __init__(self):
         self.camera = neoapi.Cam()
-        # self.camera.Connect('-B127')
-        self.camera.Connect('-B105')
+        self.camera.Connect()
         # Default buffer mode, streaming, always returns latest frame
-        self.camera.SetImageBufferCount(10)
+        self.camera.SetImageBufferCount(2)
         # neoAPI docs: Setting the neoapi.Cam.SetImageBufferCycleCount()to one ensures that all buffers but one are given back to the neoAPI to be re-cycled and never given to the user by the neoapi.Cam.GetImage() method.
         self.camera.SetImageBufferCycleCount(1)
         if self.camera.IsConnected():
@@ -45,9 +46,18 @@ class GigE(Source):
             self.camera.f.BinningHorizontal.Set(2)
             self.camera.f.BinningVertical.Set(2)
         self.pixfmt = self.camera.f.PixelFormat.Get()
+        self._last_timestamp = None

     def recv(self):
         i = self.camera.GetImage(0)
+        # check that we're not getting an older image from the buffer
+        # because buffer is LIFO
+        ts = i.GetTimestamp()
+        if self._last_timestamp is not None and self._last_timestamp > ts:
+            return None
+        self._last_timestamp = ts
+
         if i.IsEmpty():
             return None
@@ -61,13 +71,32 @@ class GigE(Source):
         img = cv2.convertScaleAbs(img, alpha=(255.0/65535.0))
         return img

+def qr_detector(frame_q: Queue, intervals_q: Queue):
+    while True:
+        now, img_for_qr = frame_q.get()
+        code,bbox,rectifiedImage = qrDecoder.detectAndDecode(img_for_qr)
+        if len(code) > 0:
+            detected_t = float(code)
+            try:
+                intervals_q.put_nowait(now-detected_t)
+            except queue.Full as e:
+                pass
+            # latencies.append(now-detected_t)
+
 source = GigE()

+# pass frames to QR detector process
+frame_q = Queue(2)
+# pass detected intervals back from detector process
+intervals_q = Queue(20)
+
 config = pyglet.gl.Config(sample_buffers=1, samples=4)
-display = pyglet.canvas.get_display()
+display = pyglet.display.get_display()
 screen = display.get_screens()[0]
 print(screen)
 window = pyglet.window.Window(width=screen.width, height=screen.height, config=config, fullscreen=True, screen=screen)
@@ -92,20 +121,12 @@ def check_frames(dt: float):
     img = cv2.flip(cv2.cvtColor(img, cv2.COLOR_BGR2RGB), 0)
     # TODO: offload to queue and multiprocessing
-    img_for_qr = cv2.resize(img, (0,0), fx=.1, fy=.1)
-    retval, codes,bboxes,rectifiedImages = qrDecoder.detectAndDecodeMulti(img)
+    # img_for_qr = cv2.resize(img, (0,0), fx=.1, fy=.1)
+    try:
+        frame_q.put_nowait((now, img))
+    except queue.Full as e:
+        pass
-    if retval:
-        # print(retval, codes)
-        for code in codes:
-            if len(code) == 0:
-                continue
-            # TODO)) Handle/filter/sort multiple QRs
-            # I.e. filter out newest, and only go for single most new.
-            # In that case we should verify there's >=2 QRs in sight.
-            detected_t = float(code)
-            latencies.append(now - detected_t)

     img_data = pyglet.image.ImageData(img.shape[1], img.shape[0], 'RGB', img.tobytes())
@@ -122,6 +143,12 @@ def on_refresh(dt):
     if qr_sprite is not None:
         qr_sprite.delete() # clear texture from memory

+    try:
+        latency = intervals_q.get_nowait()
+        latencies.append(latency)
+    except queue.Empty as e:
+        pass
+
     intervals = [video_fps[i] - video_fps[i-1] for i in range(1, len(video_fps))]
     # intervals = video_fps[1:] - video_fps[:-1]
@@ -187,6 +214,7 @@ window.set_handler('on_draw', on_draw)
 try:
     event_loop = pyglet.app.EventLoop()
     pyglet.clock.schedule(check_frames)
+    Process(target=qr_detector, args=(frame_q, intervals_q), daemon=True).start()
     event_loop.run()
 finally:
     window.close()
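
For reference, stripped of the pyglet specifics the pattern this commit introduces boils down to the minimal sketch below: the render loop pushes frames into a small bounded queue with put_nowait (dropping frames whenever the detector lags behind), a daemon worker process decodes the QR code, and the measured latency travels back on a second queue that the display loop drains with get_nowait. This is only an illustration, not the repository code: grab_frame() and the fixed-length loop are placeholders for the GigE source and the pyglet event loop in the actual script.

# Minimal, self-contained sketch of the offloading pattern (assumed names, not from the repo).
import queue
import time
from multiprocessing import Process, Queue

import cv2
import numpy as np


def qr_worker(frame_q: Queue, intervals_q: Queue) -> None:
    """Decode QR codes in a separate process and report measured latencies."""
    detector = cv2.QRCodeDetector()
    while True:
        now, img = frame_q.get()  # block until the main loop hands over a frame
        code, _bbox, _rectified = detector.detectAndDecode(img)
        if len(code) > 0:
            detected_t = float(code)  # the QR payload encodes a timestamp
            try:
                intervals_q.put_nowait(now - detected_t)
            except queue.Full:
                pass  # drop a measurement rather than block the worker


def grab_frame() -> np.ndarray:
    """Stand-in for the camera read (GigE.recv() in the real script)."""
    return np.zeros((480, 640, 3), dtype=np.uint8)


if __name__ == "__main__":
    frame_q: Queue = Queue(2)       # frames towards the detector; small, so backlog gets dropped
    intervals_q: Queue = Queue(20)  # measured latencies back to the main loop

    Process(target=qr_worker, args=(frame_q, intervals_q), daemon=True).start()

    latencies = []
    for _ in range(300):  # stand-in for the pyglet event loop
        img = grab_frame()
        try:
            frame_q.put_nowait((time.time(), img))  # never block the render loop
        except queue.Full:
            pass  # detector is behind; skip this frame
        try:
            latencies.append(intervals_q.get_nowait())
        except queue.Empty:
            pass
        time.sleep(1 / 60)

    print(f"collected {len(latencies)} latency samples")

The design point is the same as in the diff: both queues are written with the non-blocking put_nowait/get_nowait so the main loop keeps rendering at full rate, and stale work is discarded instead of queuing up.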