Compare commits

..

4 commits

Author SHA1 Message Date
Ruben van de Ven
53c18d9a7b Some comments on parameters 2024-06-20 19:21:44 +02:00
Ruben van de Ven
531d61b69a detach render FPS from frame emit 2024-06-20 12:32:05 +02:00
Ruben van de Ven
7f1c3d86f7 remove superfluous logging 2024-06-20 12:11:23 +02:00
Ruben van de Ven
d9547bb372 Render window option to preview without delay 2024-06-19 14:34:07 +02:00
5 changed files with 58 additions and 19 deletions

View file

@@ -183,22 +183,22 @@ inference_parser.add_argument("--z-mode",
connection_parser.add_argument('--zmq-trajectory-addr', connection_parser.add_argument('--zmq-trajectory-addr',
help='Manually specity communication addr for the trajectory messages', help='Manually specity communication addr for the trajectory messages',
type=str, type=str,
default="ipc:///tmp/feeds/traj") default="ipc:///tmp/feeds_traj")
connection_parser.add_argument('--zmq-camera-stream-addr', connection_parser.add_argument('--zmq-camera-stream-addr',
help='Manually specity communication addr for the camera stream messages', help='Manually specity communication addr for the camera stream messages',
type=str, type=str,
default="ipc:///tmp/feeds/img") default="ipc:///tmp/feeds_img")
connection_parser.add_argument('--zmq-prediction-addr', connection_parser.add_argument('--zmq-prediction-addr',
help='Manually specity communication addr for the prediction messages', help='Manually specity communication addr for the prediction messages',
type=str, type=str,
default="ipc:///tmp/feeds/preds") default="ipc:///tmp/feeds_preds")
connection_parser.add_argument('--zmq-frame-addr', connection_parser.add_argument('--zmq-frame-addr',
help='Manually specity communication addr for the frame messages', help='Manually specity communication addr for the frame messages',
type=str, type=str,
default="ipc:///tmp/feeds/frame") default="ipc:///tmp/feeds_frame")
connection_parser.add_argument('--ws-port', connection_parser.add_argument('--ws-port',
@@ -252,6 +252,9 @@ tracker_parser.add_argument("--smooth-tracks",
render_parser.add_argument("--render-file", render_parser.add_argument("--render-file",
help="Render a video file previewing the prediction, and its delay compared to the current frame", help="Render a video file previewing the prediction, and its delay compared to the current frame",
action='store_true') action='store_true')
render_parser.add_argument("--render-window",
help="Render a previewing to a window",
action='store_true')
render_parser.add_argument("--render-url", render_parser.add_argument("--render-url",
help="""Stream renderer on given URL. Two easy approaches: help="""Stream renderer on given URL. Two easy approaches:

View file

@@ -145,6 +145,15 @@ class FrameEmitter:
i = 0 i = 0
for video_path in self.video_srcs: for video_path in self.video_srcs:
logger.info(f"Play from '{str(video_path)}'") logger.info(f"Play from '{str(video_path)}'")
if str(video_path).isdigit():
# numeric input is a CV camera
video = cv2.VideoCapture(int(str(video_path)))
# TODO: make config variables
video.set(cv2.CAP_PROP_FRAME_WIDTH, int(1280))
video.set(cv2.CAP_PROP_FRAME_HEIGHT, int(720))
print("exposure!", video.get(cv2.CAP_PROP_AUTO_EXPOSURE))
video.set(cv2.CAP_PROP_FPS, 5)
else:
video = cv2.VideoCapture(str(video_path)) video = cv2.VideoCapture(str(video_path))
fps = video.get(cv2.CAP_PROP_FPS) fps = video.get(cv2.CAP_PROP_FPS)
target_frame_duration = 1./fps target_frame_duration = 1./fps

View file

@@ -73,7 +73,7 @@ def start():
ExceptionHandlingProcess(target=run_tracker, kwargs={'config': args, 'is_running': isRunning}, name='tracker'), ExceptionHandlingProcess(target=run_tracker, kwargs={'config': args, 'is_running': isRunning}, name='tracker'),
] ]
if args.render_file or args.render_url: if args.render_file or args.render_url or args.render_window:
procs.append( procs.append(
ExceptionHandlingProcess(target=run_renderer, kwargs={'config': args, 'is_running': isRunning}, name='renderer') ExceptionHandlingProcess(target=run_renderer, kwargs={'config': args, 'is_running': isRunning}, name='renderer')
) )

View file

@@ -326,13 +326,18 @@ class PredictionServer:
start = time.time() start = time.time()
with warnings.catch_warnings(): with warnings.catch_warnings():
warnings.simplefilter('ignore') # prevent deluge of UserWarning from torch's rrn.py warnings.simplefilter('ignore') # prevent deluge of UserWarning from torch's rrn.py
# in the OnlineMultimodalGenerativeCVAE (see trajectron.model.online_mgcvae.py) each node's distribution
# is put stored in self.latent.p_dist by OnlineMultimodalGenerativeCVAE.p_z_x(). Type: torch.distributions.OneHotCategorical
# Later sampling in discrete_latent.py: DiscreteLatent.sample_p()
dists, preds = trajectron.incremental_forward(input_dict, dists, preds = trajectron.incremental_forward(input_dict,
maps, maps,
prediction_horizon=self.config.prediction_horizon, # TODO: make variable prediction_horizon=self.config.prediction_horizon, # TODO: make variable
num_samples=self.config.num_samples, # TODO: make variable num_samples=self.config.num_samples, # TODO: make variable
full_dist=self.config.full_dist, full_dist=self.config.full_dist, # "The models full sampled output, where z and y are sampled sequentially"
gmm_mode=self.config.gmm_mode, gmm_mode=self.config.gmm_mode, # "If True: The mode of the Gaussian Mixture Model (GMM) is sampled (see trajectron.model.mgcvae.py)"
z_mode=self.config.z_mode) z_mode=self.config.z_mode # "Predictions from the models most-likely high-level latent behavior mode" (see trajecton.models.components.discrete_latent:sample_p(most_likely_z=z_mode))
)
end = time.time() end = time.time()
logger.debug("took %.2f s (= %.2f Hz) w/ %d nodes and %d edges" % (end - start, logger.debug("took %.2f s (= %.2f Hz) w/ %d nodes and %d edges" % (end - start,
1. / (end - start), len(trajectron.nodes), 1. / (end - start), len(trajectron.nodes),

View file

@@ -81,6 +81,10 @@ class Renderer:
self.out_writer = self.start_writer() if self.config.render_file else None self.out_writer = self.start_writer() if self.config.render_file else None
self.streaming_process = self.start_streaming() if self.config.render_url else None self.streaming_process = self.start_streaming() if self.config.render_url else None
if self.config.render_window:
cv2.namedWindow("frame", cv2.WND_PROP_FULLSCREEN)
cv2.setWindowProperty("frame",cv2.WND_PROP_FULLSCREEN,cv2.WINDOW_FULLSCREEN)
def start_writer(self): def start_writer(self):
if not self.config.output_dir.exists(): if not self.config.output_dir.exists():
raise FileNotFoundError("Path does not exist") raise FileNotFoundError("Path does not exist")
@ -121,18 +125,33 @@ class Renderer:
def run(self): def run(self):
frame = None
prediction_frame = None prediction_frame = None
i=0 i=0
first_time = None first_time = None
while self.is_running.is_set(): while self.is_running.is_set():
i+=1 i+=1
zmq_ev = self.frame_sock.poll(timeout=2000)
if not zmq_ev: # zmq_ev = self.frame_sock.poll(timeout=2000)
# when no data comes in, loop so that is_running is checked # if not zmq_ev:
# # when no data comes in, loop so that is_running is checked
# continue
try:
frame: Frame = self.frame_sock.recv_pyobj(zmq.NOBLOCK)
except zmq.ZMQError as e:
idx = frame.index if frame else "NONE"
logger.debug(f"reuse video frame {idx}")
else:
logger.debug(f'new video frame {frame.index}')
if frame is None:
# might need to wait a few iterations before first frame comes available
time.sleep(.1)
continue continue
frame: Frame = self.frame_sock.recv_pyobj()
try: try:
prediction_frame: Frame = self.prediction_sock.recv_pyobj(zmq.NOBLOCK) prediction_frame: Frame = self.prediction_sock.recv_pyobj(zmq.NOBLOCK)
except zmq.ZMQError as e: except zmq.ZMQError as e:
@ -141,7 +160,7 @@ class Renderer:
if first_time is None: if first_time is None:
first_time = frame.time first_time = frame.time
decorate_frame(frame, prediction_frame, first_time, self.config) img = decorate_frame(frame, prediction_frame, first_time, self.config)
img_path = (self.config.output_dir / f"{i:05d}.png").resolve() img_path = (self.config.output_dir / f"{i:05d}.png").resolve()
@ -149,9 +168,12 @@ class Renderer:
logger.debug(f"write frame {frame.time - first_time:.3f}s") logger.debug(f"write frame {frame.time - first_time:.3f}s")
if self.out_writer: if self.out_writer:
self.out_writer.write(frame.img) self.out_writer.write(img)
if self.streaming_process: if self.streaming_process:
self.streaming_process.stdin.write(frame.img.tobytes()) self.streaming_process.stdin.write(img.tobytes())
if self.config.render_window:
cv2.imshow('frame',img)
cv2.waitKey(1)
logger.info('Stopping') logger.info('Stopping')
if i>2: if i>2:
@@ -181,8 +203,8 @@ def decorate_frame(frame: Frame, prediction_frame: Frame, first_time: float, con
# Fill image with red color(set each pixel to red) # Fill image with red color(set each pixel to red)
overlay[:] = (130, 0, 75) overlay[:] = (130, 0, 75)
frame.img = cv2.addWeighted(frame.img, .4, overlay, .6, 0) img = cv2.addWeighted(frame.img, .4, overlay, .6, 0)
img = frame.img # img = frame.img.copy()
# all not working: # all not working:
# if i == 1: # if i == 1: