Compare commits
No commits in common. "5728213e2c43d33687fbd18c261ad2684adddc7d" and "2ade58549f9b270585aae3f66bfa508d94b63217" have entirely different histories.
5728213e2c
...
2ade58549f
5 changed files with 4027 additions and 2403 deletions
@@ -3,7 +3,7 @@

## Install

* Run `bash build_opencv_with_gstreamer.sh` to build opencv with gstreamer support
* Use `uv` to install
* Use pyenv + poetry to install
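The install steps above first build an OpenCV wheel with GStreamer enabled and then install the environment with `uv` (or, in the other revision, pyenv plus poetry). A minimal sketch, not part of the repository, for verifying after installation that the locally built `cv2` actually has GStreamer support; it relies only on the standard `cv2.getBuildInformation()` call:

```python
# Sketch: check that the locally built opencv-python wheel was compiled
# with GStreamer support (assumption: standard cv2 API only).
import cv2

def has_gstreamer() -> bool:
    # the build summary contains a line like "    GStreamer:    YES (1.20.3)"
    for line in cv2.getBuildInformation().splitlines():
        if "GStreamer" in line:
            return "YES" in line
    return False

if __name__ == "__main__":
    print("OpenCV", cv2.__version__, "GStreamer:", has_gstreamer())
```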
## How to
poetry.lock: 3981 changed lines (generated, Normal file)
File diff suppressed because it is too large
@@ -1,36 +1,11 @@

[project]
[tool.poetry]
name = "trap"
version = "0.1.0"
description = "Art installation with trajectory prediction"
authors = [{ name = "Ruben van de Ven", email = "git@rubenvandeven.com" }]
requires-python = "~=3.10.4"
authors = ["Ruben van de Ven <git@rubenvandeven.com>"]
readme = "README.md"
dependencies = [
    "trajectron-plus-plus",
    "torch==1.12.1",
    "torchvision==0.13.1",
    "deep-sort-realtime>=1.3.2,<2",
    "ultralytics~=8.3",
    "ffmpeg-python>=0.2.0,<0.3",
    "torchreid>=0.2.5,<0.3",
    "gdown>=4.7.1,<5",
    "pandas-helper-calc",
    "tsmoothie>=1.0.5,<2",
    "pyglet>=2.0.15,<3",
    "pyglet-cornerpin>=0.3.0,<0.4",
    "opencv-python",
    "setproctitle>=1.3.3,<2",
    "bytetracker",
    "jsonlines>=4.0.0,<5",
    "tensorboardx>=2.6.2.2,<3",
    "shapely>=1,<2",
    "baumer-neoapi",
    "qrcode~=8.0",
    "pyusb>=1.3.1,<2",
    "ipywidgets>=8.1.5,<9",
]

[project.scripts]
[tool.poetry.scripts]
trapserv = "trap.plumber:start"
tracker = "trap.tools:tracker_preprocess"
compare = "trap.tools:tracker_compare"
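Both layouts above declare the same console scripts, just under different table names: `[project.scripts]` in the PEP 621 variant and `[tool.poetry.scripts]` in the Poetry variant. Each entry maps a command name to a `module:function` entry point, so `trapserv` ends up calling `trap.plumber:start`. A small sketch, not part of the repository, that lists these entry points from an installed environment via the standard `importlib.metadata` API:

```python
# Sketch: list the console scripts that the installed "trap" project exposes.
# Uses only importlib.metadata (Python 3.10+); the trap.* entry points
# correspond to the scripts table shown above.
from importlib.metadata import entry_points

for ep in entry_points(group="console_scripts"):
    if ep.value.startswith("trap."):
        print(f"{ep.name} -> {ep.value}")

# Running `trapserv` from the shell is then equivalent to:
#   from trap.plumber import start
#   start()
```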
@@ -38,16 +13,42 @@ process_data = "trap.process_data:main"
blacklist = "trap.tools:blacklist_tracks"
rewrite_tracks = "trap.tools:rewrite_raw_track_files"

[tool.uv]
[tool.poetry.dependencies]
python = "^3.10,<3.12,"

[tool.uv.sources]
trajectron-plus-plus = { path = "../Trajectron-plus-plus/", editable = true }
torch = [{ url = "https://download.pytorch.org/whl/cu113/torch-1.12.1%2Bcu113-cp310-cp310-linux_x86_64.whl", marker = "python_version ~= '3.10' and sys_platform == 'linux'" }]
torchvision = [{ url = "https://download.pytorch.org/whl/cu113/torchvision-0.13.1%2Bcu113-cp310-cp310-linux_x86_64.whl", marker = "python_version ~= '3.10' and sys_platform == 'linux'" }]
pandas-helper-calc = { git = "https://github.com/scls19fr/pandas-helper-calc" }
bytetracker = { git = "https://github.com/rubenvandeven/bytetrack-pip" }
baumer-neoapi = { path = "../../Downloads/Baumer_neoAPI_1.4.1_lin_x86_64_python/wheel/baumer_neoapi-1.4.1-cp34.cp35.cp36.cp37.cp38.cp39.cp310.cp311.cp312-none-linux_x86_64.whl" }
trajectron-plus-plus = { path = "../Trajectron-plus-plus/", develop = true }
#trajectron-plus-plus = { git = "https://git.rubenvandeven.com/security_vision/Trajectron-plus-plus/" }
torch = [
    { version="1.12.1" },
    # { url = "https://download.pytorch.org/whl/cu113/torch-1.12.1%2Bcu113-cp38-cp38-linux_x86_64.whl", markers = "python_version ~= '3.8' and sys_platform == 'linux'" },
    { url = "https://download.pytorch.org/whl/cu113/torch-1.12.1%2Bcu113-cp310-cp310-linux_x86_64.whl", markers = "python_version ~= '3.10' and sys_platform == 'linux'" },
]

torchvision = [
    { version="0.13.1" },
    # { url = "https://download.pytorch.org/whl/cu113/torchvision-0.13.1%2Bcu113-cp38-cp38-linux_x86_64.whl", markers = "python_version ~= '3.8' and sys_platform == 'linux'" },
    { url = "https://download.pytorch.org/whl/cu113/torchvision-0.13.1%2Bcu113-cp310-cp310-linux_x86_64.whl", markers = "python_version ~= '3.10' and sys_platform == 'linux'" },
]
deep-sort-realtime = "^1.3.2"
ultralytics = "^8.3"
ffmpeg-python = "^0.2.0"
torchreid = "^0.2.5"
gdown = "^4.7.1"
pandas-helper-calc = {git = "https://github.com/scls19fr/pandas-helper-calc"}
tsmoothie = "^1.0.5"
pyglet = "^2.0.15"
pyglet-cornerpin = "^0.3.0"
opencv-python = {file="./opencv_python-4.10.0.84-cp310-cp310-linux_x86_64.whl"}
setproctitle = "^1.3.3"
bytetracker = { git = "https://github.com/rubenvandeven/bytetrack-pip" }
jsonlines = "^4.0.0"
tensorboardx = "^2.6.2.2"
shapely = "^1"
baumer-neoapi = {path = "../../Downloads/Baumer_neoAPI_1.4.1_lin_x86_64_python/wheel/baumer_neoapi-1.4.1-cp34.cp35.cp36.cp37.cp38.cp39.cp310.cp311.cp312-none-linux_x86_64.whl"}
qrcode = "^8.0"
pyusb = "^1.3.1"
ipywidgets = "^8.1.5"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
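Both dependency variants above pin `torch` 1.12.1 and `torchvision` 0.13.1 and, on Linux with Python 3.10, point them at the CUDA 11.3 (`cu113`) wheels on download.pytorch.org, falling back to the plain PyPI builds elsewhere. A quick post-install sanity check, sketched here and not part of the repository, to confirm which build the resolver actually installed:

```python
# Sketch: confirm which torch/torchvision builds were installed and whether
# the pinned CUDA 11.3 wheel is usable on this machine.
import torch
import torchvision

print("torch:", torch.__version__)              # e.g. "1.12.1+cu113" for the cu113 wheel
print("torchvision:", torchvision.__version__)  # e.g. "0.13.1+cu113"
print("CUDA available:", torch.cuda.is_available())
if torch.cuda.is_available():
    print("CUDA runtime:", torch.version.cuda)  # expected "11.3"
    print("device:", torch.cuda.get_device_name(0))
```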
@@ -70,7 +70,6 @@ class LaserFrame():
        steps = int(distance // point_interval)
        for step in range(steps+1): # have both 0 and 1 in the lerp for empty points
            t = step/(steps+1)
            t = 1 # just asap to starting point of next shape
            x = int(lerp(a.x, b.x, t))
            y = int(lerp(a.y, b.y, t))
            points.append(LaserPoint(x,y, (0,0,0), 0, True))
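This hunk sits in the part of `LaserFrame` that bridges the gap between two consecutive shapes with blanked (laser off) points spaced roughly `point_interval` apart; the change replaces the evenly spaced `t = step/(steps+1)` with `t = 1`, jumping straight to the start of the next shape. A self contained sketch of the evenly spaced variant, using plain `(x, y, blank)` tuples as a stand-in for the project's `LaserPoint`:

```python
# Sketch: insert blanked travel points between the end of one shape (a) and
# the start of the next (b), about point_interval units apart. Simplified
# stand-in for the LaserFrame interpolation shown above; not the repo's code.
import math

def lerp(a: float, b: float, t: float) -> float:
    return a + (b - a) * t

def blank_travel_points(a, b, point_interval=30):
    (ax, ay), (bx, by) = a, b
    distance = math.hypot(bx - ax, by - ay)
    steps = int(distance // point_interval)
    return [
        (int(lerp(ax, bx, t)), int(lerp(ay, by, t)), True)   # blank=True: laser off
        for t in (step / (steps + 1) for step in range(steps + 1))
    ]

print(blank_travel_points((0, 0), (120, 0)))
# [(0, 0, True), (24, 0, True), (48, 0, True), (72, 0, True), (96, 0, True)]
```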
@@ -98,58 +97,6 @@ class LaserPath():
    def first(self):
        return self.points[0]

    def as_array(self):
        np.array([[p.x, p.y] for p in self.points])

    def simplyfied_path(self, start_v= 10., max_v= 20., a = 2):
        """walk over the path with specific velocity,
        continuously accelerate (a) until max_v is reached
        place point at each step

        (see also tools.transition_path_points() )
        """
        if len(self.points) < 1:
            return self.points

        path = self.as_array()

        # new_path = np.array([])
        lengths = np.sqrt(np.sum(np.diff(path, axis=0)**2, axis=1))
        cum_lenghts = np.cumsum(lengths)
        # distance = cum_lenghts[-1] * t
        # ts = np.concatenate((np.array([0.]), cum_lenghts / cum_lenghts[-1]))
        # print(cum_lenghts[-1])
        # DRAW_SPEED = 35 # fixed speed (independent of lenght) TODO)) make variable
        # ts = np.concatenate((np.array([0.]), cum_lenghts / DRAW_SPEED))
        new_path = [path[0]]

        position = 0
        next_pos = position + v

        for a, b, pos in zip(path[:-1], path[1:], cum_lenghts):
            # TODO))
            if pos < (next_pos):
                continue

            v = min(v+a, max_v)
            next_pos = position + v

            relative_t = inv_lerp(t_a, t_b, t)

            pass

        # for a, b, t_a, t_b in zip(path[:-1], path[1:], ts[:-1], ts[1:]):
        # if t_b < t:
        # new_path.append(b)
        # continue
        # # interpolate
        # relative_t = inv_lerp(t_a, t_b, t)
        # x = lerp(a[0], b[0], relative_t)
        # y = lerp(a[1], b[1], relative_t)
        # new_path.append([x,y])
        # break
        # return np.array(new_path)

class LaserPoint():
    def __init__(self,x,y,c: Color = (255,0,0),i= 255,blank=False):
        self.x = x
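The `simplyfied_path` method removed by this hunk was an unfinished attempt (its `as_array` never returns, and `v`, `t_a`, `t_b` are never initialised) to resample a path while accelerating from `start_v` up to `max_v`, as the docstring and the pointer to `tools.transition_path_points()` describe. A working sketch of that idea follows; the function name and details are assumptions, not the repository's implementation:

```python
# Sketch: resample a polyline by walking it with an accelerating velocity.
# Each step advances v units of arc length (v grows by a, capped at max_v)
# and emits one interpolated point. Hypothetical reading of simplyfied_path.
import numpy as np

def accelerated_resample(path: np.ndarray, start_v=10.0, max_v=20.0, a=2.0) -> np.ndarray:
    if len(path) < 2:
        return path
    seg_lengths = np.sqrt(np.sum(np.diff(path, axis=0) ** 2, axis=1))
    cum_lengths = np.concatenate(([0.0], np.cumsum(seg_lengths)))
    total = cum_lengths[-1]

    points = [path[0]]
    v, position = start_v, 0.0
    while position + v < total:
        position += v
        v = min(v + a, max_v)
        i = np.searchsorted(cum_lengths, position) - 1   # segment holding this arc length
        t = (position - cum_lengths[i]) / seg_lengths[i]
        points.append(path[i] + t * (path[i + 1] - path[i]))
    points.append(path[-1])
    return np.array(points)

# straight 100-unit line: samples get sparser as the walk speeds up
print(accelerated_resample(np.array([[0.0, 0.0], [100.0, 0.0]]))[:, 0])
# [  0.  10.  22.  36.  52.  70.  90. 100.]
```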
@@ -337,18 +284,14 @@ class LaserRenderer:
            first_time = tracker_frame.time

            # print('-------')
            paths = render_frame_to_pathlist( tracker_frame, prediction_frame, first_time, self.config, self.tracks, self.predictions, self.config.render_clusters)
            print(f"Paths: {len(paths)} ... points {sum([len(p.points) for p in paths])}")
            paths = render_frame_to_pointlist( tracker_frame, prediction_frame, first_time, self.config, self.tracks, self.predictions, self.config.render_clusters)
            laserframe = LaserFrame(paths)
            # pointlist=pointlist_test
            # print([(p.x, p.y) for p in pointlist])
            # pointlist.extend(pointlist_test)

            pointlist = laserframe.get_points_interpolated_by_distance(30, last_laser_point)
            print(len(pointlist))
            # pointlist = pointlist[::2]
            # print('decimated', len(pointlist))
            pointlist = laserframe.get_points_interpolated_by_distance(2, last_laser_point)
            # print(len(pointlist))

            if len(pointlist):
                last_laser_point = pointlist[-1]
@@ -356,7 +299,7 @@ class LaserRenderer:
            frameType = CHeliosPoint * len(pointlist)
            frame = frameType()

            # print(len(pointlist)) #, last_laser_point.x, last_laser_point.y)
            # print(len(pointlist), last_laser_point.x, last_laser_point.y)

            for j, point in enumerate(pointlist):
                frame[j] = CHeliosPoint(int(point.x), int(point.y), point.color[0],point.color[1], point.color[2], point.i)
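`CHeliosPoint * len(pointlist)` is the standard ctypes idiom for creating a fixed size C array type on the fly; the filled array is then handed to the Helios DAC as one frame. A self contained sketch of the pattern; the field layout of `CHeliosPoint` here is an assumption for illustration, not necessarily the project's exact definition:

```python
# Sketch: build a C-compatible array of laser points with ctypes, as done for
# the Helios DAC frame above. The field names, types and order are assumed.
import ctypes

class CHeliosPoint(ctypes.Structure):
    _fields_ = [
        ("x", ctypes.c_uint16),
        ("y", ctypes.c_uint16),
        ("r", ctypes.c_uint8),
        ("g", ctypes.c_uint8),
        ("b", ctypes.c_uint8),
        ("i", ctypes.c_uint8),
    ]

points = [(100, 200, (255, 0, 0), 255), (150, 250, (0, 255, 0), 255)]  # (x, y, rgb, intensity)

FrameType = CHeliosPoint * len(points)   # array type sized to this frame
frame = FrameType()                      # zero-initialised C array
for j, (x, y, color, i) in enumerate(points):
    frame[j] = CHeliosPoint(x, y, color[0], color[1], color[2], i)

print(frame[1].x, frame[1].g)  # 150 255
```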
@@ -420,7 +363,7 @@ def get_animation_position(track: Track, current_frame: Frame) -> float:

def circle_points(cx, cy, r, c: Color):
    r = 100
    steps = 30
    steps = 100
    pointlist: list[LaserPoint] = []
    for i in range(steps):
        x = int(cx + math.cos(i * (2*math.pi)/steps) * r)
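The visible change here is only the sample count per circle (30 versus 100 points); the hunk cuts off after the x coordinate. A plausible completion of the helper, sketched from the visible lines; everything after the `x = ...` line is my assumption and the points are plain tuples rather than `LaserPoint` instances:

```python
# Sketch: `steps` evenly spaced points on a circle of radius r around (cx, cy).
# Completion assumed; only the lines up to the x coordinate appear in the diff.
import math

def circle_points(cx: float, cy: float, r: float, c, steps: int = 100):
    pointlist = []
    for i in range(steps):
        angle = i * (2 * math.pi) / steps
        x = int(cx + math.cos(angle) * r)
        y = int(cy + math.sin(angle) * r)
        pointlist.append((x, y, c))
    return pointlist

# a 100-point circle of radius 100, centred in the 12-bit (0..0xFFF) laser space
pts = circle_points(0xFFF / 2, 0xFFF / 2, 100, (255, 0, 0))
print(len(pts), pts[0][:2], pts[25][:2])  # 100 (2147, 2047) (2047, 2147)
```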
@@ -441,7 +384,7 @@ def world_points_to_laser_points(points):
    return cv2.perspectiveTransform(np.array([points]), laser_H)

# Deprecated
def render_frame_to_pathlist(tracker_frame: Optional[Frame], prediction_frame: Optional[Frame], first_time: Optional[float], config: Namespace, tracks: Dict[str, Track], predictions: Dict[str, Track], as_clusters = True):
def render_frame_to_pointlist(tracker_frame: Optional[Frame], prediction_frame: Optional[Frame], first_time: Optional[float], config: Namespace, tracks: Dict[str, Track], predictions: Dict[str, Track], as_clusters = True):
    # TODO: replace opencv with QPainter to support alpha? https://doc.qt.io/qtforpython-5/PySide2/QtGui/QPainter.html#PySide2.QtGui.PySide2.QtGui.QPainter.drawImage
    # or https://github.com/pygobject/pycairo?tab=readme-ov-file
    # or https://pyglet.readthedocs.io/en/latest/programming_guide/shapes.html
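`world_points_to_laser_points` maps world coordinates into laser output coordinates with a precomputed homography `laser_H` via `cv2.perspectiveTransform`, which expects a float array of shape `(1, N, 2)`. A minimal sketch of producing and applying such a homography from four correspondences; the example corner values are invented, the project computes `laser_H` elsewhere:

```python
# Sketch: map world-space points into laser DAC coordinates with a homography,
# mirroring world_points_to_laser_points. The four correspondences are made up.
import cv2
import numpy as np

world_corners = np.float32([[0, 0], [10, 0], [10, 10], [0, 10]])              # e.g. metres
laser_corners = np.float32([[0, 0], [0xFFF, 0], [0xFFF, 0xFFF], [0, 0xFFF]])  # DAC units

laser_H, _ = cv2.findHomography(world_corners, laser_corners)

def world_points_to_laser_points(points):
    # perspectiveTransform wants float input of shape (1, N, 2)
    return cv2.perspectiveTransform(np.array([points], dtype=np.float32), laser_H)

print(world_points_to_laser_points([[5.0, 5.0]]))  # centre of the space, ~[[2047.5, 2047.5]]
```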
@@ -501,11 +444,6 @@ def render_frame_to_pathlist(tracker_frame: Optional[Frame], prediction_frame: O
            LaserPath(circle_points(0xFFF/2+2*test_r, 0xFFF/2, test_r, track_c))
        )
    else:
        # if not len(tracks):
        # paths.append(
        # LaserPath(circle_points(0xFFF/2+4*test_r, 0xFFF/2, test_r/2, pred_c))
        # )

        for track_id, track in tracks.items():
            inv_H = np.linalg.pinv(tracker_frame.H)
            history = track.get_projected_history(camera=config.camera)