WIP clustering of predictions

commit 06181c8440
parent 1aff04d444

4 changed files with 102 additions and 4 deletions
poetry.lock (generated)

@@ -3927,4 +3927,4 @@ watchdog = ["watchdog (>=2.3)"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10,<3.12,"
-content-hash = "716919f29853fc36b09594af35aa8ca09d4d3ceb7ad9cf54a85f4042569ecf1c"
+content-hash = "9cc6f3a29659b174253167ef5d9a4dbc1b99399265f372453200b7f86c833322"
pyproject.toml

@@ -43,6 +43,7 @@ setproctitle = "^1.3.3"
 bytetracker = { git = "https://github.com/rubenvandeven/bytetrack-pip" }
 jsonlines = "^4.0.0"
 tensorboardx = "^2.6.2.2"
+shapely = "^1"
 
 [build-system]
 requires = ["poetry-core"]
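A side note on the pin: shapely is constrained to 1.x, which matters for the clustering code added below. In shapely 1.x the GeometryCollection returned by ops.split supports len() and indexing; that sequence behaviour was removed in 2.0 in favour of .geoms. A quick illustration of the 1.x call pattern the new code relies on:

from shapely.geometry import LineString, Point
from shapely import ops

line = LineString([(0, 0), (10, 10)])
pieces = ops.split(line, Point(5, 5))  # GeometryCollection of two LineStrings
assert len(pieces) == 2                # sequence behaviour, shapely 1.x only
remainder = pieces[1]                  # under shapely 2.x this becomes pieces.geoms[1]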
@@ -1,4 +1,5 @@
 from argparse import Namespace
+from dataclasses import dataclass
 import json
 import math
 from pathlib import Path
@@ -8,6 +9,7 @@ from tempfile import mktemp
 import jsonlines
 import numpy as np
 import pandas as pd
+import shapely
 import trap.tracker
 from trap.config import parser
 from trap.frame_emitter import Camera, Detection, DetectionState, video_src_from_config, Frame
@@ -216,6 +218,65 @@ def transition_path_points(path: np.array, t: float):
             break
     return np.array(new_path)
 
+
+from shapely.geometry import LineString
+from shapely.geometry import Point
+from sklearn.cluster import AgglomerativeClustering
+
+
+@dataclass
+class PointCluster:
+    point: np.ndarray
+    source_points: List[np.ndarray]
+    probability: float
+
+
+def cluster_predictions_by_radius(start_point, lines, radius = .5):
+    # start = lines[0][0]
+    p0 = Point(*start_point)
+    print(lines[0][0], start_point)
+    circle = p0.buffer(radius).boundary
+
+    # print(lines)
+    # print([line.tolist() for line in lines])
+    linestrings = [LineString(line.tolist()) for line in lines]
+    intersections = [circle.intersection(line) for line in linestrings]
+    print(intersections)
+    intersections = [p if type(p) is Point else p.geoms[0] for p in intersections]
+
+    clustering = AgglomerativeClustering(None, linkage="ward", distance_threshold=radius/2)
+    # TODO)) test with cosine distance. because it should not be equal to radius
+    assigned_clusters = clustering.fit_predict(intersections)
+
+    clusters = defaultdict(lambda: [])
+    for point, c in zip(intersections, assigned_clusters):
+        clusters[c] = point
+
+    points = []
+    for c, points in clusters:
+        mean = np.mean(points, axis=0)
+        point = len(points) / len(assigned_clusters)
+
+        points.append(PointCluster(mean, points, point))
+
+    split_lines = [shapely.ops.split(line, point) for line, point in zip(linestrings, intersections)]
+    remaining_lines = [l[1] for l in split_lines if len(l) > 1]
+
+    print(points)
+
+
+# def cosine_similarity(point1, point2):
+#     dot_product = np.dot(point1, point2)
+#     norm1 = np.linalg.norm(point1)
+#     norm2 = np.linalg.norm(point2)
+#     return dot_product / (norm1 * norm2)
+
+
+# p = Point(5,5)
+# c = p.buffer(3).boundary
+# l = LineString([(0,0), (10, 10)])
+# i = c.intersection(l)
+
+
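As committed, cluster_predictions_by_radius is WIP and will not run end to end: clusters[c] = point overwrites the bucket instead of appending, the summary loop iterates the dict (rather than .items()) while shadowing the points list, the cluster probability lands in a variable named point, fit_predict receives shapely Point objects instead of coordinate arrays, and nothing is returned. A minimal corrected sketch of the same approach (the return value, and any variable names beyond the commit's own, are my assumptions):

from collections import defaultdict
from dataclasses import dataclass
from typing import List

import numpy as np
from shapely.geometry import LineString, Point
from sklearn.cluster import AgglomerativeClustering


@dataclass
class PointCluster:
    point: np.ndarray                  # cluster centroid
    source_points: List[np.ndarray]    # intersection points merged into this cluster
    probability: float                 # fraction of predicted lines in this cluster


def cluster_predictions_by_radius(start_point, lines, radius=0.5):
    # Where does each predicted trajectory cross a circle of `radius`
    # around the current position?
    circle = Point(*start_point).buffer(radius).boundary
    linestrings = [LineString(line.tolist()) for line in lines]
    intersections = [circle.intersection(ls) for ls in linestrings]
    # A crossing may be a Point or a MultiPoint; keep the first crossing.
    # (Assumes every prediction reaches the circle, as the commit does.)
    intersections = [p if isinstance(p, Point) else p.geoms[0] for p in intersections]

    # AgglomerativeClustering expects coordinate arrays, not shapely geometries.
    coords = np.array([[p.x, p.y] for p in intersections])
    clustering = AgglomerativeClustering(n_clusters=None, linkage="ward",
                                         distance_threshold=radius / 2)
    assigned = clustering.fit_predict(coords)

    # Group coordinates per cluster label: append, don't overwrite.
    clusters = defaultdict(list)
    for xy, label in zip(coords, assigned):
        clusters[label].append(xy)

    point_clusters = []
    for label, cluster_points in clusters.items():
        mean = np.mean(cluster_points, axis=0)
        probability = len(cluster_points) / len(assigned)
        point_clusters.append(PointCluster(mean, cluster_points, probability))
    return point_clusters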
@@ -235,6 +296,9 @@ def draw_track_predictions(img: cv2.Mat, track: Track, color_index: int, camera:
     # if convert_points:
     #     current_point = convert_points([current_point])[0]
 
+    color = bgr_colors[color_index % len(bgr_colors)]
+    color = tuple([int(c*opacity) for c in color])
+
     lines = []
     for pred_i, pred in enumerate(track.predictions):
         pred_coords = pred #cv2.perspectiveTransform(np.array([pred]), inv_H)[0].tolist()
@@ -247,13 +311,16 @@ def draw_track_predictions(img: cv2.Mat, track: Track, color_index: int, camera:
         line_points = np.rint(line_points).astype(int)
         # color = (128,0,128) if pred_i else (128,128,0)
 
-        color = bgr_colors[color_index % len(bgr_colors)]
-        color = tuple([int(c*opacity) for c in color])
-
+        line_points = line_points.reshape((-1,1,2))
         lines.append(line_points)
 
+    # TODO)) implement:
+    # these points are already projected. unlike `current_point` UNDO that, and cluster
+    # on actual (meter) positions.
+    cluster_predictions_by_radius(current_point, lines)
+
     # draw in a single pass
-    line_points = line_points.reshape((1, -1,1,2))
     cv2.polylines(img, lines, False, color, 2, cv2.LINE_AA)
     # for start, end in zip(line_points[:-1], line_points[1:]):
     #     cv2.line(img, start, end, color, 2, lineType=cv2.LINE_AA)
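The TODO in this hunk says clustering should happen on actual (meter) positions rather than the already-projected pixel points collected in lines. A hedged sketch of that unprojection, assuming the inv_H matrix from the commented-out perspectiveTransform call is in scope (unproject_lines is a hypothetical helper, not part of the commit):

import cv2
import numpy as np

def unproject_lines(lines, inv_H):
    # Hypothetical helper: invert the matrix that projected the predictions
    # into image space, mapping each polyline back to world (meter) coords.
    H = np.linalg.inv(inv_H)
    return [
        cv2.perspectiveTransform(line.reshape((-1, 1, 2)).astype(np.float32), H).reshape((-1, 2))
        for line in lines
    ]

# ...then cluster in meters, with current_point left as-is per the TODO:
# cluster_predictions_by_radius(current_point, unproject_lines(lines, inv_H))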
@@ -27,6 +27,36 @@ def inv_lerp(a: float, b: float, v: float) -> float:
     """
     return (v - a) / (b - a)
 
+# def line_intersection(line1, line2):
+#     xdiff = (line1[0][0] - line1[1][0], line2[0][0] - line2[1][0])
+#     ydiff = (line1[0][1] - line1[1][1], line2[0][1] - line2[1][1])
+
+#     def det(a, b):
+#         return a[0] * b[1] - a[1] * b[0]
+
+#     div = det(xdiff, ydiff)
+#     if div == 0:
+#         return None
+
+#     d = (det(*line1), det(*line2))
+#     x = det(d, xdiff) / div
+#     y = det(d, ydiff) / div
+#     return x, y
+
+
+# def polyline_intersection(poly1, poly2):
+#     for i, p1_first_point in enumerate(poly1[:-1]):
+#         p1_second_point = poly1[i + 1]
+
+#         for j, p2_first_point in enumerate(poly2[:-1]):
+#             p2_second_point = poly2[j + 1]
+
+#             intersection = line_intersection((p1_first_point, p1_second_point), (p2_first_point, p2_second_point))
+#             if intersection:
+#                 return intersection  # returns x,y
+
+#     return None
+
+
 def get_bins(bin_size: float):
     return [[bin_size, 0], [bin_size, bin_size], [0, bin_size], [-bin_size, bin_size], [-bin_size, 0], [-bin_size, -bin_size], [0, -bin_size], [bin_size, -bin_size]]
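The commented-out line_intersection / polyline_intersection helpers above do segment intersection by hand; with shapely now a dependency they reduce to a single intersection call. A sketch of the equivalent (my naming, not in the commit):

from shapely.geometry import LineString, Point

def polyline_intersection(poly1, poly2):
    # First intersection of two polylines as (x, y), or None if they miss.
    isect = LineString(poly1).intersection(LineString(poly2))
    if isect.is_empty:
        return None
    if isinstance(isect, Point):
        return isect.x, isect.y
    # MultiPoint, GeometryCollection, or overlapping segment: pick a point on it.
    p = isect.representative_point()
    return p.x, p.y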