trajpred/04_track_objects_with_deeps...


{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## Use DeepSORT instead of SORT for tracking\n",
"\n",
"Based on [ZQPei's repository](https://github.com/ZQPei/deep_sort_pytorch), I replace SORT with DeepSort:\n",
"\n",
"> Deep sort is basicly the same with sort but added a CNN model to extract features in image of human part bounded by a detector. [ZQPei](https://github.com/ZQPei/deep_sort_pytorch)\n",
"\n",
"Other additions:\n",
"\n",
"* Use a generator function (a programming construct) for for video analysis and detection per frame.\n",
"* This also allows for caching of intermediate steps"
]
},
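{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal sketch of the generator-with-cache pattern that `detect_persons()` below follows (the function and parameter names here are illustrative, not part of the notebook):\n",
"\n",
"```python\n",
"import pickle\n",
"from pathlib import Path\n",
"import cv2\n",
"\n",
"def frames_with_results(video_path: Path, cache_dir: Path, analyse):\n",
"    # analyse is any per-frame function, e.g. a detector\n",
"    cachefile = cache_dir / f'{video_path.name}.pcl'\n",
"    if cachefile.exists():\n",
"        # replay cached per-frame results without re-running the analysis\n",
"        with cachefile.open('rb') as fp:\n",
"            yield from pickle.load(fp)\n",
"        return\n",
"    video = cv2.VideoCapture(str(video_path))\n",
"    results = []\n",
"    while True:\n",
"        ret, frame = video.read()\n",
"        if not ret:\n",
"            break\n",
"        result = analyse(frame)\n",
"        results.append(result)\n",
"        yield result\n",
"    # only write the cache once the whole video has been processed\n",
"    with cachefile.open('wb') as fp:\n",
"        pickle.dump(results, fp)\n",
"```"
]
},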
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import cv2\n",
"from pathlib import Path\n",
"import numpy as np\n",
"from PIL import Image\n",
"import torch\n",
"from torchvision.io.video import read_video\n",
"import matplotlib.pyplot as plt\n",
"from torchvision.utils import draw_bounding_boxes\n",
"from torchvision.transforms.functional import to_pil_image\n",
"from torchvision.models.detection import retinanet_resnet50_fpn_v2, RetinaNet_ResNet50_FPN_V2_Weights\n",
"import tempfile "
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"source = Path('../DATASETS/VIRAT_subset_0102x')\n",
"videos = list(source.glob('*.mp4'))\n",
"#tmpdir = Path(tempfile.gettempdir()) / 'trajpred' #this doesn't survive reboot, change to a local cache dir\n",
"tmpdir = Path('.') / 'cache'\n",
"tmpdir.mkdir(exist_ok=True)\n"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"device(type='cuda')"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
"device"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Based on code from: https://stackabuse.com/retinanet-object-detection-with-pytorch-and-torchvision/"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"weights = RetinaNet_ResNet50_FPN_V2_Weights.DEFAULT\n",
"model = retinanet_resnet50_fpn_v2(weights=weights, score_thresh=0.35)\n",
"model.to(device)\n",
"# Put the model in inference mode\n",
"model.eval()\n",
"# Get the transforms for the model's weights\n",
"preprocess = weights.transforms().to(device)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"> The score_thresh argument defines the threshold at which an object is detected as an object of a class. Intuitively, it's the confidence threshold, and we won't classify an object to belong to a class if the model is less than 35% confident that it belongs to a class."
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"The result from a single prediction coming from `model(batch)` looks like:\n",
"\n",
"```python\n",
"{'boxes': tensor([[5.7001e+02, 2.5786e+02, 6.3138e+02, 3.6970e+02],\n",
" [5.0109e+02, 2.4508e+02, 5.5308e+02, 3.4852e+02],\n",
" [3.4096e+02, 2.7015e+02, 3.6156e+02, 3.1857e+02],\n",
" [5.0219e-01, 3.7588e+02, 9.7911e+01, 7.2000e+02],\n",
" [3.4096e+02, 2.7015e+02, 3.6156e+02, 3.1857e+02],\n",
" [8.3241e+01, 5.8410e+02, 1.7502e+02, 7.1743e+02]]),\n",
" 'scores': tensor([0.8525, 0.6491, 0.5985, 0.4999, 0.3753, 0.3746]),\n",
" 'labels': tensor([64, 64, 1, 64, 18, 86])}\n",
"```"
]
},
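{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"The integer labels index into the COCO category list. A quick way to confirm which label corresponds to a person (assuming the torchvision detection weights expose their category names via `weights.meta`, as they normally do):\n",
"\n",
"```python\n",
"categories = weights.meta['categories']\n",
"print(categories[1])  # expected to print 'person'\n",
"```\n",
"\n",
"This is why the detection loop below keeps only `prediction['labels'] == 1`."
]
},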
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"%matplotlib inline\n",
"\n",
"\n",
"import pylab as pl\n",
"from IPython import display\n",
"from utils.timer import Timer\n",
"import pickle\n",
"\n",
"def detect_persons(video_path: Path):\n",
" \"\"\"\n",
" generator that returns detections as structure: [[x1,y1,x2,y2,score, label],...] for each frame\n",
" \"\"\"\n",
" video = cv2.VideoCapture(str(video_path))\n",
"\n",
"\n",
" cachefile = tmpdir / f\"detections-all-{video_path.name}.pcl\"\n",
" if cachefile.exists():\n",
" with cachefile.open('rb') as fp:\n",
" print('use cache', cachefile)\n",
" all_detections = pickle.load(fp)\n",
" for detections in all_detections:\n",
" ret, frame = video.read()\n",
" yield frame, detections\n",
" else:\n",
" all_detections = []\n",
" while True:\n",
" ret, frame = video.read()\n",
" \n",
" if not ret:\n",
" all_detections.append([])\n",
" # print(\"Can't receive frame (stream end?). Exiting ...\")\n",
" break\n",
"\n",
" t = torch.from_numpy(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))\n",
" # change axes of image loaded image to be compatilbe with torch.io.read_image (which has C,W,H format instead of W,H,C)\n",
" t = t.permute(2, 0, 1)\n",
"\n",
" batch = preprocess(t)[None, :].to(device)\n",
" # no_grad can be used on inference, should be slightly faster\n",
" with torch.no_grad():\n",
" predictions = model(batch)\n",
" prediction = predictions[0] # we feed only one frame at the once\n",
"\n",
" mask = prediction['labels'] == 1 # if we want more than one: np.isin(prediction['labels'], [1,86])\n",
"\n",
" scores = prediction['scores'][mask]\n",
" labels = prediction['labels'][mask]\n",
" boxes = prediction['boxes'][mask]\n",
" \n",
" # TODO: introduce confidence and NMS supression: https://github.com/cfotache/pytorch_objectdetecttrack/blob/master/PyTorch_Object_Tracking.ipynb\n",
" # (which I _think_ we better do after filtering)\n",
" # alternatively look at Soft-NMS https://towardsdatascience.com/non-maximum-suppression-nms-93ce178e177c\n",
"\n",
" # dets - a numpy array of detections in the format [[x1,y1,x2,y2,score, label],[x1,y1,x2,y2,score, label],...]\n",
" detections = np.array([np.append(bbox, [score, label]) for bbox, score, label in zip(boxes.cpu(), scores.cpu(), labels.cpu())])\n",
" \n",
" all_detections.append(detections)\n",
" \n",
" yield frame, detections\n",
" \n",
" with cachefile.open('wb') as fp:\n",
" pickle.dump(all_detections, fp)\n"
]
},
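{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Regarding the NMS TODO above: a minimal sketch of how non-maximum suppression could be applied to the person boxes before they are handed to the tracker, using `torchvision.ops.nms`. This is not wired into the pipeline, and the IoU threshold of 0.5 is an arbitrary choice:\n",
"\n",
"```python\n",
"from torchvision.ops import nms\n",
"\n",
"# boxes: Tensor[N, 4] in (x1, y1, x2, y2); scores: Tensor[N]\n",
"keep = nms(boxes, scores, iou_threshold=0.5)\n",
"boxes, scores, labels = boxes[keep], scores[keep], labels[keep]\n",
"```"
]
},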
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"def detect_persons_deepsort_wrapper(video_path: Path):\n",
" \"\"\"make detect_persons() compatible with\n",
" deep_sort_realtime tracker by going from ltrb to ltwh and\n",
" different nesting\n",
" \"\"\"\n",
" for frame, detections in detect_persons(video_path):\n",
" yield frame, [([d[0], d[1], d[2]-d[0], d[3]-d[1]], d[4], d[5]) for d in detections]"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"from deep_sort_realtime.deepsort_tracker import DeepSort\n",
"\n",
"def track_video(video_path: Path) -> dict:\n",
" # mot_tracker = Sort()\n",
" mot_tracker = DeepSort(max_age=5)\n",
" \n",
" for frame_nr, (frame, detections) in enumerate(detect_persons(video_path)):\n",
" detections = [([d[0], d[1], d[2]-d[0], d[3]-d[1]], d[4], d[5]) for d in detections]\n",
" \n",
" # tracks structure: [[x1,y1,x2,y2,score, obj_id],...]\n",
" # tracks = mot_tracker.update(detections)\n",
" tracks = mot_tracker.update_tracks(detections, frame=frame)\n",
" # print(tracks)\n",
"\n",
" # now convert back to boxes and labels\n",
" # # print(tracks)\n",
" # boxes = np.array([t.to_ltwh() for t in tracks])\n",
" # # initialize empty with the necesserary dimensions for drawing_bounding_boxes glitch\n",
" # t_boxes = torch.from_numpy(boxes) if len(boxes) else torch.Tensor().new_empty([0, 6])\n",
" # labels = [str(int(t.track_id)) for t in tracks]\n",
" # print(t_boxes, boxes, labels)\n",
"\n",
" for track in tracks:\n",
" yield track\n",
" \n",
"\n",
" # display.clear_output(wait=True)\n",
"\n",
"# for track in track_video(Path(\"../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_00_000060_000218.mp4\")):\n",
"# print(track)\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Interestingly, the Deepsort tracker does not keep the history of the track, so I needed to alter the code a little: I do not append the track itself, but collect `track.to_ltwh()` for every frame."
]
},
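{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal sketch of that per-frame history collection, using only `track_id` and `to_ltwh()`, which the `deep_sort_realtime` tracks expose and which are used throughout this notebook:\n",
"\n",
"```python\n",
"history = {}  # track_id -> list of [left, top, width, height] boxes, one per frame\n",
"for track in mot_tracker.update_tracks(detections, frame=frame):\n",
"    history.setdefault(track.track_id, []).append(track.to_ltwh())\n",
"```"
]
},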
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"def track_videos(video_paths: list[Path]) -> dict:\n",
" \"\"\"\n",
" returns tracked instances as dict with lists:\n",
" {'obj_id': [ [x1, y1, x2, y2, obj_id, obj_class ], ...]}\n",
" \"\"\"\n",
" # collect instances of all videos with unique key\n",
" video_paths = list(video_paths)\n",
" tracked_instances = {}\n",
" timer = Timer()\n",
" for i, p in enumerate(video_paths):\n",
" print(f\"{i}/{len(video_paths)}: {p}\")\n",
"\n",
" cachefile = tmpdir / (p.name + '-deepsort.pcl')\n",
" if cachefile.exists():\n",
" print('\\tLoad pickle')\n",
" with cachefile.open('rb') as fp:\n",
" new_instances = pickle.load(fp)\n",
" else:\n",
" #continue # to quickly test from cache\n",
" new_instances = {}\n",
" timer.tic()\n",
" for track in track_video(p):\n",
" track_id = f\"{i}_{str(int(track.track_id))}\"\n",
" # track = np.append(track, i) # append video id, for disambiguation later\n",
" if track_id not in new_instances:\n",
" new_instances[track_id] = []\n",
" new_instances[track_id].append(track.to_ltwh())\n",
" with cachefile.open('wb') as fp:\n",
" pickle.dump(new_instances, fp)\n",
" print(\" time for video: \", timer.toc())\n",
" tracked_instances.update(new_instances)\n",
" \n",
" return tracked_instances"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_00_000060_000218.mp4\n"
]
}
],
"source": [
"tracked_instances = track_videos(videos)\n",
"len(tracked_instances)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## Project / Homography\n",
"\n",
"Now that all trajectories are captured (for a single video), these can then be projected onto a flat surface by [homography](https://en.wikipedia.org/wiki/Homography_(computer_vision)). The necessary $H$ matrix is already provided by VIRAT in the [homographies folder](https://data.kitware.com/#folder/56f581c88d777f753209c9d2) of their online data repository."
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"\n",
"homography = list(source.glob('*img2world.txt'))[0]\n",
"H = np.loadtxt(homography, delimiter=',')\n",
"\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"The homography matrix helps to transform points from image space to a flat world plane. The `README_homography.txt` from VIRAT describes:\n",
"\n",
"> Roughly estimated 3-by-3 homographies are included for convenience. \n",
"> Each homography H provides a mapping from image coordinate to scene-dependent world coordinate.\n",
"> \n",
"> [xw,yw,zw]' = H*[xi,yi,1]'\n",
"> \n",
"> xi: horizontal axis on image with left top corner as origin, increases right.\n",
"> yi: vertical axis on image with left top corner as origin, increases downward.\n",
"> \n",
"> xw/zw: world x coordinate\n",
"> yw/zw: world y coordiante"
]
},
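{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal sketch of applying $H$ to a single image point by hand (the pixel coordinate is made up for illustration). This should match what `cv2.perspectiveTransform` does further below, up to floating point error:\n",
"\n",
"```python\n",
"point_img = np.array([720.0, 360.0, 1.0])  # [xi, yi, 1] for an arbitrary pixel\n",
"xw, yw, zw = H @ point_img                 # [xw, yw, zw]' = H * [xi, yi, 1]'\n",
"world_xy = np.array([xw / zw, yw / zw])    # normalise by zw to get world coordinates\n",
"```"
]
},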
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"(1200, 900)\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABLAAAAOECAIAAAA+D1+tAAEAAElEQVR4nOy915Mk2XUeftOU97799LiddVgYklgq9CPBEKkHvTD0V+pJEdIzgyGSEgCBIMxizezMjutpb8p7l5m/hw/36FSamu7ZXWDBPt9DR3d1VubNa443hud5SiAQCAQCgUAgEAgEtw/mH3sAAoFAIBAIBAKBQCD440AUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAgEAoFAIBAIBAKBQCC4TTC2t7c9z1NKeZ6HX1zXvby8qFZr9CH9yzRDPIr4F76olIrFYoZheJ4Xj8cNw7Asy7ZtfNHzvBcvnt+7d9/zPMdxPM9zXRc3j8fj+NB13eVyid8ty3IcBx/i6bjbYrEIDmO5XNq2HY/H4/G4UspxnOVy6TiOYRjBix3HSafT4/F4sVhUq9VMJjOfz03TNAxjOp1Wq9XFYnF6erq1tbW5ufnLX/4ym812u93pdFqv103T7HQ6SqlUKpVIJPDinueNRqN4PH737t2Tk5NYLHbnzp1qtZrP50ulUjKZTKVSsVis3+8/f/788ePHh4eHrusWCgXTNIvF4v7+/jvvvFOr1RaLRb/fN00znU6Xy+XxeNzr9a6urv7t3/6t2+3+6Ec/Ojs7SyaT9+/fn0wmtm3HYrHz8/NkMnnnzp3ZbPbzn//89PS0XC6Xy+V2u10qlebz+c7OztbW1uHhYbVa/e1vfzudTn/84x+XSqUnT54cHR31er1sNptKpUzTdF03l8ttbW0Vi0XDMD799FOl1Hw+Xy6XhmE4jjOdTpVShmGYpul53mKxsCyrWq3W63WlVKVScRzn8vLy9PR0MBjUarX/+B//Yzqdxt0mk0mtVsvn861W6/Ly0rZt13Vns5nneYZhDIfDdDpdKpUqlUo2mzVNE0vveV7orosC7oY9iaU3DAP7h3/If/cfCcOo1WrFYhHva5pmPB6PxWJYi5OTk+Pj48FgkEgkGo2GaZqDwaDVao3HY8dxaOOlUqnQXUePUOw0TSYT7G06JpiZ2Ww2nU7pWzga0+l0uVzGYjHcMJlM0uzFYjHXdS3LsixruVzOZrNMJmNZVr1ej8fjR0dH4/EYk5zL5QzD6Pf7SqnhcHh0dJRMJpPJZDqdzuVymUymWq12Op3PP//8448/zuVy0+k0nU4nEonRaJRMJjOZDM2qZVlKqcViMZlM8Dh63/l8fnV1dXV1hXe5vLzEyAuFwp07dzDIQqHQarV+97vfpdPp9957D7uC5iqfz1uWdXBwoJQqlUqmac5mM9AT13Vt28ZaP3nyZD6fz+fz2Wy2v78/mUySyWSn04nFYvv7+7PZ7OXLl/P5HKSGNgN+4XsD+00xmkZrZBgG7RZOKvGnbdvB5Y7H4/1+/969ex988MHnn
3/e6/VM0zRNMxaLYZdyLJfL0P2M+eRX4miEXuy6Lgbpui6Oz3K5/NGPftTpdDKZzMbGhmma8/nctm28Pr8tnhJ6Z5oNH7AJgwg9Vpir4J3XXAyCzA9L6EhC73xT8H37xpvz5cNMgmeBHfgQ+oJR+EbeJRTYYNg/+IRTwj8Yol4Q21UphTOiovfGTRH6gm+9KHzSQg+LYRiz2Qy8FQdNKQUZw3Xd4XA4m80syyIphY4qpzme5y2Xy+Vy6bouZgOSSTqdfvjw4XK5PDw8TCQS+Xw+lUqdnp46jpNIJNLptGmai8XCcZxerwchCo+wNDKZTCKRSKVSy+VyOBy6rptMJpU+X6AYSimIUoPBAH/idDiOA45Pu4jkN9M0cZ9YLAYJ0LZty7KSySROh+M4s9kMd3BdNxaL4TVjsZhlWeCtGxsb+G8ikcBRGgwGo9FI6c2AVyDpEYME1cKflmXl83maW6UUeDfxIMdxbNsmAlgqlTB4fi4gfmDO8bJgzZhGPB1LYxgG3hoDIL7AB8A/XywW2N6
"text/plain": [
"<PIL.PngImagePlugin.PngImageFile image mode=RGB size=1200x900>"
]
},
"execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"print(Image.open(\"../DATASETS/VIRAT_subset_0102x/VIRAT_0102_homography_img2world.png\").size)\n",
"Image.open(\"../DATASETS/VIRAT_subset_0102x/VIRAT_0102_homography_img2world.png\")\n"
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'mean': array([2.21015548e+02, 2.41720687e+02, 4.25961370e-01, 2.38039405e+01,\n",
" 8.98401171e-02, 2.16254966e-02, 4.59794012e-07, 4.23068743e-02]), 'covariance': array([[1.81834256e+01, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
" 1.51804145e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n",
" [0.00000000e+00, 1.81834256e+01, 0.00000000e+00, 0.00000000e+00,\n",
" 0.00000000e+00, 1.51804145e+00, 0.00000000e+00, 0.00000000e+00],\n",
" [0.00000000e+00, 0.00000000e+00, 1.53354367e-03, 0.00000000e+00,\n",
" 0.00000000e+00, 0.00000000e+00, 2.86545590e-08, 0.00000000e+00],\n",
" [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.81834256e+01,\n",
" 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.51804145e+00],\n",
" [1.51804145e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
" 3.13885555e-01, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n",
" [0.00000000e+00, 1.51804145e+00, 0.00000000e+00, 0.00000000e+00,\n",
" 0.00000000e+00, 3.13885555e-01, 0.00000000e+00, 0.00000000e+00],\n",
" [0.00000000e+00, 0.00000000e+00, 2.86545590e-08, 0.00000000e+00,\n",
" 0.00000000e+00, 0.00000000e+00, 2.89987800e-09, 0.00000000e+00],\n",
" [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.51804145e+00,\n",
" 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 3.13885555e-01]]), 'track_id': '9', 'hits': 23, 'age': 29, 'time_since_update': 6, 'state': 3, 'features': [array([0.00404739, 0. , 0. , ..., 0.28051758, 0.4423828 ,\n",
" 0.4506836 ], dtype=float32)], '_n_init': 3, '_max_age': 5, 'original_ltwh': None, 'det_class': 1.0, 'det_conf': None, 'instance_mask': None, 'others': None}\n",
"[[[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]\n",
"\n",
" [[221.01554825 253.62265674]]] [[[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]\n",
"\n",
" [[-37.48426637 5.76580803]]] 28\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABhUAAAKZCAYAAAC/T6pfAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9Waxt2X3f937HGLNb3e5PW6eKVWSx2IukSVGhIlzHuMplgIAXuoABNxe2I8AO/CDANh9sM7EkCEHCByOCgECOHgIhD0EA5yV5USBHl4hiJJKohqJiUWJX3Tmn6jT77H61sxnjPow5115r71NVu2g2kvz7AKfqnL3XWnPuteaca+3/f/z/fxNCCIiIiIiIiIiIiIiIiLwD+8PeARERERERERERERER+fNBSQUREREREREREREREbkSJRVERERERERERERERORKlFQQEREREREREREREZErUVJBRERERERERERERESuREkFERERERERERERERG5EiUVRERERERERERERETkSpRUEBERERERERERERGRK1FSQURERERERERERERErkRJBRERERERERERERERuZJ3nVT41//6X/P5z3+e27dvY4zhf/lf/pd3vM9v/uZv8pf+0l8iz3NefPFF/vv//r//LnZVRERERERERERERER+mN51UmEymfDxj3+cX/7lX77S7V999VX+4//4P+av/JW/wte+9jX+4T/8h/zdv/t3+Vf/6l+9650VEREREREREREREZEfHhNCCN/1nY3hf/6f/2d+6qd+6i1v80/+yT/h137t1/jjP/7j5df++l//6xwfH/Prv/7r3+2mRURERERERERERETkByz5fm/gt3/7t/nJn/zJta997nOf4x/+w3/4lvdZLBYsFovlv733HB4esru7izHm+7WrIiIiInJFIQTOzs64ffs21mpMl4iIiIiIyL8rvu9JhYcPH3Ljxo21r924cYPT01Nmsxm9Xu/Sfb70pS/xC7/wC9/vXRMRERGRf0v37t3jzp07P+zdEBERERERkR+Q73tS4bvxxS9+kS984QvLf5+cnPDcc89x7949NjY2foh7JiIiIiIAp6enPPvss4xGox/2roiIiIiIiMgP0Pc9qXDz5k0ePXq09rVHjx6xsbHx1CoFgDzPyfP80tc3NjaUVBARERH5M0StKUVERERERP7d8n1vgPvZz36WL3/5y2tf+43f+A0++9nPfr83LSIiIiIiIiIiIiIi30PvOqkwHo/52te+xte+9jUAXn31Vb72ta9x9+5dILYu+tt/+28vb//3//7f55VXXuEf/+N/zDe+8Q3+xb/4F/xP/9P/xD/6R//oe/MTiIiIiIiIiIiIiIjID8S7Tir8/u//Pp/85Cf55Cc/CcAXvvAFPvnJT/JzP/dzADx48GCZYAB44YUX+LVf+zV+4zd+g49//OP81//1f81/99/9d3zuc5/7Hv0IIiIiIiIiIiIiIiLyg2BCCOGHvRPv5PT0lM3NTU5OTjRTQUREROTPAH0+ExERERER+XfT932mgoiIiIiIiIiIiIiI/MWgpIKIiIiIiIiIiIiIiFyJkgoiIiIiIiIiIiIiInIlSiqIiIiIiIiIiIiIiMiVKKkgIiIiIiIiIiIiIiJXoqSCiIiIiIiIiIiIiIhciZIKIiIiIiIiIiIiIiJyJUoqiIiIiIiIiIiIiIjIlSipICIiIiIiIiIiIiIiV6KkgoiIiIiIiIiIiIiIXImSCiIiIiIiIiIiIiIiciVKKoiIiIiIiIiIiIiIyJUoqSAiIiIiIiIiIiIiIleipIKIiIiIiIiIiIiIiFyJkgoiIiIiIiIiIiIiInIlSiqIiIiIiIiIiIiIiMiVKKkgIiIiIiIiIiIiIiJXoqSCiIiIiIiIiIiIiIhciZIKIiIiIiIiIiIiIiJyJUoqiIiIiIiIiIiIiIjIlSipICIiIiIiIiIiIiIiV6KkgoiIiIiIiIiIiIiIXImSCiIiIiIiIiIiIiIiciVKKoiIiIiIiIiIiIiIyJUoqSAiIiIiIiIiIiIiIleipIKIiIiIiIiIiIiIiFyJkgoiIiIiIiIiIiIiInIlSiqIiIiIiIiIiIiIiMiVKKkgIiIiIiIiIiIiIiJXoqSCiIiIiIiIiIiIiIhciZIKIiIiIiIiIiIiIiJyJUoqiIiIiIiIiIiIiIjIlSipICIiIiIiIiIiIiIiV6KkgoiIiIiIiIiIiIiIXImSCiIiIiIiIiIiIiIiciVKKoiIiIiIiIiIiIiIyJUoqSAiIiIiIiIiIiIiIleipIKIiIiIiIiIiIiIiFyJkgoiIiIiIiIiIiIiInIlSiqIiIiIiIiIiIiIiMiVKKkgIiIiIiIiIiIiIiJXoqSCiIiIiIiIiIiIiIhciZIKIiIiIiIiIiIiIiJyJUoqiIiIiIiIiIiIiIjIlSipICIiIiIiIiIiIiIiV6KkgoiIiIiIiIiIiIiIXImSCiIiIiIiIiIiIiIiciVKKoiIiIiIiIiIiIiIyJUoqSAiIiIiIiIiIiIiIleipIKIiIiIiIiIiIiIiFyJkgoiIiIiIiIiIiIiInIlSiqIiIiIiIiIiIiIiMiVKKkgIiIiIiIiIiIiIiJXoqSCiIiIiIiIiIiIiIhciZIKIiIiIiIiIiIiIiJyJUoqiIiIiIiIiIiIiIjIlSipICIiIiIiIiIiIiIiV6KkgoiIiIiIiIiIiIiIXImSCiIiIiIiIiIiIiIiciVKKoiIiIiIiIiIiIiIyJUoqSAiIiIiIiIiIiIiIleipIKIiIiIiIiIiIiIiFyJkgoiIiIiIiIiIiIiInIlSiqIiIiIiIiIiIiIiMiVKKkgIiIiIiIiIiIiIiJXoqSCiIiIiIiIiIiIiIhciZIKIiIiIiIiIiIiIiJyJUoqiIiIiIiIiIiIiIjIlSipICIiIiIiIiIiIiIiV6KkgoiIiIiIiIiIiIiIXImSCiIiIiIiIiIiIiIiciVKKoiIiIiIiIiIiIiIyJUoqSAiIiIiIiIiIiIiIleipIKIiIiIiIiIiIiIiFyJkgoiIiIiIiIiIiIiInIlSiqIiIiIiIiIiIiIiMiVKKkgIiIiIiIiIiIiIiJXoqSCiIiIiIiIiIiIiIhciZIKIiIiIiIiIiIiIiJyJUoqiIiIiIiIiIiIiIjIlSipICIiIiIiIiIiIiIiV6KkgoiIiIiIiIiIiIiIXImSCiIiIiIiIiIiIiIiciVKKoiIiIiIiIiIiIiIyJUoqSAiIiIiIiIiIiIiIleipIKIiIiIiIiIiIiIiFyJkgoiIiIiIiIiIiIiInIlSiqIiIiIiIiIiIiIiMiVKKkgIiIiIiIiIiIiIiJXoqSCiIiIiIiIiIiIiIhciZIKIiIiIiIiIiIiIiJyJUoqiIiIiIiIiIiIiIjIlSipICIiIiIiIiIiIiIiV6KkgoiIiIiIiIiIiIiIXImSCiIiIiIiIiIiIiIiciVKKoiIiIiIiIiIiIiIyJUoqSAiIiIiIiIiIiIiIleipIKIiIiIiIiIiIiIiFyJkgoiI
iIiIiIiIiIiInIlSiqIiIiIiMhfSL/8y7/M888/T1EU/NiP/Ri/+7u/+8PeJRERERGRP/eUVBARERERkb9w/uW//Jd84Qtf4Od//uf56le/ysc//nE+97nP8fjx4x/2romIiIiI/LmmpIKIiIiIiPyF84u/+Iv8vb/39/jpn/5pPvzhD/Mrv/Ir9Pt9fvVXf/WHvWsiIiIiIn+uJT/sHRAREREREfleKsuSP/iDP+CLX/zi8mvWWn7yJ3+S3/7t337qfRaLBYvFYvlv7z2Hh4fs7u5ijPm+77OIiIiIyA9TCIGzszNu376NtW9fi6CkgoiIiIiI/IXy5MkTmqbhxo0ba1+/ceMG3/jGN556ny996Uv8wi/8wg9i90RERERE/sy6d+8ed+7cedvbKKkgIiIiIiL/zvviF7/IF77wheW/T05OeO6557j53utgwAdPCGH5x/sAPoD3lx4rTRN6RY9FuaDyngBkaYq1tr2vp/ENTePxTYMzBmto/x0fO4SAwQFmuc2LjDFrj4lN1qoqqrr
"text/plain": [
"<Figure size 2000x800 with 2 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"from matplotlib import pyplot as plt\n",
"\n",
"fig = plt.figure(figsize=(20,8))\n",
"ax1, ax2 = fig.subplots(1,2)\n",
"\n",
"ax1.set_aspect(1)\n",
"ax2.imshow(Image.open(\"../DATASETS/VIRAT_subset_0102x/VIRAT_S_0102.jpg\"))\n",
"\n",
"for i, tracks in enumerate(tracked_instances.values()):\n",
" bboxes = [track.to_ltrb() for track in tracks]\n",
" traj = np.array([[[0.5 * (det[0]+det[2]), det[3]]] for det in bboxes])\n",
" projected_traj = cv2.perspectiveTransform(traj,H)\n",
" # plt.plot(projected_traj[:,0])\n",
" ax1.plot(projected_traj[:,:,0].reshape(-1), projected_traj[:,:,1].reshape(-1))\n",
" ax2.plot(traj[:,:,0].reshape(-1), traj[:,:,1].reshape(-1))\n",
" \n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"One of the things that stands out from these plots in the detections at the edges of the image. In particular the bottom edge (on the right-hand side in the projected image) is visible. As the 'anchor' of the person detection is follows the detection bounding box, which is no longer moving, but growing or shrinking at the edge.\n",
"\n",
"Let's apply a simple filter to ditch the detections close to the edge."
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"removed 2.87% of bounding boxes\n"
]
}
],
"source": [
"def filter_edges(tracked_instances):\n",
" filtered_tracks = {}\n",
" for track_id in tracked_instances:\n",
" bboxes = tracked_instances[track_id]\n",
" track = list(filter(lambda bbox: bbox[1] < 710 and bbox[3] < 710, bboxes))\n",
" if len(track):\n",
" filtered_tracks[track_id] = track\n",
" return filtered_tracks\n",
"\n",
"filtered_tracks = filter_edges(tracked_instances)\n",
"\n",
"# validate it works:\n",
"bbox_count = sum([len(t) for t in tracked_instances.values()])\n",
"bbox_count_filtered = sum([len(t) for t in filtered_tracks.values()])\n",
"\n",
"print(f\"removed {((bbox_count-bbox_count_filtered)/bbox_count)*100:.2f}% of bounding boxes\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Below we plot the filtered trajectories"
]
},
{
"cell_type": "code",
"execution_count": 77,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABkAAAAHJCAYAAADdFPU5AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9Wawt2ZnfB/7WFMOeznjHvDcnJpOzSJnFGuRCy1ZLXbaG9oBG15sLevNDuQHTD+16kawGBPnRBmwXINgQDBi23DJgQIDcctnlggxLRYoiq2gWq5gkc7rzvWfeZ08RsYZ+WBFx9j7n5s0kq4pZw/oRl3mG2BFrR6xYsc/3/77/J0IIgUQikUgkEolEIpFIJBKJRCKRSCQSiT9ByI97AIlEIpFIJBKJRCKRSCQSiUQikUgkEn/QJAEkkUgkEolEIpFIJBKJRCKRSCQSicSfOJIAkkgkEolEIpFIJBKJRCKRSCQSiUTiTxxJAEkkEolEIpFIJBKJRCKRSCQSiUQi8SeOJIAkEolEIpFIJBKJRCKRSCQSiUQikfgTRxJAEolEIpFIJBKJRCKRSCQSiUQikUj8iSMJIIlEIpFIJBKJRCKRSCQSiUQikUgk/sShP+4BvAjvPY8ePWI8HiOE+LiHk0gkEolEIpFI/KETQuD8/Jzbt28jZcpXSiQSiUQikUgkEokflz/SAsijR4+4e/fuxz2MRCKRSCQSiUTiJ879+/e5c+fOxz2MRCKRSCQSiUQikfhjyx9pAWQ8HgPxj7/JZPIxjyaRSCQSiUQikfjDZzqdcvfu3f6zcCKRSCQSiUQikUgkfjz+SAsgne3VZDJJAkgikUgkEolE4k8VyQI2kUgkEolEIpFIJH5/JFPhRCKRSCQSiUQikUgkEolEIpFIJBJ/4kgCSCKRSCQSiUQikUgkEolEIpFIJBKJP3EkASSRSCQSiUQikUgkEolEIpFIJBKJxJ84kgCSSCQSiUQikUgkEh8z//l//p/z6quvUhQFP/MzP8M//+f//OMeUiKRSCQSiUQi8ceeJIAkEolEIpFIJBKJxMfIf//f//d89atf5W/+zb/Jt771Lb74xS/yC7/wCzx79uzjHloikUgkEolEIvHHGhFCCB/3ID6I6XTK1tYWZ2dnTCaTj3s4iUQikUgkEonEHzrpM/CfPn7mZ36Gr3zlK/xn/9l/BoD3nrt37/Lv/Xv/Hv/hf/gffujrvfc8evSI8XiMEOIPe7iJRCKRSCQSicTHSgiB8/Nzbt++jZQvrvHQP6ExJRKJRCKRSCQSiUTiEnVd881vfpNf+ZVf6X8mpeQv/sW/yG/+5m8+9zVVVVFVVf/9w4cP+exnP/uHPtZEIpFIJBKJROKPEvfv3+fOnTsv3CYJIIlEIpFIJBKJRCLxMXF4eIhzjhs3bmz8/MaNG3zve9977mv+zt/5O/ytv/W3rvx8+6UtIBBCwHtPCBdfAxhxNTtOCEHd1JTlgCBAKEVmDFLKi9cHj3MO7zxKCAQBax3OOpyLhgLeB0IQKKmuHMMHjxCCPMsJIWCtJQi1Ua3Sjbcocpxz3Hn1FRbOsVguGA1HfOmLX+SH3/8+o2LIe+++S11XNFVDVVmUUngCL738MpkxPHr8iM9+4QvcvvMSi+D54YP7HB884+TJQySerckW21u73H31dT77hS+iR9uoomR2PuO999/nu9/+Nn/mC1/EO8vTx48Zjyc8efqET7z2Kg/ff5sf/O53uHP9BkoIysGQz37+C6hM8y9++2vcuH6dH7z1FovFEikkWkleunkLKSSnp2c8ffqULMvJ8wxjDCEElFJIqdjamlAMJ3ihmS8WPHj8lE99/nPs3r6BV4rGeT755qfIB9uczSu+//3v01jHnbt3KIcDdvZ2ybKcALgQeLHVQ0D1X8V/Do9vvwYIAoIQSAIyWExwjDPNta0xuQio4Dk5OeD73/sd5vMZL926TV3XPH30mOPDIwC0VoQQWC9MCgGctSAEov3nnYMQcM5h6wbbNNiqITiPAJpqyfl8hlQCrTVZlnE+nTIcjciyDEJgMV+xt3WdV+++ypNHj2hsw2A85rxaMl3OWbqGwWjEjdsvsbW1Te0s/+Kb3+KLX/oyg8GYfDDAh8BknCOwCCFw1jJfzBmPJ0gp8d7z9PETnj56zMmzQ1arFdduv8T+3dtUBJZ1zXvff5vPvvEpMBmN9+R5zmw25dHDB7zyyl2ECEglkAFuDkccPn1GtVwyyAtCY6nnS4zSSODg8IDKVvG8WIdzjqap8SEQvMd7jwgKiW7v9Xg14/0LIcRtpBRcLg5TSvGZz3yO4+NjVtUCZ0RcA9prAiJerDUa6/rfCyEQCFi/tu31DQQa21BXNa+99hqD4YAb12/E+1wGXHAIKZBSxuvfrlHr+9lAcCW7VwDi8oaX3uTlX/vg280ujo3/sDuFjff4PC7OWfua5xmteBAiHtM5h/ceY8zVsb7gWHG//oM3+Ih4H+fK+rV87oB/v8TJcIW6rgkhoHWct/FivvgkS158fj/KdfLB9huLALTPne5lIgBW8Dvf+T85OHyKlILJ1gBjFMtqhhAeKTTCauraYWsHCKRQ+AAhxK8DFYOxZmdni+n0jOGwZDqdUjc1RZGRZxmnJ2dUVd0/A5RUlIOyX2cCxPMj4vlyzqOUZLVc4b1Ha421Fm8tZVHivUMqwWQ8xrqG4WgQ73tgXA6p64r5fMVqFZMnpJD4dk7apmE4KBFCYIxhsZizWlVoffE8N6bE+zh3siLHZBl5noNq70shaJoKEQIahVaG4WBIOR7ipSIoRRCSDIPwgjiTA14Q1wprUVoDAW+b9pzItWst2vUsrn1413/ecN7164hWGiEEi7rGISnygsY2hBDITLYx14VzKKCuK5aLJU8PnjEoB9y5cwdjDFVV4VY1RkpOp2ecn8+obcNoMmZrZ4e8yJEIFOCdpakbZvMZSsVQe11XPHz4iNHWmFc//WkQAini2v/s6VOePXnC1mSbV155BWsbiqJEa0VuMp48esT7779PbnKaumIwGFJ7x2h7wuHxIeVwyGuf+AQmy6ibGpNlWGc5OzxmdnrG7Vu3mS/mPHn8hNdff53xZAzA/XsPOHnyGFvVHE9PKYcjRltjFvWK3WvXuPvaK3ihGO+/hCkHPH76hN9963t4ATvX9vncFz5PUZYQApNBSVmWBOdZzmYopSjy+DslJUYrTg4POT09Zb5aUJQD8iKP66CWjMYj8qJAADoItsdjjp4dcHB0SFaUqNww2pqwWC44Oj5CKcPW9g5ZUZIXBVJqFrMFf/uv/T8Yj8cvvvlJAkgikUgkEolEIpFI/LHiV37lV/jqV7/afz+dTrl79y6DQQmCDfGjF0N8QPirAS1jDLvlLlVV0XhHAKSSvQDivQcXt+1C6kKAlIKgFIjQB3BigCJcCU4potjhg++DXUJuBt2881EIUIpr169TFPGP6NffeIPr+9co84I3Pvkpdra2mZ7PEMDsfMbx0QkIcN5jraWua6qq4jv/57cphwNkWfLgvfdw1QolJKOypMwyc
p3z8N5DHj8+5Obrb7B/+yV+53e/y5e++CX+rV/8f7K9s8OjR4/4xne/w+duXue1T73JnRs3KHPFk/vvMz0/Z29nG4BHjx7x5PAZ1jecnZ1RNw37+/ucnZxQVxUPHzwEAnlesL2zgzGG+Xwegy9KIaTsA4DKaBarhu1r+8yt4+DgkMfPnlE3lsnOLndv3mHv+l3MeJetvX18CBRlgSPgQ0AohZQC2QZaP5AQ+oaggSh2+OD7r0P/c4kkoIJDB0tRaMrxiOp8yjvvvcuDe++wPS65desmz54949H9B3jnyMsCW9UYpdFGR8FjDa8V9LHRgAuBpm6wVRWFD+/JjWE4KQk+cHzs0JlBaYVSCp1lBCkJQqCzDADdeCY720x2tnj05BHLuubg4X0aAjfuvoS2FZO9PXZfuhXHsFqhBgPkIEcMMtQgR4SAGZSo0CCEwFpL4+P76QKTpsjRmUYZibISkymyIo96gZQYYyjyAp8bpI8CjhQwyDO2BgNE8CgpUQRGZU55+ybeOYQPCOuxdYPwHhF
"text/plain": [
"<Figure size 2000x800 with 2 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"from matplotlib import pyplot as plt\n",
"\n",
"fig = plt.figure(figsize=(20,8))\n",
"ax1, ax2 = fig.subplots(1,2)\n",
"\n",
"ax1.set_aspect(1)\n",
"ax2.imshow(Image.open(\"../DATASETS/VIRAT_subset_0102x/VIRAT_S_0102.jpg\"))\n",
"\n",
"for bboxes in filtered_tracks.values():\n",
" traj = np.array([[[0.5 * (det[0]+det[2]), det[3]]] for det in bboxes])\n",
" projected_traj = cv2.perspectiveTransform(traj,H)\n",
" # plt.plot(projected_traj[:,0])\n",
" ax1.plot(projected_traj[:,:,0].reshape(-1), projected_traj[:,:,1].reshape(-1))\n",
" ax2.plot(traj[:,:,0].reshape(-1), traj[:,:,1].reshape(-1))\n",
" \n",
"plt.show()"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"What if the projection is a heatmap of where people are? For this I reuse the above plot and apply blurring effects of pyplot from [their documentation](https://matplotlib.org/stable/gallery/misc/demo_agg_filter.html)\n",
"\n",
"Note that person tracking would not really be necessary for this to work. Detection on a frame-by-frame basis would be sufficient to achieve something similar.\n",
"\n",
"The plots below use two slightly different ways of plotting. The first shows the tracks as transparent lines, the second plots the detections as points. This last way of plotting would not stricktly require the tracking as only individual detections are aggregrated."
]
},
{
"cell_type": "code",
"execution_count": 78,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABkAAAAPBCAYAAACm2GZ9AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9e6wk213ff3/WWlXVl32dPZczZ86Zc/EFXzDYeYyx/RDxg+BgHIkHB6OHKIoCEUoEspHAf5BYIkgoeeSE/AGJAuQ/CFIcIqQYBD8BIuaHrfx+xglODBjwMfY59rnM5cx137u7qtZ6/qhetVfX7j3XPbNn5rxfR/vsPd3VVdXdVdXV61PftUwIIQgAAAAAAAAAAOARYo96BQAAAAAAAAAAAA4bAQgAAAAAAAAAAHjkEIAAAAAAAAAAAIBHDgEIAAAAAAAAAAB45BCAAAAAAAAAAACARw4BCAAAAAAAAAAAeOQQgAAAAAAAAAAAgEdOdtQrcCPee507d05LS0syxhz16gAAAAD3XAhBm5ubOnPmjKzleiUAAAAAuFMPdABy7tw5nT179qhXAwAAALjvXnrpJT355JNHvRoAAAAA8NB6oAOQpaUlSc2Xv+Xl5SNeGwAAAODe29jY0NmzZ9tzYQAAAADAnXmgA5DY7dXy8jIBCAAAAF5T6AIWAAAAAO4OnQoDAAAAAAAAAIBHDgEIAAAAAAAAAAB45BCAAAAAAAAAAACARw4BCAAAAAAAAAAAeOQQgAAAAAAAAAAAgEcOAQgAAAAAAAAAAHjkEIAAAAAAAAAAAIBHDgEIAAAAAAAAAAB45BCAAAAAAAAAAACARw4BCAAAAAAAAAAAeOQQgAAAAAAAAAAAgEcOAQgAAAAAAAAAAHjkEIAAAAAAAAAAAIBHDgEIAAAAAAAAAAB45BCAAAAAAAAAAACAR0521CsAAABw1EIICiFIkowxMsYc8RoBAAAAAIC7RQACAAAgtQFIRAgCAAAAAMDDjQAEAABgKg1BCEAAAAAAAHi4MQYIAABAIoYg3YoQAAAAAADwcCEAAQAAr3ndcT8IPwAAAAAAePgRgAAAAGg2BImDohOEAAAAAADw8CIAAQAAEFUgAAAAAAA8aghAAAAApqgCAQAAAADg0UEAAgAAMEUVCAAAAAAAjw4CEAAAgARVIAAAAAAAPBoIQAAAADq6AQgAAAAAAHj4EIAAAAAkYgUIVSAAAAAAADzc7jgA+eVf/mV98zd/s5aXl7W8vKz3vve9+t3f/d32/tFopA9/+MM6fvy4FhcX9aEPfUgXL148lJUGAAC4H4wxhB8AAAAAADyk7jgAefLJJ/Wv/tW/0uc//3n9yZ/8if7W3/pb+r7v+z79xV/8hSTpJ3/yJ/Xbv/3b+o3f+A19+tOf1rlz5/T93//9h7biAAAA98q8wdAJQgAAAAAAeLiYcIjf5NfW1vRv/s2/0Q/8wA/o5MmT+sQnPqEf+IEfkCR96Utf0lve8hZ99rOf1Xve855bmt/GxoZWVla0vr6u5eXlw1pNAACAmwohyHvfBh/WWllrZ4IR4F7gHBgAAAAADsehjAFS17V+/dd/Xdvb23rve9+rz3/+8yrLUu973/vaad785jfrqaee0mc/+9kD5zMej7WxsTHzAwAAcBSoAgEAAAAA4OF2VwHIn//5n2txcVG9Xk8/+qM/qk9+8pN661vfqgsXLqgoCq2urs5M/9hjj+nChQsHzu/jH/+4VlZW2p+zZ8/ezeoBAADclXRAdMIPAAAAAAAeLncVgLzpTW/SF77wBX3uc5/Tj/3Yj+mHfuiH9Jd/+Zd3PL+PfexjWl9fb39eeumlu1k9AACAu9KtAvHet91iAQAAAACAB1t2Nw8uikJveMMbJEnvfOc79T//5//Uv/23/1Y/+IM/qMlkouvXr89UgVy8eFGnT58+cH69Xk+9Xu9uVgkAAOBQxRAkHQ9EkpxzR7xmAAAAAADgRg5lDJDIe6/xeKx3vvOdyvNcn/rUp9r7nnvuOb344ot673vfe5iLBAAAuOfSbrBiFYj3/qhXCwAAAAAA3MAdV4B87GMf0wc+8AE99dRT2tzc1Cc+8Qn90R/9kX7/939fKysr+pEf+RF99KMf1drampaXl/XjP/7jeu9736v3vOc9h7n+AAAA91TsAiv+jiGItYd6HQkAAAAAADhkdxyAvPrqq/qH//Af6vz581pZWdE3f/M36/d///f1t//235Yk/fzP/7ystfrQhz6k8Xis97///fqlX/qlQ1txAACA+6k7GLr3XsYYghAAAAAAAB5QJjzAo3hubGxoZWVF6+vrWl5ePurVAQAAr2Hee9V1raqqJDVjgMQf4DBxDgwAAAAAh4NLFgEAAG5BrACJFR9xUHTGAgEAAAAA4MFEAAIAAHALYvjRHQsEAAAAAAA8mAhAAAAAblE3BPHet5UgAAAAAADgwUIAAgAAcIu63WDFKhACEAAAAAAAHjwEIAAAALdhXjdYdIUFAAAAAMCDhwAEAADgNllr53aFBQAAAAAAHhwEIAAAALdhXjdY8QcAAAAAADw4CEAAAABuUwxAYghijCEEAQAAAADgAUMAAgAAcJti11exGiQOhE4AAgAAAADAg4MABAAA4A6kFSASXWEBAAAAAPCgIQABAAC4A/PGAomDoROCAAAAAABw9AhAAAAA7lAMQaS9ChACEAAAAAAAHgwEIAAAAHfIWjtTBVLXtbz37W8AAAAAAHB0sqNeAQAAgIeZtbat+khDj3R8EAAAAAAAcP/xzRwAAOAuxAqQ2B1WDEHoCgsAAAAAgKNFAAIAAHAXuoOhS01XWJLoBgsAAAAAgCNEAAIAAHCXYgiSDogeww+qQAAAAAAAOBoEIAAAAHcp7QJL2gtACD8AAAAAADg6BCAAAACHIA1AohiCEIQAAAAAAHD/EYAAAAAcgm43WHVdz3SFBQAAAAAA7i8CEAAAgEMQB0KPAYj3ngoQAAAAAACOEAEIAADAIbLWKoQgY0xb/dHtGgsAAAAAANx7BCAAAACHIK0AiSFI+gMAAAAAAO4vAhAAAIBDko4BYowhAAEAAAAA4AgRgAAAABwi55ycc20Y4r1XXdcMhg4AAAAAwH1GAAIAAHBIYvWHc67tBquqqjYAoRIEAAAAAID7hwAEAADgEMXKD2utrG1Otbz3BCAAAAAAANxnBCAAAACHLIYgsSIkrQAhBAEAAAAA4P4gAAEAADgE3XDDWtsOhC5JVVUxDggAAAAAAPcRAQgAAMAhCCHs6+oqdoUVQpAxRnVdUwECAAAAAMB9kh31CgAAADzsYqiR/k4rQkIIKstSUtMdVto9FgAAAAAAuDeoAAEAADhEafDRDTliBQhVIAAAAAAA3HsEIAAAAHcpBh2xssNa2/47jgXivVdd14QgAAAAAADcJwQgAAAAhyCt9kiDD2l2QPS6rhkMHQAAAACA+4AxQAAAAA5JGoLEKpAsy9pB0CXNHSgdAAAAAAAcPipAAAAADkE6sHns4spa21Z/xEDEe6+qqqgCAQAAAADgHiMAAQAAu
IeMMcqyTM65mSoQxgEBAAAAAODeIgABAAA4JLEKpNutVToeSAw/CEEAAAAAALi3CEAAAAAOUbcbrDQUiffFgdAJQAAAAAAAuHcIQAAAAA5ZdzwQY4ycczOVIHVdq67rthoEAAAAAAAcLgIQAACAQzQv/IhVHmkAEoOPGIJQCQIAAAAAwOEiAAEAALhH0uqOedUf6XggVIEAAAAAAHC4sqNeAQAAgEdNrOyIf0v7K0NCCKrrWtY216NYa9uKEQAAAAAAcPfuuALk4x//uN71rndpaWlJp06d0gc/+EE999xzM9N8x3d8x76BP3/0R3/0rlcaAADgQZUObl5Vlaqqaqs9ImttezvjgAAAAAAAcG/ccQDy6U9/Wh/+8If1x3/8x/qDP/gDlWWp7/7u79b29vbMdP/4H/9jnT9/vv35uZ/7ubteaQAAgAdZDEBiJUgaihhj2i6vvPdtEEIIAgAAAADA4brjLrB+7/d+b+bfv/qrv6pTp07p85//vL7927+9vX04HOr06dN3voYAAAAPkbQ
"text/plain": [
"<Figure size 2000x1200 with 3 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"from matplotlib import gridspec\n",
"import matplotlib.cm as cm\n",
"import matplotlib.transforms as mtransforms\n",
"from matplotlib.colors import LightSource\n",
"from matplotlib.artist import Artist\n",
"\n",
"\n",
"def smooth1d(x, window_len):\n",
" # copied from https://scipy-cookbook.readthedocs.io/items/SignalSmooth.html\n",
" s = np.r_[2*x[0] - x[window_len:1:-1], x, 2*x[-1] - x[-1:-window_len:-1]]\n",
" w = np.hanning(window_len)\n",
" y = np.convolve(w/w.sum(), s, mode='same')\n",
" return y[window_len-1:-window_len+1]\n",
"\n",
"\n",
"def smooth2d(A, sigma=3):\n",
" window_len = max(int(sigma), 3) * 2 + 1\n",
" A = np.apply_along_axis(smooth1d, 0, A, window_len)\n",
" A = np.apply_along_axis(smooth1d, 1, A, window_len)\n",
" return A\n",
"\n",
"\n",
"class BaseFilter:\n",
"\n",
" def get_pad(self, dpi):\n",
" return 0\n",
"\n",
" def process_image(self, padded_src, dpi):\n",
" raise NotImplementedError(\"Should be overridden by subclasses\")\n",
"\n",
" def __call__(self, im, dpi):\n",
" pad = self.get_pad(dpi)\n",
" padded_src = np.pad(im, [(pad, pad), (pad, pad), (0, 0)], \"constant\")\n",
" tgt_image = self.process_image(padded_src, dpi)\n",
" return tgt_image, -pad, -pad\n",
"\n",
"\n",
"\n",
"class GaussianFilter(BaseFilter):\n",
" \"\"\"Simple Gaussian filter.\"\"\"\n",
"\n",
" def __init__(self, sigma, alpha=0.5, color=(0, 0, 0)):\n",
" self.sigma = sigma\n",
" self.alpha = alpha\n",
" self.color = color\n",
"\n",
" def get_pad(self, dpi):\n",
" return int(self.sigma*3 / 72 * dpi)\n",
"\n",
" def process_image(self, padded_src, dpi):\n",
" tgt_image = np.empty_like(padded_src)\n",
" tgt_image[:, :, :3] = self.color\n",
" tgt_image[:, :, 3] = smooth2d(padded_src[:, :, 3] * self.alpha,\n",
" self.sigma / 72 * dpi)\n",
" return tgt_image\n",
"\n",
"gauss = GaussianFilter(2)\n",
"\n",
"fig = plt.figure(figsize=(20,12))\n",
"\n",
"\n",
"# Create 2x2 sub plots\n",
"gs = gridspec.GridSpec(2, 2)\n",
"\n",
"# (ax1, ax2), (ax3, ax4) = fig.subplots(2,2)\n",
"ax1 = fig.add_subplot(gs[0,0])\n",
"ax3 = fig.add_subplot(gs[1,0])\n",
"ax2 = fig.add_subplot(gs[:,1])\n",
"\n",
"ax1.set_aspect(1)\n",
"ax3.set_aspect(1)\n",
"\n",
"# show the image from the dataset on ax2\n",
"ax2.imshow(Image.open(\"../DATASETS/VIRAT_subset_0102x/VIRAT_S_0102.jpg\"))\n",
"\n",
"for bboxes in filtered_tracks.values():\n",
" traj = np.array([[[0.5 * (det[0]+det[2]), det[3]]] for det in bboxes])\n",
" projected_traj = cv2.perspectiveTransform(traj,H)\n",
" # plt.plot(projected_traj[:,0])\n",
" \n",
" # option1: draw the tracks as lines\n",
" line, = ax1.plot(projected_traj[:,:,0].reshape(-1), projected_traj[:,:,1].reshape(-1), color=(0,0,0,0.05))\n",
" line.set_agg_filter(gauss)\n",
" line.set_rasterized(True) # \"to suport mixed-mode renderers\"\n",
"\n",
" # option2: draw the tracks merely as individual detection points (for which no tracking would have been necessary)\n",
" points = ax3.scatter(projected_traj[:,:,0].reshape(-1), projected_traj[:,:,1].reshape(-1), color=(0,0,0,0.01))\n",
" points.set_agg_filter(gauss)\n",
" points.set_rasterized(True) # \"to suport mixed-mode renderers\"\n",
"\n",
" ax2.plot(traj[:,:,0].reshape(-1), traj[:,:,1].reshape(-1))\n",
" \n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"For the quickest way to test trajectory prediciton, export the tracks to the common format (as used by e.g. Social-GAN). A `txt`-file with `frameNR pedestrianID, x, y`"
]
},
{
"cell_type": "code",
"execution_count": 94,
"metadata": {},
"outputs": [],
"source": [
"def convert_to_pedestrian_id(pid: str) -> int:\n",
" return int(pid.replace('_', ''))\n",
"\n",
"# make sure no duplicates happen in the conversion\n",
"assert len(filtered_tracks.keys()) == np.unique([convert_to_pedestrian_id(k) for k in filtered_tracks.keys()]).shape[0]"
]
},
{
"cell_type": "code",
"execution_count": 123,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"19 56.0\n"
]
}
],
"source": [
"from math import ceil\n",
"\n",
"\n",
"nr_videos = max([ b[0][7] for b in filtered_tracks.values()] )\n",
"nr_val = ceil(nr_videos/4)\n",
"nr_train = nr_videos - nr_val\n",
"print(nr_val, nr_train)"
]
},
{
"cell_type": "code",
"execution_count": 125,
"metadata": {},
"outputs": [],
"source": [
"frame_peds_train = []\n",
"frame_peds_val = []\n",
"\n",
"for track_id, bboxes in filtered_tracks.items():\n",
" ped_id = convert_to_pedestrian_id(track_id)\n",
" traj = np.array([[[0.5 * (det[0]+det[2]), det[3]]\n",
" ] for det in bboxes])\n",
" projected_traj = cv2.perspectiveTransform(traj,H)\n",
" # len(projected_traj), len([b[6] for b in bboxes]))\n",
" frame_pedestrians = [[bboxes[i][6] + bboxes[i][7] * 1000, ped_id, point[0][0], point[0][1]] for i, point in enumerate(projected_traj)]\n",
"\n",
" if bboxes[0][7] < nr_train:\n",
" frame_peds_train.extend(frame_pedestrians)\n",
" else:\n",
" frame_peds_val.extend(frame_pedestrians)\n",
"\n",
"frame_peds_train.sort()\n",
"frame_peds_val.sort()"
]
},
{
"cell_type": "code",
"execution_count": 126,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(268320, 79913)"
]
},
"execution_count": 126,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"len(frame_peds_train), len(frame_peds_val)"
]
},
{
"cell_type": "code",
"execution_count": 127,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[[56000.0, 5646342, 9.635322232960501, 5.007763488524792],\n",
" [56000.0, 5646343, 10.242926638190253, 6.041340117572103],\n",
" [56000.0, 5646344, -24.361324332610195, 6.360463121093617],\n",
" [56000.0, 5646345, -36.33278268357405, 6.5426289049817115],\n",
" [56001.0, 5646342, 9.655806533781627, 4.999777317969075],\n",
" [56001.0, 5646343, 10.261907375159826, 6.057640070999422],\n",
" [56001.0, 5646344, -24.382238557678683, 6.385945927025485],\n",
" [56001.0, 5646345, -36.360648356525196, 6.54482638578711],\n",
" [56001.0, 5646346, -37.88799589844155, 5.549882189065968],\n",
" [56002.0, 5646342, 9.732648082414663, 4.998398827649513]]"
]
},
"execution_count": 127,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"frame_peds_val[:10]"
]
},
{
"cell_type": "code",
"execution_count": 128,
"metadata": {},
"outputs": [],
"source": [
"from pathlib import Path\n",
"\n",
"base_path = Path('datasets/VIRAT_subset')\n",
"train_path = base_path / 'train'\n",
"val_path = base_path / 'val'\n",
"\n",
"train_file = train_path / 'VIRAT_train.txt'\n",
"val_file = val_path / 'VIRAT_test.txt'\n",
"\n",
"train_path.mkdir(parents=True, exist_ok=True)\n",
"val_path.mkdir(parents=True, exist_ok=True)"
]
},
{
"cell_type": "code",
"execution_count": 129,
"metadata": {},
"outputs": [],
"source": [
"comment = \"See https://git.rubenvandeven.com/security_vision/trajpred/\"\n",
"np.savetxt(train_file, frame_peds_train, delimiter=' ', fmt='%1.2f')\n",
"np.savetxt(val_file, frame_peds_val, delimiter=' ', fmt='%1.2f')"
]
},
{
"cell_type": "code",
"execution_count": 130,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"average track lenght\n"
]
},
{
"data": {
"text/plain": [
"58.9127051260362"
]
},
"execution_count": 130,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"print('average track lenght')\n",
"sum([len(t) for t in filtered_tracks.values()])/len(filtered_tracks)"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"ename": "NameError",
"evalue": "name 'filtered_tracks' is not defined",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn[1], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[39mfor\u001b[39;00m track_id, bboxes \u001b[39min\u001b[39;00m filtered_tracks\u001b[39m.\u001b[39mitems():\n\u001b[1;32m 2\u001b[0m traj \u001b[39m=\u001b[39m np\u001b[39m.\u001b[39marray([[[\u001b[39m0.5\u001b[39m \u001b[39m*\u001b[39m (det[\u001b[39m0\u001b[39m]\u001b[39m+\u001b[39mdet[\u001b[39m2\u001b[39m]), det[\u001b[39m3\u001b[39m]]\n\u001b[1;32m 3\u001b[0m ] \u001b[39mfor\u001b[39;00m det \u001b[39min\u001b[39;00m bboxes])\n\u001b[1;32m 4\u001b[0m projected_traj \u001b[39m=\u001b[39m cv2\u001b[39m.\u001b[39mperspectiveTransform(traj,H)\n",
"\u001b[0;31mNameError\u001b[0m: name 'filtered_tracks' is not defined"
]
}
],
"source": [
"for track_id, bboxes in filtered_tracks.items():\n",
" traj = np.array([[[0.5 * (det[0]+det[2]), det[3]]\n",
" ] for det in bboxes])\n",
" projected_traj = cv2.perspectiveTransform(traj,H)\n",
" print(projected_traj)\n",
" break"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.2"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "1135f674f58caf91385e41dd32dc418daf761a3c5d4526b1ac3bad0b893c2eb5"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}