{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## Use DeepSORT instead of SORT for tracking\n",
"\n",
"Based on [ZQPei's repository](https://github.com/ZQPei/deep_sort_pytorch), I replace SORT with DeepSort:\n",
"\n",
"> Deep sort is basicly the same with sort but added a CNN model to extract features in image of human part bounded by a detector. [ZQPei](https://github.com/ZQPei/deep_sort_pytorch)\n",
"\n",
"Other additions:\n",
"\n",
"* Use a generator function (a programming construct) for for video analysis and detection per frame.\n",
"* This also allows for caching of intermediate steps"
]
},
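{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal, generic sketch of that generator-plus-pickle-cache pattern (`cached_generator` and `make_items` are illustrative names, not taken from the code below):\n",
"\n",
"```python\n",
"import pickle\n",
"from pathlib import Path\n",
"\n",
"def cached_generator(make_items, cachefile: Path):\n",
"    # replay cached results if present; otherwise compute, yield and store\n",
"    if cachefile.exists():\n",
"        with cachefile.open('rb') as fp:\n",
"            yield from pickle.load(fp)\n",
"        return\n",
"    items = []\n",
"    for item in make_items():\n",
"        items.append(item)\n",
"        yield item\n",
"    # note: the cache is only written once the generator is fully consumed\n",
"    with cachefile.open('wb') as fp:\n",
"        pickle.dump(items, fp)\n",
"\n",
"# first run computes and caches; later runs replay from the pickle\n",
"squares = list(cached_generator(lambda: (i * i for i in range(5)), Path('/tmp/squares.pcl')))\n",
"```"
]
},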
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import cv2\n",
"from pathlib import Path\n",
"import numpy as np\n",
"from PIL import Image\n",
"import torch\n",
"from torchvision.io.video import read_video\n",
"import matplotlib.pyplot as plt\n",
"from torchvision.utils import draw_bounding_boxes\n",
"from torchvision.transforms.functional import to_pil_image\n",
"from torchvision.models.detection import retinanet_resnet50_fpn_v2, RetinaNet_ResNet50_FPN_V2_Weights\n",
"import tempfile "
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"source = Path('../DATASETS/VIRAT_subset_0102x')\n",
"videos = list(source.glob('*.mp4'))\n",
"tmpdir = Path(tempfile.gettempdir()) / 'trajpred'\n",
"tmpdir.mkdir(exist_ok=True)\n"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"device(type='cuda')"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
"device"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Based on code from: https://stackabuse.com/retinanet-object-detection-with-pytorch-and-torchvision/"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"weights = RetinaNet_ResNet50_FPN_V2_Weights.DEFAULT\n",
"model = retinanet_resnet50_fpn_v2(weights=weights, score_thresh=0.35)\n",
"model.to(device)\n",
"# Put the model in inference mode\n",
"model.eval()\n",
"# Get the transforms for the model's weights\n",
"preprocess = weights.transforms().to(device)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"> The score_thresh argument defines the threshold at which an object is detected as an object of a class. Intuitively, it's the confidence threshold, and we won't classify an object to belong to a class if the model is less than 35% confident that it belongs to a class."
]
},
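{
"cell_type": "markdown",
"metadata": {},
"source": [
"Here that threshold is applied inside the model's postprocessing, but the same cut could be made manually on a prediction dict like the one shown in the next cell (a hypothetical snippet, assuming `prediction` holds unthresholded results):\n",
"\n",
"```python\n",
"keep = prediction['scores'] > 0.35\n",
"boxes, labels = prediction['boxes'][keep], prediction['labels'][keep]\n",
"```"
]
},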
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"The result from a single prediction coming from `model(batch)` looks like:\n",
"\n",
"```python\n",
"{'boxes': tensor([[5.7001e+02, 2.5786e+02, 6.3138e+02, 3.6970e+02],\n",
" [5.0109e+02, 2.4508e+02, 5.5308e+02, 3.4852e+02],\n",
" [3.4096e+02, 2.7015e+02, 3.6156e+02, 3.1857e+02],\n",
" [5.0219e-01, 3.7588e+02, 9.7911e+01, 7.2000e+02],\n",
" [3.4096e+02, 2.7015e+02, 3.6156e+02, 3.1857e+02],\n",
" [8.3241e+01, 5.8410e+02, 1.7502e+02, 7.1743e+02]]),\n",
" 'scores': tensor([0.8525, 0.6491, 0.5985, 0.4999, 0.3753, 0.3746]),\n",
" 'labels': tensor([64, 64, 1, 64, 18, 86])}\n",
"```"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/home/ruben/suspicion/trajpred/sort_cfotache.py:36: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.\n",
" def iou(bb_test,bb_gt):\n"
]
}
],
"source": [
"%matplotlib inline\n",
"\n",
"\n",
"import pylab as pl\n",
"from IPython import display\n",
"from utils.timer import Timer\n",
"from sort_cfotache import Sort\n",
"import pickle\n",
"\n",
"def detect_persons(video_path: Path):\n",
2023-08-21 15:59:01 +02:00
" \"\"\"\n",
" returns detections as structure: [[x1,y1,x2,y2,score],...]\n",
" \"\"\"\n",
" video = cv2.VideoCapture(str(video_path))\n",
"\n",
"\n",
" cachefile = tmpdir / f\"detections-{video_path.name}.pcl\"\n",
" if cachefile.exists():\n",
" with cachefile.open('rb') as fp:\n",
" all_detections = pickle.load(fp)\n",
" for detections in all_detections:\n",
" yield detections\n",
" else:\n",
" all_detections = []\n",
" while True:\n",
" ret, frame = video.read()\n",
" \n",
" if not ret:\n",
" # print(\"Can't receive frame (stream end?). Exiting ...\")\n",
" break\n",
"\n",
" t = torch.from_numpy(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))\n",
" # change axes of image loaded image to be compatilbe with torch.io.read_image (which has C,W,H format instead of W,H,C)\n",
" t = t.permute(2, 0, 1)\n",
"\n",
" batch = preprocess(t)[None, :].to(device)\n",
" # no_grad can be used on inference, should be slightly faster\n",
" with torch.no_grad():\n",
" predictions = model(batch)\n",
" prediction = predictions[0] # we feed only one frame at the once\n",
"\n",
" mask = prediction['labels'] == 1 # if we want more than one: np.isin(prediction['labels'], [1,86])\n",
"\n",
" scores = prediction['scores'][mask]\n",
" labels = prediction['labels'][mask]\n",
" boxes = prediction['boxes'][mask]\n",
" \n",
" # TODO: introduce confidence and NMS supression: https://github.com/cfotache/pytorch_objectdetecttrack/blob/master/PyTorch_Object_Tracking.ipynb\n",
" # (which I _think_ we better do after filtering)\n",
" # alternatively look at Soft-NMS https://towardsdatascience.com/non-maximum-suppression-nms-93ce178e177c\n",
"\n",
" # dets - a numpy array of detections in the format [[x1,y1,x2,y2,score],[x1,y1,x2,y2,score],...]\n",
" detections = np.array([np.append(bbox, [score, label]) for bbox, score, label in zip(boxes.cpu(), scores.cpu(), labels.cpu())])\n",
" \n",
" all_detections.append(detections)\n",
" \n",
" yield detections\n",
" \n",
" with cachefile.open('wb') as fp:\n",
" pickle.dump(all_detections, fp)\n",
"\n",
"def track_video(video_path: Path) -> dict:\n",
" mot_tracker = Sort()\n",
" \n",
"\n",
" for detections in detect_persons(video_path):\n",
2023-08-21 15:59:01 +02:00
" # tracks structure: [[x1,y1,x2,y2,score, obj_id],...]\n",
" tracks = mot_tracker.update(detections)\n",
"\n",
" # now convert back to boxes and labels\n",
" # print(tracks)\n",
" boxes = np.array([t[:4] for t in tracks])\n",
" # initialize empty with the necesserary dimensions for drawing_bounding_boxes glitch\n",
" t_boxes = torch.from_numpy(boxes) if len(boxes) else torch.Tensor().new_empty([0, 6])\n",
" labels = [str(int(t[4])) for t in tracks]\n",
" # print(t_boxes, boxes, labels)\n",
"\n",
" for track in tracks:\n",
" yield track\n",
" \n",
"\n",
2023-08-21 15:59:01 +02:00
" # display.clear_output(wait=True)\n"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"def track_videos(video_paths: list[Path]) -> dict:\n",
2023-08-21 15:59:01 +02:00
" \"\"\"\n",
" returns tracked instances as dict with lists:\n",
" {'obj_id': [ [x1, y1, x2, y2, obj_id, obj_class ], ...]}\n",
" \"\"\"\n",
" # collect instances of all videos with unique key\n",
" video_paths = list(video_paths)\n",
" tracked_instances = {}\n",
" timer = Timer()\n",
" for i, p in enumerate(video_paths):\n",
" print(f\"{i}/{len(video_paths)}: {p}\")\n",
"\n",
" cachefile = tmpdir / (p.name + '.pcl')\n",
2023-08-21 15:59:01 +02:00
" if cachefile.exists():\n",
" print('\\tLoad pickle')\n",
" with cachefile.open('rb') as fp:\n",
" new_instances = pickle.load(fp)\n",
" else:\n",
" #continue # to quickly test from cache\n",
" new_instances = {}\n",
" timer.tic()\n",
" for track in track_video(p):\n",
" track_id = f\"{i}_{str(int(track[4]))}\"\n",
" if track_id not in new_instances:\n",
" new_instances[track_id] = []\n",
" new_instances[track_id].append(track)\n",
" with cachefile.open('wb') as fp:\n",
" pickle.dump(new_instances, fp)\n",
" print(\" time for video: \", timer.toc())\n",
" tracked_instances.update(new_instances)\n",
" \n",
" return tracked_instances"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_00_000060_000218.mp4\n",
"\tLoad pickle\n",
"1/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_09_001285_001336.mp4\n",
"\tLoad pickle\n",
"2/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_08_000895_000975.mp4\n",
" time for video: 68.54228949546814\n",
"3/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010205_04_000545_000576.mp4\n",
" time for video: 44.32873034477234\n",
"4/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_04_000929_000954.mp4\n",
" time for video: 33.93052832285563\n",
"5/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_10_000923_000959.mp4\n",
" time for video: 32.10865515470505\n",
"6/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010205_06_000830_000904.mp4\n",
" time for video: 37.853862571716306\n",
"7/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_08_001308_001332.mp4\n",
" time for video: 34.11902721722921\n",
"8/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_09_001484_001510.mp4\n",
" time for video: 31.546590941292898\n",
"9/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_00_000047_000139.mp4\n",
" time for video: 37.09887546300888\n",
"10/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010205_03_000370_000395.mp4\n",
" time for video: 34.8637207614051\n",
"11/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010206_02_000414_000439.mp4\n",
" time for video: 33.25210754871368\n",
"12/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_03_000865_000911.mp4\n",
" time for video: 33.80455418066545\n",
"13/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_09_000857_000886.mp4\n",
" time for video: 32.45200256506602\n",
"14/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_09_001010_001036.mp4\n",
" time for video: 31.583646572553196\n",
"15/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_00_000000_000053.mp4\n",
" time for video: 32.641018697193694\n",
"16/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_05_000499_000527.mp4\n",
" time for video: 32.00097533861796\n",
"17/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_03_000400_000435.mp4\n",
" time for video: 31.541199699044228\n",
"18/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_08_000705_000739.mp4\n",
" time for video: 31.2182135441724\n",
"19/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_01_000712_000752.mp4\n",
" time for video: 31.049288577503628\n",
"20/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_06_000671_000744.mp4\n",
" time for video: 32.444181505002476\n",
"21/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_05_000856_000890.mp4\n",
" time for video: 31.905359435081483\n",
"22/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_06_000620_000760.mp4\n",
" time for video: 36.00819862456549\n",
"23/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_04_000374_000469.mp4\n",
" time for video: 38.11233546517112\n",
"24/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_03_000270_000359.mp4\n",
" time for video: 39.8342572917109\n",
"25/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_04_000646_000754.mp4\n",
" time for video: 42.07712532083193\n",
"26/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010202_00_000001_000033.mp4\n",
" time for video: 41.18428315162659\n",
"27/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_08_000838_000867.mp4\n",
" time for video: 40.349428882965675\n",
"28/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_03_000606_000632.mp4\n",
" time for video: 39.4015819673185\n",
"29/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010205_02_000301_000345.mp4\n",
" time for video: 39.33332359790802\n",
"30/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_05_000515_000593.mp4\n",
" time for video: 40.141791606771534\n",
"31/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010205_01_000207_000288.mp4\n",
" time for video: 41.12426764170329\n",
"32/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_07_000942_000989.mp4\n",
" time for video: 40.93559175152932\n",
"33/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_02_000167_000197.mp4\n",
" time for video: 40.19536356627941\n",
"34/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_05_001013_001038.mp4\n",
" time for video: 39.39239246195013\n",
"35/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010202_06_000784_000873.mp4\n",
" time for video: 40.31660431974075\n",
"36/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_02_000347_000397.mp4\n",
" time for video: 40.229051855632235\n",
"37/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_01_000072_000225.mp4\n",
" time for video: 42.85438562101788\n",
"38/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_02_000349_000398.mp4\n",
" time for video: 42.71105306857341\n",
"39/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010206_01_000124_000206.mp4\n",
" time for video: 43.457573558154856\n",
"40/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010202_02_000161_000189.mp4\n",
" time for video: 42.839277823766075\n",
"41/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010206_03_000546_000580.mp4\n",
" time for video: 42.360220515727995\n",
"42/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_06_000702_000744.mp4\n",
" time for video: 42.07845686121685\n",
"43/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_00_000000_000049.mp4\n",
" time for video: 42.10020278749012\n",
"44/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_01_000125_000152.mp4\n",
" time for video: 41.67217009566551\n",
"45/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_04_000568_000620.mp4\n",
" time for video: 41.65529489517212\n",
"46/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_07_000748_000837.mp4\n",
" time for video: 42.41890318128798\n",
"47/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_07_001195_001260.mp4\n",
" time for video: 42.65501337466033\n",
"48/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_07_000601_000697.mp4\n",
" time for video: 43.47806889452833\n",
"49/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_09_000886_000915.mp4\n",
" time for video: 43.088951567808785\n",
"50/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_10_001372_001395.mp4\n",
" time for video: 42.53280181300883\n",
"51/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_05_000658_000700.mp4\n",
" time for video: 42.25315068721771\n",
"52/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010202_03_000313_000355.mp4\n",
" time for video: 41.96725109979218\n",
"53/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_02_000790_000816.mp4\n",
" time for video: 41.553693734682525\n",
"54/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_04_000457_000511.mp4\n",
" time for video: 41.64384494187697\n",
"55/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010202_01_000055_000147.mp4\n",
" time for video: 42.249081523330126\n",
"56/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_11_001524_001607.mp4\n",
" time for video: 42.495637898011644\n",
"57/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_01_000254_000322.mp4\n",
" time for video: 42.69913638489587\n",
"58/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_05_000591_000631.mp4\n",
" time for video: 42.50969683914854\n",
"59/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_10_000904_000991.mp4\n",
" time for video: 43.049635936473976\n",
"60/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_06_001064_001097.mp4\n",
" time for video: 42.65656978397046\n",
"61/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_02_000150_000180.mp4\n",
" time for video: 42.29242134888967\n",
"62/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010206_04_000720_000767.mp4\n",
" time for video: 42.12751168501182\n",
"63/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_08_000807_000831.mp4\n",
" time for video: 41.69687400325652\n",
"64/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_10_001092_001121.mp4\n",
" time for video: 41.40194404692877\n",
"65/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010206_05_000797_000823.mp4\n",
" time for video: 41.02733028307557\n",
"66/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_06_000550_000600.mp4\n",
" time for video: 41.03235809986408\n",
"67/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_09_000770_000801.mp4\n",
" time for video: 40.803716746243566\n",
"68/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010206_00_000007_000035.mp4\n",
" time for video: 40.51510126910993\n",
"69/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_00_000030_000059.mp4\n",
" time for video: 40.27838978697272\n",
"70/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_03_000201_000232.mp4\n",
" time for video: 39.99749055461607\n",
"71/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_06_000913_000939.mp4\n",
" time for video: 39.69219965934754\n",
"72/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_10_001549_001596.mp4\n",
" time for video: 39.65503925001118\n",
"73/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_07_000775_000869.mp4\n",
" time for video: 40.16896542575624\n",
"74/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_03_000470_000567.mp4\n",
" time for video: 40.67782217835727\n",
"75/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_07_000768_000791.mp4\n",
" time for video: 40.35599481092917\n"
]
},
{
"data": {
"text/plain": [
"5952"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"tracked_instances = track_videos(videos)\n",
"len(tracked_instances)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## Project / Homography\n",
"\n",
"Now that all trajectories are captured (for a single video), these can then be projected onto a flat surface by [homography](https://en.wikipedia.org/wiki/Homography_(computer_vision)). The necessary $H$ matrix is already provided by VIRAT in the [homographies folder](https://data.kitware.com/#folder/56f581c88d777f753209c9d2) of their online data repository."
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"\n",
"homography = list(source.glob('*img2world.txt'))[0]\n",
"H = np.loadtxt(homography, delimiter=',')\n",
"\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"The homography matrix helps to transform points from image space to a flat world plane. The `README_homography.txt` from VIRAT describes:\n",
"\n",
"> Roughly estimated 3-by-3 homographies are included for convenience. \n",
"> Each homography H provides a mapping from image coordinate to scene-dependent world coordinate.\n",
"> \n",
"> [xw,yw,zw]' = H*[xi,yi,1]'\n",
"> \n",
"> xi: horizontal axis on image with left top corner as origin, increases right.\n",
"> yi: vertical axis on image with left top corner as origin, increases downward.\n",
"> \n",
"> xw/zw: world x coordinate\n",
"> yw/zw: world y coordiante"
]
},
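{
"cell_type": "markdown",
"metadata": {},
"source": [
"To make the division by `zw` explicit, a small sanity check (the pixel coordinate is an arbitrary example, not from the dataset):\n",
"\n",
"```python\n",
"p_img = np.array([640.0, 360.0])                      # (xi, yi) in pixels\n",
"xw, yw, zw = H @ np.array([p_img[0], p_img[1], 1.0])  # [xw,yw,zw]' = H*[xi,yi,1]'\n",
"p_world = np.array([xw / zw, yw / zw])                # normalize by zw\n",
"\n",
"# cv2.perspectiveTransform performs the same normalization internally;\n",
"# it expects input of shape (N, 1, 2), as used for the trajectories below\n",
"p_world_cv = cv2.perspectiveTransform(p_img.reshape(1, 1, 2), H)[0, 0]\n",
"assert np.allclose(p_world, p_world_cv)\n",
"```"
]
},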
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"(1200, 900)\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABLAAAAOECAIAAAA+D1+tAAEAAElEQVR4nOy915Mk2XUeftOU97799LiddVgYklgq9CPBEKkHvTD0V+pJEdIzgyGSEgCBIMxizezMjutpb8p7l5m/hw/36FSamu7ZXWDBPt9DR3d1VubNa443hud5SiAQCAQCgUAgEAgEtw/mH3sAAoFAIBAIBAKBQCD440AUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAgEAoFAIBAIBAKBQCC4TTC2t7c9z1NKeZ6HX1zXvby8qFZr9CH9yzRDPIr4F76olIrFYoZheJ4Xj8cNw7Asy7ZtfNHzvBcvnt+7d9/zPMdxPM9zXRc3j8fj+NB13eVyid8ty3IcBx/i6bjbYrEIDmO5XNq2HY/H4/G4UspxnOVy6TiOYRjBix3HSafT4/F4sVhUq9VMJjOfz03TNAxjOp1Wq9XFYnF6erq1tbW5ufnLX/4ym812u93pdFqv103T7HQ6SqlUKpVIJPDinueNRqN4PH737t2Tk5NYLHbnzp1qtZrP50ulUjKZTKVSsVis3+8/f/788ePHh4eHrusWCgXTNIvF4v7+/jvvvFOr1RaLRb/fN00znU6Xy+XxeNzr9a6urv7t3/6t2+3+6Ec/Ojs7SyaT9+/fn0wmtm3HYrHz8/NkMnnnzp3ZbPbzn//89PS0XC6Xy+V2u10qlebz+c7OztbW1uHhYbVa/e1vfzudTn/84x+XSqUnT54cHR31er1sNptKpUzTdF03l8ttbW0Vi0XDMD799FOl1Hw+Xy6XhmE4jjOdTpVShmGYpul53mKxsCyrWq3W63WlVKVScRzn8vLy9PR0MBjUarX/+B//Yzqdxt0mk0mtVsvn861W6/Ly0rZt13Vns5nneYZhDIfDdDpdKpUqlUo2mzVNE0vveV7orosC7oY9iaU3DAP7h3/If/cfCcOo1WrFYhHva5pmPB6PxWJYi5OTk+Pj48FgkEgkGo2GaZqDwaDVao3HY8dxaOOlUqnQXUePUOw0TSYT7G06JpiZ2Ww2nU7pWzga0+l0uVzGYjHcMJlM0uzFYjHXdS3LsixruVzOZrNMJmNZVr1ej8fjR0dH4/EYk5zL5QzD6Pf7SqnhcHh0dJRMJpPJZDqdzuVymUymWq12Op3PP//8448/zuVy0+k0nU4nEonRaJRMJjOZDM2qZVlKqcViMZlM8Dh63/l8fnV1dXV1hXe5vLzEyAuFwp07dzDIQqHQarV+97vfpdPp9957D7uC5iqfz1uWdXBwoJQqlUqmac5mM9AT13Vt28ZaP3nyZD6fz+fz2Wy2v78/mUySyWSn04nFYvv7+7PZ7OXLl/P5HKSGNgN+4XsD+00xmkZrZBgG7RZOKvGnbdvB5Y7H4/1+/969ex988MHnn
3/e6/VM0zRNMxaLYZdyLJfL0P2M+eRX4miEXuy6Lgbpui6Oz3K5/NGPftTpdDKZzMbGhmma8/nctm28Pr8tnhJ6Z5oNH7AJgwg9Vpir4J3XXAyCzA9L6EhC73xT8H37xpvz5cNMgmeBHfgQ+oJR+EbeJRTYYNg/+IRTwj8Yol4Q21UphTOiovfGTRH6gm+9KHzSQg+LYRiz2Qy8FQdNKQUZw3Xd4XA4m80syyIphY4qpzme5y2Xy+Vy6bouZgOSSTqdfvjw4XK5PDw8TCQS+Xw+lUqdnp46jpNIJNLptGmai8XCcZxerwchCo+wNDKZTCKRSKVSy+VyOBy6rptMJpU+X6AYSimIUoPBAH/idDiOA45Pu4jkN9M0cZ9YLAYJ0LZty7KSySROh+M4s9kMd3BdNxaL4TVjsZhlWeCtGxsb+G8ikcBRGgwGo9FI6c2AVyDpEYME1cKflmXl83maW6UUeDfxIMdxbNsmAlgqlTB4fi4gfmDO8bJgzZhGPB1LYxgG3hoDIL7AB8A/XywW2N6
"text/plain": [
"<PIL.PngImagePlugin.PngImageFile image mode=RGB size=1200x900>"
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"print(Image.open(\"../DATASETS/VIRAT_subset_0102x/VIRAT_0102_homography_img2world.png\").size)\n",
"Image.open(\"../DATASETS/VIRAT_subset_0102x/VIRAT_0102_homography_img2world.png\")\n"
]
},
{
"cell_type": "code",
2023-08-21 15:59:01 +02:00
"execution_count": 41,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABkAAAAHZCAYAAADewveiAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9yY8sWZrdCf7uJINONr7Rn4/h4RGRETmQkZHJJIhmVxerWehGNdhAo7noBcFNr5KbRC+YGxJccdEbLshN/wfsVW8IsFiVIEhUMTNyiGRUZszh05uf2bNZJxG5Qy+uiOhg5s89cvKMiHsAe/ZMVVREVOTKFdVzvnM+EUIIJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQk/AxBft47kJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQ8BeNJIAkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJPzMIQkgCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQk/c0gCSEJCQkJCQkJCQkJCQkJCQkJCQkJCQkLCzxySAJKQkJCQkJCQkJCQkJCQkJCQkJCQkJCQ8DOHJIAkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJPzMIQkgCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQk/c0gCSEJCQkJCQkJCQkJCQkJCQkJCQkJCQkLCzxySAJKQkJCQkJCQkJCQkJCQkJCQkJCQkJCQ8DOHJIAkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJPzMIQkgCQkJCQkJCQkJCQkJnzP+zb/5N7z11lsURcGv//qv8/u///uf9y4lJCQkJCQkJCQk/NQjCSAJCQkJCQkJCQkJCQmfI/7tv/23/NZv/Rb//J//c771rW/xy7/8y/z9v//3OTo6+rx3LSEhISEhISEhIeGnGiKEED7vnUhISEhISEhISEhISPh5xa//+q/zjW98g3/9r/81AN57Xn/9df7JP/kn/NN/+k8/571LSEhISEhISEhI+OmF/rx3ICEhISEhISEhISEh4ecVdV3zR3/0R/z2b/92/5iUkr/39/4ev/u7v3vja6qqoqqq/m/vPaenpxwcHCCE+Evf54SEhISEhISEhITPEyEErq6uuH//PlK+OuQqCSAJCQkJCQkJCQkJCQmfE16+fIlzjjt37mw8fufOHb7//e/f+Jp/+S//Jf/iX/yLv4rdS0hISEhISEhISPhri0ePHvHgwYNXLpMEkISEhISEhISEhISEhJ8i/PZv/za/9Vu/1f99cXHBG2+8wd13boMAHzwhhP7H+wA+gPfX1mWMpixKqrqi8Z4AZMYgpWxf63He4ZzHO4cSAilo/47rDiEgUIDot7kNIcTGOpF6w63S2AYlFXmRs7+/z2gyYRkCd+7d5dbBIUWWc3l5ye5khz/8/T9AALPpjLPTcxDgvOfg1i2897x8eYzODF//tV9DFgW/960/xNUVoakYFTmDQcl4tE/jQZqcO2+9w8G9e3z3+9/nl772ixwcHrCzu8uz58/5d//u3/HlL3+ZUhse3L7N6fEzfvc//UeEtezv7pLnBTt7+7w4Ocb5JaPRkCdPnrAz2eHy/Bzb1BhpgECe5wQExhhmszlaK5RSaB2PxWQyYTDZZbpsGO3s8Oz5EUIqbPDUjWW8u8ev/cZvcP+d96iEpmksIQTyMscR8CEglUJIQRASwavcQKFvCBraH4/v/9+dwbiegAoOjWWca27tjKmmlzx5+BFPH33Ezrhkf3eX4+Njnj1+gvceAbiqwRiD1grn3MbWffAQuu0EfGNp6oa6rrBVQ/AerTTDsiD4wNnZGVeLKao9ZpkxnJ2dMRgOGQwGACxmS1679wZvvvY6P/zBD5jN5kyXCxoCtx/cY2Frxvv73HktkiSzasmf/m/f5au/9IuU5ZBiMMSHwO6oQAaLEAJnLbP5jPF4gpQS7z0vnj3nxdOnnB0dUy2X3Lr/GoevP6AWUNU1H/7gx/zCu1/G5wbXHovZ1RVnJ8e889abiOBRUqII7A8HuLrBO4cIAWE9trYI7xEBqroCJVZnpH3MO48PgeA91gZc4+O16jzeOxaLRXttxsebpsH7zXOglOTBG28wvbxiNp8hMk1QEC/fcOO1HLoxFSAQEDeMMyElgUBjG+qq5o033qAsS+7cuYMPcZQ10iOFQEiJFILtKSOI1RiMKwW55W4TW68RiE91wHnvCD7E7cp2+RtC4dfXE9rtvwpCbG77xqR5TzxeQuCcw3uPMWZzO5+yrbje6/P4Twrv49zd7ffNx+3Pvx1CuPH41nVNCAGtdXxPAviUcyd59fH9LOfJB9svLALgfXv/ihBAaODb//WPOT19iVKSclCgDThXo5TANg5NRl03OOvjYEUgpUIq084TDcVAcni4z9npCVorprMp1taYTFPkBdPpnOViibWW4XBEnmcAVFUNQJaZdj6X1FWFDwGtFMvlEu88Simss9SLJWVZEoKjLEt2dnZYLucYo6jriuFwSKY13nls47DWY208DkoZggsIAc42SCnRxuCco65rMmPi/RrIsyEeQV3XCKXIy4K79+8hleLNt99iNBohCJRZTqYNOLCNpfYOi8AJgRNAFQjO45ylcRbX3t98CCitqauKxWy2Oh8huly9dzjrsK7B+4BRCiUVAM47pJBYZzHaYLIM6z1X1ZLMZFhnsdZS5OXGMJMBvLNIqZhOp5yenLColrz77rsMBkOmV5eIxlEWJVJKTs/OefTkEYPBkN39PbIyRwqBloLcZAQf4ucPYyiKgsePHnNxecn9B/c5eO01pJRIKVFCMru64tHHDxmPRty/cw/rLJnJKIoc21geffQRdd1wenJCruN91JQl2aDg9PyMO3fu8tY7bzOvliyWS4qywDnHj7/7A7y1/MJXfgFrGx4/fsJ4PGZnZ8JkMuHs7IwnH33E5dk5jWsoRiOGkzFXsynvvPcuJ2enHJ+c482QbFCyrGps8OSDgv07t/mbX/86Os8YDYfs7exgjMFbRzVfIIQgON/O6YG6WrKczajqimVVEQTs7OwyW8wRUqCNIS/jeBnkBZNyyPxqyrMXzxFaUU5G5GXJdHrFbDYnL0qKwQClDUVZ0jjP/HLG//v/9v9gPB6/+uInCSAJCQkJCQkJCQkJCQmfGw4PD1FK8eLFi43HX7x4wd27d298TZ7n5Hl+7XGTGRBsiR+tGOI9wW6R0N6jtY6ksotEuxACqeRKrGgfE5HjoWOzYtKARMhI6hltAIm1lqZpNggqKSWmJVVCCJGgR2wQXlJKsjxDSskXvvAFGuf59vd/wOHtuywXNX/4e3/AeDwmCMGzF0fs7+1zcnqKQBJ8JE6msxnGGIrhEE/g4ePHDEYjcilZeo9RGoVgfjlnObPcvvc6e/u3ODs6QQTNwWgXLRVvvPEmTfD44yO+/Cu/xL279zh9+ozf+Z3f4fXX7jKajGlmcy6vrhDTOZO9fWbTGV947w3Ozk4JwbNcLiAEpJBoFY9lVVWUgyF7+/t84d13e/dPlmXs7u7inOO7P3yfX/zGr2G9j/tQWWxVI5CIy
tJMFzx58hSze8CDBw9wwYMULOuaxjmysmgPq/wUMjj0pzO0i/ngV8IH8XGBQuCRwaNDQ/A1Rycvefzh+6jgefsLX8DVMz5++DEnxy8x2lBkGb6x5INOSPNIuUle+iAJviPxA65pcFUN3lNmGSbLGA9H7OxMqKuaqq64quYIpdF5Rl6W5HVNVpYUgyFlWbK7B4cHtzGDEl0WiKZGOMWtWwd84cvv8b0f/RCnFGJQ4LwneI8zCllkqEGGLjM8AVMW6FYAsdbSeEdeFr0AYoocYzTaSKxVZEZRFFkrDkYirMgzrBYEqTFSMjCSUSa5s7fL2dFLmmpOIHB8eYZuRQThAyIEjNIYpQjekxkJ7bVICARgsLODgF4ACUFAEAghQUQhQCoJrfjpg+/O6MY5cNZSlgPmu2PqpqHG4YLvhZNOTFkhbqebV3y7Dh+21us9gYDEE5zEaEmmJblR+CBxEpCRfJdCImQnoayNkZt0iU8TIRAbESgibmbjZZ3w0AmyQohr27kmZnyWbScB5GZ8ggCCFJ+
"text/plain": [
"<Figure size 2000x800 with 2 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"from matplotlib import pyplot as plt\n",
"\n",
"fig = plt.figure(figsize=(20,8))\n",
"ax1, ax2 = fig.subplots(1,2)\n",
"\n",
"ax1.set_aspect(1)\n",
"ax2.imshow(Image.open(\"../DATASETS/VIRAT_subset_0102x/VIRAT_S_0102.jpg\"))\n",
"\n",
"for bboxes in tracked_instances.values():\n",
"    # use the bottom-center of each bounding box as the person's ground anchor point\n",
"    traj = np.array([[[0.5 * (det[0]+det[2]), det[3]]] for det in bboxes])\n",
"    projected_traj = cv2.perspectiveTransform(traj, H)\n",
"    ax1.plot(projected_traj[:,:,0].reshape(-1), projected_traj[:,:,1].reshape(-1))\n",
"    ax2.plot(traj[:,:,0].reshape(-1), traj[:,:,1].reshape(-1))\n",
"\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"One of the things that stands out from these plots in the detections at the edges of the image. In particular the bottom edge (on the right-hand side in the projected image) is visible. As the 'anchor' of the person detection is follows the detection bounding box, which is no longer moving, but growing or shrinking at the edge.\n",
"\n",
"Let's apply a simple filter to ditch the detections close to the edge."
]
},
{
"cell_type": "code",
"execution_count": 47,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"removed 2.87% of bounding boxes\n"
]
}
],
"source": [
"def filter_edges(tracked_instances):\n",
" filtered_tracks = {}\n",
" for track_id in tracked_instances:\n",
" bboxes = tracked_instances[track_id]\n",
" track = list(filter(lambda bbox: bbox[1] < 710 and bbox[3] < 710, bboxes))\n",
" if len(track):\n",
" filtered_tracks[track_id] = track\n",
" return filtered_tracks\n",
"\n",
"filtered_tracks = filter_edges(tracked_instances)\n",
"\n",
"# validate it works:\n",
"bbox_count = sum([len(t) for t in tracked_instances.values()])\n",
"bbox_count_filtered = sum([len(t) for t in filtered_tracks.values()])\n",
"\n",
"print(f\"removed {((bbox_count-bbox_count_filtered)/bbox_count)*100:.2f}% of bounding boxes\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Below we plot the filtered trajectories"
]
},
{
"cell_type": "code",
"execution_count": 48,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABkAAAAHJCAYAAADdFPU5AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9Wawt2ZnfB/7WFMOeznjHvDcnJpOzSJnFGuRCy1ZLXbaG9oBG15sLevNDuQHTD+16kawGBPnRBmwXINgQDBi23DJgQIDcctnlggxLRYoiq2gWq5gkc7rzvWfeZ08RsYZ+WBFx9j7n5s0kq4pZw/oRl3mG2BFrR6xYsc/3/77/J0IIgUQikUgkEolEIpFIJBKJRCKRSCQSiT9ByI97AIlEIpFIJBKJRCKRSCQSiUQikUgkEn/QJAEkkUgkEolEIpFIJBKJRCKRSCQSicSfOJIAkkgkEolEIpFIJBKJRCKRSCQSiUTiTxxJAEkkEolEIpFIJBKJRCKRSCQSiUQi8SeOJIAkEolEIpFIJBKJRCKRSCQSiUQikfgTRxJAEolEIpFIJBKJRCKRSCQSiUQikUj8iSMJIIlEIpFIJBKJRCKRSCQSiUQikUgk/sShP+4BvAjvPY8ePWI8HiOE+LiHk0gkEolEIpFI/KETQuD8/Jzbt28jZcpXSiQSiUQikUgkEokflz/SAsijR4+4e/fuxz2MRCKRSCQSiUTiJ879+/e5c+fOxz2MRCKRSCQSiUQikfhjyx9pAWQ8HgPxj7/JZPIxjyaRSCQSiUQikfjDZzqdcvfu3f6zcCKRSCQSiUQikUgkfjz+SAsgne3VZDJJAkgikUgkEolE4k8VyQI2kUgkEolEIpFIJH5/JFPhRCKRSCQSiUQikUgkEolEIpFIJBJ/4kgCSCKRSCQSiUQikUgkEolEIpFIJBKJP3EkASSRSCQSiUQikUgkEolEIpFIJBKJxJ84kgCSSCQSiUQikUgkEh8z//l//p/z6quvUhQFP/MzP8M//+f//OMeUiKRSCQSiUQi8ceeJIAkEolEIpFIJBKJxMfIf//f//d89atf5W/+zb/Jt771Lb74xS/yC7/wCzx79uzjHloikUgkEolEIvHHGhFCCB/3ID6I6XTK1tYWZ2dnTCaTj3s4iUQikUgkEonEHzrpM/CfPn7mZ36Gr3zlK/xn/9l/BoD3nrt37/Lv/Xv/Hv/hf/gffujrvfc8evSI8XiMEOIPe7iJRCKRSCQSicTHSgiB8/Nzbt++jZQvrvHQP6ExJRKJRCKRSCQSiUTiEnVd881vfpNf+ZVf6X8mpeQv/sW/yG/+5m8+9zVVVVFVVf/9w4cP+exnP/uHPtZEIpFIJBKJROKPEvfv3+fOnTsv3CYJIIlEIpFIJBKJRCLxMXF4eIhzjhs3bmz8/MaNG3zve9977mv+zt/5O/ytv/W3rvx8+6UtIBBCwHtPCBdfAxhxNTtOCEHd1JTlgCBAKEVmDFLKi9cHj3MO7zxKCAQBax3OOpyLhgLeB0IQKKmuHMMHjxCCPMsJIWCtJQi1Ua3Sjbcocpxz3Hn1FRbOsVguGA1HfOmLX+SH3/8+o2LIe+++S11XNFVDVVmUUngCL738MpkxPHr8iM9+4QvcvvMSi+D54YP7HB884+TJQySerckW21u73H31dT77hS+iR9uoomR2PuO999/nu9/+Nn/mC1/EO8vTx48Zjyc8efqET7z2Kg/ff5sf/O53uHP9BkoIysGQz37+C6hM8y9++2vcuH6dH7z1FovFEikkWkleunkLKSSnp2c8ffqULMvJ8wxjDCEElFJIqdjamlAMJ3ihmS8WPHj8lE99/nPs3r6BV4rGeT755qfIB9uczSu+//3v01jHnbt3KIcDdvZ2ybKcALgQeLHVQ0D1X8V/Do9vvwYIAoIQSAIyWExwjDPNta0xuQio4Dk5OeD73/sd5vMZL926TV3XPH30mOPDIwC0VoQQWC9MCgGctSAEov3nnYMQcM5h6wbbNNiqITiPAJpqyfl8hlQCrTVZlnE+nTIcjciyDEJgMV+xt3WdV+++ypNHj2hsw2A85rxaMl3OWbqGwWjEjdsvsbW1Te0s/+Kb3+KLX/oyg8GYfDDAh8BknCOwCCFw1jJfzBmPJ0gp8d7z9PETnj56zMmzQ1arFdduv8T+3dtUBJZ1zXvff5vPvvEpMBmN9+R5zmw25dHDB7zyyl2ECEglkAFuDkccPn1GtVwyyAtCY6nnS4zSSODg8IDKVvG8WIdzjqap8SEQvMd7jwgKiW7v9Xg14/0LIcRtpBRcLg5TSvGZz3yO4+NjVtUCZ0RcA9prAiJerDUa6/rfCyEQCFi/tu31DQQa21BXNa+99hqD4YAb12/E+1wGXHAIKZBSxuvfrlHr+9lAcCW7VwDi8oaX3uTlX/vg280ujo3/sDuFjff4PC7OWfua5xmteBAiHtM5h/ceY8zVsb7gWHG//oM3+Ih4H+fK+rV87oB/v8TJcIW6rgkhoHWct/FivvgkS158fj/KdfLB9huLALTPne5lIgBW8Dvf+T85OHyKlILJ1gBjFMtqhhAeKTTCauraYWsHCKRQ+AAhxK8DFYOxZmdni+n0jOGwZDqdUjc1RZGRZxmnJ2dUVd0/A5RUlIOyX2cCxPMj4vlyzqOUZLVc4b1Ha421Fm8tZVHivUMqwWQ8xrqG4WgQ73tgXA6p64r5fMVqFZMnpJD4dk7apmE4KBFCYIxhsZizWlVoffE8N6bE+zh3siLHZBl5noNq70shaJoKEQIahVaG4WBIOR7ipSIoRRCSDIPwgjiTA14Q1wprUVoDAW+b9pzItWst2vUsrn1413/ecN7164hWGiEEi7rGISnygsY2hBDITLYx14VzKKCuK5aLJU8PnjEoB9y5cwdjDFVV4VY1RkpOp2ecn8+obcNoMmZrZ4e8yJEIFOCdpakbZvMZSsVQe11XPHz4iNHWmFc//WkQAini2v/s6VOePXnC1mSbV155BWsbiqJEa0VuMp48esT7779PbnKaumIwGFJ7x2h7wuHxIeVwyGuf+AQmy6ibGpNlWGc5OzxmdnrG7Vu3mS/mPHn8hNdff53xZAzA/XsPOHnyGFvVHE9PKYcjRltjFvWK3WvXuPvaK3ihGO+/hCkHPH76hN9963t4ATvX9vncFz5PUZYQApNBSVmWBOdZzmYopSjy+DslJUYrTg4POT09Zb5aUJQD8iKP66CWjMYj8qJAADoItsdjjp4dcHB0SFaUqNww2pqwWC44Oj5CKcPW9g5ZUZIXBVJqFrMFf/uv/T8Yj8cvvvlJAkgikUgkEolEIpFI/LHiV37lV/jqV7/afz+dTrl79y6DQQmCDfGjF0N8QPirAS1jDLvlLlVV0XhHAKSSvQDivQcXt+1C6kKAlIKgFIjQB3BigCJcCU4potjhg++DXUJuBt2881EIUIpr169TFPGP6NffeIPr+9co84I3Pvkpdra2mZ7PEMDsfMbx0QkIcN5jraWua6qq4jv/57cphwNkWfLgvfdw1QolJKOypMwyc
p3z8N5DHj8+5Obrb7B/+yV+53e/y5e++CX+rV/8f7K9s8OjR4/4xne/w+duXue1T73JnRs3KHPFk/vvMz0/Z29nG4BHjx7x5PAZ1jecnZ1RNw37+/ucnZxQVxUPHzwEAnlesL2zgzGG+Xwegy9KIaTsA4DKaBarhu1r+8yt4+DgkMfPnlE3lsnOLndv3mHv+l3MeJetvX18CBRlgSPgQ0AohZQC2QZaP5AQ+oaggSh2+OD7r0P/c4kkoIJDB0tRaMrxiOp8yjvvvcuDe++wPS65desmz54949H9B3jnyMsCW9UYpdFGR8FjDa8V9LHRgAuBpm6wVRWFD+/JjWE4KQk+cHzs0JlBaYVSCp1lBCkJQqCzDADdeCY720x2tnj05BHLuubg4X0aAjfuvoS2FZO9PXZfuhXHsFqhBgPkIEcMMtQgR4SAGZSo0CCEwFpL4+P76QKTpsjRmUYZibISkymyIo96gZQYYyjyAp8bpI8CjhQwyDO2BgNE8CgpUQRGZU55+ybeOYQPCOuxdYPwHhF
"text/plain": [
"<Figure size 2000x800 with 2 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"from matplotlib import pyplot as plt\n",
"\n",
"fig = plt.figure(figsize=(20,8))\n",
"ax1, ax2 = fig.subplots(1,2)\n",
"\n",
"ax1.set_aspect(1)\n",
"ax2.imshow(Image.open(\"../DATASETS/VIRAT_subset_0102x/VIRAT_S_0102.jpg\"))\n",
"\n",
"for bboxes in filtered_tracks.values():\n",
" traj = np.array([[[0.5 * (det[0]+det[2]), det[3]]] for det in bboxes])\n",
" projected_traj = cv2.perspectiveTransform(traj,H)\n",
" # plt.plot(projected_traj[:,0])\n",
" ax1.plot(projected_traj[:,:,0].reshape(-1), projected_traj[:,:,1].reshape(-1))\n",
" ax2.plot(traj[:,:,0].reshape(-1), traj[:,:,1].reshape(-1))\n",
" \n",
"plt.show()"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"What if the projection is a heatmap of where people are? For this I reuse the above plot and apply blurring effects of pyplot from [their documentation](https://matplotlib.org/stable/gallery/misc/demo_agg_filter.html)\n",
"\n",
"Note that person tracking would not really be necessary for this to work. Detection on a frame-by-frame basis would be sufficient to achieve something similar.\n",
"\n",
"The plots below use two slightly different ways of plotting. The first shows the tracks as transparent lines, the second plots the detections as points. This last way of plotting would not stricktly require the tracking as only individual detections are aggregrated."
]
},
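{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a sketch of an alternative that skips the agg filters used below: bin the projected detection points into a 2D histogram and blur that instead (this assumes `scipy` is available; the bin count and sigma are arbitrary choices):\n",
"\n",
"```python\n",
"from scipy.ndimage import gaussian_filter\n",
"\n",
"points = np.concatenate([\n",
"    cv2.perspectiveTransform(\n",
"        np.array([[[0.5 * (det[0] + det[2]), det[3]]] for det in bboxes]), H\n",
"    ).reshape(-1, 2)\n",
"    for bboxes in filtered_tracks.values()\n",
"])\n",
"heat, xedges, yedges = np.histogram2d(points[:, 0], points[:, 1], bins=200)\n",
"plt.imshow(gaussian_filter(heat.T, sigma=3), origin='lower',\n",
"           extent=(xedges[0], xedges[-1], yedges[0], yedges[-1]))\n",
"plt.show()\n",
"```"
]
},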
{
"cell_type": "code",
"execution_count": 49,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABkAAAAPBCAYAAACm2GZ9AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9e6wk213ff3/WWlXVl32dPZczZ86Zc/EFXzDYeYyx/RDxg+BgHIkHB6OHKIoCEUoEspHAf5BYIkgoeeSE/AGJAuQ/CFIcIqQYBD8BIuaHrfx+xglODBjwMfY59rnM5cx137u7qtZ6/qhetVfX7j3XPbNn5rxfR/vsPd3VVdXdVdXV61PftUwIIQgAAAAAAAAAAOARYo96BQAAAAAAAAAAAA4bAQgAAAAAAAAAAHjkEIAAAAAAAAAAAIBHDgEIAAAAAAAAAAB45BCAAAAAAAAAAACARw4BCAAAAAAAAAAAeOQQgAAAAAAAAAAAgEdOdtQrcCPee507d05LS0syxhz16gAAAAD3XAhBm5ubOnPmjKzleiUAAAAAuFMPdABy7tw5nT179qhXAwAAALjvXnrpJT355JNHvRoAAAAA8NB6oAOQpaUlSc2Xv+Xl5SNeGwAAAODe29jY0NmzZ9tzYQAAAADAnXmgA5DY7dXy8jIBCAAAAF5T6AIWAAAAAO4OnQoDAAAAAAAAAIBHDgEIAAAAAAAAAAB45BCAAAAAAAAAAACARw4BCAAAAAAAAAAAeOQQgAAAAAAAAAAAgEcOAQgAAAAAAAAAAHjkEIAAAAAAAAAAAIBHDgEIAAAAAAAAAAB45BCAAAAAAAAAAACARw4BCAAAAAAAAAAAeOQQgAAAAAAAAAAAgEcOAQgAAAAAAAAAAHjkEIAAAAAAAAAAAIBHDgEIAAAAAAAAAAB45BCAAAAAAAAAAACAR0521CsAAABw1EIICiFIkowxMsYc8RoBAAAAAIC7RQACAAAgtQFIRAgCAAAAAMDDjQAEAABgKg1BCEAAAAAAAHi4MQYIAABAIoYg3YoQAAAAAADwcCEAAQAAr3ndcT8IPwAAAAAAePgRgAAAAGg2BImDohOEAAAAAADw8CIAAQAAEFUgAAAAAAA8aghAAAAApqgCAQAAAADg0UEAAgAAMEUVCAAAAAAAjw4CEAAAgARVIAAAAAAAPBoIQAAAADq6AQgAAAAAAHj4EIAAAAAkYgUIVSAAAAAAADzc7jgA+eVf/mV98zd/s5aXl7W8vKz3vve9+t3f/d32/tFopA9/+MM6fvy4FhcX9aEPfUgXL148lJUGAAC4H4wxhB8AAAAAADyk7jgAefLJJ/Wv/tW/0uc//3n9yZ/8if7W3/pb+r7v+z79xV/8hSTpJ3/yJ/Xbv/3b+o3f+A19+tOf1rlz5/T93//9h7biAAAA98q8wdAJQgAAAAAAeLiYcIjf5NfW1vRv/s2/0Q/8wA/o5MmT+sQnPqEf+IEfkCR96Utf0lve8hZ99rOf1Xve855bmt/GxoZWVla0vr6u5eXlw1pNAACAmwohyHvfBh/WWllrZ4IR4F7gHBgAAAAADsehjAFS17V+/dd/Xdvb23rve9+rz3/+8yrLUu973/vaad785jfrqaee0mc/+9kD5zMej7WxsTHzAwAAcBSoAgEAAAAA4OF2VwHIn//5n2txcVG9Xk8/+qM/qk9+8pN661vfqgsXLqgoCq2urs5M/9hjj+nChQsHzu/jH/+4VlZW2p+zZ8/ezeoBAADclXRAdMIPAAAAAAAeLncVgLzpTW/SF77wBX3uc5/Tj/3Yj+mHfuiH9Jd/+Zd3PL+PfexjWl9fb39eeumlu1k9AACAu9KtAvHet91iAQAAAACAB1t2Nw8uikJveMMbJEnvfOc79T//5//Uv/23/1Y/+IM/qMlkouvXr89UgVy8eFGnT58+cH69Xk+9Xu9uVgkAAOBQxRAkHQ9EkpxzR7xmAAAAAADgRg5lDJDIe6/xeKx3vvOdyvNcn/rUp9r7nnvuOb344ot673vfe5iLBAAAuOfSbrBiFYj3/qhXCwAAAAAA3MAdV4B87GMf0wc+8AE99dRT2tzc1Cc+8Qn90R/9kX7/939fKysr+pEf+RF99KMf1drampaXl/XjP/7jeu9736v3vOc9h7n+AAAA91TsAiv+jiGItYd6HQkAAAAAADhkdxyAvPrqq/qH//Af6vz581pZWdE3f/M36/d///f1t//235Yk/fzP/7ystfrQhz6k8Xis97///fqlX/qlQ1txAACA+6k7GLr3XsYYghAAAAAAAB5QJjzAo3hubGxoZWVF6+vrWl5ePurVAQAAr2Hee9V1raqqJDVjgMQf4DBxDgwAAAAAh4NLFgEAAG5BrACJFR9xUHTGAgEAAAAA4MFEAAIAAHALYvjRHQsEAAAAAAA8mAhAAAAAblE3BPHet5UgAAAAAADgwUIAAgAAcIu63WDFKhACEAAAAAAAHjwEIAAAALdhXjdYdIUFAAAAAMCDhwAEAADgNllr53aFBQAAAAAAHhwEIAAAALdhXjdY8QcAAAAAADw4CEAAAABuUwxAYghijCEEAQAAAADgAUMAAgAAcJti11exGiQOhE4AAgAAAADAg4MABAAA4A6kFSASXWEBAAAAAPCgIQABAAC4A/PGAomDoROCAAAAAABw9AhAAAAA7lAMQaS9ChACEAAAAAAAHgwEIAAAAHfIWjtTBVLXtbz37W8AAAAAAHB0sqNeAQAAgIeZtbat+khDj3R8EAAAAAAAcP/xzRwAAOAuxAqQ2B1WDEHoCgsAAAAAgKNFAAIAAHAXuoOhS01XWJLoBgsAAAAAgCNEAAIAAHCXYgiSDogeww+qQAAAAAAAOBoEIAAAAHcp7QJL2gtACD8AAAAAADg6BCAAAACHIA1AohiCEIQAAAAAAHD/EYAAAAAcgm43WHVdz3SFBQAAAAAA7i8CEAAAgEMQB0KPAYj3ngoQAAAAAACOEAEIAADAIbLWKoQgY0xb/dHtGgsAAAAAANx7BCAAAACHIK0AiSFI+gMAAAAAAO4vAhAAAIBDko4BYowhAAEAAAAA4AgRgAAAABwi55ycc20Y4r1XXdcMhg4AAAAAwH1GAAIAAHBIYvWHc67tBquqqjYAoRIEAAAAAID7hwAEAADgEMXKD2utrG1Otbz3BCAAAAAAANxnBCAAAACHLIYgsSIkrQAhBAEAAAAA4P4gAAEAADgE3XDDWtsOhC5JVVUxDggAAAAAAPcRAQgAAMAhCCHs6+oqdoUVQpAxRnVdUwECAAAAAMB9kh31CgAAADzsYqiR/k4rQkIIKstSUtMdVto9FgAAAAAAuDeoAAEAADhEafDRDTliBQhVIAAAAAAA3HsEIAAAAHcpBh2xssNa2/47jgXivVdd14QgAAAAAADcJwQgAAAAhyCt9kiDD2l2QPS6rhkMHQAAAACA+4AxQAAAAA5JGoLEKpAsy9pB0CXNHSgdAAAAAAAcPipAAAAADkE6sHns4spa21Z/xEDEe6+qqqgCAQAAAADgHiMAAQAAu
IeMMcqyTM65mSoQxgEBAAAAAODeIgABAAA4JLEKpNutVToeSAw/CEEAAAAAALi3CEAAAAAOUbcbrDQUiffFgdAJQAAAAAAAuHcIQAAAAA5ZdzwQY4ycczOVIHVdq67rthoEAAAAAAAcLgIQAACAQzQv/IhVHmkAEoOPGIJQCQIAAAAAwOEiAAEAALhH0uqOedUf6XggVIEAAAAAAHC4sqNeAQAAgEdNrOyIf0v7K0NCCKrrWtY216NYa9uKEQAAAAAAcPfuuALk4x//uN71rndpaWlJp06d0gc/+EE999xzM9N8x3d8x76BP3/0R3/0rlcaAADgQZUObl5Vlaqqaqs9ImttezvjgAAAAAAAcG/ccQDy6U9/Wh/+8If1x3/8x/qDP/gDlWWp7/7u79b29vbMdP/4H/9jnT9/vv35uZ/7ubteaQAAgAdZDEBiJUgaihhj2i6vvPdtEEIIAgAAAADA4brjLrB+7/d+b+bfv/qrv6pTp07p85//vL7927+9vX04HOr06dN3voYAAAAPkbQ
"text/plain": [
"<Figure size 2000x1200 with 3 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"from matplotlib import gridspec\n",
"import matplotlib.cm as cm\n",
"import matplotlib.transforms as mtransforms\n",
"from matplotlib.colors import LightSource\n",
"from matplotlib.artist import Artist\n",
"\n",
"\n",
"def smooth1d(x, window_len):\n",
" # copied from https://scipy-cookbook.readthedocs.io/items/SignalSmooth.html\n",
" s = np.r_[2*x[0] - x[window_len:1:-1], x, 2*x[-1] - x[-1:-window_len:-1]]\n",
" w = np.hanning(window_len)\n",
" y = np.convolve(w/w.sum(), s, mode='same')\n",
" return y[window_len-1:-window_len+1]\n",
"\n",
"\n",
"def smooth2d(A, sigma=3):\n",
" window_len = max(int(sigma), 3) * 2 + 1\n",
" A = np.apply_along_axis(smooth1d, 0, A, window_len)\n",
" A = np.apply_along_axis(smooth1d, 1, A, window_len)\n",
" return A\n",
"\n",
"\n",
"class BaseFilter:\n",
"\n",
" def get_pad(self, dpi):\n",
" return 0\n",
"\n",
" def process_image(self, padded_src, dpi):\n",
" raise NotImplementedError(\"Should be overridden by subclasses\")\n",
"\n",
" def __call__(self, im, dpi):\n",
" pad = self.get_pad(dpi)\n",
" padded_src = np.pad(im, [(pad, pad), (pad, pad), (0, 0)], \"constant\")\n",
" tgt_image = self.process_image(padded_src, dpi)\n",
" return tgt_image, -pad, -pad\n",
"\n",
"\n",
"\n",
"class GaussianFilter(BaseFilter):\n",
" \"\"\"Simple Gaussian filter.\"\"\"\n",
"\n",
" def __init__(self, sigma, alpha=0.5, color=(0, 0, 0)):\n",
" self.sigma = sigma\n",
" self.alpha = alpha\n",
" self.color = color\n",
"\n",
" def get_pad(self, dpi):\n",
" return int(self.sigma*3 / 72 * dpi)\n",
"\n",
" def process_image(self, padded_src, dpi):\n",
" tgt_image = np.empty_like(padded_src)\n",
" tgt_image[:, :, :3] = self.color\n",
" tgt_image[:, :, 3] = smooth2d(padded_src[:, :, 3] * self.alpha,\n",
" self.sigma / 72 * dpi)\n",
" return tgt_image\n",
"\n",
"gauss = GaussianFilter(2)\n",
"\n",
"fig = plt.figure(figsize=(20,12))\n",
"\n",
"\n",
"# Create 2x2 sub plots\n",
"gs = gridspec.GridSpec(2, 2)\n",
"\n",
"# (ax1, ax2), (ax3, ax4) = fig.subplots(2,2)\n",
"ax1 = fig.add_subplot(gs[0,0])\n",
"ax3 = fig.add_subplot(gs[1,0])\n",
"ax2 = fig.add_subplot(gs[:,1])\n",
"\n",
"ax1.set_aspect(1)\n",
"ax3.set_aspect(1)\n",
"\n",
"# show the image from the dataset on ax2\n",
"ax2.imshow(Image.open(\"../DATASETS/VIRAT_subset_0102x/VIRAT_S_0102.jpg\"))\n",
"\n",
"for bboxes in filtered_tracks.values():\n",
"    traj = np.array([[[0.5 * (det[0]+det[2]), det[3]]] for det in bboxes])\n",
"    projected_traj = cv2.perspectiveTransform(traj, H)\n",
"\n",
"    # option 1: draw the tracks as transparent lines\n",
"    line, = ax1.plot(projected_traj[:,:,0].reshape(-1), projected_traj[:,:,1].reshape(-1), color=(0,0,0,0.05))\n",
"    line.set_agg_filter(gauss)\n",
"    line.set_rasterized(True) # \"to support mixed-mode renderers\"\n",
"\n",
"    # option 2: draw merely the individual detection points (for which no tracking would have been necessary)\n",
"    points = ax3.scatter(projected_traj[:,:,0].reshape(-1), projected_traj[:,:,1].reshape(-1), color=(0,0,0,0.01))\n",
"    points.set_agg_filter(gauss)\n",
"    points.set_rasterized(True) # \"to support mixed-mode renderers\"\n",
"\n",
"    ax2.plot(traj[:,:,0].reshape(-1), traj[:,:,1].reshape(-1))\n",
"\n",
"plt.show()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.2"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "1135f674f58caf91385e41dd32dc418daf761a3c5d4526b1ac3bad0b893c2eb5"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}