{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## Use SORT tracking over a video collection and project results\n",
"\n",
"This notebook uses a SORT implementation originally by Alex Bewley, as adapted by [Chris Fotache](https://github.com/cfotache/pytorch_objectdetecttrack/blob/master/README.md). For an example implementation, see [his notebook](https://github.com/cfotache/pytorch_objectdetecttrack/blob/master/PyTorch_Object_Tracking.ipynb).\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import cv2\n",
"from pathlib import Path\n",
"import numpy as np\n",
"from PIL import Image\n",
"import torch\n",
"from torchvision.io.video import read_video\n",
"import matplotlib.pyplot as plt\n",
"from torchvision.utils import draw_bounding_boxes\n",
"from torchvision.transforms.functional import to_pil_image\n",
"from torchvision.models.detection import retinanet_resnet50_fpn_v2, RetinaNet_ResNet50_FPN_V2_Weights\n",
"import tempfile "
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"source = Path('../DATASETS/VIRAT_subset_0102x')\n",
"videos = list(source.glob('*.mp4'))\n",
"tmpdir = Path(tempfile.gettempdir()) / 'trajpred'\n",
"tmpdir.mkdir(exist_ok=True)\n"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"device(type='cuda')"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
"device"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Based on code from: https://stackabuse.com/retinanet-object-detection-with-pytorch-and-torchvision/"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"weights = RetinaNet_ResNet50_FPN_V2_Weights.DEFAULT\n",
"model = retinanet_resnet50_fpn_v2(weights=weights, score_thresh=0.35)\n",
"model.to(device)\n",
"# Put the model in inference mode\n",
"model.eval()\n",
"# Get the transforms for the model's weights\n",
"preprocess = weights.transforms().to(device)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"> The score_thresh argument defines the threshold at which an object is detected as an object of a class. Intuitively, it's the confidence threshold, and we won't classify an object to belong to a class if the model is less than 35% confident that it belongs to a class."
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"The result from a single prediction coming from `model(batch)` looks like:\n",
"\n",
"```python\n",
"{'boxes': tensor([[5.7001e+02, 2.5786e+02, 6.3138e+02, 3.6970e+02],\n",
" [5.0109e+02, 2.4508e+02, 5.5308e+02, 3.4852e+02],\n",
" [3.4096e+02, 2.7015e+02, 3.6156e+02, 3.1857e+02],\n",
" [5.0219e-01, 3.7588e+02, 9.7911e+01, 7.2000e+02],\n",
" [3.4096e+02, 2.7015e+02, 3.6156e+02, 3.1857e+02],\n",
" [8.3241e+01, 5.8410e+02, 1.7502e+02, 7.1743e+02]]),\n",
" 'scores': tensor([0.8525, 0.6491, 0.5985, 0.4999, 0.3753, 0.3746]),\n",
" 'labels': tensor([64, 64, 1, 64, 18, 86])}\n",
"```"
]
},
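{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"To reproduce such a prediction dict on this footage, we can run the detector on a single frame. (A minimal sanity check, not part of the original pipeline; it assumes the first video in `videos` opens and yields at least one frame.)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# sanity check: run the detector on the first frame of the first video\n",
"cap = cv2.VideoCapture(str(videos[0]))\n",
"ret, frame = cap.read()\n",
"cap.release()\n",
"assert ret, 'could not read a frame'\n",
"\n",
"# OpenCV loads BGR H,W,C; torchvision expects RGB C,H,W\n",
"t = torch.from_numpy(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)).permute(2, 0, 1)\n",
"with torch.no_grad():\n",
"    prediction = model(preprocess(t)[None, :].to(device))[0]\n",
"prediction"
]
},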
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/home/ruben/suspicion/trajpred/sort_cfotache.py:36: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.\n",
" def iou(bb_test,bb_gt):\n"
]
}
],
"source": [
"%matplotlib inline\n",
"\n",
"import pylab as pl\n",
"from IPython import display\n",
"from utils.timer import Timer\n",
"from sort_cfotache import Sort\n",
"import pickle\n",
"\n",
"\n",
"def track_video(video_path: Path):\n",
"    \"\"\"Detect people per frame and yield SORT tracks as [x1, y1, x2, y2, track_id] rows.\"\"\"\n",
"    mot_tracker = Sort()\n",
"\n",
"    video = cv2.VideoCapture(str(video_path))\n",
"\n",
"    while True:\n",
"        ret, frame = video.read()\n",
"\n",
"        if not ret:\n",
"            # stream end reached\n",
"            break\n",
"\n",
"        t = torch.from_numpy(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))\n",
"        # change axes of the loaded image to be compatible with torchvision.io.read_image\n",
"        # (which uses C,H,W format instead of H,W,C)\n",
"        t = t.permute(2, 0, 1)\n",
"\n",
"        batch = preprocess(t)[None, :].to(device)\n",
"        # no_grad can be used for inference, should be slightly faster\n",
"        with torch.no_grad():\n",
"            predictions = model(batch)\n",
"        prediction = predictions[0]  # we feed only one frame at a time\n",
"\n",
"        mask = prediction['labels'] == 1  # persons only; if we want more classes: np.isin(prediction['labels'], [1, 86])\n",
"\n",
"        scores = prediction['scores'][mask]\n",
"        labels = prediction['labels'][mask]\n",
"        boxes = prediction['boxes'][mask]\n",
"\n",
"        # TODO: introduce confidence and NMS suppression: https://github.com/cfotache/pytorch_objectdetecttrack/blob/master/PyTorch_Object_Tracking.ipynb\n",
"        # (which I _think_ we better do after filtering)\n",
"        # alternatively look at Soft-NMS https://towardsdatascience.com/non-maximum-suppression-nms-93ce178e177c\n",
"\n",
"        # dets - a numpy array of detections in the format [[x1,y1,x2,y2,score],[x1,y1,x2,y2,score],...]\n",
"        detections = np.array([np.append(bbox, [score, label]) for bbox, score, label in zip(boxes.cpu(), scores.cpu(), labels.cpu())])\n",
"        tracks = mot_tracker.update(detections)\n",
"\n",
"        for track in tracks:\n",
"            yield track\n",
"\n",
"    video.release()"
]
},
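{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Each yielded track is a SORT output row of the form `[x1, y1, x2, y2, track_id]`, which is why `track[4]` is used as the identifier below. A quick peek at the first few rows (a small check added for illustration; it keeps reading frames of the first video until three tracks have appeared):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from itertools import islice\n",
"\n",
"# inspect the [x1, y1, x2, y2, track_id] rows coming out of the generator\n",
"for track in islice(track_video(videos[0]), 3):\n",
"    print(track)"
]
},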
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"def track_videos(video_paths: list[Path]) -> dict:\n",
"    # collect instances of all videos with unique key\n",
"    video_paths = list(video_paths)\n",
"    tracked_instances = {}\n",
"    timer = Timer()\n",
"    for i, p in enumerate(video_paths):\n",
"        print(f\"{i}/{len(video_paths)}: {p}\")\n",
"\n",
"        cachefile = tmpdir / (p.name + '.pcl')\n",
"        if cachefile.exists():\n",
"            print('\\tLoad pickle')\n",
"            with cachefile.open('rb') as fp:\n",
"                new_instances = pickle.load(fp)\n",
"        else:\n",
"            # continue  # to quickly test from cache\n",
"            new_instances = {}\n",
"            timer.tic()\n",
"            for track in track_video(p):\n",
"                track_id = f\"{i}_{str(int(track[4]))}\"\n",
"                if track_id not in new_instances:\n",
"                    new_instances[track_id] = []\n",
"                new_instances[track_id].append(track)\n",
"            with cachefile.open('wb') as fp:\n",
"                pickle.dump(new_instances, fp)\n",
"            print(\" time for video: \", timer.toc())\n",
"        tracked_instances.update(new_instances)\n",
"\n",
"    return tracked_instances"
]
},
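{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"The per-video pickles make re-runs cheap, but the cache is never invalidated automatically. If the detector or tracker settings change, clear it by hand (a small helper, not part of the original pipeline):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# uncomment to drop all cached tracking results and force re-tracking\n",
"# for f in tmpdir.glob('*.pcl'):\n",
"#     f.unlink()"
]
},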
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_00_000060_000218.mp4\n",
"\tLoad pickle\n",
"1/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_09_001285_001336.mp4\n",
"\tLoad pickle\n",
"2/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_08_000895_000975.mp4\n",
"\tLoad pickle\n",
"3/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010205_04_000545_000576.mp4\n",
"\tLoad pickle\n",
"4/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_04_000929_000954.mp4\n",
"\tLoad pickle\n",
"5/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_10_000923_000959.mp4\n",
"\tLoad pickle\n",
"6/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010205_06_000830_000904.mp4\n",
"\tLoad pickle\n",
"7/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_08_001308_001332.mp4\n",
"\tLoad pickle\n",
"8/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_09_001484_001510.mp4\n",
"\tLoad pickle\n",
"9/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_00_000047_000139.mp4\n",
"\tLoad pickle\n",
"10/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010205_03_000370_000395.mp4\n",
"\tLoad pickle\n",
"11/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010206_02_000414_000439.mp4\n",
"\tLoad pickle\n",
"12/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_03_000865_000911.mp4\n",
"\tLoad pickle\n",
"13/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_09_000857_000886.mp4\n",
"\tLoad pickle\n",
"14/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_09_001010_001036.mp4\n",
"\tLoad pickle\n",
"15/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_00_000000_000053.mp4\n",
"\tLoad pickle\n",
"16/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_05_000499_000527.mp4\n",
"\tLoad pickle\n",
"17/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_03_000400_000435.mp4\n",
"\tLoad pickle\n",
"18/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_08_000705_000739.mp4\n",
"\tLoad pickle\n",
"19/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_01_000712_000752.mp4\n",
"\tLoad pickle\n",
"20/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_06_000671_000744.mp4\n",
"\tLoad pickle\n",
"21/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_05_000856_000890.mp4\n",
"\tLoad pickle\n",
"22/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_06_000620_000760.mp4\n",
"\tLoad pickle\n",
"23/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_04_000374_000469.mp4\n",
"\tLoad pickle\n",
"24/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_03_000270_000359.mp4\n",
" time for video: 76.47440218925476\n",
"25/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_04_000646_000754.mp4\n",
" time for video: 84.25160992145538\n",
"26/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010202_00_000001_000033.mp4\n",
" time for video: 62.6530507405599\n",
"27/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_08_000838_000867.mp4\n",
" time for video: 51.79480332136154\n",
"28/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_03_000606_000632.mp4\n",
" time for video: 44.33411946296692\n",
"29/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010205_02_000301_000345.mp4\n",
" time for video: 43.13727605342865\n",
"30/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_05_000515_000593.mp4\n",
" time for video: 45.87533599989755\n",
"31/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010205_01_000207_000288.mp4\n",
" time for video: 48.75653102993965\n",
"32/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_07_000942_000989.mp4\n",
" time for video: 47.200045612123276\n",
"33/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_02_000167_000197.mp4\n",
" time for video: 44.192170357704164\n",
"34/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_05_001013_001038.mp4\n",
" time for video: 41.411013711582534\n",
"35/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010202_06_000784_000873.mp4\n",
" time for video: 43.82025545835495\n",
"36/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_02_000347_000397.mp4\n",
" time for video: 43.30084228515625\n",
"37/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_01_000072_000225.mp4\n",
" time for video: 49.74186216081892\n",
"38/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_02_000349_000398.mp4\n",
" time for video: 48.90173446337382\n",
"39/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010206_01_000124_000206.mp4\n",
" time for video: 50.23752883076668\n",
"40/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010202_02_000161_000189.mp4\n",
" time for video: 48.407087087631226\n",
"41/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010206_03_000546_000580.mp4\n",
" time for video: 47.02203807565901\n",
"42/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_06_000702_000744.mp4\n",
" time for video: 46.14274973618357\n",
"43/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_00_000000_000049.mp4\n",
" time for video: 45.96134517192841\n",
"44/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_01_000125_000152.mp4\n",
" time for video: 44.89160401480539\n",
"45/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_04_000568_000620.mp4\n",
" time for video: 44.69535830887881\n",
"46/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_07_000748_000837.mp4\n",
" time for video: 46.03415718285934\n",
"47/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_07_001195_001260.mp4\n",
" time for video: 46.31604018807411\n",
"48/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_07_000601_000697.mp4\n",
" time for video: 47.671081705093385\n",
"49/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_09_000886_000915.mp4\n",
" time for video: 46.78188127737779\n",
"50/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_10_001372_001395.mp4\n",
" time for video: 45.63375103032148\n",
"51/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_05_000658_000700.mp4\n",
" time for video: 45.015538905348095\n",
"52/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010202_03_000313_000355.mp4\n",
" time for video: 44.412532288452674\n",
"53/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_02_000790_000816.mp4\n",
" time for video: 43.610562173525494\n",
"54/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_04_000457_000511.mp4\n",
" time for video: 43.67899775505066\n",
"55/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010202_01_000055_000147.mp4\n",
" time for video: 44.6242256090045\n",
"56/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_11_001524_001607.mp4\n",
" time for video: 44.944525480270386\n",
"57/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_01_000254_000322.mp4\n",
" time for video: 45.192202406771045\n",
"58/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_05_000591_000631.mp4\n",
" time for video: 44.800596714019775\n",
"59/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_10_000904_000991.mp4\n",
" time for video: 45.58240665329827\n",
"60/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_06_001064_001097.mp4\n",
" time for video: 44.88245353827605\n",
"61/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_02_000150_000180.mp4\n",
" time for video: 44.24212918156072\n",
"62/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010206_04_000720_000767.mp4\n",
" time for video: 43.92639535512679\n",
"63/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_08_000807_000831.mp4\n",
" time for video: 43.20958806276322\n",
"64/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_10_001092_001121.mp4\n",
" time for video: 42.71179392279648\n",
"65/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010206_05_000797_000823.mp4\n",
" time for video: 42.104674679892405\n",
"66/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_06_000550_000600.mp4\n",
" time for video: 42.07939862650494\n",
"67/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010201_09_000770_000801.mp4\n",
" time for video: 41.708983686837286\n",
"68/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010206_00_000007_000035.mp4\n",
" time for video: 41.25493524339464\n",
"69/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_00_000030_000059.mp4\n",
" time for video: 40.88401547203893\n",
"70/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_03_000201_000232.mp4\n",
" time for video: 40.45448579686753\n",
"71/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010204_06_000913_000939.mp4\n",
" time for video: 39.99556113779545\n",
"72/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010207_10_001549_001596.mp4\n",
" time for video: 39.92924086415038\n",
"73/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010203_07_000775_000869.mp4\n",
" time for video: 40.64969404220581\n",
"74/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010200_03_000470_000567.mp4\n",
" time for video: 41.35395318854089\n",
"75/76: ../DATASETS/VIRAT_subset_0102x/VIRAT_S_010208_07_000768_000791.mp4\n",
" time for video: 40.88396269083023\n"
]
},
{
"data": {
"text/plain": [
"5952"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"tracked_instances = track_videos(videos)\n",
"len(tracked_instances)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## Project / Homography\n",
"\n",
"Now that all trajectories are captured for the video collection, they can be projected onto a flat surface by [homography](https://en.wikipedia.org/wiki/Homography_(computer_vision)). The necessary $H$ matrix is already provided by VIRAT in the [homographies folder](https://data.kitware.com/#folder/56f581c88d777f753209c9d2) of their online data repository."
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"# all clips in this subset come from the same scene, so a single img2world homography suffices\n",
"homography = list(source.glob('*img2world.txt'))[0]\n",
"H = np.loadtxt(homography, delimiter=',')"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"The homography matrix helps to transform points from image space to a flat world plane. The `README_homography.txt` from VIRAT describes:\n",
"\n",
"> Roughly estimated 3-by-3 homographies are included for convenience. \n",
"> Each homography H provides a mapping from image coordinate to scene-dependent world coordinate.\n",
"> \n",
"> [xw,yw,zw]' = H*[xi,yi,1]'\n",
"> \n",
"> xi: horizontal axis on image with left top corner as origin, increases right.\n",
"> yi: vertical axis on image with left top corner as origin, increases downward.\n",
"> \n",
"> xw/zw: world x coordinate\n",
"> yw/zw: world y coordiante"
]
},
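{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"The mapping can be verified by hand: multiply `H` with the homogeneous image point and divide by the resulting `zw`, which should match `cv2.perspectiveTransform`. (A minimal sketch with an arbitrary example point.)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# apply [xw,yw,zw]' = H*[xi,yi,1]' manually and compare with OpenCV\n",
"xi, yi = 100.0, 100.0  # arbitrary image point\n",
"xw, yw, zw = H @ np.array([xi, yi, 1.0])\n",
"print('manual:', xw / zw, yw / zw)\n",
"print('opencv:', cv2.perspectiveTransform(np.array([[[xi, yi]]]), H))"
]
},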
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"(1200, 900)\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABLAAAAOECAIAAAA+D1+tAAEAAElEQVR4nOy915Mk2XUeftOU97799LiddVgYklgq9CPBEKkHvTD0V+pJEdIzgyGSEgCBIMxizezMjutpb8p7l5m/hw/36FSamu7ZXWDBPt9DR3d1VubNa443hud5SiAQCAQCgUAgEAgEtw/mH3sAAoFAIBAIBAKBQCD440AUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAhuKUQhFAgEAoFAIBAIBIJbClEIBQKBQCAQCAQCgeCWQhRCgUAgEAgEAoFAILilEIVQIBAIBAKBQCAQCG4pRCEUCAQCgUAgEAgEglsKUQgFAoFAIBAIBAKB4JZCFEKBQCAQCAQCgUAguKUQhVAgEAgEAoFAIBAIbilEIRQIBAKBQCAQCASCWwpRCAUCgUAgEAgEAoHglkIUQoFAIBAIBAKBQCC4pRCFUCAQCAQCgUAgEAgEAoFAIBAIBAKBQCC4TTC2t7c9z1NKeZ6HX1zXvby8qFZr9CH9yzRDPIr4F76olIrFYoZheJ4Xj8cNw7Asy7ZtfNHzvBcvnt+7d9/zPMdxPM9zXRc3j8fj+NB13eVyid8ty3IcBx/i6bjbYrEIDmO5XNq2HY/H4/G4UspxnOVy6TiOYRjBix3HSafT4/F4sVhUq9VMJjOfz03TNAxjOp1Wq9XFYnF6erq1tbW5ufnLX/4ym812u93pdFqv103T7HQ6SqlUKpVIJPDinueNRqN4PH737t2Tk5NYLHbnzp1qtZrP50ulUjKZTKVSsVis3+8/f/788ePHh4eHrusWCgXTNIvF4v7+/jvvvFOr1RaLRb/fN00znU6Xy+XxeNzr9a6urv7t3/6t2+3+6Ec/Ojs7SyaT9+/fn0wmtm3HYrHz8/NkMnnnzp3ZbPbzn//89PS0XC6Xy+V2u10qlebz+c7OztbW1uHhYbVa/e1vfzudTn/84x+XSqUnT54cHR31er1sNptKpUzTdF03l8ttbW0Vi0XDMD799FOl1Hw+Xy6XhmE4jjOdTpVShmGYpul53mKxsCyrWq3W63WlVKVScRzn8vLy9PR0MBjUarX/+B//Yzqdxt0mk0mtVsvn861W6/Ly0rZt13Vns5nneYZhDIfDdDpdKpUqlUo2mzVNE0vveV7orosC7oY9iaU3DAP7h3/If/cfCcOo1WrFYhHva5pmPB6PxWJYi5OTk+Pj48FgkEgkGo2GaZqDwaDVao3HY8dxaOOlUqnQXUePUOw0TSYT7G06JpiZ2Ww2nU7pWzga0+l0uVzGYjHcMJlM0uzFYjHXdS3LsixruVzOZrNMJmNZVr1ej8fjR0dH4/EYk5zL5QzD6Pf7SqnhcHh0dJRMJpPJZDqdzuVymUymWq12Op3PP//8448/zuVy0+k0nU4nEonRaJRMJjOZDM2qZVlKqcViMZlM8Dh63/l8fnV1dXV1hXe5vLzEyAuFwp07dzDIQqHQarV+97vfpdPp9957D7uC5iqfz1uWdXBwoJQqlUqmac5mM9AT13Vt28ZaP3nyZD6fz+fz2Wy2v78/mUySyWSn04nFYvv7+7PZ7OXLl/P5HKSGNgN+4XsD+00xmkZrZBgG7RZOKvGnbdvB5Y7H4/1+/969ex988MHnn
3/e6/VM0zRNMxaLYZdyLJfL0P2M+eRX4miEXuy6Lgbpui6Oz3K5/NGPftTpdDKZzMbGhmma8/nctm28Pr8tnhJ6Z5oNH7AJgwg9Vpir4J3XXAyCzA9L6EhC73xT8H37xpvz5cNMgmeBHfgQ+oJR+EbeJRTYYNg/+IRTwj8Yol4Q21UphTOiovfGTRH6gm+9KHzSQg+LYRiz2Qy8FQdNKQUZw3Xd4XA4m80syyIphY4qpzme5y2Xy+Vy6bouZgOSSTqdfvjw4XK5PDw8TCQS+Xw+lUqdnp46jpNIJNLptGmai8XCcZxerwchCo+wNDKZTCKRSKVSy+VyOBy6rptMJpU+X6AYSimIUoPBAH/idDiOA45Pu4jkN9M0cZ9YLAYJ0LZty7KSySROh+M4s9kMd3BdNxaL4TVjsZhlWeCtGxsb+G8ikcBRGgwGo9FI6c2AVyDpEYME1cKflmXl83maW6UUeDfxIMdxbNsmAlgqlTB4fi4gfmDO8bJgzZhGPB1LYxgG3hoDIL7AB8A/XywW2N6
"text/plain": [
"<PIL.PngImagePlugin.PngImageFile image mode=RGB size=1200x900>"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"print(Image.open(\"../DATASETS/VIRAT_subset_0102x/VIRAT_0102_homography_img2world.png\").size)\n",
"Image.open(\"../DATASETS/VIRAT_subset_0102x/VIRAT_0102_homography_img2world.png\")\n"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABkAAAAHZCAYAAADewveiAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9yY8sWZrdCf7uJINONr7Rn4/h4RGRETmQkZHJJIhmVxerWehGNdhAo7noBcFNr5KbRC+YGxJccdEbLshN/wfsVW8IsFiVIEhUMTNyiGRUZszh05uf2bNZJxG5Qy+uiOhg5s89cvKMiHsAe/ZMVVREVOTKFdVzvnM+EUIIJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQk/AxBft47kJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQ8BeNJIAkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJPzMIQkgCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQk/c0gCSEJCQkJCQkJCQkJCQkJCQkJCQkJCQkLCzxySAJKQkJCQkJCQkJCQkJCQkJCQkJCQkJCQ8DOHJIAkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJPzMIQkgCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQk/c0gCSEJCQkJCQkJCQkJCQkJCQkJCQkJCQkLCzxySAJKQkJCQkJCQkJCQkJCQkJCQkJCQkJCQ8DOHJIAkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJPzMIQkgCQkJCQkJCQkJCQkJnzP+zb/5N7z11lsURcGv//qv8/u///uf9y4lJCQkJCQkJCQk/NQjCSAJCQkJCQkJCQkJCQmfI/7tv/23/NZv/Rb//J//c771rW/xy7/8y/z9v//3OTo6+rx3LSEhISEhISEhIeGnGiKEED7vnUhISEhISEhISEhISPh5xa//+q/zjW98g3/9r/81AN57Xn/9df7JP/kn/NN/+k8/571LSEhISEhISEhI+OmF/rx3ICEhISEhISEhISEh4ecVdV3zR3/0R/z2b/92/5iUkr/39/4ev/u7v3vja6qqoqqq/m/vPaenpxwcHCCE+Evf54SEhISEhISEhITPEyEErq6uuH//PlK+OuQqCSAJCQkJCQkJCQkJCQmfE16+fIlzjjt37mw8fufOHb7//e/f+Jp/+S//Jf/iX/yLv4rdS0hISEhISEhISPhri0ePHvHgwYNXLpMEkISEhISEhISEhISEhJ8i/PZv/za/9Vu/1f99cXHBG2+8wd13boMAHzwhhP7H+wA+gPfX1mWMpixKqrqi8Z4AZMYgpWxf63He4ZzHO4cSAilo/47rDiEgUIDot7kNIcTGOpF6w63S2AYlFXmRs7+/z2gyYRkCd+7d5dbBIUWWc3l5ye5khz/8/T9AALPpjLPTcxDgvOfg1i2897x8eYzODF//tV9DFgW/960/xNUVoakYFTmDQcl4tE/jQZqcO2+9w8G9e3z3+9/nl772ixwcHrCzu8uz58/5d//u3/HlL3+ZUhse3L7N6fEzfvc//UeEtezv7pLnBTt7+7w4Ocb5JaPRkCdPnrAz2eHy/Bzb1BhpgECe5wQExhhmszlaK5RSaB2PxWQyYTDZZbpsGO3s8Oz5EUIqbPDUjWW8u8ev/cZvcP+d96iEpmksIQTyMscR8CEglUJIQRASwavcQKFvCBraH4/v/9+dwbiegAoOjWWca27tjKmmlzx5+BFPH33Ezrhkf3eX4+Njnj1+gvceAbiqwRiD1grn3MbWffAQuu0EfGNp6oa6rrBVQ/AerTTDsiD4wNnZGVeLKao9ZpkxnJ2dMRgOGQwGACxmS1679wZvvvY6P/zBD5jN5kyXCxoCtx/cY2Frxvv73HktkiSzasmf/m/f5au/9IuU5ZBiMMSHwO6oQAaLEAJnLbP5jPF4gpQS7z0vnj3nxdOnnB0dUy2X3Lr/GoevP6AWUNU1H/7gx/zCu1/G5wbXHovZ1RVnJ8e889abiOBRUqII7A8HuLrBO4cIAWE9trYI7xEBqroCJVZnpH3MO48PgeA91gZc4+O16jzeOxaLRXttxsebpsH7zXOglOTBG28wvbxiNp8hMk1QEC/fcOO1HLoxFSAQEDeMMyElgUBjG+qq5o033qAsS+7cuYMPcZQ10iOFQEiJFILtKSOI1RiMKwW55W4TW68RiE91wHnvCD7E7cp2+RtC4dfXE9rtvwpCbG77xqR5TzxeQuCcw3uPMWZzO5+yrbje6/P4Twrv49zd7ffNx+3Pvx1CuPH41nVNCAGtdXxPAviUcyd59fH9LOfJB9svLALgfXv/ihBAaODb//WPOT19iVKSclCgDThXo5TANg5NRl03OOvjYEUgpUIq084TDcVAcni4z9npCVorprMp1taYTFPkBdPpnOViibWW4XBEnmcAVFUNQJaZdj6X1FWFDwGtFMvlEu88Simss9SLJWVZEoKjLEt2dnZYLucYo6jriuFwSKY13nls47DWY208DkoZggsIAc42SCnRxuCco65rMmPi/RrIsyEeQV3XCKXIy4K79+8hleLNt99iNBohCJRZTqYNOLCNpfYOi8AJgRNAFQjO45ylcRbX3t98CCitqauKxWy2Oh8huly9dzjrsK7B+4BRCiUVAM47pJBYZzHaYLIM6z1X1ZLMZFhnsdZS5OXGMJMBvLNIqZhOp5yenLColrz77rsMBkOmV5eIxlEWJVJKTs/OefTkEYPBkN39PbIyRwqBloLcZAQf4ucPYyiKgsePHnNxecn9B/c5eO01pJRIKVFCMru64tHHDxmPRty/cw/rLJnJKIoc21geffQRdd1wenJCruN91JQl2aDg9PyMO3fu8tY7bzOvliyWS4qywDnHj7/7A7y1/MJXfgFrGx4/fsJ4PGZnZ8JkMuHs7IwnH33E5dk5jWsoRiOGkzFXsynvvPcuJ2enHJ+c482QbFCyrGps8OSDgv07t/mbX/86Os8YDYfs7exgjMFbRzVfIIQgON/O6YG6WrKczajqimVVEQTs7OwyW8wRUqCNIS/jeBnkBZNyyPxqyrMXzxFaUU5G5GXJdHrFbDYnL0qKwQClDUVZ0jjP/HLG//v/9v9gPB6/+uInCSAJCQkJCQkJCQkJCQmfGw4PD1FK8eLFi43HX7x4wd27d298TZ7n5Hl+7XGTGRBsiR+tGOI9wW6R0N6jtY6ksotEuxACqeRKrGgfE5HjoWOzYtKARMhI6hltAIm1lqZpNggqKSWmJVVCCJGgR2wQXlJKsjxDSskXvvAFGuf59vd/wOHtuywXNX/4e3/AeDwmCMGzF0fs7+1zcnqKQBJ8JE6msxnGGIrhEE/g4ePHDEYjcilZeo9RGoVgfjlnObPcvvc6e/u3ODs6QQTNwWgXLRVvvPEmTfD44yO+/Cu/xL279zh9+ozf+Z3f4fXX7jKajGlmcy6vrhDTOZO9fWbTGV947w3Ozk4JwbNcLiAEpJBoFY9lVVWUgyF7+/t84d13e/dPlmXs7u7inOO7P3yfX/zGr2G9j/tQWWxVI5CIy
tJMFzx58hSze8CDBw9wwYMULOuaxjmysmgPq/wUMjj0pzO0i/ngV8IH8XGBQuCRwaNDQ/A1Rycvefzh+6jgefsLX8DVMz5++DEnxy8x2lBkGb6x5INOSPNIuUle+iAJviPxA65pcFUN3lNmGSbLGA9H7OxMqKuaqq64quYIpdF5Rl6W5HVNVpYUgyFlWbK7B4cHtzGDEl0WiKZGOMWtWwd84cvv8b0f/RCnFGJQ4LwneI8zCllkqEGGLjM8AVMW6FYAsdbSeEdeFr0AYoocYzTaSKxVZEZRFFkrDkYirMgzrBYEqTFSMjCSUSa5s7fL2dFLmmpOIHB8eYZuRQThAyIEjNIYpQjekxkJ7bVICARgsLODgF4ACUFAEAghQUQhQCoJrfjpg+/O6MY5cNZSlgPmu2PqpqHG4YLvhZNOTFkhbqebV3y7Dh+21us9gYDEE5zEaEmmJblR+CBxEpCRfJdCImQnoayNkZt0iU8TIRAbESgibmbjZZ3w0AmyQohr27kmZnyWbScB5GZ8ggCCFJ+
"text/plain": [
"<Figure size 2000x800 with 2 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"from matplotlib import pyplot as plt\n",
"\n",
"fig = plt.figure(figsize=(20,8))\n",
"ax1, ax2 = fig.subplots(1,2)\n",
"\n",
"ax1.set_aspect(1)\n",
"ax2.imshow(Image.open(\"../DATASETS/VIRAT_subset_0102x/VIRAT_S_0102.jpg\"))\n",
"\n",
"for track_id in tracked_instances:\n",
"    bboxes = tracked_instances[track_id]\n",
"    # use the bottom-center of each bounding box as the person's ground point\n",
"    traj = np.array([[[0.5 * (det[0]+det[2]), det[3]]] for det in bboxes])\n",
"    projected_traj = cv2.perspectiveTransform(traj, H)\n",
"    ax1.plot(projected_traj[:,:,0].reshape(-1), projected_traj[:,:,1].reshape(-1))\n",
"    ax2.plot(traj[:,:,0].reshape(-1), traj[:,:,1].reshape(-1))\n",
"\n",
"plt.show()"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"What if the projection were a heatmap of where people are? (Tracking would not really be necessary for that, though.) Below, a simple histogram baseline is sketched first; after that, the plot from above is combined with some blurring effects, adapted from the Matplotlib [agg filter demo](https://matplotlib.org/stable/gallery/misc/demo_agg_filter.html)."
]
},
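{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"As a baseline before the blur-filter rendering below, a plain 2D histogram of the projected points already gives such a heatmap, with no per-track drawing needed. (A minimal sketch reusing `tracked_instances` and `H` from above.)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# bin all projected bottom-center points into a 2D histogram\n",
"points = []\n",
"for track_id in tracked_instances:\n",
"    bboxes = tracked_instances[track_id]\n",
"    traj = np.array([[[0.5 * (det[0] + det[2]), det[3]]] for det in bboxes])\n",
"    points.extend(cv2.perspectiveTransform(traj, H).reshape(-1, 2))\n",
"points = np.array(points)\n",
"\n",
"heatmap, xedges, yedges = np.histogram2d(points[:, 0], points[:, 1], bins=200)\n",
"plt.figure(figsize=(10, 8))\n",
"plt.imshow(heatmap.T, origin='lower', extent=[xedges[0], xedges[-1], yedges[0], yedges[-1]], cmap='hot')\n",
"plt.colorbar()\n",
"plt.show()"
]
},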
{
"cell_type": "code",
"execution_count": 37,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABkAAAAPACAYAAABthLXYAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9eYws2Xnfef/OORGRS2Utt+7S996+3WSTzVUUqRmKEntkaKSRLIoG/FoWBcxg3sFIHsPGCKQAiX94QMAeQ54BaFuAlwFsDTDAeAFM2zBg2rANSyPTFgW/Q9EmJUqiKLbUG/t2992X2jMzIs55/4g8USejsu6+9/cD1K1buURGVWVFZZ1fPM9jQghBAAAAAAAAAAAATxD7sHcAAAAAAAAAAADgXiMAAQAAAAAAAAAATxwCEAAAAAAAAAAA8MQhAAEAAAAAAAAAAE8cAhAAAAAAAAAAAPDEIQABAAAAAAAAAABPHAIQAAAAAAAAAADwxCEAAQAAAAAAAAAAT5zsYe/AjXjv9dZbb2l5eVnGmIe9OwAAAMB9F0LQ1taWTp8+LWs5XwkAAAAA7tQjHYC89dZbeuaZZx72bgAAAAAP3NmzZ3XmzJmHvRsAAAAA8Nh6pAOQ5eVlSc0ffysrKw95bwAAAID7b3NzU88880z7WhgAAAAAcGce6QAktr1aWVkhAAEAAMDbCi1gAQAAAODu0FQYAAAAAAAAAAA8cQhAAAAAAAAAAADAE4cABAAAAAAAAAAAPHEIQAAAAAAAAAAAwBOHAAQAAAAAAAAAADxxCEAAAAAAAAAAAMAThwAEAAAAAAAAAAA8cQhAAAAAAAAAAADAE4cABAAAAAAAAAAAPHEIQAAAAAAAAAAAwBOHAAQAAAAAAAAAADxxCEAAAAAAAAAAAMAThwAEAAAAAAAAAAA8cQhAAAAAAAAAAADAEyd72DsAAADwMIUQ2v8bYx7ingAAAAAAgHuJAAQAALxthRDatxh+EIIAAAAAAPBkoAUWAAB4W0tDEAAAAAAA8OQgAAEAABBBCAAAAAAATxoCEAAA8LZljJlreUX4AQAAAADAk4MABAAAvK2lIQhVIAAAAAAAPDkIQAAAwNsaVSAAAAAAADyZCEAAAMDbHlUgAAAAAAA8eQhAAADA2x5VIAAAAAAAPHkIQAAAAEQVCAAAAAAATxoCEAAAgBmqQAAAAAAAeHIQgAAAAGi/AoQqEAAAAAAAngx3HID88i//sj784Q9rZWVFKysreuGFF/Rv/+2/ba8fj8f69Kc/raNHj2o0GulTn/qULly4cE92GgAA4EEg/AAAAAAA4PF1xwHImTNn9Ff/6l/V17/+dX3ta1/Tf/Pf/Df6U3/qT+n3f//3JUm/8Au/oH/1r/6V/tk/+2f68pe/rLfeeks/+ZM/ec92HAAA4F6jAgQAAAAAgCeHCffwr/r19XX90i/9kn7qp35Kx48f1xe+8AX91E/9lCTp29/+tj7wgQ/oK1/5ij7+8Y/f0vY2Nze1urqqjY0Nrays3KvdBAAAOFQIQd77NvgwxshaOzcfBLifeA0MAAAAAPfGPZkBUte1/sk/+Sfa2dnRCy+8oK9//esqy1I/+qM/2t7m/e9/v5599ll95StfuRcPCQAAcF8cVgVCJQgAAAAAAI+X7G7u/Hu/93t64YUXNB6PNRqN9MUvflEf/OAH9Y1vfENFUWhtbW3u9k899ZTOnz9/6PYmk4kmk0n78ebm5t3sHgAAwB1ZFIJQAQIAAAAAwOPlripA3ve+9+kb3/iGvvrVr+pnf/Zn9dM//dP61re+dcfb+/znP6/V1dX27Zlnnrmb3QMAALgjaQAiHWyLBQAAAAAAHn13FYAURaHnn39eH/3oR/X5z39eH/nIR/S3//bf1smTJzWdTnX9+vW521+4cEEnT548dHuf+9zntLGx0b6dPXv2bnYPAADgjsUQJISguq4JQAAAAAAAeMzckxkgkfdek8lEH/3oR5Xnub70pS+117344ot6/fXX9cILLxx6/16vp5WVlbk3AACAhyUNQbz37RsAAAAAAHj03fEMkM997nP65Cc/qWeffVZbW1v6whe+oF//9V/Xr/7qr2p1dVV/9s/+WX32s5/V+vq6VlZW9HM/93N64YUX9PGPf/xe7j8AAMB9EVtgxfcx+GAWCAAAAAAAj4c7DkAuXryo//F//B917tw5ra6u6sMf/rB+9Vd/VX/8j/9xSdLf/Jt/U9ZafepTn9JkMtEnPvEJ/d2/+3fv2Y4DAAA8CGkVSKwEMcbI2ntaSAsAAAAAAO4xEx7hZtabm5taXV3VxsYG7bAAAMBD4b1XXdeqqkqS5Jxr34D7gdfAAAAAAHBvcOoiAADADcQKkFjxEYehMwsEAAAAAIBHGwEIAADADcTwI87+IPwAAAAAAODxQAACAABwE90QxHvfVoIAAAAAAIBHEwEIAADATcQ2WN0qEAIQAAAAAAAeXQQgAAAAt8Ba284BiQEIrbAAAAAAAHh0EYAAAADcohiCGGMIQQAAAAAAeMQRgAAAANyC2AIrrQKJbwAAAAAA4NFDAAIAAHCLYgCSDkQnBAEAAAAA4NFEAAIAAHCLYugRq0GoAgEAAAAA4NFFAAIAAHAb0uqPiBAEAAAAAIBHT/awdwAAAOBxEttgdYegp9UhAAAAAADg4aMCBAAA4DbFFljSfvWH954qEAAAAAAAHiFUgAAAANwma6289+0ckLqu2+vScAQAAAAAADw8VIAAAADcgTgLJFaA1HU91xILAAAAAAA8XAQgAAAAdyDOAonVHrEFFq2wAAAAAAB4NBCAAAAA3IHY6sra/ZdTsRUWAQgAAAAAAA8fAQgAAMAdiiFIOvMjVoAQggAAAAAA8HARgAAAANyh2AIrBiC0wAIAAAAA4NFBAAIAAHAXugGIJEIQAAAAAAAeAQQgAAAAd6HbBoth6AAAAAAAPBoIQAAAAO5CHISeBiDMAQEAAAAA4OEjAAEAALhLxhg559qPCT4AAAAAAHj4CEAAAADuQmx/FUKQMUbee9V1TQgCAAAAAMBDRgACAABwl9I2WNbatv0VIQgAAAAAAA8PAQgAAMA94JxrQ5BYCRLfAAAAAADAg0cAAgAAcJfiAPQYgkhSVVWqqopKEAAAAAAAHhICEAAAgHsghiCxDZYk5oEAAAAAAPAQEYAAAADcI9batg2WJIUQ5L2nCgQAAAAAgIeAAAQAAOAuxXAjHYYuNRUgVVUxBwQAAAAAgIeAAAQAAOAuxOqOOPA8DkFPB6LTBgsAAAAAgAcve9g7AAAA8LhKw4/04/j/uq7b69JwBAAAAAAA3H9UgAAAANyhNMw4bM5HGoRQBQIAAAAAwINDAAIAAHCX0rZX6cfGGHnvVdd12waLEAQAAAAAgAeDAAQAAOAudFtapbM/YiDSbYcFAAAAAADuP2aAAAAA3IXuXI/4/yzLFEKQc64dkJ62wWIWCAAAAAAA9xcVIAAAAHcphhmxxVWsAonvpWYIelVVVIEAAAAAAPCAEIAAAADcA92B6NZaZVkm51x7WawAY
Q4IAAAAAAD3HwEIAADAXYrhx6L38S0GIFVVEYIAAAAAAPAAEIAAAADcA902WGn4kV6X3gYAAAAAANw/BCAAAAD3SDfsMMbIOddWgIQQVNd12wqLeSAAAAAAANw/BCAAAAD3wGGVHpLknJubBVLXteq6phIEAAAAAID7KHvYOwAAAPCkSUMNa20bjHjvVde1jDGy1rYVIDEcAQAAAAAA9w4BCAAAwD0Uqzq89weGoXvvZa2da38VP463BQAAAAAA98Ydt8D6/Oc/r4997GNaXl7WiRMn9BM/8RN68cUX527zQz/0QwcGgP7P//P/fNc7DQAA8KiJoUZd16qqqn2fzvmw1s5dzxwQAAAAAADunzsOQL785S/r05/+tH7zN39Tv/Zrv6ayLPVjP/Zj2tnZmbvdn/tzf07nzp1r3/76X//rd73TAAAAj6I43DwGG/FjSW0FSHxL54AQggAAAAAAcO/dcQusX/mVX5n7+O///b+vEydO6Otf/7p+8Ad/sL18OBzq5MmTd76HAAAAj5m0BVY
"text/plain": [
"<Figure size 2000x1200 with 3 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"from matplotlib import gridspec\n",
"import matplotlib.cm as cm\n",
"import matplotlib.transforms as mtransforms\n",
"from matplotlib.colors import LightSource\n",
"from matplotlib.artist import Artist\n",
"\n",
"\n",
"def smooth1d(x, window_len):\n",
"    # copied from https://scipy-cookbook.readthedocs.io/items/SignalSmooth.html\n",
"    s = np.r_[2*x[0] - x[window_len:1:-1], x, 2*x[-1] - x[-1:-window_len:-1]]\n",
"    w = np.hanning(window_len)\n",
"    y = np.convolve(w/w.sum(), s, mode='same')\n",
"    return y[window_len-1:-window_len+1]\n",
"\n",
"\n",
"def smooth2d(A, sigma=3):\n",
"    window_len = max(int(sigma), 3) * 2 + 1\n",
"    A = np.apply_along_axis(smooth1d, 0, A, window_len)\n",
"    A = np.apply_along_axis(smooth1d, 1, A, window_len)\n",
"    return A\n",
"\n",
"\n",
"class BaseFilter:\n",
"\n",
"    def get_pad(self, dpi):\n",
"        return 0\n",
"\n",
"    def process_image(self, padded_src, dpi):\n",
"        raise NotImplementedError(\"Should be overridden by subclasses\")\n",
"\n",
"    def __call__(self, im, dpi):\n",
"        pad = self.get_pad(dpi)\n",
"        padded_src = np.pad(im, [(pad, pad), (pad, pad), (0, 0)], \"constant\")\n",
"        tgt_image = self.process_image(padded_src, dpi)\n",
"        return tgt_image, -pad, -pad\n",
"\n",
"\n",
"class GaussianFilter(BaseFilter):\n",
"    \"\"\"Simple Gaussian filter.\"\"\"\n",
"\n",
"    def __init__(self, sigma, alpha=0.5, color=(0, 0, 0)):\n",
"        self.sigma = sigma\n",
"        self.alpha = alpha\n",
"        self.color = color\n",
"\n",
"    def get_pad(self, dpi):\n",
"        return int(self.sigma*3 / 72 * dpi)\n",
"\n",
"    def process_image(self, padded_src, dpi):\n",
"        tgt_image = np.empty_like(padded_src)\n",
"        tgt_image[:, :, :3] = self.color\n",
"        tgt_image[:, :, 3] = smooth2d(padded_src[:, :, 3] * self.alpha,\n",
"                                      self.sigma / 72 * dpi)\n",
"        return tgt_image\n",
"\n",
"\n",
"gauss = GaussianFilter(2)\n",
"\n",
"fig = plt.figure(figsize=(20,12))\n",
"\n",
"# Create 2x2 subplots\n",
"gs = gridspec.GridSpec(2, 2)\n",
"\n",
"ax1 = fig.add_subplot(gs[0,0])\n",
"ax3 = fig.add_subplot(gs[1,0])\n",
"ax2 = fig.add_subplot(gs[:,1])\n",
"\n",
"ax1.set_aspect(1)\n",
"ax3.set_aspect(1)\n",
"\n",
"ax2.imshow(Image.open(\"../DATASETS/VIRAT_subset_0102x/VIRAT_S_0102.jpg\"))\n",
"\n",
"for track_id in tracked_instances:\n",
"    bboxes = tracked_instances[track_id]\n",
"    traj = np.array([[[0.5 * (det[0]+det[2]), det[3]]] for det in bboxes])\n",
"    projected_traj = cv2.perspectiveTransform(traj, H)\n",
"    line, = ax1.plot(projected_traj[:,:,0].reshape(-1), projected_traj[:,:,1].reshape(-1), color=(0,0,0,0.05))\n",
"    line.set_agg_filter(gauss)\n",
"    line.set_rasterized(True)  # \"to support mixed-mode renderers\"\n",
"\n",
"    points = ax3.scatter(projected_traj[:,:,0].reshape(-1), projected_traj[:,:,1].reshape(-1), color=(0,0,0,0.01))\n",
"    points.set_agg_filter(gauss)\n",
"    points.set_rasterized(True)  # \"to support mixed-mode renderers\"\n",
"\n",
"    ax2.plot(traj[:,:,0].reshape(-1), traj[:,:,1].reshape(-1))\n",
"\n",
"plt.show()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.2"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "1135f674f58caf91385e41dd32dc418daf761a3c5d4526b1ac3bad0b893c2eb5"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}