Scripts to run alphapose training in a loop and analyse results
commit c769f0f87a
8 changed files with 2447 additions and 0 deletions
5  .gitignore  vendored  Normal file
@@ -0,0 +1,5 @@
data/
exp/
detector/
out/
pretrained-models/
1  .python-version  Normal file
@@ -0,0 +1 @@
3.10.4
12  downloadCOCO.sh  Executable file
@@ -0,0 +1,12 @@
#!/bin/bash

wget -nc --directory-prefix=data/coco http://images.cocodataset.org/zips/train2017.zip
wget -nc --directory-prefix=data/coco http://images.cocodataset.org/zips/val2017.zip
wget -nc --directory-prefix=data/coco http://images.cocodataset.org/annotations/annotations_trainval2017.zip

cd data/coco || exit
unzip -n annotations_trainval2017.zip
unzip -n val2017.zip
unzip -n train2017.zip
680  find_diffs.ipynb  Normal file
File diff suppressed because one or more lines are too long
265  loop_alphapose_training.py  Normal file
@@ -0,0 +1,265 @@
"""
TODO: this script runs AlphaPose's train.py; the resulting model is used to
re-annotate the training images, which are then fed back into the next
training run.

For now, the only thing it does is merge alphapose-results.json with the COCO
input dataset.
"""

import argparse
import datetime
import json
from pathlib import Path
import subprocess
import yaml
import logging

logging.basicConfig()
logger = logging.getLogger('loop_alphapose_training')
logger.setLevel(logging.INFO)
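
# The intended feedback loop, per iteration (see run_iteration() below):
#   1. train a model, starting from the previous iteration's merged
#      annotations (create_and_run_training)
#   2. run that model over train2017 and val2017 (run_inferences)
#   3. merge the resulting alphapose-results.json files back into COCO format,
#      to serve as the next iteration's dataset
#      (coco_alphapose_merge_results_for_iteration)
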
class Iteration:
    def __init__(self, i: int):
        assert i > 0, "Iteration cannot be < 1"
        self.i = i
        self.nr = f"{i:04d}"
        self.name = f"feedback_iteration{self.nr}"

    def training_config_path(self):
        return Path("./data/coco") / "_iterations" / self.nr / "256x192_res50_lr1e-3_1x.yaml"

    def model_path(self):
        assert self.i > 0, "Iteration 0 only used at training time"
        return Path(f'exp/{self.name}-256x192_res50_lr1e-3_1x.yaml/final_DPG.pth')

    def merged_results_path(self, stage: str):
        assert stage in ['val', 'train']
        return Path(f"./data/coco/_iterations/{self.nr}/alphapose-results-{stage}2017.json")

    def interference_results_dir(self, stage: str, for_docker=False):
        assert stage in ['val', 'train']
        relative = "." if not for_docker else ""
        return Path(f"{relative}/out/_iterations/{self.nr}_{stage}2017/")

    def prev_iteration(self):
        return Iteration(self.i - 1)

    def next_iteration(self):
        return Iteration(self.i + 1)

    @classmethod
    def from_str(cls, input):
        raise NotImplementedError

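# Illustrative only -- the paths Iteration(2) resolves to, worked out by hand
# from the methods above:
#   training_config_path()          -> data/coco/_iterations/0002/256x192_res50_lr1e-3_1x.yaml
#   model_path()                    -> exp/feedback_iteration0002-256x192_res50_lr1e-3_1x.yaml/final_DPG.pth
#   merged_results_path('train')    -> data/coco/_iterations/0002/alphapose-results-train2017.json
#   interference_results_dir('val') -> out/_iterations/0002_val2017
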
def wrap_docker_cmd(cmd: list, container: str = 'alphapose'):
    pwd = Path(__file__).parent.absolute()
    return [
        'docker', 'run',
        '--rm',
        '--gpus', 'all',
        '--shm-size=10g',
        '-v', str(pwd) + '/exp:/build/AlphaPose/exp',
        '-v', str(pwd) + '/data:/build/AlphaPose/data',
        '-v', str(pwd) + '/out:/out',
        '-v', str(pwd) + '/detector/yolox/data:/build/AlphaPose/detector/yolox/data',
        '-v', str(pwd) + '/detector/yolo/data:/build/AlphaPose/detector/yolo/data',
        '-v', str(pwd) + '/pretrained_models:/build/AlphaPose/pretrained_models',
        container,
        *cmd
    ]
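
# For example, wrap_docker_cmd(['python', 'scripts/train.py']) expands to
# roughly:
#   docker run --rm --gpus all --shm-size=10g \
#     -v <repo>/exp:/build/AlphaPose/exp ... \
#     alphapose python scripts/train.py
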
def create_config(iteration: Iteration):
    """
    build config, e.g. configs/coco/resnet/256x192_res50_lr1e-3_1x.yaml
    """
    base_config = Path("../AlphaPose/configs/coco/resnet/256x192_res50_lr1e-3_1x.yaml")
    with base_config.open('r') as fp:
        config = yaml.safe_load(fp)

    # the first iteration simply copies the config
    if iteration.i > 1:
        prev_iteration = iteration.prev_iteration()
        config['DATASET']['TRAIN']['ANN'] = str(prev_iteration.merged_results_path('train').relative_to(config['DATASET']['TRAIN']['ROOT']))
        config['DATASET']['VAL']['ANN'] = str(prev_iteration.merged_results_path('val').relative_to(config['DATASET']['TRAIN']['ROOT']))
        config['DATASET']['TEST']['DET_FILE'] = f'./exp/json/{iteration.name}_test_det_yolo.json'
        config['DATASET']['TEST']['ANN'] = config['DATASET']['VAL']['ANN']

    new_config = iteration.training_config_path()

    if not new_config.parent.exists():
        logger.info(f"Make directory for config: {new_config.parent}")
        new_config.parent.mkdir(parents=True)

    with new_config.open('w') as fp:
        yaml.dump(config, fp)

    return new_config

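# Sketch of the keys rewritten above, e.g. for iteration 2 -- assuming the
# base config's DATASET.TRAIN.ROOT is './data/coco/' (as in AlphaPose's stock
# 256x192_res50_lr1e-3_1x.yaml); with any other ROOT, relative_to() raises:
#
#   DATASET:
#     TRAIN:
#       ANN: _iterations/0001/alphapose-results-train2017.json
#     VAL:
#       ANN: _iterations/0001/alphapose-results-val2017.json
#     TEST:
#       ANN: _iterations/0001/alphapose-results-val2017.json
#       DET_FILE: ./exp/json/feedback_iteration0002_test_det_yolo.json
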
def run_cmd(cmd, in_docker):
    if in_docker:
        cmd = wrap_docker_cmd(cmd)

    logger.info(f"Run {cmd=}")
    proc = subprocess.Popen(cmd, shell=False)
    proc.communicate()

def create_and_run_training(iteration: Iteration):
    '''
    Basically just runs:
        python scripts/train.py \
            --cfg exp/config_first.yaml \
            --exp-id coco_test2
    '''

    create_config(iteration)
    cmd = [
        'python', 'scripts/train.py',
        '--cfg', str(iteration.training_config_path()),
        '--exp-id', iteration.name
    ]

    run_cmd(cmd, in_docker=True)

def run_inferences(iteration: Iteration):
    '''
    Create new train & validation datasets by basically running:

    python scripts/demo_inference.py \
        --cfg configs/coco/resnet/256x192_res50_lr1e-3_1x.yaml \
        --checkpoint exp/coco_test1-256x192_res50_lr1e-3_1x.yaml/final_DPG.pth \
        --gpus 0 \
        --indir data/coco/train2017 \
        --outdir /out/first_train2017/ \
        --format coco \
        --eval

    python scripts/demo_inference.py \
        --cfg configs/coco/resnet/256x192_res50_lr1e-3_1x.yaml \
        --checkpoint exp/coco_test1-256x192_res50_lr1e-3_1x.yaml/final_DPG.pth \
        --gpus 0 \
        --indir data/coco/val2017 \
        --outdir /out/first_val2017/ \
        --format coco \
        --eval
    '''

    base_cmd = [
        'python', 'scripts/demo_inference.py',
        '--cfg', str(iteration.training_config_path()),
        '--checkpoint', str(iteration.model_path()),
        '--gpus', '0',
        '--format', 'coco',
        '--eval',
    ]

    cmd_infer_train = base_cmd.copy()
    cmd_infer_train.extend([
        '--indir', 'data/coco/train2017',
        '--outdir', str(iteration.interference_results_dir('train', for_docker=True)),
    ])

    cmd_infer_val = base_cmd.copy()
    cmd_infer_val.extend([
        '--indir', 'data/coco/val2017',
        '--outdir', str(iteration.interference_results_dir('val', for_docker=True)),
    ])

    run_cmd(cmd_infer_train, in_docker=True)
    run_cmd(cmd_infer_val, in_docker=True)

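# Note: demo_inference.py writes its detections to alphapose-results.json in
# --outdir; done_interference() below checks for exactly that file.
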
def coco_alphapose_merge_results_for_iteration(iteration: Iteration):
    for stage in ['val', 'train']:
        coco_alphapose_merge_results(
            Path(f"data/coco/annotations/person_keypoints_{stage}2017.json"),
            iteration.interference_results_dir(stage) / "alphapose-results.json",
            iteration.merged_results_path(stage)
        )

def done_training(iteration: Iteration) -> bool:
    return iteration.model_path().exists()

def done_interference(iteration: Iteration) -> bool:
    return all([
        (iteration.interference_results_dir(stage) / "alphapose-results.json").exists() for stage in ['train', 'val']
    ])

def done_merge_results(iteration: Iteration) -> bool:
    return all([
        iteration.merged_results_path(stage).exists() for stage in ['train', 'val']
    ])

def run_iteration(iteration: Iteration):
    if not done_training(iteration):
        create_and_run_training(iteration)
    else:
        logger.info(f"Training exists {iteration.name}")
    if not done_interference(iteration):
        run_inferences(iteration)
    else:
        logger.info(f"Inference results exist {iteration.name}")
    if not done_merge_results(iteration):
        coco_alphapose_merge_results_for_iteration(iteration)
    else:
        logger.info(f"Merged annotations exist {iteration.name}")

def coco_alphapose_merge_results(annotations_file: Path, results_file: Path, out_file: Path):
    today = datetime.datetime.now().strftime("%Y/%m/%d")
    info = {"description": "COCO 2017 Dataset, modified by Ruben van de Ven", "url": "http://cocodataset.org", "version": "0.1", "year": 2023, "contributor": "COCO Consortium, Ruben van de Ven", "date_created": today}
    annotations = json.loads(annotations_file.read_text())
    results = json.loads(results_file.read_text())
    # annotations_ann: list = annotations['annotations']

    # id_counts = {}

    for i, result in enumerate(results):
        if isinstance(result['image_id'], str):
            result['image_id'] = int(result['image_id'][:-4])

        result['id'] = i
        result['iscrowd'] = 0  # TODO make sure this is the right terminology/assumption (what does crowd mean here anyway: individual/crowd?)
        result['bbox'] = result['box']  # TODO result.pop('box') to rename instead of copy
        result['area'] = 1  # TODO: for now, to bypass the ignore in alphapose/datasets/mscoco.py:87
        result['num_keypoints'] = 17  # TODO: verify that this is indeed always all points

        # There can be multiple annotations per image. Try to match the originals by keeping track
        # of their order of occurrence
        # if result['image_id'] not in id_counts:
        #     id_counts[result['image_id']] = 0

        # # find matching annotations in original
        # origs = list(filter(lambda ann: ann['image_id'] == result['image_id'], annotations_ann))
        # assert len(origs) > id_counts[result['image_id']], f"Len should be one, found {len(origs)} for {result['image_id']}: {origs=}"
        # orig = origs[id_counts[result['image_id']]]
        # id_counts[result['image_id']] += 1

        # result['id'] = orig['id']  # we keep track of the original id

    annotations['annotations'] = results
    annotations['info'] = info

    with out_file.open('w') as fp:
        json.dump(annotations, fp)

    logger.info(f'wrote to {out_file.as_posix()}')

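# A hedged sketch of what the loop above does to a single entry. AlphaPose's
# coco-format results look roughly like "before" (the exact field set may
# differ; check your alphapose-results.json):
#   before: {"image_id": "000000397133.jpg", "category_id": 1,
#            "keypoints": [...], "score": 2.9, "box": [x, y, w, h]}
#   after:  {"image_id": 397133, "category_id": 1, "keypoints": [...],
#            "score": 2.9, "box": [x, y, w, h], "bbox": [x, y, w, h],
#            "id": 0, "iscrowd": 0, "area": 1, "num_keypoints": 17}
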
if __name__ == "__main__":

    i = 1
    # loops indefinitely; the done_* checks in run_iteration() make each pass
    # resume wherever the previous run left off
    while True:
        iteration = Iteration(i)
        logger.info(f"Run iteration {iteration.name}")
        run_iteration(iteration)
        i += 1

    # parser = argparse.ArgumentParser(description='Merge alphapose-results.json with an input dataset')
    # parser.add_argument('--annotations-file', required=True, type=argparse.FileType('r'),
    #                     help='an annotations file from the COCO dataset (e.g. person_keypoints_train2017.json)')
    # parser.add_argument('--results-file', required=True, type=argparse.FileType('r'),
    #                     help='path to the alphapose-results.json')
    # parser.add_argument('--out-file', required=True, type=argparse.FileType('w'),
    #                     help='the filename of the merged result')

    # args = parser.parse_args()

    # coco_alphapose_merge_results(args.annotations_file, args.results_file, args.out_file)
1399  poetry.lock  generated  Normal file
File diff suppressed because it is too large
24  pyproject.toml  Normal file
@@ -0,0 +1,24 @@
[tool.poetry]
name = "alphapose-loop"
version = "0.1.0"
description = ""
authors = ["Ruben van de Ven <git@rubenvandeven.com>"]
readme = "README.md"
#packages = [{include = "alphapose_loop"}]

[tool.poetry.dependencies]
python = "^3.9"
numpy = "^1.24.2"
pycocotools = "^2.0.6"
tqdm = "^4.65.0"
coloredlogs = "^15.0.1"
PyYAML = "^6.0"
Pillow = "^9.4.0"

[tool.poetry.group.dev.dependencies]
ipykernel = "^6.21.3"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
61  run_test_inferences.py  Normal file
@@ -0,0 +1,61 @@
from pathlib import Path
import shutil
from loop_alphapose_training import run_cmd, Iteration
import logging
import coloredlogs

logger = logging.getLogger(__name__)
coloredlogs.install(level=logging.INFO)

def collate(iterations):
    # enumerate the test images via one iteration's vis/ output
    # (index 1, i.e. the second iteration)
    path = Path(f'out/test_imgs_{iterations[1].nr}/vis')
    images = [x for x in path.iterdir() if x.is_file()]
    for img in images:
        logger.info(f"collate {img.name}")
        target_dir = Path(f'out/test_imgs_loops/{img.stem}/')
        target_dir.mkdir(parents=True, exist_ok=True)

        for iteration in iterations:
            try:
                src = Path(f'out/test_imgs_{iteration.nr}/vis/') / img.name
                target = target_dir / (iteration.nr + img.suffix)
                logger.info(f"\tcopy {src} to {target}")
                shutil.copy(src, target)
            except Exception as e:
                logger.exception(e)

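# Resulting layout (derived from collate() above): one directory per test
# image, holding that image as rendered by each iteration's model, e.g.
#   out/test_imgs_loops/<stem>/0001.jpg
#   out/test_imgs_loops/<stem>/0002.jpg
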
# collate([Iteration(i) for i in range(2, 7)])
# exit()

if __name__ == '__main__':

    i = 0
    iterations = []
    while True:
        i += 1
        iteration = Iteration(i)
        path = iteration.model_path()
        if not path.exists():
            logger.warning(f"Model for iteration {iteration.nr} doesn't exist")
            break
        iterations.append(iteration)
        cmd = [
            'python', 'scripts/demo_inference.py',
            '--cfg', str(iteration.training_config_path()),
            '--checkpoint', str(iteration.model_path()),
            '--gpus', '0',
            '--format', 'coco',
            '--indir', 'data/test_imgs',
            '--outdir', f'/out/test_imgs_{iteration.nr}',
            '--save_img'
        ]
        logger.info(f"Running {cmd}")
        run_cmd(cmd, in_docker=True)
        # break

    collate(iterations)