"""
TODO this script runs Alphapose ' s train.py, the created model is used to re-annotate the training-images, which is then fed back into the system
For now the only thing it does is that it merges alphapose - results . json with the coco input dataset .
"""
import argparse
import datetime
import json
import logging
import subprocess
from pathlib import Path

import yaml

logging.basicConfig()
logger = logging.getLogger('loop_alphapose_training')
logger.setLevel(logging.INFO)

class Iteration:
    def __init__(self, i: int):
        assert i > 0, "Iteration cannot be < 1"
        self.i = i
        self.nr = f"{i:04d}"
        self.name = f"feedback_iteration{self.nr}"

    def training_config_path(self):
        return Path("./data/coco") / "_iterations" / self.nr / "256x192_res50_lr1e-3_1x.yaml"

    def model_path(self):
        assert self.i > 0, "Iteration 0 only used at training time"
        return Path(f'exp/{self.name}-256x192_res50_lr1e-3_1x.yaml/final_DPG.pth')

    def merged_results_path(self, stage: str):
        assert stage in ['val', 'train']
        return Path(f"./data/coco/_iterations/{self.nr}/alphapose-results-{stage}2017.json")

    def inference_results_dir(self, stage: str, for_docker=False):
        assert stage in ['val', 'train']
        # local paths are relative ("./out/..."); inside the container the
        # output volume is mounted at the filesystem root ("/out/...")
        relative = "." if not for_docker else ""
        return Path(f"{relative}/out/_iterations/{self.nr}_{stage}2017/")

    def prev_iteration(self):
        return Iteration(self.i - 1)

    def next_iteration(self):
        return Iteration(self.i + 1)

    @classmethod
    def from_str(cls, input):
        raise NotImplementedError
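
# Example values (derived from the format strings above):
#   it = Iteration(1)
#   it.nr    -> "0001"
#   it.name  -> "feedback_iteration0001"
#   it.training_config_path() -> data/coco/_iterations/0001/256x192_res50_lr1e-3_1x.yaml
#   it.model_path()           -> exp/feedback_iteration0001-256x192_res50_lr1e-3_1x.yaml/final_DPG.pth
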
def wrap_docker_cmd(cmd: list, container: str = 'alphapose'):
    pwd = Path(__file__).parent.absolute()
    return [
        'docker', 'run',
        '--rm',
        '--gpus', 'all',
        '--shm-size=10g',
        '-v', str(pwd) + '/exp:/build/AlphaPose/exp',
        '-v', str(pwd) + '/data:/build/AlphaPose/data',
        '-v', str(pwd) + '/out:/out',
        '-v', str(pwd) + '/alphapose-docker/detector/yolox/data:/build/AlphaPose/detector/yolox/data',
        '-v', str(pwd) + '/alphapose-docker/detector/yolo/data:/build/AlphaPose/detector/yolo/data',
        '-v', str(pwd) + '/alphapose-docker/pretrained_models:/build/AlphaPose/pretrained_models',
        container,
        *cmd
    ]
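
# For illustration, wrap_docker_cmd(['python', 'scripts/train.py']) expands to
# roughly the following shell command (<pwd> being this script's directory):
#   docker run --rm --gpus all --shm-size=10g \
#     -v <pwd>/exp:/build/AlphaPose/exp -v <pwd>/data:/build/AlphaPose/data \
#     -v <pwd>/out:/out -v <pwd>/alphapose-docker/... \
#     alphapose python scripts/train.py
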
def create_config(iteration: Iteration):
    """
    Build the training config, based on e.g. configs/coco/resnet/256x192_res50_lr1e-3_1x.yaml
    """
    base_config = Path("../AlphaPose/configs/coco/resnet/256x192_res50_lr1e-3_1x.yaml")
    with base_config.open('r') as fp:
        config = yaml.safe_load(fp)
    # the first iteration simply copies the base config; later iterations
    # train and validate on the merged annotations of the previous iteration
    if iteration.i > 1:
        prev_iteration = iteration.prev_iteration()
        config['DATASET']['TRAIN']['ANN'] = str(prev_iteration.merged_results_path('train').relative_to(config['DATASET']['TRAIN']['ROOT']))
        config['DATASET']['VAL']['ANN'] = str(prev_iteration.merged_results_path('val').relative_to(config['DATASET']['TRAIN']['ROOT']))
        config['DATASET']['TEST']['DET_FILE'] = f'./exp/json/{iteration.name}_test_det_yolo.json'
        config['DATASET']['TEST']['ANN'] = config['DATASET']['VAL']['ANN']
    new_config = iteration.training_config_path()
    if not new_config.parent.exists():
        logger.info(f"Make directory for config: {new_config.parent}")
        new_config.parent.mkdir(parents=True)  # parents=True: _iterations/ may not exist yet
    with new_config.open('w') as fp:
        yaml.dump(config, fp)
    return new_config
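
# For example (assuming DATASET.TRAIN.ROOT is "./data/coco/", which the
# relative_to() calls above require), iteration 2's config would point at
# iteration 1's merged output:
#   DATASET.TRAIN.ANN -> "_iterations/0001/alphapose-results-train2017.json"
#   DATASET.VAL.ANN   -> "_iterations/0001/alphapose-results-val2017.json"
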
def run_cmd(cmd, in_docker):
    if in_docker:
        cmd = wrap_docker_cmd(cmd)
    logger.info(f"Run {cmd=}")
    proc = subprocess.Popen(cmd, shell=False)
    proc.communicate()
def create_and_run_training(iteration: Iteration):
    '''
    Basically just runs:
        python scripts/train.py \
            --cfg exp/config_first.yaml \
            --exp-id coco_test2
    '''
    create_config(iteration)
    cmd = [
        'python', 'scripts/train.py',
        '--cfg', str(iteration.training_config_path()),
        '--exp-id', iteration.name
    ]
    run_cmd(cmd, in_docker=True)
def run_inferences(iteration: Iteration):
    '''
    Create new train & validation datasets by basically running:
        python scripts/demo_inference.py \
            --cfg configs/coco/resnet/256x192_res50_lr1e-3_1x.yaml \
            --checkpoint exp/coco_test1-256x192_res50_lr1e-3_1x.yaml/final_DPG.pth \
            --gpus 0 \
            --indir data/coco/train2017 \
            --outdir /out/first_train2017/ \
            --format coco \
            --eval
    and the same again with --indir data/coco/val2017 --outdir /out/first_val2017/.
    '''
    base_cmd = [
        'python', 'scripts/demo_inference.py',
        '--cfg', str(iteration.training_config_path()),
        '--checkpoint', str(iteration.model_path()),
        '--gpus', '0',
        '--format', 'coco',
        '--eval',
    ]
    cmd_infer_train = base_cmd.copy()
    cmd_infer_train.extend([
        '--indir', 'data/coco/train2017',
        # str(): Popen arguments should be strings, not Path objects
        '--outdir', str(iteration.inference_results_dir('train', for_docker=True)),
    ])
    cmd_infer_val = base_cmd.copy()
    cmd_infer_val.extend([
        '--indir', 'data/coco/val2017',
        '--outdir', str(iteration.inference_results_dir('val', for_docker=True)),
    ])
    run_cmd(cmd_infer_train, in_docker=True)
    run_cmd(cmd_infer_val, in_docker=True)
def coco_alphapose_merge_results_for_iteration(iteration: Iteration):
    for stage in ['val', 'train']:
        coco_alphapose_merge_results(
            Path(f"data/coco/annotations/person_keypoints_{stage}2017.json"),
            iteration.inference_results_dir(stage) / "alphapose-results.json",
            iteration.merged_results_path(stage)
        )
def done_training(iteration: Iteration) -> bool:
    return iteration.model_path().exists()


def done_inference(iteration: Iteration) -> bool:
    return all([
        (iteration.inference_results_dir(stage) / "alphapose-results.json").exists() for stage in ['train', 'val']
    ])


def done_merge_results(iteration: Iteration) -> bool:
    return all([
        iteration.merged_results_path(stage).exists() for stage in ['train', 'val']
    ])
def run_iteration(iteration: Iteration):
    if not done_training(iteration):
        create_and_run_training(iteration)
    else:
        logger.info(f"Training exists for {iteration.name}")
    if not done_inference(iteration):
        run_inferences(iteration)
    else:
        logger.info(f"Inference results exist for {iteration.name}")
    if not done_merge_results(iteration):
        coco_alphapose_merge_results_for_iteration(iteration)
    else:
        logger.info(f"Merged annotations exist for {iteration.name}")
def coco_alphapose_merge_results(annotations_file: Path, results_file: Path, out_file: Path):
    today = datetime.datetime.now().strftime("%Y/%m/%d")
    info = {
        "description": "COCO 2017 Dataset, modified by Ruben van de Ven",
        "url": "http://cocodataset.org",
        "version": "0.1",
        "year": 2023,
        "contributor": "COCO Consortium, Ruben van de Ven",
        "date_created": today,
    }
    annotations = json.loads(annotations_file.read_text())
    results = json.loads(results_file.read_text())
    # annotations_ann: list = annotations['annotations']
    # id_counts = {}
    for i, result in enumerate(results):
        # AlphaPose stores the image id as a filename (e.g. "000000000042.jpg");
        # strip the extension to recover the numeric COCO image id
        if isinstance(result['image_id'], str):
            result['image_id'] = int(result['image_id'][:-4])
        result['id'] = i
        result['iscrowd'] = 0  # TODO: make sure this is the right terminology/assumption (what is crowd here anyway, individual/crowd?)
        result['bbox'] = result['box']  # TODO: result.pop('box') to rename instead of copy
        result['area'] = 1  # TODO: for now, just to bypass the ignore in alphapose/datasets/mscoco.py:87
        result['num_keypoints'] = 17  # TODO: verify that this is indeed always all points
        # There can be multiple annotations per image. Try to match the originals
        # by keeping track of their order of occurrence:
        # if result['image_id'] not in id_counts:
        #     id_counts[result['image_id']] = 0
        # # find matching annotations in the original
        # origs = list(filter(lambda ann: ann['image_id'] == result['image_id'], annotations_ann))
        # assert len(origs) > id_counts[result['image_id']], f"Len should be one, found {len(origs)} for {result['image_id']}: {origs=}"
        # orig = origs[id_counts[result['image_id']]]
        # id_counts[result['image_id']] += 1
        # result['id'] = orig['id']  # keep track of the original id
    annotations['annotations'] = results
    annotations['info'] = info
    with out_file.open('w') as fp:
        json.dump(annotations, fp)
    logger.info(f'wrote to {out_file.as_posix()}')
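
# A rough sketch of the record shapes involved (field names as used in the loop
# above; the actual AlphaPose output may carry additional fields):
#   AlphaPose result: {"image_id": "000000000042.jpg", "category_id": 1,
#                      "keypoints": [x1, y1, c1, ...], "score": ..., "box": [x, y, w, h]}
# After the loop, each record additionally carries the COCO-style fields id,
# iscrowd, bbox, area and num_keypoints, so it can stand in for an entry in the
# "annotations" list of person_keypoints_*2017.json.
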
if __name__ == "__main__":
    i = 1
    while True:
        iteration = Iteration(i)
        logger.info(f"Run iteration {iteration.name}")
        run_iteration(iteration)
        i += 1

    # parser = argparse.ArgumentParser(description='Merge alphapose-results.json with an input dataset')
    # parser.add_argument('--annotations-file', required=True, type=argparse.FileType('r'),
    #                     help='an annotations file from the COCO dataset (eg. person_keypoints_train2017.json)')
    # parser.add_argument('--results-file', required=True, type=argparse.FileType('r'),
    #                     help='path to the alphapose-results.json')
    # parser.add_argument('--out-file', required=True, type=argparse.FileType('w'),
    #                     help='the filename of the merged result')
    # args = parser.parse_args()
    # coco_alphapose_merge_results(args.annotations_file, args.results_file, args.out_file)
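    # Note: if this CLI is re-enabled, the FileType('r'/'w') arguments would need
    # to become paths (e.g. type=Path), since coco_alphapose_merge_results() reads
    # and writes via Path methods rather than open file objects.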