Add server and format with autopep8

Author: Ruben van de Ven, 2020-12-17 15:22:24 +01:00
parent 5a2f17445e
commit 5d825199d1
3 changed files with 174 additions and 56 deletions
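
The formatting half of this commit matches what autopep8 produces: long statements wrapped, spaces added after commas, keyword arguments written as name=value without surrounding spaces, and overlong inline comments moved onto their own line. The exact invocation is not recorded in the commit; a minimal sketch of how the pass could be reproduced, assuming the autopep8 library and an illustrative file list:

    # Hypothetical reproduction of the formatting pass; the file list is an
    # assumption, not taken from the commit itself.
    import autopep8

    for path in ['coco/storage.py']:
        with open(path) as fp:
            source = fp.read()
        with open(path, 'w') as fp:
            fp.write(autopep8.fix_code(source))  # apply autopep8's default PEP 8 fixes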

coco/storage.py

@@ -8,6 +8,7 @@ import svgwrite
 logger = logging.getLogger('coco.storage')

 class Annotation:
     def __init__(self, result, storage):
         self.storage = storage
@@ -16,13 +17,16 @@ class Annotation:
         self.category_id = result['category_id']
         self.iscrowd = bool(result['iscrowd'])
         self.area = result['area']
-        self.bbox = [result['bbox_left'], result['bbox_top'], result['bbox_width'], result['bbox_height']]
+        self.bbox = [result['bbox_left'], result['bbox_top'],
+                     result['bbox_width'], result['bbox_height']]
         self.segments = self.fetchSegments()
         self.is_normalised = False
-        if type(result['zerkine_moment']) is list:
-            self.zerkine_moment = result['zerkine_moment'] # when normalising, this is already there
+        if 'zerkine_moment' in result and type(result['zerkine_moment']) is list:
+            # when normalising, this is already there
+            self.zerkine_moment = result['zerkine_moment']
         else:
-            self.zerkine_moment = self.parseZerkineFromDB(result['zerkine_moment']) if result['zerkine_moment'] else None
+            self.zerkine_moment = self.parseZerkineFromDB(
+                result['zerkine_moment']) if 'zerkine_moment' in result else None

     @classmethod
     def parseZerkineFromDB(cls, r):
@@ -32,7 +36,8 @@ class Annotation:
     def fetchSegments(self):
         try:
             cur = self.storage.con.cursor()
-            cur.execute("SELECT * FROM segments WHERE annotation_id = :id AND points != 'ount' AND points != 'iz'", {'id': self.id})
+            cur.execute(
+                "SELECT * FROM segments WHERE annotation_id = :id AND points != 'ount' AND points != 'iz'", {'id': self.id})
             segments = []
             for row in cur:
                 segments.append(Segment(row))
@@ -68,8 +73,7 @@ class Annotation:
             newAnn.segments[i].points = [[
                 (p[0]-self.bbox[0]) * scale,
                 (p[1]-self.bbox[1]) * scale
             ] for p in segment.points]
         return newAnn
@@ -83,7 +87,8 @@ class Annotation:
         for segment in self.segments:
             if len(pathSpecs) == 0:
                 pathSpecs['fill'] = 'white'
-            dwg.add(svgwrite.path.Path(segment.getD(), class_=f"cat_{self.category_id}", **pathSpecs))
+            dwg.add(svgwrite.path.Path(segment.getD(),
+                                       class_=f"cat_{self.category_id}", **pathSpecs))

     def getTranslationToCenter(self):
         dimensions = (self.bbox[2], self.bbox[3])
@@ -108,22 +113,25 @@ class Annotation:
             filename,
             size=dimensions,
             viewBox=" ".join([str(s) for s in viewbox])
         )
         if bg:
             dwg.add(dwg.rect(
-                (viewbox[0],viewbox[1]),
-                (viewbox[2],viewbox[3]),
+                (viewbox[0], viewbox[1]),
+                (viewbox[2], viewbox[3]),
                 fill=bg))
         self.writeToDrawing(dwg)
         return dwg

 class Segment():
     def __init__(self, result):
         try:
-            self.points = self.asCoordinates(ast.literal_eval('['+result['points']+']'))
+            self.points = self.asCoordinates(
+                ast.literal_eval('['+result['points']+']'))
         except Exception as e:
-            logger.critical(f"Exception loading segment for {result} {result['points']}")
+            logger.critical(
+                f"Exception loading segment for {result} {result['points']}")
             raise

     @classmethod
@@ -135,7 +143,7 @@ class Segment():
             points.append([
                 pointList[(i)*2],
                 pointList[(i)*2+1]
             ])
         return points

     def getD(self):
@@ -144,12 +152,13 @@ class Segment():
         for i in range(1, len(self.points)):
             p = self.points[i]
             d += f' {p[0]:.4f} {p[1]:.4f}'
         d += " Z"  # segments are always closed
         return d

     def forJson(self):
         return self.points

 class COCOStorage:
     def __init__(self, filename):
         self.logger = logging.getLogger('coco.storage')
@@ -158,7 +167,7 @@ class COCOStorage:
             con = sqlite3.connect(self.filename)
             cur = con.cursor()
             d = os.path.dirname(os.path.realpath(__file__))
-            with open(os.path.join(d,'coco.sql'), 'r') as fp:
+            with open(os.path.join(d, 'coco.sql'), 'r') as fp:
                 cur.executescript(fp.read())
             con.close()
@@ -171,7 +180,8 @@ class COCOStorage:
         self.logger.info("Create categories")
         cur = self.con.cursor()
-        cur.executemany('INSERT OR IGNORE INTO categories(id, supercategory, name) VALUES (:id, :supercategory, :name)', coco.cats.values())
+        cur.executemany(
+            'INSERT OR IGNORE INTO categories(id, supercategory, name) VALUES (:id, :supercategory, :name)', coco.cats.values())
         self.con.commit()
         self.logger.info("Images...")
@@ -183,7 +193,6 @@ class COCOStorage:
         self.logger.info("Annotations...")
         def annotation_generator():
             for c in coco.anns.values():
                 ann = c.copy()
@@ -199,14 +208,14 @@ class COCOStorage:
         ''', annotation_generator())
         self.con.commit()
         self.logger.info("Segments...")
         def segment_generator():
             for ann in coco.anns.values():
                 for i, seg in enumerate(ann['segmentation']):
                     yield {
-                        'id': ann['id']*10 + i, # create a uniqe segment id, supports max 10 segments per annotation
+                        # create a uniqe segment id, supports max 10 segments per annotation
+                        'id': ann['id']*10 + i,
                         'annotation_id': ann['id'],
                         'points': str(seg)[1:-1],
                     }
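
The 'id': ann['id']*10 + i scheme above gives every segment row a unique primary key while keeping the parent annotation recoverable, at the cost of supporting at most 10 segments per annotation. A small worked example with invented ids:

    # Illustrative values only: annotation 1409619 with three segments.
    ann_id = 1409619
    seg_ids = [ann_id * 10 + i for i in range(3)]    # [14096190, 14096191, 14096192]
    assert all(s // 10 == ann_id for s in seg_ids)   # annotation id is recoverable
    assert [s % 10 for s in seg_ids] == [0, 1, 2]    # segment index within the annotation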
@@ -217,7 +226,6 @@ class COCOStorage:
         ''', segment_generator())
         self.con.commit()
         self.logger.info("Done...")

     def getCategories(self):
@@ -243,7 +251,8 @@ class COCOStorage:
     def getAnnotationWithoutZerkine(self):
         cur = self.con.cursor()
         # annotation 918 and 2206849 have 0 height. Crashing the script... exclude them
-        cur.execute(f"SELECT * FROM annotations WHERE zerkine_moment IS NULL AND area > 0 LIMIT 1")
+        cur.execute(
+            f"SELECT * FROM annotations WHERE zerkine_moment IS NULL AND area > 0 LIMIT 1")
         ann = cur.fetchone()
         if ann:
             return Annotation(ann, self)
@@ -253,10 +262,11 @@ class COCOStorage:
     def countAnnotationsWithoutZerkine(self):
         cur = self.con.cursor()
-        cur.execute(f"SELECT count(id) FROM annotations WHERE zerkine_moment IS NULL AND area > 0")
+        cur.execute(
+            f"SELECT count(id) FROM annotations WHERE zerkine_moment IS NULL AND area > 0")
         return int(cur.fetchone()[0])

-    def storeZerkineForAnnotation(self, annotation, moments, delayCommit = False):
+    def storeZerkineForAnnotation(self, annotation, moments, delayCommit=False):
         m = ' '.join([str(m) for m in moments])
         cur = self.con.cursor()
@@ -270,24 +280,27 @@ class COCOStorage:
     def getZerkines(self):
         cur = self.con.cursor()
-        cur.execute(f"SELECT id, zerkine_moment FROM annotations WHERE zerkine_moment IS NOT NULL")
+        cur.execute(
+            f"SELECT id, zerkine_moment FROM annotations WHERE zerkine_moment IS NOT NULL")
         return cur.fetchall()

     def getAllAnnotationPoints(self):
         cur = self.con.cursor()
-        cur.execute(f"SELECT annotations.id, points FROM annotations INNER JOIN segments ON segments.annotation_id = annotations.id WHERE area > 0")
+        cur.execute(
+            f"SELECT annotations.id, points FROM annotations INNER JOIN segments ON segments.annotation_id = annotations.id WHERE area > 0")
         return cur.fetchall()

-    def getAnnotationById(self, annotation_id = None, withZerkine = False):
+    def getAnnotationById(self, annotation_id=None, withZerkine=False):
         if annotation_id == -1:
             annotation_id = None
-        return self.getRandomAnnotation(annotation_id = annotation_id, withZerkine = withZerkine)
+        return self.getRandomAnnotation(annotation_id=annotation_id, withZerkine=withZerkine)

-    def getRandomAnnotation(self, annotation_id = None, category_id = None, withZerkine = False):
-        result = self.getRandomAnnotations(annotation_id, category_id, withZerkine, limit=1)
+    def getRandomAnnotation(self, annotation_id=None, category_id=None, withZerkine=False):
+        result = self.getRandomAnnotations(
+            annotation_id, category_id, withZerkine, limit=1)
         return result[0] if len(result) else None

-    def getRandomAnnotations(self, annotation_id = None, category_id = None, withZerkine = False, limit=None):
+    def getRandomAnnotations(self, annotation_id=None, category_id=None, withZerkine=False, limit=None):
         cur = self.con.cursor()
         where = ""
         params = []
@@ -307,7 +320,8 @@ class COCOStorage:
         if limit:
             sqlLimit = f"LIMIT {int(limit)}"
-        cur.execute(f"SELECT * FROM annotations WHERE {where} ORDER BY RANDOM() {sqlLimit}", tuple(params))
+        cur.execute(
+            f"SELECT * FROM annotations WHERE {where} ORDER BY RANDOM() {sqlLimit}", tuple(params))
         results = []
         for ann in cur:
             results.append(Annotation(ann, self))
@@ -318,4 +332,3 @@
         # return Annotation(ann, self)
         # else:
         # return None

server.py (new file)

@@ -0,0 +1,105 @@
import argparse
from coco.storage import COCOStorage
import logging
import coloredlogs
import tornado.ioloop
import tornado.web
import tornado.websocket
import json

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("server")


class JsonEncoder(json.JSONEncoder):
    def default(self, obj):
        method = getattr(obj, "forJson", None)
        if callable(method):
            return obj.forJson()
        # Let the base class default method raise the TypeError
        return json.JSONEncoder.default(self, obj)


class AnnotationHandler(tornado.web.RequestHandler):
    def initialize(self, storage: COCOStorage):
        self.storage = storage
        self.set_header("Content-Type", "application/json")

    def get(self, *params):
        self.write(json.dumps(self.getData(*params), cls=JsonEncoder))

    def getData(self):
        # get specific annotation
        annotation_id = self.get_argument('id', None)
        annotation_id = None if not annotation_id else int(annotation_id)

        # get by category id
        category_id = self.get_argument('category', None)
        category_id = None if not category_id else int(category_id)

        normalise = self.get_argument('normalise', False)
        normalise = int(normalise) if normalise is not False else False
        # category_id = None if not category_id else int(category_id)

        logger.debug(
            f'Get annotation id: {annotation_id}, category: {category_id}, normalised: {normalise}')

        annotation = self.storage.getRandomAnnotation(
            annotation_id=annotation_id, category_id=category_id)

        if normalise:
            return annotation.getNormalised(normalise, normalise)
        return annotation


class StaticFileWithHeaderHandler(tornado.web.StaticFileHandler):
    def set_extra_headers(self, path):
        """For subclass to add extra headers to the response"""
        if path[-5:] == '.html':
            self.set_header("Access-Control-Allow-Origin", "*")


def make_app(storage, debug):
    return tornado.web.Application([
        (r"/annotation.json", AnnotationHandler, {'storage': storage}),
        (r"/(.*)", StaticFileWithHeaderHandler,
         {"path": 'www', "default_filename": 'index.html'}),
    ], debug=debug)


if __name__ == "__main__":
    argParser = argparse.ArgumentParser(
        description='Server for COCO web interface')
    argParser.add_argument(
        '--port',
        '-P',
        type=int,
        default=8888,
        help='Port to listen on'
    )
    argParser.add_argument(
        '--db',
        type=COCOStorage,
        metavar='DATABASE',
        dest='storage',
        help='SQLite db filename, will be created if not existing',
        default='dataset/instances_val2017.db'
    )
    argParser.add_argument(
        '--verbose',
        '-v',
        action='store_true',
        help='Increase log level'
    )
    args = argParser.parse_args()

    loglevel = logging.DEBUG if args.verbose else logging.INFO
    coloredlogs.install(
        level=loglevel,
        fmt="%(asctime)s %(hostname)s %(name)s[%(process)d] %(levelname)s %(message)s"
    )

    app = make_app(args.storage, debug=args.verbose)
    app.listen(args.port)
    logger.info(f"Listening on {args.port}")
    tornado.ioloop.IOLoop.current().start()
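
A minimal client sketch for the annotation endpoint defined above, assuming the server is running locally on its default port. The parameter values are placeholders; the query parameters map onto the arguments read in AnnotationHandler.getData():

    import json
    import urllib.request

    # 'id' requests a specific annotation, 'category' picks a random one from that
    # category, and 'normalise' rescales it via Annotation.getNormalised().
    # The values below are example values only.
    url = 'http://localhost:8888/annotation.json?category=1&normalise=100'
    with urllib.request.urlopen(url) as resp:
        annotation = json.loads(resp.read())
    print(annotation)

Serialisation of the response relies on the forJson() hook used by JsonEncoder; only Segment.forJson() is visible in this commit, so Annotation is assumed to provide one as well.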

tools.py

@@ -10,7 +10,8 @@ logger = logging.getLogger("tools")
 def create(args):
     con = args.storage.con
     cur = con.cursor()
-    cur.executemany('INSERT OR IGNORE INTO categories(id, supercategory, name) VALUES (:id, :supercategory, :name)', args.coco.cats.values())
+    cur.executemany(
+        'INSERT OR IGNORE INTO categories(id, supercategory, name) VALUES (:id, :supercategory, :name)', args.coco.cats.values())
     con.commit()

     logger.info("Images...")
@@ -22,7 +23,6 @@ def create(args):
     logger.info("Annotations...")
     def annotation_generator():
         for c in args.coco.anns.values():
             ann = c.copy()
@@ -38,14 +38,14 @@ def create(args):
     ''', annotation_generator())
     con.commit()
     logger.info("Segments...")
     def segment_generator():
         for ann in args.coco.anns.values():
             for i, seg in enumerate(ann['segmentation']):
                 yield {
-                    'id': ann['id']*10 + i, # create a uniqe segment id, supports max 10 segments per annotation
+                    # create a uniqe segment id, supports max 10 segments per annotation
+                    'id': ann['id']*10 + i,
                     'annotation_id': ann['id'],
                     'points': str(seg)[1:-1],
                 }
@@ -56,7 +56,6 @@ def create(args):
     ''', segment_generator())
     con.commit()
     logger.info("Done...")
@@ -64,7 +63,8 @@ if __name__ == "__main__":
     parser = argparse.ArgumentParser()
-    subparsers = parser.add_subparsers(title = 'subcommands', help="Use command -h for specific help")
+    subparsers = parser.add_subparsers(
+        title='subcommands', help="Use command -h for specific help")

     parser_create = subparsers.add_parser('create')
     parser_create.add_argument(
@@ -73,16 +73,16 @@ if __name__ == "__main__":
         type=pycocotools.coco.COCO,
         dest='coco',
         default='dataset/annotations/instances_val2017.json'
     )
     parser_create.add_argument(
         '--db',
         type=COCOStorage,
         metavar='DATABASE',
         dest='storage',
         help='SQLite db filename, will be created if not existing',
         default='dataset/instances_val2017.db'
     )
-    parser_create.set_defaults(target = create)
+    parser_create.set_defaults(target=create)

     # parser_build = subparsers.add_parser('build')
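
The create subcommand is wired up through set_defaults(target=create), the usual argparse dispatch pattern: each subparser registers its handler function, and the handler selected on the command line is invoked after parsing. A self-contained sketch of the pattern (names and defaults are illustrative, not the repository's exact code):

    import argparse

    def create(args):
        print(f"would build {args.db}")

    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(title='subcommands')
    parser_create = subparsers.add_parser('create')
    parser_create.add_argument('--db', default='dataset/instances_val2017.db')
    parser_create.set_defaults(target=create)

    args = parser.parse_args(['create', '--db', 'test.db'])
    args.target(args)  # dispatches to create()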