import json
import logging
import logging.handlers  # explicit import: RotatingFileHandler lives in this submodule
import os
import shutil
from urllib.error import HTTPError
import tornado.ioloop
import tornado.web
import tornado.websocket
from urllib.parse import urlparse
import uuid
import datetime
import html
import argparse
import coloredlogs
import glob
import filelock
import svganim.strokes
import svganim.uimethods
import cairosvg

logger = logging.getLogger("svganim.webserver")


class DateTimeEncoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, datetime.datetime):
            return o.isoformat(timespec="milliseconds")
        # super().default is already bound to self; passing self again would raise a TypeError
        return super().default(o)


class StaticFileWithHeaderHandler(tornado.web.StaticFileHandler):
    def set_extra_headers(self, path):
        """For subclass to add extra headers to the response"""
        if path[-5:] == ".html":
            self.set_header("Access-Control-Allow-Origin", "*")
        if path[-4:] == ".svg":
            self.set_header("Content-Type", "image/svg+xml")


class WebSocketHandler(tornado.websocket.WebSocketHandler):
    """
    Websocket endpoint for the drawing interface
    """

    # CORS_ORIGINS = ['localhost']
    connections = set()

    def initialize(self, config):
        self.config = config
        self.strokes = []
        self.hasWritten = False
        self.prev_file = None
        self.prev_file_duration = 0
        self.dimensions = [None, None]

    # def check_origin(self, origin):
    #     parsed_origin = urlparse(origin)
    #     # parsed_origin.netloc.lower() gives localhost:3333
    #     valid = any([parsed_origin.hostname.endswith(origin) for origin in self.CORS_ORIGINS])
    #     return valid

    # the client connected
    def open(self, p=None):
        self.__class__.connections.add(self)
        self.prefix = datetime.datetime.now().strftime("%Y-%m-%d-")
        self.filename = (
            self.prefix + str(self.check_filenr()) + "-" + uuid.uuid4().hex[:6]
        )
        logger.info(f"{self.filename=}")
        self.write_message(json.dumps({"filename": self.filename}))

    def check_filenr(self):
        # count today's recordings to build a readable, sequential filename
        files = glob.glob(os.path.join(self.config.storage, self.prefix + "*"))
        return len(files) + 1

    def appendEvent(self, row):
        if not self.hasWritten and self.prev_file and 'event' in row and row['event'] == 'viewbox':
            # ignore canvas movement right after preloading, before anything new is drawn
            return

        # write to an appendable json format: a file that only needs to be wrapped in [] to be json-parsable
        # TODO use jsonlines -- which is not so much different but (semi-)standardized
        with open(
            os.path.join(self.config.storage, self.filename +
                         ".json_appendable"), "a"
        ) as fp:
            if not self.hasWritten:
                if self.prev_file:
                    # TODO WIP: copy the preloaded recording so new events append to it
                    with open(
                        self.prev_file, 'r'
                    ) as fprev:
                        wrote = False
                        for line in fprev:
                            wrote = True
                            fp.write(line)
                        if wrote:
                            fp.write(",\n")

                # metadata goes on the first row, but only on demand
                fp.write(
                    json.dumps(
                        [
                            datetime.datetime.now().strftime("%Y-%m-%d %T"),
                            self.dimensions[0],
                            self.dimensions[1],
                        ]
                    )
                )
                # writer.writerow()
                self.hasWritten = True

            fp.write(",\n")
            # first column is color, rest is points
            fp.write(json.dumps(row))

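    # For reference, the resulting *.json_appendable file looks roughly like
    # (illustrative values, not verbatim output):
    #
    #   ["2022-01-01 12:00:00", 1920, 1080],
    #   {"event": "stroke", "points": [...]},
    #   {"event": "viewbox", "viewboxes": [...]}
    #
    # i.e. a comma/newline separated series of JSON values that becomes valid
    # JSON once wrapped in [].
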
    def preloadFile(self, file):
        if self.hasWritten:
            logger.error("Cannot preload once content has already been written")
            return False

        logger.info(f"load {file}")
        # TODO: make sure this cannot load a file outside of storage
        prev_file = os.path.join(
            self.config.storage, file + ".json_appendable")
        if not os.path.exists(prev_file):
            logger.error(f"Cannot preload non-existent file: {prev_file}")
            self.write_message(json.dumps(
                {"error": f"Non-existent file: {file}"}))
            return False

        self.prev_file = prev_file

        metadata = self.getFileMetadata(self.prev_file)
        self.prev_file_duration = self.getLastTimestampInFile(self.prev_file)
        logger.info(
            f"Previous file set. {self.prev_file} {metadata=} time: {self.prev_file_duration}")

        self.write_message(json.dumps(
            {"preloaded_svg": f"/drawing/{file}", "dimensions": [metadata[1], metadata[2]], "time": self.prev_file_duration}))

    def getFileMetadata(self, filename):
        with open(filename, "r") as fp:
            first_line = fp.readline().strip()
            if first_line.endswith(","):
                first_line = first_line[:-1]

            metadata = json.loads(first_line)

        return metadata

    def getLastTimestampInFile(self, filename):
        with open(filename, "r") as fp:
            for line in fp:
                pass  # loop until the last line
            last_line = line.strip()
            if last_line.endswith(","):
                last_line = last_line[:-1]

            data = json.loads(last_line)
            if type(data) is list:
                raise Exception("Oddly, the file ends with merely metadata")

            if data['event'] == 'stroke':
                return data['points'][-1][3]
            elif data['event'] == 'viewbox':
                return data['viewboxes'][-1]['t']
            else:
                raise Exception("Unknown last event")

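    # Incoming messages are JSON objects with an "event" field. As handled below:
    #   "stroke":     {"points": [[...], ...]}, where index 3 of each point is a timestamp
    #   "dimensions": {"width": ..., "height": ...}
    #   "viewbox":    {"viewboxes": [{"t": ...}, ...]}
    #   "preload":    {"file": "<recording id>"}
    # (shapes inferred from the handlers below, not a formal spec)
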
    # the client sent a message
    def on_message(self, message):
        logger.info(f"receive: {message}")

        try:
            msg = json.loads(message)
            if msg["event"] == "stroke":
                logger.info("stroke")
                for i in range(len(msg['points'])):
                    msg['points'][i][3] += self.prev_file_duration
                self.appendEvent(msg)
            elif msg["event"] == "dimensions":
                self.dimensions = [int(msg["width"]), int(msg["height"])]
                logger.info(f"{self.dimensions=}")
            elif msg["event"] == "viewbox":
                logger.info("move or resize")
                if len(msg['viewboxes']) == 0:
                    logger.warning("Empty viewbox array")
                else:
                    for i in range(len(msg['viewboxes'])):
                        msg['viewboxes'][i]['t'] += self.prev_file_duration
                    self.appendEvent(msg)
            elif msg["event"] == "preload":
                self.preloadFile(msg["file"])
            else:
                # self.send({'alert': 'Unknown request: {}'.format(message)})
                logger.warning("Unknown request: {}".format(message))

        except Exception as e:
            # self.send({'alert': 'Invalid request: {}'.format(e)})
            logger.exception(e)

    # client disconnected
    def on_close(self):
        self.__class__.rmConnection(self)

        logger.info(f"Client disconnected: {self.request.remote_ip}")

    @classmethod
    def rmConnection(cls, client):
        if client not in cls.connections:
            return
        cls.connections.remove(client)

    @classmethod
    def hasConnection(cls, client):
        return client in cls.connections


class AudioListingHandler(tornado.web.RequestHandler):
    def initialize(self, config):
        self.config = config
        self.audiodir = os.path.join(self.config.storage, "audio")

    def get(self):
        # filename = self.get_argument("file", None)
        self.set_header("Content-Type", "application/json")
        if not os.path.exists(self.audiodir):
            names = []
        else:
            names = sorted(
                [
                    f"/audio/{name}"
                    for name in os.listdir(self.audiodir)
                    if name not in [".gitignore"]
                ]
            )
        print(names)
        self.write(json.dumps(names))


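# Mounted at /files/ (see Server.start below): GET /files/ returns a JSON listing
# of all recordings; GET /files/<id>[.svg|.png|.mp3|.wav] renders a single
# recording, optionally sliced with the t_in and t_out query arguments.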
class AnimationHandler(tornado.web.RequestHandler):
    def initialize(self, config, index: svganim.strokes.AnnotationIndex):
        self.config = config
        self.index = index

    async def get(self, filename):
        # filename = self.get_argument("file", None)
        if filename == "":
            self.set_header("Content-Type", "application/json")
            files = []
            names = [
                name
                for name in os.listdir(self.config.storage)
                if name.endswith("json_appendable")
            ]
            for name in names:
                fn = os.path.join(self.config.storage, name)
                stat = os.stat(fn)
                if stat.st_size == 0:
                    continue

                with open(fn, "r") as fp:
                    first_line = fp.readline().strip()
                    if first_line.endswith(","):
                        first_line = first_line[:-1]

                drawing_specs = json.loads(first_line)
                drawing_id = name[:-16]
                md = self.index.drawings[drawing_id].get_metadata() if drawing_id in self.index.drawings else {}
                title = md['title'] if 'title' in md else None
                files.append(
                    {
                        "name": f"/files/{drawing_id}",
                        "id": drawing_id,
                        "title": title,
                        "ctime": drawing_specs[0],
                        "mtime": datetime.datetime.fromtimestamp(stat.st_mtime).strftime("%Y-%m-%d %T"),
                        "dimensions": [drawing_specs[1], drawing_specs[2]],
                        "svg": f"/drawing/{drawing_id}.svg",
                    }
                )

            files.sort(key=lambda k: k["mtime"])
            self.write(json.dumps(files))
        else:
            if filename[-4:] == ".svg":
                extension = "svg"
                filename = filename[:-4]
            elif filename[-4:] == ".png":
                extension = "png"
                filename = filename[:-4]
            elif filename[-4:] == ".mp3":
                extension = "mp3"
                filename = filename[:-4]
            elif filename[-4:] == ".wav":
                extension = "wav"
                filename = filename[:-4]
            else:
                extension = None

            logger.info(f"file {filename=}, {extension=}")
            # if annotation_id not in self.index.annotations:
            #     raise tornado.web.HTTPError(404)

            # annotation = self.index.annotations[annotation_id]

            t_in = self.get_argument('t_in', None)
            t_out = self.get_argument('t_out', None)

            animation = self.index.drawings[filename].get_animation()

            if t_in is not None and t_out is not None:
                animation = animation.getSlice(float(t_in), float(t_out))

            if extension == "svg":
                self.set_header("Content-Type", "image/svg+xml")
                self.write(animation.get_as_svg())
            elif extension == "png":
                self.set_header("Content-Type", "image/png")
                svgstring = animation.get_as_svg()
                self.write(cairosvg.svg2png(bytestring=svgstring))
            elif extension == "mp3":
                self.set_header("Content-Type", "audio/mp3")
                audio = await animation.audio.export(format="mp3")
                self.write(audio.read())
            elif extension == "wav":
                self.set_header("Content-Type", "audio/wav")
                audio = await animation.audio.export(format="wav")
                self.write(audio.read())
            else:
                self.set_header("Content-Type", "application/json")
                self.write(json.dumps(animation.asDict()))


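# The AnnotationIndex (svganim.strokes.AnnotationIndex) acts as a cache that, as
# used in this module, exposes .drawings, .annotations and .tags mappings, a
# .root_tag tree, plus has_tag(), get_nested_annotations_for_tag() and refresh().
# (summary of its use here, not its full interface)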
class TagHandler(tornado.web.RequestHandler):
    """List all tags"""

    def initialize(self, config, index: svganim.strokes.AnnotationIndex):
        self.config = config
        self.index = index
        self.metadir = os.path.join(self.config.storage, "metadata")

    def get(self):
        self.set_header("Content-Type", "application/json")
        tags = self.index.tags.keys()
        self.write(json.dumps(list(tags)))


class TagAnnotationsHandler(tornado.web.RequestHandler):
    """List all annotations for a given tag"""

    def initialize(self, config, index: svganim.strokes.AnnotationIndex):
        self.config = config
        self.index = index
        self.metadir = os.path.join(self.config.storage, "metadata")

    def get(self, tag):
        if not self.index.has_tag(tag):
            raise tornado.web.HTTPError(404)

        self.set_header("Content-Type", "application/json")
        # annotations = self.index.tags[tag]
        # self.write(json.dumps(list([a.id for a in annotations])))
        annotations = self.index.get_nested_annotations_for_tag(tag)
        self.write(json.dumps([{
            "id": annotation.id,
            "tag": annotation.tag,
            "id_hash": svganim.uimethods.annotation_hash(input=annotation.id),
            "url": annotation.getJsonUrl(),
            "comment": annotation.comment,
            "drawing": annotation.drawing.get_url()
        } for annotation in annotations]))


class AnnotationHandler(tornado.web.RequestHandler):
    """Serve a single annotation as SVG, PNG, audio or JSON"""

    def initialize(self, config, index: svganim.strokes.AnnotationIndex):
        self.config = config
        self.index = index
        self.metadir = os.path.join(self.config.storage, "metadata")

    def get(self, annotation_id):
        if annotation_id[-4:] == ".svg":
            extension = "svg"
            annotation_id = annotation_id[:-4]
        elif annotation_id[-4:] == ".png":
            extension = "png"
            annotation_id = annotation_id[:-4]
        elif annotation_id[-4:] == ".mp3":
            extension = "mp3"
            annotation_id = annotation_id[:-4]
        elif annotation_id[-4:] == ".wav":
            extension = "wav"
            annotation_id = annotation_id[:-4]
        else:
            extension = None

        logger.info(f"annotation {annotation_id=}, {extension=}")
        if annotation_id not in self.index.annotations:
            raise tornado.web.HTTPError(404)

        annotation = self.index.annotations[annotation_id]

        if extension == "svg":
            self.set_header("Content-Type", "image/svg+xml")
            self.set_header("Cache-Control", "max-age=31536000, immutable")

            self.write(annotation.get_as_svg())
        elif extension == "png":
            self.set_header("Content-Type", "image/png")
            svgstring = annotation.get_as_svg()
            self.write(cairosvg.svg2png(bytestring=svgstring))
        elif extension == "mp3":
            self.set_header("Content-Type", "audio/mp3")
            self.write(annotation.getAnimationSlice().audio.export(format="mp3").read())
        elif extension == "wav":
            self.set_header("Content-Type", "audio/wav")
            self.write(annotation.getAnimationSlice().audio.export(format="wav").read())
        else:
            self.set_header("Content-Type", "application/json")
            self.write(json.dumps({
                "id": annotation.id,
                "tag": annotation.tag,
                "audio": f"/annotation/{annotation.id}.mp3",
            }))

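    # The drawing's metadata JSON (as used here) contains an "annotations" list of
    # {"t_in": ..., "t_out": ..., "tag": ...} entries; post() rewrites the matching
    # entry's tag and then refreshes the index.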
    def post(self, annotation_id):
        """change tag for given annotation"""
        if annotation_id not in self.index.annotations:
            raise tornado.web.HTTPError(404)

        # might be set on file level, but let's try to avoid issues by keeping it simple
        lock = filelock.FileLock("metadata_write.lock", timeout=10)
        with lock:
            newTagId = self.get_argument('tag_id')
            if not self.index.has_tag(newTagId):
                raise tornado.web.HTTPError(400)

            annotation: svganim.strokes.Annotation = self.index.annotations[annotation_id]

            logger.info(f"change tag from {annotation.tag} to {newTagId}")

            # change metadata and reload index
            metadata = annotation.drawing.get_metadata()
            change = False
            for idx, ann in enumerate(metadata['annotations']):
                if ann['t_in'] == annotation.t_in and ann['t_out'] == annotation.t_out and annotation.tag == ann['tag']:
                    # found the matching annotation
                    metadata['annotations'][idx]['tag'] = newTagId
                    change = True
                    break

            if not change:
                # no matching annotation entry in the metadata; refuse the update
                raise tornado.web.HTTPError(409)

            with open(annotation.drawing.metadata_fn, "w") as fp:
                logger.info(f"save tag in {annotation.drawing.metadata_fn}")
                json.dump(metadata, fp)

            self.index.refresh()


class DrawingHandler(tornado.web.RequestHandler):
    """Serve a single drawing as SVG, PNG, audio or JSON"""

    def initialize(self, config, index: svganim.strokes.AnnotationIndex):
        self.config = config
        self.index = index
        self.metadir = os.path.join(self.config.storage, "metadata")

    def get(self, drawing_id):
        if drawing_id[-4:] == ".svg":
            extension = "svg"
            drawing_id = drawing_id[:-4]
        elif drawing_id[-4:] == ".png":
            extension = "png"
            drawing_id = drawing_id[:-4]
        elif drawing_id[-4:] == ".mp3":
            extension = "mp3"
            drawing_id = drawing_id[:-4]
        elif drawing_id[-4:] == ".wav":
            extension = "wav"
            drawing_id = drawing_id[:-4]
        else:
            extension = None

        logger.info(f"drawing {drawing_id=}, {extension=}")
        if drawing_id not in self.index.drawings:
            self.index.refresh()
            # double check
            if drawing_id not in self.index.drawings:
                raise tornado.web.HTTPError(404)

        drawing = self.index.drawings[drawing_id]

        if extension == "svg":
            self.set_header("Content-Type", "image/svg+xml")
            self.write(drawing.get_animation().get_as_svg())
        elif extension == "png":
            self.set_header("Content-Type", "image/png")
            svgstring = drawing.get_animation().get_as_svg()
            self.write(cairosvg.svg2png(bytestring=svgstring))
        elif extension == "mp3":
            self.set_header("Content-Type", "audio/mp3")
            self.write(drawing.get_animation().audio.export(format="mp3").read())
        elif extension == "wav":
            self.set_header("Content-Type", "audio/wav")
            self.write(drawing.get_animation().audio.export(format="wav").read())
        else:
            self.set_header("Content-Type", "application/json")
            self.write(json.dumps({
                "id": drawing.id,
                "annotations_url": drawing.get_annotations_url(),
                "audio": f"/drawing/{drawing.id}.mp3",
                "svg": f"/drawing/{drawing.id}.svg",
            }))


class AnnotationsHandler(tornado.web.RequestHandler):
    def initialize(self, config):
        self.config = config
        self.metadir = os.path.join(self.config.storage, "metadata")

    def prepare(self):
        if self.request.headers.get("Content-Type", "").startswith("application/json"):
            self.json_args = json.loads(self.request.body)
        else:
            self.json_args = None

    def get_filenames(self):
        return [
            name[:-16]
            for name in os.listdir(self.config.storage)
            if name.endswith("json_appendable")
        ]

    def get(self, filename):
        self.set_header("Content-Type", "application/json")
        filenames = self.get_filenames()

        print(filenames, filename)

        if filename not in filenames:
            raise tornado.web.HTTPError(404)

        meta_file = os.path.join(self.metadir, filename + ".json")
        if not os.path.exists(meta_file):
            self.set_status(404)
            return

        with open(meta_file, "r") as fp:
            self.write(json.load(fp))

    def post(self, filename):
        # filename = self.argument("file", None)

        filenames = self.get_filenames()
        print(filenames, filename)

        if filename not in filenames:
            raise tornado.web.HTTPError(404)

        if not os.path.exists(self.metadir):
            os.mkdir(self.metadir)

        meta_file = os.path.join(self.metadir, filename + ".json")
        with open(meta_file, "w") as fp:
            json.dump(self.json_args, fp)


class TagsHandler(tornado.web.RequestHandler):
    def initialize(self, config, index: svganim.strokes.AnnotationIndex) -> None:
        self.config = config
        self.index = index

    def get(self):
        self.set_header("Content-Type", "application/json")
        self.write(self.index.root_tag.toJson(with_counts=True))
        # with open('www/tags.json', 'r') as fp:
        #     # TODO: enrich with counts
        #     self.write(fp.read())

    def put(self):
        # data = json.loads(self.request.body)
        tree = svganim.strokes.loadTagFromJson(self.request.body)
        logger.info(f"New tag tree:\n{tree}")
        newTagsContent = tree.toJson()
        # back up the current tags, keyed at minute resolution
        now = datetime.datetime.utcnow().isoformat(timespec='minutes')

        backup_dir = os.path.join(self.config.storage, 'tag_versions')
        if not os.path.exists(backup_dir):
            logger.warning(f"Creating tags backup dir {backup_dir}")
            os.mkdir(backup_dir)

        bakfile = os.path.join(backup_dir, f'tags.{now}.json')
        logger.info(f"Creating tags backup {bakfile}")
        shutil.copyfile('www/tags.json', bakfile)

        with open('www/tags.json', 'w') as fp:
            fp.write(newTagsContent)

        # refresh so that the new tags are loaded into the index
        self.index.refresh()

        self.set_status(204)
        # print()


class IndexHandler(tornado.web.RequestHandler):
    """Render the annotation index page"""

    def initialize(self, config, index: svganim.strokes.AnnotationIndex):
        self.config = config
        self.index = index

    def get(self):
        do_refresh = bool(self.get_query_argument('refresh', False))
        if do_refresh:
            logger.info("Reloading Annotation Index")
            self.index.refresh()
            logger.info("\treloaded annotation index")

        self.render("templates/index.html", index=self.index)


class Server:
    """
    Webserver for the drawing tool: records strokes over a websocket and serves
    drawings, annotations and tags, as well as the static interface files.
    """

    loop = None

    def __init__(self, config, logger):
        self.config = config
        self.logger = logger

        # self.config['server']['port']
        self.web_root = os.path.join("www")

        if not os.path.exists(self.config.storage):
            raise NotADirectoryError("Provided files directory doesn't exist.")

        self.index = svganim.strokes.AnnotationIndex(
            os.path.join(self.config.storage, "annotation_index.shelve"),
            self.config.storage,
            os.path.join(self.config.storage, "metadata"),
        )
        self.logger.info("Loading Annotation Index")
        self.index.refresh()
        self.logger.info("\tloaded annotation index")

    def start(self):
        application = tornado.web.Application(
            [
                (
                    r"/ws(.*)",
                    WebSocketHandler,
                    {
                        "config": self.config,
                    },
                ),
                (r"/files/(.*)", AnimationHandler,
                 {"config": self.config, "index": self.index}),
                (
                    r"/audio/(.+)",
                    tornado.web.StaticFileHandler,
                    {"path": os.path.join(self.config.storage, "audio")},
                ),
                (r"/audio", AudioListingHandler, {"config": self.config}),
                (r"/annotations/(.+)", AnnotationsHandler,
                 {"config": self.config}),
                (r"/tags", TagHandler,
                 {"config": self.config, "index": self.index}),
                (
                    r"/tags/(.+)",
                    TagAnnotationsHandler,
                    {"config": self.config, "index": self.index},
                ),
                (
                    r"/annotation/(.+)",
                    AnnotationHandler,
                    {"config": self.config, "index": self.index},
                ),
                (
                    r"/drawing/(.+)",
                    DrawingHandler,
                    {"config": self.config, "index": self.index},
                ),
                (r"/index", IndexHandler,
                 {"config": self.config, "index": self.index}),
                (r"/tags.json", TagsHandler,
                 {"config": self.config, "index": self.index}),
                (r"/(.*)", StaticFileWithHeaderHandler,
                 {"path": self.web_root, 'default_filename': 'index.html'}),
            ],
            debug=True,
            autoreload=True,
            ui_methods=svganim.uimethods,
        )
        application.listen(self.config.port)
        tornado.ioloop.IOLoop.current().start()


if __name__ == "__main__":
    argParser = argparse.ArgumentParser(
        description="Start up the vector animation server"
    )
    # argParser.add_argument(
    #     '--config',
    #     '-c',
    #     required=True,
    #     type=str,
    #     help='The yaml config file to load'
    # )
    argParser.add_argument("--port", type=int, default=7890, help="Port")
    argParser.add_argument(
        "--storage", type=str, default="files", help="directory name for output files"
    )
    argParser.add_argument(
        "--logfile", type=str, default=None, help="log file to output to"
    )
    argParser.add_argument("--verbose", "-v", action="count", default=0)

    args = argParser.parse_args()

    loglevel = (
        logging.NOTSET
        if args.verbose > 1
        else logging.DEBUG
        if args.verbose > 0
        else logging.INFO
    )

    coloredlogs.install(
        level=loglevel,
        # default: "%(asctime)s %(hostname)s %(name)s[%(process)d] %(levelname)s %(message)s"
        fmt="%(asctime)s %(hostname)s %(name)s[%(process)d,%(threadName)s] %(levelname)s %(message)s",
    )

    # File logging
    if args.logfile is not None:
        formatter = logging.Formatter(
            fmt="%(asctime)s %(module)s:%(lineno)d %(levelname)8s | %(message)s",
            datefmt="%Y/%m/%d %H:%M:%S",
        )  # %I:%M:%S %p AM|PM format
        logFileHandler = logging.handlers.RotatingFileHandler(
            args.logfile, maxBytes=1024 * 512, backupCount=5
        )
        logFileHandler.setFormatter(formatter)

        logger.addHandler(logFileHandler)

    logger.info(f"Start server: http://localhost:{args.port}")

    server = Server(args, logger)
    server.start()
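
# Example invocation (assuming this file is saved as webserver.py; adjust the
# filename to your checkout):
#   python webserver.py --storage files --port 7890 -v
# One -v enables DEBUG logging, -vv sets the level to NOTSET; --logfile adds a
# rotating log file next to the coloured console output.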