Option to show whole images as background in player

Ruben van de Ven 2023-02-27 16:02:12 +01:00
parent 4d08b0b4ad
commit 3ed2448545
3 changed files with 41 additions and 11 deletions

View file

@@ -119,7 +119,7 @@ class Drawing:
for p in event["points"]],
)
)
-return AnimationSlice([self.id, None, None], strokes, viewboxes, audioslice=self.get_audio())
+return AnimationSlice(self, [self.id, None, None], strokes, viewboxes, audioslice=self.get_audio())
def get_metadata(self):
canvas = self.get_canvas_metadata()
@@ -159,17 +159,17 @@ class AnimationSlice:
# either a whole drawing or the result of applying an annotation to a drawing (an excerpt)
# TODO rename to AnimationSlice to include audio as well
def __init__(
-self, slice_id: SliceId, strokes: list[Stroke], viewboxes: list[TimedViewbox] = [], t_in: float = 0, t_out: float = None, audioslice: AudioSlice = None
+self, drawing: Drawing, slice_id: SliceId, strokes: list[Stroke], viewboxes: list[TimedViewbox] = [], t_in: float = 0, t_out: float = None, audioslice: AudioSlice = None
) -> None:
+self.drawing = drawing
self.id = slice_id
self.strokes = strokes
self.viewboxes = viewboxes
self.t_in = t_in
self.t_out = t_out
self.audio = audioslice
# TODO: Audio
-def asDict(self) -> dict:
+def asDict(self, include_full_drawing=False) -> dict:
"""Can be used to json-ify the animation-slice
"""
@@ -187,8 +187,11 @@ class AnimationSlice:
"shape": [s.asDict() for s in self.strokes],
"viewboxes": boxes,
"bounding_box": self.get_bounding_box().__dict__,
-"audio": self.getAudioDict() if self.audio else None
+"audio": self.getAudioDict() if self.audio else None,
}
+if include_full_drawing:
+    print(type(self.drawing))
+    drawing["background"] = [s.get_as_d() for s in self.drawing.get_animation().strokes]
return drawing
def getAudioDict(self):
@@ -249,7 +252,7 @@ class AnimationSlice:
viewboxes = self.getViewboxesSlice(t_in, t_out)
audio = self.audio.getSlice(t_in, t_out) if self.audio else None
-return AnimationSlice([self.id[0], t_in, t_out], strokes, viewboxes, t_in, t_out, audio)
+return AnimationSlice(self.drawing, [self.id[0], t_in, t_out], strokes, viewboxes, t_in, t_out, audio)
def get_as_svg_dwg(self) -> svgwrite.Drawing:
box = self.get_bounding_box()
@@ -406,7 +409,7 @@ class AudioSlice:
self.t_out = t_out # in ms
self.offset = offset # in ms TODO: use from self.drawing metadata
-def getSlice(self, t_in: float, t_out: float) -> AnimationSlice:
+def getSlice(self, t_in: float, t_out: float) -> AudioSlice:
return AudioSlice(self.filename, self.drawing, t_in, t_out, self.offset)
def asDict(self):

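The serialization change above boils down to one optional flag. Below is a minimal, self-contained sketch of the pattern, with simplified stand-in classes rather than the real Drawing/AnimationSlice; names and fields here are hypothetical.

# Simplified stand-ins to show the include_full_drawing flag in isolation.
from dataclasses import dataclass, field


@dataclass
class FullDrawing:
    # precomputed SVG path "d" strings for every stroke of the whole drawing
    stroke_ds: list[str] = field(default_factory=list)


@dataclass
class Slice:
    drawing: FullDrawing
    shape: list[dict] = field(default_factory=list)

    def as_dict(self, include_full_drawing: bool = False) -> dict:
        data = {"shape": self.shape}
        if include_full_drawing:
            # the whole drawing rides along as a flat list of path strings,
            # which the player can render behind the annotated slice
            data["background"] = list(self.drawing.stroke_ds)
        return data


print(Slice(FullDrawing(["M0,0 L10,10"])).as_dict(include_full_drawing=True))
# {'shape': [], 'background': ['M0,0 L10,10']}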
View file

@@ -280,7 +280,7 @@ class ExportHandler(tornado.web.RequestHandler):
logger.info('write json')
-data = animation.asDict()
+data = animation.asDict(include_full_drawing=True)
data['audio']['file'] = f'annotation-{identifier}.mp3';
archive.writestr(f'annotation-{identifier}.json', json.dumps(data))
@@ -406,7 +406,7 @@ class AnimationHandler(tornado.web.RequestHandler):
self.write(audio.read())
else:
self.set_header("Content-Type", "application/json")
-self.write(json.dumps(animation.asDict()))
+self.write(json.dumps(animation.asDict(include_full_drawing=True)))
class TagHandler(tornado.web.RequestHandler):

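Both handlers now pass include_full_drawing=True, so the JSON written to the export archive and served by the animation endpoint gains a "background" key. A hypothetical consumer of the exported zip could look like the sketch below; the archive name and identifier are placeholders, while the member names and JSON keys come from the diff above.

import json
import zipfile

identifier = "my-annotation"                      # placeholder identifier
with zipfile.ZipFile("export.zip") as archive:    # placeholder archive name
    data = json.loads(archive.read(f"annotation-{identifier}.json"))

print(len(data["shape"]), "strokes in the annotated slice")
if data["audio"]:
    print("audio file:", data["audio"]["file"])   # annotation-<identifier>.mp3
# new in this commit: the full drawing as precomputed path "d" strings
for d in data.get("background", []):
    print("background path:", d[:40])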
View file

@@ -75,6 +75,21 @@ class StrokeGroup {
}
return d;
}
+setPrecomputedStrokes(strokeDs) {
+    const pathEls = this.g.querySelectorAll('path');
+    for (let pathEl of pathEls) {
+        pathEl.parentNode.removeChild(pathEl);
+    }
+    strokeDs.forEach((strokeD, index) => {
+        let pathEl = document.createElementNS('http://www.w3.org/2000/svg', 'path');
+        // pathEl.style.stroke = stroke.color;
+        // pathEl.classList.add('path');
+        pathEl.setAttribute('d', strokeD);
+        this.g.appendChild(pathEl);
+    });
+}
}
class Stroke {
@@ -238,7 +253,7 @@ class Annotator extends EventTarget {
this._currentTimeMs = 0;
this.videoIsPlaying = false;
-const groups = ['before', 'annotation', 'after']
+const groups = ['background', 'before', 'annotation', 'after']
this.strokeGroups = {};
groups.forEach(group => {
let groupEl = document.createElementNS('http://www.w3.org/2000/svg', 'g');
@@ -738,6 +753,7 @@ class Annotator extends EventTarget {
this.filename = drawing.file;
this.strokes = drawing.shape.map(s => new Stroke(s['color'], s['points']));
+this.backgroundStrokes = drawing.hasOwnProperty('background') ? drawing.background : [];
this.viewboxes = drawing.viewboxes;
this.currentPathI = null;
this.currentPointI = null;
@@ -761,6 +777,10 @@ class Annotator extends EventTarget {
this.nextViewboxTimeout = null;
this._setPausedFlag(true);
+if(this.backgroundStrokes && this.backgroundStrokes.length){
+    this.strokeGroups['background'].setPrecomputedStrokes(this.backgroundStrokes)
+}
return this.setupAudioConfig().then(() => {
// this.setUpAnnotator()
let keyEl;
@@ -1035,6 +1055,7 @@ class Annotator extends EventTarget {
return slices;
}
// TODO: when drawing, have a group active & inactive.
// active is getPathRange(currentIn, currentOut)
// inactive is what comes before and after.
@@ -1664,6 +1685,13 @@ class AnnotationPlayer extends HTMLElement {
.hide-drawing-preview g.after path, .hide-drawing-preview path.before_in{
opacity:0;
}
+.background{
+    visibility: hidden
+}
+.play:not(.cropped-to-selection) .background{
+    visibility: visible;
+}
.gray {
position: absolute;
@@ -1705,7 +1733,6 @@
/*text-decoration: line-through;*/
font-weight:bold;
}
`;
this.shadowRoot.appendChild(styleEl);
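On the client, setPrecomputedStrokes() simply turns each "background" entry into a path element inside the new background group, which the added CSS keeps hidden until playback leaves the cropped selection. For illustration only, here is the same idea transposed to the Python side with svgwrite (already used by get_as_svg_dwg() in the drawing module); the path data below is made up.

# Rough server-side analogue of StrokeGroup.setPrecomputedStrokes():
# render a list of precomputed path "d" strings into one SVG group.
import svgwrite

background_ds = ["M0,0 L100,100", "M10,80 Q52,10 95,80"]  # placeholder data

dwg = svgwrite.Drawing("background.svg", size=("200px", "200px"))
group = dwg.g(id="background")
for d in background_ds:
    # one <path> per precomputed stroke, much as the player appends them to its <g>
    group.add(dwg.path(d=d, fill="none", stroke="black"))
dwg.add(group)
dwg.save()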