Fix playback of empty slice.
parent 4cc69a4d29
commit 06bdd0dad1
2 changed files with 35 additions and 12 deletions
@@ -232,7 +232,7 @@ class AnimationSlice:
         strokes = self.getStrokeSlices(frame_in, frame_out, t_in)
         # TODO shift t of points with t_in
         viewboxes = self.getViewboxesSlice(t_in, t_out)
-        print(viewboxes[0])
+
         audio = self.audio.getSlice(t_in, t_out) if self.audio else None
         return AnimationSlice([self.id[0], t_in, t_out], strokes, viewboxes, t_in, t_out, audio)
@@ -287,8 +287,17 @@ class AnimationSlice:
     ) -> list[Stroke]:
         """Get list of Stroke/StrokeSlice based in in and out indexes
         Based on annotation.js getStrokesSliceForPathRange(in_point, out_point)
+        If either in point or out point is [None, None], return an empty set.
         """
         slices = []
+        if index_in[0] is None and index_in[1] is None:
+            # If no inpoint is set, in_point is after the last stroke
+            return slices
+
+        if index_out[0] is None and index_out[1] is None:
+            # If no out point is set, out_point is before the last stroke
+            return slices
+
         for i in range(index_in[0], index_out[0] + 1):
             try:
                 stroke = self.strokes[i]
@@ -308,8 +317,8 @@ class AnimationSlice:
         The In point version (so the first index after ms)
         Equal to annotations.js findPositionForTime(ms)
         """
-        path_i = 0
-        point_i = 0
+        path_i = None
+        point_i = None
         for i, stroke in enumerate(self.strokes):
             start_at = stroke.points[0].t
             end_at = stroke.points[-1].t
@@ -329,6 +338,9 @@ class AnimationSlice:
                 if point.t > ms:
                     break  # stop when finding the next point after in point
                 break  # done :-)
+        if path_i is None or point_i is None:
+            logger.warn("in point after last stroke. Not sure if this works")
+            pass
         return (path_i, point_i)

     def getIndexForOutPoint(self, ms: Milliseconds) -> FrameIndex:
@@ -342,8 +354,8 @@ class AnimationSlice:
         """Get the frame index (path, point) based on the given time
         Equal to annotations.js findPositionForTime(ms)
         """
-        path_i = 0
-        point_i = 0
+        path_i = None
+        point_i = None
         for i, stroke in enumerate(self.strokes):
             start_at = stroke.points[0].t
             end_at = stroke.points[-1].t
@@ -363,6 +375,10 @@ class AnimationSlice:
                 # best option thus far
                 path_i = i
                 point_i = len(stroke.points) - 1
+
+        if path_i is None or point_i is None:
+            logger.warn("OUT point after last stroke. Not sure if this works")
+            pass
         return (path_i, point_i)

 audiocache = {}
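Those sentinels come from the two lookups above: path_i and point_i now start as None rather than 0, so a time that falls after the last stroke surfaces as (None, None) instead of silently pointing at the first stroke. A rough self-contained sketch of that behaviour, with invented timestamps in place of the real point data:

# Rough sketch: each "stroke" is just a list of point timestamps, much
# simpler than the real Stroke objects.
def index_for_in_point(strokes, ms):
    path_i = None   # was 0 before this commit
    point_i = None  # was 0 before this commit
    for i, points in enumerate(strokes):
        for j, t in enumerate(points):
            if t >= ms:
                path_i, point_i = i, j
                break
        if path_i is not None:
            break
    return (path_i, point_i)

strokes = [[0, 100, 200], [300, 400, 500]]
print(index_for_in_point(strokes, 250))    # -> (1, 0)
print(index_for_in_point(strokes, 9000))   # -> (None, None): past the last stroke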
@@ -717,7 +717,7 @@ class Annotator extends EventTarget {
         // bgEl.classList.add('background');
         // this.svgEl.prepend(bgEl);

-        this.firstFrameTime = this.strokes[0].points[0][3];
+        this.firstFrameTime = this.strokes.length == 0 ? 0 : this.strokes[0].points[0][3];
         this.lastFrameTime = this.getFinalFrameTime();
         this.playheadEl.max = this.lastFrameTime;
         this.nextFrameTimeout = null;
@@ -817,7 +817,7 @@ class Annotator extends EventTarget {
         this.audioEl.setAttribute('controls', true);

         this.audioEl.addEventListener('canplaythrough', (ev) => {
-            console.log('loaded audio', ev);
+            console.debug('loaded audio');
             // this.audioEl.play();
         });
@@ -959,6 +959,7 @@ class Annotator extends EventTarget {
     }

     getFinalFrameTime() {
+        if(this.strokes.length == 0) return null; // when no strokes are loaded (eg. for annotation)
         const points = this.strokes[this.strokes.length - 1].points;
         return points[points.length - 1][3];
     }
@@ -1093,9 +1094,15 @@ class Annotator extends EventTarget {
     }

     playStrokePosition(path_i, point_i, allow_interrupt) {
+        if (this.strokes.length === 0) {
+            console.debug('No video to play back');
+            this.videoIsPlaying = false;
+            return;
+        }
+
         if (allow_interrupt) {
             if (!this.videoIsPlaying) {
-                console.log('not playing because of interrupt');
+                console.debug('not playing because of interrupt');
                 return;
             }
         } else {
@@ -1121,7 +1128,7 @@ class Annotator extends EventTarget {
     playViewboxPosition(box_i, allow_interrupt) {
         if (allow_interrupt) {
             if (!this.videoIsPlaying) {
-                console.log('not playing because of interrupt');
+                console.debug('not playing because of interrupt');
                 return;
             }
         }
@@ -1196,7 +1203,7 @@ class Annotator extends EventTarget {
         this._setPausedFlag(false);

         const startPlayback = () => {
-            console.log('start playback');
+            console.debug('start playback');
             this.wrapperEl.classList.remove('loading'); // no loading anymore

             this.startTimeMs = window.performance.now() - this._currentTimeMs;
@@ -1221,7 +1228,7 @@ class Annotator extends EventTarget {
         }

         if (this.audioEl.src.length && this.audioEl.readyState !== 4) { // not ready to play after seeking audio.
-            console.log('wait for audio before playback');
+            console.debug('wait for audio before playback');
             this.wrapperEl.classList.add('loading');
             this.audioEl.addEventListener('canplaythrough', () => {
                 startPlayback()