Annotator now supports player mode. Buffer audio before playback.

Ruben van de Ven 2022-05-24 21:23:03 +02:00
parent d55c5b5486
commit eb6d31742e
2 changed files with 289 additions and 182 deletions

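A quick usage sketch of the new constructor option (the element id and file names below are placeholders, not part of this commit):

    // In player mode annotations are not loaded and the annotator UI is not built;
    // crop_to_fit skips the viewport animation and shows the whole drawing instead.
    const wrapperEl = document.getElementById('player');                    // placeholder element
    const player = new Annotator(wrapperEl, 'tags.json', 'drawing.json', {  // placeholder files
      is_player: true,
      crop_to_fit: true,
    });
    // Omitting the fourth argument keeps the old behaviour: both flags default to false.
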
@@ -98,6 +98,7 @@
position: absolute;
left: 100%;
width: 30px;
height:30px;
}
.controls button.paused::before {
@@ -108,6 +109,22 @@
content: '⏸';
}
.buffering .controls button:is(.playing,.paused)::before {
content: '↺';
display:inline-block;
animation: rotate 1s infinite;
}
@keyframes rotate {
0% {
transform: rotate(359deg)
}
100% {
transform: rotate(0deg)
}
}
.controls {
position: absolute !important;
z-index: 100;
@@ -376,6 +393,7 @@
navigator.mediaSession.setActionHandler('seekforward', function () { /* Code excerpted. */ });
navigator.mediaSession.setActionHandler('previoustrack', function () { /* Code excerpted. */ });
navigator.mediaSession.setActionHandler('nexttrack', function () { /* Code excerpted. */ });
navigator.mediaSession.setActionHandler('playpause', function () { /* Code excerpted. */ });
</script>
</body>

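The CSS above renders a rotating ↺ while the wrapper carries a "buffering" class; the player code in the second file (below) adds and removes that class while waiting for audio. A standalone sketch of that pattern (the selector and callback are placeholders, not the actual Annotator methods):

    const wrapper = document.querySelector('.svganim_player');  // placeholder: a player wrapper element
    const audio = wrapper.querySelector('audio');

    function playWhenBuffered(start) {
      if (audio.readyState === HTMLMediaElement.HAVE_ENOUGH_DATA) {
        start();                                                 // enough audio buffered: start right away
        return;
      }
      wrapper.classList.add('buffering');                        // CSS above shows the ↺ spinner
      audio.addEventListener('canplaythrough', () => {
        wrapper.classList.remove('buffering');
        start();
      }, { once: true });                                        // run the handler only once
    }

    playWhenBuffered(() => audio.play());
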
@@ -110,9 +110,14 @@ class StrokeSlice {
}
class Annotator extends EventTarget {
constructor(wrapperEl, tagFile, fileurl) {
constructor(wrapperEl, tagFile, fileurl, config) {
super();
this.config = {
is_player: config && config.hasOwnProperty('is_player') ? config.is_player : false, // in player mode annotations are not loaded, nor is the annotator shown
crop_to_fit: config && config.hasOwnProperty('crop_to_fit') ? config.crop_to_fit : false, // don't animate viewport, but show the whole drawing
}
this.formatter = wNumb({
decimals: 2,
edit: (time) => {
@@ -142,6 +147,7 @@ class Annotator extends EventTarget {
this.wrapperEl = wrapperEl;
this.svgEl = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
this.wrapperEl.appendChild(this.svgEl);
this.wrapperEl.classList.add(this.config.is_player ? "svganim_player" : "svganim_annotator");
this.controlsEl = document.createElement('div');
@@ -161,7 +167,7 @@ class Annotator extends EventTarget {
this.playheadEl.addEventListener("input", (ev) => {
this.scrubTo(ev.target.value);
});
this.playheadEl.addEventListener('keydown',(ev) => {
this.playheadEl.addEventListener('keydown', (ev) => {
ev.preventDefault(); // we don't want to use arrow keys, as these are captured in the overall keydown event
})
@@ -178,7 +184,7 @@ class Annotator extends EventTarget {
this.playPauseEl.addEventListener("click", (ev) => {
this.playPause()
})
this.playPauseEl.addEventListener('keydown',(ev) => {
this.playPauseEl.addEventListener('keydown', (ev) => {
ev.preventDefault(); // we don't want to handle the spacebar here, as it is captured in the overall keydown event
})
@@ -191,6 +197,29 @@ class Annotator extends EventTarget {
this.annotationsEl.classList.add('annotations')
this.controlsEl.appendChild(this.annotationsEl);
this.inPointPosition = [0, 0];
this.inPointTimeMs = null;
this.outPointPosition = null;
this.outPointTimeMs = null;
this._currentTimeMs = 0;
this.videoIsPlaying = false;
const groups = ['before', 'annotation', 'after']
this.strokeGroups = {};
groups.forEach(group => {
let groupEl = document.createElementNS('http://www.w3.org/2000/svg', 'g');
groupEl.classList.add(group)
this.svgEl.appendChild(groupEl);
this.strokeGroups[group] = new StrokeGroup(groupEl, this);
});
this.annotations = [];
if (this.config.is_player) {
this.load(fileurl);
} else {
this.loadTags(tagFile).then(() => {
this.tagsEl = document.createElement('ul');
this.tagsEl.classList.add('tags');
@@ -242,27 +271,10 @@ class Annotator extends EventTarget {
this.controlsEl.appendChild(this.tagsEl);
this.inPointPosition = [0, 0];
this.inPointTimeMs = null;
this.outPointPosition = null;
this.outPointTimeMs = null;
this._currentTimeMs = 0;
this.videoIsPlaying = false;
const groups = ['before', 'annotation', 'after']
this.strokeGroups = {};
groups.forEach(group => {
let groupEl = document.createElementNS('http://www.w3.org/2000/svg', 'g');
groupEl.classList.add(group)
this.svgEl.appendChild(groupEl);
this.strokeGroups[group] = new StrokeGroup(groupEl, this);
});
this.annotations = [];
this.load(fileurl);
});
}
}
getColorForTag(tag) {
const tagData = this.tagMap[tag];
@@ -278,6 +290,10 @@ class Annotator extends EventTarget {
updateAnnotations(save) {
if (this.config.is_player) {
return false;
}
this.annotationsEl.innerHTML = "";
for (let annotation_i in this.annotations) {
const annotation = this.annotations[annotation_i];
@@ -409,6 +425,8 @@ class Annotator extends EventTarget {
fetch(request)
.then(response => response.json())
.then(data => {
if (!this.config.is_player) {
const metadata_req = new Request(`/annotations/${data.file}`, {
method: 'GET',
});
@@ -421,8 +439,10 @@ class Annotator extends EventTarget {
this.loadStrokes(data, metadata)
})
.catch(e => console.log(e));
// do something with the data sent in the request
});
} else {
this.loadStrokes(data, null);
}
}).catch(e => console.log(e));
}
updateState() {
@@ -508,15 +528,27 @@ class Annotator extends EventTarget {
setUpAnnotator() {
this.playheadEl.min = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
this.playheadEl.max = this.getEndTimeMs();
this._updatePlayhead();
this.inPointPosition = this.findPositionForTime(this.currentTime);
this.inPointTimeMs = this._currentTimeMs;
this.outPointPosition = this.findPositionForTime(this.lastFrameTime); // TODO: simplify to get the last frame indexes directly
this.outPointTimeMs = this.getEndTimeMs();
if (!this.config.is_player) {
this.buildAnnotator();
}
this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
}
buildAnnotator() {
if (this.scrubberEl.noUiSlider) {
this.slider.destroy();
}
@@ -597,21 +629,27 @@ class Annotator extends EventTarget {
ttInputEl.addEventListener('blur', submit);
});
})
this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
}
loadStrokes(drawing, metadata) {
this.audioOffset = 0;
if (metadata) {
this.annotations = metadata.annotations;
this.audioFile = metadata.hasOwnProperty('audio') ? metadata.audio.file : null;
this.audioOffset = metadata.hasOwnProperty('audio') ? Number.parseFloat(metadata.audio.offset) : 0;
}
if ((metadata && metadata.hasOwnProperty('audio')) || drawing.hasOwnProperty('audio')) {
if (metadata && metadata.hasOwnProperty('audio')) {
this.audioFile = metadata.audio.file
this.audioOffset = Number.parseFloat(metadata.audio.offset);
} else {
this.audioFile = drawing.audio.file
this.audioOffset = Number.parseFloat(drawing.audio.offset);
}
this._currentTimeMs = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
this._updatePlayhead();
//
// load any saved metadata
}
this.filename = drawing.file;
this.strokes = drawing.shape.map(s => new Stroke(s['color'], s['points']));
this.viewboxes = drawing.viewboxes;
@@ -619,7 +657,11 @@ class Annotator extends EventTarget {
this.currentPointI = null;
this.currentViewboxI = null;
this.dimensions = drawing.dimensions;
if (!this.config.crop_to_fit) {
this.svgEl.setAttribute('viewBox', `0 0 ${this.dimensions[0]} ${this.dimensions[1]}`)
} else {
this.svgEl.setAttribute('viewBox', `${drawing.bounding_box.x} ${drawing.bounding_box.y} ${drawing.bounding_box.width} ${drawing.bounding_box.height}`)
}
// let bgEl = document.createElementNS('http://www.w3.org/2000/svg', 'rect');
// bgEl.setAttribute("x", 0);
@@ -638,13 +680,39 @@ class Annotator extends EventTarget {
this.setupAudioConfig().then(() => {
// this.setUpAnnotator()
let keyEl;
if (this.config.is_player) {
keyEl = this.wrapperEl;
} else {
keyEl = document.body; // always capture
this.updateAnnotations(false);
}
document.body.addEventListener('keyup', (ev) => {
keyEl.addEventListener('keyup', (ev) => {
if (ev.key == ' ') {
this.playPause();
}
console.log('key', ev);
// shift+arrow keys, jump playhead (search position)
// FIXME doesn't keep playback after initial load. Only after unfocussing the window, and refocussing it, do the keys capture.
// Probably a wrong order
if (ev.key == 'ArrowLeft' && ev.shiftKey) {
const p = this._paused;
console.log(p);
const diff = ev.ctrlKey ? 10000 : 1000;
this.scrubTo(this._currentTimeMs - diff);
if (!p) { console.log('play!'); this.play(); } // scrubTo() causes a pause();
}
if (ev.key == 'ArrowRight' && ev.shiftKey) {
const p = this._paused;
console.log(p);
const diff = ev.ctrlKey ? 10000 : 1000;
this.scrubTo(this._currentTimeMs + diff);
if (!p) { console.log('play!'); this.play(); } // scrubTo() causes a pause();
}
// additional keys only for annotation mode
if (!this.config.is_player) {
if (ev.key == 'i') {
this.setInPoint(this.currentTime * 1000);
}
@@ -666,22 +734,6 @@ class Annotator extends EventTarget {
this.resetInOutPoint();
}
}
// shift+arrow keys, jump playhead (search position)
// FIXME doesn't keep playback after initial load. Only after unfocussing the window, and refocussing it, do the keys capture.
// Probably a wrong order
if (ev.key == 'ArrowLeft' && ev.shiftKey) {
const p = this._paused;
console.log(p);
const diff = ev.ctrlKey ? 10000 : 1000;
this.scrubTo(this._currentTimeMs - diff);
if (!p) { console.log('play!'); this.play(); } // scrubTo() causes a pause();
}
if (ev.key == 'ArrowRight' && ev.shiftKey) {
const p = this._paused;
console.log(p);
const diff = ev.ctrlKey ? 10000 : 1000;
this.scrubTo(this._currentTimeMs + diff);
if (!p) { console.log('play!'); this.play(); } // scrubTo() causes a pause();
}
});
});
@@ -714,11 +766,26 @@ class Annotator extends EventTarget {
setupAudioConfig() {
// audio config
return new Promise((resolve, reject) => {
this.audioEl = document.createElement('audio');
if (!this.config.is_player)
this.audioEl.setAttribute('controls', true);
this.audioEl.addEventListener('canplaythrough', (ev) => {
console.log('loaded audio', ev);
// this.audioEl.play();
});
if (this.config.is_player) {
this.wrapperEl.prepend(this.audioEl);
}
else {
let audioConfigEl = document.createElement('div');
audioConfigEl.classList.add('audioconfig')
this.wrapperEl.appendChild(audioConfigEl);
audioConfigEl.prepend(this.audioEl);
let audioSelectEl = document.createElement('select');
audioSelectEl.classList.add('audioselect');
audioConfigEl.appendChild(audioSelectEl);
@@ -753,17 +820,10 @@ class Annotator extends EventTarget {
});
audioOffsetTextEl.appendChild(audioOffsetEl);
}
this.audioEl = document.createElement('audio');
this.audioEl.setAttribute('controls', true);
this.audioEl.addEventListener('canplaythrough', (ev) => {
console.log('loaded audio', ev);
this.audioEl.play();
});
// this.audioEl.addEventListener('seeked', (ev)=>{
// console.log(ev);
// })
audioConfigEl.prepend(this.audioEl);
this.audioEl.addEventListener('loadedmetadata', (ev) => {
// resolve the 'set up audio' when metadata has loaded
@@ -827,11 +887,15 @@ class Annotator extends EventTarget {
console.debug('delay audio playback', t_start, t_diff);
// a negative audiooffset delays playback from the start
// this.audioStartTimeout = setTimeout((e) => this.audioEl.play(), t*-1000);
this.audioStartTimeout = setTimeout((e) => { this.audioEl.currentTime = 0 }, t_start * -1); // triggers play with "seeked" event
this.audioStartTimeout = setTimeout((e) => { this.audioEl.currentTime = 0; this.audioEl.play(); }, t_start * -1); // triggers play with "seeked" event
// this.audioEl.currentTime = 0;
}
} else {
if (this.audioEl.currentTime !== t_start / 1000) {
console.log(this.audioEl.currentTime, t_start / 1000);
this.audioEl.currentTime = t_start / 1000;
}
this.audioEl.play();
// this.audioEl.play(); // play is done in the "seeked" event listener
console.log(this.audioEl.currentTime, t_start, t_in, t_out);
}
@@ -844,6 +908,10 @@ class Annotator extends EventTarget {
}, t_diff);
}
_scrubAudio(time_ms) {
this.audioEl.currentTime = Math.max(0, this.getAudioTime(time_ms)) / 1000;
}
getFinalFrameTime() {
const points = this.strokes[this.strokes.length - 1].points;
return points[points.length - 1][3];
@@ -924,8 +992,10 @@ class Annotator extends EventTarget {
}
this.currentViewboxI = box_i
const b = this.viewboxes[box_i];
if (!this.config.crop_to_fit) {
this.svgEl.setAttribute('viewBox', `${b.x} ${b.y} ${this.dimensions[0]} ${this.dimensions[1]}`)
}
}
getNextPosition(path_i, point_i) {
const path = this.strokes[path_i];
@@ -1053,6 +1123,10 @@ class Annotator extends EventTarget {
this._seekByTimeMs(this._currentTimeMs); // prevent playback issue for initial load
}
this._setPausedFlag(false);
const startPlayback = () => {
console.log('start playback');
this.startTimeMs = window.performance.now() - this._currentTimeMs;
// strokes
if (this._currentTimeMs < 0) {
@@ -1065,13 +1139,26 @@ class Annotator extends EventTarget {
this.playViewboxPosition(this.currentViewboxI);
// audio
// TODO: use this.audioEl.readyState == 4 : play immediately, otherwise after event
this.playAudioSegment(this._currentTimeMs, this.outPointTimeMs);
// this.playStrokePosition(this.currentPathI, this.currentPointI);
this._setPausedFlag(false);
this.dispatchEvent(new CustomEvent('play', {}));
this._animationFrame();
resolve();
}
if (this.audioEl.readyState !== 4) { // not ready to play after seeking audio.
console.log('wait for audio before playback');
this.wrapperEl.classList.add('buffering');
this.audioEl.addEventListener('canplaythrough', () => {
this.wrapperEl.classList.remove('buffering');
startPlayback()
}, { once: true }); // only once
} else {
startPlayback();
}
});
}
@@ -1145,6 +1232,7 @@ class Annotator extends EventTarget {
_seekByPoint(point) {
this.dispatchEvent(new CustomEvent('seeking', {}));
this._currentTimeMs = this.strokes[point[0]].points[point[1]][2];
this.audioEl.currentTime = this.getAudioTime(this._currentTimeMs) / 1000;
[this.currentPathI, this.currentPointI] = point;
this._updatePlayhead();
this._updateFrame();
@@ -1158,6 +1246,7 @@ class Annotator extends EventTarget {
_seekByTime(time) {
this.dispatchEvent(new CustomEvent('seeking', { detail: time }));
this._currentTimeMs = Number.parseFloat(time) * 1000;
this.audioEl.currentTime = this.getAudioTime(this._currentTimeMs) / 1000;
[this.currentPathI, this.currentPointI] = this.findPositionForTime(this._currentTimeMs);
this._updatePlayhead();