Annotator now supports player mode. Buffer audio before playback.

Ruben van de Ven 2022-05-24 21:23:03 +02:00
parent d55c5b5486
commit eb6d31742e
2 changed files with 289 additions and 182 deletions
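
A minimal usage sketch of the new player mode, based on the constructor change in this commit; the element id and drawing URL are placeholders, and passing null for the tag file assumes tags are only needed in annotation mode (as the new is_player branch suggests):

// hypothetical embedding of a recording in player mode (sketch, not part of this commit)
const wrapperEl = document.getElementById('player'); // assumed container element
const player = new Annotator(wrapperEl, null, 'drawing.json', {
    is_player: true,   // hide the annotator UI and skip loading annotations
    crop_to_fit: true, // show the whole drawing instead of animating the viewport
});
// Annotator extends EventTarget and dispatches 'play' when playback starts
player.addEventListener('play', () => console.log('playback started'));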

View File

@@ -98,6 +98,7 @@
position: absolute;
left: 100%;
width: 30px;
height:30px;
}
.controls button.paused::before {
@@ -108,6 +109,22 @@
content: '⏸';
}
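/* while the Annotator waits for audio it adds .buffering to the wrapper, turning the play/pause glyph into a spinning ↺ */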
.buffering .controls button:is(.playing,.paused)::before {
content: '↺';
display:inline-block;
animation: rotate 1s infinite;
}
@keyframes rotate {
0% {
transform: rotate(359deg)
}
100% {
transform: rotate(0deg)
}
}
.controls {
position: absolute !important;
z-index: 100;
@@ -376,6 +393,7 @@
navigator.mediaSession.setActionHandler('seekforward', function () { /* Code excerpted. */ });
navigator.mediaSession.setActionHandler('previoustrack', function () { /* Code excerpted. */ });
navigator.mediaSession.setActionHandler('nexttrack', function () { /* Code excerpted. */ });
navigator.mediaSession.setActionHandler('playpause', function () { /* Code excerpted. */ });
</script>
</body>

View File

@@ -110,9 +110,14 @@ class StrokeSlice {
}
class Annotator extends EventTarget {
constructor(wrapperEl, tagFile, fileurl) {
constructor(wrapperEl, tagFile, fileurl, config) {
super();
this.config = {
is_player: config && config.hasOwnProperty('is_player') ? config.is_player : false, // in player mode annotations are not loaded, nor is the annotator shown
crop_to_fit: config && config.hasOwnProperty('crop_to_fit') ? config.crop_to_fit : false, // don't animate viewport, but show the whole drawing
}
this.formatter = wNumb({
decimals: 2,
edit: (time) => {
@@ -142,6 +147,7 @@ class Annotator extends EventTarget {
this.wrapperEl = wrapperEl;
this.svgEl = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
this.wrapperEl.appendChild(this.svgEl);
this.wrapperEl.classList.add(this.config.is_player ? "svganim_player" : "svganim_annotator");
this.controlsEl = document.createElement('div');
@@ -161,7 +167,7 @@
this.playheadEl.addEventListener("input", (ev) => {
this.scrubTo(ev.target.value);
});
this.playheadEl.addEventListener('keydown',(ev) => {
this.playheadEl.addEventListener('keydown', (ev) => {
ev.preventDefault(); // we don't want to use arrow keys, as these are captured in the overall keydown event
})
@@ -178,7 +184,7 @@
this.playPauseEl.addEventListener("click", (ev) => {
this.playPause()
})
this.playPauseEl.addEventListener('keydown',(ev) => {
this.playPauseEl.addEventListener('keydown', (ev) => {
ev.preventDefault(); // we don't want the spacebar to trigger this button, as it is already captured in the overall keydown event
})
@@ -191,77 +197,83 @@
this.annotationsEl.classList.add('annotations')
this.controlsEl.appendChild(this.annotationsEl);
this.loadTags(tagFile).then(() => {
this.tagsEl = document.createElement('ul');
this.tagsEl.classList.add('tags');
const addTags = (tags, tagsEl) => {
Object.entries(tags).forEach(([tag, tagData]) => {
let tagLiEl = document.createElement('li');
let tagEl = document.createElement('div');
tagEl.classList.add('tag');
tagEl.dataset.tag = tag;
tagEl.innerText = tagData.hasOwnProperty('fullname') ? tagData.fullname : tag;
tagEl.addEventListener('click', (e) => {
this.addTag(tag, this.inPointPosition, this.outPointPosition);
});
this.inPointPosition = [0, 0];
this.inPointTimeMs = null;
this.outPointPosition = null;
this.outPointTimeMs = null;
this._currentTimeMs = 0;
this.videoIsPlaying = false;
tagEl.title = tagData.hasOwnProperty('description') ? tagData.description : "";
let signEl = document.createElement('span');
signEl.classList.add('annotation-' + tag);
signEl.style.backgroundColor = this.getColorForTag(tag);
tagEl.prepend(signEl);
tagLiEl.appendChild(tagEl);
if (tagData.hasOwnProperty('sub')) {
const subEl = document.createElement('ul');
subEl.classList.add('subtags');
addTags(tagData.sub, subEl);
tagLiEl.appendChild(subEl);
}
tagsEl.appendChild(tagLiEl);
});
};
addTags(this.tags, this.tagsEl);
let tagEl = document.createElement('li');
tagEl.classList.add('tag');
tagEl.classList.add('annotation-rm');
tagEl.dataset.tag = 'rm';
tagEl.title = "Remove annotation";
tagEl.innerHTML = "🚮"; // &times;
tagEl.addEventListener('click', (e) => {
if (this.selectedAnnotation) {
this.removeAnnotation(this.selectedAnnotationI);
}
});
this.tagsEl.appendChild(tagEl);
this.controlsEl.appendChild(this.tagsEl);
this.inPointPosition = [0, 0];
this.inPointTimeMs = null;
this.outPointPosition = null;
this.outPointTimeMs = null;
this._currentTimeMs = 0;
this.videoIsPlaying = false;
const groups = ['before', 'annotation', 'after']
this.strokeGroups = {};
groups.forEach(group => {
let groupEl = document.createElementNS('http://www.w3.org/2000/svg', 'g');
groupEl.classList.add(group)
this.svgEl.appendChild(groupEl);
this.strokeGroups[group] = new StrokeGroup(groupEl, this);
});
this.annotations = [];
this.load(fileurl);
const groups = ['before', 'annotation', 'after']
this.strokeGroups = {};
groups.forEach(group => {
let groupEl = document.createElementNS('http://www.w3.org/2000/svg', 'g');
groupEl.classList.add(group)
this.svgEl.appendChild(groupEl);
this.strokeGroups[group] = new StrokeGroup(groupEl, this);
});
this.annotations = [];
if (this.config.is_player) {
this.load(fileurl);
} else {
this.loadTags(tagFile).then(() => {
this.tagsEl = document.createElement('ul');
this.tagsEl.classList.add('tags');
const addTags = (tags, tagsEl) => {
Object.entries(tags).forEach(([tag, tagData]) => {
let tagLiEl = document.createElement('li');
let tagEl = document.createElement('div');
tagEl.classList.add('tag');
tagEl.dataset.tag = tag;
tagEl.innerText = tagData.hasOwnProperty('fullname') ? tagData.fullname : tag;
tagEl.addEventListener('click', (e) => {
this.addTag(tag, this.inPointPosition, this.outPointPosition);
});
tagEl.title = tagData.hasOwnProperty('description') ? tagData.description : "";
let signEl = document.createElement('span');
signEl.classList.add('annotation-' + tag);
signEl.style.backgroundColor = this.getColorForTag(tag);
tagEl.prepend(signEl);
tagLiEl.appendChild(tagEl);
if (tagData.hasOwnProperty('sub')) {
const subEl = document.createElement('ul');
subEl.classList.add('subtags');
addTags(tagData.sub, subEl);
tagLiEl.appendChild(subEl);
}
tagsEl.appendChild(tagLiEl);
});
};
addTags(this.tags, this.tagsEl);
let tagEl = document.createElement('li');
tagEl.classList.add('tag');
tagEl.classList.add('annotation-rm');
tagEl.dataset.tag = 'rm';
tagEl.title = "Remove annotation";
tagEl.innerHTML = "🚮"; // &times;
tagEl.addEventListener('click', (e) => {
if (this.selectedAnnotation) {
this.removeAnnotation(this.selectedAnnotationI);
}
});
this.tagsEl.appendChild(tagEl);
this.controlsEl.appendChild(this.tagsEl);
this.load(fileurl);
});
}
}
getColorForTag(tag) {
@@ -278,6 +290,10 @@ class Annotator extends EventTarget {
updateAnnotations(save) {
if (this.config.is_player) {
return false;
}
this.annotationsEl.innerHTML = "";
for (let annotation_i in this.annotations) {
const annotation = this.annotations[annotation_i];
@@ -409,20 +425,24 @@
fetch(request)
.then(response => response.json())
.then(data => {
const metadata_req = new Request(`/annotations/${data.file}`, {
method: 'GET',
});
fetch(metadata_req)
.then(response => response.ok ? response.json() : null)
.then(metadata => {
if (metadata !== null) {
metadata.annotations = metadata.annotations.map((a) => new Annotation(a.tag, a.t_in, a.t_out))
}
this.loadStrokes(data, metadata)
})
.catch(e => console.log(e));
// do something with the data sent in the request
});
if (!this.config.is_player) {
const metadata_req = new Request(`/annotations/${data.file}`, {
method: 'GET',
});
fetch(metadata_req)
.then(response => response.ok ? response.json() : null)
.then(metadata => {
if (metadata !== null) {
metadata.annotations = metadata.annotations.map((a) => new Annotation(a.tag, a.t_in, a.t_out))
}
this.loadStrokes(data, metadata)
})
.catch(e => console.log(e));
} else {
this.loadStrokes(data, null);
}
}).catch(e => console.log(e));
}
updateState() {
@@ -508,15 +528,27 @@
setUpAnnotator() {
this.playheadEl.min = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
this.playheadEl.max = this.getEndTimeMs();
this._updatePlayhead();
this.inPointPosition = this.findPositionForTime(this.currentTime);
this.inPointTimeMs = this._currentTimeMs;
this.outPointPosition = this.findPositionForTime(this.lastFrameTime); // TODO: simplify to get the last frame indexes directly
this.outPointTimeMs = this.getEndTimeMs();
if (!this.config.is_player) {
this.buildAnnotator();
}
this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
}
buildAnnotator() {
if (this.scrubberEl.noUiSlider) {
this.slider.destroy();
}
@@ -597,21 +629,27 @@
ttInputEl.addEventListener('blur', submit);
});
})
this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
}
loadStrokes(drawing, metadata) {
this.audioOffset = 0;
if (metadata) {
this.annotations = metadata.annotations;
this.audioFile = metadata.hasOwnProperty('audio') ? metadata.audio.file : null;
this.audioOffset = metadata.hasOwnProperty('audio') ? Number.parseFloat(metadata.audio.offset) : 0;
}
if ((metadata && metadata.hasOwnProperty('audio')) || drawing.hasOwnProperty('audio')) {
if (metadata && metadata.hasOwnProperty('audio')) {
this.audioFile = metadata.audio.file
this.audioOffset = Number.parseFloat(metadata.audio.offset);
} else {
this.audioFile = drawing.audio.file
this.audioOffset = Number.parseFloat(drawing.audio.offset);
}
this._currentTimeMs = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
this._updatePlayhead();
//
// load any saved metadata
}
this.filename = drawing.file;
this.strokes = drawing.shape.map(s => new Stroke(s['color'], s['points']));
this.viewboxes = drawing.viewboxes;
@@ -619,7 +657,11 @@
this.currentPointI = null;
this.currentViewboxI = null;
this.dimensions = drawing.dimensions;
this.svgEl.setAttribute('viewBox', `0 0 ${this.dimensions[0]} ${this.dimensions[1]}`)
if (!this.config.crop_to_fit) {
this.svgEl.setAttribute('viewBox', `0 0 ${this.dimensions[0]} ${this.dimensions[1]}`)
} else {
this.svgEl.setAttribute('viewBox', `${drawing.bounding_box.x} ${drawing.bounding_box.y} ${drawing.bounding_box.width} ${drawing.bounding_box.height}`)
}
// let bgEl = document.createElementNS('http://www.w3.org/2000/svg', 'rect');
// bgEl.setAttribute("x", 0);
@@ -638,34 +680,19 @@
this.setupAudioConfig().then(() => {
// this.setUpAnnotator()
this.updateAnnotations(false);
let keyEl;
if (this.config.is_player) {
keyEl = this.wrapperEl;
} else {
keyEl = document.body; // always capture
this.updateAnnotations(false);
}
document.body.addEventListener('keyup', (ev) => {
keyEl.addEventListener('keyup', (ev) => {
if (ev.key == ' ') {
this.playPause();
}
console.log('key', ev);
if (ev.key == 'i') {
this.setInPoint(this.currentTime * 1000);
}
if (ev.key == 'o') {
this.setOutPoint(this.currentTime * 1000);
}
if (ev.key == 'I') {
// shift+i == jump to in point
this.scrubTo(this.inPointTimeMs);
}
if (ev.key == 'O') {
// shift+o == jump to end point
this.scrubTo(this.outPointTimeMs);
}
if (ev.key == 'Escape') {
if (this.selectedAnnotation) {
this.deselectAnnotation();
} else {
this.resetInOutPoint();
}
}
// shift+arrow keys, jump playhead (search position)
// FIXME doesn't keep playback after initial load. Only after unfocussing the window and refocussing it are the keys captured.
// Probably a wrong order
@@ -683,6 +710,31 @@
this.scrubTo(this._currentTimeMs + diff);
if (!p) { console.log('play!'); this.play(); } // scrubTo() causes a pause();
}
// additional keys only for annotation mode
if (!this.config.is_player) {
if (ev.key == 'i') {
this.setInPoint(this.currentTime * 1000);
}
if (ev.key == 'o') {
this.setOutPoint(this.currentTime * 1000);
}
if (ev.key == 'I') {
// shift+i == jump to in point
this.scrubTo(this.inPointTimeMs);
}
if (ev.key == 'O') {
// shift+o == jump to end point
this.scrubTo(this.outPointTimeMs);
}
if (ev.key == 'Escape') {
if (this.selectedAnnotation) {
this.deselectAnnotation();
} else {
this.resetInOutPoint();
}
}
}
});
});
@@ -714,56 +766,64 @@
setupAudioConfig() {
// audio config
return new Promise((resolve, reject) => {
let audioConfigEl = document.createElement('div');
audioConfigEl.classList.add('audioconfig')
this.wrapperEl.appendChild(audioConfigEl);
let audioSelectEl = document.createElement('select');
audioSelectEl.classList.add('audioselect');
audioConfigEl.appendChild(audioSelectEl);
fetch('/audio')
.then(response => response.json())
.then(data => {
data.unshift(''); // add empty, to deselect any file
data.forEach(audioFile => {
let optionEl = document.createElement('option');
optionEl.selected = this.audioFile == audioFile;
optionEl.innerText = audioFile;
audioSelectEl.appendChild(optionEl);
});
})
audioSelectEl.addEventListener('change', (ev) => {
this.setAudioFile(ev.target.value);
});
let audioOffsetTextEl = document.createElement('label');
audioOffsetTextEl.innerText = "Offset (s)";
audioConfigEl.appendChild(audioOffsetTextEl);
let audioOffsetEl = document.createElement('input');
audioOffsetEl.setAttribute('type', 'number');
audioOffsetEl.setAttribute('step', '.01');
audioOffsetEl.value = this.audioOffset ?? 0;
audioOffsetEl.addEventListener('change', (ev) => {
this.setAudioOffset(ev.target.value);
});
audioOffsetTextEl.appendChild(audioOffsetEl);
this.audioEl = document.createElement('audio');
this.audioEl.setAttribute('controls', true);
if (!this.config.is_player)
this.audioEl.setAttribute('controls', true);
this.audioEl.addEventListener('canplaythrough', (ev) => {
console.log('loaded audio', ev);
this.audioEl.play();
// this.audioEl.play();
});
// this.audioEl.addEventListener('seeked', (ev)=>{
// console.log(ev);
// })
audioConfigEl.prepend(this.audioEl);
if (this.config.is_player) {
this.wrapperEl.prepend(this.audioEl);
}
else {
let audioConfigEl = document.createElement('div');
audioConfigEl.classList.add('audioconfig')
this.wrapperEl.appendChild(audioConfigEl);
audioConfigEl.prepend(this.audioEl);
let audioSelectEl = document.createElement('select');
audioSelectEl.classList.add('audioselect');
audioConfigEl.appendChild(audioSelectEl);
fetch('/audio')
.then(response => response.json())
.then(data => {
data.unshift(''); // add empty, to deselect any file
data.forEach(audioFile => {
let optionEl = document.createElement('option');
optionEl.selected = this.audioFile == audioFile;
optionEl.innerText = audioFile;
audioSelectEl.appendChild(optionEl);
});
})
audioSelectEl.addEventListener('change', (ev) => {
this.setAudioFile(ev.target.value);
});
let audioOffsetTextEl = document.createElement('label');
audioOffsetTextEl.innerText = "Offset (s)";
audioConfigEl.appendChild(audioOffsetTextEl);
let audioOffsetEl = document.createElement('input');
audioOffsetEl.setAttribute('type', 'number');
audioOffsetEl.setAttribute('step', '.01');
audioOffsetEl.value = this.audioOffset ?? 0;
audioOffsetEl.addEventListener('change', (ev) => {
this.setAudioOffset(ev.target.value);
});
audioOffsetTextEl.appendChild(audioOffsetEl);
}
this.audioEl.addEventListener('loadedmetadata', (ev) => {
// resolve the 'set up audio' when metadata has loaded
@@ -827,11 +887,15 @@
console.debug('delay audio playback', t_start, t_diff);
// a negative audiooffset delays playback from the start
// this.audioStartTimeout = setTimeout((e) => this.audioEl.play(), t*-1000);
this.audioStartTimeout = setTimeout((e) => { this.audioEl.currentTime = 0 }, t_start * -1); // triggers play with "seeked" event
this.audioStartTimeout = setTimeout((e) => { this.audioEl.currentTime = 0; this.audioEl.play(); }, t_start * -1); // triggers play with "seeked" event
// this.audioEl.currentTime = 0;
}
} else {
this.audioEl.currentTime = t_start / 1000;
if (this.audioEl.currentTime !== t_start / 1000) {
console.log(this.audioEl.currentTime, t_start / 1000);
this.audioEl.currentTime = t_start / 1000;
}
this.audioEl.play();
// this.audioEl.play(); // play is done in the "seeked" event listener
console.log(this.audioEl.currentTime, t_start, t_in, t_out);
}
@@ -844,6 +908,10 @@
}, t_diff);
}
_scrubAudio(time_ms) {
this.audioEl.currentTime = Math.max(0, this.getAudioTime(time_ms)) / 1000;
}
getFinalFrameTime() {
const points = this.strokes[this.strokes.length - 1].points;
return points[points.length - 1][3];
@@ -924,7 +992,9 @@
}
this.currentViewboxI = box_i
const b = this.viewboxes[box_i];
this.svgEl.setAttribute('viewBox', `${b.x} ${b.y} ${this.dimensions[0]} ${this.dimensions[1]}`)
if (!this.config.crop_to_fit) {
this.svgEl.setAttribute('viewBox', `${b.x} ${b.y} ${this.dimensions[0]} ${this.dimensions[1]}`)
}
}
getNextPosition(path_i, point_i) {
@@ -1053,25 +1123,42 @@
this._seekByTimeMs(this._currentTimeMs); // prevent playback issue for initial load
}
this.startTimeMs = window.performance.now() - this._currentTimeMs;
// strokes
if (this._currentTimeMs < 0) {
this.startVideoTimeout = setTimeout((e) => this.playStrokePosition(this.currentPathI, this.currentPointI), this._currentTimeMs * -1);
} else {
this.playStrokePosition(this.currentPathI, this.currentPointI);
}
// viewboxes
// const nextViewboxI = Math.max(this.currentViewboxI++, this.viewboxes.length-1);
this.playViewboxPosition(this.currentViewboxI);
// audio
this.playAudioSegment(this._currentTimeMs, this.outPointTimeMs);
// this.playStrokePosition(this.currentPathI, this.currentPointI);
this._setPausedFlag(false);
this.dispatchEvent(new CustomEvent('play', {}));
this._animationFrame();
resolve();
const startPlayback = () => {
console.log('start playback');
this.startTimeMs = window.performance.now() - this._currentTimeMs;
// strokes
if (this._currentTimeMs < 0) {
this.startVideoTimeout = setTimeout((e) => this.playStrokePosition(this.currentPathI, this.currentPointI), this._currentTimeMs * -1);
} else {
this.playStrokePosition(this.currentPathI, this.currentPointI);
}
// viewboxes
// const nextViewboxI = Math.max(this.currentViewboxI++, this.viewboxes.length-1);
this.playViewboxPosition(this.currentViewboxI);
// audio
// TODO: if this.audioEl.readyState == 4, play immediately; otherwise play after the event
this.playAudioSegment(this._currentTimeMs, this.outPointTimeMs);
// this.playStrokePosition(this.currentPathI, this.currentPointI);
this.dispatchEvent(new CustomEvent('play', {}));
this._animationFrame();
resolve();
}
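// if the audio has not buffered enough to play through, show the buffering state and defer playback until it has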
if (this.audioEl.readyState !== 4) { // not ready to play after seeking audio.
console.log('wait for audio before playback');
this.wrapperEl.classList.add('buffering');
this.audioEl.addEventListener('canplaythrough', () => {
this.wrapperEl.classList.remove('buffering');
startPlayback()
}, { once: true }); // only once
} else {
startPlayback();
}
});
}
@@ -1145,6 +1232,7 @@
_seekByPoint(point) {
this.dispatchEvent(new CustomEvent('seeking', {}));
this._currentTimeMs = this.strokes[point[0]].points[point[1]][2];
this.audioEl.currentTime = this.getAudioTime(this._currentTimeMs) / 1000;
[this.currentPathI, this.currentPointI] = point;
this._updatePlayhead();
this._updateFrame();
@@ -1158,6 +1246,7 @@
_seekByTime(time) {
this.dispatchEvent(new CustomEvent('seeking', { detail: time }));
this._currentTimeMs = Number.parseFloat(time) * 1000;
this.audioEl.currentTime = this.getAudioTime(this._currentTimeMs) / 1000;
[this.currentPathI, this.currentPointI] = this.findPositionForTime(this._currentTimeMs);
this._updatePlayhead();