Compare commits

2 commits

Author            SHA1        Message                                                            Date
Ruben van de Ven  7e946cb3f6  feature: comment on annotations                                    2022-05-25 10:04:55 +02:00
Ruben van de Ven  eb6d31742e  Annotator now supports player mode. Buffer audio befor playback.  2022-05-24 21:23:03 +02:00
2 changed files with 321 additions and 185 deletions
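
One of the commits above ("feature: comment on annotations") attaches an optional free-text comment to every annotation, stored next to its tag and in/out points; the other adds a player mode and buffers audio before starting playback. For orientation while reading the diff, a saved annotation entry ends up shaped roughly like this (field names come from the Annotation class in the second file; the values are invented):

    // Hypothetical example of one entry in a saved metadata.annotations array.
    // 'comment' is the new field; when it is missing, the loader falls back to "".
    const annotationEntry = {
        tag: "gesture",                          // invented tag name
        t_in: 12.5,                              // in point, parsed with Number.parseFloat
        t_out: 14.25,                            // out point, parsed with Number.parseFloat
        comment: "hesitates before continuing"   // new in this commit
    };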

Changed file 1 of 2

@@ -98,6 +98,7 @@
     position: absolute;
     left: 100%;
     width: 30px;
+    height:30px;
 }
 
 .controls button.paused::before {
@@ -108,6 +109,22 @@
     content: '⏸';
 }
 
+.buffering .controls button:is(.playing,.paused)::before {
+    content: '↺';
+    display:inline-block;
+    animation: rotate 1s infinite;
+}
+
+@keyframes rotate {
+    0% {
+        transform: rotate(359deg)
+    }
+    100% {
+        transform: rotate(0deg)
+    }
+}
+
 .controls {
     position: absolute !important;
     z-index: 100;
@@ -182,6 +199,14 @@
     pointer-events: all;
 }
 
+.controls .annotation-comment{
+    width: 100%;
+    visibility: hidden;
+}
+
+.selected-annotation .controls .annotation-comment{
+    visibility: visible;
+}
+
 .noUi-handle:focus {
     /* background: red;; */
     border: solid 2px #601be0;
@@ -376,6 +401,7 @@
 navigator.mediaSession.setActionHandler('seekforward', function () { /* Code excerpted. */ });
 navigator.mediaSession.setActionHandler('previoustrack', function () { /* Code excerpted. */ });
 navigator.mediaSession.setActionHandler('nexttrack', function () { /* Code excerpted. */ });
+navigator.mediaSession.setActionHandler('playpause', function () { /* Code excerpted. */ });
 </script>
 </body>
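
The ↺ spinner added above is driven entirely by a buffering class on the player's wrapper element: the script in the second file adds the class when the audio element is not yet ready to play and removes it once the browser fires canplaythrough. A minimal sketch of that pattern, with wrapperEl, audioEl and startPlayback standing in for the annotator's own elements and callback:

    // Sketch of how the .buffering spinner is toggled (names assumed, not from the diff).
    function playWhenReady(wrapperEl, audioEl, startPlayback) {
        if (audioEl.readyState !== 4) {                  // 4 = HAVE_ENOUGH_DATA
            wrapperEl.classList.add('buffering');        // shows the rotating ↺ defined in the CSS above
            audioEl.addEventListener('canplaythrough', () => {
                wrapperEl.classList.remove('buffering');
                startPlayback();
            }, { once: true });
        } else {
            startPlayback();
        }
    }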

Changed file 2 of 2

@@ -1,8 +1,9 @@
 class Annotation {
-    constructor(tag, t_in, t_out) {
+    constructor(tag, t_in, t_out, comment) {
         this.tag = tag;
         this.t_in = Number.parseFloat(t_in);
         this.t_out = Number.parseFloat(t_out);
+        this.comment = comment;
     }
 }
@@ -110,9 +111,14 @@ class StrokeSlice {
 }
 
 class Annotator extends EventTarget {
-    constructor(wrapperEl, tagFile, fileurl) {
+    constructor(wrapperEl, tagFile, fileurl, config) {
         super();
+        this.config = {
+            is_player: config && config.hasOwnProperty('is_player') ? config.is_player : false, // in player mode annotations are not loaded, nor is the annotator shown
+            crop_to_fit: config && config.hasOwnProperty('crop_to_fit') ? config.crop_to_fit : false, // don't animate viewport, but show the whole drawing
+        }
+
         this.formatter = wNumb({
             decimals: 2,
             edit: (time) => {
@@ -142,6 +148,7 @@ class Annotator extends EventTarget {
         this.wrapperEl = wrapperEl;
         this.svgEl = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
         this.wrapperEl.appendChild(this.svgEl);
+        this.wrapperEl.classList.add(this.config.is_player ? "svganim_player" : "svganim_annotator");
 
         this.controlsEl = document.createElement('div');
@@ -191,6 +198,29 @@ class Annotator extends EventTarget {
         this.annotationsEl.classList.add('annotations')
         this.controlsEl.appendChild(this.annotationsEl);
 
+        this.inPointPosition = [0, 0];
+        this.inPointTimeMs = null;
+        this.outPointPosition = null;
+        this.outPointTimeMs = null;
+        this._currentTimeMs = 0;
+        this.videoIsPlaying = false;
+
+        const groups = ['before', 'annotation', 'after']
+        this.strokeGroups = {};
+        groups.forEach(group => {
+            let groupEl = document.createElementNS('http://www.w3.org/2000/svg', 'g');
+            groupEl.classList.add(group)
+            this.svgEl.appendChild(groupEl);
+            this.strokeGroups[group] = new StrokeGroup(groupEl, this);
+        });
+
+        this.annotations = [];
+
+        if (this.config.is_player) {
+            this.load(fileurl);
+        } else {
         this.loadTags(tagFile).then(() => {
             this.tagsEl = document.createElement('ul');
             this.tagsEl.classList.add('tags');
@@ -242,27 +272,28 @@ class Annotator extends EventTarget {
             this.controlsEl.appendChild(this.tagsEl);
 
-            this.inPointPosition = [0, 0];
-            this.inPointTimeMs = null;
-            this.outPointPosition = null;
-            this.outPointTimeMs = null;
-            this._currentTimeMs = 0;
-            this.videoIsPlaying = false;
-
-            const groups = ['before', 'annotation', 'after']
-            this.strokeGroups = {};
-            groups.forEach(group => {
-                let groupEl = document.createElementNS('http://www.w3.org/2000/svg', 'g');
-                groupEl.classList.add(group)
-                this.svgEl.appendChild(groupEl);
-                this.strokeGroups[group] = new StrokeGroup(groupEl, this);
-            });
-
-            this.annotations = [];
+            this.commentEl = document.createElement('input');
+            this.commentEl.type = 'text';
+            this.commentEl.classList.add('annotation-comment');
+            this.commentEl.title = "Add comment to annotation";
+            this.commentEl.placeholder = "comment";
+            this.commentEl.value = "";
+            this.commentEl.addEventListener('keyup', (e) => {
+                e.stopPropagation(); // prevent keyup event to propagate and set i/o points
+            });
+            this.commentEl.addEventListener('input', (e) => {
+                e.stopPropagation(); // prevent keyup event
+                if (this.selectedAnnotation) {
+                    this.selectedAnnotation.comment = this.commentEl.value;
+                    this.updateAnnotations(true)
+                }
+            });
+            this.controlsEl.appendChild(this.commentEl);
 
             this.load(fileurl);
         });
     }
+    }
 
     getColorForTag(tag) {
         const tagData = this.tagMap[tag];
@@ -278,6 +309,10 @@
     updateAnnotations(save) {
+        if (this.config.is_player) {
+            return false;
+        }
+
         this.annotationsEl.innerHTML = "";
         for (let annotation_i in this.annotations) {
             const annotation = this.annotations[annotation_i];
@@ -295,7 +330,7 @@
             if (this.selectedAnnotationI == annotation_i) {
                 this.annotationEl.classList.add('selected');
             }
-            this.annotationEl.title = annotation.tag;
+            this.annotationEl.title = `[${annotation.tag}] ${annotation.comment}`;
 
             this.annotationEl.addEventListener('mouseover', (e) => {
@@ -344,6 +379,7 @@
         this.updateAnnotations(false); //selects the right tag & highlights the annotation
         this.wrapperEl.classList.add('selected-annotation');
+        this.commentEl.value = this.selectedAnnotation.comment;
     }
 
     deselectAnnotation(keep_position) {
@@ -352,6 +388,7 @@
         }
         this.wrapperEl.classList.remove('selected-annotation');
+        this.commentEl.value = "";
 
         this.selectedAnnotationI = null;
         this.selectedAnnotation = null;
@@ -409,6 +446,8 @@
         fetch(request)
             .then(response => response.json())
             .then(data => {
+                if (!this.config.is_player) {
                 const metadata_req = new Request(`/annotations/${data.file}`, {
                     method: 'GET',
                 });
@@ -416,13 +455,15 @@
                     .then(response => response.ok ? response.json() : null)
                     .then(metadata => {
                         if (metadata !== null) {
-                            metadata.annotations = metadata.annotations.map((a) => new Annotation(a.tag, a.t_in, a.t_out))
+                            metadata.annotations = metadata.annotations.map((a) => new Annotation(a.tag, a.t_in, a.t_out, a.hasOwnProperty('comment') ? a.comment : ""))
                         }
                         this.loadStrokes(data, metadata)
                     })
                     .catch(e => console.log(e));
-                // do something with the data sent in the request
-            });
+                } else {
+                    this.loadStrokes(data, null);
+                }
+            }).catch(e => console.log(e));
     }
 
     updateState() {
@@ -497,7 +538,7 @@
             this.slider.destroy();
         }
 
-        this.annotations.push(new Annotation(tag, t_in, t_out));
+        this.annotations.push(new Annotation(tag, t_in, t_out, ""));
         this.updateAnnotations(true);
 
         this._currentTimeMs = t_out;
@@ -508,15 +549,27 @@
     setUpAnnotator() {
         this.playheadEl.min = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
         this.playheadEl.max = this.getEndTimeMs();
         this._updatePlayhead();
 
         this.inPointPosition = this.findPositionForTime(this.currentTime);
         this.inPointTimeMs = this._currentTimeMs;
         this.outPointPosition = this.findPositionForTime(this.lastFrameTime); // TODO: simplify to get the last frame indexes directly
         this.outPointTimeMs = this.getEndTimeMs();
 
+        if (!this.config.is_player) {
+            this.buildAnnotator();
+        }
+
+        this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
+    }
+
+    buildAnnotator() {
         if (this.scrubberEl.noUiSlider) {
             this.slider.destroy();
         }
@@ -597,21 +650,27 @@
                 ttInputEl.addEventListener('blur', submit);
             });
         })
-
-        this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
     }
 
     loadStrokes(drawing, metadata) {
         this.audioOffset = 0;
         if (metadata) {
             this.annotations = metadata.annotations;
-            this.audioFile = metadata.hasOwnProperty('audio') ? metadata.audio.file : null;
-            this.audioOffset = metadata.hasOwnProperty('audio') ? Number.parseFloat(metadata.audio.offset) : 0;
+        }
+
+        if ((metadata && metadata.hasOwnProperty('audio')) || drawing.hasOwnProperty('audio')) {
+            if (metadata && metadata.hasOwnProperty('audio')) {
+                this.audioFile = metadata.audio.file
+                this.audioOffset = Number.parseFloat(metadata.audio.offset);
+            } else {
+                this.audioFile = drawing.audio.file
+                this.audioOffset = Number.parseFloat(drawing.audio.offset);
+            }
             this._currentTimeMs = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
             this._updatePlayhead();
-            //
-            // load any saved metadata
         }
 
         this.filename = drawing.file;
         this.strokes = drawing.shape.map(s => new Stroke(s['color'], s['points']));
         this.viewboxes = drawing.viewboxes;
@@ -619,7 +678,11 @@
         this.currentPointI = null;
         this.currentViewboxI = null;
         this.dimensions = drawing.dimensions;
+        if (!this.config.crop_to_fit) {
         this.svgEl.setAttribute('viewBox', `0 0 ${this.dimensions[0]} ${this.dimensions[1]}`)
+        } else {
+            this.svgEl.setAttribute('viewBox', `${drawing.bounding_box.x} ${drawing.bounding_box.y} ${drawing.bounding_box.width} ${drawing.bounding_box.height}`)
+        }
 
         // let bgEl = document.createElementNS('http://www.w3.org/2000/svg', 'rect');
         // bgEl.setAttribute("x", 0);
@@ -638,13 +701,39 @@
         this.setupAudioConfig().then(() => {
             // this.setUpAnnotator()
 
+            let keyEl;
+
+            if (this.config.is_player) {
+                keyEl = this.wrapperEl;
+            } else {
+                keyEl = document.body; // always capture
             this.updateAnnotations(false);
+            }
 
-            document.body.addEventListener('keyup', (ev) => {
+            keyEl.addEventListener('keyup', (ev) => {
                 if (ev.key == ' ') {
                     this.playPause();
                 }
+                console.log('key', ev);
+
+                // shift+arrow keys, jump playhead (search position)
+                // FIXME doesn't keep playback after initial load. Only after unfocussing the window, and refocussing it, do the keys capture.
+                // Probably a wrong order
+                if (ev.key == 'ArrowLeft' && ev.shiftKey) {
+                    const p = this._paused;
+                    console.log(p);
+                    const diff = ev.ctrlKey ? 10000 : 1000;
+                    this.scrubTo(this._currentTimeMs - diff);
+                    if (!p) { console.log('play!'); this.play(); } // scrubTo() causes a pause();
+                }
+                if (ev.key == 'ArrowRight' && ev.shiftKey) {
+                    const p = this._paused;
+                    console.log(p);
+                    const diff = ev.ctrlKey ? 10000 : 1000;
+                    this.scrubTo(this._currentTimeMs + diff);
+                    if (!p) { console.log('play!'); this.play(); } // scrubTo() causes a pause();
+                }
+
+                // additional keys only for annotation mode
+                if (!this.config.is_player) {
                 if (ev.key == 'i') {
                     this.setInPoint(this.currentTime * 1000);
                 }
@@ -666,22 +755,6 @@
                         this.resetInOutPoint();
                     }
                 }
-
-                // shift+arrow keys, jump playhead (search position)
-                // FIXME doesn't keep playback after initial load. Only after unfocussing the window, and refocussing it, do the keys capture.
-                // Probably a wrong order
-                if (ev.key == 'ArrowLeft' && ev.shiftKey) {
-                    const p = this._paused;
-                    console.log(p);
-                    const diff = ev.ctrlKey ? 10000 : 1000;
-                    this.scrubTo(this._currentTimeMs - diff);
-                    if (!p) { console.log('play!'); this.play(); } // scrubTo() causes a pause();
-                }
-                if (ev.key == 'ArrowRight' && ev.shiftKey) {
-                    const p = this._paused;
-                    console.log(p);
-                    const diff = ev.ctrlKey ? 10000 : 1000;
-                    this.scrubTo(this._currentTimeMs + diff);
-                    if (!p) { console.log('play!'); this.play(); } // scrubTo() causes a pause();
-                }
                 }
             });
         });
@@ -714,11 +787,26 @@
     setupAudioConfig() {
         // audio config
         return new Promise((resolve, reject) => {
+            this.audioEl = document.createElement('audio');
+            if (!this.config.is_player)
+                this.audioEl.setAttribute('controls', true);
+
+            this.audioEl.addEventListener('canplaythrough', (ev) => {
+                console.log('loaded audio', ev);
+                // this.audioEl.play();
+            });
+
+            if (this.config.is_player) {
+                this.wrapperEl.prepend(this.audioEl);
+            }
+            else {
            let audioConfigEl = document.createElement('div');
            audioConfigEl.classList.add('audioconfig')
            this.wrapperEl.appendChild(audioConfigEl);
+           audioConfigEl.prepend(this.audioEl);
 
            let audioSelectEl = document.createElement('select');
            audioSelectEl.classList.add('audioselect');
            audioConfigEl.appendChild(audioSelectEl);
@@ -753,17 +841,10 @@
            });
            audioOffsetTextEl.appendChild(audioOffsetEl);
+           }
 
-           this.audioEl = document.createElement('audio');
-           this.audioEl.setAttribute('controls', true);
-           this.audioEl.addEventListener('canplaythrough', (ev) => {
-               console.log('loaded audio', ev);
-               this.audioEl.play();
-           });
-           // this.audioEl.addEventListener('seeked', (ev)=>{
-           //     console.log(ev);
-           // })
-           audioConfigEl.prepend(this.audioEl);
 
            this.audioEl.addEventListener('loadedmetadata', (ev) => {
                // resolve the 'set up audio' when metadata has loaded
@@ -827,11 +908,15 @@
                 console.debug('delay audio playback', t_start, t_diff);
                 // a negative audiooffset delays playback from the start
                 // this.audioStartTimeout = setTimeout((e) => this.audioEl.play(), t*-1000);
-                this.audioStartTimeout = setTimeout((e) => { this.audioEl.currentTime = 0 }, t_start * -1); // triggers play with "seeked" event
+                this.audioStartTimeout = setTimeout((e) => { this.audioEl.currentTime = 0; this.audioEl.play(); }, t_start * -1); // triggers play with "seeked" event
                 // this.audioEl.currentTime = 0;
             }
         } else {
+            if (this.audioEl.currentTime !== t_start / 1000) {
+                console.log(this.audioEl.currentTime, t_start / 1000);
             this.audioEl.currentTime = t_start / 1000;
+            }
+            this.audioEl.play();
             // this.audioEl.play(); // play is done in "seeked" evenlistener
             console.log(this.audioEl.currentTime, t_start, t_in, t_out);
         }
@@ -844,6 +929,10 @@
         }, t_diff);
     }
 
+    _scrubAudio(time_ms) {
+        this.audioEl.currentTime = Math.max(0, this.getAudioTime(time_ms)) / 1000;
+    }
+
     getFinalFrameTime() {
         const points = this.strokes[this.strokes.length - 1].points;
         return points[points.length - 1][3];
@@ -924,8 +1013,10 @@
         }
         this.currentViewboxI = box_i
         const b = this.viewboxes[box_i];
+        if (!this.config.crop_to_fit) {
         this.svgEl.setAttribute('viewBox', `${b.x} ${b.y} ${this.dimensions[0]} ${this.dimensions[1]}`)
         }
+    }
 
     getNextPosition(path_i, point_i) {
         const path = this.strokes[path_i];
@@ -1053,6 +1144,10 @@
             this._seekByTimeMs(this._currentTimeMs); // prevent playback issue for initial load
         }
 
+        this._setPausedFlag(false);
+
+        const startPlayback = () => {
+            console.log('start playback');
             this.startTimeMs = window.performance.now() - this._currentTimeMs;
 
             // strokes
             if (this._currentTimeMs < 0) {
@@ -1065,13 +1160,26 @@
             this.playViewboxPosition(this.currentViewboxI);
 
             // audio
-            // TODO: use this.audioEl.readyState == 4 : play immediately, otherwise after event
             this.playAudioSegment(this._currentTimeMs, this.outPointTimeMs);
             // this.playStrokePosition(this.currentPathI, this.currentPointI);
 
-            this._setPausedFlag(false);
             this.dispatchEvent(new CustomEvent('play', {}));
             this._animationFrame();
             resolve();
+        }
+
+        if (this.audioEl.readyState !== 4) { // not ready to play after seeking audio.
+            console.log('wait for audio before playback');
+            this.wrapperEl.classList.add('buffering');
+            this.audioEl.addEventListener('canplaythrough', () => {
+                this.wrapperEl.classList.remove('buffering');
+                startPlayback()
+            }, { once: true }); // only once
+        } else {
+            startPlayback();
+        }
         });
     }
@@ -1145,6 +1253,7 @@
     _seekByPoint(point) {
         this.dispatchEvent(new CustomEvent('seeking', {}));
         this._currentTimeMs = this.strokes[point[0]].points[point[1]][2];
+        this.audioEl.currentTime = this.getAudioTime(this._currentTimeMs) / 1000;
         [this.currentPathI, this.currentPointI] = point;
         this._updatePlayhead();
         this._updateFrame();
@@ -1158,6 +1267,7 @@
     _seekByTime(time) {
         this.dispatchEvent(new CustomEvent('seeking', { detail: time }));
         this._currentTimeMs = Number.parseFloat(time) * 1000;
+        this.audioEl.currentTime = this.getAudioTime(this._currentTimeMs) / 1000;
         [this.currentPathI, this.currentPointI] = this.findPositionForTime(this._currentTimeMs);
         this._updatePlayhead();
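
Taken together, the two commits let the same Annotator class run either as the full annotation interface or as a stripped-down player. A usage sketch under assumed element ids and file URLs (none of these names appear in the diff):

    // Annotation mode (default): tags, scrubber, comment field and keyboard shortcuts are built.
    const annotator = new Annotator(
        document.getElementById('annotator'),   // assumed wrapper element
        '/tags.json',                           // assumed tag file
        '/files/recording.json'                 // assumed drawing/audio recording
    );

    // Player mode: annotations are not loaded and the annotator UI is not shown;
    // crop_to_fit shows the whole drawing instead of animating the viewport.
    const player = new Annotator(
        document.getElementById('player'),
        null,                                   // the tag file is not used in player mode
        '/files/recording.json',
        { is_player: true, crop_to_fit: true }
    );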