class Annotation {
  constructor(tag, t_in, t_out, comment) {
    this.tag = tag;
    this.t_in = Number.parseFloat(t_in);
    this.t_out = Number.parseFloat(t_out);
    this.comment = comment;
  }
}


class StrokeGroup {
  constructor(group_element, player) {
    this.g = group_element;
    this.player = player;
  }

  setStrokes(strokes) {
    const pathEls = this.g.querySelectorAll('path');
    let indexes = Object.keys(strokes);

    for (let pathEl of pathEls) {
      const i = pathEl.dataset.path_i;
      if (!indexes.includes(pathEl.dataset.path_i)) {
        pathEl.parentNode.removeChild(pathEl);
      } else {
        // check in and outpoint using pathEl.dataset
        if (strokes[i].getSliceId() != pathEl.dataset.slice) {
          const d = this.points2D(strokes[i].points);
          pathEl.dataset.slice = strokes[i].getSliceId();
          pathEl.setAttribute('d', d);
        }
      }

      // this has now been processed
      indexes.splice(indexes.indexOf(i), 1);
    }

    // new strokes
    indexes.forEach(index => {
      const stroke = strokes[index];

      let pathEl = document.createElementNS('http://www.w3.org/2000/svg', 'path');
      pathEl.style.stroke = stroke.color;
      pathEl.classList.add('path');
      pathEl.dataset.path_i = index;
      pathEl.dataset.slice = stroke.getSliceId();
      this.g.appendChild(pathEl);

      const d = this.points2D(stroke.points);
      pathEl.setAttribute('d', d);
    });
  }

  // convert array of points to a d-attribute
  points2D(strokes) {
    // strokes to a d attribute for a path
    let d = "";
    let last_stroke = undefined;
    let cmd = "";
    for (let stroke of strokes) {
      if (!last_stroke) {
        d += `M${stroke[0]},${stroke[1]} `;
        cmd = 'M';
      } else {
        if (last_stroke[2] == 1) {
          d += " m";
          cmd = 'm';
        } else if (cmd != 'l') {
          d += ' l ';
          cmd = 'l';
        }
        let rel_stroke = [stroke[0] - last_stroke[0], stroke[1] - last_stroke[1]];
        d += `${rel_stroke[0]},${rel_stroke[1]} `;
      }
      last_stroke = stroke;
    }
    return d;
  }
}

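// Note on StrokeGroup.points2D() above: for example, the points
// [[10, 20, 0, 0], [13, 24, 0, 50], [15, 21, 0, 90]] produce the path data
// "M10,20  l 3,4 2,-3 " (an absolute moveto followed by relative linetos);
// a point whose flag (index 2) is 1 starts a relative moveto instead.
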
class Stroke {
  constructor(color, points) {
    this.color = color;
    this.points = points; // [[x1, y1, flag1, t1], [x2, y2, flag2, t2], ...]
  }

  getSliceId() {
    return 'all';
  }
}


class StrokeSlice {
  constructor(stroke, i_in, i_out) {
    this.stroke = stroke; // Stroke
    this.i_in = typeof i_in === 'undefined' ? 0 : i_in;
    this.i_out = typeof i_out === 'undefined' ? this.stroke.points.length - 1 : i_out;
  }

  getSliceId() {
    return `${this.i_in}-${this.i_out}`;
  }

  // compatible with Stroke()
  get points() {
    return this.stroke.points.slice(this.i_in, this.i_out + 1);
  }

  // compatible with Stroke()
  get color() {
    return this.stroke.color;
  }
}

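// For example, new StrokeSlice(stroke, 5, 20) exposes points 5 through 20 of the
// underlying stroke while keeping the Stroke interface (points, color, getSliceId()),
// so StrokeGroup can render partial strokes during playback and scrubbing.
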
class Annotator extends EventTarget {
  constructor(wrapperEl, tagFile, fileurl, config) {
    fileurl = fileurl.replace("&amp;", "&"); // little hack: tornadoweb does this automatically for some reason
    super();

    this.config = {
      is_player: config && config.hasOwnProperty('is_player') ? config.is_player : false, // in player mode annotations are not loaded, nor is the annotator shown
      crop_to_fit: config && config.hasOwnProperty('crop_to_fit') ? config.crop_to_fit : false, // don't animate viewport, but show the whole drawing
      autoplay: config && config.hasOwnProperty('autoplay') ? config.autoplay : false, // immediately start playback
    }

    this.formatter = wNumb({
      decimals: 2,
      edit: (time) => {
        let neg = "";
        if (time < 0) {
          neg = "-";
          time *= -1;
        }
        const s = Math.floor(time / 1000);
        const minutes = Math.floor(s / 60);
        const seconds = s - minutes * 60;
        const ms = Math.floor((time / 1000 - s) * 1000);
        return `${neg}${minutes}:${seconds}.${ms}`;
      },
      undo: (tc) => {
        let [rest, ms] = tc.split(/[\.\,]/);
        ms = parseFloat(typeof ms === "undefined" ? 0 : ms);
        let factor = 1000;
        rest.split(':').reverse().forEach((v, i) => {
          ms += v * factor;
          factor *= 60;
        });
        return `${ms}`;
      }
    });

    this.wrapperEl = wrapperEl;

    this.svgEl = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
    this.wrapperEl.appendChild(this.svgEl);
    this.wrapperEl.classList.add(this.config.is_player ? "svganim_player" : "svganim_annotator");

    this.controlsEl = document.createElement('div');
    this.controlsEl.classList.add('controls');
    this.wrapperEl.appendChild(this.controlsEl);

    this.playbackControlsEl = document.createElement('div');
    this.playbackControlsEl.classList.add('controls--playback');
    this.controlsEl.appendChild(this.playbackControlsEl);

    this.playheadEl = document.createElement('input');
    this.playheadEl.type = "range";
    this.playheadEl.min = 0;
    this.playheadEl.step = 0.01;
    this.playbackControlsEl.appendChild(this.playheadEl);

    this.playheadEl.addEventListener("input", (ev) => {
      this.scrubTo(ev.target.value);
    });
    this.playheadEl.addEventListener('keydown', (ev) => {
      ev.preventDefault(); // don't handle arrow keys here; they are captured by the global key handler
    });

    this.timeCodeEl = document.createElement('input');
    this.timeCodeEl.type = 'numeric';
    this.timeCodeEl.classList.add('timecode');
    this.timeCodeEl.disabled = true;
    this.playbackControlsEl.appendChild(this.timeCodeEl);

    this.playPauseEl = document.createElement('button');
    this.playPauseEl.classList.add('paused');
    this.playbackControlsEl.appendChild(this.playPauseEl);

    this.playPauseEl.addEventListener("click", (ev) => {
      this.playPause();
    });
    this.playPauseEl.addEventListener('keydown', (ev) => {
      ev.preventDefault(); // don't handle the spacebar here; it is captured by the global key handler
    });

    this.scrubberEl = document.createElement('div');
    this.scrubberEl.classList.add('scrubber');
    this.controlsEl.appendChild(this.scrubberEl);

    if (!this.config.is_player) {
      this.annotationsEl = document.createElement('div');
      this.annotationsEl.classList.add('annotations');
      this.controlsEl.appendChild(this.annotationsEl);
    }

    this.inPointPosition = [0, 0];
    this.inPointTimeMs = null;
    this.outPointPosition = null;
    this.outPointTimeMs = null;
    this._currentTimeMs = 0;
    this.videoIsPlaying = false;

    const groups = ['before', 'annotation', 'after'];
    this.strokeGroups = {};
    groups.forEach(group => {
      let groupEl = document.createElementNS('http://www.w3.org/2000/svg', 'g');
      groupEl.classList.add(group);
      this.svgEl.appendChild(groupEl);
      this.strokeGroups[group] = new StrokeGroup(groupEl, this);
    });

    this.annotations = [];

    if (this.config.is_player) {
      this.load(fileurl);
    } else {

      this.loadTags(tagFile).then(() => {
        this.tagsEl = document.createElement('ul');
        this.tagsEl.classList.add('tags');
        const addTags = (tags, tagsEl) => {
          tags.forEach((tag) => {
            let tagLiEl = document.createElement('li');
            let tagEl = document.createElement('div');

            tagEl.classList.add('tag');
            tagEl.dataset.tag = tag.id;
            tagEl.innerText = tag.hasOwnProperty('name') ? tag.name : tag.id;
            tagEl.addEventListener('click', (e) => {
              this.addTag(tag.id, this.inPointPosition, this.outPointPosition);
            });

            tagEl.title = tag.hasOwnProperty('description') ? tag.description : "";

            let signEl = document.createElement('span');
            signEl.classList.add('annotation-' + tag.id);
            signEl.style.backgroundColor = this.getColorForTag(tag.id);
            tagEl.prepend(signEl);

            tagLiEl.appendChild(tagEl);

            if (tag.hasOwnProperty('children')) {
              const subEl = document.createElement('ul');
              subEl.classList.add('subtags');
              addTags(tag.children, subEl);
              tagLiEl.appendChild(subEl);
            }

            tagsEl.appendChild(tagLiEl);
          });
        };
        addTags(this.tags, this.tagsEl);

        let tagEl = document.createElement('li');
        tagEl.classList.add('tag');
        tagEl.classList.add('annotation-rm');
        tagEl.dataset.tag = 'rm';
        tagEl.title = "Remove annotation";
        tagEl.innerHTML = "🚮"; // ×
        tagEl.addEventListener('click', (e) => {
          if (this.selectedAnnotation) {
            this.removeAnnotation(this.selectedAnnotationI);
          }
        });
        this.tagsEl.appendChild(tagEl);

        this.wrapperEl.appendChild(this.tagsEl);

        this.commentEl = document.createElement('input');
        this.commentEl.type = 'text';
        this.commentEl.classList.add('annotation-comment');
        this.commentEl.title = "Add comment to annotation";
        this.commentEl.placeholder = "comment";
        this.commentEl.value = "";
        this.commentEl.addEventListener('keyup', (e) => {
          if (e.key == 'Escape') {
            this.commentEl.blur(); // deselect annotation, and deselect commentEl
          } else {
            e.stopPropagation(); // prevent keyup event to propagate and set i/o points
          }
        });
        this.commentEl.addEventListener('input', (e) => {
          e.stopPropagation(); // prevent keyup event
          if (this.selectedAnnotation) {
            this.selectedAnnotation.comment = this.commentEl.value;
            this.updateAnnotations(true);
          }
        });
        this.controlsEl.appendChild(this.commentEl);

        this.load(fileurl);
      });
    }
  }

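  /**
   * Resolve the display color for a tag, falling back to the parent tag's
   * color and finally to 'black' when no color is configured.
   * @param string tag_id id of the tag in this.tagMap
   * @returns string CSS color
   */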
  getColorForTag(tag_id) {
    const tag = this.tagMap[tag_id];
    console.log(tag_id, tag);
    if (tag && tag.hasOwnProperty('color')) {
      return tag.color;
    }
    if (tag && tag.hasOwnProperty('parent')) {
      return this.getColorForTag(tag['parent'].id);
    }
    return 'black';
  }

  updateAnnotations(save) {

    if (this.config.is_player) {
      return false;
    }

    this.annotationsEl.innerHTML = "";
    for (let annotation_i in this.annotations) {
      const annotation = this.annotations[annotation_i];
      this.annotationEl = document.createElement('div');
      const prerollDiff = Number.parseFloat(this.audioOffset < 0 ? this.audioOffset * -1000 : 0);
      // console.log('diff', prerollDiff, annotation.t_in, typeof annotation.t_in, this.duration, annotation.t_in + prerollDiff, (annotation.t_in + prerollDiff) / this.duration);
      const left = ((annotation.t_in + prerollDiff) / (this.duration * 1000)) * 100;
      const right = 100 - ((annotation.t_out + prerollDiff) / (this.duration * 1000)) * 100;
      this.annotationEl.style.left = left + '%';
      this.annotationEl.style.right = right + '%';

      this.annotationEl.style.backgroundColor = this.getColorForTag(annotation.tag);

      this.annotationEl.classList.add('annotation-' + annotation.tag);
      if (this.selectedAnnotationI == annotation_i) {
        this.annotationEl.classList.add('selected');
      }
      this.annotationEl.title = `[${annotation.tag}] ${annotation.comment}`;

      this.annotationEl.addEventListener('mouseover', (e) => {

      });
      this.annotationEl.addEventListener('mouseout', (e) => {

      });
      this.annotationEl.addEventListener('click', (e) => {
        if (this.selectedAnnotationI == annotation_i) {
          this.deselectAnnotation(false);
        } else {
          this.selectAnnotation(annotation_i);
        }
      });

      this.annotationsEl.appendChild(this.annotationEl);
    }

    this.tagsEl.querySelectorAll('.tag').forEach(tagEl => {
      if (this.selectedAnnotation && this.selectedAnnotation.tag == tagEl.dataset.tag) {
        tagEl.classList.add('selected');
      } else {
        tagEl.classList.remove('selected');
      }
    });

    if (save) {
      this.updateState();
    }
  }

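  // Note on updateAnnotations() above: bars are positioned as percentages of the total
  // duration, shifted by the audio preroll. E.g. with audioOffset -2 (2 s preroll),
  // duration 10 s and t_in 3000 ms: left = ((3000 + 2000) / 10000) * 100 = 50%.
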
  selectAnnotation(annotation_i) {
    this.selectedAnnotationI = annotation_i;
    this.selectedAnnotation = this.annotations[annotation_i];

    this.slider.set([this.selectedAnnotation.t_in, this.selectedAnnotation.t_out]);

    this.inPointPosition = this.findPositionForTime(this.selectedAnnotation.t_in);
    this.outPointPosition = this.findPositionForTime(this.selectedAnnotation.t_out);
    this.inPointTimeMs = this.selectedAnnotation.t_in;
    this.outPointTimeMs = this.selectedAnnotation.t_out;
    this._seekByTimeMs(this.selectedAnnotation.t_in);
    // draw full stroke of annotation:
    this.drawStrokePosition(this.inPointPosition, this.outPointPosition);

    this.updateAnnotations(false); // selects the right tag & highlights the annotation

    this.wrapperEl.classList.add('selected-annotation');
    this.commentEl.value = this.selectedAnnotation.comment;
  }

  deselectAnnotation(keep_position) {
    if (this.selectedAnnotation) {
      this._seekByTimeMs(this.selectedAnnotation.t_out);
    }

    this.wrapperEl.classList.remove('selected-annotation');
    this.commentEl.value = "";
    this.commentEl.blur(); // make sure we're not typing anymore

    this.selectedAnnotationI = null;
    this.selectedAnnotation = null;

    if (!keep_position) {
      this.setUpAnnotator();
    }
    this.updateAnnotations(false); // selects the right tag & highlights the annotation
  }

  setInPoint(time_ms) {
    this.setInOutPoint(time_ms, this.outPointTimeMs);
  }

  setOutPoint(time_ms) {
    this.setInOutPoint(this.inPointTimeMs, time_ms);
  }

  setInOutPoint(in_ms, out_ms) {
    this.inPointPosition = this.findPositionForTime(in_ms);
    this.inPointTimeMs = in_ms;
    this.outPointPosition = this.findPositionForTime(out_ms);
    this.outPointTimeMs = out_ms;
    // this._seekByTimeMs(this.audioOffset < 0 ? this.audioOffset * 1000 : 0);
    // draw full stroke of annotation
    console.log('setInOut');
    this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
    console.log([`${this.inPointTimeMs}`, `${this.outPointTimeMs}`]);
    this.slider.set([this.inPointTimeMs, this.outPointTimeMs]);

    // console.log(this.selectedAnnotation);
    if (this.selectedAnnotation) {
      this.selectedAnnotation.t_in = in_ms;
      this.selectedAnnotation.t_out = out_ms;
      this.updateAnnotations(false);
    }
  }

  resetInOutPoint() {
    this.inPointPosition = [0, 0];
    this.inPointTimeMs = null;
    this.outPointPosition = null;
    this.outPointTimeMs = null;
    this._seekByTimeMs(this.audioOffset < 0 ? this.audioOffset * 1000 : 0);
    // draw full stroke of annotation
    console.log('reset!');
    this.drawStrokePosition(this.inPointPosition, [Infinity, Infinity]);
    this.setUpAnnotator();
  }

  load(file) {
    const request = new Request(file, {
      method: 'GET',
    });

    this.wrapperEl.classList.add('loading');

    fetch(request)
      .then(response => response.json())
      .then(data => {
        if (!this.config.is_player) {

          const metadata_req = new Request(`/annotations/${data.file}`, {
            method: 'GET',
          });
          return fetch(metadata_req)
            .then(response => response.ok ? response.json() : null)
            .then(metadata => {
              if (metadata !== null) {
                metadata.annotations = metadata.annotations.map((a) => new Annotation(a.tag, a.t_in, a.t_out, a.hasOwnProperty('comment') ? a.comment : ""));
              }
              return this.loadStrokes(data, metadata);
            })
            .catch(e => console.log(e));
        } else {
          return this.loadStrokes(data, null);
        }
      })
      .then(() => {
        // play on click for player
        if (this.config.is_player) {
          this.svgEl.addEventListener('click', (ev) => {
            console.log('clicked for play/pause');
            this.playPause();
          });
        }

        // autoplay if necessary
        if (this.config.autoplay) {
          this.play(); // play should remove loading
        } else {
          this.wrapperEl.classList.remove('loading');
        }
      })
      .catch(e => console.log(e));
  }

  updateState() {
    const state = {
      'file': this.filename,
      'annotations': this.annotations,
      'audio': {
        'file': this.audioFile,
        'offset': this.audioOffset,
      }
    }
    const newState = JSON.stringify(state);
    if (newState == this.state) {
      return;
    }

    this.wrapperEl.classList.remove('saved');
    this.wrapperEl.classList.add('unsaved');
    this.state = newState;
    // autosave on state change:
    this.save(newState);
  }

  setSaved(state) {
    if (this.state != state) {
      console.log('already outdated');
    }
    else {
      this.wrapperEl.classList.add('saved');
      this.wrapperEl.classList.remove('unsaved');
    }
  }

  save(state) {
    const request = new Request("/annotations/" + this.filename, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json'
      },
      body: state
    });
    fetch(request)
      .then((response) => {
        if (response.ok) {
          this.setSaved(state);
        }
        else {
          throw Error('Something went wrong');
        }
      })
      .catch((error) => {
        console.log(error);
      });
  }

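  // The state POSTed to /annotations/<filename> has roughly this shape (illustrative):
  // {
  //   "file": "...",
  //   "annotations": [{"tag": "...", "t_in": 0, "t_out": 1000, "comment": ""}, ...],
  //   "audio": {"file": "...", "offset": 0}
  // }
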
  removeAnnotation(annotation_i) {
    this.deselectAnnotation(true);
    this.annotations.splice(annotation_i, 1);
    this.updateAnnotations(true);
  }

  addTag(tag) {
    if (this.selectedAnnotation) {
      this.selectedAnnotation.tag = tag;
      this.updateAnnotations(true);
    } else {

      // TODO this.slider values for in and out
      const [t_in, t_out] = this.slider.get();
      if (this.slider) {
        this.slider.destroy();
      }

      this.annotations.push(new Annotation(tag, t_in, t_out, ""));
      this.updateAnnotations(true);

      this._currentTimeMs = t_out;
      this._updatePlayhead();
      this.setUpAnnotator();
    }
  }


  setUpAnnotator() {

    this.playheadEl.min = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
    this.playheadEl.max = this.getEndTimeMs();
    this._updatePlayhead();

    this.inPointPosition = this.findPositionForTime(this.currentTime);
    this.inPointTimeMs = this._currentTimeMs;
    this.outPointPosition = this.findPositionForTime(this.lastFrameTime); // TODO: simplify to get the last frame indexes directly
    this.outPointTimeMs = this.getEndTimeMs();

    if (!this.config.is_player) {
      this.buildAnnotator();
    }

    this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
  }

  buildAnnotator() {

    if (this.scrubberEl.noUiSlider) {
      this.slider.destroy();
    }

    // console.log(this._currentTimeMs, )
    const sliderMin = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
    const sliderMax = this.getEndTimeMs();
    this.slider = noUiSlider.create(this.scrubberEl, {
      start: [this._currentTimeMs, this.getEndTimeMs()],
      connect: true,
      range: {
        'min': sliderMin,
        'max': sliderMax,
      },
      keyboardDefaultStep: (sliderMax - sliderMin) / 1000,
      keyboardPageMultiplier: 10, // page up/down 10s
      tooltips: [
        this.formatter,
        this.formatter
      ],
      // pips: {
      //   mode: 'range',
      //   density: 3,
      //   format: this.formatter
      // }
    });

    this.slider.on("slide", (values, handle) => {
      this.videoIsPlaying = false;
      this.inPointPosition = this.findPositionForTime(values[0]);
      this.inPointTimeMs = Number.parseFloat(values[0]);
      this.outPointPosition = this.findPositionForTime(values[1]);
      this.outPointTimeMs = Number.parseFloat(values[1]);
      this.drawStrokePosition(this.inPointPosition, this.outPointPosition);

      // console.log(this.selectedAnnotation);
      if (this.selectedAnnotation) {
        this.selectedAnnotation.t_in = Number.parseFloat(values[0]);
        this.selectedAnnotation.t_out = Number.parseFloat(values[1]);
        this.updateAnnotations(false);
      }
    });
    this.slider.on("end", (values, handle) => {
      if (this.selectedAnnotation) {
        this.updateAnnotations(true);
      }
      this._seekByTimeMs(values[0]);
      this.play();
      // this.playAudioSegment(values[0], values[1]);
    });

    this.slider.getTooltips().forEach((ttEl, i) => {
      // console.log(ttEl, i);
      ttEl.addEventListener('click', (e) => {
        let ttInputEl = document.createElement('input');
        ttInputEl.value = ttEl.innerHTML;
        ttEl.innerHTML = "";
        ttEl.appendChild(ttInputEl);
        ttInputEl.focus();

        const submit = () => {
          console.log(ttInputEl.value);
          const tcMs = this.formatter.from(ttInputEl.value);
          let points = this.slider.get();
          points[i] = tcMs;
          console.log(points);
          this.slider.set(points);
        };
        ttInputEl.addEventListener('keydown', (keyE) => {
          keyE.stopPropagation(); // prevent movement of tooltip
          if (keyE.key == "Enter") {
            submit();
          }
        });
        ttInputEl.addEventListener('click', (clickE) => {
          clickE.stopPropagation(); // prevent retrigger on selection
        });
        ttInputEl.addEventListener('blur', submit);
      });
    });
  }

  loadStrokes(drawing, metadata) {
    this.audioOffset = 0;

    if (metadata) {
      this.annotations = metadata.annotations;
    }

    if ((metadata && metadata.hasOwnProperty('audio')) || (drawing.hasOwnProperty('audio') && drawing.audio)) {
      if (metadata && metadata.hasOwnProperty('audio')) {
        this.audioFile = metadata.audio.file;
        this.audioOffset = Number.parseFloat(metadata.audio.offset);
      } else {
        this.audioFile = drawing.audio.file;
        this.audioOffset = Number.parseFloat(drawing.audio.offset);
      }
      this._currentTimeMs = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
      this._updatePlayhead();
    }

    this.filename = drawing.file;
    this.strokes = drawing.shape.map(s => new Stroke(s['color'], s['points']));
    this.viewboxes = drawing.viewboxes;
    this.currentPathI = null;
    this.currentPointI = null;
    this.currentViewboxI = null;
    this.dimensions = drawing.dimensions;
    this.bounding_box = drawing.bounding_box;
    this.updateViewbox();

    // let bgEl = document.createElementNS('http://www.w3.org/2000/svg', 'rect');
    // bgEl.setAttribute("x", 0);
    // bgEl.setAttribute("y", 0);
    // bgEl.setAttribute("width", this.dimensions[0]);
    // bgEl.setAttribute("height", this.dimensions[1]);
    // bgEl.classList.add('background');
    // this.svgEl.prepend(bgEl);

    this.firstFrameTime = this.strokes.length == 0 ? 0 : this.strokes[0].points[0][3];
    this.lastFrameTime = this.getFinalFrameTime();
    this.playheadEl.max = this.lastFrameTime;
    this.nextFrameTimeout = null;
    this.nextViewboxTimeout = null;
    this._setPausedFlag(true);

    return this.setupAudioConfig().then(() => {
      // this.setUpAnnotator()
      let keyEl;
      if (this.config.is_player) {
        keyEl = this.wrapperEl;
      } else {
        keyEl = document.body; // always capture
        this.updateAnnotations(false);
      }

      keyEl.addEventListener('keyup', (ev) => {
        if (ev.key == ' ') {
          this.playPause();
        }

        // shift+arrow keys, jump playhead (search position)
        // FIXME doesn't keep playback after initial load. Only after unfocussing the window, and refocussing it, do the keys capture.
        // Probably a wrong order
        if (ev.key == 'ArrowLeft' && ev.shiftKey) {
          const p = this._paused;
          console.log(p);
          const diff = ev.ctrlKey ? 10000 : 1000;
          this.scrubTo(this._currentTimeMs - diff);
          if (!p) { console.log('play!'); this.play(); } // scrubTo() causes a pause();
        }
        if (ev.key == 'ArrowRight' && ev.shiftKey) {
          const p = this._paused;
          console.log(p);
          const diff = ev.ctrlKey ? 10000 : 1000;
          this.scrubTo(this._currentTimeMs + diff);
          if (!p) { console.log('play!'); this.play(); } // scrubTo() causes a pause();
        }

        // additional keys only for annotation mode
        if (!this.config.is_player) {
          if (ev.key == 'i') {
            this.setInPoint(this.currentTime * 1000);
          }
          if (ev.key == 'o') {
            this.setOutPoint(this.currentTime * 1000);
          }
          if (ev.key == 'I') {
            // shift+i == jump to in point
            this.scrubTo(this.inPointTimeMs);
          }
          if (ev.key == 'O') {
            // shift+o == jump to end point
            this.scrubTo(this.outPointTimeMs);
          }
          if (ev.key == 'Escape') {
            if (this.selectedAnnotation) {
              this.deselectAnnotation();
            } else {
              this.resetInOutPoint();
            }
          }
        }
      });
    });

    // this.playStrokePosition(0, 1);
  }


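  // loadStrokes() expects the drawing JSON to contain roughly (illustrative):
  //   file             - source filename (used for saving annotations)
  //   shape            - [{color, points: [[x, y, flag, t], ...]}, ...]
  //   viewboxes        - [{x, y, t}, ...]
  //   dimensions       - [width, height]
  //   bounding_box     - {x, y, width, height}
  //   audio (optional) - {file, offset}
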
  loadTags(tagFile) {
    // tags config
    const request = new Request(tagFile);
    return fetch(request)
      .then(response => response.json())
      .then(rootTag => {
        this.tags = rootTag.children;
        this.tagMap = {};
        const addTagsToMap = (tags, parent) => {
          tags.forEach((tag) => {
            tag['parent'] = typeof parent != "undefined" ? parent : null;
            this.tagMap[tag.id] = tag;
            if (tag.hasOwnProperty("children")) {
              addTagsToMap(tag.children, tag);
            }
          });
        };
        addTagsToMap(this.tags);
      });
  }

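  // The tag file is a tree rooted in an object whose `children` are the tags; each tag
  // can carry an `id`, and optionally `name`, `description`, `color` and nested `children`.
  // E.g. (illustrative): {"children": [{"id": "gesture", "name": "Gesture", "color": "#f00",
  //                                     "children": [{"id": "gesture-point"}]}]}
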
  setupAudioConfig() {
    // audio config
    return new Promise((resolve, reject) => {
      this.audioEl = document.createElement('audio');
      if (!this.config.is_player)
        this.audioEl.setAttribute('controls', true);

      this.audioEl.addEventListener('canplaythrough', (ev) => {
        console.debug('loaded audio');
        // this.audioEl.play();
      });

      if (this.config.is_player) {
        this.wrapperEl.prepend(this.audioEl);
      }
      else {

        let audioConfigEl = document.createElement('div');
        audioConfigEl.classList.add('audioconfig');
        this.wrapperEl.appendChild(audioConfigEl);

        audioConfigEl.prepend(this.audioEl);

        let audioSelectEl = document.createElement('select');
        audioSelectEl.classList.add('audioselect');
        audioConfigEl.appendChild(audioSelectEl);

        fetch('/audio')
          .then(response => response.json())
          .then(data => {
            data.unshift(''); // add empty, to deselect any file
            data.forEach(audioFile => {
              let optionEl = document.createElement('option');
              optionEl.selected = this.audioFile == audioFile;
              optionEl.innerText = audioFile;
              audioSelectEl.appendChild(optionEl);
            });
          });

        audioSelectEl.addEventListener('change', (ev) => {
          this.setAudioFile(ev.target.value);
        });

        let audioOffsetTextEl = document.createElement('label');
        audioOffsetTextEl.innerText = "Offset (s)";
        audioConfigEl.appendChild(audioOffsetTextEl);

        let audioOffsetEl = document.createElement('input');
        audioOffsetEl.setAttribute('type', 'number');
        audioOffsetEl.setAttribute('step', '.01');
        audioOffsetEl.value = this.audioOffset ?? 0;
        audioOffsetEl.addEventListener('change', (ev) => {
          this.setAudioOffset(ev.target.value);
        });
        audioOffsetTextEl.appendChild(audioOffsetEl);

      }

      this.audioEl.addEventListener('loadedmetadata', (ev) => {
        // resolve the 'set up audio' when metadata has loaded
        this.setUpAnnotator(); // if offset is negative, annotator starts at negative time
        resolve();
      });
      if (this.audioFile) {
        this.audioEl.setAttribute('src', this.audioFile);
      } else {
        this.setUpAnnotator();
        resolve();
      }

    });
  }

  setAudioFile(audioFile) {
    this.audioFile = audioFile;
    this.audioEl.setAttribute('src', this.audioFile);
    // this.audioEl.play();
    // TODO update playhead
    // TODO update this.duration after load
    this.updateState();
  }

  setAudioOffset(audioOffset) {
    this.audioOffset = Number.parseFloat(audioOffset);
    // TODO update playhead
    // TODO update this.duration
    this.setUpAnnotator(); // if offset is negative, annotator starts at negative time
    this.updateState();
  }

  /**
   * @param float time time in ms
   * @returns float
   */
  getAudioTime(time) {
    return Number.parseFloat(time) - (this.audioOffset ?? 0) * 1000;
  }

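  // getAudioTime() maps the internal (drawing) timeline to the audio timeline.
  // E.g. with audioOffset -2, internal time 1000 ms maps to audio time
  // 1000 - (-2 * 1000) = 3000 ms, since the audio starts during the preroll.
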
  /**
   *
   * @param float t_in in point time, in ms
   * @param float t_out out point time, in ms
   */
  playAudioSegment(t_in, t_out) {
    if (this.audioStartTimeout) clearTimeout(this.audioStartTimeout);
    if (this.audioEndTimeout) clearTimeout(this.audioEndTimeout);

    // TODO, handle playback delay
    const t_start = this.getAudioTime(t_in); // in ms
    const t_diff = (t_out ?? this.audioEl.duration * 1000) - t_in; // in ms

    this.audioEl.pause();

    if (t_start < 0) {
      if (t_diff <= t_start * -1) {
        console.debug('no audio playback in segment', t_start, t_diff);
      } else {
        console.debug('delay audio playback', t_start, t_diff);
        // a negative audio offset delays playback from the start
        // this.audioStartTimeout = setTimeout((e) => this.audioEl.play(), t*-1000);
        this.audioStartTimeout = setTimeout((e) => { this.audioEl.currentTime = 0; this.audioEl.play(); }, t_start * -1); // triggers play with "seeked" event
        // this.audioEl.currentTime = 0;
      }
    } else {
      if (this.audioEl.currentTime !== t_start / 1000) {
        console.log(this.audioEl.currentTime, t_start / 1000);
        this.audioEl.currentTime = t_start / 1000;
      }
      this.audioEl.play();
      // this.audioEl.play(); // play is done in the "seeked" event listener
      console.log(this.audioEl.currentTime, t_start, t_in, t_out);
    }

    this.audioIsPlaying = true; // also state as playing in preroll
    this.audioEndTimeout = setTimeout((e) => {
      this.audioEl.pause();
      this.audioIsPlaying = false;
      console.debug('done playing audio');
    }, t_diff);
  }

  _scrubAudio(time_ms) {
    this.audioEl.currentTime = Math.max(0, this.getAudioTime(time_ms)) / 1000;
  }

  getFinalFrameTime() {
    if (this.strokes.length == 0) return null; // when no strokes are loaded (eg. for annotation)
    const points = this.strokes[this.strokes.length - 1].points;
    return points[points.length - 1][3];
  }

  getStrokesSliceForPathRange(in_point, out_point) {
    // get paths for given range. Also, split path at in & out if necessary.
    let slices = {};
    for (let i = in_point[0]; i <= out_point[0]; i++) {
      const stroke = this.strokes[i];
      if (typeof stroke === 'undefined') {
        // out point can be Infinity. So interrupt whenever the end is reached
        break;
      }
      const in_i = (in_point[0] === i) ? in_point[1] : 0;
      const out_i = (out_point[0] === i) ? out_point[1] : Infinity;

      slices[i] = new StrokeSlice(stroke, in_i, out_i);
    }
    return slices;
  }

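  // Positions in this class are [path_i, point_i] pairs: an index into this.strokes and an
  // index into that stroke's points. E.g. getStrokesSliceForPathRange([0, 0], [2, 5]) returns
  // full slices of strokes 0 and 1 plus points 0 through 5 of stroke 2.
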
// TODO: when drawing, have a group active & inactive.
|
|
// active is getPathRange(currentIn, currentOut)
|
|
// inactive is what comes before and after.
|
|
// then, playing the video is just running pathRanghe(0, playhead)
|
|
drawStrokePosition(in_point, out_point, show_all) {
|
|
if (typeof show_all === 'undefined')
|
|
show_all = true;
|
|
|
|
this.strokeGroups['before'].setStrokes(this.getStrokesSliceForPathRange([0, 0], in_point));
|
|
this.strokeGroups['annotation'].setStrokes(this.getStrokesSliceForPathRange(in_point, out_point));
|
|
this.strokeGroups['after'].setStrokes(this.getStrokesSliceForPathRange(out_point, [Infinity, Infinity]));
|
|
|
|
|
|
// // an inpoint is set, so we're annotating
|
|
// // make everything coming before translucent
|
|
// if (this.inPointPosition !== null) {
|
|
// const [inPath_i, inPoint_i] = this.inPointPosition;
|
|
// // returns a static NodeList
|
|
// const currentBeforeEls = this.svgEl.querySelectorAll(`.before_in`);
|
|
// for (let currentBeforeEl of currentBeforeEls) {
|
|
// currentBeforeEl.classList.remove('before_in');
|
|
// }
|
|
|
|
// for (let index = 0; index < inPath_i; index++) {
|
|
// const pathEl = this.svgEl.querySelector(`.path${index}`);
|
|
// if (pathEl) {
|
|
// pathEl.classList.add('before_in');
|
|
// }
|
|
// }
|
|
// }
|
|
|
|
// this.currentPathI = path_i;
|
|
// this.currentPointI = point_i;
|
|
|
|
// const path = this.strokes[path_i];
|
|
// // console.log(path);
|
|
// let pathEl = this.svgEl.querySelector(`.path${path_i}`);
|
|
// if (!pathEl) {
|
|
// pathEl = document.createElementNS('http://www.w3.org/2000/svg', 'path');
|
|
// pathEl.style.stroke = path.color;
|
|
// pathEl.classList.add('path' + path_i)
|
|
// this.svgEl.appendChild(pathEl)
|
|
// }
|
|
|
|
// const stroke = path.points.slice(0, point_i);
|
|
// const d = this.strokes2D(stroke);
|
|
// pathEl.setAttribute('d', d);
|
|
|
|
// this.scrubberElOld.value = path.points[point_i][3];
|
|
// this.currentTime = path.points[point_i][3];
|
|
}
|
|
|
|
  setViewboxPosition(box_i) {
    if (this.currentViewboxI == box_i) {
      return;
    }
    this.currentViewboxI = box_i;
    if (!this.config.crop_to_fit) {
      this.updateViewbox();
    }
  }

  updateViewbox() {
    if (this.config.crop_to_fit) {
      this.svgEl.setAttribute('viewBox', `${this.bounding_box.x} ${this.bounding_box.y} ${this.bounding_box.width} ${this.bounding_box.height}`);
    } else {
      let x, y, w, h;
      if (this.currentViewboxI !== null) {
        x = this.viewboxes[this.currentViewboxI].x;
        y = this.viewboxes[this.currentViewboxI].y;
        w = this.dimensions[0];
        h = this.dimensions[1];
      } else {
        x = 0;
        y = 0;
        w = this.dimensions[0];
        h = this.dimensions[1];
      }
      this.svgEl.setAttribute('viewBox', `${x} ${y} ${w} ${h}`);
    }
  }

  toggleCrop() {
    this.config.crop_to_fit = !this.config.crop_to_fit;
    this.updateViewbox();
  }

  getNextPosition(path_i, point_i) {
    const path = this.strokes[path_i];
    let next_path, next_point;
    if (path.points.length > point_i + 1) {
      next_path = path_i;
      next_point = point_i + 1;
      // setTimeout(() => this.playStroke(next_path, next_point), dt);
    } else if (this.strokes.length > path_i + 1) {
      next_path = path_i + 1;
      next_point = 0;
      // use starttime instead of diff, to prevent floating
    } else {
      return [null, null];
    }

    // when an outpoint is set, stop playing there
    if (this.outPointPosition && (next_path > this.outPointPosition[0] ||
      (next_path == this.outPointPosition[0] && next_point > this.outPointPosition[1]))) {
      console.log('> out point', this.outPointPosition);
      return [null, null];
    }

    return [next_path, next_point];
  }

  playStrokePosition(path_i, point_i, allow_interrupt) {
    if (this.strokes.length === 0) {
      console.debug('No video to play back');
      this.videoIsPlaying = false;
      return;
    }

    if (allow_interrupt) {
      if (!this.videoIsPlaying) {
        console.debug('not playing because of interrupt');
        return;
      }
    } else {
      this.videoIsPlaying = true;
    }
    this.drawStrokePosition(this.inPointPosition, [path_i, point_i]);

    const [next_path, next_point] = this.getNextPosition(path_i, point_i);
    if (next_path === null) {
      console.debug('done playing video');
      this.videoIsPlaying = false;
      return;
    }

    const t = this.strokes[next_path].points[next_point][3]; // - path.points[point_i][3];

    // calculate interval based on playback start to avoid drifting of time
    const dt = t - (window.performance.now() - this.startTimeMs);
    this.nextFrameTimeout = setTimeout(() => this.playStrokePosition(next_path, next_point, true), dt);
  }


  playViewboxPosition(box_i, allow_interrupt) {
    if (allow_interrupt) {
      if (!this.videoIsPlaying) {
        console.debug('not playing because of interrupt');
        return;
      }
    }
    // else {
    //   this.videoIsPlaying = true;
    // }
    this.setViewboxPosition(box_i);

    const next_box_i = box_i + 1;
    if (this.viewboxes.length <= next_box_i) {
      console.debug('done playing viewbox');
      return;
    }

    const t = this.viewboxes[next_box_i].t;

    // calculate interval based on playback start to avoid drifting of time
    const dt = t - (window.performance.now() - this.startTimeMs);
    this.nextViewboxTimeout = setTimeout(() => this.playViewboxPosition(next_box_i, true), dt);
  }

  scrubTo(ms) {
    // const [path_i, point_i] = this.findPositionForTime(ms);
    // console.log(path_i, point_i);
    this.pause();
    this._seekByTime(ms / 1000);
    // this.playHead = ms;
  }

  playPause() {
    if (this.paused) {
      this.play();
    } else {
      this.pause();
    }
  }

  /**
   * Compatibility with HTMLMediaElement API
   * @returns None
   */
  pause() {
    this._interruptPlayback();
  }

  _interruptPlayback() {
    clearTimeout(this.nextFrameTimeout);
    clearTimeout(this.nextViewboxTimeout);
    clearTimeout(this.audioEndTimeout);
    clearTimeout(this.audioStartTimeout);
    clearTimeout(this.startVideoTimeout);
    this.audioEl.pause();
    this.videoIsPlaying = false;
    this.audioIsPlaying = false;
    this._setPausedFlag(true);
  }

  /**
   * Compatibility with HTMLMediaElement API
   * @returns Promise
   */
  play() {
    return new Promise((resolve, reject) => {
      this._interruptPlayback();

      if (this._currentTimeMs > this.outPointTimeMs) {
        this._seekByTimeMs(this.inPointTimeMs);
      } else {
        this._seekByTimeMs(this._currentTimeMs); // prevent playback issue for initial load
      }

      this._setPausedFlag(false);

      const startPlayback = () => {
        console.debug('start playback');
        this.wrapperEl.classList.remove('loading'); // no loading anymore

        this.startTimeMs = window.performance.now() - this._currentTimeMs;
        // strokes
        if (this._currentTimeMs < 0) {
          this.startVideoTimeout = setTimeout((e) => this.playStrokePosition(this.currentPathI, this.currentPointI), this._currentTimeMs * -1);
        } else {
          this.playStrokePosition(this.currentPathI, this.currentPointI);
        }
        // viewboxes
        // const nextViewboxI = Math.max(this.currentViewboxI++, this.viewboxes.length-1);
        this.playViewboxPosition(this.currentViewboxI);

        // audio
        // TODO: use this.audioEl.readyState == 4 : play immediately, otherwise after event
        this.playAudioSegment(this._currentTimeMs, this.outPointTimeMs);
        // this.playStrokePosition(this.currentPathI, this.currentPointI);

        this.dispatchEvent(new CustomEvent('play', {}));
        this._animationFrame();
        resolve();
      }

      if (this.audioEl.src.length && this.audioEl.readyState !== 4) { // not ready to play after seeking audio.
        console.debug('wait for audio before playback');
        this.wrapperEl.classList.add('loading');
        this.audioEl.addEventListener('canplaythrough', () => {
          startPlayback();
        }, { once: true }); // only once
      } else {
        startPlayback();
      }

    });
  }

  _setPausedFlag(paused) {
    this._paused = !!paused; // convert to boolean
    if (paused) {
      this.playPauseEl.classList.remove('playing');
      this.playPauseEl.classList.add('paused');
    } else {
      this.playPauseEl.classList.remove('paused');
      this.playPauseEl.classList.add('playing');
    }
  }

  get paused() {
    return this._paused;
  }

  _updatePlayhead() {
    this.playheadEl.value = this._currentTimeMs;
    this.timeCodeEl.value = this.formatter.to(this._currentTimeMs);
  }


  // on playback, run every requestAnimationFrame
  _animationFrame(timestamp) {
    // TODO, move time at end of playStrokePosition to here
    const nextTime = window.performance.now() - this.startTimeMs;
    const endTime = this.outPointTimeMs ?? this.duration * 1000;
    let interrupt = false;
    if (nextTime > endTime) {
      this._currentTimeMs = endTime;
      interrupt = true;
    } else {
      this._currentTimeMs = nextTime;
    }
    this._updatePlayhead();
    if (!interrupt && (this.videoIsPlaying || this.audioIsPlaying)) {
      window.requestAnimationFrame((timestamp) => this._animationFrame(timestamp));
    } else {
      console.debug('finished playback');
      this._interruptPlayback();
      // this.resetPlayhead(); // Disable to not jump to start on pause. TODO: check if this causes issues e.g. on end
    }
  }

  resetPlayhead() {
    this._seekByTimeMs(this.inPointTimeMs);
    if (this.selectedAnnotation) {
      // show the whole selected annotation
      this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
    }
  }

  /**
   * Note that both t_in and t_out can be negative
   * @param float|Array t_in in point time, in ms or array with path/frame points
   * @param float|Array t_out out point time, in ms or array with path/frame points
   */
  playSegment(in_point, out_point) {
    if (!Array.isArray(in_point)) in_point = this.findPositionForTime(in_point);
    if (!Array.isArray(out_point)) out_point = this.findPositionForTime(out_point);

    this.inPointPosition = in_point;
    this.outPointPosition = out_point;
    this._seekByPoint(in_point);

    this.play();
  }

  _seekByPoint(point) {
    this.dispatchEvent(new CustomEvent('seeking', {}));
    this._currentTimeMs = this.strokes[point[0]].points[point[1]][2];
    this.audioEl.currentTime = this.getAudioTime(this._currentTimeMs) / 1000;
    [this.currentPathI, this.currentPointI] = point;
    this._updatePlayhead();
    this._updateFrame();
    // TODO set audio, wait for promise to finish
    this.dispatchEvent(new CustomEvent('seeked', {}));
  }

  _seekByTimeMs(time) {
    this._seekByTime(Number.parseFloat(time) / 1000);
  }

  _seekByTime(time) {
    this.dispatchEvent(new CustomEvent('seeking', { detail: time }));
    this._currentTimeMs = Number.parseFloat(time) * 1000;
    this.audioEl.currentTime = this.getAudioTime(this._currentTimeMs) / 1000;
    [this.currentPathI, this.currentPointI] = this.findPositionForTime(this._currentTimeMs);

    this._updatePlayhead();
    this._updateFrame();
    this.dispatchEvent(new CustomEvent('seeked', { detail: this.currentTime }));
  }

  _updateFrame() {
    this.drawStrokePosition(this.inPointPosition, [this.currentPathI, this.currentPointI]);
    this.setViewboxPosition(this.findViewboxForTime(this._currentTimeMs));
  }

  /**
   * For compatibility with HTMLMediaElement API convert seconds to ms of internal timer
   */
  set currentTime(time) {
    this._seekByTime(time);
  }

  get currentTime() {
    return this._currentTimeMs / 1000;
  }

  getEndTimeMs() {
    const videoDuration = this.getFinalFrameTime();
    const audioDuration = (this.audioEl && this.audioEl.src) ? this.audioEl.duration + this.audioOffset : 0;

    return Math.max(videoDuration, audioDuration * 1000);
  }

  get duration() {

    const prerollDuration = this.audioOffset < 0 ? this.audioOffset * -1 : 0;

    return prerollDuration + this.getEndTimeMs() / 1000;
  }

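  // E.g. with audioOffset -2, a 10 s final frame time and 11 s of audio:
  // getEndTimeMs() = max(10000, (11 + -2) * 1000) = 10000 ms, and
  // duration = 2 + 10000 / 1000 = 12 s (2 s of preroll plus 10 s of drawing).
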
  findPositionForTime(ms) {
    ms = Math.min(Math.max(ms, 0), this.lastFrameTime);
    // console.log('scrub to', ms)
    let path_i = 0;
    let point_i = 0;
    this.strokes.every((stroke, index) => {
      const startAt = stroke.points[0][3];
      const endAt = stroke.points[stroke.points.length - 1][3];

      if (startAt > ms) {
        return false; // too far
      }
      if (endAt > ms) {
        // we're getting close. Find the right point_i
        path_i = index;
        stroke.points.every((point, pi) => {
          if (point[3] > ms) {
            // too far
            return false;
          }
          point_i = pi;
          return true;
        });
        return false;
      } else {
        // in case nothing comes after, we store the last best option thus far
        path_i = index;
        point_i = stroke.points.length - 1;
        return true;
      }

    });
    return [path_i, point_i];
  }

  findViewboxForTime(ms) {
    ms = Math.min(Math.max(ms, 0), this.lastFrameTime);
    // console.log('scrub to', ms)
    let box_i = 0;
    this.viewboxes.every((viewbox, index) => {
      const startAt = viewbox.t;

      if (startAt > ms) {
        return false; // too far
      } else {
        // in case nothing comes after, we store the last best option thus far
        box_i = index;
        return true;
      }

    });
    return box_i;
  }

}
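
// Example usage (illustrative; the element, tag file and drawing URLs depend on the embedding page):
//   const annotator = new Annotator(
//     document.getElementById('annotator'),
//     '/tags.json',
//     '/files/drawing.json',
//     { is_player: false, crop_to_fit: false, autoplay: false }
//   );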