/**
 * A tagged time range on the recording.
 * Times are stored as float milliseconds.
 */
class Annotation {
    /**
     * @param {string} tag label for the annotated range
     * @param {number|string} inTime in point in ms (coerced to float)
     * @param {number|string} outTime out point in ms (coerced to float)
     */
    constructor(tag, inTime, outTime) {
        this.tag = tag;
        this.t_in = Number.parseFloat(inTime);
        this.t_out = Number.parseFloat(outTime);
    }
}

/**
 * Wraps one SVG <g> element and keeps its <path> children in sync with a
 * set of strokes (Stroke or StrokeSlice objects, keyed by stroke index).
 */
class StrokeGroup {
    /**
     * @param {SVGGElement} group_element the <g> element to manage
     * @param {Object} player owner providing `dimensions` [width, height]
     */
    constructor(group_element, player) {
        this.g = group_element;
        this.player = player;
    }

    /**
     * Synchronise the <path> children of the group with `strokes`:
     * paths whose index is no longer present are removed, paths whose
     * slice changed are redrawn, and indexes without a path get a new one.
     * @param {Object} strokes map of stroke index -> Stroke/StrokeSlice
     */
    setStrokes(strokes) {
        const pathEls = this.g.querySelectorAll('path');
        let indexes = Object.keys(strokes);
        for (let pathEl of pathEls) {
            const i = pathEl.dataset.path_i;
            const knownIdx = indexes.indexOf(i);
            if (knownIdx === -1) {
                // stroke fell out of the displayed range: drop its path
                pathEl.parentNode.removeChild(pathEl);
            } else {
                // redraw only when the slice (in/out range) changed
                if (strokes[i].getSliceId() != pathEl.dataset.slice) {
                    const d = this.points2D(strokes[i].points);
                    pathEl.dataset.slice = strokes[i].getSliceId();
                    pathEl.setAttribute('d', d);
                }
                // mark as processed.
                // BUGFIX: splice() was previously called unconditionally, so
                // for removed paths indexOf() returned -1 and splice(-1, 1)
                // deleted the *last* pending index, silently skipping the
                // creation of one new stroke path.
                indexes.splice(knownIdx, 1);
            }
        }

        // whatever is left in `indexes` has no path yet: create one
        indexes.forEach(index => {
            const stroke = strokes[index];

            let pathEl = document.createElementNS('http://www.w3.org/2000/svg', 'path');
            pathEl.style.stroke = stroke.color;
            pathEl.classList.add('path');
            pathEl.dataset.path_i = index;
            pathEl.dataset.slice = stroke.getSliceId();
            this.g.appendChild(pathEl);

            const d = this.points2D(stroke.points);
            pathEl.setAttribute('d', d);
        });
    }

    /**
     * Convert an array of normalised points to an SVG path `d` attribute.
     * Points are [x, y, flag, ...]; x/y in 0..1 are scaled by the player
     * dimensions. A flag of 1 means "pen lifted": the next point becomes a
     * relative move ('m') instead of a relative line ('l').
     * @param {Array<Array<number>>} strokes the points of one stroke
     * @returns {string} the `d` attribute value
     */
    points2D(strokes) {
        let d = "";
        let last_stroke = undefined;
        let cmd = "";
        for (let stroke of strokes) {
            if (!last_stroke) {
                // first point: absolute move-to
                d += `M${stroke[0] * this.player.dimensions[0]},${stroke[1] * this.player.dimensions[1]} `;
                cmd = 'M';
            } else {
                if (last_stroke[2] == 1) {
                    // previous point ended a sub-stroke: relative move
                    d += " m";
                    cmd = 'm';
                } else if (cmd != 'l') {
                    d += ' l ';
                    cmd = 'l';
                }
                // all subsequent coordinates are relative to the previous point
                let rel_stroke = [stroke[0] - last_stroke[0], stroke[1] - last_stroke[1]];
                d += `${rel_stroke[0] * this.player.dimensions[0]},${rel_stroke[1] * this.player.dimensions[1]} `;
            }
            last_stroke = stroke;
        }
        return d;
    }
}

/**
 * One drawn stroke: a colour plus an ordered list of timed points.
 */
class Stroke {
    /**
     * @param {string} color CSS colour of the stroke
     * @param {Array<Array<number>>} points stored as [[x1,y1,t1], [x2,y2,t2], ...]
     */
    constructor(color, points) {
        this.color = color;
        this.points = points;
    }

    /**
     * Identifier of the displayed range; a full stroke always shows everything.
     * Interface-compatible with StrokeSlice.getSliceId().
     * @returns {string}
     */
    getSliceId() {
        return 'all';
    }
}

/**
 * A view onto a contiguous point range of an existing Stroke.
 * Exposes the same `points` / `color` / `getSliceId()` interface as Stroke.
 */
class StrokeSlice {
    /**
     * @param {Stroke} stroke the underlying stroke
     * @param {number} [i_in] first point index (defaults to 0)
     * @param {number} [i_out] last point index, inclusive (defaults to the final point)
     */
    constructor(stroke, i_in, i_out) {
        this.stroke = stroke;
        // keep explicit undefined checks: other falsy/nullish values must pass through unchanged
        if (typeof i_in === 'undefined') {
            this.i_in = 0;
        } else {
            this.i_in = i_in;
        }
        if (typeof i_out === 'undefined') {
            this.i_out = this.stroke.points.length - 1;
        } else {
            this.i_out = i_out;
        }
    }

    /**
     * @returns {string} identifier of the covered range, e.g. "0-12"
     */
    getSliceId() {
        return `${this.i_in}-${this.i_out}`;
    }

    /**
     * The covered points, inclusive of both endpoints (Stroke-compatible).
     */
    get points() {
        return this.stroke.points.slice(this.i_in, this.i_out + 1);
    }

    /**
     * Colour of the underlying stroke (Stroke-compatible).
     */
    get color() {
        return this.stroke.color;
    }
}

/**
 * Player + annotation UI for a timed stroke drawing with optional audio.
 *
 * Builds its controls (playhead range input, play/pause button, noUiSlider
 * scrubber, tag list, annotation bars) inside `wrapperEl`, loads the drawing
 * JSON from `fileurl` plus saved annotations from `/annotations/<file>`, and
 * autosaves state changes back via POST.
 *
 * Internal time is milliseconds (`_currentTimeMs`); the HTMLMediaElement-like
 * API (`currentTime`, `play()`, `pause()`, `paused`, 'play'/'seeking'/'seeked'
 * events) works in seconds. A negative `audioOffset` (seconds) creates a
 * "preroll": playback time can be negative before the drawing starts.
 */
class Annotator extends EventTarget {
    constructor(wrapperEl, tags, fileurl) {
        super();

        this.wrapperEl = wrapperEl;
        this.svgEl = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
        this.wrapperEl.appendChild(this.svgEl);

        this.controlsEl = document.createElement('div');
        this.controlsEl.classList.add('controls')
        this.wrapperEl.appendChild(this.controlsEl);

        // playhead: a range input scrubbing over the full timeline
        this.playheadEl = document.createElement('input');
        this.playheadEl.type = "range";
        this.playheadEl.min = 0;
        this.playheadEl.step = 0.01;
        this.controlsEl.appendChild(this.playheadEl);

        this.playheadEl.addEventListener("input", (ev) => {
            this.scrubTo(ev.target.value);
        })

        this.playPauseEl = document.createElement('button');
        this.playPauseEl.classList.add('paused');
        this.controlsEl.appendChild(this.playPauseEl);

        this.playPauseEl.addEventListener("click", (ev) => {
            this.playPause()
        })

        // container for the noUiSlider in/out-point scrubber (set up later)
        this.scrubberEl = document.createElement('div');
        this.scrubberEl.classList.add('scrubber')
        this.controlsEl.appendChild(this.scrubberEl);

        // one clickable <li> per tag; clicking annotates the current in/out range
        this.tagsEl = document.createElement('ul');
        this.tagsEl.classList.add('tags');
        for (let tag of tags) {
            let tagEl = document.createElement('li');
            tagEl.classList.add('tag');
            tagEl.dataset.tag = tag;
            tagEl.innerText = tag;
            tagEl.addEventListener('click', (e) => {
                // NOTE(review): addTag() is declared with a single parameter;
                // the two extra position arguments here are ignored
                this.addTag(tag, this.inPointPosition, this.outPointPosition);
            })

            let signEl = document.createElement('span');
            signEl.classList.add('annotation-' + tag);
            tagEl.prepend(signEl);
            this.tagsEl.appendChild(tagEl);
        }
        // extra pseudo-tag that removes the selected annotation
        let tagEl = document.createElement('li');
        tagEl.classList.add('tag');
        tagEl.classList.add('annotation-rm');
        tagEl.dataset.tag = 'rm';
        tagEl.title = "Remove annotation";
        tagEl.innerHTML = "🚮"; // ×
        tagEl.addEventListener('click', (e) => {
            if (this.selectedAnnotation) {
                this.removeAnnotation(this.selectedAnnotationI);
            }
        });
        this.tagsEl.appendChild(tagEl);

        this.controlsEl.appendChild(this.tagsEl);

        // horizontal bars visualising saved annotations along the timeline
        this.annotationsEl = document.createElement('div');
        this.annotationsEl.classList.add('annotations')
        this.controlsEl.appendChild(this.annotationsEl);

        // positions are [path_i, point_i] pairs; times are ms
        this.inPointPosition = [0, 0];
        this.inPointTimeMs = null;
        this.outPointPosition = null;
        this.outPointTimeMs = null;
        this._currentTimeMs = 0;
        this.videoIsPlaying = false;

        // three layers: strokes before the in point, inside the selected
        // range, and after the out point (styled differently via CSS)
        const groups = ['before', 'annotation', 'after']
        this.strokeGroups = {};
        groups.forEach(group => {
            let groupEl = document.createElementNS('http://www.w3.org/2000/svg', 'g');
            groupEl.classList.add(group)
            this.svgEl.appendChild(groupEl);
            this.strokeGroups[group] = new StrokeGroup(groupEl, this);
        });

        this.annotations = [];

        this.load(fileurl);
    }

    /**
     * Rebuild the annotation bars and tag highlighting.
     * @param {boolean} save when true, persist state via updateState()
     */
    updateAnnotations(save) {

        this.annotationsEl.innerHTML = "";
        for (let annotation_i in this.annotations) {
            const annotation = this.annotations[annotation_i];
            this.annotationEl = document.createElement('div');
            // a negative audio offset shifts the timeline start before t=0
            const prerollDiff = Number.parseFloat(this.audioOffset < 0 ? this.audioOffset * -1000 : 0);
            // bar position as percentage of total duration (duration is in s)
            const left = ((annotation.t_in + prerollDiff) / (this.duration * 1000)) * 100;
            const right = 100 - ((annotation.t_out + prerollDiff) / (this.duration * 1000)) * 100;
            this.annotationEl.style.left = left + '%';
            this.annotationEl.style.right = right + '%';

            this.annotationEl.classList.add('annotation-' + annotation.tag);
            if (this.selectedAnnotationI == annotation_i) {
                this.annotationEl.classList.add('selected');
            }
            this.annotationEl.title = annotation.tag;

            this.annotationEl.addEventListener('mouseover', (e) => {

            });
            this.annotationEl.addEventListener('mouseout', (e) => {

            });
            // click toggles selection of this annotation
            this.annotationEl.addEventListener('click', (e) => {
                if (this.selectedAnnotationI == annotation_i) {
                    this.deselectAnnotation(false);
                } else {
                    this.selectAnnotation(annotation_i);
                }
            });

            this.annotationsEl.appendChild(this.annotationEl);
        }

        // highlight the tag belonging to the selected annotation (if any)
        this.tagsEl.childNodes.forEach(tagEl => {
            if (this.selectedAnnotation && this.selectedAnnotation.tag == tagEl.dataset.tag) {
                tagEl.classList.add('selected')
            } else {
                tagEl.classList.remove('selected')
            }
        });

        if (save) {
            this.updateState();
        }
    }

    /**
     * Select the annotation at index `annotation_i`: move the slider handles
     * and in/out points to its range, seek to its start, and redraw.
     */
    selectAnnotation(annotation_i) {
        this.selectedAnnotationI = annotation_i;
        this.selectedAnnotation = this.annotations[annotation_i];

        this.slider.set([this.selectedAnnotation.t_in, this.selectedAnnotation.t_out]);

        this.inPointPosition = this.findPositionForTime(this.selectedAnnotation.t_in);
        this.outPointPosition = this.findPositionForTime(this.selectedAnnotation.t_out);
        this.inPointTimeMs = this.selectedAnnotation.t_in;
        this.outPointTimeMs = this.selectedAnnotation.t_out;
        this._seekByTimeMs(this.selectedAnnotation.t_in);
        // draw full stroke of annotation:
        this.drawStrokePosition(this.inPointPosition, this.outPointPosition);

        this.updateAnnotations(false); //selects the right tag & highlights the annotation

        this.wrapperEl.classList.add('selected-annotation');
    }

    /**
     * Clear the current selection.
     * @param {boolean} keep_position when false, reset the annotator UI
     *                  (slider range, in/out points) via setUpAnnotator()
     */
    deselectAnnotation(keep_position) {
        if (this.selectedAnnotation) {
            // leave the playhead at the end of the annotation we just left
            this._seekByTimeMs(this.selectedAnnotation.t_out);
        }

        this.wrapperEl.classList.remove('selected-annotation');

        this.selectedAnnotationI = null;
        this.selectedAnnotation = null;

        if (!keep_position) {
            this.setUpAnnotator();
        }
        this.updateAnnotations(false); // selects the right tag & highlights the annotation
    }

    /**
     * Reset in/out points to the full timeline and seek back to the start
     * (which is negative when a preroll audio offset is configured).
     */
    resetInOutPoint() {
        this.inPointPosition = [0, 0];
        this.inPointTimeMs = null;
        this.outPointPosition = null;
        this.outPointTimeMs = null;
        this._seekByTimeMs(this.audioOffset < 0 ? this.audioOffset * 1000 : 0);
        // draw full stroke of annotation
        console.log('reset!');
        this.drawStrokePosition(this.inPointPosition, [Infinity, Infinity]);
        this.setUpAnnotator();
    }

    /**
     * Fetch the drawing JSON from `file`, then its saved metadata from
     * /annotations/<name>, and hand both to loadStrokes().
     * NOTE(review): when the metadata response is not ok, `metadata` is null
     * and the .map() below throws — swallowed by the .catch(), so
     * loadStrokes() is never called for drawings without saved annotations.
     */
    load(file) {
        const request = new Request(file, {
            method: 'GET',
        });

        fetch(request)
            .then(response => response.json())
            .then(data => {
                const metadata_req = new Request(`/annotations/${data.file}`, {
                    method: 'GET',
                });
                fetch(metadata_req)
                    .then(response => response.ok ? response.json() : null)
                    .then(metadata => {
                        // revive plain objects into Annotation instances
                        metadata.annotations = metadata.annotations.map((a) => new Annotation(a.tag, a.t_in, a.t_out))
                        this.loadStrokes(data, metadata)
                    })
                    .catch(e => console.log(e));
                // do something with the data sent in the request
            });
    }

    /**
     * Serialize annotations + audio config; if it differs from the last
     * saved state, mark the UI unsaved and autosave.
     */
    updateState() {
        const state = {
            'file': this.filename,
            'annotations': this.annotations,
            'audio': {
                'file': this.audioFile,
                'offset': this.audioOffset,
            }
        }
        const newState = JSON.stringify(state);
        if (newState == this.state) {
            return; // nothing changed
        }

        this.wrapperEl.classList.remove('saved');
        this.wrapperEl.classList.add('unsaved');
        this.state = newState;
        // autosave on state change:
        this.save(newState);
    }

    /**
     * Mark the UI saved, but only if `state` still matches the current
     * state (the user may have edited again while the POST was in flight).
     */
    setSaved(state) {
        if (this.state != state) {
            console.log('already outdated');
        }
        else {
            this.wrapperEl.classList.add('saved');
            this.wrapperEl.classList.remove('unsaved');
        }
    }

    /**
     * POST the serialized state to /annotations/<filename>.
     * @param {string} state JSON string produced by updateState()
     */
    save(state) {
        const request = new Request("/annotations/" + this.filename, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json'
            },
            body: state
        });
        fetch(request)
            .then((response) => {
                if (response.ok) {
                    this.setSaved(state);
                }
                else {
                    throw Error('Something went wrong');
                }
            })
            .catch((error) => {
                console.log(error);
            });
    }

    /**
     * Remove the annotation at `annotation_i`, keeping the playhead where
     * the deselection put it, and persist.
     */
    removeAnnotation(annotation_i) {
        this.deselectAnnotation(true);
        this.annotations.splice(annotation_i, 1);
        this.updateAnnotations(true);
    }

    /**
     * Either retag the selected annotation, or create a new annotation for
     * the current slider range and persist it.
     */
    addTag(tag) {
        if (this.selectedAnnotation) {
            this.selectedAnnotation.tag = tag;
            this.updateAnnotations(true);
        } else {

            // TODO this.slider values for in and out
            // NOTE(review): slider.get() is called before the null check
            // below — if this.slider can be unset this throws first
            const [t_in, t_out] = this.slider.get();
            if (this.slider) {
                this.slider.destroy();
            }

            this.annotations.push(new Annotation(tag, t_in, t_out));
            this.updateAnnotations(true);

            // continue annotating from the end of the new annotation
            this._currentTimeMs = t_out;
            this.playheadEl.value = this._currentTimeMs;
            this.setUpAnnotator();
        }
    }

    /**
     * (Re)build the in/out-point slider over the full timeline, starting at
     * the current playhead, and wire its slide/end handlers. Safe to call
     * repeatedly: destroys any existing noUiSlider instance first.
     */
    setUpAnnotator() {
        this.playheadEl.min = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
        this.playheadEl.max = this.getEndTimeMs();
        this.playheadEl.value = this._currentTimeMs;

        // NOTE(review): findPositionForTime expects ms but this.currentTime
        // is seconds — confirm whether the in point is intentionally coarse
        this.inPointPosition = this.findPositionForTime(this.currentTime);
        this.inPointTimeMs = this._currentTimeMs;
        this.outPointPosition = this.findPositionForTime(this.lastFrameTime); // TODO: simplify to get the last frame indexes directly
        this.outPointTimeMs = this.getEndTimeMs();

        if (this.scrubberEl.noUiSlider) {
            this.slider.destroy();
        }

        this.slider = noUiSlider.create(this.scrubberEl, {
            start: [this._currentTimeMs, this.getEndTimeMs()],
            connect: true,
            range: {
                'min': this.audioOffset < 0 ? this.audioOffset * 1000 : 0,
                'max': this.getEndTimeMs(),
            },
            tooltips: [
                this.formatter,
                this.formatter
            ],
        });

        // dragging a handle pauses video and live-updates the in/out range
        this.slider.on("slide", (values, handle) => {
            this.videoIsPlaying = false;
            this.inPointPosition = this.findPositionForTime(values[0]);
            this.inPointTimeMs = Number.parseFloat(values[0]);
            this.outPointPosition = this.findPositionForTime(values[1]);
            this.outPointTimeMs = Number.parseFloat(values[1]);
            this.drawStrokePosition(this.inPointPosition, this.outPointPosition);

            // while an annotation is selected, dragging edits its range
            if (this.selectedAnnotation) {
                this.selectedAnnotation.t_in = Number.parseFloat(values[0]);
                this.selectedAnnotation.t_out = Number.parseFloat(values[1]);
                this.updateAnnotations(false);
            }
        });
        // on release: persist (if editing) and preview from the in point
        this.slider.on("end", (values, handle) => {
            if (this.selectedAnnotation) {
                this.updateAnnotations(true);
            }
            this._seekByTimeMs(values[0]);
            this.play();
        });

        this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
    }

    /**
     * Initialise playback state from the fetched drawing + saved metadata:
     * strokes, dimensions/viewBox, background rect, frame times, the
     * tooltip time formatter, audio config UI, and keyboard shortcuts
     * (space = play/pause, Escape = deselect / reset in-out).
     * @param {Object} drawing drawing JSON: {file, shape, dimensions}
     * @param {Object|null} metadata saved {annotations, audio} or null
     */
    loadStrokes(drawing, metadata) {
        this.audioOffset = 0;
        if (metadata) {
            this.annotations = metadata.annotations;
            this.audioFile = metadata.hasOwnProperty('audio') ? metadata.audio.file : null;
            this.audioOffset = metadata.hasOwnProperty('audio') ? Number.parseFloat(metadata.audio.offset) : 0;
            // negative offset: start in the (negative-time) preroll
            this._currentTimeMs = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
            this.playheadEl.value = this._currentTimeMs;
        }
        this.filename = drawing.file;
        this.strokes = drawing.shape.map(s => new Stroke(s['color'], s['points']));
        this.currentPathI = null;
        this.currentPointI = null;
        this.dimensions = drawing.dimensions;
        this.svgEl.setAttribute('viewBox', `0 0 ${this.dimensions[0]} ${this.dimensions[1]}`)

        let bgEl = document.createElementNS('http://www.w3.org/2000/svg', 'rect');
        bgEl.setAttribute("x", 0);
        bgEl.setAttribute("y", 0);
        bgEl.setAttribute("width", this.dimensions[0]);
        bgEl.setAttribute("height", this.dimensions[1]);
        bgEl.classList.add('background');
        this.svgEl.prepend(bgEl);

        // point layout is [x, y, flag, t]: index 3 is the timestamp in ms
        this.firstFrameTime = this.strokes[0].points[0][3];
        this.lastFrameTime = this.getFinalFrameTime();
        this.playheadEl.max = this.lastFrameTime;
        this.nextFrameTimeout = null;
        this._setPausedFlag(true);

        // tooltip formatter: ms -> "m:s:ms" (with leading '-' for preroll)
        this.formatter = wNumb({
            decimals: 2,
            edit: (time) => {
                let neg = "";
                if (time < 0) {
                    neg = "-";
                    time *= -1;
                }
                const s = Math.floor(time / 1000);
                const minutes = Math.floor(s / 60);
                const seconds = s - minutes * 60;
                const ms = Math.floor((time / 1000 - s) * 1000);
                return `${neg}${minutes}:${seconds}:${ms}`;
            }
        });

        this.setupAudioConfig().then(() => {
            this.updateAnnotations(false);

            document.body.addEventListener('keyup', (ev) => {
                if (ev.key == ' ') {
                    this.playPause();
                }
                if (ev.key == 'Escape') {
                    if (this.selectedAnnotation) {
                        this.deselectAnnotation();
                    } else {
                        this.resetInOutPoint();
                    }
                }
            });
        });
    }

    /**
     * Build the audio configuration UI (file <select> filled from /audio,
     * offset input, <audio> element). Resolves once the annotator can be
     * set up — immediately when there is no audio file, otherwise when the
     * audio metadata (duration) has loaded.
     * @returns {Promise<void>}
     */
    setupAudioConfig() {
        // audio config
        return new Promise((resolve, reject) => {

            let audioConfigEl = document.createElement('div');
            audioConfigEl.classList.add('audioconfig')
            this.wrapperEl.appendChild(audioConfigEl);

            let audioSelectEl = document.createElement('select');
            audioSelectEl.classList.add('audioselect');
            audioConfigEl.appendChild(audioSelectEl);

            fetch('/audio')
                .then(response => response.json())
                .then(data => {
                    data.unshift(''); // add empty, to deselect any file
                    data.forEach(audioFile => {
                        let optionEl = document.createElement('option');
                        optionEl.selected = this.audioFile == audioFile;
                        optionEl.innerText = audioFile;
                        audioSelectEl.appendChild(optionEl);
                    });
                })

            audioSelectEl.addEventListener('change', (ev) => {
                this.setAudioFile(ev.target.value);
            });

            let audioOffsetTextEl = document.createElement('label');
            audioOffsetTextEl.innerText = "Offset (s)";
            audioConfigEl.appendChild(audioOffsetTextEl);

            let audioOffsetEl = document.createElement('input');
            audioOffsetEl.setAttribute('type', 'number');
            audioOffsetEl.setAttribute('step', '.01');
            audioOffsetEl.value = this.audioOffset ?? 0;
            audioOffsetEl.addEventListener('change', (ev) => {
                this.setAudioOffset(ev.target.value);
            });
            audioOffsetTextEl.appendChild(audioOffsetEl);

            this.audioEl = document.createElement('audio');
            this.audioEl.setAttribute('controls', true);
            // autoplay as soon as enough audio has buffered
            this.audioEl.addEventListener('canplaythrough', (ev) => {
                console.log('loaded audio', ev);
                this.audioEl.play();
            });
            audioConfigEl.prepend(this.audioEl);

            this.audioEl.addEventListener('loadedmetadata', (ev) => {
                // resolve the 'set up audio' when metadata has loaded
                this.setUpAnnotator(); // if offset is negative, annotator starts at negative time
                resolve();
            })
            if (this.audioFile) {
                this.audioEl.setAttribute('src', this.audioFile);
            } else {
                // no audio: set up the annotator right away
                this.setUpAnnotator();
                resolve();
            }

        });
    }

    /**
     * Switch to a different audio file and persist the choice.
     */
    setAudioFile(audioFile) {
        this.audioFile = audioFile;
        this.audioEl.setAttribute('src', this.audioFile);
        // TODO update playhead
        // TODO update this.duration after load
        this.updateState();
    }

    /**
     * Set the audio offset (seconds, may be negative), rebuild the
     * annotator (timeline range changes) and persist.
     */
    setAudioOffset(audioOffset) {
        this.audioOffset = Number.parseFloat(audioOffset);
        // TODO update playhead
        // TODO update this.duration
        this.setUpAnnotator(); // if offset is negative, annotator starts at negative time
        this.updateState();
    }

    /**
     * Map a timeline time to audio time by subtracting the offset.
     * NOTE(review): `??` binds after `*`, so when audioOffset is undefined
     * this yields NaN rather than falling back to 0 — confirm intent.
     * @param float time time is ms
     * @returns float
     */
    getAudioTime(time) {
        return Number.parseFloat(time) - (this.audioOffset * 1000 ?? 0);
    }

    /**
     * Play the audio for the segment [t_in, t_out], honouring the preroll:
     * a negative audio start time delays playback instead of seeking.
     * @param float t_in in point time, in ms
     * @param float t_out out point time, in ms
     */
    playAudioSegment(t_in, t_out) {
        if (this.audioStartTimeout) clearTimeout(this.audioStartTimeout);
        if (this.audioEndTimeout) clearTimeout(this.audioEndTimeout);

        // TODO, handle playback delay
        const t_start = this.getAudioTime(t_in); // in ms
        const t_diff = (t_out ?? this.audioEl.duration * 1000) - t_in; // in ms

        this.audioEl.pause();

        if (t_start < 0) {
            if (t_diff <= t_start * -1) {
                // the whole segment ends before the audio would start
                console.debug('no audio playback in segment', t_start, t_diff);
            } else {
                console.debug('delay audio playback', t_start, t_diff);
                // a negative audiooffset delays playback from the start
                this.audioStartTimeout = setTimeout((e) => { this.audioEl.currentTime = 0 }, t_start * -1); // triggers play with "seeked" event
            }
        } else {
            this.audioEl.currentTime = t_start / 1000;
            // play is done in "seeked" event listener
            console.log(this.audioEl.currentTime, t_start, t_in, t_out);
        }

        this.audioIsPlaying = true; // also state as playing in preroll
        this.audioEndTimeout = setTimeout((e) => {
            this.audioEl.pause();
            this.audioIsPlaying = false;
            console.debug('done playing audio');
        }, t_diff);
    }

    /**
     * @returns {number} timestamp (ms) of the very last point of the drawing
     */
    getFinalFrameTime() {
        const points = this.strokes[this.strokes.length - 1].points;
        return points[points.length - 1][3];
    }

    /**
     * Build StrokeSlice views for the path range [in_point, out_point]
     * (both are [path_i, point_i]; out point may be [Infinity, Infinity]).
     * @returns {Object} map of stroke index -> StrokeSlice
     */
    getStrokesSliceForPathRange(in_point, out_point) {
        // get paths for given range. Also, split path at in & out if necessary.
        let slices = {};
        for (let i = in_point[0]; i <= out_point[0]; i++) {
            const stroke = this.strokes[i];
            if (typeof stroke === 'undefined') {
                // out point can be Infinity. So interrupt whenever the end is reached
                break;
            }
            // only the boundary strokes get partial slices
            const in_i = (in_point[0] === i) ? in_point[1] : 0;
            const out_i = (out_point[0] === i) ? out_point[1] : Infinity;

            slices[i] = new StrokeSlice(stroke, in_i, out_i);
        }
        return slices;
    }

    /**
     * Redistribute the strokes over the three display groups:
     * 'before' [0..in), 'annotation' [in..out], 'after' (out..end].
     * NOTE(review): `show_all` is normalised but not otherwise used here.
     */
    drawStrokePosition(in_point, out_point, show_all) {
        if (typeof show_all === 'undefined')
            show_all = true;

        this.strokeGroups['before'].setStrokes(this.getStrokesSliceForPathRange([0, 0], in_point));
        this.strokeGroups['annotation'].setStrokes(this.getStrokesSliceForPathRange(in_point, out_point));
        this.strokeGroups['after'].setStrokes(this.getStrokesSliceForPathRange(out_point, [Infinity, Infinity]));
    }

    /**
     * Compute the [path_i, point_i] following the given position, or
     * [null, null] at the end of the drawing or past the out point.
     */
    getNextPosition(path_i, point_i) {
        const path = this.strokes[path_i];
        let next_path, next_point;
        if (path.points.length > point_i + 1) {
            next_path = path_i;
            next_point = point_i + 1;
        } else if (this.strokes.length > path_i + 1) {
            next_path = path_i + 1;
            // NOTE(review): starts the next path at point 1, not 0 — confirm
            // this is intentional (point 0 may only anchor the move-to)
            next_point = 1;
        } else {
            return [null, null];
        }

        // when an outpoint is set, stop playing there
        if (this.outPointPosition && (next_path > this.outPointPosition[0] || next_point > this.outPointPosition[1])) {
            return [null, null];
        }

        return [next_path, next_point];
    }

    /**
     * Draw up to [path_i, point_i], then schedule the next frame with a
     * timeout derived from the original point timestamps. Scheduling is
     * anchored to `startTimeMs` so timer jitter does not accumulate.
     * @param {boolean} allow_interrupt when true (recursive calls), stop
     *                  if something cleared `videoIsPlaying` meanwhile
     */
    playStrokePosition(path_i, point_i, allow_interrupt) {
        if (allow_interrupt) {
            if (!this.videoIsPlaying) {
                console.log('not playing because of interrupt');
                return;
            }
        } else {
            this.videoIsPlaying = true;
        }
        this.drawStrokePosition(this.inPointPosition, [path_i, point_i]);

        const [next_path, next_point] = this.getNextPosition(path_i, point_i);
        if (next_path === null) {
            console.debug('done playing video');
            this.videoIsPlaying = false;
            return;
        }

        const t = this.strokes[next_path].points[next_point][3];

        // calculate interval based on playback start to avoid drifting of time
        const dt = t - (window.performance.now() - this.startTimeMs);
        this.nextFrameTimeout = setTimeout(() => this.playStrokePosition(next_path, next_point, true), dt);
    }

    /**
     * Playhead input handler: pause and seek to `ms` (milliseconds).
     */
    scrubTo(ms) {
        this.pause();
        this._seekByTime(ms / 1000);
    }

    /** Toggle between play() and pause(). */
    playPause() {
        if (this.paused) {
            this.play();
        } else {
            this.pause()
        }
    }

    /**
     * Compatibility with HTMLMediaElement API
     * @returns None
     */
    pause() {
        this._interruptPlayback();
    }

    /**
     * Stop all scheduled playback work: frame/audio timeouts, the audio
     * element, and the playing flags; show the paused UI state.
     */
    _interruptPlayback() {
        clearTimeout(this.nextFrameTimeout);
        clearTimeout(this.audioEndTimeout);
        clearTimeout(this.audioStartTimeout);
        clearTimeout(this.startVideoTimeout);
        this.audioEl.pause();
        this.videoIsPlaying = false;
        this.audioIsPlaying = false;
        this._setPausedFlag(true);
    }

    /**
     * Compatibility with HTMLMediaElement API. Starts video + audio from
     * the current position; a negative current time (preroll) delays the
     * stroke playback.
     * @returns Promise
     */
    play() {
        return new Promise((resolve, reject) => {
            this._interruptPlayback();
            this._seekByTimeMs(this._currentTimeMs); // prevent playback issue for initial load

            // anchor wall-clock so frame scheduling can compensate drift
            this.startTimeMs = window.performance.now() - this._currentTimeMs;

            if (this._currentTimeMs < 0) {
                this.startVideoTimeout = setTimeout((e) => this.playStrokePosition(this.currentPathI, this.currentPointI), this._currentTimeMs * -1);
            } else {
                this.playStrokePosition(this.currentPathI, this.currentPointI);
            } this.playAudioSegment(this._currentTimeMs, this.outPointTimeMs);
            this._setPausedFlag(false);

            this.dispatchEvent(new CustomEvent('play', {}));
            this._animationFrame();
            resolve();
        });
    }

    /**
     * Set the paused flag and swap the play/pause button CSS classes.
     */
    _setPausedFlag(paused) {
        this._paused = !!paused; //convert to boolean
        if (paused) {
            this.playPauseEl.classList.remove('playing');
            this.playPauseEl.classList.add('paused');
        } else {
            this.playPauseEl.classList.remove('paused');
            this.playPauseEl.classList.add('playing');
        }
    }

    /** HTMLMediaElement-compatible paused getter. */
    get paused() {
        return this._paused;
    }

    /**
     * Per-frame playhead update loop driven by requestAnimationFrame.
     * Advances `_currentTimeMs` from the wall clock; when the out point /
     * end is reached, interrupts playback and resets the playhead.
     * NOTE(review): _interruptPlayback is called with an argument here but
     * takes none — confirm the intended signature.
     */
    _animationFrame(timestamp) {
        // TODO, move time at end of playStrokePosition to here
        const nextTime = window.performance.now() - this.startTimeMs;
        const endTime = this.outPointTimeMs ?? this.duration * 1000;
        let interrupt = false;
        if (nextTime > endTime) {
            this._currentTimeMs = endTime;
            interrupt = true;
        } else {
            this._currentTimeMs = nextTime;
        }
        this.playheadEl.value = this._currentTimeMs;
        if (!interrupt && (this.videoIsPlaying || this.audioIsPlaying)) {
            window.requestAnimationFrame((timestamp) => this._animationFrame(timestamp));
        } else {
            console.debug('finished playback');
            this._interruptPlayback(true);
            this.resetPlayhead();
        }
    }

    /**
     * Jump back to the in point; when an annotation is selected, show its
     * whole stroke range again.
     */
    resetPlayhead() {
        this._seekByTimeMs(this.inPointTimeMs);
        if (this.selectedAnnotation) {
            // show the whole selected annotation
            this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
        }
    }

    /**
     * Note that both t_in and t_out can be negative
     * @param float|Array t_in in point time, in ms or array with path/frame points
     * @param float|Array t_out out point time, in ms or array with path/frame points
     */
    playSegment(in_point, out_point) {
        if (!Array.isArray(in_point)) in_point = this.findPositionForTime(in_point);
        if (!Array.isArray(out_point)) out_point = this.findPositionForTime(out_point);

        this.inPointPosition = in_point;
        this.outPointPosition = out_point;
        this._seekByPoint(in_point);

        this.play();
    }

    /**
     * Seek to an explicit [path_i, point_i] position.
     * NOTE(review): reads the time from point index [2], but everywhere
     * else (firstFrameTime, getFinalFrameTime, findPositionForTime) the
     * timestamp lives at index [3] — likely a bug, confirm point layout.
     */
    _seekByPoint(point) {
        this.dispatchEvent(new CustomEvent('seeking', {}));
        this._currentTimeMs = this.strokes[point[0]].points[point[1]][2];
        [this.currentPathI, this.currentPointI] = point;
        this.playheadEl.value = this._currentTimeMs;
        this._updateFrame();
        // TODO set audio, wait for promise to finish
        this.dispatchEvent(new CustomEvent('seeked', {}));

    }

    /** Seek helper taking milliseconds. */
    _seekByTimeMs(time) {
        this._seekByTime(Number.parseFloat(time) / 1000);
    }

    /**
     * Seek to `time` (seconds): update current time/position, the playhead
     * and the drawn frame, firing 'seeking'/'seeked' events around it.
     */
    _seekByTime(time) {
        this.dispatchEvent(new CustomEvent('seeking', { detail: time }));
        this._currentTimeMs = Number.parseFloat(time) * 1000;
        [this.currentPathI, this.currentPointI] = this.findPositionForTime(this._currentTimeMs);
        this.playheadEl.value = this._currentTimeMs;
        this._updateFrame();
        this.dispatchEvent(new CustomEvent('seeked', { detail: this.currentTime }));
    }

    /** Redraw strokes from the in point up to the current position. */
    _updateFrame() {
        this.drawStrokePosition(this.inPointPosition, [this.currentPathI, this.currentPointI]);
    }

    /**
     * For compatibility with HTMLMediaElement API convert seconds to ms of internal timer
     */
    set currentTime(time) {
        this._seekByTime(time);
    }

    /** Current position in seconds (HTMLMediaElement-compatible). */
    get currentTime() {
        return this._currentTimeMs / 1000;
    }

    /**
     * End of the timeline in ms: whichever runs longer, the drawing or the
     * (offset-shifted) audio.
     */
    getEndTimeMs() {
        const videoDuration = this.getFinalFrameTime();
        const audioDuration = (this.audioEl) ? this.audioEl.duration + this.audioOffset : 0;
        return Math.max(videoDuration, audioDuration * 1000);
    }

    /**
     * Total duration in seconds, including any negative-offset preroll.
     */
    get duration() {

        const prerollDuration = this.audioOffset < 0 ? this.audioOffset * -1 : 0;

        return prerollDuration + this.getEndTimeMs() / 1000;
    }

    /**
     * Find the last [path_i, point_i] at or before `ms` (clamped to
     * [0, lastFrameTime]). Walks strokes in order; `every` callbacks
     * return false to stop early once past the target time.
     * @returns {Array<number>} [path_i, point_i]
     */
    findPositionForTime(ms) {
        ms = Math.min(Math.max(ms, 0), this.lastFrameTime);
        let path_i = 0;
        let point_i = 0;
        this.strokes.every((stroke, index) => {
            const startAt = stroke.points[0][3];
            const endAt = stroke.points[stroke.points.length - 1][3];

            if (startAt > ms) {
                return false; // too far
            }
            if (endAt > ms) {
                // we're getting close. Find the right point_i
                path_i = index;
                stroke.points.every((point, pi) => {
                    if (point[3] > ms) {
                        // too far
                        return false;
                    }
                    point_i = pi;
                    return true;
                });
                return false;
            } else {
                // in case nothing comes after, we store the last best option thus far
                path_i = index;
                point_i = stroke.points.length - 1;
                return true;
            }

        });
        return [path_i, point_i];
    }

}