Annotator now supports player mode. Buffer audio before playback.
parent d55c5b5486
commit eb6d31742e
2 changed files with 289 additions and 182 deletions
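The second file below adds an optional fourth config argument to the Annotator constructor with two flags: is_player (annotations are not loaded and the annotator UI is not shown) and crop_to_fit (show the whole drawing instead of animating the viewport). A minimal usage sketch follows; it is not part of the commit, the container element and file paths are placeholders, and only the config object is new behaviour:

    // Hypothetical setup: the element id and file paths are placeholders.
    const wrapperEl = document.getElementById('drawing');
    const player = new Annotator(wrapperEl, 'tags.json', '/files/drawing.json', {
        is_player: true,    // player mode: don't load annotations, don't show the annotator UI
        crop_to_fit: true,  // show the whole drawing rather than animating the viewport
    });
    // Annotator extends EventTarget, so playback events can be observed:
    player.addEventListener('play', () => console.log('playback started'));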
@@ -98,6 +98,7 @@
     position: absolute;
     left: 100%;
     width: 30px;
+    height:30px;
 }
 
 .controls button.paused::before {
@@ -108,6 +109,22 @@
     content: '⏸';
 }
 
+
+.buffering .controls button:is(.playing,.paused)::before {
+    content: '↺';
+    display:inline-block;
+    animation: rotate 1s infinite;
+}
+
+@keyframes rotate {
+    0% {
+        transform: rotate(359deg)
+    }
+    100% {
+        transform: rotate(0deg)
+    }
+}
+
 .controls {
     position: absolute !important;
     z-index: 100;
@@ -376,6 +393,7 @@
     navigator.mediaSession.setActionHandler('seekforward', function () { /* Code excerpted. */ });
     navigator.mediaSession.setActionHandler('previoustrack', function () { /* Code excerpted. */ });
     navigator.mediaSession.setActionHandler('nexttrack', function () { /* Code excerpted. */ });
+    navigator.mediaSession.setActionHandler('playpause', function () { /* Code excerpted. */ });
 </script>
 </body>
 
@@ -110,9 +110,14 @@ class StrokeSlice {
 }
 
 class Annotator extends EventTarget {
-    constructor(wrapperEl, tagFile, fileurl) {
+    constructor(wrapperEl, tagFile, fileurl, config) {
         super();
 
+        this.config = {
+            is_player: config && config.hasOwnProperty('is_player') ? config.is_player : false, // in player mode annotations are not loaded, nor is the annotator shown
+            crop_to_fit: config && config.hasOwnProperty('crop_to_fit') ? config.crop_to_fit : false, // don't animate viewport, but show the whole drawing
+        }
+
         this.formatter = wNumb({
             decimals: 2,
             edit: (time) => {
@@ -142,6 +147,7 @@ class Annotator extends EventTarget {
         this.wrapperEl = wrapperEl;
         this.svgEl = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
         this.wrapperEl.appendChild(this.svgEl);
+        this.wrapperEl.classList.add(this.config.is_player ? "svganim_player" : "svganim_annotator");
 
 
         this.controlsEl = document.createElement('div');
@@ -161,7 +167,7 @@ class Annotator extends EventTarget {
         this.playheadEl.addEventListener("input", (ev) => {
             this.scrubTo(ev.target.value);
         });
-        this.playheadEl.addEventListener('keydown',(ev) => {
+        this.playheadEl.addEventListener('keydown', (ev) => {
             ev.preventDefault(); // we don't want to use arrow keys, as these are captured in the overall keydown event
         })
 
@@ -178,7 +184,7 @@ class Annotator extends EventTarget {
         this.playPauseEl.addEventListener("click", (ev) => {
            this.playPause()
         })
-        this.playPauseEl.addEventListener('keydown',(ev) => {
+        this.playPauseEl.addEventListener('keydown', (ev) => {
             ev.preventDefault(); // we don't want to spacebar, as this is captured in the overall keydown event
         })
 
@@ -191,6 +197,29 @@ class Annotator extends EventTarget {
         this.annotationsEl.classList.add('annotations')
         this.controlsEl.appendChild(this.annotationsEl);
 
+
+        this.inPointPosition = [0, 0];
+        this.inPointTimeMs = null;
+        this.outPointPosition = null;
+        this.outPointTimeMs = null;
+        this._currentTimeMs = 0;
+        this.videoIsPlaying = false;
+
+        const groups = ['before', 'annotation', 'after']
+        this.strokeGroups = {};
+        groups.forEach(group => {
+            let groupEl = document.createElementNS('http://www.w3.org/2000/svg', 'g');
+            groupEl.classList.add(group)
+            this.svgEl.appendChild(groupEl);
+            this.strokeGroups[group] = new StrokeGroup(groupEl, this);
+        });
+
+        this.annotations = [];
+
+        if (this.config.is_player) {
+            this.load(fileurl);
+        } else {
+
         this.loadTags(tagFile).then(() => {
             this.tagsEl = document.createElement('ul');
             this.tagsEl.classList.add('tags');
@@ -242,27 +271,10 @@ class Annotator extends EventTarget {
 
             this.controlsEl.appendChild(this.tagsEl);
 
-            this.inPointPosition = [0, 0];
-            this.inPointTimeMs = null;
-            this.outPointPosition = null;
-            this.outPointTimeMs = null;
-            this._currentTimeMs = 0;
-            this.videoIsPlaying = false;
-
-            const groups = ['before', 'annotation', 'after']
-            this.strokeGroups = {};
-            groups.forEach(group => {
-                let groupEl = document.createElementNS('http://www.w3.org/2000/svg', 'g');
-                groupEl.classList.add(group)
-                this.svgEl.appendChild(groupEl);
-                this.strokeGroups[group] = new StrokeGroup(groupEl, this);
-            });
-
-            this.annotations = [];
-
             this.load(fileurl);
         });
     }
+    }
 
     getColorForTag(tag) {
         const tagData = this.tagMap[tag];
@@ -278,6 +290,10 @@ class Annotator extends EventTarget {
 
     updateAnnotations(save) {
 
+        if (this.config.is_player) {
+            return false;
+        }
+
         this.annotationsEl.innerHTML = "";
         for (let annotation_i in this.annotations) {
             const annotation = this.annotations[annotation_i];
@@ -409,6 +425,8 @@ class Annotator extends EventTarget {
         fetch(request)
             .then(response => response.json())
             .then(data => {
+                if (!this.config.is_player) {
+
                 const metadata_req = new Request(`/annotations/${data.file}`, {
                     method: 'GET',
                 });
@@ -421,8 +439,10 @@ class Annotator extends EventTarget {
                         this.loadStrokes(data, metadata)
                     })
                     .catch(e => console.log(e));
-                // do something with the data sent in the request
-            });
+                } else {
+                    this.loadStrokes(data, null);
+                }
+            }).catch(e => console.log(e));
     }
 
     updateState() {
@@ -508,15 +528,27 @@ class Annotator extends EventTarget {
 
 
     setUpAnnotator() {
 
         this.playheadEl.min = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
         this.playheadEl.max = this.getEndTimeMs();
         this._updatePlayhead();
 
+
         this.inPointPosition = this.findPositionForTime(this.currentTime);
         this.inPointTimeMs = this._currentTimeMs;
         this.outPointPosition = this.findPositionForTime(this.lastFrameTime); // TODO: simplify to get the last frame indexes directly
         this.outPointTimeMs = this.getEndTimeMs();
+
+
+        if (!this.config.is_player) {
+            this.buildAnnotator();
+        }
+
+        this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
+    }
+
+    buildAnnotator() {
+
         if (this.scrubberEl.noUiSlider) {
             this.slider.destroy();
         }
@@ -597,21 +629,27 @@ class Annotator extends EventTarget {
                 ttInputEl.addEventListener('blur', submit);
             });
         })
-        this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
+
     }
 
     loadStrokes(drawing, metadata) {
         this.audioOffset = 0;
+
         if (metadata) {
             this.annotations = metadata.annotations;
-            this.audioFile = metadata.hasOwnProperty('audio') ? metadata.audio.file : null;
-            this.audioOffset = metadata.hasOwnProperty('audio') ? Number.parseFloat(metadata.audio.offset) : 0;
+        }
+
+        if ((metadata && metadata.hasOwnProperty('audio')) || drawing.hasOwnProperty('audio')) {
+            if (metadata && metadata.hasOwnProperty('audio')) {
+                this.audioFile = metadata.audio.file
+                this.audioOffset = Number.parseFloat(metadata.audio.offset);
+            } else {
+                this.audioFile = drawing.audio.file
+                this.audioOffset = Number.parseFloat(drawing.audio.offset);
+            }
             this._currentTimeMs = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
             this._updatePlayhead();
-            //
-            // load any saved metadata
         }
 
         this.filename = drawing.file;
         this.strokes = drawing.shape.map(s => new Stroke(s['color'], s['points']));
         this.viewboxes = drawing.viewboxes;
@@ -619,7 +657,11 @@ class Annotator extends EventTarget {
         this.currentPointI = null;
         this.currentViewboxI = null;
         this.dimensions = drawing.dimensions;
+        if (!this.config.crop_to_fit) {
             this.svgEl.setAttribute('viewBox', `0 0 ${this.dimensions[0]} ${this.dimensions[1]}`)
+        } else {
+            this.svgEl.setAttribute('viewBox', `${drawing.bounding_box.x} ${drawing.bounding_box.y} ${drawing.bounding_box.width} ${drawing.bounding_box.height}`)
+        }
 
         // let bgEl = document.createElementNS('http://www.w3.org/2000/svg', 'rect');
         // bgEl.setAttribute("x", 0);
@@ -638,13 +680,39 @@ class Annotator extends EventTarget {
 
         this.setupAudioConfig().then(() => {
             // this.setUpAnnotator()
+            let keyEl;
+            if (this.config.is_player) {
+                keyEl = this.wrapperEl;
+            } else {
+                keyEl = document.body; // always capture
            this.updateAnnotations(false);
+            }
 
-            document.body.addEventListener('keyup', (ev) => {
+            keyEl.addEventListener('keyup', (ev) => {
                 if (ev.key == ' ') {
                     this.playPause();
                 }
-                console.log('key', ev);
+
+                // shift+arrow keys, jump playhead (search position)
+                // FIXME doesn't keep playback after initial load. Only after unfocussing the window, and refocussing it, do the keys capture.
+                // Probably a wrong order
+                if (ev.key == 'ArrowLeft' && ev.shiftKey) {
+                    const p = this._paused;
+                    console.log(p);
+                    const diff = ev.ctrlKey ? 10000 : 1000;
+                    this.scrubTo(this._currentTimeMs - diff);
+                    if (!p) { console.log('play!'); this.play(); } // scrubTo() causes a pause();
+                }
+                if (ev.key == 'ArrowRight' && ev.shiftKey) {
+                    const p = this._paused;
+                    console.log(p);
+                    const diff = ev.ctrlKey ? 10000 : 1000;
+                    this.scrubTo(this._currentTimeMs + diff);
+                    if (!p) { console.log('play!'); this.play(); } // scrubTo() causes a pause();
+                }
+
+                // additional keys only for annotation mode
+                if (!this.config.is_player) {
                 if (ev.key == 'i') {
                     this.setInPoint(this.currentTime * 1000);
                 }
@@ -666,22 +734,6 @@ class Annotator extends EventTarget {
                     this.resetInOutPoint();
                 }
                 }
-                // shift+arrow keys, jump playhead (search position)
-                // FIXME doesn't keep playback after initial load. Only after unfocussing the window, and refocussing it, do the keys capture.
-                // Probably a wrong order
-                if (ev.key == 'ArrowLeft' && ev.shiftKey) {
-                    const p = this._paused;
-                    console.log(p);
-                    const diff = ev.ctrlKey ? 10000 : 1000;
-                    this.scrubTo(this._currentTimeMs - diff);
-                    if (!p) { console.log('play!'); this.play(); } // scrubTo() causes a pause();
-                }
-                if (ev.key == 'ArrowRight' && ev.shiftKey) {
-                    const p = this._paused;
-                    console.log(p);
-                    const diff = ev.ctrlKey ? 10000 : 1000;
-                    this.scrubTo(this._currentTimeMs + diff);
-                    if (!p) { console.log('play!'); this.play(); } // scrubTo() causes a pause();
                 }
             });
         });
@@ -714,11 +766,26 @@ class Annotator extends EventTarget {
     setupAudioConfig() {
         // audio config
         return new Promise((resolve, reject) => {
+            this.audioEl = document.createElement('audio');
+            if (!this.config.is_player)
+                this.audioEl.setAttribute('controls', true);
+
+            this.audioEl.addEventListener('canplaythrough', (ev) => {
+                console.log('loaded audio', ev);
+                // this.audioEl.play();
+            });
+
+            if (this.config.is_player) {
+                this.wrapperEl.prepend(this.audioEl);
+            }
+            else {
 
             let audioConfigEl = document.createElement('div');
             audioConfigEl.classList.add('audioconfig')
             this.wrapperEl.appendChild(audioConfigEl);
 
+            audioConfigEl.prepend(this.audioEl);
+
             let audioSelectEl = document.createElement('select');
             audioSelectEl.classList.add('audioselect');
             audioConfigEl.appendChild(audioSelectEl);
@@ -753,17 +820,10 @@ class Annotator extends EventTarget {
             });
             audioOffsetTextEl.appendChild(audioOffsetEl);
 
+            }
+
+
 
-            this.audioEl = document.createElement('audio');
-            this.audioEl.setAttribute('controls', true);
-            this.audioEl.addEventListener('canplaythrough', (ev) => {
-                console.log('loaded audio', ev);
-                this.audioEl.play();
-            });
-            // this.audioEl.addEventListener('seeked', (ev)=>{
-            //     console.log(ev);
-            // })
-            audioConfigEl.prepend(this.audioEl);
 
             this.audioEl.addEventListener('loadedmetadata', (ev) => {
                 // resolve the 'set up audio' when metadata has loaded
@@ -827,11 +887,15 @@ class Annotator extends EventTarget {
                 console.debug('delay audio playback', t_start, t_diff);
                 // a negative audiooffset delays playback from the start
                 // this.audioStartTimeout = setTimeout((e) => this.audioEl.play(), t*-1000);
-                this.audioStartTimeout = setTimeout((e) => { this.audioEl.currentTime = 0 }, t_start * -1); // triggers play with "seeked" event
+                this.audioStartTimeout = setTimeout((e) => { this.audioEl.currentTime = 0; this.audioEl.play(); }, t_start * -1); // triggers play with "seeked" event
                 // this.audioEl.currentTime = 0;
             }
         } else {
+            if (this.audioEl.currentTime !== t_start / 1000) {
+                console.log(this.audioEl.currentTime, t_start / 1000);
                 this.audioEl.currentTime = t_start / 1000;
+            }
+            this.audioEl.play();
             // this.audioEl.play(); // play is done in "seeked" evenlistener
             console.log(this.audioEl.currentTime, t_start, t_in, t_out);
         }
@@ -844,6 +908,10 @@ class Annotator extends EventTarget {
         }, t_diff);
     }
 
+    _scrubAudio(time_ms) {
+        this.audioEl.currentTime = Math.max(0, this.getAudioTime(time_ms)) / 1000;
+    }
+
     getFinalFrameTime() {
         const points = this.strokes[this.strokes.length - 1].points;
         return points[points.length - 1][3];
@@ -924,8 +992,10 @@ class Annotator extends EventTarget {
         }
         this.currentViewboxI = box_i
         const b = this.viewboxes[box_i];
+        if (!this.config.crop_to_fit) {
             this.svgEl.setAttribute('viewBox', `${b.x} ${b.y} ${this.dimensions[0]} ${this.dimensions[1]}`)
         }
+        }
 
    getNextPosition(path_i, point_i) {
        const path = this.strokes[path_i];
@@ -1053,6 +1123,10 @@ class Annotator extends EventTarget {
                 this._seekByTimeMs(this._currentTimeMs); // prevent playback issue for initial load
             }
 
+            this._setPausedFlag(false);
+
+            const startPlayback = () => {
+                console.log('start playback');
                 this.startTimeMs = window.performance.now() - this._currentTimeMs;
                 // strokes
                 if (this._currentTimeMs < 0) {
@@ -1065,13 +1139,26 @@ class Annotator extends EventTarget {
                 this.playViewboxPosition(this.currentViewboxI);
 
                 // audio
+                // TODO: use this.audioEl.readyState == 4 : play immediately, otherwise after event
                 this.playAudioSegment(this._currentTimeMs, this.outPointTimeMs);
                 // this.playStrokePosition(this.currentPathI, this.currentPointI);
-            this._setPausedFlag(false);
 
                 this.dispatchEvent(new CustomEvent('play', {}));
                 this._animationFrame();
                 resolve();
+            }
+
+            if (this.audioEl.readyState !== 4) { // not ready to play after seeking audio.
+                console.log('wait for audio before playback');
+                this.wrapperEl.classList.add('buffering');
+                this.audioEl.addEventListener('canplaythrough', () => {
+                    this.wrapperEl.classList.remove('buffering');
+                    startPlayback()
+                }, { once: true }); // only once
+            } else {
+                startPlayback();
+            }
+
         });
     }
 
@@ -1145,6 +1232,7 @@ class Annotator extends EventTarget {
     _seekByPoint(point) {
         this.dispatchEvent(new CustomEvent('seeking', {}));
         this._currentTimeMs = this.strokes[point[0]].points[point[1]][2];
+        this.audioEl.currentTime = this.getAudioTime(this._currentTimeMs) / 1000;
         [this.currentPathI, this.currentPointI] = point;
         this._updatePlayhead();
         this._updateFrame();
@@ -1158,6 +1246,7 @@ class Annotator extends EventTarget {
     _seekByTime(time) {
         this.dispatchEvent(new CustomEvent('seeking', { detail: time }));
         this._currentTimeMs = Number.parseFloat(time) * 1000;
+        this.audioEl.currentTime = this.getAudioTime(this._currentTimeMs) / 1000;
         [this.currentPathI, this.currentPointI] = this.findPositionForTime(this._currentTimeMs);
 
         this._updatePlayhead();
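The audio buffering added to play() comes down to the pattern below: if the audio element is not ready after seeking (readyState below HAVE_ENOUGH_DATA), the wrapper gets a 'buffering' class (the new CSS swaps the play/pause glyph for a rotating ↺) and playback is deferred until canplaythrough fires once. A condensed, illustrative sketch of that pattern; the function name is made up, the real logic lives inline in play():

    // Illustrative sketch of the buffering pattern used in play(); not the full method.
    function startWhenBuffered(audioEl, wrapperEl, startPlayback) {
        if (audioEl.readyState !== 4) { // 4 === HTMLMediaElement.HAVE_ENOUGH_DATA
            wrapperEl.classList.add('buffering'); // CSS shows the rotating ↺ on the play/pause button
            audioEl.addEventListener('canplaythrough', () => {
                wrapperEl.classList.remove('buffering');
                startPlayback();
            }, { once: true }); // handler runs only once
        } else {
            startPlayback();
        }
    }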