WIP: playback handling audio pre-/post-roll
parent 1e9bc1f471
commit 4cad6ed741

1 changed file with 228 additions and 91 deletions

229  www/annotate.js
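
The gist of the pre-/post-roll handling in this commit: when the audio track has a negative offset relative to the first stroke, the timeline has to start before 0 ms, and when the audio outlasts the strokes the timeline has to stay open past the last frame. A minimal standalone sketch of that arithmetic, loosely following the playheadEl.min / getEndTimeMs() logic in the diff (the names audioOffsetS, lastFrameMs and audioDurationS are illustrative, not identifiers from annotate.js):

// Sketch: deriving the playable range when audio may start before the first
// stroke (pre-roll) or end after the last one (post-roll).
function timelineRange(audioOffsetS, lastFrameMs, audioDurationS) {
    // A negative offset means the audio begins |offset| seconds before frame 0,
    // so the scrubber minimum moves into negative milliseconds.
    const startMs = audioOffsetS < 0 ? audioOffsetS * 1000 : 0;
    // The end is whichever runs longer: the last stroke frame or the audio,
    // shifted by its offset.
    const endMs = Math.max(lastFrameMs, (audioDurationS + audioOffsetS) * 1000);
    // Total duration includes the pre-roll before frame 0.
    return { startMs, endMs, durationMs: endMs - startMs };
}

// e.g. audio starts 2 s early and runs 90 s, strokes end at 75 000 ms:
// timelineRange(-2, 75000, 90) -> { startMs: -2000, endMs: 88000, durationMs: 90000 }
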
@@ -109,8 +109,10 @@ class StrokeSlice {
     }
 }
 
-class Annotator {
+class Annotator extends EventTarget {
     constructor(wrapperEl, tags, fileurl) {
+        super();
+
         this.wrapperEl = wrapperEl;
         this.svgEl = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
         this.wrapperEl.appendChild(this.svgEl);
@@ -120,13 +122,13 @@ class Annotator {
         this.controlsEl.classList.add('controls')
         this.wrapperEl.appendChild(this.controlsEl);
 
-        this.scrubberElOld = document.createElement('input');
-        this.scrubberElOld.type = "range";
-        this.scrubberElOld.min = 0;
-        this.scrubberElOld.step = 0.01;
-        this.controlsEl.appendChild(this.scrubberElOld);
+        this.playheadEl = document.createElement('input');
+        this.playheadEl.type = "range";
+        this.playheadEl.min = 0;
+        this.playheadEl.step = 0.01;
+        this.controlsEl.appendChild(this.playheadEl);
 
-        this.scrubberElOld.addEventListener("input", (ev) => {
+        this.playheadEl.addEventListener("input", (ev) => {
             this.scrubTo(ev.target.value);
         })
 
@@ -170,9 +172,11 @@ class Annotator {
         this.controlsEl.appendChild(this.annotationsEl);
 
 
-        this.inPointPosition = null;
+        this.inPointPosition = [0, 0];
+        this.inPointTimeMs = null;
         this.outPointPosition = null;
-        this.currentTime = 0;
+        this.outPointTimeMs = null;
+        this._currentTimeMs = 0;
         this.isPlaying = false;
 
         const groups = ['before', 'annotation', 'after']
@@ -186,7 +190,7 @@ class Annotator {
 
         this.annotations = [];
 
-        this.play(fileurl);
+        this.load(fileurl);
     }
 
     updateAnnotations(save) {
@@ -195,8 +199,8 @@ class Annotator {
         for (let annotation_i in this.annotations) {
             const annotation = this.annotations[annotation_i];
             this.annotationEl = document.createElement('div');
-            const left = (annotation.t_in / this.duration) * 100;
-            const right = 100 - (annotation.t_out / this.duration) * 100;
+            const left = (annotation.t_in / this.lastFrameTime) * 100;
+            const right = 100 - (annotation.t_out / this.lastFrameTime) * 100;
             this.annotationEl.style.left = left + '%';
             this.annotationEl.style.right = right + '%';
 
@@ -253,7 +257,7 @@ class Annotator {
 
     deselectAnnotation(keep_position) {
         if (this.selectedAnnotation)
-            this.currentTime = this.selectedAnnotation.t_out;
+            this._currentTimeMs = this.selectedAnnotation.t_out;
 
         this.wrapperEl.classList.remove('selected-annotation');
 
@@ -266,7 +270,7 @@ class Annotator {
         this.updateAnnotations(false); // selects the right tag & highlights the annotation
     }
 
-    play(file) {
+    load(file) {
         const request = new Request(file, {
             method: 'GET',
         });
@@ -280,7 +284,7 @@ class Annotator {
         fetch(metadata_req)
             .then(response => response.ok ? response.json() : null)
             .then(metadata => {
-                this.playStrokes(data, metadata)
+                this.loadStrokes(data, metadata)
             })
             .catch(e => console.log(e));
         // do something with the data sent in the request
@@ -362,26 +366,33 @@ class Annotator {
             this.annotations.push(new Annotation(tag, t_in, t_out));
             this.updateAnnotations(true);
 
-            this.currentTime = t_out;
+            this._currentTimeMs = t_out;
             this.setUpAnnotator();
         }
     }
 
 
     setUpAnnotator() {
+        this.playheadEl.min = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
+        this.playheadEl.max = this.getEndTimeMs();
+        this.playheadEl.value = this._currentTimeMs;
+
         this.inPointPosition = this.findPositionForTime(this.currentTime);
-        this.outPointPosition = this.findPositionForTime(this.duration);
+        this.inPointTimeMs = this._currentTimeMs;
+        this.outPointPosition = this.findPositionForTime(this.lastFrameTime); // TODO: simplify to get the last frame indexes directly
+        this.outPointTimeMs = null;
 
         if (this.scrubberEl.noUiSlider) {
            this.slider.destroy();
         }
 
+        // console.log(this._currentTimeMs, )
        this.slider = noUiSlider.create(this.scrubberEl, {
-            start: [this.currentTime, this.duration],
+            start: [this._currentTimeMs, this.lastFrameTime],
            connect: true,
            range: {
                'min': this.audioOffset < 0 ? this.audioOffset * 1000 : 0,
-                'max': this.duration
+                'max': this.getEndTimeMs(),
            },
            tooltips: [
                this.formatter,
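
For reference, a stripped-down sketch of a two-handle in/out scrubber whose range minimum extends into negative pre-roll time, in the spirit of the noUiSlider setup above (the element selector, offset and time values are made up for the example):

// Sketch: a two-handle scrubber that can start before 0 ms when the audio
// has a negative offset (pre-roll). Same noUiSlider options as the diff:
// start, connect, range and tooltips.
const scrubber = document.querySelector('#scrubber');   // illustrative element
const audioOffsetMs = -2000;                             // illustrative pre-roll

noUiSlider.create(scrubber, {
    start: [audioOffsetMs, 75000],            // in point at -2 s, out point at 75 s
    connect: true,
    range: {
        'min': Math.min(audioOffsetMs, 0),    // negative pre-roll extends the range
        'max': 88000,                         // max(last frame, audio end), in ms
    },
    tooltips: [wNumb({ decimals: 2 }), wNumb({ decimals: 2 })],
});
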
@@ -397,7 +408,9 @@ class Annotator {
         this.slider.on("slide", (values, handle) => {
             this.isPlaying = false;
             this.inPointPosition = this.findPositionForTime(values[0]);
+            this.inPointTimeMs = Number.parseFloat(values[0]);
             this.outPointPosition = this.findPositionForTime(values[1]);
+            this.outPointTimeMs = Number.parseFloat(values[1]);
             this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
 
             // console.log(this.selectedAnnotation);
@@ -417,13 +430,13 @@ class Annotator {
         this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
     }
 
-    playStrokes(drawing, metadata) {
+    loadStrokes(drawing, metadata) {
         this.audioOffset = 0;
         if (metadata) {
             this.annotations = metadata.annotations;
             this.audioFile = metadata.hasOwnProperty('audio') ? metadata.audio.file : null;
             this.audioOffset = metadata.hasOwnProperty('audio') ? Number.parseFloat(metadata.audio.offset) : 0;
-            this.currentTime = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
+            this._currentTimeMs = this.audioOffset < 0 ? this.audioOffset * 1000 : 0;
             //
             // load any saved metadata
         }
@@ -442,16 +455,16 @@ class Annotator {
         bgEl.classList.add('background');
         this.svgEl.prepend(bgEl);
 
-        this.startTime = window.performance.now() - this.strokes[0].points[0][3];
-        this.duration = this.getDuration();
-        this.scrubberElOld.max = this.duration;
-        this.playTimout = null;
+        this.firstFrameTime = this.strokes[0].points[0][3];
+        this.lastFrameTime = this.getFinalFrameTime();
+        this.playheadEl.max = this.lastFrameTime;
+        this.nextFrameTimeout = null;
 
         this.formatter = wNumb({
             decimals: 2,
             edit: (time) => {
                 let neg = "";
-                if(time < 0) {
+                if (time < 0) {
                     neg = "-";
                     time *= -1;
                 }
@@ -463,16 +476,19 @@ class Annotator {
             }
         });
 
-        this.setUpAnnotator()
-        this.updateAnnotations(false);
 
-        this.setupAudioConfig();
+        this.setupAudioConfig().then(() => {
+            // this.setUpAnnotator()
+            this.updateAnnotations(false);
+        });
 
         // this.playStrokePosition(0, 1);
     }
 
     setupAudioConfig() {
         // audio config
+        return new Promise((resolve, reject) => {
+
         let audioConfigEl = document.createElement('div');
         audioConfigEl.classList.add('audioconfig')
         this.wrapperEl.appendChild(audioConfigEl);
@@ -513,9 +529,6 @@ class Annotator {
 
 
         this.audioEl = document.createElement('audio');
-        if (this.audioFile) {
-            this.audioEl.setAttribute('src', this.audioFile);
-        }
         this.audioEl.setAttribute('controls', true);
         this.audioEl.addEventListener('canplaythrough', (ev) => {
             console.log('loaded audio', ev);
@@ -525,6 +538,20 @@ class Annotator {
             // console.log(ev);
         // })
         audioConfigEl.prepend(this.audioEl);
+
+        this.audioEl.addEventListener('loadedmetadata', (ev) => {
+            // resolve the 'set up audio' when metadata has loaded
+            this.setUpAnnotator(); // if offset is negative, annotator starts at negative time
+            resolve();
+        })
+        if (this.audioFile) {
+            this.audioEl.setAttribute('src', this.audioFile);
+        } else {
+            this.setUpAnnotator();
+            resolve();
+        }
+
+        });
     }
 
     setAudioFile(audioFile) {
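
The pattern introduced here, reduced to its core: wrap the audio setup in a Promise and resolve it either from the loadedmetadata listener (so the audio duration is known before the annotator is configured) or immediately when there is no audio file. A hedged sketch, with audioUrl as a stand-in parameter rather than anything from annotate.js:

// Sketch: resolve audio setup once metadata (duration etc.) is available,
// or right away if there is no audio track at all.
function setupAudio(audioUrl) {
    return new Promise((resolve) => {
        const audioEl = document.createElement('audio');
        audioEl.setAttribute('controls', true);

        if (audioUrl) {
            // 'loadedmetadata' fires once duration and dimensions are known.
            audioEl.addEventListener('loadedmetadata', () => resolve(audioEl));
            audioEl.setAttribute('src', audioUrl);   // set src after the listener
        } else {
            resolve(audioEl);                        // nothing to wait for
        }
    });
}

// usage: setupAudio('take1.ogg').then((audioEl) => { /* configure scrubber */ });
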
@@ -563,7 +590,7 @@ class Annotator {
 
         // TODO, handle playback delay
         const t_start = this.getAudioTime(t_in); // in ms
-        const t_diff = t_out - t_in; // in ms
+        const t_diff = (t_out ?? this.audioEl.duration * 1000) - t_in; // in ms
 
         this.audioEl.pause();
 
@@ -580,13 +607,13 @@ class Annotator {
         } else {
             this.audioEl.currentTime = t_start / 1000;
             // this.audioEl.play(); // play is done in "seeked" evenlistener
-            console.log(this.audioEl.currentTime, t_start, t_in, t_out)
+            console.log(this.audioEl.currentTime, t_start, t_in, t_out);
         }
 
         this.audioEndTimeout = setTimeout((e) => this.audioEl.pause(), t_diff);
     }
 
-    getDuration() {
+    getFinalFrameTime() {
         const points = this.strokes[this.strokes.length - 1].points;
         return points[points.length - 1][3];
     }
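
The `t_out ?? …` change above is what makes the audio post-roll work: when no out point is set, the segment end falls back to the end of the audio track instead of the last stroke. A small sketch of that scheduling, simplified and with illustrative names (it assumes a non-negative in point):

// Sketch: play an audio segment whose end defaults to the end of the track
// when no explicit out point is given (post-roll past the last stroke).
function playAudioSegmentSketch(audioEl, tInMs, tOutMs) {
    // Nullish coalescing keeps an explicit 0 but replaces null/undefined.
    const endMs = tOutMs ?? audioEl.duration * 1000;
    const diffMs = endMs - tInMs;

    audioEl.currentTime = tInMs / 1000;      // seek to the in point (seconds)
    audioEl.play();
    // Stop after the segment length has elapsed.
    return setTimeout(() => audioEl.pause(), diffMs);
}
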
@@ -692,7 +719,7 @@ class Annotator {
         } else {
             this.isPlaying = true;
         }
-        this.drawStrokePosition(path_i, point_i);
+        this.drawStrokePosition(this.inPointPosition, [path_i, point_i]);
 
         const [next_path, next_point] = this.getNextPosition(path_i, point_i);
         if (next_path === null) {
@@ -702,26 +729,136 @@ class Annotator {
 
         const t = this.strokes[next_path].points[next_point][3];// - path.points[point_i][3];
 
-        const dt = t - (window.performance.now() - this.startTime);
-        this.playTimout = setTimeout(() => this.playStrokePosition(next_path, next_point, true), dt);
-    }
+        // calculate interval based on playback start to avoid drifting of time
+        const dt = t - (window.performance.now() - this.startTimeMs);
+        this.nextFrameTimeout = setTimeout(() => this.playStrokePosition(next_path, next_point, true), dt);
 
-    playUntil(path_i) {
-        // for scrubber
     }
 
     scrubTo(ms) {
-        const [path_i, point_i] = this.findPositionForTime(ms);
+        // const [path_i, point_i] = this.findPositionForTime(ms);
         // console.log(path_i, point_i);
-        clearTimeout(this.playTimout);
-        this.playStrokePosition(path_i, point_i);
+        this.pause();
+        this._seekByTime(ms / 1000);
         // this.playHead = ms;
     }
 
+    /**
+     * Compatibility with HTMLMediaElement API
+     * @returns None
+     */
+    pause() {
+        this._interruptPlayback();
+    }
+
+    _interruptPlayback() {
+        clearTimeout(this.nextFrameTimeout);
+        clearTimeout(this.audioEndTimeout);
+        clearTimeout(this.audioStartTimeout);
+        clearTimeout(this.startVideoTimeout);
+        this.audioEl.pause();
+        this.isPlaying = false;
+    }
+
+    /**
+     * Compatibility with HTMLMediaElement API
+     * @returns Promise
+     */
+    play() {
+        return new Promise((resolve, reject) => {
+            this._interruptPlayback();
+
+            this.startTimeMs = window.performance.now() - this._currentTimeMs;
+
+            if (this._currentTimeMs < 0) {
+                this.startVideoTimeout = setTimeout((e) => this.playStrokePosition(this.currentPathI, this.currentPointI), this._currentTimeMs * -1);
+            } else {
+                this.playStrokePosition(this.currentPathI, this.currentPointI);
+            }
+            console.log(this._currentTimeMs, this.outPointTimeMs);
+            this.playAudioSegment(this._currentTimeMs, this.outPointTimeMs);
+            // this.playStrokePosition(this.currentPathI, this.currentPointI);
+            this.dispatchEvent(new CustomEvent('play', {}));
+            this._animationFrame();
+            resolve();
+        });
+    }
+
+    _animationFrame(timestamp) {
+        // TODO, move time at end of playStrokePosition to here
+        const nextTime = window.performance.now() - this.startTimeMs;
+        const endTime = this.outPointTimeMs ?? this.duration * 1000;
+        if (nextTime > this.duration * 1000) {
+
+        }
+        this.playheadEl.value = this._currentTimeMs;
+        if (this.isPlaying) {
+            window.requestAnimationFrame((timestamp) => this._animationFrame(timestamp));
+        }
+    }
+
+    /**
+     * Note that both t_in and t_out can be negative
+     * @param float|Array t_in in point time, in ms or array with path/frame points
+     * @param float|Array t_out out point time, in ms or array with path/frame points
+     */
+    playSegment(in_point, out_point) {
+        if (!Array.isArray(in_point)) in_point = this.findPositionForTime(in_point);
+        if (!Array.isArray(out_point)) out_point = this.findPositionForTime(out_point);
+
+        this.inPointPosition = in_point;
+        this.outPointPosition = out_point;
+        this._seekByPoint(in_point);
+
+        this.play();
+    }
+
+    _seekByPoint(point) {
+        this.dispatchEvent(new CustomEvent('seeking', {}));
+        this._currentTimeMs = this.strokes[point[0]].points[point[1]][2];
+        [this.currentPathI, this.currentPointI] = point;
+        this._updateFrame();
+        // TODO set audio, wait for promise to finish
+        this.dispatchEvent(new CustomEvent('seeked', {}));
+
+    }
+    _seekByTime(time) {
+        this.dispatchEvent(new CustomEvent('seeking', { time: time }));
+        this._currentTimeMs = Number.parseFloat(time) * 1000;
+        [this.currentPathI, this.currentPointI] = this.findPositionForTime(this._currentTimeMs);
+        this._updateFrame();
+        this.dispatchEvent(new CustomEvent('seeked', { time: this.currentTime }));
+    }
+
+    _updateFrame() {
+        this.drawStrokePosition(this.inPointPosition, [this.currentPathI, this.currentPointI]);
+    }
+
+    /**
+     * For compatibility with HTMLMediaElement API convert seconds to ms of internal timer
+     */
+    set currentTime(time) {
+        this._seekByTime(time);
+    }
+
+    get currentTime() {
+        return this._currentTimeMs / 1000;
+    }
+
+    getEndTimeMs() {
+        const videoDuration = this.getFinalFrameTime();
+        const audioDuration = (this.audioEl) ? this.audioEl.duration + this.audioOffset : 0;
+        return Math.max(videoDuration, audioDuration * 1000);
+    }
+
+    get duration() {
+
+        const prerollDuration = this.audioOffset < 0 ? this.audioOffset * -1 : 0;
+
+        return prerollDuration + this.getEndTimeMs();
+    }
+
     findPositionForTime(ms) {
-        ms = Math.min(Math.max(ms, 0), this.duration);
+        ms = Math.min(Math.max(ms, 0), this.lastFrameTime);
         // console.log('scrub to', ms)
         let path_i = 0;
         let point_i = 0;
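
Taken together, the additions give the annotator a small HTMLMediaElement-like surface (play(), pause(), currentTime, duration, plus playSegment() and the 'play' / 'seeking' / 'seeked' events). A hedged usage sketch, assuming an Annotator instance constructed as in the diff; the wrapper selector, tag list and file URL are placeholders:

// Sketch: driving the new playback API from outside the class.
const annotator = new Annotator(document.querySelector('#annotator'), [], 'drawing.json');

annotator.addEventListener('play', () => console.log('playback started'));
annotator.addEventListener('seeked', () => console.log('now at', annotator.currentTime, 's'));

annotator.currentTime = -1.5;     // seek into the audio pre-roll (seconds)
annotator.play();                 // strokes start once the timer reaches 0

// Play only the segment between two points (ms, or [path, point] pairs).
annotator.playSegment(2000, 6500);

// Stop everything: stroke timeouts and audio alike.
annotator.pause();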