Fix playback and fix playback position

Ruben van de Ven 2021-12-22 11:52:38 +01:00
parent 08f680c07c
commit 9a8509e56d
3 changed files with 94 additions and 12 deletions

View file

@@ -11,7 +11,6 @@ import html
import argparse
import coloredlogs
import glob
import csv
@@ -168,7 +167,6 @@ class AnimationHandler(tornado.web.RequestHandler):
}
with open(path, 'r') as fp:
events = json.loads('['+fp.read()+']')
# events = csv.reader(fp,delimiter=';')
for i, event in enumerate(events):
if i == 0:
# metadata on first line
@@ -223,12 +221,17 @@ class AnnotationsHandler(tornado.web.RequestHandler):
else:
self.json_args = None
def get_filenames(self):
return [name[:-16] for name in os.listdir(self.config.storage) if name.endswith('json_appendable')]
def get(self, filename):
self.set_header("Content-Type", "application/json")
filenames = sorted([name[:-4] for name in os.listdir(self.config.storage) if name not in ['.gitignore']])
filenames = self.get_filenames()
print(filenames, filename)
if filename not in filenames:
raise Exception('Invalid filename')
raise tornado.web.HTTPError(404)
meta_file = os.path.join(self.metadir, filename +'.json')
@@ -242,10 +245,11 @@ class AnnotationsHandler(tornado.web.RequestHandler):
def post(self, filename):
# filename = self.get_argument("file", None)
filenames = sorted([name[:-4] for name in os.listdir(self.config.storage) if name not in ['.gitignore']])
filenames = self.get_filenames()
print(filenames, filename)
if filename not in filenames:
raise Exception('Invalid filename')
raise tornado.web.HTTPError(404)
if not os.path.exists(self.metadir):
os.mkdir(self.metadir)

View file

@@ -207,6 +207,26 @@
.noUi-horizontal .noUi-touch-area {
cursor: ew-resize;
}
.audioconfig{
z-index: 9;
background:black;
color: white;
position: relative;
width: 100px; /* as wide as audio controls only */
overflow: hidden;
white-space: nowrap;
}
.audioconfig:hover{
width: auto;
}
.audioconfig select, .audioconfig input{
margin:10px;
}
audio{
vertical-align: middle;
width: 100px; /* hides seek head */
}
</style>
<link rel="stylesheet" href="assets/nouislider-15.5.0.css">
<link rel="stylesheet" href="core.css">
@@ -220,8 +240,9 @@
<script src="annotate.js"></script>
<script src="playlist.js"></script>
<script type='text/javascript'>
let ann;
if (location.search) {
const player = new Annotator(
ann = new Annotator(
document.getElementById("interface"),
["test", "another", "google"],
location.search.substring(1)

View file

@@ -91,7 +91,7 @@ class StrokeSlice {
constructor(stroke, i_in, i_out) {
this.stroke = stroke; // Stroke
this.i_in = typeof i_in === 'undefined' ? 0 : i_in;
this.i_out = typeof i_out === 'undefined' ? this.stroke.points.length-1 : i_out;
this.i_out = typeof i_out === 'undefined' ? this.stroke.points.length - 1 : i_out;
}
getSliceId() {
@@ -100,7 +100,7 @@ class StrokeSlice {
// compatible with Stroke()
get points() {
return this.stroke.points.slice(this.i_in, this.i_out+1);
return this.stroke.points.slice(this.i_in, this.i_out + 1);
}
// compatible with Stroke()
@@ -190,6 +190,7 @@ class Annotator {
}
updateAnnotations(save) {
this.annotationsEl.innerHTML = "";
for (let annotation_i in this.annotations) {
const annotation = this.annotations[annotation_i];
@@ -410,12 +411,14 @@ class Annotator {
if (this.selectedAnnotation) {
this.updateAnnotations(true);
}
this.playAudioSegment(values[0], values[1]);
})
this.drawStrokePosition(this.inPointPosition, this.outPointPosition);
}
playStrokes(drawing, metadata) {
this.audioOffset = 0;
if (metadata) {
this.annotations = metadata.annotations;
this.audioFile = metadata.hasOwnProperty('audio') ? metadata.audio.file : null;
@@ -489,28 +492,39 @@ class Annotator {
});
let audioOffsetTextEl = document.createElement('label');
audioOffsetTextEl.innerText = "Offset (s)";
audioConfigEl.appendChild(audioOffsetTextEl);
let audioOffsetEl = document.createElement('input');
audioOffsetEl.setAttribute('type', 'number');
audioOffsetEl.value = this.audioOffset;
audioOffsetEl.setAttribute('step', '.01');
audioOffsetEl.value = this.audioOffset ?? 0;
audioOffsetEl.addEventListener('change', (ev) => {
this.setAudioOffset(ev.target.value);
});
audioConfigEl.appendChild(audioOffsetEl);
audioOffsetTextEl.appendChild(audioOffsetEl);
this.audioEl = document.createElement('audio');
if (this.audioFile) {
this.audioEl.setAttribute('src', this.audioFile);
}
this.audioEl.setAttribute('controls', true);
this.audioEl.addEventListener('canplaythrough', (ev) => {
console.log('loaded audio', ev);
this.audioEl.play();
});
audioConfigEl.appendChild(this.audioEl);
// this.audioEl.addEventListener('seeked', (ev)=>{
// console.log(ev);
// })
audioConfigEl.prepend(this.audioEl);
}
setAudioFile(audioFile) {
this.audioFile = audioFile;
this.audioEl.setAttribute('src', this.audioFile);
// this.audioEl.play();
// TODO update playhead
// TODO update this.duration after load
this.updateState();
@@ -523,6 +537,49 @@ class Annotator {
this.updateState();
}
/**
* @param float time time in ms
* @returns float
*/
getAudioTime(time) {
return Number.parseFloat(time) + (this.audioOffset ?? 0) * 1000;
}
/**
*
* @param float t_in in point time, in ms
* @param float t_out out point time, in ms
*/
playAudioSegment(t_in, t_out) {
if (this.audioStartTimeout) clearTimeout(this.audioStartTimeout);
if (this.audioEndTimeout) clearTimeout(this.audioEndTimeout);
// TODO, handle playback delay
const t_start = this.getAudioTime(t_in); // in ms
const t_diff = t_out - t_in; // in ms
console.log('set time', t_in, t_start, typeof t_start, typeof t_in, t_start < 0);
this.audioEl.pause();
if (t_start < 0) {
if (t_diff <= t_start * -1) {
console.log('no audio playback in segment', t_start, t_diff);
} else {
console.log('huh?', t_start, t_diff);
// a negative audio offset delays playback from the start
// this.audioStartTimeout = setTimeout((e) => this.audioEl.play(), t*-1000);
this.audioStartTimeout = setTimeout((e) => { this.audioEl.currentTime = 0 }, t_start * -1); // triggers play with "seeked" event
// this.audioEl.currentTime = 0;
}
} else {
this.audioEl.currentTime = t_start / 1000;
// this.audioEl.play(); // play is done in "seeked" evenlistener
console.log(this.audioEl.currentTime, t_start, t_in, t_out)
}
this.audioEndTimeout = setTimeout((e) => this.audioEl.pause(), t_diff);
}
getDuration() {
const points = this.strokes[this.strokes.length - 1].points;
return points[points.length - 1][3];
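
The segment-playback timing introduced in playAudioSegment above can be read in isolation roughly as follows. This is a minimal standalone sketch, assuming a plain HTMLAudioElement and an offset given in seconds; playSegment and its parameter names are illustrative and not identifiers from the commit:

// t_in and t_out are positions in the recorded drawing, in milliseconds;
// offsetSeconds shifts the audio track relative to the drawing.
function playSegment(audioEl, t_in, t_out, offsetSeconds) {
    const startMs = t_in + offsetSeconds * 1000; // where the segment starts in the audio, in ms
    const durationMs = t_out - t_in;             // how long the segment lasts, in ms
    audioEl.pause();
    if (startMs < 0) {
        // The segment begins before the audio track does: wait out the gap,
        // unless the whole segment ends before the audio would start at all.
        if (durationMs > -startMs) {
            setTimeout(() => { audioEl.currentTime = 0; audioEl.play(); }, -startMs);
        }
    } else {
        audioEl.currentTime = startMs / 1000;    // HTMLMediaElement.currentTime is in seconds
        audioEl.play();
    }
    // Stop when the drawing segment is over, regardless of where the audio started.
    setTimeout(() => audioEl.pause(), durationMs);
}

The Annotator version differs in that it keeps references to both timeouts so a new selection cancels pending ones, and it defers play() to event listeners on the audio element instead of calling it directly.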