#[macro_use] extern crate log;
#[macro_use(s)] extern crate ndarray;

use simplelog::*;
use nannou::prelude::*;
use v4l::{Buffer, CaptureDevice, MappedBufferStream};
use image;

mod visualhaar;
mod heatmap;

// use std::fs::File;
// Global handle to the opened capture device. nannou's `model` callback takes
// no user data, so `main` stashes the configured device here for `model` to
// borrow when it builds the frame stream.
// NOTE(review): `static mut` is unsound under aliased access; consider a
// `OnceLock`/`Mutex` — here it appears to be written once in `main` and read
// once in `model`, both before the app loop runs. TODO confirm.
static mut CAMERA: Option<CaptureDevice> = None;
|
|
|
|
fn main() {
|
|
CombinedLogger::init(
|
|
vec![
|
|
TermLogger::new(LevelFilter::Info, Config::default(), TerminalMode::Mixed),
|
|
// WriteLogger::new(LevelFilter::Info, Config::default(), File::create("my_rust_binary.log").unwrap()),
|
|
]
|
|
).unwrap();
|
|
|
|
warn!("test");
|
|
|
|
// unsafe{
|
|
|
|
let device_id = 0;
|
|
|
|
if let Ok(dev) = CaptureDevice::new(device_id) {
|
|
|
|
let formats = dev.enumerate_formats();
|
|
if let Ok(formats) = formats {
|
|
info!("Supported camera formats");
|
|
for fmt in formats {
|
|
info!("{}", fmt);
|
|
}
|
|
}
|
|
|
|
unsafe{
|
|
CAMERA = Some(dev.format(424, 240, b"RGB3")
|
|
.expect("Failed to set format"));
|
|
}
|
|
} else {
|
|
println!("Failed to open camera device {}", device_id);
|
|
return;
|
|
}
|
|
// CAMERA = Some(CaptureDevice::new(3)
|
|
// .expect("Failed to open device")
|
|
// // .format(640, 480, b"RGB3")
|
|
// .format(424, 240, b"RGB3")
|
|
// // .format(320, 240, b"RGB3")
|
|
// .expect("Failed to set format")()
|
|
// .fps(30)
|
|
// .expect("Failed to set frame interval"));
|
|
// }
|
|
|
|
nannou::app(model)
|
|
.event(event)
|
|
.update(update)
|
|
.view(view)
|
|
.run();
|
|
}
/// Per-app state handed to the nannou callbacks (`update`, `view`, `event`).
struct Model<'a> {
    // Memory-mapped V4L frame stream; borrows the device held in the
    // `CAMERA` static (hence the lifetime parameter).
    stream: MappedBufferStream<'a>,
    // Id of the single nannou window; kept only so the window stays alive.
    _window: window::Id,
    // Camera frame as a nannou image. Currently always `None` — the
    // assignments in `update` are commented out.
    image: Option<nannou::image::DynamicImage>,
    // Haar cascade classifier, loaded from XML once at startup in `model`.
    haar: visualhaar::HaarClassifier,
    // Optional colour map passed to the classifier's scan.
    heatmap: Option<heatmap::Heatmap>,
    // Result of the most recent Haar scan; `None` when the latest frame
    // failed to convert into an image buffer.
    haar_outcome: Option<visualhaar::Outcome>,
}
|
|
|
|
fn model<'a>(app: &App) -> Model<'a> {
|
|
// Create a new capture device with a few extra parameters
|
|
unsafe{
|
|
// because our code is slower than the camera, set buffer to 1, then it will capture the latest frame
|
|
let stream = MappedBufferStream::with_buffers(CAMERA.as_mut().unwrap(), 1)
|
|
.expect("Failed to create buffer stream");
|
|
|
|
let _window = app.new_window()
|
|
.size(720, 720)
|
|
// .event(window_event)
|
|
// .raw_event(raw_window_event)
|
|
// .key_pressed(key_pressed)
|
|
// .key_released(key_released)
|
|
// .mouse_moved(mouse_moved)
|
|
// .mouse_pressed(mouse_pressed)
|
|
// .mouse_released(mouse_released)
|
|
// .mouse_wheel(mouse_wheel)
|
|
// .mouse_entered(mouse_entered)
|
|
// .mouse_exited(mouse_exited)
|
|
// .touch(touch)
|
|
// .touchpad_pressure(touchpad_pressure)
|
|
// .moved(window_moved)
|
|
// .resized(window_resized)
|
|
// .hovered_file(hovered_file)
|
|
// .hovered_file_cancelled(hovered_file_cancelled)
|
|
// .dropped_file(dropped_file)
|
|
// .focused(window_focused)
|
|
// .unfocused(window_unfocused)
|
|
// .closed(window_closed)
|
|
.build()
|
|
.unwrap();
|
|
|
|
let haar = visualhaar::HaarClassifier::from_xml("haarcascade_frontalface_alt2.xml").unwrap();
|
|
|
|
// println!("Haar: {:?}", haar);
|
|
|
|
|
|
Model {
|
|
stream: stream,
|
|
_window: _window,
|
|
image: None,
|
|
haar: haar,
|
|
heatmap: Some(heatmap::Heatmap::new(heatmap::ColorMaps::Plasma)),
|
|
haar_outcome: None,
|
|
}
|
|
}
|
|
}
|
|
|
|
fn event(_app: &App, _model: &mut Model, event: Event) {
|
|
match event {
|
|
Event::WindowEvent {
|
|
id: _window_id,
|
|
//raw: _,
|
|
simple: _simple,
|
|
} => {
|
|
match _simple {
|
|
None => {println!("Unkown window event")}
|
|
// Some(nannou::event::WindowEvent:Moved(_)) => {println!("moved! {:?}", _simple.unwrap())}
|
|
Some(_ev) => { println!("Any other window event! {:?}", _ev) }
|
|
}
|
|
}
|
|
Event::DeviceEvent(_device_id, _event) => {}
|
|
Event::Update(_dt) => {}
|
|
Event::Suspended => {}
|
|
Event::Resumed => {}
|
|
}
|
|
}
|
|
|
|
/// renders each frame (called through nannou)
|
|
fn update(_app: &App, _model: &mut Model, _update: Update) {
|
|
|
|
// get frame from camera stream
|
|
let frame = _model.stream.next().unwrap();
|
|
// let vec: Vec<u8> = frame.data().to_vec();
|
|
// Convert to Nannou ImageBuffer
|
|
let img_buffer: Option<nannou::image::ImageBuffer<nannou::image::Rgb<u8>, Vec<u8>>> = nannou::image::ImageBuffer::from_raw(424,240, frame.data().to_vec());
|
|
|
|
match img_buffer {
|
|
None => {
|
|
// no imagebuffer for this update. set haar outcomes to empy
|
|
_model.haar_outcome = None;
|
|
}
|
|
Some(ib) => {
|
|
// let
|
|
// ib.map( nannou::image::DynamicImage::ImageRgb8);
|
|
// let ib_bw = nannou::image::imageops::grayscale(&ib);
|
|
// _model.image = Some(nannou::image::DynamicImage::ImageLuma8(ib_bw));
|
|
let outcome = _model.haar.scan_image(ib, &_model.heatmap).unwrap();
|
|
// let image_hm = _model.heatmap.convert_image(outcome.dynamic_img);
|
|
_model.haar_outcome = Some(outcome);
|
|
// _model.image = Some(nannou::image::DynamicImage::ImageRgb8(ib));
|
|
|
|
}
|
|
}
|
|
|
|
// println!("Fetch frame nr {:?} {:?} {:?}", frame.meta().seq, frame.meta().timestamp, frame.data())
|
|
// println!("Fetch frame nr {:?} {:?} {:?} {:?}", frame.meta().seq, frame.meta().timestamp, frame.meta().flags, frame.len())
|
|
}
|
|
|
|
|
|
fn view(_app: &App, _model: &Model, frame: Frame){
|
|
let draw = _app.draw();
|
|
draw.background().color(BLACK);
|
|
let sine = (_app.time / 1.0).sin();
|
|
let slowersine = (_app.time / 3.0).sin();
|
|
let rotation = _app.time % (2. * PI);
|
|
let boundary = _app.window_rect();
|
|
|
|
// let texture = wgpu::Texture::load_from_
|
|
// let assets = _app.assets_path().unwrap();
|
|
// let img_path = assets.join("test1.png");
|
|
// let texture = wgpu::Texture::from_path(_app, img_path).unwrap();
|
|
|
|
// let image = nannou::image::DynamicImage::new_rgb8(640, 480);
|
|
match &_model.haar_outcome {
|
|
Some(outcome) => {
|
|
// let i = outcome.dyn(/);
|
|
// let img // ::from(&outcome.dynamic_img);
|
|
// let hm = heatmap::Heatmap::new(heatmap::ColorMaps::Plasma);
|
|
// let image_hm = hm.convert_image(image);
|
|
let img = image::DynamicImage::ImageRgb8(outcome.dynamic_img.to_rgb()).resize(1000, 1000, image::imageops::FilterType::Triangle);
|
|
|
|
let texture = wgpu::Texture::from_image(_app, &img);
|
|
draw.texture(&texture);
|
|
}
|
|
_ => {}
|
|
}
|
|
|
|
// let x = map_range(sine, -1.0, 1.0, boundary.left(), boundary.right());
|
|
// let y = map_range(slowersine, -1.0, 1.0, boundary.bottom(), boundary.top());
|
|
// draw.rect().color(STEELBLUE).rotate(rotation).x_y(x,y);
|
|
draw.to_frame(_app, &frame).unwrap();
|
|
}
|