//! Webcam face-detection demo: captures frames from a V4L camera device,
//! runs a visual Haar-cascade classifier on each frame, and displays the
//! annotated result in a nannou window.
|
|
#[macro_use] extern crate log; |
|
#[macro_use(s)] extern crate ndarray; |
|
|
|
use simplelog::*; |
|
use nannou::prelude::*; |
|
use v4l::{Buffer, CaptureDevice, MappedBufferStream}; |
|
|
|
mod visualhaar; |
|
|
|
// use std::fs::File; |
|
|
|
static mut CAMERA: Option<CaptureDevice> = None; |
|
|
|
fn main() { |
|
CombinedLogger::init( |
|
vec![ |
|
TermLogger::new(LevelFilter::Info, Config::default(), TerminalMode::Mixed), |
|
// WriteLogger::new(LevelFilter::Info, Config::default(), File::create("my_rust_binary.log").unwrap()), |
|
] |
|
).unwrap(); |
|
|
|
warn!("test"); |
|
|
|
unsafe{ |
|
CAMERA = Some(CaptureDevice::new(2) |
|
.expect("Failed to open device") |
|
// .format(640, 480, b"RGB3") |
|
.format(424, 240, b"RGB3") |
|
// .format(320, 240, b"RGB3") |
|
.expect("Failed to set format") |
|
.fps(30) |
|
.expect("Failed to set frame interval")); |
|
} |
|
|
|
nannou::app(model) |
|
.event(event) |
|
.update(update) |
|
.view(view) |
|
.run(); |
|
} |
|
|
|
struct Model<'a> { |
|
stream: MappedBufferStream<'a>, |
|
_window: window::Id, |
|
image: Option<nannou::image::DynamicImage>, |
|
haar: visualhaar::HaarClassifier, |
|
haar_outcome: Option<visualhaar::Outcome>, |
|
} |
|
|
|
fn model<'a>(app: &App) -> Model<'a> { |
|
// Create a new capture device with a few extra parameters |
|
unsafe{ |
|
// because our code is slower than the camera, set buffer to 1, then it will capture the latest frame |
|
let stream = MappedBufferStream::with_buffers(CAMERA.as_mut().unwrap(), 1) |
|
.expect("Failed to create buffer stream"); |
|
|
|
let _window = app.new_window() |
|
.size(720, 720) |
|
// .event(window_event) |
|
// .raw_event(raw_window_event) |
|
// .key_pressed(key_pressed) |
|
// .key_released(key_released) |
|
// .mouse_moved(mouse_moved) |
|
// .mouse_pressed(mouse_pressed) |
|
// .mouse_released(mouse_released) |
|
// .mouse_wheel(mouse_wheel) |
|
// .mouse_entered(mouse_entered) |
|
// .mouse_exited(mouse_exited) |
|
// .touch(touch) |
|
// .touchpad_pressure(touchpad_pressure) |
|
// .moved(window_moved) |
|
// .resized(window_resized) |
|
// .hovered_file(hovered_file) |
|
// .hovered_file_cancelled(hovered_file_cancelled) |
|
// .dropped_file(dropped_file) |
|
// .focused(window_focused) |
|
// .unfocused(window_unfocused) |
|
// .closed(window_closed) |
|
.build() |
|
.unwrap(); |
|
|
|
let haar = visualhaar::HaarClassifier::from_xml("haarcascade_frontalface_alt2.xml").unwrap(); |
|
|
|
println!("Haar: {:?}", haar); |
|
|
|
|
|
Model { |
|
stream: stream, |
|
_window: _window, |
|
image: None, |
|
haar: haar, |
|
haar_outcome: None, |
|
} |
|
} |
|
} |
|
|
|
fn event(_app: &App, _model: &mut Model, event: Event) { |
|
match event { |
|
Event::WindowEvent { |
|
id: _window_id, |
|
//raw: _, |
|
simple: _simple, |
|
} => { |
|
match _simple { |
|
None => {println!("Unkown window event")} |
|
// Some(nannou::event::WindowEvent:Moved(_)) => {println!("moved! {:?}", _simple.unwrap())} |
|
Some(_ev) => { println!("Any other window event! {:?}", _ev) } |
|
} |
|
} |
|
Event::DeviceEvent(_device_id, _event) => {} |
|
Event::Update(_dt) => {} |
|
Event::Suspended => {} |
|
Event::Resumed => {} |
|
} |
|
} |
|
|
|
/// renders each frame (called through nannou) |
|
fn update(_app: &App, _model: &mut Model, _update: Update) { |
|
|
|
// get frame from camera stream |
|
let frame = _model.stream.next().unwrap(); |
|
// let vec: Vec<u8> = frame.data().to_vec(); |
|
// Convert to Nannou ImageBuffer |
|
let img_buffer: Option<nannou::image::ImageBuffer<nannou::image::Rgb<u8>, Vec<u8>>> = nannou::image::ImageBuffer::from_raw(424,240, frame.data().to_vec()); |
|
|
|
match img_buffer { |
|
None => { |
|
// no imagebuffer for this update. set haar outcomes to empy |
|
_model.haar_outcome = None; |
|
} |
|
Some(ib) => { |
|
// let |
|
// ib.map( nannou::image::DynamicImage::ImageRgb8); |
|
// let ib_bw = nannou::image::imageops::grayscale(&ib); |
|
// _model.image = Some(nannou::image::DynamicImage::ImageLuma8(ib_bw)); |
|
let outcome = _model.haar.scan_image(ib).unwrap(); |
|
_model.haar_outcome = Some(outcome); |
|
// _model.image = Some(nannou::image::DynamicImage::ImageRgb8(ib)); |
|
|
|
} |
|
} |
|
|
|
// println!("Fetch frame nr {:?} {:?} {:?}", frame.meta().seq, frame.meta().timestamp, frame.data()) |
|
// println!("Fetch frame nr {:?} {:?} {:?} {:?}", frame.meta().seq, frame.meta().timestamp, frame.meta().flags, frame.len()) |
|
} |
|
|
|
|
|
fn view(_app: &App, _model: &Model, frame: Frame){ |
|
let draw = _app.draw(); |
|
draw.background().color(PLUM); |
|
let sine = (_app.time / 1.0).sin(); |
|
let slowersine = (_app.time / 3.0).sin(); |
|
let rotation = _app.time % (2. * PI); |
|
let boundary = _app.window_rect(); |
|
let x = map_range(sine, -1.0, 1.0, boundary.left(), boundary.right()); |
|
let y = map_range(slowersine, -1.0, 1.0, boundary.bottom(), boundary.top()); |
|
|
|
// let texture = wgpu::Texture::load_from_ |
|
// let assets = _app.assets_path().unwrap(); |
|
// let img_path = assets.join("test1.png"); |
|
// let texture = wgpu::Texture::from_path(_app, img_path).unwrap(); |
|
|
|
// let image = nannou::image::DynamicImage::new_rgb8(640, 480); |
|
match &_model.haar_outcome { |
|
Some(outcome) => { |
|
// let i = outcome.dyn(/); |
|
let texture = wgpu::Texture::from_image(_app, &outcome.dynamic_img); |
|
draw.texture(&texture); |
|
} |
|
_ => {} |
|
} |
|
|
|
draw.rect().color(STEELBLUE).rotate(rotation).x_y(x,y); |
|
draw.to_frame(_app, &frame).unwrap(); |
|
}
|
|
|