use nannou::image;
use nannou::prelude::*;
use v4l::{Buffer, CaptureDevice, MappedBufferStream};

// The capture device lives in a static so that the mapped buffer stream
// stored in the model can borrow it for the lifetime of the program.
static mut CAMERA: Option<CaptureDevice> = None;

fn main() {
    unsafe {
        CAMERA = Some(
            CaptureDevice::new(2)
                .expect("Failed to open device")
                .format(640, 480, b"RGB3")
                .expect("Failed to set format")
                .fps(60)
                .expect("Failed to set frame interval"),
        );
    }

    nannou::app(model)
        .event(event)
        .update(update)
        .view(view)
        .run();
}

struct Model<'a> {
    stream: MappedBufferStream<'a>,
    _window: window::Id,
    image: Option<nannou::image::DynamicImage>,
}

fn model<'a>(app: &App) -> Model<'a> {
    // Create a buffer stream over the capture device opened in `main`.
    unsafe {
        let stream = MappedBufferStream::with_buffers(CAMERA.as_mut().unwrap(), 4)
            .expect("Failed to create buffer stream");

        let _window = app
            .new_window()
            .size(720, 720)
            // .event(window_event)
            // .raw_event(raw_window_event)
            // .key_pressed(key_pressed)
            // .key_released(key_released)
            // .mouse_moved(mouse_moved)
            // .mouse_pressed(mouse_pressed)
            // .mouse_released(mouse_released)
            // .mouse_wheel(mouse_wheel)
            // .mouse_entered(mouse_entered)
            // .mouse_exited(mouse_exited)
            // .touch(touch)
            // .touchpad_pressure(touchpad_pressure)
            // .moved(window_moved)
            // .resized(window_resized)
            // .hovered_file(hovered_file)
            // .hovered_file_cancelled(hovered_file_cancelled)
            // .dropped_file(dropped_file)
            // .focused(window_focused)
            // .unfocused(window_unfocused)
            // .closed(window_closed)
            .build()
            .unwrap();

        Model {
            stream,
            _window,
            image: None,
        }
    }
}

fn event(_app: &App, _model: &mut Model, event: Event) {
    match event {
        Event::WindowEvent {
            id: _window_id,
            // raw: _,
            simple: _simple,
        } => match _simple {
            None => println!("Unknown window event"),
            // Some(nannou::event::WindowEvent::Moved(_)) => println!("moved! {:?}", _simple.unwrap()),
            Some(_ev) => println!("Any other window event! {:?}", _ev),
        },
        Event::DeviceEvent(_device_id, _event) => {}
        Event::Update(_dt) => {}
        Event::Suspended => {}
        Event::Resumed => {}
    }
}

fn update(_app: &App, _model: &mut Model, _update: Update) {
    // Fetch the next camera frame and wrap its raw RGB bytes in an image
    // buffer so that `view` can upload it as a texture.
    let frame = _model.stream.next().unwrap();
    // let vec: Vec<u8> = frame.data().to_vec();
    let img_buffer = nannou::image::ImageBuffer::from_raw(640, 480, frame.data().to_vec());
    match img_buffer {
        None => {
            _model.image = None;
        }
        Some(ib) => {
            // ib.map(nannou::image::DynamicImage::ImageRgb8);
            _model.image = Some(nannou::image::DynamicImage::ImageRgb8(ib));
        }
    }
    // println!("Fetch frame nr {:?} {:?} {:?}", frame.meta().seq, frame.meta().timestamp, frame.data());
    // println!("Fetch frame nr {:?} {:?} {:?} {:?}", frame.meta().seq, frame.meta().timestamp, frame.meta().flags, frame.len());
}

fn view(_app: &App, _model: &Model, frame: Frame) {
    let draw = _app.draw();
    draw.background().color(PLUM);

    // Animate a rectangle bouncing around the window.
    let sine = (_app.time / 1.0).sin();
    let slowersine = (_app.time / 3.0).sin();
    let rotation = _app.time % (2.0 * PI);
    let boundary = _app.window_rect();
    let x = map_range(sine, -1.0, 1.0, boundary.left(), boundary.right());
    let y = map_range(slowersine, -1.0, 1.0, boundary.bottom(), boundary.top());

    // Draw the most recent camera frame, if one has been captured.
    // let texture = wgpu::Texture::load_from_
    // let assets = _app.assets_path().unwrap();
    // let img_path = assets.join("test1.png");
    // let texture = wgpu::Texture::from_path(_app, img_path).unwrap();
    // let image = nannou::image::DynamicImage::new_rgb8(640, 480);
    if let Some(dynamic_image) = &_model.image {
        let texture = wgpu::Texture::from_image(_app, dynamic_image);
        draw.texture(&texture);
    }

    draw.rect().color(STEELBLUE).rotate(rotation).x_y(x, y);
    draw.to_frame(_app, &frame).unwrap();
}