First heatmap version
parent 4d37d38ac6
commit 92ec2a4256
4 changed files with 274 additions and 20 deletions
225  src/heatmap.rs  (new file)
@@ -0,0 +1,225 @@
use ndarray as nd;
use image;

pub enum ColorMaps{
    Binary,
    NipySpectral,
    TraficLight,
}

#[derive(Debug)]
pub struct ColorMap{
    pub red: Vec<(f64, f64, f64)>,
    pub green: Vec<(f64, f64, f64)>,
    pub blue: Vec<(f64, f64, f64)>,
}

#[derive(Debug)]
pub struct Heatmap{
    pub cm: ColorMap
}

impl Heatmap{
    pub fn new(cm: ColorMaps) -> Self{
        Self{
            cm: ColorMap::new(cm)
        }
    }

    /// Map an image onto the colour map: the image is reduced to grayscale and
    /// every luma value indexes the 256-entry LUT generated from the ColorMap.
    pub fn convert_image(&self, img: image::DynamicImage) -> image::RgbImage {
        let gray_img: image::GrayImage = match img {
            image::DynamicImage::ImageLuma8(gray_image) => {
                gray_image
            }
            _ => {
                img.to_luma()
            }
        };

        let mut heatmap_img = image::RgbImage::new(gray_img.width(), gray_img.height());
        let lut_size = 256; // * 256 * 256;
        let lut = self.cm.generate_lut(lut_size);

        // info!("LUT: {:?}", lut);

        for pixel in gray_img.enumerate_pixels() {
            let l = pixel.2;
            let p = image::Rgb(lut[l.0[0] as usize]);
            heatmap_img.put_pixel(pixel.0, pixel.1, p);
        }

        return heatmap_img;
    }
}

// impl From<nd::Array2<i32>> for Heatmap {
//     fn from(array: nd::Array2<i32>) -> Self {
//
//     }
// }

// impl From<image::DynamicImage> for Heatmap {
//     fn from(image: image::DynamicImage) -> Self {
//         Self{
//             cm:
//         }
//     }
// }

impl ColorMap{
    pub fn new(m: ColorMaps) -> Self {
        let cm = match m {
            ColorMaps::Binary => {
                Self{
                    red: vec![
                        (0., 0., 0.), (1., 1., 1.)
                    ],
                    green: vec![
                        (0., 0., 0.), (1., 1., 1.)
                    ],
                    blue: vec![
                        (0., 0., 0.), (1., 1., 1.)
                    ],
                }
            }
            ColorMaps::TraficLight => {
                Self{
                    red: vec![
                        (0., 0., 0.), (0.5, 1., 1.), (1., 1., 1.)
                    ],
                    green: vec![
                        (0., 0., 0.), (0.5, 1., 1.), (1., 0., 0.)
                    ],
                    blue: vec![
                        (0., 0., 1.), (0.5, 0., 0.), (1., 0., 0.)
                    ],
                }
            }
            ColorMaps::NipySpectral => {
                Self{
                    red: vec![(0.0, 0.0, 0.0), (0.05, 0.4667, 0.4667),
                              (0.10, 0.5333, 0.5333), (0.15, 0.0, 0.0),
                              (0.20, 0.0, 0.0), (0.25, 0.0, 0.0),
                              (0.30, 0.0, 0.0), (0.35, 0.0, 0.0),
                              (0.40, 0.0, 0.0), (0.45, 0.0, 0.0),
                              (0.50, 0.0, 0.0), (0.55, 0.0, 0.0),
                              (0.60, 0.0, 0.0), (0.65, 0.7333, 0.7333),
                              (0.70, 0.9333, 0.9333), (0.75, 1.0, 1.0),
                              (0.80, 1.0, 1.0), (0.85, 1.0, 1.0),
                              (0.90, 0.8667, 0.8667), (0.95, 0.80, 0.80),
                              (1.0, 0.80, 0.80)],
                    green: vec![(0.0, 0.0, 0.0), (0.05, 0.0, 0.0),
                                (0.10, 0.0, 0.0), (0.15, 0.0, 0.0),
                                (0.20, 0.0, 0.0), (0.25, 0.4667, 0.4667),
                                (0.30, 0.6000, 0.6000), (0.35, 0.6667, 0.6667),
                                (0.40, 0.6667, 0.6667), (0.45, 0.6000, 0.6000),
                                (0.50, 0.7333, 0.7333), (0.55, 0.8667, 0.8667),
                                (0.60, 1.0, 1.0), (0.65, 1.0, 1.0),
                                (0.70, 0.9333, 0.9333), (0.75, 0.8000, 0.8000),
                                (0.80, 0.6000, 0.6000), (0.85, 0.0, 0.0),
                                (0.90, 0.0, 0.0), (0.95, 0.0, 0.0),
                                (1.0, 0.80, 0.80)],
                    blue: vec![(0.0, 0.0, 0.0), (0.05, 0.5333, 0.5333),
                               (0.10, 0.6000, 0.6000), (0.15, 0.6667, 0.6667),
                               (0.20, 0.8667, 0.8667), (0.25, 0.8667, 0.8667),
                               (0.30, 0.8667, 0.8667), (0.35, 0.6667, 0.6667),
                               (0.40, 0.5333, 0.5333), (0.45, 0.0, 0.0),
                               (0.5, 0.0, 0.0), (0.55, 0.0, 0.0),
                               (0.60, 0.0, 0.0), (0.65, 0.0, 0.0),
                               (0.70, 0.0, 0.0), (0.75, 0.0, 0.0),
                               (0.80, 0.0, 0.0), (0.85, 0.0, 0.0),
                               (0.90, 0.0, 0.0), (0.95, 0.0, 0.0),
                               (1.0, 0.80, 0.80)],
                }
            }
        };

        return cm;
    }

    /// Similar to MatplotLib LinearSegmentedColormap
    /// @see https://github.com/matplotlib/matplotlib/blob/13e3573b721210d84865d148aab7f63cc2fc95a6/lib/matplotlib/colors.py
    /// """
    /// Create color map from linear mapping segments
    ///
    /// segmentdata argument is a dictionary with a red, green and blue
    /// entries. Each entry should be a list of *x*, *y0*, *y1* tuples,
    /// forming rows in a table. Entries for alpha are optional.
    ///
    /// Example: suppose you want red to increase from 0 to 1 over
    /// the bottom half, green to do the same over the middle half,
    /// and blue over the top half. Then you would use::
    ///
    ///     cdict = {'red':   [(0.0,  0.0, 0.0),
    ///                        (0.5,  1.0, 1.0),
    ///                        (1.0,  1.0, 1.0)],
    ///              'green': [(0.0,  0.0, 0.0),
    ///                        (0.25, 0.0, 0.0),
    ///                        (0.75, 1.0, 1.0),
    ///                        (1.0,  1.0, 1.0)],
    ///              'blue':  [(0.0,  0.0, 0.0),
    ///                        (0.5,  0.0, 0.0),
    ///                        (1.0,  1.0, 1.0)]}
    ///
    /// Each row in the table for a given color is a sequence of
    /// *x*, *y0*, *y1* tuples. In each sequence, *x* must increase
    /// monotonically from 0 to 1. For any input value *z* falling
    /// between *x[i]* and *x[i+1]*, the output value of a given color
    /// will be linearly interpolated between *y1[i]* and *y0[i+1]*::
    ///
    ///     row i:   x  y0  y1
    ///                     /
    ///                    /
    ///     row i+1: x  y0  y1
    ///
    /// Hence y0 in the first row and y1 in the last row are never used.
    ///
    /// See Also
    /// --------
    /// LinearSegmentedColormap.from_list
    ///     Static method; factory function for generating a smoothly-varying
    ///     LinearSegmentedColormap.
    /// """
    pub fn generate_lut(&self, N: usize) -> Vec<[u8; 3]> {
        let r = Self::interpolate_color(&self.red, N);
        let g = Self::interpolate_color(&self.green, N);
        let b = Self::interpolate_color(&self.blue, N);
        let mut lut = Vec::<[u8;3]>::new();

        for i in 0..N {
            lut.push([r[i], g[i], b[i]]);
        }

        lut
    }

    /// Interpolate one channel's (x, y0, y1) segment points into an N-step ramp of bytes.
    pub fn interpolate_color(colors: &Vec<(f64, f64, f64)>, N: usize) -> Vec<u8>{
        let step_size = 1./N as f64;

        let mut prev_color: Option<(f64, f64, f64)> = None;

        let mut lut = Vec::<u8>::new();

        for color in colors {
            match prev_color {
                Some(prev) => {
                    let steps = (color.0/step_size) as usize - lut.len();
                    for i in 0..steps {
                        let factor = (i + 1) as f64 / steps as f64;
                        let c = ((prev.2 * (1. - factor) + color.1 * factor) * (N as f64 - 1.)) as u8;
                        lut.push(c);
                    }
                }
                None => {
                    let steps = (color.0/step_size) as usize;
                    for _ in 0..steps{
                        lut.push((color.2 * (N as f64 - 1.)) as u8);
                    }
                }
            }
            prev_color = Some(color.clone());
        }
        // now fill the last bit of the lut with the last color
        for _ in lut.len()..N {
            lut.push(lut.last().unwrap().clone());
        }

        lut
    }
}
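For orientation (not part of the commit): with the Binary map every channel ramps linearly from 0 at x = 0 to 1 at x = 1, so the 256-entry LUT maps a gray value g to roughly [g, g, g], while TraficLight runs blue → green → red as the gray value rises. Below is a minimal sketch of how the new module might be driven from a small binary; the file paths are placeholders, and it assumes the same image-crate version as the code above (to_luma(), ImageBuffer::save()).

use image;

mod heatmap; // assumes heatmap.rs sits next to this file, as added in this commit

fn main() {
    // Any input works: convert_image() falls back to to_luma() internally.
    let img = image::open("input.png").expect("could not open input image");

    let hm = heatmap::Heatmap::new(heatmap::ColorMaps::NipySpectral);
    let coloured: image::RgbImage = hm.convert_image(img);

    coloured.save("heatmap.png").expect("could not write output image");
}

The hunks further down do essentially the same thing with the grayscale output of the haar scanner.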

15  src/main.rs
@@ -5,7 +5,7 @@
 use simplelog::*;
 use nannou::prelude::*;
 use v4l::{Buffer, CaptureDevice, MappedBufferStream};
+use image;
 mod visualhaar;
 
 // use std::fs::File;
@@ -148,13 +148,11 @@ fn update(_app: &App, _model: &mut Model, _update: Update) {
 
 fn view(_app: &App, _model: &Model, frame: Frame){
     let draw = _app.draw();
-    draw.background().color(PLUM);
+    draw.background().color(BLACK);
     let sine = (_app.time / 1.0).sin();
     let slowersine = (_app.time / 3.0).sin();
     let rotation = _app.time % (2. * PI);
     let boundary = _app.window_rect();
-    let x = map_range(sine, -1.0, 1.0, boundary.left(), boundary.right());
-    let y = map_range(slowersine, -1.0, 1.0, boundary.bottom(), boundary.top());
 
     // let texture = wgpu::Texture::load_from_
     // let assets = _app.assets_path().unwrap();
@@ -165,12 +163,17 @@ fn view(_app: &App, _model: &Model, frame: Frame){
     match &_model.haar_outcome {
         Some(outcome) => {
            // let i = outcome.dyn(/);
-            let texture = wgpu::Texture::from_image(_app, &outcome.dynamic_img);
+            // let img // ::from(&outcome.dynamic_img);
+            let img = image::DynamicImage::ImageRgb8(outcome.dynamic_img.to_rgb()).resize(1000, 1000, image::imageops::FilterType::Triangle);
+
+            let texture = wgpu::Texture::from_image(_app, &img);
             draw.texture(&texture);
         }
         _ => {}
     }
 
-    draw.rect().color(STEELBLUE).rotate(rotation).x_y(x,y);
+    // let x = map_range(sine, -1.0, 1.0, boundary.left(), boundary.right());
+    // let y = map_range(slowersine, -1.0, 1.0, boundary.bottom(), boundary.top());
+    // draw.rect().color(STEELBLUE).rotate(rotation).x_y(x,y);
     draw.to_frame(_app, &frame).unwrap();
 }

@@ -9,6 +9,7 @@ use v4l::{Buffer, CaptureDevice, MappedBufferStream};
 use image;
 
 mod visualhaar;
+mod heatmap;
 
 // use std::fs::File;
@@ -30,7 +31,7 @@ fn main() {
 
     let sw = Stopwatch::start_new();
 
-    let frame = image::open("/home/ruben/Documents/Projecten/2020/rust/lena_orig.png");
+    let frame = image::open("/home/ruben/Documents/Projecten/2020/rust/lena_orig-s.png");
 
 
     // let vec: Vec<u8> = frame.data().to_vec();
@@ -49,6 +50,12 @@
     // _model.image = Some(nannou::image::DynamicImage::ImageLuma8(ib_bw));
     let i = ib.as_rgb8().unwrap().clone();
     let image = haar.scan_image(i).unwrap().dynamic_img;
+
+    // let hm = heatmap::Heatmap::new(heatmap::ColorMaps::NipySpectral);
+    let hm = heatmap::Heatmap::new(heatmap::ColorMaps::TraficLight);
+    // let hm = heatmap::Heatmap::new(heatmap::ColorMaps::Binary);
+    let image = hm.convert_image(image);
+
     image.save("/home/ruben/Documents/Projecten/2020/rust/lena_orig-output.png");
     info!("Scanning for faces took {}ms", sw.elapsed_ms());
     // _model.image = Some(nannou::image::DynamicImage::ImageRgb8(ib));

@@ -4,6 +4,7 @@ use image;
 use log::{info, trace, warn};
 use std::{convert::TryInto, error::Error};
 
+use stopwatch::{Stopwatch};
 use ndarray as nd;
 
 /// A haarclasifier based on opencv cascade XML files
@@ -60,7 +61,7 @@ impl HaarClassifierFeature{
         score
     }
 
-    fn draw(&self, draw_window: &mut nd::ArrayViewMut2<f64>, scale: &f64) {
+    fn draw(&self, draw_window: &mut nd::ArrayViewMut2<i16>, scale: &f64) {
         for rect in &self.rects{
             rect.draw(draw_window, scale);
         }
@@ -76,7 +77,7 @@ pub struct HaarClassifierFeatureRect{
     width: u8,
     height: u8,
     /// weight factor
-    weight: f64,
+    weight: i16,
 }
 
 impl HaarClassifierFeatureRect{
@@ -94,12 +95,12 @@ impl HaarClassifierFeatureRect{
         let (x1, y1, x2, y2) = self.get_coordinates_for_scale(scale);
 
         let sum = (image_window[[y2,x2]] + image_window[[y1,x1]] - image_window[[y1, x2]] - image_window[[y2, x1]]) as f64;
-        let sum = (sum/(scale*scale)) * self.weight; // normalise: when the window grows, all values of the integral image become bigger by a factor scale-squared
+        let sum = (sum/(scale*scale)) * self.weight as f64; // normalise: when the window grows, all values of the integral image become bigger by a factor scale-squared
         return sum;
     }
 
 
-    fn draw(&self, draw_window: &mut nd::ArrayViewMut2<f64>, scale: &f64) {
+    fn draw(&self, draw_window: &mut nd::ArrayViewMut2<i16>, scale: &f64) {
         let (x1, y1, x2, y2) = self.get_coordinates_for_scale(scale);
 
         // TODO how to speed this up?
@@ -170,12 +171,13 @@ impl HaarClassifier {
                 // println!("{:?}",rect.text());
                 let v: Vec<&str> = rect.text().unwrap().split_whitespace().collect();
                 assert_eq!(v.len(), 5, "Expected values for features: x, y, width, height, weight");
+                let w: f64 = v[4].parse()?;
                 rects.push(HaarClassifierFeatureRect{
                     tl_x: v[0].parse()?,
                     tl_y: v[1].parse()?,
                     width: v[2].parse()?,
                     height: v[3].parse()?,
-                    weight: v[4].parse()?,
+                    weight: w as i16,
                 });
             }
@@ -334,7 +336,7 @@ impl HaarClassifier {
 
         let integral = Self::integral_image(&img_bw);
 
-        let mut output_frame: nd::Array2<f64> = nd::Array::zeros((
+        let mut output_frame: nd::Array2<i16> = nd::Array::zeros((
             img_bw.dimensions().1 as usize,
             img_bw.dimensions().0 as usize,
         ));
@@ -351,14 +353,16 @@ impl HaarClassifier {
         let mut count_faces = 0;
         let mut count_not_faces = 0;
         while window_size < max_window_size {
+
+            let sw = Stopwatch::start_new();
             let scale = (window_size-1) as f64 / self.width as f64;
             // to calculate a rect, we would need a -1 row, if we ignore that precision and add one at the end: (eg required when an item has width 20 (== feature width))
             let scan_window_size = window_size + 1;
             info!("Window size: {:?} {:?}", window_size, scale);
 
             // step by scale.ceil() as this is 1px in the model's size. (small is probably unnecesarily fine-grained)
-            for x in (0..(img_bw.dimensions().0 as usize - scan_window_size)).step_by(scale.ceil() as usize) {
-                for y in (0..(img_bw.dimensions().1 as usize - scan_window_size)).step_by(scale.ceil() as usize) {
+            for x in (0..(img_bw.dimensions().0 as usize - scan_window_size)).step_by((scale * 1.0).ceil() as usize) {
+                for y in (0..(img_bw.dimensions().1 as usize - scan_window_size)).step_by((scale * 1.0).ceil() as usize) {
                     let window = integral.slice(s![y..y+scan_window_size, x..x+scan_window_size]);
                     let mut output_window = output_frame.slice_mut(s![y..y+scan_window_size, x..x+scan_window_size]);
                     if self.scan_window(window, scale, &mut output_window) {
@@ -370,6 +374,8 @@ impl HaarClassifier {
                 }
                 // break;
             }
 
+            info!("\ttook: {:?}ms", sw.elapsed_ms());
+
             // break;
 
             window_size = (window_size as f32 * 1.2) as usize; // TODO make grow-factor variable (now 1.2)
@@ -379,14 +385,22 @@ impl HaarClassifier {
         // test_window += 10.;
 
         // Find the largest non-NaN in vector, or NaN otherwise:
-        let max_output_pixel = output_frame.iter().cloned().fold(0./0., f64::max);
-        let min_output_pixel = output_frame.iter().cloned().fold(f64::NAN, f64::min);
+        let max_output_pixel = output_frame.iter().max().unwrap().clone(); //when f64: output_frame.iter().cloned().fold(0./0., f64::max);
+        let min_output_pixel = output_frame.iter().min().unwrap().clone(); //when f64: output_frame.iter().cloned().fold(f64::NAN, f64::min);
         info!("Maximum pixel value in drawing: {:?} / min: {:?}", max_output_pixel, min_output_pixel);
         info!("Count accepted/rejected windows: {:?}/{:?}", count_faces, count_not_faces);
 
         // let max_output_pixel = output_frame.iter().par().unwrap().clone();
         output_frame -= min_output_pixel;
-        output_frame /= (max_output_pixel-min_output_pixel) / 255.;
+        let pix_diff = (max_output_pixel-min_output_pixel) as f64 / 256.;
+        if pix_diff.abs() > 1. {
+            let frac: i16 = if pix_diff.is_sign_positive(){
+                pix_diff.ceil() as i16
+            } else {
+                pix_diff.floor() as i16
+            };
+            output_frame /= frac;
+        }
         // let image_frame = output_frame / (max_output_pixel as)
 
         // convert to image, thanks to https://stackoverflow.com/a/56762490
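A note on the hunk above (not from the commit itself): output_frame is now an i16 array, and an integer ndarray cannot be divided by the fractional factor (max − min) / 255 that the f64 version used, so the range is divided by 256, rounded away from zero, and the division is skipped when the values already fit in a byte. A minimal standalone sketch of that rescaling, assuming the ndarray crate; the function name is illustrative only:

use ndarray as nd;

// Rescale an i16 frame so its values roughly span 0..=255, mirroring the
// integer normalisation in the hunk above.
fn rescale_to_byte_range(mut frame: nd::Array2<i16>) -> nd::Array2<i16> {
    let max = frame.iter().copied().max().unwrap_or(0);
    let min = frame.iter().copied().min().unwrap_or(0);
    frame -= min; // shift the minimum to zero

    // Round the scale factor away from zero; skip dividing when the range
    // already fits in 0..=255 (|factor| <= 1).
    let pix_diff = (max as f64 - min as f64) / 256.;
    if pix_diff.abs() > 1. {
        let frac: i16 = if pix_diff.is_sign_positive() {
            pix_diff.ceil() as i16
        } else {
            pix_diff.floor() as i16
        };
        frame /= frac;
    }
    frame
}

Since the minimum has already been subtracted, pix_diff is non-negative in practice; the sign check simply mirrors the committed code.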
@@ -404,10 +418,11 @@ impl HaarClassifier {
         })
     }
 
-    fn scan_window(&self, integral_window: nd::ArrayView2<u32>, scale: f64, output_window: &mut nd::ArrayViewMut2<f64>) -> bool{
+    fn scan_window(&self, integral_window: nd::ArrayView2<u32>, scale: f64, output_window: &mut nd::ArrayViewMut2<i16>) -> bool{
         let mut failed = false; // let's assume the cascade will work
         for stage in &self.stages{
             let mut stage_sum = 0.;
+            let mut i = 0;
             for classifier in &stage.weak_classifiers{
                 // or 'stumps'
                 let feature = classifier.feature.compute_feature(&integral_window, &scale);
@@ -420,10 +435,14 @@ impl HaarClassifier {
                     stage_sum += classifier.leaf_values[1];
                     // weak classifier bigger then threshold... draw it!
                     classifier.feature.draw(output_window, &scale);
+                    i+=1;
                     classifier.right
                 };
 
+                // if i > 2{
+                //     break;
+                // }
 
                 // classifier.feature.draw(output_window, &scale);
             }
             if stage_sum < stage.treshold{