Load haar features and draw matched ones to canvas
parent 190f71bf06
commit 4d37d38ac6

6 changed files with 828 additions and 27 deletions
.vscode/launch.json  (vendored, new file: 63 lines added)

{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "type": "lldb",
            "request": "launch",
            "name": "Debug executable 'visual_haarcascades'",
            "cargo": {
                "args": [
                    "build",
                    "--bin=visual_haarcascades",
                    "--package=visual_haarcascades"
                ],
                "filter": {
                    "name": "visual_haarcascades",
                    "kind": "bin"
                }
            },
            "args": [],
            "cwd": "${workspaceFolder}"
        },
        {
            "type": "lldb",
            "request": "launch",
            "name": "Debug executable 'visual_haarcascades_test'",
            "cargo": {
                "args": [
                    "build",
                    "--bin=visual_haarcascades_test",
                    "--package=visual_haarcascades"
                ],
                "filter": {
                    "name": "visual_haarcascades_test",
                    "kind": "bin"
                }
            },
            "args": [],
            "cwd": "${workspaceFolder}"
        },
        {
            "type": "lldb",
            "request": "launch",
            "name": "Debug unit tests in executable 'visual_haarcascades'",
            "cargo": {
                "args": [
                    "test",
                    "--no-run",
                    "--bin=visual_haarcascades",
                    "--package=visual_haarcascades"
                ],
                "filter": {
                    "name": "visual_haarcascades",
                    "kind": "bin"
                }
            },
            "args": [],
            "cwd": "${workspaceFolder}"
        }
    ]
}
Cargo.lock  (generated: 172 changed lines)

@@ -195,6 +195,17 @@ dependencies = [
  "serde",
 ]

+[[package]]
+name = "chrono"
+version = "0.4.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "80094f509cf8b5ae86a4966a39b3ff66cd7e2a3e594accec3743ff3fabeab5b2"
+dependencies = [
+ "num-integer",
+ "num-traits 0.2.11",
+ "time",
+]
+
 [[package]]
 name = "clang-sys"
 version = "0.29.3"

@@ -292,7 +303,7 @@ dependencies = [
  "daggy 0.5.0",
  "fnv",
  "instant",
- "num",
+ "num 0.2.1",
  "pistoncore-input",
  "rusttype 0.8.3",
 ]

@@ -1052,6 +1063,15 @@ dependencies = [
  "libc",
 ]

+[[package]]
+name = "matrixmultiply"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4f7ec66360130972f34830bfad9ef05c6610a43938a467bcc9ab9369ab3478f"
+dependencies = [
+ "rawpointer",
+]
+
 [[package]]
 name = "maybe-uninit"
 version = "2.0.0"

@@ -1180,6 +1200,19 @@ dependencies = [
  "winit",
 ]

+[[package]]
+name = "ndarray"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac06db03ec2f46ee0ecdca1a1c34a99c0d188a0d83439b84bf0cb4b386e4ab09"
+dependencies = [
+ "matrixmultiply",
+ "num-complex 0.2.4",
+ "num-integer",
+ "num-traits 0.2.11",
+ "rawpointer",
+]
+
 [[package]]
 name = "net2"
 version = "0.2.33"

@@ -1230,20 +1263,46 @@ version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "004d578bbfc8a6bdd4690576a8381af234ef051dd4cc358604e1784821e8205c"

+[[package]]
+name = "num"
+version = "0.1.42"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4703ad64153382334aa8db57c637364c322d3372e097840c72000dabdcf6156e"
+dependencies = [
+ "num-bigint 0.1.44",
+ "num-complex 0.1.43",
+ "num-integer",
+ "num-iter",
+ "num-rational 0.1.42",
+ "num-traits 0.2.11",
+]
+
 [[package]]
 name = "num"
 version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b8536030f9fea7127f841b45bb6243b27255787fb4eb83958aa1ef9d2fdc0c36"
 dependencies = [
- "num-bigint",
- "num-complex",
+ "num-bigint 0.2.6",
+ "num-complex 0.2.4",
  "num-integer",
  "num-iter",
  "num-rational 0.2.4",
  "num-traits 0.2.11",
 ]

+[[package]]
+name = "num-bigint"
+version = "0.1.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e63899ad0da84ce718c14936262a41cee2c79c981fc0a0e7c7beb47d5a07e8c1"
+dependencies = [
+ "num-integer",
+ "num-traits 0.2.11",
+ "rand 0.4.6",
+ "rustc-serialize",
+]
+
 [[package]]
 name = "num-bigint"
 version = "0.2.6"

@@ -1255,6 +1314,16 @@ dependencies = [
  "num-traits 0.2.11",
 ]

+[[package]]
+name = "num-complex"
+version = "0.1.43"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b288631d7878aaf59442cffd36910ea604ecd7745c36054328595114001c9656"
+dependencies = [
+ "num-traits 0.2.11",
+ "rustc-serialize",
+]
+
 [[package]]
 name = "num-complex"
 version = "0.2.4"

@@ -1292,8 +1361,10 @@ version = "0.1.42"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ee314c74bd753fc86b4780aa9475da469155f3848473a261d2d18e35245a784e"
 dependencies = [
+ "num-bigint 0.1.44",
  "num-integer",
  "num-traits 0.2.11",
+ "rustc-serialize",
 ]

 [[package]]

@@ -1303,7 +1374,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5c000134b5dbf44adc5cb772486d335293351644b801551abe8f75c84cfa4aef"
 dependencies = [
  "autocfg 1.0.0",
- "num-bigint",
+ "num-bigint 0.2.6",
  "num-integer",
  "num-traits 0.2.11",
 ]

@@ -1631,6 +1702,19 @@ dependencies = [
  "proc-macro2 1.0.10",
 ]

+[[package]]
+name = "rand"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293"
+dependencies = [
+ "fuchsia-cprng",
+ "libc",
+ "rand_core 0.3.1",
+ "rdrand",
+ "winapi 0.3.8",
+]
+
 [[package]]
 name = "rand"
 version = "0.5.6"

@@ -1816,6 +1900,12 @@ dependencies = [
  "libc",
 ]

+[[package]]
+name = "rawpointer"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3"
+
 [[package]]
 name = "rayon"
 version = "1.3.0"

@@ -1910,12 +2000,27 @@ dependencies = [
  "smallvec 0.6.13",
 ]

+[[package]]
+name = "roxmltree"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "17dfc6c39f846bfc7d2ec442ad12055d79608d501380789b965d22f9354451f2"
+dependencies = [
+ "xmlparser",
+]
+
 [[package]]
 name = "rustc-hash"
 version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"

+[[package]]
+name = "rustc-serialize"
+version = "0.3.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
+
 [[package]]
 name = "rustc_version"
 version = "0.2.3"

@@ -2048,6 +2153,17 @@ dependencies = [
  "num-traits 0.2.11",
 ]

+[[package]]
+name = "simplelog"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b2736f58087298a448859961d3f4a0850b832e72619d75adc69da7993c2cd3c"
+dependencies = [
+ "chrono",
+ "log",
+ "termcolor",
+]
+
 [[package]]
 name = "siphasher"
 version = "0.3.3"

@@ -2121,6 +2237,15 @@ dependencies = [
  "byteorder",
 ]

+[[package]]
+name = "stopwatch"
+version = "0.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3d04b5ebc78da44d3a456319d8bc2783e7d8cc7ccbb5cb4dc3f54afbd93bf728"
+dependencies = [
+ "num 0.1.42",
+]
+
 [[package]]
 name = "storage-map"
 version = "0.2.0"

@@ -2179,14 +2304,6 @@ dependencies = [
  "winapi-util",
 ]

-[[package]]
-name = "testproject"
-version = "0.1.0"
-dependencies = [
- "nannou",
- "v4l",
-]
-
 [[package]]
 name = "textwrap"
 version = "0.11.0"

@@ -2225,6 +2342,16 @@ dependencies = [
  "miniz_oxide",
 ]

+[[package]]
+name = "time"
+version = "0.1.43"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438"
+dependencies = [
+ "libc",
+ "winapi 0.3.8",
+]
+
 [[package]]
 name = "toml"
 version = "0.5.6"

@@ -2284,6 +2411,21 @@ version = "0.9.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed"

+[[package]]
+name = "visual_haarcascades"
+version = "0.1.0"
+dependencies = [
+ "image 0.23.4",
+ "log",
+ "nannou",
+ "ndarray",
+ "roxmltree",
+ "simplelog",
+ "stopwatch",
+ "v4l",
+ "xml-rs",
+]
+
 [[package]]
 name = "void"
 version = "1.0.2"

@@ -2612,6 +2754,12 @@ version = "0.8.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b07db065a5cf61a7e4ba64f29e67db906fb1787316516c4e6e5ff0fea1efcd8a"

+[[package]]
+name = "xmlparser"
+version = "0.13.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "52613e655f6f11f63c0fe7d1c3b5ef69e44d96df9b65dab296b441ed0e1125f5"
+
 [[package]]
 name = "zerocopy"
 version = "0.2.8"
Cargo.toml  (18 changed lines)

@@ -1,15 +1,29 @@
 [package]
-name = "testproject"
+name = "visual_haarcascades"
 version = "0.1.0"
 authors = ["Ruben van de Ven <git@rubenvandeven.com>"]
 edition = "2018"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

+[[bin]]
+name = "visual_haarcascades"
+path = "src/main.rs"
+
+[[bin]]
+name = "visual_haarcascades_test"
+path = "src/test.rs"
+
 [dependencies]
 nannou = "0.14"
 # clang-sys = "0.29.3"
 v4l = {version = "0.5.1", features = ["libv4l"]}
+image = "0.23"
+log = "0.4"
+xml-rs = "0.8.3"
+simplelog = "0.8.0"
 # image2 = "0.11.3" # seems deprecated and advices imaged
 # rscam = "0.5.5"
+roxmltree = "0.13.0"
+ndarray = "0.13"
+stopwatch = "0.0.7"
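For reference, the two [[bin]] targets above can be run directly with `cargo run --bin visual_haarcascades` or `cargo run --bin visual_haarcascades_test`; the launch configurations added in .vscode/launch.json build exactly these two targets.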
src/main.rs  (60 changed lines)

@@ -1,17 +1,35 @@
+
+#[macro_use] extern crate log;
+#[macro_use(s)] extern crate ndarray;
+
+use simplelog::*;
 use nannou::prelude::*;
 use v4l::{Buffer, CaptureDevice, MappedBufferStream};
-use nannou::image;
+
+mod visualhaar;
+
+// use std::fs::File;

 static mut CAMERA: Option<CaptureDevice> = None;

 fn main() {
+    CombinedLogger::init(
+        vec![
+            TermLogger::new(LevelFilter::Info, Config::default(), TerminalMode::Mixed),
+            // WriteLogger::new(LevelFilter::Info, Config::default(), File::create("my_rust_binary.log").unwrap()),
+        ]
+    ).unwrap();
+
+    warn!("test");
+
     unsafe{
         CAMERA = Some(CaptureDevice::new(2)
             .expect("Failed to open device")
-            .format(640, 480, b"RGB3")
+            // .format(640, 480, b"RGB3")
+            .format(424, 240, b"RGB3")
+            // .format(320, 240, b"RGB3")
             .expect("Failed to set format")
-            .fps(60)
+            .fps(30)
             .expect("Failed to set frame interval"));
     }

@@ -25,13 +43,16 @@ fn main() {
 struct Model<'a> {
     stream: MappedBufferStream<'a>,
     _window: window::Id,
-    image: Option<nannou::image::DynamicImage>
+    image: Option<nannou::image::DynamicImage>,
+    haar: visualhaar::HaarClassifier,
+    haar_outcome: Option<visualhaar::Outcome>,
 }

 fn model<'a>(app: &App) -> Model<'a> {
     // Create a new capture device with a few extra parameters
     unsafe{
-        let stream = MappedBufferStream::with_buffers(CAMERA.as_mut().unwrap(), 4)
+        // because our code is slower than the camera, set buffer to 1, then it will capture the latest frame
+        let stream = MappedBufferStream::with_buffers(CAMERA.as_mut().unwrap(), 1)
             .expect("Failed to create buffer stream");

         let _window = app.new_window()

@@ -59,10 +80,17 @@ fn model<'a>(app: &App) -> Model<'a> {
             .build()
             .unwrap();

+        let haar = visualhaar::HaarClassifier::from_xml("haarcascade_frontalface_alt2.xml").unwrap();
+
+        println!("Haar: {:?}", haar);
+
         Model {
             stream: stream,
             _window: _window,
             image: None,
+            haar: haar,
+            haar_outcome: None,
         }
     }
 }

@@ -87,19 +115,28 @@ fn event(_app: &App, _model: &mut Model, event: Event) {
     }
 }

+/// renders each frame (called through nannou)
 fn update(_app: &App, _model: &mut Model, _update: Update) {
+
+    // get frame from camera stream
     let frame = _model.stream.next().unwrap();
     // let vec: Vec<u8> = frame.data().to_vec();
-    let img_buffer = nannou::image::ImageBuffer::from_raw(640,480, frame.data().to_vec());
+    // Convert to Nannou ImageBuffer
+    let img_buffer: Option<nannou::image::ImageBuffer<nannou::image::Rgb<u8>, Vec<u8>>> = nannou::image::ImageBuffer::from_raw(424,240, frame.data().to_vec());
+
     match img_buffer {
         None => {
-            _model.image = None;
+            // no imagebuffer for this update. set haar outcomes to empy
+            _model.haar_outcome = None;
         }
         Some(ib) => {
            // let
            // ib.map( nannou::image::DynamicImage::ImageRgb8);
-            _model.image = Some(nannou::image::DynamicImage::ImageRgb8(ib));
+            // let ib_bw = nannou::image::imageops::grayscale(&ib);
+            // _model.image = Some(nannou::image::DynamicImage::ImageLuma8(ib_bw));
+            let outcome = _model.haar.scan_image(ib).unwrap();
+            _model.haar_outcome = Some(outcome);
+            // _model.image = Some(nannou::image::DynamicImage::ImageRgb8(ib));
+
         }
     }

@@ -125,9 +162,10 @@ fn view(_app: &App, _model: &Model, frame: Frame){
     // let texture = wgpu::Texture::from_path(_app, img_path).unwrap();

     // let image = nannou::image::DynamicImage::new_rgb8(640, 480);
-    match &_model.image {
-        Some(dynamic_image) => {
-            let texture = wgpu::Texture::from_image(_app, dynamic_image);
+    match &_model.haar_outcome {
+        Some(outcome) => {
+            // let i = outcome.dyn(/);
+            let texture = wgpu::Texture::from_image(_app, &outcome.dynamic_img);
             draw.texture(&texture);
         }
         _ => {}
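For a sense of what update() now kicks off on every frame: with the 424x240 format set in main(), scan_image() in src/visualhaar.rs (below) starts at a minimum window of frame_height / 3 pixels and multiplies it by 1.2 until it exceeds the frame. A minimal standalone sketch of that progression follows; it is not code from this commit, only the constants taken from the diff.

// Standalone sketch: window sizes scan_image() will try for a 424x240 frame,
// using the same rules as the commit: min_size = height / 3, growth factor 1.2.
fn main() {
    let (width, height) = (424u32, 240u32);
    let max_window_size = std::cmp::min(width, height) as usize; // 240
    let mut window_size = (height / 3) as usize;                 // 80
    while window_size < max_window_size {
        println!("window: {} px", window_size); // 80, 96, 115, 138, 165, 198, 237
        window_size = (window_size as f32 * 1.2) as usize;
    }
}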
src/test.rs  (new file: 60 lines added)

#[macro_use] extern crate log;
#[macro_use(s)] extern crate ndarray;
extern crate stopwatch;
use stopwatch::{Stopwatch};

use simplelog::*;
use v4l::{Buffer, CaptureDevice, MappedBufferStream};
use image;

mod visualhaar;

// use std::fs::File;

static mut CAMERA: Option<CaptureDevice> = None;

fn main() {
    CombinedLogger::init(
        vec![
            TermLogger::new(LevelFilter::Info, Config::default(), TerminalMode::Mixed),
            // WriteLogger::new(LevelFilter::Info, Config::default(), File::create("my_rust_binary.log").unwrap()),
        ]
    ).unwrap();

    warn!("test");
    let haar = visualhaar::HaarClassifier::from_xml("haarcascade_frontalface_alt2.xml").unwrap();
    // println!("Haar: {:?}", haar);

    let sw = Stopwatch::start_new();

    let frame = image::open("/home/ruben/Documents/Projecten/2020/rust/lena_orig.png");

    // let vec: Vec<u8> = frame.data().to_vec();
    // Convert to Nannou ImageBuffer
    // let img_buffer: Option<image::ImageBuffer<image::Rgb<u8>, Vec<u8>>> = image::ImageBuffer::from_raw(640,480, frame.data().to_vec());

    match frame {
        Err(e) => {
            // no imagebuffer for this update. set haar outcomes to empy
            error!("{:?}",e);
        }
        Ok(ib) => {
            // let
            // ib.map( nannou::image::DynamicImage::ImageRgb8);
            // let ib_bw = nannou::image::imageops::grayscale(&ib);
            // _model.image = Some(nannou::image::DynamicImage::ImageLuma8(ib_bw));
            let i = ib.as_rgb8().unwrap().clone();
            let image = haar.scan_image(i).unwrap().dynamic_img;
            image.save("/home/ruben/Documents/Projecten/2020/rust/lena_orig-output.png");
            info!("Scanning for faces took {}ms", sw.elapsed_ms());
            // _model.image = Some(nannou::image::DynamicImage::ImageRgb8(ib));
        }
    }
}
src/visualhaar.rs  (new file: 478 lines added)

extern crate xml;

use image;
use log::{info, trace, warn};
use std::{convert::TryInto, error::Error};

use ndarray as nd;

/// A haarclasifier based on opencv cascade XML files
/// Structure info from https://answers.opencv.org/question/8418/explanation-of-cascadexml-in-a-haar-classifier/
/// and the contribution of StevenPuttemans to OpenCV 3 Blueprints
/// Code examples of the book are https://github.com/OpenCVBlueprints/OpenCVBlueprints/tree/master/chapter_5/source_code
/// A little at SO https://stackoverflow.com/a/34897460
/// Nice and detailed: https://stackoverflow.com/a/41893728
/// Also great: https://medium.com/datadriveninvestor/understanding-and-implementing-the-viola-jones-image-classification-algorithm-85621f7fe20b
#[derive(Debug)]
pub struct HaarClassifier {
    /// width of the object model
    pub height: u8,
    /// height of the object model
    pub width: u8,
    pub stage_max_weak_count: u8,
    pub feature_max_cat_count: u8,
    pub stage_num: usize,
    pub stages: Vec<HaarClassifierStage>,
    pub features: Vec<HaarClassifierFeature>
}

#[derive(Debug)]
pub struct HaarClassifierStage {
    max_weak_count: usize,
    treshold: f64,
    weak_classifiers: Vec<HaarClassifierStageClassifier>
}

#[derive(Debug)]
pub struct HaarClassifierStageClassifier {
    left: f32,
    right: f32,
    feature: HaarClassifierFeature,
    threshold: f64, // stageTreshold - THRESHOLD_EPS = 1e-5f: https://github.com/opencv/opencv/blob/0cf479dd5ce8602040716811240bb7595de4ba9e/modules/objdetect/src/cascadedetect.cpp#L1503
    leaf_values: Vec<f64>
}

#[derive(Clone,Debug)]
pub struct HaarClassifierFeature {
    rects: Vec<HaarClassifierFeatureRect>,
    tilted: bool, // Dont' implement for now
}


impl HaarClassifierFeature{
    fn compute_feature(&self, image_window: &nd::ArrayView2<u32>, scale: &f64) -> f64 {
        let mut score = 0.;

        for rect in &self.rects{
            score += rect.compute_rect(image_window, scale);
        }

        score
    }

    fn draw(&self, draw_window: &mut nd::ArrayViewMut2<f64>, scale: &f64) {
        for rect in &self.rects{
            rect.draw(draw_window, scale);
        }
    }
}

#[derive(Clone,Debug)]
pub struct HaarClassifierFeatureRect{
    /// top left coordinate, x
    tl_x: u8,
    /// top left coordinate, y
    tl_y: u8,
    width: u8,
    height: u8,
    /// weight factor
    weight: f64,
}

impl HaarClassifierFeatureRect{
    fn get_coordinates_for_scale(&self, scale: &f64) -> (usize, usize, usize, usize) {
        let x1 = (self.tl_x as f64 * scale).floor() as usize;
        let y1 = (self.tl_y as f64 * scale).floor() as usize;
        let x2 = x1 + (self.width as f64 * scale).floor() as usize;
        let y2 = y1 + (self.height as f64 * scale).floor() as usize;

        (x1, y1, x2, y2)
    }

    /// The feature sum is finally calculated by first summing all values of the pixels inside the rectangle and then multiplying it with the weight factor. Finally, those weighted sums are combined together to yield as a final feature value. Keep in mind that all the coordinates retrieved for a single feature are in relation to the window/model size and not the complete image which is processed.
    fn compute_rect(&self, image_window: &nd::ArrayView2<u32>, scale: &f64) -> f64 {
        let (x1, y1, x2, y2) = self.get_coordinates_for_scale(scale);

        let sum = (image_window[[y2,x2]] + image_window[[y1,x1]] - image_window[[y1, x2]] - image_window[[y2, x1]]) as f64;
        let sum = (sum/(scale*scale)) * self.weight; // normalise: when the window grows, all values of the integral image become bigger by a factor scale-squared
        return sum;
    }

    fn draw(&self, draw_window: &mut nd::ArrayViewMut2<f64>, scale: &f64) {
        let (x1, y1, x2, y2) = self.get_coordinates_for_scale(scale);

        // TODO how to speed this up?

        // info!("Draw {} {} {} {} ({:?}),", x1, y1, x2, y2,self);
        let mut rect = draw_window.slice_mut(s![y1..y2, x1..x2]); // semi slow (initially 500ms)
        rect += self.weight; // super slow (initially 10.000 ms)

        // for x in x1..x2{
        //     for y in y1..y2{
        //         draw_window[[y, x]] = draw_window[[y, x]] as f64 + self.weight;
        //     }
        // }
    }
}


pub struct Outcome {
    // frame: image::ImageBuffer<image::Luma<u8>, Vec<u8>>,
    pub dynamic_img: image::DynamicImage,
}

impl HaarClassifier {

    /// parse from xml , using roxmltree
    pub fn from_xml(filename: &str) -> Result<Self, Box<dyn Error>> {
        let text = std::fs::read_to_string(filename).unwrap();
        let doc = match roxmltree::Document::parse(&text) {
            Ok(doc) => doc,
            Err(e) => {
                println!("Error: {}.", e);
                return Err(From::from(e));
            },
        };

        // root: <opencv_storage>
        let root_el = doc.root().first_element_child().unwrap();
        println!("{:?}", root_el);
        let cascade = root_el.first_element_child().unwrap();
        println!("{:?}", cascade);

        let features_el = cascade.children().find(|n| n.is_element() && n.has_tag_name("features")).unwrap();
        let stages_el = cascade.children().find(|n| n.is_element() && n.has_tag_name("stages")).unwrap();

        let height: u8 = cascade.children().find(|n| n.is_element() && n.has_tag_name("height")).unwrap().text().unwrap().parse()?;
        let width: u8 = cascade.children().find(|n| n.is_element() && n.has_tag_name("width")).unwrap().text().unwrap().parse()?;
        let stage_max_weak_count: u8 = cascade.children()
            .find(|n| n.is_element() && n.has_tag_name("stageParams")).unwrap()
            .children().find(|n| n.is_element() && n.has_tag_name("maxWeakCount")).unwrap()
            .text().unwrap()
            .parse()?;
        let feature_max_cat_count: u8 = cascade.children()
            .find(|n| n.is_element() && n.has_tag_name("featureParams")).unwrap()
            .children().find(|n| n.is_element() && n.has_tag_name("maxCatCount")).unwrap()
            .text().unwrap()
            .parse()?;
        let stage_num: usize = stages_el.children().count();
        let feature_num: usize = features_el.children().count();

        let mut stages = Vec::<HaarClassifierStage>::with_capacity(stage_num);
        let mut features = Vec::<HaarClassifierFeature>::with_capacity(feature_num);

        for node in features_el.children().filter(|n| n.is_element() && n.has_tag_name("_")) {
            let rect_els = node.children().find(|n| n.has_tag_name("rects")).unwrap().children().filter(|n| n.is_element());
            let mut rects = Vec::<HaarClassifierFeatureRect>::new();
            for rect in rect_els {
                // println!("{:?}",rect.text());
                let v: Vec<&str> = rect.text().unwrap().split_whitespace().collect();
                assert_eq!(v.len(), 5, "Expected values for features: x, y, width, height, weight");
                rects.push(HaarClassifierFeatureRect{
                    tl_x: v[0].parse()?,
                    tl_y: v[1].parse()?,
                    width: v[2].parse()?,
                    height: v[3].parse()?,
                    weight: v[4].parse()?,
                });
            }

            features.push(
                HaarClassifierFeature{
                    rects: rects,
                    tilted: false,
                }
            );
        }

        // loop the stages after the features, so we can immediately map features to the internalNodes
        for node in stages_el.children().filter(|n| n.is_element() && n.has_tag_name("_")) {
            let max_weak_count: usize = node.children().find(|n| n.has_tag_name("maxWeakCount")).unwrap().text().unwrap().parse()?;
            let stage_treshold: f64 = node.children().find(|n| n.has_tag_name("stageThreshold")).unwrap().text().unwrap().parse()?;
            let weak_classifier_els = node.children().find(|n| n.has_tag_name("weakClassifiers")).unwrap().children().filter(|n| n.is_element() && n.has_tag_name("_"));
            let mut weak_classifiers = Vec::<HaarClassifierStageClassifier>::new();

            for classifier_el in weak_classifier_els {
                // <internalNodes>: left right featureIndex threshold ... rest is ignored
                let mut internal_nodes = classifier_el.children().find(|n| n.has_tag_name("internalNodes")).unwrap().text().unwrap().trim().split_whitespace();
                let left: f32 = internal_nodes.next().unwrap().parse().unwrap();
                let right: f32 = internal_nodes.next().unwrap().parse().unwrap();
                let feature_idx: usize = internal_nodes.next().unwrap().parse().unwrap();
                let feature = features[feature_idx].clone(); // we just assume it exists. We copy instead of borrow to avoid lifetime parameters... I really need to figure these out.. ouch
                let threshold: f64 = internal_nodes.next().unwrap().parse().unwrap();
                // <leafValues>: the values for left and right ... the rest is ignored
                let leaf_values_split = classifier_el.children().find(|n| n.has_tag_name("leafValues")).unwrap().text().unwrap().trim().split_whitespace();
                let mut leaf_values: Vec<f64> = Vec::new();
                for leaf_value in leaf_values_split{
                    leaf_values.push(leaf_value.parse().unwrap());
                }
                weak_classifiers.push(
                    HaarClassifierStageClassifier{
                        left: left,
                        right: right,
                        feature: feature,
                        threshold: threshold,
                        leaf_values: leaf_values
                    }
                );
            }
            // let rectEls = node.children().find(|n| n.has_tag_name("rects")).unwrap().children().filter(|n| n.is_element());
            // let mut rects = Vec::<HaarClassifierFeatureRect>::new();
            // for rect in rectEls {
            //     println!("{:?}",rect.text());
            //     let v: Vec<&str> = rect.text().unwrap().split_whitespace().collect();
            //     assert_eq!(v.len(), 5, "Expected values for features: x, y, width, height, weight");
            //     rects.push(HaarClassifierFeatureRect{
            //         tl_x: v[0].parse()?,
            //         tl_y: v[1].parse()?,
            //         width: v[2].parse()?,
            //         height: v[3].parse()?,
            //         weight: v[4].parse()?,
            //     });
            // }

            stages.push(
                HaarClassifierStage{
                    max_weak_count: max_weak_count,
                    treshold: stage_treshold,
                    weak_classifiers: weak_classifiers,
                }
            );
        }

        // println!("{:?}", features);

        let haar = HaarClassifier{
            height: height,
            width: width,
            stage_max_weak_count: stage_max_weak_count,
            feature_max_cat_count: feature_max_cat_count,
            stage_num: stage_num,
            stages: stages,
            features: features,
        };
        return Ok(haar);
    }

    // pub fn from_xml(filename: &str) -> Result<Self, Box<dyn Error>> {
    //     let file = File::open(filename)?;
    //     let file = BufReader::new(file);
    //
    //     let parser = EventReader::new(file);
    //     let mut depth = 0;
    //
    //     let mut current_tags: Vec<String> = Vec::new();
    //
    //     let mut height: Option<u8> = None;
    //     let mut width: Option<u8> = None;
    //     let mut stage_max_weak_count: Option<u8> = None;
    //     let mut feature_max_cat_count: Option<u8> = None;
    //     let mut stage_num: Option<usize> = None;
    //     // let mut stages;
    //
    //     for e in parser {
    //         match e {
    //             Ok(XmlEvent::StartElement { name, .. }) => {
    //                 let l = name.local_name.clone();
    //                 current_tags.push(l);
    //                 depth += 1;
    //                 if l == "_" {
    //                     let last_tag = &current_tags[current_tags.len()-1];
    //                     match second_to_last_tag.as_ref() {
    //                         "stages" => ,
    //                         "weakClassifiers" =>
    //                         "features" =>
    //                     }
    //                 } else {}
    //             }
    //             Ok(XmlEvent::EndElement { name }) => {
    //                 let last_tag = current_tags.pop().expect("Unexpected end of tag list");
    //                 if last_tag != name.local_name {
    //                     return Err(From::from("Error in XML? Ending not the latest tag"));
    //                 }
    //
    //                 depth -= 1;
    //             }
    //             Ok(XmlEvent::Characters(value)) => {
    //                 let last_tag = &current_tags[current_tags.len()-1];
    //                 match last_tag.as_ref() {
    //                     "height" => height = Some(value.parse()?),
    //                     "width" => width = Some(value.parse()?),
    //                     "maxWeakCount" => stage_max_weak_count = Some(value.parse()?),
    //                     "maxCatCount" => feature_max_cat_count = Some(value.parse()?),
    //                     "stageNum" => stage_num = Some(value.parse()?),
    //                     "_" => {
    //                         let second_to_last_tag = &current_tags[current_tags.len()-2];
    //
    //                     }
    //                     _ => info!("Contents for unknown tag {:?} <{:?}>", value, last_tag)
    //                 }
    //             }
    //             Err(e) => return Err(From::from(e)),
    //             _ => {}
    //         }
    //     }
    //
    //     return Ok(HaarClassifier{
    //         height: height.unwrap(),
    //         width: width.unwrap(),
    //         stage_max_weak_count: stage_max_weak_count.unwrap(),
    //         feature_max_cat_count: feature_max_cat_count.unwrap(),
    //         stage_num: stage_num.unwrap(),
    //         stages: Vec::<HaarClassifierStage>::with_capacity(stage_num.unwrap()),
    //     });
    // }

    /// take an ImageBuffer and scan it for faces.
    pub fn scan_image(&self, frame: image::ImageBuffer<image::Rgb<u8>, Vec<u8>>) -> Result<Outcome, String> {
        let img_bw = image::imageops::grayscale(&frame);
        // let mut output_image = image::GrayImage::new(frame.width(), frame.height());

        let integral = Self::integral_image(&img_bw);

        let mut output_frame: nd::Array2<f64> = nd::Array::zeros((
            img_bw.dimensions().1 as usize,
            img_bw.dimensions().0 as usize,
        ));

        info!("Frame: {:?} {:?}", integral[[0,0]], integral[[integral.dim().0-1,integral.dim().1-1]]);

        // let rect = integral.slice(s![3..5, 2..4]);

        // let min_size = self.width;
        let min_size = frame.height() / 3; // TODO: Make min face size (or factor) variable
        let max_window_size = std::cmp::min(img_bw.dimensions().0, img_bw.dimensions().1) as usize;

        let mut window_size: usize = min_size.clone() as usize;
        let mut count_faces = 0;
        let mut count_not_faces = 0;
        while window_size < max_window_size {
            let scale = (window_size-1) as f64 / self.width as f64;
            // to calculate a rect, we would need a -1 row, if we ignore that precision and add one at the end: (eg required when an item has width 20 (== feature width))
            let scan_window_size = window_size + 1;
            info!("Window size: {:?} {:?}", window_size, scale);

            // step by scale.ceil() as this is 1px in the model's size. (small is probably unnecesarily fine-grained)
            for x in (0..(img_bw.dimensions().0 as usize - scan_window_size)).step_by(scale.ceil() as usize) {
                for y in (0..(img_bw.dimensions().1 as usize - scan_window_size)).step_by(scale.ceil() as usize) {
                    let window = integral.slice(s![y..y+scan_window_size, x..x+scan_window_size]);
                    let mut output_window = output_frame.slice_mut(s![y..y+scan_window_size, x..x+scan_window_size]);
                    if self.scan_window(window, scale, &mut output_window) {
                        count_faces += 1;
                    } else {
                        count_not_faces += 1;
                    }
                    // break;
                }
                // break;
            }
            // break;

            window_size = (window_size as f32 * 1.2) as usize; // TODO make grow-factor variable (now 1.2)
        }

        // let mut test_window = output_frame.slice_mut(s![10..20,40..50]);
        // test_window += 10.;

        // Find the largest non-NaN in vector, or NaN otherwise:
        let max_output_pixel = output_frame.iter().cloned().fold(0./0., f64::max);
        let min_output_pixel = output_frame.iter().cloned().fold(f64::NAN, f64::min);
        info!("Maximum pixel value in drawing: {:?} / min: {:?}", max_output_pixel, min_output_pixel);
        info!("Count accepted/rejected windows: {:?}/{:?}", count_faces, count_not_faces);

        // let max_output_pixel = output_frame.iter().par().unwrap().clone();
        output_frame -= min_output_pixel;
        output_frame /= (max_output_pixel-min_output_pixel) / 255.;
        // let image_frame = output_frame / (max_output_pixel as)

        // convert to image, thanks to https://stackoverflow.com/a/56762490
        let raw = output_frame.as_standard_layout().to_owned().into_raw_vec();
        let raw: Vec<u8> = raw.into_iter().map(|x| x as u8).collect();
        // info!("Img: {:?}", raw);
        let final_img = image::GrayImage::from_raw(frame.width(), frame.height(), raw).unwrap();

        // let dynamic = image::DynamicImage::ImageLuma8(img_bw);
        let dynamic = image::DynamicImage::ImageLuma8(final_img);
        Ok(Outcome{
            // frame: img_bw,
            dynamic_img: dynamic,
        })
    }

    fn scan_window(&self, integral_window: nd::ArrayView2<u32>, scale: f64, output_window: &mut nd::ArrayViewMut2<f64>) -> bool{
        let mut failed = false; // let's assume the cascade will work
        for stage in &self.stages{
            let mut stage_sum = 0.;
            for classifier in &stage.weak_classifiers{
                // or 'stumps'
                let feature = classifier.feature.compute_feature(&integral_window, &scale);
                let stddev = 1.; // TODO what should we use here?
                let threshold = classifier.threshold * stddev;
                let idx = if feature < threshold {
                    stage_sum += classifier.leaf_values[0];
                    classifier.left
                } else {
                    stage_sum += classifier.leaf_values[1];
                    // weak classifier bigger then threshold... draw it!
                    classifier.feature.draw(output_window, &scale);

                    classifier.right
                };

                // classifier.feature.draw(output_window, &scale);
            }
            if stage_sum < stage.treshold{
                failed = true;
                break;
            }
            // break; // TODO we're super slow and mainly want to get a gist of what is happening .so we only render stage 1
        }
        if failed {
            return false;
        } else {
            // info!("Face found?");
            return true;
        }
    }


    /// turn the ImageBuffer into an integral image vector for faster calculations of areas
    /// the Array2 has y,x (analogous to numpy arrays when create from images)
    pub fn integral_image(i: &image::ImageBuffer<image::Luma<u8>, Vec<u8>>) -> nd::Array2<u32>{
        // Vec<u32>
        // let size = i.dimensions().0 * i.dimensions().1;
        let mut integral: nd::Array2<u32> = nd::Array::zeros((
            i.dimensions().1 as usize,
            i.dimensions().0 as usize,
        ));
        let mut cumsum: nd::Array2<u32> = nd::Array::zeros((
            i.dimensions().1 as usize,
            i.dimensions().0 as usize,
        ));

        for y in 0..i.dimensions().1{
            for x in 0..i.dimensions().0{
                cumsum[[y as usize, x as usize]] = i.get_pixel(x, y).0.first().unwrap().clone() as u32;
                if y > 0 {
                    cumsum[[y as usize, x as usize]] += cumsum[[(y - 1) as usize, x as usize]];
                }

                integral[[y as usize, x as usize]] = cumsum[[y as usize, x as usize]];
                if x > 0 {
                    integral[[y as usize, x as usize]] += integral[[y as usize, (x - 1) as usize]];
                }
            }
        }

        integral
    }
}

impl Outcome {

}
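The rectangle lookups in compute_rect() rely on the summed-area table built by integral_image(): the sum over any axis-aligned rectangle comes from four corner lookups, which is also why scan_image() adds one extra row and column to the scan window. Below is a minimal standalone sketch of that identity; it is not code from this commit and only assumes the ndarray crate already used above.

// Standalone sketch of the summed-area-table identity used by compute_rect().
use ndarray as nd;

fn main() {
    // 4x4 "image" in which every pixel has value 1.
    let img: nd::Array2<u32> = nd::Array2::ones((4, 4));

    // integral[[y, x]] = sum of img over rows 0..=y and columns 0..=x.
    let mut integral: nd::Array2<u32> = nd::Array2::zeros((4, 4));
    for y in 0..4usize {
        for x in 0..4usize {
            integral[[y, x]] = img[[y, x]]
                + if y > 0 { integral[[y - 1, x]] } else { 0 }
                + if x > 0 { integral[[y, x - 1]] } else { 0 }
                - if y > 0 && x > 0 { integral[[y - 1, x - 1]] } else { 0 };
        }
    }

    // Four lookups give the sum over rows y1+1..=y2 and columns x1+1..=x2,
    // which is why the scan window carries one extra row and column.
    let (y1, x1, y2, x2) = (1usize, 1usize, 3usize, 3usize);
    let sum = integral[[y2, x2]] + integral[[y1, x1]]
            - integral[[y1, x2]] - integral[[y2, x1]];
    assert_eq!(sum, 4); // the 2x2 block of ones at rows 2..=3, columns 2..=3
    println!("rect sum = {}", sum);
}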