forked from security_vision/semantic_graph
final version for greens report
This commit is contained in:
parent c81ec7bf18
commit 8cb37534fc
7 changed files with 453 additions and 1132 deletions
@@ -1,9 +1,4 @@
@font-face {
font-family: 'Lexend Mega Regular';
font-style: normal;
font-weight: normal;
src: local('Lexend Mega Regular'), url('LexendMega-Regular.woff') format('woff');
}

:root {
--color1: #9741f9;

@@ -38,13 +33,13 @@ body {
/* background: linear-gradient(to top, #040308, #AD4A28, #DD723C, #fc7001, #dcb697, #9ba5ae, #3e5879, #020b1a); */
background: var(--body-back);
/* background: #9cb3c9; */
font-family: sans-serif;
font-family: 'CelloSansRegular', sans-serif;
min-height: 100vh;
}

svg {
cursor: grab;
font-family: sans-serif;
font-family: 'CelloSansRegular', sans-serif;
}

svg.dragging {

@@ -136,7 +131,7 @@ svg #header #titlePath, svg #header #title2Path, svg #header #subtitlePath {

svg #header text {
font-size: 180px;
font-family: "Lexend Mega Regular";
font-family: "CelloSansRegular", sans-serif;
/*Comfortaa*/
opacity: .8;
fill: var(--title-color);

@@ -627,5 +622,8 @@ p.subtitle {
}

#sources a{
color: lightblue;
color: var(--link-hover-related-color);
}
#sources a:hover{
color: var(--link-hover-color);
}
182 www/graph.js
@@ -114,6 +114,7 @@ const CONFIG = {
"Software Developer",
"Dataset Developer",
"Related Institutions",
"Is Department Of",
"Involved Entities",
],

@@ -154,6 +155,10 @@ const CONFIG = {
"label": "is related to",
"swap": true,
},
"Is Department Of": {
"label": "is part of",
"swap": false,
},
"Involved Entities": {
"label": "is involved in",
"swap": true,

@@ -363,6 +368,8 @@ function getLinkId(link) {

class NodeMap {
constructor(parent) {
this.ready = false;
this.readyCallback = false;
this.root = d3.select(parent);
this.resizeEvent = window.addEventListener('resize', this.resize.bind(this));
this.tooltipEl = document.getElementById('tooltip');

@@ -542,57 +549,46 @@ class NodeMap {

}
});
this.title = this.container.append('g').attr('id', 'header');
// this.title = this.container.append('g').attr('id', 'header');

const titleFeature = {
"type": "LineString",
"coordinates": []
};
const title2Feature = {
"type": "LineString",
"coordinates": []
};
const subtitleFeature = {
"type": "LineString",
"coordinates": []
};
for (let index = 26; index < 70; index++) {
// projection apparently tries to find the shortest path between two points
// which is NOT following a lat/lon line on the globe
titleFeature.coordinates.push([index, 52]);
title2Feature.coordinates.push([index, 50.5]);
subtitleFeature.coordinates.push([index, 49]);
}
this.title.append("path")
.attr("id", "titlePath")
.attr("d", this.proj(titleFeature))
;
this.title.append("path")
.attr("id", "title2Path")
.attr("d", this.proj(title2Feature))
;
this.title.append("path")
.attr("id", "subtitlePath")
.attr("d", this.proj(subtitleFeature))
;
this.title.append("text")
.html('<textPath xlink:href="#titlePath">Remote Biometric</textPath>')
this.title.append("text")
.html('<textPath xlink:href="#title2Path">Identification</textPath>')
this.title.append("text")
.attr("id", "subtitle")
.html('<textPath xlink:href="#subtitlePath">' + CONFIG.subtitle + '</textPath>')

// this.title.append('text')
// .attr('class', 'title')
// .attr('x', 1000)
// .attr('y', 1000)
// .text(CONFIG.title);
// this.title.append('text')
// .attr('class', 'subtitle')
// .attr('x', 1000)
// .attr('y', 1200)
// .text(CONFIG.subtitle);
// const titleFeature = {
// "type": "LineString",
// "coordinates": []
// };
// const title2Feature = {
// "type": "LineString",
// "coordinates": []
// };
// const subtitleFeature = {
// "type": "LineString",
// "coordinates": []
// };
// for (let index = 26; index < 70; index++) {
// // projection apparently tries to find the shortest path between two points
// // which is NOT following a lat/lon line on the globe
// titleFeature.coordinates.push([index, 52]);
// title2Feature.coordinates.push([index, 50.5]);
// subtitleFeature.coordinates.push([index, 49]);
// }
// this.title.append("path")
// .attr("id", "titlePath")
// .attr("d", this.proj(titleFeature))
// ;
// this.title.append("path")
// .attr("id", "title2Path")
// .attr("d", this.proj(title2Feature))
// ;
// this.title.append("path")
// .attr("id", "subtitlePath")
// .attr("d", this.proj(subtitleFeature))
// ;
// this.title.append("text")
// .html('<textPath xlink:href="#titlePath">Remote Biometric</textPath>')
// this.title.append("text")
// .html('<textPath xlink:href="#title2Path">Identification</textPath>')
// this.title.append("text")
// .attr("id", "subtitle")
// .html('<textPath xlink:href="#subtitlePath">' + CONFIG.subtitle + '</textPath>')

this.link = this.container.append("g")
.attr('class', 'links')

@@ -668,6 +664,34 @@ class NodeMap {
this.update();

setTimeout(() => this.calculateLabels(), 1000);

this.ready = true;
if(this.readyCallback)
this.readyCallback();
}

triggerReset(){
const cb = () => {
this.deselectNode();
this.resetZoom();
}
if(this.ready){
cb();
} else {
this.readyCallback = cb;
}
}

triggerSelect(toSelect){
const cb = () => {
const node = this.graph.nodes.filter(n => n.id == toSelect)[0]
this.selectNode(node);
}
if(this.ready){
cb();
} else {
this.readyCallback = cb;
}
}

resetZoom() {
@@ -908,6 +932,39 @@ class NodeMap {
this.sourcesEl.classList.remove('visible');
}

hoverNode(evt, n){
console.log('hover!', n)
// d3.select(this).classed('hover', true);
const links = document.getElementsByClassName('link');
const linkedLinks = [];
for (let link of links) {
const l = d3.select(link).datum();
if (n == l.target || n == l.source) {
link.classList.add('linkedHover');
// make sure it's the last element, so it's drawn on top
// link.parentNode.appendChild(link); .. causes glitches
// find the related node:
const otherNode = n == l.target ? l.source : l.target;
const otherNodeEl = document.getElementById(otherNode.id);
otherNodeEl.classList.add('linkedHover');
linkedLinks.push(l);
}
}

if(evt){
this.showTooltip(evt.target, n, linkedLinks);
}

}

endHoverNode(n){
this.hideTooltip();
const links = document.getElementsByClassName('linkedHover');
while (links.length) {
links[0].classList.remove('linkedHover');
}
}

update() {
// console.log(this.graph)

@@ -925,31 +982,10 @@ class NodeMap {
evt.stopPropagation(); this.selectNode(n);
});
group.on("mouseover", (evt, n) => {
// d3.select(this).classed('hover', true);
const links = document.getElementsByClassName('link');
const linkedLinks = [];
for (let link of links) {
const l = d3.select(link).datum();
if (n == l.target || n == l.source) {
link.classList.add('linkedHover');
// make sure it's the last element, so it's drawn on top
// link.parentNode.appendChild(link); .. causes glitches
// find the related node:
const otherNode = n == l.target ? l.source : l.target;
const otherNodeEl = document.getElementById(otherNode.id);
otherNodeEl.classList.add('linkedHover');
linkedLinks.push(l);
}
}
this.showTooltip(evt.target, n, linkedLinks);

this.hoverNode(evt, n);
});
group.on("mouseout", (evt, n) => {
this.hideTooltip();
const links = document.getElementsByClassName('linkedHover');
while (links.length) {
links[0].classList.remove('linkedHover');
}
this.endHoverNode(n);
});
// group.append('circle').attr("r", 5 /*this.nodeSize*/);
group.append('path')

@@ -2,8 +2,9 @@

<head>
<meta charset="utf-8">
<link rel="stylesheet" media="screen" href="https://fontlibrary.org//face/cello-sans" type="text/css"/>
<link rel="stylesheet" href="graph.css">
<title>Remote Biometric Identification | A survey of the European Union</title>
<title>Biometric and Behavioural Mass Surveillance in EU Member States</title>
</head>

<body>

@@ -14,8 +15,8 @@
<div id='map'></div>

<header>
<h1>Remote Biometric Identification</h1>
<p class='subtitle'>A survey of the European Union</p>
<h1>Biometric and Behavioural Mass Surveillance</h1>
<p class='subtitle'>in EU Member States</p>

<aside id="filters">
<h3 onclick="this.parentNode.classList.toggle('hide');">Filter</h3>

@@ -3,8 +3,8 @@
:root {
--border-radius: 5px;
--box-shadow: 2px 2px 10px;
--color: blue;
--hover-color: lightblue;
--color: #d1bce9;
--hover-color: #9741f9;
--color-accent: #118bee15;
--color-bg: #fff;
--color-bg-secondary: #e9e9e9;

@@ -13,7 +13,7 @@
--color-shadow: #f4f4f4;
--color-text: #000;
--color-text-secondary: #999;
--font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Ubuntu, Cantarell, "Helvetica Neue", sans-serif;
--font-family: 'CelloSansRegular', sans-serif;
--hover-brightness: 1.2;
--justify-important: center;
--justify-normal: left;

@@ -28,7 +28,7 @@ iframe {
position: fixed;
top: 0;
left: 0;
width: calc(100% - var(--width-content) - 2rem);
width: calc(100% - var(--width-content) - 4rem);
height: 100vh;
border-style: none none none none;
/* border-width: 3px;

@@ -87,8 +87,8 @@ header,
main {
margin: 0 0 0 auto;
max-width: var(--width-content);
width: calc(100% - 2rem);
padding: .5rem 1rem;
width: calc(100% - 4rem);
padding: .5rem 2rem;
}

header{

@@ -100,7 +100,7 @@ header{
background-color: white;
border-bottom: solid 1px var(--color-bg-secondary);
z-index: 999;
max-width: calc(var(--width-content) + 2rem);
max-width: calc(var(--width-content) + 4rem);
}

@media screen and (max-width: 740px) {

@@ -171,7 +171,7 @@ header a strong {

header nav > a{
color: var(--color-text);
padding-left: .5rem
/* padding-left: .5rem */
}
header nav img {
margin: 1rem 0;

@@ -186,11 +186,14 @@ section header {
nav {
align-items: center;
display: flex;
font-weight: bold;
/* font-weight: bold; */
justify-content: space-between;
padding:0 2rem;
/* margin-bottom: 7rem; */
}

nav > a{ font-weight: bold;}

nav ul {
list-style: none;
padding: 0;

@@ -364,21 +367,28 @@ sup::after{
} */

/* Links */
a {
color: var(--color);
display: inline-block;
font-weight: bold;
a, a sup {
color: inherit;
border-bottom: solid 2px var(--color);
display: inline;
/* font-weight: bold; */
text-decoration: none;
}

a:hover {
a:hover, a:hover sup {
color: var(--hover-color);
border-color: var(--hover-color);
/* text-decoration: underline; */
}
a:hover sup{
a.footnote-ref{
border:none;
/* background-color: var(--hover-color); */
}

nav a{
border: none;
}

a b,
a em,
a i,

@@ -586,8 +596,8 @@ h1.Title{
.keypoints{
background-color: black;
color: var(--color-bg-secondary);
padding: 1rem;
margin: 0 -1rem;
padding: 2rem;
margin: 0 -2rem;
font-weight: bold;
}
.keypoints > p > strong{

@@ -597,4 +607,4 @@ h1.Title{

a.maplink{
cursor: pointer;
}
}

@@ -5,6 +5,88 @@
<meta name="generator" content="pandoc" />
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" />
<title>Biometric and Behavioural Mass Surveillance in EU Member States</title>
<script>
(function(document, history, location) {
var HISTORY_SUPPORT = !!(history && history.pushState);

var anchorScrolls = {
ANCHOR_REGEX: /^#[^ ]+$/,
OFFSET_HEIGHT_PX: 100,

/**
* Establish events, and fix initial scroll position if a hash is provided.
*/
init: function() {
this.scrollToCurrent();
window.addEventListener('hashchange', this.scrollToCurrent.bind(this));
document.body.addEventListener('click', this.delegateAnchors.bind(this));
},

/**
* Return the offset amount to deduct from the normal scroll position.
* Modify as appropriate to allow for dynamic calculations
*/
getFixedOffset: function() {
return this.OFFSET_HEIGHT_PX;
},

/**
* If the provided href is an anchor which resolves to an element on the
* page, scroll to it.
* @param {String} href
* @return {Boolean} - Was the href an anchor.
*/
scrollIfAnchor: function(href, pushToHistory) {
var match, rect, anchorOffset;

if(!this.ANCHOR_REGEX.test(href)) {
return false;
}

match = document.getElementById(href.slice(1));

if(match) {
rect = match.getBoundingClientRect();
anchorOffset = window.pageYOffset + rect.top - this.getFixedOffset();
window.scrollTo(window.pageXOffset, anchorOffset);

// Add the state to history as-per normal anchor links
if(HISTORY_SUPPORT && pushToHistory) {
history.pushState({}, document.title, location.pathname + href);
}
}

return !!match;
},

/**
* Attempt to scroll to the current location's hash.
*/
scrollToCurrent: function() {
this.scrollIfAnchor(window.location.hash);
},

/**
* If the click event's target was an anchor, fix the scroll position.
*/
delegateAnchors: function(e) {
var elem = e.target;

if(
elem.nodeName === 'A' &&
this.scrollIfAnchor(elem.getAttribute('href'), true)
) {
e.preventDefault();
}
}
};

window.addEventListener(
'DOMContentLoaded', anchorScrolls.init.bind(anchorScrolls)
);
})(window.document, window.history, window.location);
</script>
<link rel="stylesheet" media="screen" href="https://fontlibrary.org//face/cello-sans" type="text/css"/>
<link rel="stylesheet" href="report.css" />
<style type="text/css">

@@ -38,11 +120,12 @@
// navItemEl.classList.remove('active');
} else {
if(toSelect === null) {
frameEl.contentWindow.mapGraph.deselectNode();
frameEl.contentWindow.mapGraph.resetZoom();
frameEl.contentWindow.mapGraph.triggerReset();
// frameEl.contentWindow.mapGraph.deselectNode();
// frameEl.contentWindow.mapGraph.resetZoom();
} else {
const node = frameEl.contentWindow.mapGraph.graph.nodes.filter(n => n.id == toSelect)[0]
frameEl.contentWindow.mapGraph.selectNode(node);
frameEl.contentWindow.mapGraph.triggerSelect(toSelect);
// frameEl.contentWindow.mapGraph.selectNode(node);
}
// navItemEl.classList.add('active');
}

@@ -231,7 +314,7 @@
</tr>
<tr class="odd">
<th>BPOL</th>
<td>German Federal Police</td>
<td><a class="maplink" data-title="German Federal Police (Bundespolizei)">German Federal Police</a></td>
</tr>
<tr class="even">
<th><a class="maplink" data-title="CATCH">CATCH</a></th>

@@ -535,7 +618,7 @@
<li><p>Private and public actors are increasingly deploying “smart surveillance” solutions including RBI technologies which, if left unchecked, could become biometric mass surveillance.</p></li>
<li><p>Facial recognition technology has been the most discussed of the RBI technologies. However, there seems to be little understanding of the ways in which this technology might be applied and the potential impact of such a broad range of applications on the fundamental rights of European citizens.</p></li>
<li><p>The development of RBI systems by authoritarian regimes which may subsequently be exported to and used within Europe is of concern. Not only as it pertains to the deployments of such technologies but also the lack of adequate insight into the privacy practices of the companies supplying the systems.</p></li>
<li><p>Four main positions have emerged among political actors with regard to the deployments of RBI technologies and their potential impact on fundamental rights: 1) active promotion 2) support with safeguards; 2) moratorium and 3) outright ban.</p></li>
<li><p>Four main positions have emerged among political actors with regard to the deployments of RBI technologies and their potential impact on fundamental rights: 1) active promotion 2) support with safeguards; 3) moratorium and 4) outright ban.</p></li>
</ul>
<p><strong>CHAPTER 2: Technical Overview</strong></p>
<ul>

@@ -613,11 +696,11 @@
<li><p>The <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a> has elicited numerous warnings regarding data protection and the rights to privacy from both public and private organisations. However the lack of contestation and social debate around the issues of privacy and human rights in relation to such projects as the Hungarian Government’s Dragonfly is striking.</p></li>
</ul>
<p><strong>CHAPTER 11: Recommendations</strong></p>
<p><strong>1. The EU should prohibit the deployment of both indiscriminate and “targeted” Remote Biometric and Behavioural Identification technologies in public spaces, as it amounts to mass surveillance.</strong></p>
<p><strong>1. The EU should prohibit the deployment of both indiscriminate and “targeted” Remote Biometric and Behavioural Identification (RBI) technologies in public spaces (real-time RBI), as well as ex-post identification (or forensic RBI). Our analysis shows that both practices, even when used for “targeted surveillance”, amount to mass surveillance.</strong></p>
<ul>
<li><p>The EU should <strong>prohibit the deployment of Remote Biometric and Behavioural Identification technologies in public spaces</strong>, in line with similar recommendations made by the EDPB and the EDPS.<a href="#fn1" class="footnote-ref" id="fnref1" role="doc-noteref"><sup>1</sup></a></p></li>
<li><p>This concerns indiscriminate use of such technologies in public spaces and concerns not only the acquisition and processing of <strong>faces, but also gait, voice and other biometric or behavioural signals.</strong></p></li>
<li><p>It also concerns the use of such technologies for <strong>“targeted surveillance”</strong> as the practice might be considered as expansive and intrusive to an extent that it would constitute disproportionate interference with the rights to privacy and personal data protection.</p></li>
<li><p>In line with similar recommendations made by the EDPB and the EDPS,<a href="#fn1" class="footnote-ref" id="fnref1" role="doc-noteref"><sup>1</sup></a> the EU should <strong>prohibit the deployment of Remote Biometric and Behavioural Identification technologies in public spaces</strong>.</p></li>
<li><p>In line with the position of the EDRi regarding the EU Artificial Intelligence Act<a href="#fn2" class="footnote-ref" id="fnref2" role="doc-noteref"><sup>2</sup></a>, our research supports the notion that the <strong>distinction between “real-time” and “ex-post” is irrelevant</strong> when it comes to the impact of these technologies on fundamental rights. Ex-post identification carries in fact a higher potential of harm, as more data can be pooled from different sources to proceed to the identification. The use of such technologies for <strong>“targeted surveillance” is thus equally harmful</strong> as the practice might be considered as expansive and intrusive to an extent that it would constitute disproportionate interference with the rights to privacy and personal data protection.</p></li>
<li><p>This concerns not only the acquisition and processing of <strong>faces, but also gait, voice and other biometric or behavioural signals.</strong></p></li>
</ul>
<p><strong>2. The EU should strengthen transparency and accountability of biometric and behavioural recognition technologies</strong></p>
<ul>

@@ -696,10 +779,10 @@

<p>The intrusiveness of the system, and its impact on fundamental rights is best exemplified by its deployment in the Xinjiang province. The province capital, Urumqi, is chequered with <strong>checkpoints and identification stations</strong>. Citizens need to submit to facial recognition ID checks in supermarkets, hotels, train stations, highway stations and several other public spaces (Chin and Bürge 2017). The information collected through the cameras is centralised and matched against other <strong>biometric data</strong> such as <strong>DNA samples</strong> and <strong>voice samples</strong>. This allows the government to attribute <strong>trust-worthiness scores</strong> (trustworthy, average, untrustworthy) and thus generate a list of individuals that can become candidates for detention (Wang 2018).</p>
<p>European countries’ deployments are far from the Chinese experience. But the companies involved in China’s pervasive digital surveillance network (such as <strong>Tencent</strong>, <strong><a class="maplink" data-title="Dahua Technologies">Dahua Technology</a></strong>, <strong><a class="maplink" data-title="Hikvision">Hikvision</a></strong>, <strong>SenseTime</strong>, <strong>ByteDance</strong> and <strong><a class="maplink" data-title="Huawei">Huawei</a></strong>) are exporting their know-how to Europe, under the form of “<strong>safe city” packages</strong>. <strong><a class="maplink" data-title="Huawei">Huawei</a></strong> is one of the most active in this regard. On the European continent, the city of Belgrade has for example deployed an extensive communication network of more than 1.000 cameras which collect up to 10 body and facial attributes (Stojkovski 2019). The cameras, deployed on poles, major traffic crossings and a large number of public spaces allow the Belgrade police to monitor large parts of the city centre, collect <strong>biometric information</strong> and communicate it directly to police officers deployed in the field. Belgrade has the most advanced deployment of <a class="maplink" data-title="Huawei">Huawei</a>’s surveillance technologies on the European continent, but similar projects are being implemented by other corporations – including the <strong>European companies <a class="maplink" data-title="Thales">Thales</a>, <a class="maplink" data-title="Engie Ineo">Engie Ineo</a> or <a class="maplink" data-title="IDEMIA">Idemia</strong> – in other European cities and many “Safe City” deployments are planned soon in EU countries such as France, Italy, Spain, Malta, and Germany (Hillman and McCalpin 2019). Furthermore, contrary to the idea China would be the sole exporter of Remote Biometric Identification technologies, EU companies have substantially developed their exports in this domain over the last years (Wagner 2021)</p>
<p>The turning point of public debates on facial recognition in Europe was probably <strong>the <a class="maplink" data-title="Clearview AI">Clearview AI</a> controversy</strong> in 2019-2020. <strong><a class="maplink" data-title="Clearview AI">Clearview AI</a></strong>, a company founded by Hoan Ton-That and Richard Schwartz in the United States, maintained a relatively secret profile until a New York Times article revealed in late 2019 that it was selling <strong>facial recognition technology</strong> to law enforcement. In February 2020, it was reported that the client list of <a class="maplink" data-title="Clearview AI">Clearview AI</a> had been stolen, and a few days later the details of the list were leaked (Mac, Haskins, and McDonald 2020). To the surprise of many in Europe, in addition to US government agencies and corporations, it appeared that the <strong>Metropolitan Police Service</strong> <strong>(London, UK)</strong>, as well as <strong>law enforcement from Belgian, Denmark, Finland, France, Ireland, <a class="maplink" data-title="Carabinieri">Italy</a>, Latvia, Lithuania, Malta, the Netherlands, Norway, Portugal, Serbia, Slovenia, Spain, Sweden, and Switzerland were on the client list.</strong> The controversy grew larger as it emerged that <a class="maplink" data-title="Clearview AI">Clearview AI</a> had (semi-illegally) harvested a large number of images from social media platforms such as <strong><a class="maplink" data-title="Facebook">Facebook</a>, YouTube</strong> and <strong>Twitter</strong> in order to constitute the datasets against which clients were invited to carry out searches (Mac, Haskins, and McDonald 2020).</p>
<p>European countries’ deployments are far from the Chinese experience. But the companies involved in China’s pervasive digital surveillance network (such as <strong>Tencent</strong>, <strong><a class="maplink" data-title="Dahua Technologies">Dahua Technology</a></strong>, <strong><a class="maplink" data-title="Hikvision">Hikvision</a></strong>, <strong>SenseTime</strong>, <strong>ByteDance</strong> and <strong><a class="maplink" data-title="Huawei">Huawei</a></strong>) are exporting their know-how to Europe, under the form of “<strong>safe city” packages</strong>. <strong><a class="maplink" data-title="Huawei">Huawei</a></strong> is one of the most active in this regard. On the European continent, the city of Belgrade has for example deployed an extensive communication network of more than 1.000 cameras which collect up to 10 body and facial attributes (Stojkovski 2019). The cameras, deployed on poles, major traffic crossings and a large number of public spaces allow the Belgrade police to monitor large parts of the city centre, collect <strong>biometric information</strong> and communicate it directly to police officers deployed in the field. Belgrade has the most advanced deployment of <a class="maplink" data-title="Huawei">Huawei</a>’s surveillance technologies on the European continent, but similar projects are being implemented by other corporations – including the <strong>European companies <a class="maplink" data-title="Thales">Thales</a>, <a class="maplink" data-title="Engie Ineo">Engie Ineo</a> or <a class="maplink" data-title="IDEMIA">Idemia</strong> – in other European cities and many “Safe City” deployments are planned soon in EU countries such as France, Italy, Spain, <a class="maplink" data-title="Safe City Malta">Malta</a>, and Germany (Hillman and McCalpin 2019). Furthermore, contrary to the idea China would be the sole exporter of Remote Biometric Identification technologies, EU companies have substantially developed their exports in this domain over the last years (Wagner 2021)</p>
<p>The turning point of public debates on facial recognition in Europe was probably <strong>the <a class="maplink" data-title="Clearview AI">Clearview AI</a> controversy</strong> in 2019-2020. <strong><a class="maplink" data-title="Clearview AI">Clearview AI</a></strong>, a company founded by Hoan Ton-That and Richard Schwartz in the United States, maintained a relatively secret profile until a New York Times article revealed in late 2019 that it was selling <strong>facial recognition technology</strong> to law enforcement. In February 2020, it was reported that the client list of <a class="maplink" data-title="Clearview AI">Clearview AI</a> had been stolen, and a few days later the details of the list were leaked (Mac, Haskins, and McDonald 2020). To the surprise of many in Europe, in addition to US government agencies and corporations, it appeared that the Metropolitan Police Service (London, UK), as well as <strong>law enforcement from Belgian, Denmark, Finland, France, Ireland, <a class="maplink" data-title="Carabinieri">Italy</a>, Latvia, Lithuania, <a class="maplink" data-title="Maltese State">Malta</a>, the <a class="maplink" data-title="Dutch Police">Netherlands</a>, Norway, Portugal, Serbia, <a class="maplink" data-title="Slovenian Police">Slovenia</a>, Spain, <a class="maplink" data-title="Use of Clearview AI in Sweden">Sweden</a>, and Switzerland were on the client list.</strong> The controversy grew larger as it emerged that <a class="maplink" data-title="Clearview AI">Clearview AI</a> had (semi-illegally) harvested a large number of images from social media platforms such as <strong><a class="maplink" data-title="Facebook">Facebook</a>, YouTube</strong> and <strong>Twitter</strong> in order to constitute the datasets against which clients were invited to carry out searches (Mac, Haskins, and McDonald 2020).</p>
<p>The news of the hacking strengthened a strong push-back movement against the development of facial recognition technology by companies such as <a class="maplink" data-title="Clearview AI">Clearview AI</a>, as well as their use by government agencies. In 2018, <strong>Massachusetts Institute of Technology</strong> (MIT) scholar and <strong><a class="maplink" data-title="Algorithmic Justice League">Algorithmic Justice League</a></strong> founder <strong>Joy Buolamwini</strong> together with <strong>Temnit Gebru</strong> had published the report <em>Gender Shades</em> (Buolamwini and Gebru 2018), in which they assessed the racial bias in the face recognition datasets and algorithms used by companies such as <a class="maplink" data-title="IBM">IBM</a> and Microsoft. Buolamwini and Gebru found that <strong>algorithms performed generally worse on darker-skinned faces, and in particular darker-skinned females, with error rates up to 34% higher than lighter-skinned males</strong> (Najibi 2020). <a class="maplink" data-title="IBM">IBM</a> and Microsoft responded by amending their systems, and a re-audit showed less bias. Not all companies responded equally. <strong>Amazon’s Rekognition</strong> system, which was included in the second study continued to show a 31% lower rate for darker-skinned females. The same year <strong>ACLU</strong> conducted another key study on Amazon’s Rekognition, using the pictures of <strong>members of congress against a dataset of mugshots from law enforcemen</strong>t. 28 members of Congress, <strong>largely people of colour were incorrectly matched</strong> (Snow 2018). Activists engaged lawmakers. In 2019, the Algorithmic Accountability Act allowed the Federal Trade Commission to regulate private companies’ uses of facial recognition. In 2020, several companies, including <a class="maplink" data-title="IBM">IBM</a>, Microsoft, and Amazon, announced a moratorium on the development of their facial recognition technologies. Several US cities, including <strong>Boston</strong>, <strong>Cambridge</strong> (Massachusetts) <strong>San Francisco</strong>, <strong>Berkeley</strong>, <strong>Portland</strong> (Oregon), have also banned their police forces from using the technology.</p>
<p>The news of the hacking strengthened a strong push-back movement against the development of facial recognition technology by companies such as <a class="maplink" data-title="Clearview AI">Clearview AI</a>, as well as their use by government agencies. In 2018, <strong>Massachusetts Institute of Technology</strong> (MIT) scholar and <strong><a class="maplink" data-title="Algorithmic Justice League">Algorithmic Justice League</a></strong> founder <strong>Joy Buolamwini</strong> together with <strong>Temnit Gebru</strong> had published the report <em>Gender Shades</em> (Buolamwini and Gebru 2018), in which they assessed the racial bias in the face recognition datasets and algorithms used by companies such as <a class="maplink" data-title="IBM">IBM</a> and Microsoft. Buolamwini and Gebru found that <strong>algorithms performed generally worse on darker-skinned faces, and in particular darker-skinned females, with error rates up to 34% higher than lighter-skinned males</strong> (Najibi 2020). <a class="maplink" data-title="IBM">IBM</a> and Microsoft responded by amending their systems, and a re-audit showed less bias. Not all companies responded equally. <strong>Amazon’s Rekognition</strong> system, which was included in the second study continued to show a 31% lower rate for darker-skinned females. The same year <strong>ACLU</strong> conducted another key study on Amazon’s Rekognition, using the pictures of <strong>members of congress against a dataset of mugshots from law enforcement</strong>. 28 members of Congress, <strong>largely people of colour were incorrectly matched</strong> (Snow 2018). A number of organizations seized the problem as a policy issue (<strong><a class="maplink" data-title="Black in AI">Black in AI</a></strong>, <strong><a class="maplink" data-title="Algorithmic Justice League">Algorithmic Justice League</a>, <a class="maplink" data-title="Data for Black Lives">Data for Black Lives</a></strong>) and some engaged lawmakers. In 2019, the Algorithmic Accountability Act allowed the Federal Trade Commission to regulate private companies’ uses of facial recognition. In 2020, several companies, including <a class="maplink" data-title="IBM">IBM</a>, Microsoft, and Amazon, announced a moratorium on the development of their facial recognition technologies. Several US cities, including <strong>Boston</strong>, <strong>Cambridge</strong> (Massachusetts) <strong>San Francisco</strong>, <strong>Berkeley</strong>, <strong>Portland</strong> (Oregon), have also banned their police forces from using the technology.</p>
</section>
<section id="the-european-context" class="level2">
@@ -708,7 +791,7 @@

<p>Legislative activity accelerated in 2018. The <strong>European Commission</strong> (2018a) published a communication <em>Artificial Intelligence for Europe</em>, in which it called for a joint legal framework for the regulation of AI-related services. Later in the year, the Commission (2018b) adopted a <em>Coordinated Plan on Artificial Intelligence</em> with similar objectives. It compelled EU member states to adopt a national strategy on artificial intelligence which should meet the EU requirements. It also allocated 20 billion euros each year for investment in AI development. (Andraško et al. 2021, 4).</p>
<p>In 2019, the <strong>Council of Europe Commissioner for Human Rights</strong> published a Recommendation entitled <em>Unboxing Artificial Intelligence: 10 steps to Protect Human Rights</em> which describes several steps for national authorities to maximise the potential of AI while preventing or mitigating the risk of its misuse. (Gonzalez Fuster 2020, 46). The same year the <strong><a class="maplink" data-title="European Union">European Union</a>’s High Level Expert Group on Artificial Intelligence (AI HLEG)</strong> adopted the <em>Ethics Guidelines for Trustworthy Artificial Intelligence</em>, a key document for the EU strategy in bringing AI within ethical standards (Nesterova 2020, 3).</p>
<p>In 2019, the <strong>Council of Europe Commissioner for Human Rights</strong> published a Recommendation entitled <em>Unboxing Artificial Intelligence: 10 steps to Protect Human Rights</em> which describes several steps for national authorities to maximise the potential of AI while preventing or mitigating the risk of its misuse. (Gonzalez Fuster 2020, 46). The same year the <strong><a class="maplink" data-title="European Union">European Union</a>’s <a class="maplink" data-title="European Union High-Level Expert Group on Artificial Intelligence (AI HLEG)">High Level Expert Group on Artificial Intelligence</a> (AI HLEG)</strong> adopted the <em>Ethics Guidelines for Trustworthy Artificial Intelligence</em>, a key document for the EU strategy in bringing AI within ethical standards (Nesterova 2020, 3).</p>
<p>In February 2020, the new <strong>European Commission</strong> went one step further in regulating matters related to AI, adopting the digital agenda package – a set of documents outlining the strategy of the EU in the digital age. Among the documents the <em>White Paper on Artificial Intelligence: a European approach to excellence and trust</em> captured most of the commission’s intentions and plans. </p>
</section>
@@ -717,7 +800,7 @@
<p>Over the past 3-4 years, positions around the use of facial recognition and more specifically the use of remote biometric identification in public space have progressively crystalised into four camps (for a more detailed analysis of the positions, see Chapter 5).</p>
<section id="active-promotion" class="level3">
<h3>Active promotion</h3>
<p>A certain number of actors, both at the national and at the local level are pushing for the development and the extension of biometric remote identification. At the local level, figures such as Nice’s (France) mayor Christian Estrosi have repeatedly challenged Data Protection Authorities, arguing for the usefulness of such technologies in the face of insecurity (for a detailed analysis, see chapter 8 in this report, see also Barelli 2018). <strong>At the national level, Biometric systems for the purposes of authentication are increasingly deployed for forensic applications</strong> among law-enforcement agencies in the <a class="maplink" data-title="European Union">European Union</a>. As we elaborate in Chapter 3, 11 out of 27 member states of the <a class="maplink" data-title="European Union">European Union</a> are already using facial recognition against biometric databases for forensic purposes and 7 additional countries are expected to acquire such capabilities in the near future. Several states that have not yet adopted such technologies seem inclined to follow the trend, and push further. Belgian Minister of Interior Pieter De Crem for example, recently declared he was in favour of the use of facial recognition both for judicial inquiries but also for live facial recognition, a much rarer instance. Such outspoken advocates of the use of RBI constitute an important voice, but do not find an echo in the EU mainstream discussions.</p>
<p>A certain number of actors, both at the national and at the local level are pushing for the development and the extension of biometric remote identification. At the local level, figures such as Nice’s (France) mayor Christian Estrosi have repeatedly challenged Data Protection Authorities, arguing for the usefulness of such technologies in the face of insecurity (for a detailed analysis, see chapter 8 in this report, see also Barelli 2018). <strong>At the national level, Biometric systems for the purposes of authentication are increasingly deployed for forensic applications</strong> among law-enforcement agencies in the <a class="maplink" data-title="European Union">European Union</a>. As we elaborate in Chapter 3, 11 out of 27 member states of the <a class="maplink" data-title="European Union">European Union</a> are already using facial recognition against biometric databases for forensic purposes and 7 additional countries are expected to acquire such capabilities in the near future. Several states that have not yet adopted such technologies seem inclined to follow the trend, and push further. Former Belgian Minister of Interior Pieter De Crem for example, recently declared he was in favour of the use of facial recognition both for judicial inquiries but also for live facial recognition, a much rarer instance. Such outspoken advocates of the use of RBI constitute an important voice, but do not find an echo in the EU mainstream discussions.</p>
</section>
<section id="support-with-safeguards" class="level3">
<h3>Support with safeguards </h3>
@@ -729,7 +812,7 @@
</section>
<section id="ban" class="level3">
<h3>Ban</h3>
<p>Finally, a growing number of actors considers that there is enough information about remote biometric identification in public space to determine that they will never be able to comply to the strict requirement of the <a class="maplink" data-title="European Union">European Union</a> in terms of respect of Fundamental Rights, and as such should be banned entirely. It is the current position of the <strong>European Data Protection Supervisor (EDPS, 2021)</strong> the <strong>Council of Europe</strong> and a large coalition of NGOs, gathered under the umbrella of the <strong>European Digital Rights organisation</strong> (EDRi 2020). In the <strong>European Parliament</strong>, the position has most vocally been defended by the European Greens, but has been shared by several other voices, such as members of the Party of the European Left, the Party of European Socialists or Renew Europe (Breyer et al 2021).</p>
<p>Finally, a growing number of actors considers that there is enough information about remote biometric identification in public space to determine that they will never be able to comply to the strict requirement of the <a class="maplink" data-title="European Union">European Union</a> in terms of respect of Fundamental Rights, and as such should be banned entirely. It is the current position of the <strong>European Data Protection Supervisor (EDPS, 2021)</strong> the <strong>Council of Europe</strong> and a large coalition of NGOs (among which <strong><a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a></strong> and the collaborative project <strong>Technopolice</strong>), gathered under the umbrella of the <strong><a class="maplink" data-title="European Digital Rights (EDRi)">European Digital Rights organisation</a></strong> (EDRi 2020). In the <strong>European Parliament</strong>, the position has most vocally been defended by the European Greens, but has been shared by several other voices, such as members of the Party of the European Left, the Party of European Socialists or Renew Europe (Breyer et al 2021).</p>
</section>
</section>
<section id="lack-of-transparency-and-the-stifling-of-public-debate" class="level2">

@@ -795,7 +878,7 @@
</section>
<section id="people-tracking-and-counting" class="level3">
<h3>People tracking and counting </h3>
<p>This is perhaps the form of person tracking with which the least information about an individual is stored. An <strong>object detection algorithm</strong> estimates the presence and position of individuals on a camera image. These positions are stored or counted and used for further metrics. It is used to count <strong>passers-by in city centres</strong>, and for a <strong>one-and-a-half-meter social distancing monitor in Amsterdam</strong><a href="#fn2" class="footnote-ref" id="fnref2" role="doc-noteref"><sup>2</sup></a>. See also the case study in this document on the <a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Burglary-Free Neighbourhood</a> in Rotterdam (CHAPTER 7), which goes into more detail about the use of the recorded trajectories of individuals to label anomalous behaviour.</p>
<p>This is perhaps the form of person tracking with which the least information about an individual is stored. An <strong>object detection algorithm</strong> estimates the presence and position of individuals on a camera image. These positions are stored or counted and used for further metrics. It is used to count <strong>passers-by in city centres</strong>, and for a <strong>one-and-a-half-meter social distancing monitor in Amsterdam</strong><a href="#fn3" class="footnote-ref" id="fnref3" role="doc-noteref"><sup>3</sup></a>. See also the case study in this document on the <a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Burglary-Free Neighbourhood</a> in Rotterdam (CHAPTER 7), which goes into more detail about the use of the recorded trajectories of individuals to label anomalous behaviour.</p>
</section>
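The counting described above can be illustrated with a small JavaScript sketch; it is not taken from the project's code, and the detection values and field names are made-up assumptions. Per-frame detections from an object detection algorithm are reduced to anonymous counts, so no identity needs to be stored.

// Hypothetical per-frame output of an object detector: one bounding box per detected person.
const frames = [
  { t: 0, detections: [{ x: 12, y: 40, w: 30, h: 80 }, { x: 210, y: 35, w: 28, h: 82 }] },
  { t: 1, detections: [{ x: 15, y: 41, w: 30, h: 80 }] },
  { t: 2, detections: [] },
];
// Keep only aggregate metrics: passers-by per frame and the running average.
const counts = frames.map(f => f.detections.length);
const total = counts.reduce((sum, c) => sum + c, 0);
console.log(counts, total / frames.length); // [2, 1, 0] 1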
<section id="emotion-recognition." class="level3">
|
||||
<h3>Emotion recognition. </h3>
|
||||
|
@@ -807,7 +890,7 @@
</section>
<section id="audio-recognition" class="level3">
<h3>Audio recognition </h3>
<p>From a technological perspective, neural networks process audio relatively similarly to how video is processed: rather than feeding an image, a spectrogram is used as input for the network. However, under the GDPR, recording conversations is illegal in the <a class="maplink" data-title="European Union">European Union</a> without informed consent of the participants. In order to adhere to these regulations, on some occasions, only particular frequencies are recorded and processed. For example, in the <a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Burglary-Free Neighbourhood</a> in Rotterdam (CHAPTER 7), only two frequencies are used to classify audio; making conversations indiscernible while being able to discern shouting or the breaking of glass<a href="#fn3" class="footnote-ref" id="fnref3" role="doc-noteref"><sup>3</sup></a>.</p>
<p>From a technological perspective, neural networks process audio relatively similarly to how video is processed: rather than feeding an image, a spectrogram is used as input for the network. However, under the GDPR, recording conversations is illegal in the <a class="maplink" data-title="European Union">European Union</a> without informed consent of the participants. In order to adhere to these regulations, on some occasions, only particular frequencies are recorded and processed. For example, in the <strong><a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Burglary-Free Neighbourhood</a> in Rotterdam (Netherlands)</strong> (CHAPTER 7), only two frequencies are used to classify audio; making conversations indiscernible while being able to discern shouting or the breaking of glass<a href="#fn4" class="footnote-ref" id="fnref4" role="doc-noteref"><sup>4</sup></a>. Another initiative using audio to enhance the surveillance camera is the <strong>Living Lab International Zone</strong> project in the Hague (Netherlands), a collaboration between a broad range of partners<a href="#fn5" class="footnote-ref" id="fnref5" role="doc-noteref"><sup>5</sup></a>.</p>
</section>
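As a rough illustration of the two-frequency approach mentioned above, the sketch below uses the standard Web Audio API to read the energy in just two frequency bands instead of storing audio; the chosen bands and the loudness threshold are illustrative assumptions, not the configuration used in Rotterdam.

// Inspect only two frequency bands, so no intelligible conversation is recorded.
async function monitorTwoBands() {
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  const ctx = new AudioContext();
  const analyser = ctx.createAnalyser();
  analyser.fftSize = 2048;
  ctx.createMediaStreamSource(stream).connect(analyser);

  const bins = new Uint8Array(analyser.frequencyBinCount);
  const hzPerBin = ctx.sampleRate / analyser.fftSize;
  const bandA = Math.round(1500 / hzPerBin); // hypothetical "shouting" band
  const bandB = Math.round(6000 / hzPerBin); // hypothetical "breaking glass" band

  setInterval(() => {
    analyser.getByteFrequencyData(bins);
    if (bins[bandA] > 200 || bins[bandB] > 200) { // arbitrary loudness threshold
      console.log('possible incident detected');
    }
  }, 250);
}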
</section>
<section id="how-does-image-based-remote-biometric-identification-work" class="level2">
@@ -816,26 +899,26 @@
<section id="image-acquisition-controlled-and-uncontrolled-images" class="level3">
<h3>Image acquisition: Controlled and uncontrolled images</h3>
<p>Facial recognition begins with an image. An image which will be subject to the algorithm’s scrutiny. <strong>Controlled images</strong> are images that are captured for the purpose of processing, aimed at optimal positions and lighting conditions. They are for example taken at a police station, or at a photographer’s studio with strict requirements, and are either contained in databases that precede the introduction of a facial recognition system (e.g., driver’s license databases) or are specifically designed to match high criteria of biometric systems (i.e., photographs for biometric passports). <strong>Uncontrolled images</strong> are images that are captured outside of specific requirement, collected for example through social media scraping or video surveillance.</p>
<p>When it comes to the acquisition technologies (cameras) for uncontrolled images, over the past decades, the main evolution in terms of video has been the passage from analogue video to digital video, the latter allowing images to be processed through computers. As in the realm of consumer cameras, the initial race was for better definition (calculated in terms of megapixels). “Smart” camera systems require a slightly higher resolution than standard video surveillance systems in order to guarantee a minimum of 300 PPM to adequately feed the software (IPVM Team 2020, 5). But overall, the average camera does not exceed a definition of 4 megapixels and are more often in the area of 2 megapixels (which yields a 1080p or HD resolution)<a href="#fn4" class="footnote-ref" id="fnref4" role="doc-noteref"><sup>4</sup></a>. <strong>The quality of capture</strong>, especially in non-cooperative scenarios, is determined by two main external variables: the angle of the face relative to the camera (front, side, back, top) and the lighting conditions (bright daylight, dark night). In recent years, manufacturers have added an additional infra-red channel to the red-green-blue (RGB) video channels in order to increase detail accuracy in low-light conditions.</p>
<p>When it comes to the acquisition technologies (cameras) for uncontrolled images, over the past decades, the main evolution in terms of video has been the passage from analogue video to digital video, the latter allowing images to be processed through computers. As in the realm of consumer cameras, the initial race was for better definition (calculated in terms of megapixels). “Smart” camera systems require a slightly higher resolution than standard video surveillance systems in order to guarantee a minimum of 300 PPM to adequately feed the software (IPVM Team 2020, 5). But overall, the average camera does not exceed a definition of 4 megapixels and are more often in the area of 2 megapixels (which yields a 1080p or HD resolution)<a href="#fn6" class="footnote-ref" id="fnref6" role="doc-noteref"><sup>6</sup></a>. <strong>The quality of capture</strong>, especially in non-cooperative scenarios, is determined by two main external variables: the angle of the face relative to the camera (front, side, back, top) and the lighting conditions (bright daylight, dark night). In recent years, manufacturers have added an additional infra-red channel to the red-green-blue (RGB) video channels in order to increase detail accuracy in low-light conditions.</p>
</section>
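The 300 PPM requirement mentioned above follows from a simple pixel-density calculation: horizontal resolution divided by the width of the scene the camera covers at the target distance. A small sketch with illustrative values:

// Pixels-per-meter (PPM) at the target area.
function pixelsPerMeter(horizontalPixels, sceneWidthMeters) {
  return horizontalPixels / sceneWidthMeters;
}
// A 2-megapixel (1080p) camera covering a 6 m wide area:
console.log(pixelsPerMeter(1920, 6)); // 320 PPM, just above the 300 PPM guideline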
<section id="what-makes-systems-smart-image-processing-algorithms" class="level3">
<h3>What makes systems “smart”: image processing algorithms</h3>
<p>The processing of the photographic or video image by a specific software application is where the “smart” processing happens. Broadly speaking video surveillance technology can be split in two key historical moments: before machine learning, and after machine learning.</p>
<p><strong>Video motion detection (VMD) and heuristic filters.</strong> The early smart technologies relied on simple motion detection algorithms which compared pixel changes from one image to the next (Quevillon 2012). The problem is that any movement (the leaves of a tree) or change of light (a car passing in the night) can trigger the systems. <strong>Heuristic filters</strong> were thus added to VMD systems in order to give additional parameters to the system (amount and size of pixel changing etc.). Both systems were highly inefficient and prone to trigger false alarms, making such technologies unattractive. The main problem was that only pre-established changes hard coded by humans would be detected by the systems.</p>
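The frame-differencing idea behind VMD can be sketched in a few lines of JavaScript (grayscale frames as plain arrays of pixel values; the thresholds are arbitrary assumptions):

// Compare two grayscale frames pixel by pixel; report motion when enough pixels
// changed by more than a given amount.
function motionDetected(prevFrame, currFrame, pixelDelta = 25, minChangedRatio = 0.01) {
  let changed = 0;
  for (let i = 0; i < currFrame.length; i++) {
    if (Math.abs(currFrame[i] - prevFrame[i]) > pixelDelta) changed++;
  }
  return changed / currFrame.length > minChangedRatio;
}
// A passing car, moving leaves or a change of light all trigger this check equally,
// which is why plain VMD is so prone to false alarms.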
<p><strong>Machine learning.</strong> Machine learning revolutionised image-based biometric identification. Machine learning is an automated process through which the software application will be programmed to recognise particular patterns, based on a dataset it is “trained” on. There are three ways in which this configuration of the machine learning model can be controlled: supervised, semi-supervised or unsupervised. <strong>Supervised machine learning</strong> consists of teaching the system to recognise people, cars, guns, or any other object by feeding it an annotated dataset of such objects. It is supervised because humans “supervise” how the computer learns, by annotating the dataset (“this is a car”, “this is a gun” etc.). The categories of the annotations (cars, guns, etc.) will thus be the only ones that the system will be able to recognise (if only cars and guns are annotated, the system won’t in such a case recognise cats). Most video surveillance systems use supervised machine learning (IPVM Team 2021a, 11)<strong>. Unsupervised machine learning</strong> lets the system cluster objects by itself. The advantage is the open-endedness of the systems (meaning they can generate categories of objects not anticipated in the training dataset), but the disadvantage is that algorithms can potentially cluster objects along irrelevant criteria for the task (for example clustering red motorcycles, cars, and trucks in one group and green ones in another, as opposed to creating one cluster for all motorcycles, one for cars and one for trucks). For this reason, <strong>semi-supervised machine learning</strong>, where only a small part of the data is labelled, can be used. Currently not widely in use, unsupervised machine learning is a growing trend in the video surveillance sector (IPVM Team 2021a, 12–13).</p>
<p>Both supervised and unsupervised learning exist in many shapes and sizes. For example, the Viola-Jones object detection algorithm<a href="#fn5" class="footnote-ref" id="fnref5" role="doc-noteref"><sup>5</sup></a> from 2001, which made real-time face detection viable, is a supervised algorithm. Contemporary developments in video processing focus on using various kinds of artificial <strong>neural networks</strong> (i.e., <strong>convolutional neural networks</strong>, <strong>recurrent neural networks</strong>) to classify images and videos. These networks can be trained either <strong>supervised</strong>, <strong>semi-supervised</strong> or <strong>unsupervised</strong> depending on their configuration.</p>
<p>Both supervised and unsupervised learning exist in many shapes and sizes. For example, the Viola-Jones object detection algorithm<a href="#fn7" class="footnote-ref" id="fnref7" role="doc-noteref"><sup>7</sup></a> from 2001, which made real-time face detection viable, is a supervised algorithm. Contemporary developments in video processing focus on using various kinds of artificial <strong>neural networks</strong> (i.e., <strong>convolutional neural networks</strong>, <strong>recurrent neural networks</strong>) to classify images and videos. These networks can be trained either <strong>supervised</strong>, <strong>semi-supervised</strong> or <strong>unsupervised</strong> depending on their configuration.</p>
</section>
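A toy JavaScript illustration of the supervised setting described in this section: a nearest-centroid classifier can only ever predict the labels present in its annotated training data (no annotated cats, no recognised cats). This is a didactic sketch with hypothetical two-dimensional features, not how production video analytics are built.

// Train: average the feature vectors of each human-annotated label.
function trainCentroids(examples) {
  const sums = {};
  for (const { features, label } of examples) {
    if (!sums[label]) sums[label] = { total: new Array(features.length).fill(0), n: 0 };
    features.forEach((v, i) => (sums[label].total[i] += v));
    sums[label].n += 1;
  }
  const centroids = {};
  for (const label in sums) centroids[label] = sums[label].total.map(v => v / sums[label].n);
  return centroids;
}
// Classify: pick the closest centroid; only labels seen during training can be returned.
function classify(centroids, features) {
  let best = null, bestDist = Infinity;
  for (const label in centroids) {
    const d = Math.hypot(...centroids[label].map((v, i) => v - features[i]));
    if (d < bestDist) { bestDist = d; best = label; }
  }
  return best;
}
const centroids = trainCentroids([
  { features: [0.4, 1.2], label: 'person' }, // [aspect ratio, speed] - made-up features
  { features: [2.5, 8.0], label: 'car' },
]);
console.log(classify(centroids, [0.5, 1.0])); // 'person'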
<section id="machine-learning-and-operational-datasets" class="level3">
<h3><strong>Machine learning</strong> and operational datasets</h3>
<p>Remote biometric identification and classification relies in large part on datasets, for two key but distinct moments of their operation.</p>
<p><strong>Machine learning datasets.</strong> These are the datasets used to train models through <strong>machine learning.</strong> We find three categories of such datasets. <strong>Publicly available datasets</strong> for object detection such as COCO, ImageNet, Pascal VOC include a varying number of images labelled in a range of categories, these can be used to train algorithms to detect for example people on an image (IPVM Team 2021a, 27). The most used open-source datasets for surveillance technologies are Celeb 500k, MS-Celeb-1Million-Cleaned, Labeled Faces in the Wild, VGG Face 2, DeepGlint Asian Celeb, IMDB-Face, IMDB-Wiki, CelebA, Diveface, Flickr faces and the IARPA Janus Benchmark (IPVM Team 2021b, 7). Many of these datasets also function as a public benchmark, against which the performance and accuracy of various algorithms is measured. For example, Labeled Faces in the Wild, the COCO dataset and <a class="maplink" data-title="NIST">NIST</a> present such leaderboards on their website<a href="#fn6" class="footnote-ref" id="fnref6" role="doc-noteref"><sup>6</sup></a>. <strong>Government datasets</strong> are generally collections of images available to a government for other purposes (driver’s license, passport, or criminal record photo datasets). While in Europe most of these datasets are not accessible to the public, in China and in the US, they are made available for testing and training purposes to private companies, such as the Multiple Encounter Dataset (NIST, 2010). Finally <strong>proprietary datasets</strong> may be developed by providers for their specific applications.</p>
|
||||
<p><strong>Machine learning datasets.</strong> These are the datasets used to train models through <strong>machine learning.</strong> We find three categories of such datasets. <strong>Publicly available datasets</strong> for object detection such as COCO, ImageNet and Pascal VOC include a varying number of images labelled in a range of categories; these can be used to train algorithms to detect, for example, people in an image (IPVM Team 2021a, 27). The most widely used open-source datasets for surveillance technologies are Celeb 500k, MS-Celeb-1Million-Cleaned, Labeled Faces in the Wild, VGG Face 2, DeepGlint Asian Celeb, IMDB-Face, IMDB-Wiki, CelebA, Diveface, Flickr faces and the IARPA Janus Benchmark (IPVM Team 2021b, 7). Many of these datasets also function as a public benchmark, against which the performance and accuracy of various algorithms is measured. For example, Labeled Faces in the Wild, the COCO dataset and <a class="maplink" data-title="NIST">NIST</a> present such leaderboards on their websites<a href="#fn8" class="footnote-ref" id="fnref8" role="doc-noteref"><sup>8</sup></a>. <strong>Government datasets</strong> are generally collections of images available to a government for other purposes (driver’s license, passport, or criminal record photo datasets). While in Europe most of these datasets are not accessible to the public, in China and in the US they are made available for testing and training purposes to private companies, such as the Multiple Encounter Dataset (NIST, 2010). Finally, <strong>proprietary datasets</strong> may be developed by providers for their specific applications.</p>
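<p>Several of the public datasets named above can be loaded directly from common libraries. As a hedged illustration, the sketch below fetches the Labeled Faces in the Wild collection through scikit-learn; the filtering threshold and variable names are assumptions chosen for illustration only.</p>
<pre><code class="language-python"># Illustrative sketch: loading the public "Labeled Faces in the Wild" dataset,
# one of the benchmark collections mentioned in this section.
from sklearn.datasets import fetch_lfw_people

lfw = fetch_lfw_people(min_faces_per_person=50)  # keep only frequently pictured people
print("images:", lfw.images.shape)               # (n_samples, height, width)
print("identity labels:", lfw.target_names[:5])  # the annotations used for supervision
</code></pre>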
<p><strong>Machine learning models.</strong> In the machine learning process, an algorithm gets iteratively configured for the optimal output, based on the particular dataset that it is fed with. This can be a neural network, but also, for example, the aforementioned Viola-Jones object detection algorithm. The <strong>model</strong> is the final configuration of this learning process. As such, it does not contain the images of the dataset in and of themselves. Rather, it represents the abstractions the algorithm “learned” over time. In other words, the model operationalises the machine learning dataset. For example, the YOLO object detection algorithm yields different results depending on the dataset it is trained on (COCO, for instance). It is the model (in conjunction with the algorithm) which determines the translation of an image into a category, or of the image of a face into its embedding.</p>
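<p>The point that it is the model, not the dataset, that gets shipped and run can be illustrated as follows. This sketch (assuming a recent version of PyTorch and torchvision is installed) loads an object detection model whose weights were learned from the COCO dataset: the downloaded file contains learned parameters, not the training images, and it can only predict the categories COCO was annotated with.</p>
<pre><code class="language-python"># Illustrative sketch: a detection model that "operationalises" the COCO dataset.
# The weights are the learned configuration; the training images are not included.
import torch
from torchvision.models.detection import fasterrcnn_resnet50_fpn

model = fasterrcnn_resnet50_fpn(weights="DEFAULT")  # parameters pre-trained on COCO
model.eval()

dummy_frame = torch.rand(3, 480, 640)               # stand-in for a single video frame
with torch.no_grad():
    prediction = model([dummy_frame])[0]
print(prediction["labels"][:5])                     # predicted COCO category ids
</code></pre>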
<p><strong>Operational datasets, or image databases.</strong> Datasets used in training machine learning models should be distinguished from matching or operational datasets, which are the “watchlists” of, for example, criminals or persons of interest, or other lists of individuals against which facial recognition searches will be performed – whether in real time or post hoc. These datasets contain pre-processed images of the individuals on the watchlist and store the numerical representations of these faces, their feature vectors or <em>embeddings</em>, in an index for fast retrieval and comparison with the queried features (using, for example, k-Nearest Neighbours or Support Vector Machines). Face or object detection models do not use such a dataset.</p>
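<p>A minimal sketch of such an operational index is given below, using scikit-learn’s nearest-neighbour search. The embeddings are random placeholders standing in for the output of a face recognition model; the watchlist size, vector dimension and variable names are assumptions for illustration only.</p>
<pre><code class="language-python"># Illustrative sketch: a "watchlist" index of face embeddings queried with
# k-nearest neighbours. Random vectors stand in for real face embeddings.
import numpy as np
from sklearn.neighbors import NearestNeighbors

rng = np.random.default_rng(0)
watchlist = rng.normal(size=(1000, 128))   # 1,000 enrolled faces, 128-d embeddings
index = NearestNeighbors(n_neighbors=1).fit(watchlist)

probe = rng.normal(size=(1, 128))          # embedding of the face being queried
distance, match_id = index.kneighbors(probe)
print("closest watchlist entry:", int(match_id[0, 0]),
      "at distance", float(distance[0, 0]))
</code></pre>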
</section>
<section id="availability" class="level3">
<h3>Availability</h3>
<p>Facial recognition algorithms can be developed in-house, taken from an open-source repository, or purchased (IPVM Team 2021b, 14). Popular <strong>open-source facial recognition</strong> implementations include OpenCV, Face_pytorch, OpenFace and Insightface. Many of these software libraries are developed at universities or implement algorithms and neural network architectures presented in academic papers. They are free and allow for a great deal of customisation, but require substantial programming skills to be implemented in a surveillance system. Moreover, when using such software, the algorithms run on one’s own hardware, which gives the developer more control but also requires more maintenance.</p>
<p><strong>Proprietary facial recognition.</strong> There are three possible routes for the use of proprietary systems. <strong>“Turnkey”</strong> systems, sold by manufacturers such as <strong><a class="maplink" data-title="Hikvision">Hikvision</a></strong>, <strong><a class="maplink" data-title="Dahua Technologies">Dahua</a></strong>, <strong><a class="maplink" data-title="AnyVision">AnyVision</a></strong> or <strong><a class="maplink" data-title="Briefcam Ltd">Briefcam</a></strong>, integrate the software and hardware, and as such can be directly deployed by the client. <strong>Algorithm developers</strong> such as <strong>Amazon AWS Rekognition</strong> (USA), <strong><a class="maplink" data-title="NEC">NEC</a></strong> (Japan), <strong><a class="maplink" data-title="Ntech Lab">NTechlab</a></strong> (Russia) and <strong><a class="maplink" data-title="Paravision">Paravision</a></strong> (USA) allow clients to implement their algorithms and customise them to their needs. Finally, there are <strong>“cloud” API systems</strong>, a sub-set of the former category, where the algorithm is hosted in a datacentre and accessed remotely (IPVM Team 2021b, 16). The latter type of technology carries important legal ramifications, as the data may travel outside of national or European jurisdictions. It should be noted that many of the proprietary products are based on similar algorithms and network architectures as their open-source counterparts (OpenCV, 2021). Contrary to the open-source software, it is generally unclear which datasets of images have been used to train the proprietary algorithms.</p>
</section>
</section>
<section id="technical-limits-problems-and-challenges-of-facial-recognition" class="level2">
<p>A broad range of deployments, which we consider in this first section, is not aimed at surveillance, but at authentication (see section 2.3 in this report), namely making sure that the person in front of the security camera is who they say they are.</p>
<section id="live-authentication" class="level3">
<h3>Live authentication</h3>
<p>As in the cases of the <a class="maplink" data-title="Cisco Systems">Cisco Systems</a>-powered FRT used in two pilot projects in <strong><a class="maplink" data-title="Facial Recognition Pilot in High School (Nice)">high schools of Nice</a></strong> (see section 8.1) <strong>and <a class="maplink" data-title="Facial Recognition Pilot in High School (Marseille)">Marseille</a> (France)</strong><a href="#fn9" class="footnote-ref" id="fnref9" role="doc-noteref"><sup>9</sup></a>, or as in the case of the <strong><a class="maplink" data-title="Facial Recognition in Anderstorp Upper Secondary School (Skelleftea, Sweden)">Anderstorp Upper Secondary School</a> in Skelleftea (Sweden)</strong><a href="#fn10" class="footnote-ref" id="fnref10" role="doc-noteref"><sup>10</sup></a>, the aim of these projects was to identify the students who were authorised to access the premises. School-wide biometric databases were generated and populated with students’ portraits. Gates were fitted with cameras connected to facial recognition technology and allowed access only to recognised students. Another documented use has been for the <strong><a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App (Hungary)</a></strong>, in which telephone cameras are used by authorities to verify the identity of the persons logged into the app (see also section 10.1).</p>
<p>In these deployments, people must submit themselves to the camera in order to be identified and gain access. While these techniques of identification pose <strong>important threats to the privacy of the small groups of users concerned</strong> (in both high school cases, DPAs banned the use of FRTs) and run the risk of false positives (unauthorised people recognised as authorised) or false negatives (authorised people not recognised as such), <strong>the risk of biometric mass surveillance strictly speaking is low to non-existent because of the nature of the acquisition of images and other sensor-based data.</strong></p>
<p>However, other forms of live authentication tie in with surveillance practices, in particular various forms of <strong>blacklisting</strong>. With blacklisting, the face of every passer-by is compared to a list of faces of individuals who have been denied access to the premises. In such an instance, people do not have to be identified, as long as an image of their face is provided. This has been used in public places, for example in the case of the <a class="maplink" data-title="Korte Putstraat (Stopped)">Korte Putstraat</a> in the Dutch city of 's-Hertogenbosch: during the carnival festivities of 2019, two people were denied access to the street after they were singled out by the system (Gotink, 2019). It is unclear how many false positives were generated during this period. Other cases of blacklisting can be found, for example, in access control at various football stadiums in Europe (see also section 3.3). In many cases of blacklisting, individuals do not enrol voluntarily.</p>
</section>
<section id="forensic-authentication" class="level3">
<h3>Forensic authentication</h3>
<p>Biometric systems for the purposes of authentication are also increasingly deployed for <strong>forensic applications</strong> among law-enforcement agencies in the European Union. The typical scenario for the use of such technologies is to match the photograph of a suspect (extracted, for example, from previous records or from CCTV footage) against an existing dataset of known individuals (e.g., a national biometric database, a driver’s license database, etc.) (TELEFI, 2021). The development of these forensic authentication capabilities is particularly relevant to this study, because it entails making large databases ready for searches on the basis of biometric information.</p>
<p>To date, <strong>11 out of 27 member states of the <a class="maplink" data-title="European Union">European Union</a></strong> are using facial recognition against biometric databases for forensic purposes: <strong>Austria</strong> (<a class="maplink" data-title="EDE (AFR used by Austrian Criminal Intelligence Service)">EDE</a>)<a href="#fn11" class="footnote-ref" id="fnref11" role="doc-noteref"><sup>11</sup></a>, <strong>Finland</strong> (<a class="maplink" data-title="KASTU (Finland)">KASTU</a>)<a href="#fn12" class="footnote-ref" id="fnref12" role="doc-noteref"><sup>12</sup></a>, <strong>France</strong> (<a class="maplink" data-title="Deployment of TAJ">TAJ</a>)<a href="#fn13" class="footnote-ref" id="fnref13" role="doc-noteref"><sup>13</sup></a>, <strong>Germany</strong> (<a class="maplink" data-title="German central criminal information system INPOL">INPOL</a>)<a href="#fn14" class="footnote-ref" id="fnref14" role="doc-noteref"><sup>14</sup></a>, <strong>Greece</strong> (<a class="maplink" data-title="Facial Recognition in Greece (Law Enforcement)">Mugshot Database</a>)<a href="#fn15" class="footnote-ref" id="fnref15" role="doc-noteref"><sup>15</sup></a>, <strong>Hungary</strong> (<a class="maplink" data-title="NEC Face Recognition Search Engine in Hungary">Facial Image Registry</a>)<a href="#fn16" class="footnote-ref" id="fnref16" role="doc-noteref"><sup>16</sup></a>, <strong>Italy</strong> (<a class="maplink" data-title="AFIS (Deployment, Italy)">AFIS</a>)<a href="#fn17" class="footnote-ref" id="fnref17" role="doc-noteref"><sup>17</sup></a>, <strong>Latvia</strong> (<a class="maplink" data-title="BDAS Deployment (Latvia)">BDAS</a>)<a href="#fn18" class="footnote-ref" id="fnref18" role="doc-noteref"><sup>18</sup></a>, <strong>Lithuania</strong> (<a class="maplink" data-title="HDR (Deployment, Lithuania)">HDR</a>)<a href="#fn19" class="footnote-ref" id="fnref19" role="doc-noteref"><sup>19</sup></a>, <strong>Netherlands</strong> (<a class="maplink" data-title="CATCH">CATCH</a>)<a href="#fn20" class="footnote-ref" id="fnref20" role="doc-noteref"><sup>20</sup></a> and <strong>Slovenia</strong> (<a class="maplink" data-title="VeriLook (and Face Trace) in Slovenia">Record of Photographed Persons</a>)<a href="#fn21" class="footnote-ref" id="fnref21" role="doc-noteref"><sup>21</sup></a> (TELEFI 2021).</p>
<p><strong>Seven additional countries</strong> are expected to acquire such capabilities in the near future: <strong>Croatia</strong> (<a class="maplink" data-title="ABIS (Deployment, Croatia)">ABIS</a>)<a href="#fn22" class="footnote-ref" id="fnref22" role="doc-noteref"><sup>22</sup></a>, <strong>Czech Republic</strong> (<a class="maplink" data-title="CBIS (deployment, Czech Republic)">CBIS</a>)<a href="#fn23" class="footnote-ref" id="fnref23" role="doc-noteref"><sup>23</sup></a>, <strong>Portugal</strong> (<a class="maplink" data-title="AFIS (Deployment, Portugal)">AFIS</a>), <strong>Romania</strong> (<a class="maplink" data-title="Romanian Police use of Facial Recognition (NBIS)">NBIS</a>)<a href="#fn24" class="footnote-ref" id="fnref24" role="doc-noteref"><sup>24</sup></a>, <strong>Spain</strong> (<a class="maplink" data-title="ABIS (Deployment, Spain)">ABIS</a>), <strong><a class="maplink" data-title="National Forensic Center">Sweden</a></strong> (<a class="maplink" data-title="Facial Recognition National Mugshot Database (Sweden)">National Mugshot Database</a>), <strong>Cyprus</strong> (<a class="maplink" data-title="Facial Recognition in Cyprus (Law Enforcement)">ISIS Faces</a>)<a href="#fn25" class="footnote-ref" id="fnref25" role="doc-noteref"><sup>25</sup></a> and <strong><a class="maplink" data-title="Estonian Forensic Science Institute">Estonia</a></strong> (<a class="maplink" data-title="ABIS (Deployment, Estonia)">ABIS</a>)<a href="#fn26" class="footnote-ref" id="fnref26" role="doc-noteref"><sup>26</sup></a> (TELEFI 2021).</p>
<p>When it comes to international institutions, <strong><a class="maplink" data-title="Interpol">Interpol</a></strong> (2020) has a facial recognition system (<a class="maplink" data-title="IFRS (Interpol)">IFRS</a>)<a href="#fn27" class="footnote-ref" id="fnref27" role="doc-noteref"><sup>27</sup></a>, based on facial images received from more than 160 countries. <strong><a class="maplink" data-title="Europol">Europol</a></strong> has two sub-units which use the facial recognition search tool and database known as <a class="maplink" data-title="FACE Deployment by EUROPOL">FACE</a>: the European Counter Terrorism Center (ECTC) and the European Cybercrime Center (ECC) (TELEFI 2021, 149–153; Europol 2020).</p>
<p><strong>Only 9 countries in the EU so far have rejected or do not plan to implement</strong> FRT for forensic purposes: <strong>Belgium</strong> (see CHAPTER 6), <strong>Bulgaria</strong>, <strong>Denmark</strong>, <strong>Ireland</strong>, <strong>Luxembourg</strong>, <strong>Malta</strong>, <strong>Poland</strong>, <strong>Portugal</strong>, <strong>Slovakia</strong>.</p>
<p><img src="images/media/image1.png" style="width:4.62502in;height:3.28283in" alt="Map Description automatically generated" /></p>
<p>Figure 1. EU countries’ use of FRT for forensic applications<a href="#fn28" class="footnote-ref" id="fnref28" role="doc-noteref"><sup>28</sup></a></p>
<p><strong>When it comes to databases</strong>, some countries limit the searches to <strong>criminal databases</strong> (Austria, Germany, France, Italy, Greece, Slovenia, Lithuania, UK), while other countries open the searches to <strong>civil databases</strong> (Finland, Netherlands, Latvia, Hungary).</p>
<p>This means that the <strong>person categories can vary substantially.</strong> In the case of criminal databases, these can range from suspects and convicts to asylum seekers, aliens, unidentified persons, immigrants and visa applicants. When <strong>civil databases</strong> are used as well, such as in Hungary, the database contains a broad range of “individuals of known identity from various document/civil proceedings” (TELEFI 2021, appendix 3).</p>
<p><strong>Finally, the database sizes</strong> are of a different magnitude in comparison to the authentication databases mentioned in the previous section. The databases of school students in France and Sweden mentioned there contain a few hundred entries each. National databases, by contrast, can contain several million. Criminal databases such as Germany’s INPOL contain <strong>6.2 million individuals</strong>, France’s <a class="maplink" data-title="Deployment of TAJ">TAJ</a> <strong>21 million individuals</strong> and <a class="maplink" data-title="AFIS (Deployment, Italy)">Italy’s AFIS</a> <strong>9 million individuals.</strong> Civil databases, such as Hungary’s Facial Image Registry, contain <strong>30 million templates</strong> (TELEFI 2021, appendix 3).</p>
</section>
<section id="case-study-inpol-germany" class="level3">
<h3>Case study: INPOL (Germany)</h3>
<p>In order to give a concrete example of the forensic use of biometric technology, we can take the German case. Germany has been using <strong>automated facial recognition</strong> technologies in criminal investigations since 2008, through a central criminal information system called <strong><a class="maplink" data-title="German central criminal information system INPOL">INPOL</a> (Informationssystem Polizei)</strong>, maintained by the <strong><a class="maplink" data-title="German Federal Criminal Police Office (Bundeskriminalamt)">Bundeskriminalamt</a> (BKA)</strong>, which is the federal criminal police office. INPOL uses <strong><a class="maplink" data-title="Oracle Corporation">Oracle Software</a></strong> and includes the following information: name, aliases, date and place of birth, nationality, fingerprints, mugshots, appearance, information about an individual’s criminal history such as prison sentences or violent behaviour, and DNA information. However, DNA information is not automatically recorded (TELEFI 2021).</p>
<p>The <a class="maplink" data-title="German central criminal information system INPOL">INPOL</a> database includes <strong>facial images of suspects, arrestees, missing persons, and convicted individuals</strong>. For the purpose of facial recognition, anatomical features of a person's face or head, as seen on video surveillance footage or photographs, are used as material to match against data in <a class="maplink" data-title="German central criminal information system INPOL">INPOL</a>. The facial recognition system compares templates and lists all matches, ordered by degree of similarity. The BKA has dedicated personnel who visually analyse the system's candidate matches and provide an assessment of the probability that a person has been correctly identified. This assessment can be used in a court of law if necessary (Bundeskriminalamt, n.d.). Searches in the database are conducted using <a class="maplink" data-title="Cognitec Systems">Cognitec</a> Face VACS software (TELEFI 2021).</p>
<p>As of March 2020, <strong><a class="maplink" data-title="German central criminal information system INPOL">INPOL</a></strong> consists of <strong>5.8 million images of about 3.6 million individuals</strong>. All police stations in Germany have access to this database. The biometric data saved by the BKA can also be used by other ministries, for instance to identify asylum seekers. Furthermore, the data is shared at the international level in the context of the <strong>Prüm cooperation</strong> (mostly fingerprints and DNA patterns). In addition, the <strong>BKA</strong> saves <strong>DNA analysis data as part of <a class="maplink" data-title="German central criminal information system INPOL">INPOL</a></strong>, accessible to all police stations in Germany. That database contains <strong>1.2 million data sets</strong> (Bundeskriminalamt, n.d.). Other recorded facial images, for instance from driver’s licenses or passports, are not included in the search, and the database is mainly used for police work (TELEFI 2021).</p>
</section>
<p>A second broad use of image and audio-based security technologies is for surveillance purposes. Here again, it is important, we suggest, to distinguish between two broad categories.</p>
<section id="smart-surveillance-features" class="level3">
<h3>Smart surveillance features</h3>
<p>A first range of deployments of <strong>“smart” systems</strong> corresponds to what can broadly be defined as “smart surveillance”, yet these systems <strong>do not collect or process biometric information per se</strong><a href="#fn29" class="footnote-ref" id="fnref29" role="doc-noteref"><sup>29</sup></a>. Smart systems can be used <strong>ex-post</strong>, <strong>to assist CCTV camera operators</strong> in processing large amounts of <strong>recorded information</strong>, or can guide their attention when they have to monitor a large number of <strong>live video feeds</strong> simultaneously. Smart surveillance uses the following features:</p>
<p><strong>- Anomaly detection. <a class="maplink" data-title="IBM Smart CCTV deployment in Toulouse">In Toulouse</a> (France), the City Council commissioned <a class="maplink" data-title="IBM">IBM</a> to connect 30 video surveillance cameras to software able to "assist human decisions" by raising alerts when "abnormal events are detected" (Technopolice 2021). The request was justified by the “difficulties of processing the images generated daily by the 350 cameras and kept for 30 days (more than 10,000 images per second)”. The objective, according to the city’s digital services department, is "to optimise and structure the supervision of video surveillance operators by generating alerts through a system of intelligent analysis that facilitates the identification of detected anomalies, whether movements of crowds, isolated luggage, the crossing of virtual barriers north of the Garonne, precipitous movement, or searches for shapes and colour". All these detections are done in real time or after the fact (Technopolice 2021). In other words, anomaly detection is a way to <em>operationalise</em> the numerical output of various computer-vision-based recognition systems. Similar systems are used</strong> in the <strong>Smart video surveillance deployment in Valenciennes (France)</strong> or in the <strong>Urban Surveillance Centre (Marseille).</strong></p>
<p><strong>- Object Detection.</strong> In Amsterdam, around the <strong><a class="maplink" data-title="Johan Cruijff ArenA">Johan Cruijff ArenA</a></strong> (stadium), the city has been experimenting with a <strong><a class="maplink" data-title="Digitale Perimeter">Digitale Perimeter</a></strong> (digital perimeter) surveillance system. In addition to the usual features of facial recognition and crowd monitoring, the system includes the possibility of automatically detecting specific objects such as <strong>weapons, fireworks</strong> or <strong>drones</strong>. Similar features are found in <strong><a class="maplink" data-title="Inwebit">Inwebit</a>’s <a class="maplink" data-title="Smart Security Platform (SSP), Poland">Smart Security Platform</a> (SSP) in Poland.</strong></p>
<p><strong>- Feature search. In <a class="maplink" data-title="City of Marbella">Marbella</a> (Spain), <a class="maplink" data-title="Avigilon">Avigilon</a> deployed <a class="maplink" data-title="Avigilon deployment in Marbella">a smart camera system</a> aimed at providing “smart” functionalities without biometric data. Since regional law bans facial and biometric identification without consent, the software uses “appearance search”. “Appearance search” provides estimates for “unique facial traits, the colour of a person’s clothes, age, shape, gender and hair colour”. This information is not considered biometric. The individual’s features can be used to search for suspects fitting a particular profile. Similar technology has been <a class="maplink" data-title="Monitoring Kortrijk">deployed in Kortrijk</a> (Belgium), which provides search parameters for people, vehicles and animals</strong> (Verbeke 2019). <strong>During the Covid-19 pandemic, several initiatives emerged to automatically detect whether the mask mandates were observed by the public, such as in the <a class="maplink" data-title="Face mask recognition in Châtelet-Les Halles (Stopped)">aborted face mask recognition project in Châtelet-Les Halles</a> developed by the company <a class="maplink" data-title="Datakalab">Datakalab</a>.</strong></p>
<p>- <strong>Video summary.</strong> Some companies, such as <strong><a class="maplink" data-title="Briefcam Ltd">Briefcam</a></strong> with their product <strong>Briefcam Review</strong>, offer a related functionality which promises to shorten the analysis of long hours of CCTV footage by identifying specific topics of interest (children, women, lighting changes) and making the footage searchable. The product combines face recognition, license plate recognition, and more mundane video analysis features such as the possibility to overlay selected scenes, thus highlighting recurrent points of activity in the image. Briefcam is deployed in several cities across Europe, including <a class="maplink" data-title="Briefcam deployment in Vannes">Vannes</a>, <a class="maplink" data-title="Eiffage / Briefcam deployment in Roubaix">Roubaix</a> (in partnership with <strong><a class="maplink" data-title="Eiffage">Eiffage</a></strong>, managed by <strong>the <a class="maplink" data-title="City of Roubaix">City of Roubaix</a></strong> and the <strong><a class="maplink" data-title="Métropole Européenne de Lille">Métropole Européenne de Lille</a></strong>) and <a class="maplink" data-title="Smart Surveillance in Moirans">Moirans</a> in France (with equipment provided by <strong><a class="maplink" data-title="Nomadys">Nomadys</a></strong>).</p>
<p><strong>- Object detection and object tracking. As outlined in chapter 2, object detection is often the first step in the various digital detection applications for images. An ‘object’ here can mean anything the computer is conditioned to search for: a suitcase, a vehicle, but also a person; some products further process the detected object to estimate particular features, such as the colour of a vehicle or the age of a person. However, on some occasions — often to address concerns over privacy — only the position of the object on the image is stored (a minimal sketch of this position-only approach follows after this list of features). This is for example the case with the</strong> test of the <strong><a class="maplink" data-title="Test of One and a half meter monitor">One-and-a-half-meter monitor</a> in Amsterdam (Netherlands), <a class="maplink" data-title="Intemo">Intemo</a>’s <a class="maplink" data-title="People counting in Nijmegen">people counting system in Nijmegen</a> (Netherlands),</strong> the <a class="maplink" data-title="ViSense at MINDBase">ViSense social distancing monitor</a> at MINDBase, a testing location of the <strong><a class="maplink" data-title="Dutch Defence Equipment Organisation">Dutch Defence Equipment Organisation</a></strong>; the <strong><a class="maplink" data-title="Project KICK">KICK project</a></strong> in <strong><a class="maplink" data-title="Brugge Municipality">Brugge</a></strong>, <strong><a class="maplink" data-title="Kortrijk Municipality">Kortrijk</a></strong>, <strong><a class="maplink" data-title="Ieper Municipality">Ieper</a></strong>, <strong><a class="maplink" data-title="Roeselare Municipality">Roeselare</a></strong> and <strong><a class="maplink" data-title="Economisch Huis Oostende">Oostende</a></strong> <strong>(Belgium), the <a class="maplink" data-title="ViSense - Mechelen">ViSense project in Mechelen</a> (Belgium)</strong> or the <strong><a class="maplink" data-title="Eco-Counter">Eco-counter</a></strong> <strong><a class="maplink" data-title="Tracking cameras pilot in Lannion">tracking cameras pilot project</a></strong> in <strong><a class="maplink" data-title="City of Lannion">Lannion</a> (France).</strong></p>
<p><strong>- Movement recognition. <a class="maplink" data-title="Avigilon">Avigilon</a>’s software deployed in Marbella (Spain) also detects unusual movement. “To avoid graffiti, we can calculate the time someone takes to pass a shop window,” explained Javier Martín, local chief of police in Marbella, to the Spanish newspaper El País. “If it takes them more than 10 seconds, the camera is activated to see if they are graffitiing. So far, it hasn’t been activated.” (Colomé 2019) Similar movement recognition technology is used in the ViSense deployment at the Olympic Park London (UK) and the <a class="maplink" data-title="Security cameras Mechelen-Willebroek">security camera system in Mechelen-Willebroek</a> (Belgium). It should be noted that movement</strong> recognition can be done in two ways: whereas projects such as the <strong><a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Data-lab Burglary-free Neighbourhood</a> in Rotterdam (Netherlands)</strong><a href="#fn30" class="footnote-ref" id="fnref30" role="doc-noteref"><sup>30</sup></a> are only based on the tracking of trajectories of people through an image (see also ‘Object detection’), cases such as <strong>the <a class="maplink" data-title="Living Lab Stratumseind">Living Lab Stratumseind</a></strong><a href="#fn31" class="footnote-ref" id="fnref31" role="doc-noteref"><sup>31</sup></a> <strong>in Eindhoven (Netherlands)</strong> also process the movements and gestures of individuals in order to estimate their behaviour.</p>
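<p>As referenced under object detection and tracking above, the sketch below illustrates the position-only approach: a pedestrian detector returns bounding boxes, and only the box coordinates and a count are retained rather than the image itself. It uses OpenCV’s built-in HOG person detector; the input path and the structure of the stored record are assumptions for illustration, not a description of any deployment named in this report.</p>
<pre><code class="language-python"># Illustrative sketch of "position-only" object detection: only box
# coordinates and a count are kept, never the image or any biometric template.
import cv2

hog = cv2.HOGDescriptor()
hog.setSVMDetector(cv2.HOGDescriptor_getDefaultPeopleDetector())

frame = cv2.imread("street_frame.jpg")      # placeholder path to a camera frame
boxes, _ = hog.detectMultiScale(frame)      # one (x, y, w, h) box per detected person

record = {
    "people_count": len(boxes),
    "positions": [tuple(int(v) for v in box) for box in boxes],
}
print(record)                               # the frame itself is discarded
</code></pre>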
<section id="audio-recognition-1" class="level4">
<h4>Audio recognition</h4>
<p>- In addition to image (video) based products, some deployments use audio recognition to complement the decision-making process, as for example in the <strong><a class="maplink" data-title="Serenecity">Serenecity</a> (a branch of <a class="maplink" data-title="Verney-Carron SA">Verney-Carron</a>) Project in Saint-Etienne (France)</strong>, the <strong><a class="maplink" data-title="Smart CCTV with audio detection in Rouen public transportation">Smart CCTV deployment in public transportation</a> in <a class="maplink" data-title="City of Rouen">Rouen</a> (France)</strong> or the <strong><a class="maplink" data-title="Smart surveillance coupled with audio recognition (Strasbourg)">Smart CCTV system in Strasbourg</a> (France)</strong>. The <a class="maplink" data-title="Serenicity project (Saint-Etienne)">project piloted in Saint-Etienne</a>, for example, worked by placing “audio capture devices” (the term microphone was avoided) in strategic parts of the city. Sounds qualified by an anomaly detection algorithm as suspicious would then alert operators in the Urban Supervision Center, prompting further investigation via CCTV or deployment of the necessary services (healthcare or police, for example) (France 3 Auvergne-Rhône-Alpes 2019).</p>
<h3>Integrated solutions </h3>
<section id="smart-cities" class="level4">
<h4>Smart cities</h4>
<p>While some cities or companies decide to implement some of the functionalities with their existing or updated CCTV systems, several chose to centralise a number of these “smart” functions in <strong>integrated systems</strong> often referred to as “safe city” solutions. These solutions do not necessarily process biometric information. This is the case for example for the deployments in <strong><a class="maplink" data-title="Gruppo TIM">TIM</a>’s</strong>, <strong><a class="maplink" data-title="Insula Spa">Insula</a></strong> and <strong><a class="maplink" data-title="Venis Spa">Venis</a>’</strong> <strong><a class="maplink" data-title="Control Room (Venice)">Safe City Platform in Venice</a> (Italy)</strong>, <strong><a class="maplink" data-title="Huawei">Huawei</a>’s</strong> <strong><a class="maplink" data-title="Smart video surveillance in Valenciennes">Safe City in Valenciennes</a> (France)</strong>, <strong><a class="maplink" data-title="Dahua Deployment in Brienon-sur-Armançon">Dahua’s integrated solution in Brienon-sur-Armançon</a></strong> <strong>(France)</strong>, <strong><a class="maplink" data-title="Thales">Thalès</a>’ Safe City in <a class="maplink" data-title="Safe City Pilot (La Défense)">La Défense</a> and <a class="maplink" data-title="Safe City Pilot Project (Nice)">Nice</a> (France)</strong>, <strong>Engie Inéo’s and <a class="maplink" data-title="Groupe SNEF">SNEF</a>’s <a class="maplink" data-title="SNEF Smart CCTVs in Marseille">integrated solution in Marseille</a> (France)</strong>, the <strong><a class="maplink" data-title="Center of Urban Supervision (Roubaix)">Center of Urban Supervision in Roubaix</a> (France)</strong>, <strong><a class="maplink" data-title="AI Mars (Potential)">AI Mars</a> (Madrid, in development)</strong><a href="#fn32" class="footnote-ref" id="fnref32" role="doc-noteref"><sup>32</sup></a> or <strong>NEC’s platform in <a class="maplink" data-title="NEC Technology in Lisbon">Lisbon</a> and London</strong>.</p>
<p>The way “Smart/Safe City” solutions work is well exemplified by the <a class="maplink" data-title="Control Room (Venice)">“Control room” deployed in Venice</a>, connected to an urban surveillance network. The system is composed of a central command and control room which aggregates cloud computing systems, together with smart cameras, artificial intelligence systems, antennas and hundreds of sensors distributed on a widespread network. The idea is to monitor what happens in the lagoon city in real time. The scope of the abilities of the centre is wide-ranging. It promises to: manage events and incoming tourist flows, something particularly relevant to a city which aims to implement a visiting fee for tourists; predict and manage weather events in advance, such as the shifting of tides and high water, by defining alternative routes for transit in the city; indicate to the population in real time the routes to avoid traffic and better manage mobility for time optimisation; improve the management of public safety, allowing city agents to intervene in a more timely manner; control and manage water and road traffic, also for sanctioning purposes, through specific video-analysis systems; control the status of parking lots; monitor the environmental and territorial situation; collect and process data and information that allow for the creation of forecasting models and the allocation of resources more efficiently and effectively; and bring to life a physical "Smart Control Room" where law enforcement officers train and learn how to read data as well (LUMI 2020).</p>
</section>
<section id="smartphone-apps" class="level4">
</section>
<section id="crowd-management" class="level4">
<h4>Crowd management</h4>
<p>Integrated solutions generally comprise a set of crowd management features, such as in the case of the systems <strong>in <a class="maplink" data-title="Smart video surveillance in Valenciennes">Valenciennes</a> and <a class="maplink" data-title="Urban Surveillance Center in Marseille">Marseille</a> (France), <a class="maplink" data-title="Mannheim public surveillance">Mannheim</a> (Germany), <a class="maplink" data-title="Control Room (Venice)">Venice</a> (Italy), Amsterdam, <a class="maplink" data-title="Citybeacons Eindhoven">Eindhoven</a> and Den Bosch with the <a class="maplink" data-title="Korte Putstraat (Stopped)">pilot in the Korte Putstraat</a> (using software by <a class="maplink" data-title="CrowdWatch">CrowdWatch</a>, Netherlands).</strong> Such crowd management software generally does not recognise individuals, but rather estimates the number of people in (a part of) the video frame. Sudden movements of groups or changes in density are then flagged for the attention of the security operator (Nishiyama 2018).</p>
</section>
</section>
</section>
<p>- <strong>Live Facial Recognition in Budapest</strong> (Hungary, see detailed case study, CHAPTER 10)</p>
<p>- <strong>Live Facial Recognition <a class="maplink" data-title="Facial Recognition Pilot Project during Carnival (Nice)">pilot project during the Carnival in Nice</a></strong> (France, see detailed case study, CHAPTER 8)</p>
<p>- <strong>Live Facial Recognition <a class="maplink" data-title="Pilot Project Südkreuz Berlin">Pilot Project Südkreuz Berlin</a></strong> (Germany, see detailed case study, CHAPTER 9)</p>
<p>As most of these cases are extensively discussed in the following chapters, we do not comment further on them here.</p>
<p>Additional cases are the <strong><a class="maplink" data-title="Korte Putstraat (Stopped)">Live Facial Recognition pilot during Carnival 2019</a> in 's-Hertogenbosch’s Korte Putstraat</strong> (the Netherlands) and the pilot of <strong><a class="maplink" data-title="SARI Enterprise in Como">Live Facial Recognition in the city of Como</a></strong><a href="#fn33" class="footnote-ref" id="fnref33" role="doc-noteref"><sup>33</sup></a>, recently struck down by the Italian DPA (<a class="maplink" data-title="Garante per la Privacy">Garante per la Privacy</a>). The deployment of facial recognition in <strong><a class="maplink" data-title="Madrid Estacion Sur">Estacion Sur</a></strong> in Madrid (Spain) is also live.</p>
</section>
<section id="deployment-of-rbi-in-commercial-spaces" class="level3">
<h3>Deployment of RBI in commercial spaces</h3>
<h3>Active promotion</h3>
<p>A certain number of actors, both at the national and at the local level, are pushing for the development and extension of remote biometric identification. At the local level, new technological developments meet a growing appetite for smart city initiatives and the ambitions of mayors who strive to develop digital platforms and employ technology-oriented solutions for governance and law enforcement. The intention of the mayor of Nice, Christian Estrosi, to make <strong>Nice a “laboratory” of crime prevention, despite repeated concerns of the French DPA,</strong> is a case in point (for a detailed analysis, see chapter 8 in this report; see also Barelli 2018). Law enforcement agencies across Europe also continue to press ahead with efforts to build <strong>digital and automated infrastructures that benefit tech companies, which promote their facial recognition technologies under the banner of the smart city and innovation tech</strong> (e.g., <a class="maplink" data-title="Huawei">Huawei</a>, <a class="maplink" data-title="NEC">NEC</a>).</p>
<p><strong>At the national level, biometric systems for the purposes of authentication are increasingly deployed for forensic applications</strong> among law-enforcement agencies in the European Union. As we elaborate in Chapter 3, 11 out of the 27 member states of the European Union are already using facial recognition against biometric databases for forensic purposes, and 7 additional countries are expected to acquire such capabilities in the near future. The map of European deployments of Biometric Identification Technologies (see Chapter 3) bears witness to a <strong>broad range of algorithmic processing of security images</strong>, on a spectrum that goes from individual, localised authentication systems, to generalised law enforcement uses of authentication, to Biometric Mass Surveillance.</p>
<p>Several states that have not yet adopted such technologies seem inclined to follow the trend, and to push further. The Belgian Minister of the Interior, Pieter De Crem, for example, recently declared that he was in favour of the use of facial recognition not only for judicial inquiries but also for live facial recognition, a much rarer practice.</p>
<p>Several states that have not yet adopted such technologies seem inclined to follow the trend, and to push further. The former Belgian Minister of the Interior, Pieter De Crem, for example, recently declared that he was in favour of the use of facial recognition not only for judicial inquiries but also for live facial recognition, a much rarer practice.</p>
<section id="the-use-of-facial-recognition-can-mean-increased-efficiency-for-security-services-the-police-are-interested-in-using-this-technology-in-several-of-their-missions.-first-of-all-within-the-framework-of-the-administrative-police-with-the-aim-of-guaranteeing-the-security-of-a-closed-place-accessible-to-the-public-it-would-allow-them-to-immediately-intercept-a-person-who-is-known-in-the-police-databases-and-who-constitutes-a-danger-for-public-security-but-this-technology-can-also-be-used-within-the-framework-of-the-judicial-police-with-the-aim-of-controlling-during-an-investigation-if-the-suspect-was-present-at-the-scene-of-the-crime-at-the-time-when-the-punishable-act-was-committed.-de-halleux-2020" class="level4 Quote">
<blockquote class="Quote">"The use of facial recognition can mean increased efficiency for security services […] The police are interested in using this technology in several of their missions. First of all, within the framework of the administrative police, with the aim of guaranteeing the security of a closed place accessible to the public, it would allow them to immediately intercept a person who is known in the police databases and who constitutes a danger for public security; but this technology can also be used within the framework of the judicial police, with the aim of controlling, during an investigation, if the suspect was present at the scene of the crime at the time when the punishable act was committed". <footer>(De Halleux 2020)</footer></blockquote>
<p>Such outspoken advocates of the use of RBI constitute an important voice, but they do not find an echo in mainstream EU discussions.</p>
@@ -1123,7 +1204,7 @@
</section>
<section id="outright-ban" class="level3">
<h3>Outright Ban</h3>
<p>Finally, a certain number of EU Political Parties, EU and national NGOs have argued that there is no acceptable deployment of RBI, because the danger of Biometric Mass Surveillance is too high. Such actors include organisations such as EDRi, <a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a>, <a class="maplink" data-title="Algorithm Watch">Algorithm Watch</a> or the French Défenseur des Droits<a href="#fn29" class="footnote-ref" id="fnref29" role="doc-noteref"><sup>29</sup></a>.</p>
<p>Finally, a certain number of EU Political Parties, EU and national NGOs have argued that there is no acceptable deployment of RBI, because the danger of Biometric Mass Surveillance is too high. Such actors include organisations such as EDRi, <a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a>, <a class="maplink" data-title="Algorithm Watch">Algorithm Watch</a> or the French Défenseur des Droits<a href="#fn34" class="footnote-ref" id="fnref34" role="doc-noteref"><sup>34</sup></a>.</p>
<p>In the European Parliament, the <strong>European Greens</strong> have most vocally promoted the position of an outright ban, and have gathered support across party lines. In a letter to the European Commission dated 15 April 2021, 40 MEPs from the European Greens, the Party of the European Left, the Party of European Socialists, Renew Europe, a few non-attached MEPs and one member of the far-right party Identity and Democracy expressed their concerns about the EU Commission’s proposal for the AI Regulation, which had been leaked a few days earlier. As they argued:</p>
<section id="people-who-constantly-feel-watched-and-under-surveillance-cannot-freely-and-courageously-stand-up-for-their-rights-and-for-a-just-society.-surveillance-distrust-and-fear-risk-gradually-transforming-our-society-into-one-of-uncritical-consumers-who-believe-they-have-nothing-to-hide-and---in-a-vain-attempt-to-achieve-total-security---are-prepared-to-give-up-their-liberties.-that-is-not-a-society-worth-living-in-breyer-et-al.-2021" class="level4 Quote">
<blockquote class="Quote">People who constantly feel watched and under surveillance cannot freely and courageously stand up for their rights and for a just society. Surveillance, distrust and fear risk gradually transforming our society into one of uncritical consumers who believe they have “nothing to hide” and - in a vain attempt to achieve total security - are prepared to give up their liberties. That is not a society worth living in! <footer>(Breyer et al. 2021)</footer></blockquote>
@@ -1185,13 +1266,13 @@
</ul>
</div> <!-- key points -->
<p>Belgium is, with Spain, one of the few countries in Europe that <strong>has not authorised the use of facial recognition technology</strong>, either for criminal investigations or for mass surveillance (Vazquez 2020). This does not mean, however, that its position is unlikely to change in the very near future. <strong>Law enforcement is indeed strongly advocating its use</strong>, and the current legal obstacles are not likely to hold for very long (Bensalem 2018). The pilot experiment that took place at Zaventem / Brussels International Airport, although aborted, occurred within a national context in which <strong>biometric systems are increasingly used and deployed</strong>.</p>
<p>Belgium will, for example, soon roll out at the national level the new biometric identity card “<strong>eID</strong>”, the Minister of the Interior Annelies Verlinden has recently announced. The identification document, which will rely on the constitution of a broad biometric database and is part of a broader <a class="maplink" data-title="European Union">European Union</a> initiative, is being developed in partnership with security multinational <strong><a class="maplink" data-title="Thales">Thales</a></strong> and has already been trialled with 53.000 citizens (Prins 2021; Thales Group 2020).<a href="#fn30" class="footnote-ref" id="fnref30" role="doc-noteref"><sup>30</sup></a></p>
<p>Municipalities in different parts of the country are experimenting with <strong>Automated Number Plate Recognition (ANPR) technology</strong>. A smaller number have started deploying “<strong>smart CCTV</strong>” cameras, which fall just short of using facial recognition technology. The city of Kortrijk has for example deployed “<strong>body recognition</strong>” technology, which uses the walking style or clothing of individuals to track them across the city’s CCTV network. Facial recognition is possible with these systems, but has not yet been activated, <strong>pending legal authorisation to do so</strong>. In the city of Roeselare, “smart cameras” have been installed in one of the shopping streets. Deployed by telecom operator Citymesh, they could provide facial recognition services, but are currently used to count and estimate crowds, data that is shared with the police (van Brakel 2020). All the emerging initiatives of remote biometric identification are, however, pending a reversal of the decision to halt <a class="maplink" data-title="Facial Recognition in Brussels Airport (Stopped)">the experiment at Zaventem Brussels International Airport</a>.</p>
<p>Belgium will, for example, soon roll out at the national level the new biometric identity card “<strong>eID</strong>”, the Minister of the Interior Annelies Verlinden has recently announced. The identification document, which will rely on the constitution of a broad biometric database and is part of a broader <a class="maplink" data-title="European Union">European Union</a> initiative, is being developed in partnership with security multinational <strong><a class="maplink" data-title="Thales">Thales</a></strong> and has already been trialled with 53.000 citizens (Prins 2021; Thales Group 2020).<a href="#fn35" class="footnote-ref" id="fnref35" role="doc-noteref"><sup>35</sup></a></p>
<p>Municipalities in different parts of the country are experimenting with <strong>Automated Number Plate Recognition (ANPR) technology</strong>. A smaller number have started deploying “<strong>smart CCTV</strong>” cameras, which fall just short of using facial recognition technology. The city of Kortrijk has for example <a class="maplink" data-title="Monitoring Kortrijk">deployed</a> “<strong>body recognition</strong>” technology, which uses the walking style or clothing of individuals to track them across the city’s CCTV network<a href="#fn36" class="footnote-ref" id="fnref36" role="doc-noteref"><sup>36</sup></a>. Facial recognition is possible with these systems, but has not yet been activated, <strong>pending legal authorisation to do so</strong>. In the city of <a class="maplink" data-title="Roeselare Municipality">Roeselare</a>, “smart cameras” have been installed in one of the shopping streets. Deployed by telecom operator Citymesh, they could provide facial recognition services, but are currently used to count and estimate crowds, data that is shared with the police (van Brakel 2020). All the emerging initiatives of remote biometric identification are, however, pending a reversal of the decision to halt <a class="maplink" data-title="Facial Recognition in Brussels Airport (Stopped)">the experiment at Zaventem Brussels International Airport</a>.</p>
<section id="the-zaventem-pilot-in-the-context-of-face-recognition-technology-in-belgium" class="level2">
<h2>The Zaventem pilot in the context of Face Recognition Technology in Belgium</h2>
<p>The use of <strong>facial recognition technology</strong> at the Brussels International Airport was announced on 10 July 2019 in the Flemish weekly <em>Knack</em> by General Commissioner of the Federal Police Marc De Mesmaeker (Lippens and Vandersmissen 2019). There is currently no publicly available information as to who provided the technical system. De Mesmaeker explained that an agreement had been reached with the company managing the airport and with the labour unions, and thus that the technology was already in use (Organe de Controle de l'Information Policière 2019, 3).</p>
<p>As part of the justification for the deployment of FRT in Zaventem, De Mesmaeker made a comparison with <strong>ANPR-enabled cameras</strong>, arguing that “They have already helped to solve investigations quickly, (…). Citizens understand this and have learned to live with their presence, but privacy remains a right”. (7sur7 2019)</p>
<p>The <strong>Belgian Supervisory Body for Police Information (COC)</strong><a href="#fn31" class="footnote-ref" id="fnref31" role="doc-noteref"><sup>31</sup></a>, in its advisory document, explained that it had no prior knowledge of the deployment and learned about the existence of the facial recognition systems through the interview of De Mesmaeker in the <em>Knack</em> magazine (Organe de Controle de l'Information Policière 2019, 3). On 10 July 2019, the COC thus invited the General Commissioner to communicate all the details of the deployment of this technology in the Brussels International Airport. On 18 July 2019, COC received a summary of the system’s main components. On 9 August 2019, it subsequently visited the premises of the <a class="maplink" data-title="Facial Recognition in Brussels Airport (Stopped)">federal police deployment in Zaventem airport</a> (Organe de Controle de l'Information Policière 2019, 3).</p>
<p>The <strong>Belgian Supervisory Body for Police Information (COC)</strong><a href="#fn37" class="footnote-ref" id="fnref37" role="doc-noteref"><sup>37</sup></a>, in its advisory document, explained that it had no prior knowledge of the deployment and learned about the existence of the facial recognition systems through the interview of De Mesmaeker in the <em>Knack</em> magazine (Organe de Controle de l'Information Policière 2019, 3). On 10 July 2019, the COC thus invited the General Commissioner to communicate all the details of the deployment of this technology in the Brussels International Airport. On 18 July 2019, COC received a summary of the system’s main components. On 9 August 2019, it subsequently visited the premises of the <a class="maplink" data-title="Facial Recognition in Brussels Airport (Stopped)">federal police deployment in Zaventem airport</a> (Organe de Controle de l'Information Policière 2019, 3).</p>
<p>We know some technical details about the system through the public information shared by the COC. In early 2017, Brussels Airport had acquired <strong>four cameras connected to facial recognition software for use by the airport police</strong> (Police Aéronautique, LPA) (Farge 2020, 15; Organe de Controle de l'Information Policière 2019, 3). The system works in two steps.</p>
<p>When provided with video feeds from the four cameras, the software first creates <strong>snapshots</strong>, generating individual records of the faces that appear in the frame. In a second step, these snapshots on record are compared with, and potentially matched to, previously established “<strong>blacklists</strong>” created by the police itself (the reference dataset is thus not external to this particular deployment) (Organe de Controle de l'Information Policière 2019, 3).</p>
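<p>Based solely on the COC’s description, the two-step pipeline can be summarised in the sketch below. This is an illustrative reconstruction, not the software deployed at Zaventem: it assumes a face detector that returns a numeric descriptor (embedding) per detected face, and a matching step that compares each snapshot against a police-curated blacklist using a similarity threshold. All names, data structures and thresholds are our own assumptions.</p>
<pre><code># Illustrative reconstruction of the two-step pipeline described by the COC:
# step 1 creates snapshot records from the camera feeds, step 2 matches them
# against a police-maintained blacklist. Not the actual Zaventem software.
from dataclasses import dataclass
from typing import List

@dataclass
class Snapshot:
    camera_id: int
    embedding: List[float]  # numeric face descriptor produced by an (assumed) detector

def cosine_similarity(a, b):
    dot = sum(x * y for x, y in zip(a, b))
    norm_a = sum(x * x for x in a) ** 0.5
    norm_b = sum(y * y for y in b) ** 0.5
    return dot / (norm_a * norm_b) if norm_a and norm_b else 0.0

def step1_create_snapshots(frames, detect_faces):
    """Step 1: build an individual record for every face found in the incoming frames."""
    snapshots = []
    for camera_id, frame in frames:
        for embedding in detect_faces(frame):  # the detector itself is not specified
            snapshots.append(Snapshot(camera_id, embedding))
    return snapshots

def step2_match_blacklist(snapshots, blacklist, threshold=0.8):
    """Step 2: compare each snapshot against the pre-established blacklist."""
    hits = []
    for snap in snapshots:
        for person_id, reference in blacklist.items():
            if cosine_similarity(snap.embedding, reference) > threshold:
                hits.append((snap.camera_id, person_id))
    return hits
</code></pre>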
<p>The system did not, however, live up to its promise and generated a high number of <strong>false positives</strong>. Many features such as skin colour, glasses, moustaches, and beards led to false matches. The system was thus partially disconnected in March 2017, and at the time of the COC’s visit it was no longer fully in use (Organe de Controle de l'Information Policière 2019, 3). Yet while the second step (matching the video feeds against pre-established blacklists of faces) had been de-activated, the first function of creating a biometric record of the video feeds was still in place (Organe de Controle de l'Information Policière 2019, 3).</p>
@@ -1227,7 +1308,7 @@
<section id="effects-of-the-technologies" class="level2">
<h2>Effects of the technologies</h2>
<p>While the city of Brussels is the location of much EU-level activism, this has not yet translated into an equal mobilisation at the national level – perhaps because the currently very restrictive legislative position on the matter and the institutional checks and balances described in this chapter already amount to a de facto ban on the use of such technologies.</p>
<p>The French campaign Technopolice has extended to Belgium and is raising awareness through a diversified strategy based on public forums, the mapping of technologies and the organisation of events. The NGO <strong>Ligue des Droits Humains</strong> is a member of the <strong>Reclaim Your Face</strong> campaign, along with 40 other organisations<a href="#fn32" class="footnote-ref" id="fnref32" role="doc-noteref"><sup>32</sup></a>, yet it has not been as active as partner organisations in neighbouring France or Germany.</p>
<p>The French campaign Technopolice has extended to Belgium and is raising awareness through a diversified strategy based on public forums, the mapping of technologies and the organisation of events. The NGO <strong>Ligue des Droits Humains</strong> is a member of the <strong>Reclaim Your Face</strong> campaign, along with 40 other organisations<a href="#fn38" class="footnote-ref" id="fnref38" role="doc-noteref"><sup>38</sup></a>, yet it has not been as active as partner organisations in neighbouring France or Germany.</p>
</section>
</section>
<section id="the-burglary-free-neighbourhood-in-rotterdam-netherlands" class="level1 case" data-title="Data-lab Burglary-free Neighbourhood">
@@ -1245,14 +1326,14 @@
<p>In October 2019, the Carlo Collodihof, a courtyard in the Rotterdam neighbourhood Lombardijen, was equipped with a new kind of streetlamp. The twelve new luminaires did not just illuminate the streets; they were <strong>fitted with cameras, microphones, speakers, and a computer which was connected to the internet</strong>. They are part of the so-called <strong><a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Fieldlab Burglary Free Neighbourhood</a></strong>: an experiment in public space with technologies for computer sensing and data processing, aimed at preventing break-ins, robberies and aggression, increasing the chances of catching perpetrators, and increasing the sense of safety of the inhabitants of the neighbourhood (Redactie Inbraakvrije Wijk 2019; Kokkeler et al. 2020b). The practical nature of a Fieldlab provides a way to examine concretely how the various technologies come together, and how they fit in with existing infrastructures and regulations.</p>
<section id="detection-and-decision-making-in-the-burglary-free-neighbourhood-fieldlab" class="level2">
<h2>Detection and decision-making in the “Burglary free neighbourhood” Fieldlab</h2>
<p>The national programme Burglary Free Neighbourhood was initiated and funded by the <strong>Dutch Ministry of Justice and Security</strong>. It is led by <strong><a class="maplink" data-title="Dutch Institute for Technology Safety and Security (DITSS)">DITSS</a></strong> (Dutch Institute for Technology, Safety & Security), a non-profit organisation that has been involved in earlier computer sensing projects in the Netherlands – for example in <strong>Stratumseind, Eindhoven</strong> (The Hague Security Delta 2021). Other parties involved include the municipality of Rotterdam, the police – both at the local and the national level – the Public Prosecutor’s Office and insurance company <a class="maplink" data-title="Interpolis">Interpolis</a>. Part of the research is carried out by the University of Twente, <a class="maplink" data-title="Avans Hogeschool">Avans Hogeschool</a>, the Network Institute of the Vrije Universiteit Amsterdam and the Max Planck Institute for Foreign and International Criminal Law (Freiburg, Germany).</p>
<p>The national programme Burglary Free Neighbourhood was initiated and funded by the <strong>Dutch Ministry of Justice and Security</strong>. It is led by <strong><a class="maplink" data-title="Dutch Institute for Technology Safety and Security (DITSS)">DITSS</a></strong> (Dutch Institute for Technology, Safety & Security), a non-profit organisation that has been involved in earlier computer sensing projects in the Netherlands – for example in <strong><a class="maplink" data-title="Living Lab Stratumseind">Stratumseind</a>, Eindhoven</strong> (The Hague Security Delta 2021). Other parties involved include the <a class="maplink" data-title="Rotterdam Municipality">municipality of Rotterdam</a>, the <a class="maplink" data-title="Rotterdam Municipality">police</a> – both at the local and the national level – the Public Prosecutor’s Office and insurance company <a class="maplink" data-title="Interpolis">Interpolis</a>. Part of the research is carried out by the <a class="maplink" data-title="Rotterdam Municipality">University of Twente</a>, <a class="maplink" data-title="Avans Hogeschool">Avans Hogeschool</a>, the Network Institute of the Vrije Universiteit Amsterdam and the <a class="maplink" data-title="Max Planck Institute for the Study of Crime, Security and Law">Max Planck Institute for Foreign and International Criminal Law</a> (Freiburg, Germany).</p>
<p><img src="images/media/image2.jpg" style="width:6.25564in;height:3.51788in" alt="Fieldlab in Rotterdam Lombardijen" /></p>
<p>Figure 2. Fieldlab in Rotterdam Lombardijen</p>
<p>From a technological perspective, the project has two aims: to <strong>detect suspicious behaviour</strong> and, in turn, <strong>to influence the behaviour of the suspect</strong>. As such, project manager Guido Delver, who agreed to be interviewed for this report, describes the project as being primarily a behavioural experiment (Delver 2021). The twelve luminaires are provided by <a class="maplink" data-title="Sustainder">Sustainder</a> (their Anne series (Sustainder 2021)). The processing of the video and audio is done on the spot by a computer embedded in the luminaire, using software from the Eindhoven-based company <strong><a class="maplink" data-title="ViNotion">ViNotion</a></strong> (ViNotion 2020). This software reads the video frames from the camera and estimates the presence and position of people – thereby mapping the coordinates of the video frame to coordinates in physical space. It then determines the direction they are facing. <strong>Only these values – position and direction – and no other characteristics, nor any images,</strong> are sent over the internet to a datacentre somewhere in the Netherlands, where the position data is stored for further processing (Delver 2021).</p>
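<p>The mapping from video-frame coordinates to coordinates in physical space described here is commonly achieved with a per-camera calibration (a planar homography). The sketch below illustrates that general idea under our own assumptions: the calibration matrix, function names and message format are hypothetical and do not describe ViNotion’s actual implementation.</p>
<pre><code># Illustrative sketch: project a detected person's image position onto the
# ground plane with a calibrated homography, and keep only position and
# facing direction. The numbers and message format are assumptions.
import numpy as np

# Hypothetical 3x3 homography obtained from a per-camera calibration step.
H = np.array([
    [0.02, 0.0,   -5.0],
    [0.0,  0.03,  -8.0],
    [0.0,  0.001,  1.0],
])

def image_to_ground(u, v):
    """Map pixel coordinates (u, v) to metric ground-plane coordinates (x, y)."""
    p = H @ np.array([u, v, 1.0])
    return float(p[0] / p[2]), float(p[1] / p[2])

def make_message(u, v, heading_degrees):
    """Only position and facing direction leave the luminaire; no imagery."""
    x, y = image_to_ground(u, v)
    return {"x": round(x, 2), "y": round(y, 2), "heading": heading_degrees}

if __name__ == "__main__":
    print(make_message(640, 480, heading_degrees=135))
</code></pre>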
<p>Currently, <strong>there is no immediate processing of the position data</strong> to classify behaviour as being suspicious or not. The proposed pipeline consists of two stages: first, an unsupervised machine learning algorithm for <strong>anomaly (outlier) detection processes the gathered trajectories</strong>, in order to single out trajectories that statistically deviate from the norm. For example, both children playing and burglars making a scouting round through the neighbourhood can potentially produce anomalous trajectories. Secondly, <strong>these anomalous trajectories are judged as suspicious or not by a computer model</strong> that was trained with human supervision. In the Fieldlab’s first data collection experiment, 100.000 trajectories were collected, totalling 20.000.000 data points (Hamada 2020). It turned out, however, that this was still too little data to draw any conclusions about the viability of the approach; the “big data” was still too small (Delver 2021).</p>
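<p>The first, unsupervised stage of this proposed pipeline could in principle be prototyped as in the sketch below: each trajectory is reduced to a handful of summary features, and an off-the-shelf outlier detector marks the statistically unusual ones, which would then be passed to the supervised second stage. The choice of features and the use of scikit-learn’s IsolationForest are our own illustrative assumptions; the Fieldlab’s actual models have not been published.</p>
<pre><code># Sketch of stage one: unsupervised anomaly (outlier) detection on trajectories.
# Feature engineering and model choice are illustrative assumptions only.
import numpy as np
from sklearn.ensemble import IsolationForest

def trajectory_features(track):
    """Summarise one trajectory (a sequence of at least two (x, y) positions)."""
    track = np.asarray(track, dtype=float)
    steps = np.diff(track, axis=0)
    step_lengths = np.linalg.norm(steps, axis=1)
    path_length = step_lengths.sum()
    displacement = np.linalg.norm(track[-1] - track[0])
    straightness = displacement / path_length if path_length else 1.0
    return [path_length, displacement, straightness, step_lengths.std()]

def find_anomalous(tracks, contamination=0.05):
    """Return indices of trajectories that statistically deviate from the rest."""
    X = np.array([trajectory_features(t) for t in tracks])
    labels = IsolationForest(contamination=contamination, random_state=0).fit_predict(X)
    return [i for i, label in enumerate(labels) if label == -1]
</code></pre>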
<p>Another input for detecting suspicious situations is the <strong>microphone with which some of the streetlamps are equipped</strong>. By recording two frequencies of sound, the system can categorise sounds as coming from, for example, a conversation, shouting, a barking dog, or breaking glass. The two frequencies recorded provide too little information to distinguish the words of a conversation (Delver 2021).</p>
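<p>To give a rough idea of how sounds might be categorised from so little spectral information, the sketch below computes the signal energy in a low and a high frequency band and uses their ratio as a crude descriptor. The band limits, thresholds and labels are purely illustrative assumptions; this is a generic signal-processing example, not documentation of the Fieldlab’s audio classification.</p>
<pre><code># Generic illustration: energy in two frequency bands as a crude sound descriptor.
# Band limits, thresholds and labels are assumptions; the Fieldlab's classifier is not public.
import numpy as np

def band_energies(samples, sample_rate, low_band=(100, 1000), high_band=(2000, 8000)):
    """Return the spectral energy in a low and a high frequency band."""
    spectrum = np.abs(np.fft.rfft(samples)) ** 2
    freqs = np.fft.rfftfreq(len(samples), d=1.0 / sample_rate)

    def energy(band):
        lo, hi = band
        mask = (freqs >= lo) & (hi >= freqs)
        return float(spectrum[mask].sum())

    return energy(low_band), energy(high_band)

def coarse_label(samples, sample_rate):
    """Very rough categorisation based only on the two band energies."""
    low, high = band_energies(samples, sample_rate)
    ratio = high / (low + 1e-9)
    if ratio > 5.0:
        return "impulsive or high-pitched sound (e.g. breaking glass)"
    if ratio > 1.0:
        return "shouting or barking"
    return "speech-like, low-frequency sound"
</code></pre>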
<p>Aside from experimenting with the automated detection of suspicious behaviour, the Fieldlab experiments with various ways in which detected situations can be responded to. Project manager Guido Delver notes that the aim is not <em>per se</em> to involve the police. Instead, the suspect should be deterred before any crime is committed (Delver 2021). Various strategies are laid out: the yet-to-be-autonomous system can <strong>voice warnings through the speakers</strong> embedded in the streetlamps. Or, in line with the work of DITSS in Eindhoven’s Stratumseind street, the <strong>light intensity or colour of the streetlamps can be changed</strong> (Intelligent Lighting Institute, n.d.). Both strategies are aimed at signalling to the subjects that their behaviour has been noticed, which generally suffices to make burglars break off their scouting round. Another option under consideration is to send a signal to residents living nearby.</p>
<p>The process of data gathering in the Burglary Free Neighbourhood is quite similar to that of technologies deployed for anonymous people counting. One such application has been developed by <strong><a class="maplink" data-title="Numina">Numina</a></strong> and is deployed in the Dutch city of Nijmegen: