<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml" lang="" xml:lang="">
<head>
<meta charset="utf-8" />
<meta name="generator" content="pandoc" />
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" />
<title>Biometric and Behavioural Mass Surveillance in EU Member States</title>
<script>
(function(document, history, location) {
var HISTORY_SUPPORT = !!(history && history.pushState);
var anchorScrolls = {
ANCHOR_REGEX: /^#[^ ]+$/,
OFFSET_HEIGHT_PX: 100,
/**
* Establish events, and fix initial scroll position if a hash is provided.
*/
init: function() {
this.scrollToCurrent();
window.addEventListener('hashchange', this.scrollToCurrent.bind(this));
document.body.addEventListener('click', this.delegateAnchors.bind(this));
},
/**
* Return the offset amount to deduct from the normal scroll position.
* Modify as appropriate to allow for dynamic calculations
*/
getFixedOffset: function() {
return this.OFFSET_HEIGHT_PX;
},
/**
* If the provided href is an anchor which resolves to an element on the
* page, scroll to it.
* @param {String} href
* @return {Boolean} - Was the href an anchor.
*/
scrollIfAnchor: function(href, pushToHistory) {
var match, rect, anchorOffset;
if(!this.ANCHOR_REGEX.test(href)) {
return false;
}
match = document.getElementById(href.slice(1));
if(match) {
rect = match.getBoundingClientRect();
anchorOffset = window.pageYOffset + rect.top - this.getFixedOffset();
window.scrollTo(window.pageXOffset, anchorOffset);
// Add the state to history as-per normal anchor links
if(HISTORY_SUPPORT && pushToHistory) {
history.pushState({}, document.title, location.pathname + href);
}
}
return !!match;
},
/**
* Attempt to scroll to the current location's hash.
*/
scrollToCurrent: function() {
this.scrollIfAnchor(window.location.hash);
},
/**
* If the click event's target was an anchor, fix the scroll position.
*/
delegateAnchors: function(e) {
var elem = e.target;
if(
elem.nodeName === 'A' &&
this.scrollIfAnchor(elem.getAttribute('href'), true)
) {
e.preventDefault();
}
}
};
window.addEventListener(
'DOMContentLoaded', anchorScrolls.init.bind(anchorScrolls)
);
})(window.document, window.history, window.location);
</script>
<link rel="stylesheet" media="screen" href="https://fontlibrary.org//face/cello-sans" type="text/css"/>
2021-10-07 13:32:13 +02:00
<link rel="stylesheet" href="report.css" />
2021-10-07 08:43:56 +02:00
<style type="text/css">
2021-10-07 11:07:36 +02:00
2021-10-07 08:43:56 +02:00
code, samp {
font-size: 1.2em;
}
</style>
2021-10-07 13:32:13 +02:00
<script>
window.addEventListener('DOMContentLoaded', () => {
const frameEl = document.getElementById('map');
frameEl.addEventListener('load', function () {
// Resolve a data-title attribute to a node id in the embedded map,
// or null when no title is set or it is explicitly 'none'.
const idForTitle = (title) =>
typeof title == 'undefined' || title == 'none' ? null : frameEl.contentWindow.getIdForTitle(title);
const findNode = (id) => frameEl.contentWindow.mapGraph.graph.nodes.find(n => n.id == id);
// As each top-level section scrolls into view, select its node in the
// map, or reset the map when the section has no associated node.
const caseEls = document.getElementsByClassName('level1');
for (let caseEl of caseEls) {
const toSelect = idForTitle(caseEl.dataset.title);
let intersectionObserver = new IntersectionObserver(function (entries) {
// If intersectionRatio is 0, the section is out of view
// and we do not need to do anything.
if (entries[0].intersectionRatio <= 0) {
return;
}
if (toSelect === null) {
frameEl.contentWindow.mapGraph.triggerReset();
} else {
frameEl.contentWindow.mapGraph.triggerSelect(toSelect);
}
});
// start observing
intersectionObserver.observe(caseEl);
}
// Inline links marked with the 'maplink' class select their node on
// click and highlight it on hover.
const linkEls = document.getElementsByClassName('maplink');
for (let linkEl of linkEls) {
linkEl.addEventListener('click', () => {
const toSelect = idForTitle(linkEl.dataset.title);
if (toSelect === null) {
frameEl.contentWindow.mapGraph.deselectNode();
frameEl.contentWindow.mapGraph.resetZoom();
} else {
frameEl.contentWindow.mapGraph.selectNode(findNode(toSelect));
}
});
linkEl.addEventListener('mouseover', () => {
const toSelect = idForTitle(linkEl.dataset.title);
if (toSelect) {
frameEl.contentWindow.mapGraph.hoverNode(false, findNode(toSelect));
}
});
linkEl.addEventListener('mouseout', () => {
const toSelect = idForTitle(linkEl.dataset.title);
if (toSelect) {
frameEl.contentWindow.mapGraph.endHoverNode(findNode(toSelect));
}
});
}
});
});
</script>
</head>
<body>
<iframe id='map' src="../index.html"></iframe>
<header id="title-block-header">
<nav id="TOC">
<a href="#">Biometric and Behavioural Mass Surveillance in EU Member States</a>
<ul>
<li><a href="#">Navigation</a>
<ul>
<li><a href="#table-of-figures">TABLE OF FIGURES</a></li>
<li><a href="#authors">AUTHORS</a></li>
<li><a href="#acronyms">ACRONYMS</a></li>
<li><a href="#executive-summary">EXECUTIVE SUMMARY</a></li>
<li class="space"><a href="#introduction">Introduction</a></li>
<li class="space"><a href="#part-i-overview-of-european-practices">PART I: OVERVIEW OF EUROPEAN PRACTICES</a></li>
<li><a href="#technical-overview">Technical overview</a></li>
<li><a href="#overview-of-deployments-in-europe">Overview of deployments in Europe</a></li>
<li><a href="#legal-bases">Legal bases</a></li>
<li><a href="#main-political-issues-and-debates">Main political issues and debates</a></li>
<li class="space"><a href="#part-ii-case-studies">PART II: CASE STUDIES</a></li>
<li><a href="#facial-recognition-cameras-at-brussels-international-airport-belgium">Facial Recognition cameras at Brussels International Airport (Belgium)</a></li>
<li><a href="#the-burglary-free-neighbourhood-in-rotterdam-netherlands">The Burglary Free Neighbourhood in Rotterdam (Netherlands)</a></li>
<li><a href="#the-safe-city-projects-in-nice-france">The Safe City Projects in Nice (France)</a></li>
<li><a href="#facial-recognition-in-hamburg-mannheim-berlin-germany">Facial Recognition in Hamburg, Mannheim &amp; Berlin (Germany)</a></li>
<li><a href="#the-dragonfly-project-hungary">The Dragonfly project (Hungary)</a></li>
<li class="space"><a href="#recommendations">Recommendations</a></li>
<li class="space"><a href="#references">REFERENCES</a></li>
<li><a href="#annex-cases">ANNEX: CASES</a></li>
</ul>
</li>
</ul>
</nav>
</header>
<main>
<h1 class="title">Biometric and Behavioural Mass Surveillance in EU Member States</h1>
<hr>
<article>
<p>Report for the Greens/EFA in the European Parliament</p>
<p>01/10/2021</p>
<p><a href="#ragazzi">Francesco Ragazzi</a></p>
<p><a href="#mendoskusmonmaz">Elif Mendos Kuskonmaz</a></p>
<p><a href="#plajas">Ildikó Plájás</a></p>
<p><a href="#vandeven">Ruben van de Ven</a></p>
<p><a href="#wagner">Ben Wagner</a></p>
<section id="table-of-figures" class="level1 Title">
<h1 class="Title">TABLE OF FIGURES</h1>
<p>Figure 1. EU Countries' use of FRT for forensic applications</p>
<p>Figure 2. Fieldlab in Rotterdam Lombardijen</p>
<p>Figure 3. The one-and-a-half-meter monitor developed by the municipality of Amsterdam</p>
<p>Figure 4. Growth in police requests to INPOL system</p>
<p>Figure 5. Snapshots from the video “Home Quarantine System Short Presentation” by Asura Technologies</p>
</section>
<section id="authors" class="level1 Title">
<h1 class="Title"> AUTHORS</h1>
2021-10-07 11:07:36 +02:00
<p id="ragazzi"><strong>Dr. Francesco Ragazzi (scientific coordinator)</strong> is an associate professor in International Relations at Leiden University (Netherlands), an associated scholar at the Centre dEtude sur les Conflits, Liberté et Sécurité (France). He holds a PhD in Political Science from Sciences Po Paris (France) and Northwestern University (USA). His research interests include radicalisation, terrorism, and mass surveillance. His current research project, <em>Security Vision</em>, funded by a European Research Council Consolidator Grant analyses the politics of computer vision in the field of security. His work has been published in numerous peer-reviewed journals and edited volumes. He serves on the editorial board of the journals <em>International Political Sociology</em>, <em>Citizenship Studies</em> and <em>Cultures &amp; Conflits</em>. He has been consulted as an expert on issues of security by the European Parliament, for whom he has co-authored several reports, the Council of Europe and the French Senate.</p>
<p id="mendoskusmonmaz"><strong>Dr. Elif Mendos Kuskonmaz</strong> is a lecturer at the School of Law at the University of Portsmouth. She holds a Masters Degree in Public Law from Istanbul University, and an LLM in Public International Law and a PhD from Queen Mary University of London. She researches on surveillance measures and the nexus with the right to privacy and data protection. Elif is also a registered lawyer with the Istanbul Bar Association.</p>
<p id="plajas"><strong>Ildikó Z Plájás</strong> is a post-doctoral researcher at the Institute of Political Science, Leiden University. She has studied anthropology and cultural studies in Romania and Hungary, later graduating in Visual Ethnography at Leiden University, the Netherlands. She is currently completing her PhD at the University of Amsterdam. Her research examines how visual technologies in governance enact certain groups of people as “racial others” in Europe.</p>
<p id="vandeven"><strong>Ruben van de Ven</strong> is a PhD candidate in Political Science at the Institute of Political Science, Leiden University. His PhD project studies the ethical and political implications of surveillance algorithms that order human gestures. Since graduating from the Master in Media Design programme at the Piet Zwart Institute, he has researched algorithmic politics through media art, computer programming and scholarly work. He has focused on how the human individual becomes both the subject of and input into machine learning processes. Earlier artistic work on the quantification of emotions examined the transformation of humanistic concepts as they are digitised. His work has been presented at both art exhibitions and academic conferences.</p>
<p id="wagner"><strong>Dr. Ben Wagner</strong> is an assistant professor at the Faculty of Technology, Policy and Management at TU Delft, where his research focuses on technology policy, human rights and accountable information systems. He is Associate Faculty at the Complexity Science Hub Vienna and a visiting researcher at the Human Centred Computing Group, University of Oxford. He previously worked at WU Vienna, TU-Berlin, the University of Pennsylvania and European University Viadrina. He holds a PhD in Political and Social Sciences from the European University Institute in Florence.</p>
2021-10-07 08:43:56 +02:00
</section>
<section id="acronyms" class="level1 Title">
<h1 class="Title">ACRONYMS</h1>
<table>
<tbody>
<tr class="even">
<th>ABIS</th>
<td>Automated Biometric Identification Systems</td>
</tr>
<tr class="odd">
<th>ACLU</th>
<td>American Civil Liberties Union</td>
</tr>
<tr class="even">
<th>ADM</th>
<td>Automated Decision-Making (System)</td>
</tr>
<tr class="odd">
<th>AFIS</th>
<td>Automated Fingerprint Identification System</td>
</tr>
<tr class="even">
<th>AI</th>
<td>Artificial Intelligence</td>
</tr>
<tr class="odd">
<th>ANPR</th>
<td>Automated Number Plate Recognition</td>
</tr>
<tr class="even">
<th>API</th>
<td>Application Programming Interface</td>
</tr>
<tr class="odd">
<th>AWS</th>
<td>Amazon Web Services</td>
</tr>
<tr class="even">
<th>BDAS</th>
<td>Biometric Data Processing System</td>
</tr>
<tr class="odd">
<th>BDSG</th>
<td>Federal Data Protection Act (Germany)</td>
</tr>
<tr class="even">
<th>BKA</th>
<td>Federal Criminal Police Office (Germany)</td>
</tr>
<tr class="odd">
<th>BKK</th>
<td>Centre for Budapest Transport (Hungary)</td>
</tr>
<tr class="even">
<th>BPI</th>
<td><a class="maplink" data-title="Bpifrance">Public Investment Bank</a> (France)</td>
</tr>
<tr class="odd">
<th>BPOL</th>
<td><a class="maplink" data-title="German Federal Police (Bundespolizei)">German Federal Police</a></td>
</tr>
<tr class="even">
<th><a class="maplink" data-title="CATCH">CATCH</a></th>
<td>Central Automatic TeChnology for Recognition of Persons (Netherlands)</td>
</tr>
<tr class="odd">
<th>CBIS</th>
<td>Central Biometric Information System (Czechia)</td>
</tr>
<tr class="even">
<th>CCTV</th>
<td>Closed Circuit Television</td>
</tr>
<tr class="odd">
<th>CGT</th>
<td>General Labour Confederation (France)</td>
</tr>
<tr class="even">
<th>CJEU</th>
<td>Court of Justice of the European Union (EU)</td>
</tr>
<tr class="odd">
<th><a class="maplink" data-title="CNIL">CNIL</a></th>
<td>National Commission for Informatics and Freedoms (France)</td>
</tr>
<tr class="even">
<th>COC</th>
<td>Supervisory Body for Police Information (Belgium)</td>
</tr>
<tr class="odd">
<th>CoE</th>
<td>Council of Europe</td>
</tr>
<tr class="even">
<th>COCO</th>
<td>Common Objects in Context (Dataset)</td>
</tr>
<tr class="odd">
<th>COVID</th>
<td>Coronavirus Disease</td>
</tr>
<tr class="even">
<th>CSU</th>
<td>Centre for Urban Supervision (France)</td>
</tr>
<tr class="odd">
<th>DEP</th>
<td>Digital Europe Programme</td>
</tr>
<tr class="even">
<th>DITSS</th>
<td><a class="maplink" data-title="Dutch Institute for Technology Safety and Security (DITSS)">Dutch Institute for Technology, Safety &amp; Security</a></td>
</tr>
<tr class="odd">
<th>DPA</th>
<td>Data Protection Authority</td>
</tr>
<tr class="even">
<th>EC</th>
<td>European Commission (EU)</td>
</tr>
<tr class="odd">
<th>ECtHR</th>
<td>European Court of Human Rights</td>
</tr>
<tr class="even">
<th>EDE</th>
<td>Criminal identification database (Austria)</td>
</tr>
<tr class="odd">
<th>EDPB</th>
<td>European Data Protection Board (EU)</td>
</tr>
<tr class="even">
<th>EDPS</th>
<td>European Data Protection Supervisor (EU)</td>
</tr>
<tr class="odd">
<th>EDS</th>
<td>European Data Strategy</td>
</tr>
<tr class="even">
<th>EEA</th>
<td>European Economic Area</td>
</tr>
<tr class="odd">
<th>EPP</th>
<td>European People's Party</td>
</tr>
<tr class="even">
<th>EU</th>
<td><a class="maplink" data-title="European Union">European Union</a></td>
</tr>
<tr class="odd">
<th>FRA</th>
<td>Fundamental Rights Agency (EU)</td>
</tr>
<tr class="even">
<th>FRT</th>
<td>Facial Recognition Technology</td>
</tr>
<tr class="odd">
<th>FRVT</th>
<td>Face Recognition Vendor Test</td>
</tr>
<tr class="even">
<th>GDPR</th>
<td>General Data Protection Regulation (EU)</td>
</tr>
<tr class="odd">
<th>GMO</th>
<td>Genetically Modified Organism</td>
</tr>
<tr class="even">
<th>HCLU</th>
<td><a class="maplink" data-title="HCLU">Hungarian Civil Liberties Union (Hungary)</a></td>
</tr>
<tr class="odd">
<th>HD</th>
<td>High Definition</td>
</tr>
<tr class="even">
<th>HDR</th>
<td>Habitoscopic Data Register</td>
</tr>
<tr class="odd">
<th>HKR</th>
<td><a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App (Hungary)</a></td>
</tr>
<tr class="even">
<th>IARPA</th>
<td>Intelligence Advanced Research Projects Agency (USA)</td>
</tr>
<tr class="odd">
<th>ID</th>
<td>Identification</td>
</tr>
<tr class="even">
<th>IFRS</th>
<td><a class="maplink" data-title="IFRS (Interpol)">Interpol Facial Recognition System</a></td>
</tr>
<tr class="odd">
<th>IKSZR</th>
<td>Integrated Traffic Management and Control System (Hungary)</td>
</tr>
<tr class="even">
<th>INCLO</th>
<td>International Network of Civil Liberties Organisations</td>
</tr>
<tr class="odd">
<th>INPOL</th>
<td><a class="maplink" data-title="German central criminal information system INPOL">Criminal Case Management System (Germany)</a></td>
</tr>
<tr class="even">
<th>KAK</th>
<td>Governmental Data Centre (Hungary)</td>
</tr>
<tr class="odd">
<th>KDNP</th>
<td>Christian Democratic People's Party (Hungary)</td>
</tr>
<tr class="even">
<th>LED</th>
<td>Law Enforcement Directive (EU)</td>
</tr>
<tr class="odd">
<th>LFP</th>
<td>Law on the Function of Police (Belgium)</td>
</tr>
<tr class="even">
<th>LGBTQ</th>
<td>Lesbian, Gay, Bisexual, Transgender, Queer</td>
</tr>
<tr class="odd">
<th>LIDAR</th>
<td>Light Detection and Ranging</td>
</tr>
<tr class="even">
<th>LPA</th>
<td>Airport Police (Belgium)</td>
</tr>
<tr class="odd">
<th>LQDN</th>
<td><a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a> (France)</td>
</tr>
<tr class="even">
<th>MIT</th>
<td>Massachusetts Institute of Technology</td>
</tr>
<tr class="odd">
<th>MRAP</th>
<td>Movement against racism and for friendship between peoples (France)</td>
</tr>
<tr class="even">
<th>NAIH</th>
<td>Hungarian National Authority for Data Protection and Freedom of Information</td>
</tr>
<tr class="odd">
<th>NBIS</th>
<td>National Biometric Identification System (Romania)</td>
</tr>
<tr class="even">
<th>NGO</th>
<td>Non-Governmental Organisation</td>
</tr>
<tr class="odd">
<th><a class="maplink" data-title="NIST">NIST</a></th>
<td>National Institute of Standards and Technology (USA)</td>
</tr>
<tr class="even">
<th>NISZ</th>
<td>National Infocommunication Services (Hungary)</td>
</tr>
<tr class="odd">
<th>PARAFE</th>
<td>Rapid passage at the external borders (France)</td>
</tr>
<tr class="even">
<th>PPM</th>
<td>Pixels Per Meter</td>
</tr>
<tr class="odd">
<th>RBI</th>
<td>Remote Biometric Identification</td>
</tr>
<tr class="even">
<th>RETU</th>
<td>Registered persons' identifying features database and Aliens database (Finland)</td>
</tr>
<tr class="odd">
<th>RGB</th>
<td>Red, Green, Blue</td>
</tr>
<tr class="even">
<th>SIS</th>
<td>Schengen Information System</td>
</tr>
<tr class="odd">
<th>SSNS</th>
<td>Secret Service for National Security (Hungary)</td>
</tr>
<tr class="even">
<th>TAJ</th>
<td><a class="maplink" data-title="Deployment of TAJ">Criminal case history database (France)</a></td>
</tr>
<tr class="odd">
<th>TASZ</th>
<td><a class="maplink" data-title="HCLU">Hungarian Civil Liberties Union</a></td>
</tr>
<tr class="even">
<th>TELEFI</th>
<td>Towards the European Level Exchange of Facial Images (EU Project)</td>
</tr>
<tr class="odd">
<th>UAVG</th>
<td>GDPR Implementation Act (Germany)</td>
</tr>
<tr class="even">
<th>UK</th>
<td>United Kingdom</td>
</tr>
<tr class="odd">
<th>UN</th>
<td>United Nations</td>
</tr>
<tr class="even">
<th>UNHRC</th>
<td>United Nations Human Rights Council</td>
</tr>
<tr class="odd">
<th>US(A)</th>
<td>United States of America</td>
</tr>
<tr class="even">
<th>VGG</th>
<td>Visual Geometry Group (Dataset)</td>
</tr>
<tr class="odd">
<th>VMD</th>
<td>Video motion detection</td>
</tr>
<tr class="even">
<th>VOC</th>
<td>Visual Object Classes (Pascal VOC)</td>
</tr>
<tr class="odd">
<th>YOLO</th>
<td>You Only Look Once (Algorithm)</td>
</tr>
</tbody>
</table>
</section>
<section id="section" class="level1 Title">
<h1 class="Title"></h1>
</section>
<section id="executive-summary" class="level1 Title">
<h1 class="Title">EXECUTIVE SUMMARY</h1>
<p><strong>CHAPTER 1: Introduction</strong></p>
<ul>
<li><p>The aim of this report is to establish a problematised overview of what we know about what is currently being done in Europe when it comes to remote biometric identification (RBI), and to assess in which cases we could potentially fall into forms of biometric mass surveillance.</p></li>
<li><p>Private and public actors are increasingly deploying “smart surveillance” solutions including RBI technologies which, if left unchecked, could become biometric mass surveillance.</p></li>
<li><p>Facial recognition technology has been the most discussed of the RBI technologies. However, there seems to be little understanding of the ways in which this technology might be applied and the potential impact of such a broad range of applications on the fundamental rights of European citizens.</p></li>
<li><p>The development of RBI systems by authoritarian regimes, which may subsequently be exported to and used within Europe, is of concern. This applies not only to the deployment of such technologies but also to the lack of adequate insight into the privacy practices of the companies supplying the systems.</p></li>
<li><p>Four main positions have emerged among political actors with regard to the deployments of RBI technologies and their potential impact on fundamental rights: 1) active promotion; 2) support with safeguards; 3) moratorium; and 4) outright ban.</p></li>
</ul>
<p><strong>CHAPTER 2: Technical Overview</strong></p>
<ul>
<li><p>The current market of RBI systems is overwhelmingly dominated by image-based products, at the centre of which is facial recognition technology (FRT). Other products such as face detection and person detection technologies are also in use.</p></li>
<li><p>FRT is typically being deployed to perform two types of searches: cooperative searches for verification and/or authentication purposes, and non-cooperative searches to identify a data subject. The former involves voluntary consent from the data subject to capture their image, while the latter may not.</p></li>
<li><p>Live facial recognition is currently the most controversial deployment of FRT: live video feeds are used to generate snapshots of individuals, which are then matched against a database of known individuals, the “watchlist” (see the illustrative sketch after this list).</p></li>
<li><p>Other RBI technologies are being deployed, though their use is at present marginal compared to FRT; these include gait (movement), audio, and emotion recognition technologies, amongst others.</p></li>
<li><p>A better understanding of the technical components and possible usage applications of image-based RBI technologies is needed in order to assess their potential political implications.</p></li>
<li><p>RBI technologies are subject to technical challenges and limitations which should be considered in any broader analysis of their ethical, legal, and political implications.</p></li>
</ul>
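<p>As an illustration of the watchlist matching described above, the sketch below shows, in strongly simplified form, how a live facial recognition pipeline compares faces found in a video frame against a watchlist. It is a hypothetical sketch rather than code from any system discussed in this report: the face embeddings are assumed to be given (real systems compute them with a trained neural network), and the 0.6 similarity threshold is an arbitrary value chosen for the example.</p>
<pre><code>// Illustrative sketch only; names and threshold are assumptions, not taken
// from any deployed system. An "embedding" is a numerical vector summarising
// a face, such that similar faces yield similar vectors.

// Cosine similarity between two embedding vectors of equal length.
function cosineSimilarity(a, b) {
  let dot = 0, normA = 0, normB = 0;
  for (let i = 0; i &lt; a.length; i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}

// Compare the embedding of every face detected in a frame against every
// watchlist entry. Both arguments are arrays of { label, embedding } records.
function matchAgainstWatchlist(detectedFaces, watchlist, threshold = 0.6) {
  const candidates = [];
  for (const face of detectedFaces) {
    for (const person of watchlist) {
      const score = cosineSimilarity(face.embedding, person.embedding);
      if (score &gt; threshold) {
        candidates.push({ face: face.label, person: person.label, score });
      }
    }
  }
  return candidates; // candidate identifications, typically reviewed by an operator
}</code></pre>
<p>Even in this toy form, the sketch makes the policy-relevant point visible: every face captured in the frame is embedded and compared against the watchlist, whether or not the person concerned is a target.</p>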
<p><strong>CHAPTER 3: Overview of deployments in Europe</strong></p>
<ul>
<li><p>Current deployments of RBI technologies within Europe are primarily experimental and localised. However, the technology coexists with a broad range of algorithmic processing of security images being carried out on a scale which ranges from the individual level to what could be classed as biometric mass surveillance. Distinguishing the various characteristics of these deployments is not only important to inform the public debate, but also helps to focus the discussion on the most problematic uses of the technologies.</p></li>
<li><p>Image and sound-based security applications being used for authentication purposes do not currently pose a risk of biometric mass surveillance. However, it should be noted that an alteration to the legal framework could increase the risk of them being deployed for biometric mass surveillance, especially as many of the databases being used contain millions of data subjects.</p></li>
<li><p>In addition to authentication, image and sound-based security applications are being deployed for surveillance. Surveillance applications include the deployment of RBI in public spaces.</p></li>
<li><p>Progress on two fronts makes the development of biometric mass surveillance more than a remote possibility. Firstly, the current creation and/or upgrading of biometric databases being used in civil and criminal registries. Secondly, the repeated piloting of live-feed systems connected to remote facial and biometric information search and recognition algorithms.  </p></li>
</ul>
<p><strong>CHAPTER 4: Legal bases</strong></p>
<ul>
<li><p>The use of biometric tools for law enforcement purposes in public spaces raises a key issue of legal permissibility in relation to the collection, retention and processing of data, given individuals' fundamental rights to privacy and personal data protection. When viewed through this lens, RBI technologies could have a grave impact on the exercise of a range of fundamental rights.</p></li>
<li><p>The deployment of biometric surveillance in public spaces must be subject to strict scrutiny in order to avoid circumstances which could lead to mass surveillance. This includes targeted surveillance, which has the potential for indiscriminate collection of data on any persons present in the surveilled location, not only that of the target data subject.</p></li>
<li><p>The normative legal framework for conducting biometric surveillance in public spaces can be found in the EU secondary legislation on data protection (GDPR and LED). The use of biometric data under this framework must be reviewed in light of the protection offered by fundamental rights.</p></li>
<li><p>The European Commission's April 2021 proposal for a Regulation on Artificial Intelligence (the Artificial Intelligence Act) aims to harmonise regulatory rules for Member States on AI-based systems. The Proposed Regulation lays out rules focused on three categories of risks (unacceptable, high, and low/minimal risk) and anticipates covering the use of RBI systems. It also aims to complement the rules and obligations set out in the GDPR and LED.</p></li>
</ul>
<p><strong>CHAPTER 5: Political developments and main issues of contention</strong></p>
<ul>
<li><p>Four main positions on RBI systems have emerged among political actors as a result of both technical developments in the field and early legislative activity of EU institutions: 1) active promotion; 2) support with safeguards; 3) moratorium; and 4) outright ban.</p></li>
<li><p>Those who are in favour of support with safeguards argue that the deployment of RBI technologies should be strictly monitored because of the potential risks they pose, including the potential danger of FRT, for example, to contribute to the further criminalisation or stigmatisation of groups of people who already face discrimination.</p></li>
<li><p>The European Parliament passed a resolution on artificial intelligence in January 2020 in which it invites the Commission “to assess the consequences of a moratorium on the use of facial recognition systems”. If deemed necessary, such a moratorium could impact some existing uses of FRT, including its deployment in public spaces by public authorities.</p></li>
<li><p>A number of EU and national NGOs have called for an outright ban on the use of RBI, with some arguing that the mass processing of biometric data from public spaces creates a serious risk of mass surveillance that infringes on fundamental rights.</p></li>
<li><p>The European Commission's legislative proposal for an Artificial Intelligence Act (EC 2021) is both a proposal for a regulatory framework on AI and a revised coordinated plan to support innovation. One feature of the act is the establishment of risk-dependent restrictions which would apply to the various uses of AI systems.</p></li>
</ul>
<p><strong>CHAPTER 6: Facial Recognition cameras at Brussels International Airport (Belgium)</strong></p>
<ul>
<li><p>Belgium is one of two European countries that has not yet authorised the use of FRT; however, law enforcement is strongly advocating for its use, and the current legal obstacles to its implementation are unlikely to hold for very long.</p></li>
<li><p>In 2017, unbeknownst to the Belgian Supervisory Body for Police Information (COC), Brussels International Airport acquired four cameras connected to facial recognition software for use by the airport police. Though the COC subsequently ruled that this use fell outside of the conditions for a lawful deployment, the legality of the airport experiment fell into a legal grey area because of the ways in which the technology was deployed.</p></li>
<li><p>One justification for the legality of the airport experiment from the General Commissioner of Federal Police was to compare the technological deployment to the legal use of other intelligent technologies such as Automated Number Plate Recognition (ANPR). Although this argument was rejected at the time, such a system could be re-instated if the grounds for interruption are no longer present in the law.</p></li>
<li><p>There is an emerging civil society movement in Belgium contesting the legitimacy of remote biometric identification. However, the amendments to the Police Act permitting the use of real-time smart cameras by the police in carrying out their administrative and judicial duties, and recent declarations of the Minister of Interior, seem to point in the direction of more acceptance for remote biometric surveillance.</p></li>
</ul>
<p><strong>CHAPTER 7: The Burglary Free Neighbourhood in Rotterdam (Netherlands)</strong></p>
<ul>
<li><p>The Fieldlab Burglary Free Neighbourhood is a public-private collaboration with two aims: to detect suspicious behaviour and to influence the behaviour of the suspect. While the system of smart streetlamps does collect some image and sound-based data, it does not record any characteristics specific to the individual.</p></li>
<li><p>From a legal perspective, there is a question as to whether or not the data processed by the Burglary Free Neighbourhood programme qualifies as personal data and thus would fall within the scope of data protection legislation.</p></li>
<li><p>It is contested whether forms of digital monitoring and signalling are actually the most efficient methods for preventing break-ins. Despite the aims of the programme, to date, the streetlights have only been used to capture data for the purposes of machine learning.</p></li>
<li><p>The infrastructure installed for the experiments can potentially be used for more invasive forms of monitoring. During the project, local police, for example, already voiced an interest in access to the cameras.</p></li>
<li><p>In March 2021, the Fieldlab trial ended. The data collected over the course of the project was not sufficient to allow the computer to distinguish suspicious trajectories. The infrastructure of cameras and microphones is currently disabled, yet remains in place.</p></li>
</ul>
<p><strong>CHAPTER 8: The Safe City Projects in Nice (France)</strong></p>
<ul>
<li><p>Several French cities have launched “safe city” projects involving biometric technologies; however, Nice is arguably the national leader. The city currently has the highest CCTV coverage of any city in France and has more than double the police agents per capita of the neighbouring city of Marseille.</p></li>
<li><p>Through a series of public-private partnerships, the city began a number of initiatives using RBI technologies (including emotion and facial recognition). These technologies were deployed for both authentication and surveillance purposes, with some falling into the category of biometric mass surveillance.</p></li>
<li><p>One project which used FRT at <a class="maplink" data-title="Facial Recognition Pilot in High School (Marseille)">a high school in Nice</a> and <a class="maplink" data-title="Facial Recognition Pilot in High School (Marseille)">one in Marseille</a> was eventually declared unlawful. The court determined that the required consent could not be obtained due to the power imbalance between the targeted public (students) and the public authority (public educational establishment). This case highlights important issues about the deployment of biometric technologies in public spaces.</p></li>
<li><p>The use of biometric mass surveillance by the mayor of Nice, Christian Estrosi, has put him on a collision course with the French Data Protection Authority (<a class="maplink" data-title="CNIL">CNIL</a>) as well as human rights/digital rights organisations (Ligue des Droits de l'Homme, <a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a>). His activities have raised both concern and criticism over the usage of the technologies and their potential impact on the privacy of personal data.</p></li>
</ul>
<p><strong>CHAPTER 9: Facial Recognition in Südkreuz Berlin, Hamburg G20 and Mannheim (Germany)</strong></p>
<ul>
<li><p>The German federal police, in cooperation with the German railway company, conducted a project called “Sicherheitsbahnhof” at the Berlin railway station Südkreuz in 2017/18, which included 77 video cameras and a video management system.</p></li>
<li><p>The police in Hamburg used the facial recognition software <a class="maplink" data-title="Videmo">Videmo</a> 360 during the protests against the G20 summit in 2017. The database includes 100,000 individuals who were in Hamburg during the G20 summit and whose profiles are saved in the police database. The technology allows for the determination of behaviour, participation in gatherings, preferences, and religious or political engagement.</p></li>
<li><p>Sixty-eight cameras were installed by local police on central squares and places in the German city of Mannheim to record the patterns of movement of people. In this project, which started in 2018, the software is used to detect conspicuous behaviour.</p></li>
<li><p>Half of these deployments (Mannheim &amp; Berlin Südkreuz) took place as measures to test the effectiveness of facial recognition and behavioural analysis software. This “justification as a test” approach is often used in Germany to argue for a deviation from existing rules and societal expectations, and was similarly applied to justify deviations from commonly agreed measures during the Coronavirus/COVID-19 pandemic.</p></li>
<li><p>Resistance to video surveillance is also in no small part a result of constant campaigning and protest by German civil society. The Chaos Computer Club and Digital Courage have consistently campaigned against video surveillance and any form of biometric or behavioural surveillance. The long-term effect of these “pilots” is to normalise surveillance.</p></li>
</ul>
<p><strong>CHAPTER 10: The Dragonfly Project (Hungary)</strong></p>
<ul>
<li><p>The Hungarian Government led by Prime Minister Viktor Orbán has long been on a collision course with EU Institutions over the rule of law and the undermining of the country's judicial independence and democratic institutions.</p></li>
<li><p>Hungary is a frontrunner in Europe when it comes to authorising law enforcement's use of Facial Recognition Technology, developing a nationwide and centralised database (The <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a>), and using the <a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App</a> as part of the Government's Coronavirus measures.</p></li>
<li><p>The infrastructure in place that potentially allows for a centralised deployment of biometric mass surveillance technologies in Hungary has reached an unprecedented scale while the legal and ethical scrutiny of these technologies lags dangerously behind.</p></li>
<li><p>This is due to (1) the overlap between the private and public sectors, specifically government institutions, and (2) the complex entanglements biometric systems have with other information systems (such as car registries, traffic management, public transport monitoring and surveillance, etc.).</p></li>
<li><p>Although the latter are not concerned with the traces of the human body, they can nonetheless be used for and facilitate biometric mass surveillance. These entanglements create grey zones of biometric mass surveillance where the development and deployment of such technologies is hidden from visibility and critical scrutiny.</p></li>
<li><p>The <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a> has elicited numerous warnings regarding data protection and the rights to privacy from both public and private organisations. However, the lack of contestation and social debate around the issues of privacy and human rights in relation to such projects as the Hungarian Government's Dragonfly is striking.</p></li>
</ul>
<p><strong>CHAPTER 11: Recommendations</strong></p>
<p><strong>1. The EU should prohibit the deployment of both indiscriminate and “targeted” Remote Biometric and Behavioural Identification (RBI) technologies in public spaces (real-time RBI), as well as ex-post identification (or forensic RBI). Our analysis shows that both practices, even when used for “targeted surveillance”, amount to mass surveillance.</strong></p>
<ul>
<li><p>In line with similar recommendations made by the EDPB and the EDPS,<a href="#fn1" class="footnote-ref" id="fnref1" role="doc-noteref"><sup>1</sup></a> the EU should <strong>prohibit the deployment of Remote Biometric and Behavioural Identification technologies in public spaces</strong>.</p></li>
<li><p>In line with the position of EDRi regarding the EU Artificial Intelligence Act<a href="#fn2" class="footnote-ref" id="fnref2" role="doc-noteref"><sup>2</sup></a>, our research supports the notion that the <strong>distinction between “real-time” and “ex-post” is irrelevant</strong> when it comes to the impact of these technologies on fundamental rights. Ex-post identification in fact carries a higher potential for harm, as more data can be pooled from different sources to proceed to the identification. The use of such technologies for <strong>“targeted surveillance” is thus equally harmful</strong>, as the practice might be considered expansive and intrusive to an extent that it would constitute disproportionate interference with the rights to privacy and personal data protection.</p></li>
<li><p>This concerns not only the acquisition and processing of <strong>faces, but also gait, voice and other biometric or behavioural signals.</strong></p></li>
</ul>
<p><strong>2. The EU should strengthen transparency and accountability of biometric and behavioural recognition technologies</strong></p>
<ul>
<li><p>Our research found that the majority of surveillance systems remain <strong>opaque</strong>. There is very <strong>little information on how citizens' data is processed</strong> when they enter surveilled public spaces. Rarely are <strong>concrete alternatives</strong> provided if they do not wish to be surveilled. In some extreme cases, such as the deployment of FRT trials in London, citizens who deliberately avoided surveillance by covering their faces were <strong>subjected to fines</strong>. This poses considerable challenges to citizens' rights, as well as to the transparency and accountability of these systems.</p></li>
<li><p><strong>It thus seems necessary to expand existing transparency and accountability requirements</strong> in the new EU Artificial Intelligence Act for biometric technologies. These requirements should be expanded to include external independent accountability, transparency and oversight for any implementations of biometric technologies that are not already prohibited by the Act.</p></li>
<li><p>In particular, it seems imperative to increase the transparency of such systems by conditioning their operation on the publication of <strong>key characteristics and features</strong> (type of data acquisition, type of machine learning algorithm, nature of data collected in the database) necessary for <strong>effective public oversight</strong> of their operation. These details should be disclosed even when deployments are used for national security or law enforcement purposes, and the public should be informed about planned and ongoing projects.</p></li>
</ul>
<p><strong>3. The EU should promote the reinforcement of robust accountability mechanisms for biometric surveillance systems.</strong></p>
<ul>
<li><p>The current legislative framework remains <strong>unclear</strong> as to which institutions may <strong>review or authorise biometric surveillance</strong> systems. In light of the GDPR and the LED, the Data Protection Authorities (DPAs) in some member states enforce the relevant data protection legislation and oversee the processing of biometric data, while in others a separate authority is tasked with the responsibility to review the compatibility with the relevant legislation insofar as personal data processing by law enforcement authorities is concerned (such as Belgium, see case study).</p></li>
<li><p>The EU should work toward developing a <strong>centralised authorisation process for biometric surveillance</strong>, within which all relevant authorities are included and are able to veto the authorisation.</p></li>
<li><p>Although the proposed EU Artificial Intelligence Act limits a prior authorisation by a court or independent administrative authority to real-time biometric surveillance, it is necessary to underline that ex-post biometric identification systems must be subject to supervision or authorisation taking into account the standards under the ECHR and the Charter.</p></li>
</ul>
<p><strong>4. The EU should promote individual rights under the GDPR through the promotion of digital-rights-by-design technologies.</strong></p>
<ul>
<li><p>More attention could be given to protecting <strong>individuals' rights under the GDPR</strong> when it comes to data collection and processing mechanisms, as well as a <strong>fundamental rights assessment</strong> ex ante and <strong>ex post.</strong></p></li>
<li><p>This could be implemented technically through <strong>data minimisation or digital rights-by-design</strong> methods, either through <strong>technical solutions that do not collect biometric information</strong> or systems which incorporate automated forms of <strong>notification</strong>, <strong>immutable</strong> <strong>transparency and accountability logging</strong>, and <strong>control of data</strong>, or ideally by a combination of both approaches (see the sketch after this list).</p></li>
</ul>
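<p>To make the notion of immutable transparency and accountability logging referenced in the list above more concrete, the following minimal Node.js sketch (hypothetical, not drawn from any existing system) shows one common way to build a tamper-evident log: each entry embeds the hash of the previous entry, so that any later alteration of the record breaks the chain and is detectable on verification.</p>
<pre><code>// Hypothetical sketch of an append-only, tamper-evident accountability log.
const crypto = require('crypto');

// Append an event (e.g. a biometric data access) to the log, chaining it to
// the hash of the previous entry.
function appendEntry(log, event) {
  const prevHash = log.length ? log[log.length - 1].hash : 'GENESIS';
  const payload = JSON.stringify({ event, time: new Date().toISOString(), prevHash });
  const hash = crypto.createHash('sha256').update(payload).digest('hex');
  log.push({ payload, hash });
  return log;
}

// Recompute every hash and check the chain; tampering with any earlier
// entry invalidates all entries that follow it.
function verifyLog(log) {
  return log.every((entry, i) =&gt; {
    const expected = crypto.createHash('sha256').update(entry.payload).digest('hex');
    const prevHash = i === 0 ? 'GENESIS' : log[i - 1].hash;
    return entry.hash === expected &amp;&amp; JSON.parse(entry.payload).prevHash === prevHash;
  });
}</code></pre>
<p>A scheme of this kind would give oversight bodies a way to audit when and how biometric data was accessed without having to rely solely on the operator's own records.</p>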
<p><strong>5. The EU should ensure effective enforcement of GDPR purpose limitation.</strong></p>
<ul>
<li><p><strong>Purpose limitation</strong> is one of the key principles of the GDPR. As our report shows, the <strong>re-purposing of biometric data</strong> is not always kept sufficiently in check.</p></li>
<li><p>From a technical perspective, <strong>biometric mass surveillance can easily emerge by connecting different elements of a technical infrastructure</strong> (video acquisition capacities, processing algorithms, biometric datasets) <strong>developed in other contexts.</strong></p></li>
<li><p>For example, while the <strong>forensic use of facial recognition</strong> is not a form of <strong>remote biometric identification</strong> per se, the adoption of such systems has allowed for the creation of biometrically searchable national datasets. These datasets are one piece of a potential <strong>biometric mass surveillance</strong> infrastructure, which can become a technical reality if live camera feeds, processed through live facial recognition software, are connected to them.</p></li>
<li><p>In order to maintain democratic oversight over the uses of the infrastructure, and <strong>avoid the risk of function creep</strong> (i.e. when a technology is being used beyond its initial purpose), it is thus imperative that the principle of <strong>purpose limitation</strong> is systematically enforced and strictly regulated with regard to the <strong>type of data</strong> (criminal or civilian datasets, datasets generated from social media, as in the <a class="maplink" data-title="Clearview AI">Clearview AI</a> controversy) against which biometric searches can be performed.</p></li>
</ul>
<p><strong>6. The EU should support voices and organisations which are mobilised for the respect of EU fundamental rights</strong></p>
<ul>
<li><p>Finally, our research showed that, in addition to state oversight agencies, many institutions from <strong>civil society</strong> are active in making sure that <strong>EU fundamental rights are respected</strong> in the field of biometric security technologies.</p></li>
<li><p>While in some countries they benefit from a <strong>dense network of civil society funding</strong>, in others they are subjected to <strong>heavy scrutiny and financial restrictions</strong> (see for example the Hungary case study in this report).</p></li>
<li><p><strong>Supporting civil society organisations</strong> that operate in the <strong>sector of digital rights</strong> is therefore instrumental for a healthy democratic debate and oversight. Civil society needs to be able to participate in all relevant legislative and other decision-making procedures.</p></li>
<li><p>Particularly in the area of <strong>litigation</strong>, support for civil society and EU citizens' access to rights could be extremely helpful. We have found numerous areas in our study where sufficient legal clarity was lacking and would likely only be achieved through the courts. We would thus advise that the EU support existing <strong>digital rights litigation initiatives</strong> and create additional mechanisms to support this approach.</p></li>
</ul>
<p><strong>7. The EU should take into account the global dimension of the Biometric and Behavioural Analysis Technology Industry</strong></p>
<ul>
<li><p>The technologies used for FRT in Europe come from <strong>vendors across the world</strong>. Technologies for biometric or behavioural analysis are often tested in one country before they are implemented in another.</p></li>
<li><p>EU policy on the biometric or behavioural analysis technology industry thus needs to consider its impact both <strong>inside and outside of Europe</strong>. Here, the recently revised <strong>EU Export Control framework</strong>, which may include biometric and behavioural technologies, can play a role.</p></li>
</ul>
</section>
<section id="introduction" class="level1">
<h1>Introduction</h1>
<div class="keypoints">
<p><strong>Key points</strong></p>
<ul>
<li><p>The aim of this report is to establish a problematised overview of what is currently being done in Europe when it comes to remote biometric identification (RBI), and to assess in which cases we could potentially fall into forms of biometric mass surveillance.</p></li>
<li><p>Private and public actors are increasingly deploying “smart surveillance” solutions including RBI technologies which, if left unchecked, could become biometric mass surveillance.</p></li>
<li><p>Facial recognition technology has been the most discussed of the RBI technologies. However, there seems to be little understanding of the ways in which this technology might be applied and the potential impact of such a broad range of applications on the fundamental rights of European citizens.</p></li>
<li><p>The development of RBI systems by authoritarian regimes, which may subsequently be exported to and used within Europe, is of concern. This applies not only to the deployment of such technologies but also to the lack of adequate insight into the privacy practices of the companies supplying the systems.</p></li>
<li><p>Four main positions have emerged with regard to the deployments of RBI technologies and their potential impact on fundamental rights: 1) active promotion; 2) support with safeguards; 3) moratorium; and 4) outright ban.</p></li>
</ul>
</div> <!-- key points -->
<p>Since the widespread use of neural network algorithms in 2012, artificial intelligence applied to the field of security has steadily grown into a political, economic, and social reality. As examples from Singapore, the UK, South Africa, or China demonstrate, the image of a digital society of control, in which citizens are monitored through algorithmically processed audio and video feeds, is becoming a tangible possibility in the <a class="maplink" data-title="European Union">European Union</a>.</p>
<p>Through a set of “pilot projects”, private and public actors including supermarkets, casinos, city councils, border guards, and local and national law enforcement agencies are increasingly deploying a wide array of “<strong>smart surveillance</strong>” solutions. Among them is <strong>remote biometric identification,</strong> namely <strong>security mechanisms “that leverage unique biological characteristics” such as fingerprints, facial images, iris or vascular patterns to “identify multiple persons' identities at a distance, in a public space and in a continuous or ongoing manner by checking them against data stored in a database”</strong> (European Commission 2020b, 18). European institutions have reacted with a series of policy initiatives in recent years, but as we will show in this report, if left unchecked, remote biometric identification technologies can easily become <strong>biometric mass surveillance</strong>.</p>
<p>Among technologies of <strong>remote biometric identification</strong>, <strong>facial recognition</strong> has been at the centre of the attention of most discussions in the public debate. The foregrounding of this specific use case of computer vision in the public debate has allowed concerned actors to raise awareness on <strong>the dangers of artificial intelligence algorithms applied to biometric datasets</strong>. But it has also generated confusion. The perception that facial recognition is a single type of technology (i.e., an algorithm “that recognises faces”) has obscured <strong>the broad range of applications of “smart technologies” within very different bureaucratic contexts</strong>: from the “smart cities” live facial recognition of video feeds deployed for the purpose of public space surveillance, to the much more specific, on-the-spot searches by law enforcement for the purpose of carrying out arrests or forensic investigations.</p>
<p>The disentanglement and specification of each of these uses is important, if only because each distinct technological arrangement between sensing devices (cameras, microphones), datasets and algorithmic processing tools <strong>allows for radically different applications</strong>, and thus can have different types of <strong>impact on European citizens’ fundamental rights</strong>. As the recent communication of the European Commission (2021) stated, not all systems and not all applications are equally threatening for our democratic freedoms: some bear too much risk of infringing our fundamental rights and therefore should never be allowed; some are “high risk” applications that can take place in certain circumstances with very clear safeguards; and some are more mundane uses of the technologies that require less attention. <strong>The ethical, political, and legal assessment of these levels of danger can therefore not be separated from a detailed understanding of how these technologies work.</strong> The limitation is, of course, that while we know what technologies are theoretically available to public actors, the detail of their <strong>characteristics is often hidden from view.</strong></p>
<section id="objectives-of-the-report" class="level2">
<h2>Objectives of the report</h2>
<p><strong>The aim of this report</strong> is thus to establish a problematised overview of <strong>what we know about</strong> what is currently being done in Europe when it comes to <strong>remote biometric identification</strong>, and to assess in which cases we could potentially fall into forms of <strong>biometric mass surveillance</strong>. The report will thus answer the following questions: What types of technologies are being used and how? In what context? By whom are these technologies used and to what aim? What types of actors are involved? What types of consequences does the use of those technologies entail? What legal basis and framework are applied to the use of those technologies? What are the forms of mobilisation and contestation against these uses?</p>
<p>In the rest of this introduction, we locate the political context for this study, including the voices that have called for a moratorium or a ban of all technologies that are associated with “biometric mass surveillance”. We then specify the objectives, scope, methodology, some working definitions and outline the remaining chapters.</p>
</section>
<section id="the-international-context" class="level2">
<h2>The international context</h2>
<p>The concern about the uncontrolled deployment of <strong>remote biometric identification</strong> systems emerges in a context characterised by the development of technologies in authoritarian regimes; the development of controversial “pilot” projects as part of “smart cities” projects in Europe; revelations about controversial privacy practices of companies such as <a class="maplink" data-title="Clearview AI">Clearview AI</a>; and finally, by the structuring of a debate in the US and the EU around some of the key biases and problems these systems entail.</p>
<p>In 2013, the Chinese authorities officially revealed the existence of a large system of mass surveillance called Skynet, involving more than 20 million cameras, which had been under development since 2005. While the cameras were aimed at the general public, more targeted systems were deployed in provinces such as Tibet and Xinjiang, where political groups contest the authority of Beijing. In 2018, the surveillance system became coupled with a <strong>system of social credit</strong>, and Skynet became increasingly connected to facial recognition technology (Ma 2018; Jiaquan 2018). By 2019, it was estimated that Skynet had reached 200 million face-recognition-enabled CCTV cameras (Mozur 2018).</p>
<p>The intrusiveness of the system, and its impact on fundamental rights, is best exemplified by its deployment in the Xinjiang province. The provincial capital, Urumqi, is chequered with <strong>checkpoints and identification stations</strong>. Citizens need to submit to facial recognition ID checks in supermarkets, hotels, train stations, highway stations and several other public spaces (Chin and Bürge 2017). The information collected through the cameras is centralised and matched against other <strong>biometric data</strong> such as <strong>DNA samples</strong> and <strong>voice samples</strong>. This allows the government to attribute <strong>trustworthiness scores</strong> (trustworthy, average, untrustworthy) and thus generate a list of individuals who may become candidates for detention (Wang 2018).</p>
<p>European countries’ deployments are far from the Chinese experience. But the companies involved in China’s pervasive digital surveillance network (such as <strong>Tencent</strong>, <strong><a class="maplink" data-title="Dahua Technologies">Dahua Technology</a></strong>, <strong><a class="maplink" data-title="Hikvision">Hikvision</a></strong>, <strong>SenseTime</strong>, <strong>ByteDance</strong> and <strong><a class="maplink" data-title="Huawei">Huawei</a></strong>) are exporting their know-how to Europe in the form of “<strong>safe city” packages</strong>. <strong><a class="maplink" data-title="Huawei">Huawei</a></strong> is one of the most active in this regard. On the European continent, the city of Belgrade has for example deployed an extensive network of more than 1,000 cameras which collect up to 10 body and facial attributes (Stojkovski 2019). The cameras, deployed on poles, major traffic crossings and a large number of public spaces, allow the Belgrade police to monitor large parts of the city centre, collect <strong>biometric information</strong> and communicate it directly to police officers deployed in the field. Belgrade has the most advanced deployment of <a class="maplink" data-title="Huawei">Huawei</a>’s surveillance technologies on the European continent, but similar projects are being implemented by other corporations, including the <strong>European companies <a class="maplink" data-title="Thales">Thales</a>, <a class="maplink" data-title="Engie Ineo">Engie Ineo</a> or <a class="maplink" data-title="IDEMIA">Idemia</a></strong>, in other European cities, and many “Safe City” deployments are planned for the near future in EU countries such as France, Italy, Spain, <a class="maplink" data-title="Safe City Malta">Malta</a>, and Germany (Hillman and McCalpin 2019). Furthermore, contrary to the idea that China is the sole exporter of Remote Biometric Identification technologies, EU companies have substantially developed their exports in this domain over the last years (Wagner 2021).</p>
<p>The turning point of public debates on facial recognition in Europe was probably <strong>the <a class="maplink" data-title="Clearview AI">Clearview AI</a> controversy</strong> in 2019-2020. <strong><a class="maplink" data-title="Clearview AI">Clearview AI</a></strong>, a company founded by Hoan Ton-That and Richard Schwartz in the United States, maintained a relatively secret profile until a New York Times article revealed in late 2019 that it was selling <strong>facial recognition technology</strong> to law enforcement. In February 2020, it was reported that the client list of <a class="maplink" data-title="Clearview AI">Clearview AI</a> had been stolen, and a few days later the details of the list were leaked (Mac, Haskins, and McDonald 2020). To the surprise of many in Europe, in addition to US government agencies and corporations, it appeared that the Metropolitan Police Service (London, UK), as well as <strong>law enforcement agencies from Belgium, Denmark, Finland, France, Ireland, <a class="maplink" data-title="Carabinieri">Italy</a>, Latvia, Lithuania, <a class="maplink" data-title="Maltese State">Malta</a>, the <a class="maplink" data-title="Dutch Police">Netherlands</a>, Norway, Portugal, Serbia, <a class="maplink" data-title="Slovenian Police">Slovenia</a>, Spain, <a class="maplink" data-title="Use of Clearview AI in Sweden">Sweden</a>, and Switzerland, were on the client list.</strong> The controversy grew larger as it emerged that <a class="maplink" data-title="Clearview AI">Clearview AI</a> had (semi-illegally) harvested a large number of images from social media platforms such as <strong><a class="maplink" data-title="Facebook">Facebook</a>, YouTube</strong> and <strong>Twitter</strong> in order to constitute the datasets against which clients were invited to carry out searches (Mac, Haskins, and McDonald 2020).</p>
<p>The news of the hacking strengthened the push-back against the development of facial recognition technology by companies such as <a class="maplink" data-title="Clearview AI">Clearview AI</a>, as well as its use by government agencies. In 2018, <strong>Massachusetts Institute of Technology</strong> (MIT) scholar and <strong><a class="maplink" data-title="Algorithmic Justice League">Algorithmic Justice League</a></strong> founder <strong>Joy Buolamwini</strong>, together with <strong>Timnit Gebru</strong>, had published the report <em>Gender Shades</em> (Buolamwini and Gebru 2018), in which they assessed the racial bias in the face recognition datasets and algorithms used by companies such as <a class="maplink" data-title="IBM">IBM</a> and Microsoft. Buolamwini and Gebru found that <strong>algorithms performed generally worse on darker-skinned faces, and in particular darker-skinned females, with error rates up to 34% higher than for lighter-skinned males</strong> (Najibi 2020). <a class="maplink" data-title="IBM">IBM</a> and Microsoft responded by amending their systems, and a re-audit showed less bias. Not all companies responded equally. <strong>Amazon’s Rekognition</strong> system, which was included in the second study, continued to show an error rate of 31% for darker-skinned females. The same year, the <strong>ACLU</strong> conducted another key study on Amazon’s Rekognition, running the pictures of <strong>members of Congress against a dataset of mugshots from law enforcement</strong>: 28 members of Congress, <strong>largely people of colour, were incorrectly matched</strong> (Snow 2018). A number of organizations took up the problem as a policy issue (<strong><a class="maplink" data-title="Black in AI">Black in AI</a></strong>, <strong><a class="maplink" data-title="Algorithmic Justice League">Algorithmic Justice League</a>, <a class="maplink" data-title="Data for Black Lives">Data for Black Lives</a></strong>) and some engaged lawmakers. In 2019, the Algorithmic Accountability Act, a bill which would allow the Federal Trade Commission to regulate private companies’ uses of facial recognition, was introduced in the US Congress. In 2020, several companies, including <a class="maplink" data-title="IBM">IBM</a>, Microsoft, and Amazon, announced a moratorium on the development of their facial recognition technologies. Several US cities, including <strong>Boston</strong>, <strong>Cambridge</strong> (Massachusetts), <strong>San Francisco</strong>, <strong>Berkeley</strong>, and <strong>Portland</strong> (Oregon), have also banned their police forces from using the technology.</p>
</section>
<section id="the-european-context" class="level2">
<h2>The European context</h2>
<p>In Europe, a similar set of developments took place around Artificial Intelligence in activist circles, both at the member state level and at the EU level (Andraško et al. 2021, 3). The first intervention dates from 2017, with the <strong>European Parliament</strong> <em>Resolution of 16 February to the Commission on Civil Law Rules on Robotics</em> (European Parliament 2017). It was followed by two statements and advisory documents: <em>The Age of Artificial Intelligence</em>, published by the European Political Strategy Centre; and a <em>Statement on Artificial Intelligence, Robotics and Autonomous Systems</em> (March 2018), published by the European Group on Ethics in Science and New Technologies (Andraško et al. 2021, 3). At the beginning of 2018, the <strong>European Economic and Social Committee</strong> issued three opinions on the deployment of AI in practice (European Economic and Social Committee 2018a, 2018b, 2018c). All these documents addressed <strong>the need for the EU to understand AI uses</strong> and embedded them in the various ethical and political frameworks created by EU institutions. The <strong>Council of Europe</strong> had begun its activities on the matter slightly earlier: in 2017, the <strong>Parliamentary Assembly of the Council of Europe</strong> adopted a <em>Recommendation on Technological Convergence, Artificial Intelligence and Human Rights</em>, pointing towards the need to establish common guidelines for the use of artificial intelligence in court (Parliamentary Assembly of the Council of Europe 2017; Gonzalez Fuster 2020, 45).</p>
<p>Legislative activity accelerated in 2018. The <strong>European Commission</strong> (2018a) published a communication, <em>Artificial Intelligence for Europe</em>, in which it called for a joint legal framework for the regulation of AI-related services. Later in the year, the Commission (2018b) adopted a <em>Coordinated Plan on Artificial Intelligence</em> with similar objectives. It compelled EU member states to adopt a national strategy on artificial intelligence meeting EU requirements, and set the objective of mobilising 20 billion euros per year for investment in AI development (Andraško et al. 2021, 4).</p>
<p>In 2019, the <strong>Council of Europe Commissioner for Human Rights</strong> published a Recommendation entitled <em>Unboxing Artificial Intelligence: 10 steps to Protect Human Rights</em>, which describes several steps for national authorities to maximise the potential of AI while preventing or mitigating the risk of its misuse (Gonzalez Fuster 2020, 46). The same year, the <strong><a class="maplink" data-title="European Union">European Union</a>’s <a class="maplink" data-title="European Union High-Level Expert Group on Artificial Intelligence (AI HLEG)">High-Level Expert Group on Artificial Intelligence</a> (AI HLEG)</strong> adopted the <em>Ethics Guidelines for Trustworthy Artificial Intelligence</em>, a key document for the EU strategy of bringing AI within ethical standards (Nesterova 2020, 3).</p>
<p>In February 2020, the new <strong>European Commission</strong> went one step further in regulating matters related to AI, adopting the digital agenda package, a set of documents outlining the strategy of the EU in the digital age. Among these documents, the <em>White Paper on Artificial Intelligence: a European approach to excellence and trust</em> captured most of the Commission’s intentions and plans.</p>
</section>
<section id="four-positions-in-the-policy-debates" class="level2">
<h2>Four positions in the policy debates</h2>
<p>Over the past 3-4 years, positions around the use of facial recognition, and more specifically the use of remote biometric identification in public space, have progressively crystallised into four camps (for a more detailed analysis of the positions, see Chapter 5).</p>
<section id="active-promotion" class="level3">
<h3>Active promotion</h3>
<p>A certain number of actors, both at the national and at the local level, are pushing for the development and extension of biometric remote identification. At the local level, figures such as Nice’s (France) mayor Christian Estrosi have repeatedly challenged Data Protection Authorities, arguing for the usefulness of such technologies in the face of insecurity (for a detailed analysis, see Chapter 8 in this report; see also Barelli 2018). <strong>At the national level, biometric systems for the purposes of identification are increasingly deployed for forensic applications</strong> among law-enforcement agencies in the <a class="maplink" data-title="European Union">European Union</a>. As we elaborate in Chapter 3, 11 out of 27 member states of the <a class="maplink" data-title="European Union">European Union</a> are already using facial recognition against biometric databases for forensic purposes, and 7 additional countries are expected to acquire such capabilities in the near future. Several states that have not yet adopted such technologies seem inclined to follow the trend and push further. Former Belgian Minister of the Interior Pieter De Crem, for example, recently declared he was in favour of the use of facial recognition not only for judicial inquiries but also for live facial recognition, a much rarer use case. Such outspoken advocates of the use of RBI constitute an important voice but do not find an echo in the EU mainstream discussions.</p>
</section>
<section id="support-with-safeguards" class="level3">
<h3>Support with safeguards </h3>
<p>While there is little widespread support for the development of centralised biometric mass surveillance, some actors such as the <strong>European Council</strong> and the <strong>European Commission</strong> have advocated a cautious and regulated development of remote biometric identification systems, as part of a broader Artificial Intelligence strategy. The principles of such strategies have been outlined in the various strategy documents discussed above. A large number of technology companies hope that this position will remain the dominant one, with many of them eager to implement the ethical requirements necessary for the deployment of their systems. <strong>In addition to the political and legislative activity mentioned above, the EU institutions have been active in promoting the use of Artificial Intelligence and biometric surveillance technologies. As detailed in Chapter 5, instruments such as the Digital Europe programme, the Connecting Europe Facility 2 and Horizon Europe will form the basis for collaboration between public institutions and the security industry developing biometric remote identification products. In the European Parliament, positions are divided and shifting, but parties such as the European People’s Party support a similar notion of careful development.</strong></p>
</section>
<section id="moratorium" class="level3">
<h3>Moratorium</h3>
<p>For other actors, such as the <strong>European Parliament</strong> or the Council of Europe, remote biometric identification systems entail too many unknown risks and thus need to be put on hold. The proponents of a moratorium invoke the necessity of applying the <em>precautionary principle</em>, a strategy similar to that of opponents of the commercialisation of GMOs in Europe, so that all dimensions of the technology can be assessed before a decision is made. On 20 January 2021, the <strong>European Parliament</strong> passed a resolution inviting the EU Commission to consider a moratorium on the use of facial recognition systems (European Parliament 2021). Similarly, in 2021, the Council of Europe adopted <strong>Guidelines on Facial Recognition</strong> (Council of Europe 2021), which call for a <strong>moratorium on live facial recognition technologies</strong> and lay out certain conditions for the use of facial recognition technologies by law enforcement authorities.</p>
</section>
<section id="ban" class="level3">
<h3>Ban</h3>
<p>Finally, a growing number of actors consider that there is enough information about remote biometric identification in public space to determine that it will never be able to comply with the strict requirements of the <a class="maplink" data-title="European Union">European Union</a> in terms of respect for fundamental rights, and as such should be banned entirely. This is the current position of the <strong>European Data Protection Supervisor (EDPS 2021)</strong>, the <strong>Council of Europe</strong> and a large coalition of NGOs (among which <strong><a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a></strong> and the collaborative project <strong>Technopolice</strong>) gathered under the umbrella of the <strong><a class="maplink" data-title="European Digital Rights (EDRi)">European Digital Rights organisation</a></strong> (EDRi 2020). In the <strong>European Parliament</strong>, this position has most vocally been defended by the European Greens, but it is shared by several other voices, such as members of the Party of the European Left, the Party of European Socialists and Renew Europe (Breyer et al. 2021).</p>
</section>
</section>
<section id="lack-of-transparency-and-the-stifling-of-public-debate" class="level2">
<h2>Lack of transparency and the stifling of public debate</h2>
<p>An additional important question concerns the reaction of the public at large. While the development of facial recognition technologies, and more broadly remote biometric identification systems, has elicited stark responses from watchdogs, civil liberties unions and human rights activists (CNIL 2019b; EDRi 2020; Renaissance Numérique 2019; Gonzales Fuster 2020), the state of the debate and awareness in the wider public is actively muddled by a lack of transparency in how these technologies are developed and implemented, both by private companies and public authorities.</p>
<p>This lack of transparency, and sometimes secrecy, surrounding some of the technological parameters is not casual: “vendors of facial recognition software might not want to disclose information about the training data, as was experienced by an expert from a civil society organization”, warns the FRA (2019, 10). Copyright issues and trade secrets are also invoked to block access to information that would be needed to assess the quality of the systems employed. Governments, at the national or local level, invoke national security concerns in order to remain opaque about the deployment of the technologies and the contracted parties (see Chapter 10), and citizens often find out about their implementation after the fact (see Chapter 6). Finally, the societal debate about these issues is further hindered by the porosity between the public and private dimensions of these technologies. Users often willingly volunteer their biometric data, and do not always perceive the technical differences that might exist between unlocking their phones through facial recognition (the data remains in a separate chip on the phone) and using applications which leak biometric information to remote databases, making it available not only to commercial vendors, governments and law enforcement authorities, but also to hackers and other actors interested in the misuse of this data.</p>
<p>For these reasons, <strong>informed political debate cannot take place without a thorough effort of digital literacy concerning the development of these new technologies.</strong> But it will also rely on information being made available to the public, so that the parties involved can be held accountable and the impact of the technologies on the everyday life of European citizens can be critically assessed. The aim of this report is thus precisely to present evidence about remote biometric identification technologies, the current state of their deployment in Europe, and their ethical, social, and political implications, in order to provide context and recommendations on the various positions.</p>
</section>
<section id="scope-and-working-definitions" class="level2">
<h2>Scope and working definitions</h2>
<p>This report will be centred on “biometric and behavioural mass surveillance” in public spaces. In addition to the definition of Remote Biometric Identification provided above, we define <strong>biometric data</strong> as all data related to the body which can be used to identify or monitor individuals or groups of individuals and is impossible or very difficult to alter (face, fingerprints, iris, etc.). <strong>Behavioural data</strong> concerns data related to the way in which individuals uniquely behave (facial expressions, body movements, voice, etc.). Finally, we define <strong>Biometric Mass Surveillance</strong> as a form of <strong>monitoring, tracking, or processing of personal (biometric and behavioural) data of individuals indiscriminately and in a generalised manner without prior criminal suspicion (FRA 2019)</strong>. We can add that this surveillance occurs <strong>at a distance, in a public space and in a continuous or ongoing manner, by checking individuals against data stored in a database.</strong> We thus conceptualise biometric mass surveillance, if left unchecked, as the <strong>possible dystopian horizon</strong> of <strong>remote biometric identification technologies.</strong></p>
<p>The report will primarily focus on those technologies (<strong>facial recognition</strong>, <strong>voice recognition</strong>, and the <strong>classification of behaviour</strong>) that are deployed in EU public spaces. It will initially focus on the deployment of such technologies by public actors in <strong>public spaces in the EU</strong>, such as cities. Public spaces can be <strong>publicly owned</strong> (roads, streets, city squares, parking facilities, government facilities) or <strong>privately owned</strong> (shopping malls, stadiums). Similarly, private actors can deploy these technologies <strong>in public spaces in collaboration with, or for further use by, public actors</strong> (e.g., the use of private Amazon Ring footage collected by individuals and shared with the police in some US cities).</p>
<p>On the basis of these specifications, the following cases are excluded from the analysis: the deployment of remote biometric identification technologies by private actors in private spaces (one’s house) if such deployments have no public consequences; the deployment of remote biometric identification technologies created by EU companies but used outside of the EU (exports); and the deployment of remote biometric identification outside of EU public spaces (such as the surveillance of the EU borders in the Mediterranean). Further technical definitions are provided in CHAPTER 2.</p>
</section>
<section id="methodology" class="level2">
<h2>Methodology</h2>
<p>This report is based primarily on desk research, drawing on primary sources from international and regional organisations, national governments, local authorities, non-governmental organisations and private companies, as well as on secondary sources (academic literature). For some of the case studies, qualitative interviews were carried out remotely (via telephone or video-call) and are indicated as such. For the dataset used in the related interactive map, we are particularly grateful to the Technopolice project and to Felix Tréguer for helping us access the data already collected for France. The report was commissioned in February 2021 and was written between February 2021 and October 2021.</p>
</section>
</section>
<section id="part-i-overview-of-european-practices" class="level1 Title">
<h1 class="Title">PART I: OVERVIEW OF EUROPEAN PRACTICES</h1>
</section>
<section id="technical-overview" class="level1">
<h1>Technical overview</h1>
<div class="keypoints">
<p><strong>Key points</strong></p>
<ul>
<li><p>The current market of RBI systems is overwhelmingly dominated by image-based products, at the centre of which is facial recognition technology (FRT). Other products such as face detection and person detection technologies are also in use.</p></li>
<li><p>FRT is typically deployed to perform two types of searches: cooperative searches for verification and/or authentication purposes, and non-cooperative searches to identify a data subject. The former involves the voluntary consent of the data subject to the capture of their image, while the latter typically occurs without it.</p></li>
<li><p>Live facial recognition is currently the most controversial deployment of FRT: live video feeds are used to generate snapshots of individuals, which are then matched against a database of known individuals, the “watchlist”.</p></li>
<li><p>Other RBI technologies are being deployed, though their use at present is marginal compared to FRT; these include gait, audio, and emotion recognition technologies, amongst others.</p></li>
<li><p>A better understanding of the technical components and possible usage applications of image-based RBI technologies is needed in order to assess their potential political implications.</p></li>
<li><p>RBI technologies are subject to technical challenges and limitations which should be considered in any broader analysis of their ethical, legal, and political implications.</p></li>
</ul>
</div> <!-- key points -->
<p>In order to grasp the various facets of remote biometric identification that could potentially lead to biometric mass surveillance, this section provides an overview of the currently available technologies, how they work and what their limitations are as well as where and by whom they are deployed in the <a class="maplink" data-title="European Union">European Union</a>.</p>
<section id="remote-biometric-identification-and-classification-defining-key-terms" class="level2">
<h2>Remote Biometric Identification and classification: defining key terms</h2>
<p>Although there is a growing number of technologies based on inputs other than images (photographs or videos), such as voice recognition (audio), LIDAR scans or radio waves, the current market of remote biometric identification is overwhelmingly dominated by image-based products, at the centre of which is face recognition. In the following sections we thus focus primarily on image-based products.</p>
</section>
<section id="detection-vs-recognition" class="level2">
<h2>Detection vs recognition</h2>
<p><strong>- Person detection</strong> denotes the ability of a software application to estimate (as in, provide a statistical probability) whether an object in the camera image is a person. Generally, it is able to indicate the position of the person in the image. Person detection systems can be used in <strong>basic analytics scenarios</strong>, where for example the number of people present is counted. Moreover, object detection algorithms can be used to track individuals between video frames, although they generally have a hard time handling occlusions (people walking in front of others, hiding them from the camera) and tracking specific people across multiple camera viewpoints. Person detection does not obtain any information about individuals’ faces.</p>
<p><strong>- Face detection</strong>, similar to person detection, refers to the capacity of a software application to detect that an object in the field of view of a camera is a human face. It is the most familiar function of smart technologies: it has been present in consumer electronics, such as photo cameras and mobile phones for years. Face detection provides the recognisable rectangle around faces when taking a picture with a smart phone. Similarly, it can be used in surveillance applications to assess the presence or positions of individuals.</p>
<p>- <strong>Facial recognition</strong> builds on top of face detection. The software uses the detected faces to determine who is in the picture. In order to do so, an algorithm calculates a numerical representation of the detected face, called a “feature vector” or “embedding”. This vector, which is unique to each individual, is what allows systems to perform searches. The detected vector can for example be used to search for existing identical vectors in a database of known individuals, where vectors are related to an identity. In a different type of usage, the feature vector can be used to track people moving from one camera’s field of view to the next. In this case, the vector is not used to find a “match” in a database but serves instead to confirm that it is the same individual that appears in different camera feeds. A minimal sketch of this detection-and-embedding pipeline follows below.</p>
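<p>The sketch below illustrates this pipeline with the open-source <code>face_recognition</code> library (built on dlib): faces are first detected, then turned into 128-dimensional embeddings, and two embeddings are compared by their Euclidean distance. It is a hedged illustration only: the file names are placeholders, each image is assumed to contain exactly one face, and the 0.6 threshold is merely the library’s conventional default, not a parameter of any system discussed in this report.</p>
<pre><code># Sketch: face detection + embedding + comparison with the
# open-source face_recognition library (dlib under the hood).
# File names are placeholders; each image is assumed to show one face.
import face_recognition

known = face_recognition.load_image_file("known_person.jpg")      # controlled image
query = face_recognition.load_image_file("camera_snapshot.jpg")   # uncontrolled image

# face_encodings() runs face detection first, then computes a
# 128-dimensional feature vector ("embedding") per detected face.
known_vec = face_recognition.face_encodings(known)[0]
query_vec = face_recognition.face_encodings(query)[0]

# A small Euclidean distance between embeddings suggests the same person;
# 0.6 is the library's conventional default threshold.
distance = face_recognition.face_distance([known_vec], query_vec)[0]
print(f"distance = {distance:.3f}; same person: {distance &lt; 0.6}")</code></pre>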
</section>
<section id="facial-recognition-verificationauthentication-vs-identification" class="level2">
<h2>Facial Recognition: verification/authentication vs identification </h2>
<p>Within the domain of facial recognition, two general types of searches are performed.</p>
<p>- <strong>One-to-one (1:1) searches</strong> are called <strong>verification or authentication</strong> searches and are used to determine whether an individual face presented to the camera matches a single face stored in the system. This is how “Face ID” works on iPhones, for example. In this example, people volunteer the capture of their face; they are thus considered to be in a “<strong>cooperative” scenario</strong>.</p>
<p>- <strong>One-to-many (1:N) searches</strong> are called <strong>identification</strong> searches. An unknown single face, picked up for example from surveillance video footage or from a passport, is run against a large dataset of known faces, in order to identify the unknown face, or to determine whether it occurs on a so-called “watchlist”. This can be done in the case of forensic investigations, or can be deployed in remote biometric identification scenarios in the public space. In this latter example, when faces are captured without the intention or consent of the individuals, the capture is considered “<strong>non-cooperative</strong>”. Because of the larger amount of data, identification is, from a technical perspective, substantially more difficult to perform than authentication. As such, many of these implementations do not return a single identity upon request, but rather provide a list of likely identities with, for example, a match-likeliness score. Note that <strong>identification does not automatically entail the recording of the name of the individual in the database</strong>. For example, if visitors of a shop are recorded, the software can look for recurring visitors without having their names. The sketch below contrasts the two kinds of search.</p>
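<p>In embedding terms, the difference between the two search types is small but consequential, as the following sketch suggests: verification is a single thresholded comparison, while identification ranks a whole watchlist and returns a list of candidates. The vectors, identifiers and threshold below are random stand-ins, purely for illustration.</p>
<pre><code># Sketch: 1:1 verification vs 1:N identification over face embeddings.
# All vectors, identifiers and thresholds here are illustrative stand-ins.
import numpy as np

rng = np.random.default_rng(0)
watchlist_ids = ["id_001", "id_002", "id_003", "id_004"]
watchlist_vecs = rng.normal(size=(4, 128))   # pre-computed embeddings
query_vec = rng.normal(size=128)             # embedding of an unknown face
THRESHOLD = 15.0                             # illustrative acceptance threshold

# 1:1 verification: one comparison against one stored embedding.
d = np.linalg.norm(watchlist_vecs[0] - query_vec)
print("verified" if d &lt; THRESHOLD else "rejected")

# 1:N identification: rank every watchlist entry by distance and
# return a list of likely identities rather than a single answer.
distances = np.linalg.norm(watchlist_vecs - query_vec, axis=1)
for i in np.argsort(distances):
    print(f"{watchlist_ids[i]}: distance {distances[i]:.2f}")</code></pre>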
</section>
<section id="forensic-ex-post-vs-live-facial-recognition" class="level2">
<h2>Forensic (ex-post) vs Live Facial Recognition</h2>
<p>A final distinction can be made between forensic (or ex-post) and live facial recognition. <strong>Forensic facial recognition</strong> is generally carried out in the context of judicial investigations, in order to match photographs of persons of interest captured via surveillance cameras or extracted from documents against an operational database of known individuals (Al-Kawaz et al. 2018). It is the most commonly used type of facial recognition in Europe, in particular by law enforcement authorities. <strong>Live facial recognition,</strong> instead, uses live video feeds in order to generate snapshots of individuals and then match them against a database of known individuals, the “watchlist”. It is the most controversial deployment of facial recognition (Fussey and Murray 2019).</p>
</section>
<section id="other-systems-gait-recognition-emotion-recognition" class="level2">
<h2>Other systems: gait recognition, emotion recognition</h2>
<p>Facial recognition occupies the central stage of the discussion when it comes to remote biometric identification and classification, because it is simply the most mature technology. Yet other technologies should be mentioned, in particular when considering biometric classification. They are for the moment relatively marginal, and information about their deployment is anecdotal at this stage.</p>
<section id="gait-recognition" class="level3">
<h3>Gait recognition </h3>
<p><strong>Gait recognition</strong> consists of recognising the specific way in which a person walks (their gait), but in reality it covers a broader range of criteria (body, proportions, posture, etc.) (Segal 2020, 2). The advantages of gait recognition are that it does not require clear access to a face and that it works with a lower image resolution (as it analyses an entire body, not only a face). Gait recognition, however, requires <strong>more computing power</strong>, because it works on the basis of moving images (i.e., multiple frames of still images, up to 30 frames per second) rather than still images. Gait recognition has been used as evidence in court in a case in Denmark (Segal 2020, 18). Gait recognition poses important technical challenges: the amount of data storage and processing power required far exceeds that of facial recognition; there are currently very few training datasets; and, so far, systems have proven to be more expensive and less accurate than facial recognition.</p>
</section>
<section id="people-tracking-and-counting" class="level3">
<h3>People tracking and counting </h3>
<p>This is perhaps the form of person tracking that stores the least information about individuals. An <strong>object detection algorithm</strong> estimates the presence and position of individuals in a camera image. These positions are stored or counted and used for further metrics. It is used to count <strong>passers-by in city centres</strong>, and for a <strong>one-and-a-half-meter social distancing monitor in Amsterdam</strong><a href="#fn3" class="footnote-ref" id="fnref3" role="doc-noteref"><sup>3</sup></a>. See also the case study in this document on the <a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Burglary-Free Neighbourhood</a> in Rotterdam (CHAPTER 7), which goes into more detail about the use of the recorded trajectories of individuals to label anomalous behaviour. A minimal counting sketch follows below.</p>
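<p>Such a counting pipeline can be approximated in a few lines with OpenCV’s built-in HOG person detector, as in the hedged sketch below. Real deployments typically rely on neural-network detectors rather than HOG, and the video file name here is a placeholder.</p>
<pre><code># Sketch: counting people per video frame with OpenCV's classic
# HOG + linear-SVM person detector. "square.mp4" is a placeholder.
import cv2

hog = cv2.HOGDescriptor()
hog.setSVMDetector(cv2.HOGDescriptor_getDefaultPeopleDetector())

capture = cv2.VideoCapture("square.mp4")
frame_index = 0
while True:
    ok, frame = capture.read()
    if not ok:
        break
    # Each detection is only a bounding box: positions are counted,
    # and no facial or identity information is derived.
    boxes, _ = hog.detectMultiScale(frame, winStride=(8, 8))
    print(f"frame {frame_index}: {len(boxes)} person(s)")
    frame_index += 1
capture.release()</code></pre>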
</section>
<section id="emotion-recognition." class="level3">
<h3>Emotion recognition. </h3>
<p>Software that categorises facial expressions into emotion categories (happiness, sadness, anger, etc.) is known to be used in billboards that are equipped with cameras, in order to <strong>analyse audience response to advertisements</strong>, for example in airports or at train stations. While the face is claimed by some to be a “window into the brain”, the technology has been heavily criticised. Firstly, some consider it an undesirable invasion of their privacy, while others criticise the technology for capturing primarily stereotypical ways of expressing oneself (van de Ven, 2017). In some places, such as at Dutch train stations, these critiques have led to the cameras in billboards being disabled altogether (Het Parool, 2017).</p>
</section>
<section id="age-gender-and-ethnicity-classification" class="level3">
<h3>Age, gender, and ethnicity classification </h3>
<p>Aside from deducing emotions, the face is used to deduce a variety of traits of individuals. For example, <strong>gender</strong>, <strong>ethnicity</strong>, and <strong>age estimations</strong> are available in many off-the-shelf facial analysis products. As with <strong>emotion recognition</strong>, these classifications are mainly used in digital signage and video advertisement contexts. LGBTQ+ communities have spoken out against automatic gender classification, pointing out that a long-fought-for, non-binary understanding of gender is undone by the technology’s binary classifications (Vincent, 2021). Similarly, recent revelations that <a class="maplink" data-title="Hikvision">Hikvision</a> (China) has used similar technology to estimate whether an individual belongs to <strong>China’s Uyghur minority</strong> have directly led the <strong>European Parliament</strong> to call for a ban of <strong><a class="maplink" data-title="Hikvision">Hikvision</a>’s</strong> products on the Parliament’s premises (Rollet, 2021).</p>
</section>
<section id="audio-recognition" class="level3">
<h3>Audio recognition </h3>
<p>From a technological perspective, neural networks process audio relatively similarly to how video is processed: rather than an image, a spectrogram is used as input for the network. However, under the GDPR, recording conversations is illegal in the <a class="maplink" data-title="European Union">European Union</a> without the informed consent of the participants. In order to adhere to these regulations, on some occasions only particular frequencies are recorded and processed. For example, in the <strong><a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Burglary-Free Neighbourhood</a> in Rotterdam (Netherlands)</strong> (CHAPTER 7), only two frequencies are used to classify audio, making conversations indiscernible while still making it possible to discern shouting or the breaking of glass<a href="#fn4" class="footnote-ref" id="fnref4" role="doc-noteref"><sup>4</sup></a>; a sketch of this band-filtering idea follows below. Another initiative using audio to enhance the surveillance camera is the <strong>Living Lab International Zone</strong> project in The Hague (Netherlands), a collaboration between a broad range of partners<a href="#fn5" class="footnote-ref" id="fnref5" role="doc-noteref"><sup>5</sup></a>.</p>
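<p>The band-filtering idea can be sketched as follows with NumPy and SciPy: a spectrogram is computed and everything outside two narrow frequency bands is discarded before any classification step. The synthetic signal and the band limits below are illustrative assumptions, not the parameters of the Rotterdam system.</p>
<pre><code># Sketch: reducing an audio signal to the energy in two frequency
# bands, so speech content is discarded before classification.
# The synthetic signal and band limits are illustrative only.
import numpy as np
from scipy import signal

sample_rate = 16000
t = np.linspace(0, 1.0, sample_rate, endpoint=False)
audio = np.sin(2 * np.pi * 3000 * t)   # stand-in for a microphone feed

freqs, times, spec = signal.spectrogram(audio, fs=sample_rate)

# Keep only two narrow bands (e.g., roughly where breaking glass and
# shouting carry energy); all other frequencies are dropped.
band_a = (freqs &gt;= 2800) &amp; (freqs &lt;= 3200)
band_b = (freqs &gt;= 900) &amp; (freqs &lt;= 1100)
features = np.vstack([spec[band_a].sum(axis=0), spec[band_b].sum(axis=0)])
print(features.shape)   # (2, time_windows): the classifier's only input</code></pre>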
</section>
</section>
<section id="how-does-image-based-remote-biometric-identification-work" class="level2">
<h2>How does image-based remote biometric identification work?</h2>
<p>In order to assess the political implications of each of these systems, it is important to disaggregate the main technical components and understand the different possible technologies at play. Although the marketing of security companies uses the notion of “smart cameras”, one should distinguish between the sensing hardware (cameras, microphones, LIDAR scanners) and the type of video analytics the captured data is subjected to. This second aspect should be further divided into an analysis of the training datasets and of the algorithms.</p>
<section id="image-acquisition-controlled-and-uncontrolled-images" class="level3">
<h3>Image acquisition: Controlled and uncontrolled images</h3>
<p>Facial recognition begins with an image: an image that will be subjected to the algorithm’s scrutiny. <strong>Controlled images</strong> are images that are captured for the purpose of processing, aiming at optimal positions and lighting conditions. They are for example taken at a police station, or at a photographer’s studio with strict requirements, and are either contained in databases that precede the introduction of a facial recognition system (e.g., driver’s license databases) or are specifically designed to match the high criteria of biometric systems (i.e., photographs for biometric passports). <strong>Uncontrolled images</strong> are images that are captured outside of specific requirements, collected for example through social media scraping or video surveillance.</p>
<p>When it comes to the acquisition technologies (cameras) for uncontrolled images, over the past decades the main evolution has been the passage from analogue to digital video, the latter allowing images to be processed through computers. As in the realm of consumer cameras, the initial race was for better definition (calculated in terms of megapixels). “Smart” camera systems require a slightly higher resolution than standard video surveillance systems in order to guarantee a minimum of 300 Pixels per Meter (PPM) to adequately feed the software (IPVM Team 2020, 5). But overall, the average camera does not exceed a definition of 4 megapixels, and is more often in the area of 2 megapixels (which yields a 1080p or HD resolution)<a href="#fn6" class="footnote-ref" id="fnref6" role="doc-noteref"><sup>6</sup></a>. <strong>The quality of capture</strong>, especially in non-cooperative scenarios, is determined by two main external variables: the angle of the face relative to the camera (front, side, back, top) and the lighting conditions (bright daylight, dark night). In recent years, manufacturers have added an additional infra-red channel to the red-green-blue (RGB) video channels in order to increase detail accuracy in low-light conditions. A back-of-the-envelope PPM calculation follows below.</p>
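<p>The PPM figure is simple arithmetic: the camera’s horizontal resolution divided by the width of the scene in its field of view. The sketch below works through two illustrative cases; the numbers are assumptions, not measurements from any deployment.</p>
<pre><code># Back-of-the-envelope Pixels per Meter (PPM) estimate: horizontal
# resolution divided by the width of the scene in view.
# The numbers below are illustrative assumptions only.
def pixels_per_meter(horizontal_pixels, scene_width_m):
    return horizontal_pixels / scene_width_m

# A 1080p camera (1920 px wide) covering a 5 m wide entrance:
print(pixels_per_meter(1920, 5.0))    # 384.0: above the 300 PPM guideline
# The same camera covering a 12 m wide square:
print(pixels_per_meter(1920, 12.0))   # 160.0: too coarse for recognition</code></pre>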
</section>
<section id="what-makes-systems-smart-image-processing-algorithms" class="level3">
<h3>What makes systems “smart”: image processing algorithms</h3>
<p>The processing of the photographic or video image by a specific software application is where the “smart” processing happens. Broadly speaking video surveillance technology can be split in two key historical moments: before machine learning, and after machine learning.</p>
<p><strong>Video motion detection (VMD) and heuristic filters.</strong> The early smart technologies relied on simple motion detection algorithms which compared pixel changes from one image to the next (Quevillon 2012). The problem is that any movement (the leaves of a tree) or change of light (a car passing in the night) can trigger the system. <strong>Heuristic filters</strong> were thus added to VMD systems in order to give the system additional parameters (the amount and size of changing pixels, etc.). Both systems were highly inefficient and prone to triggering false alarms, making such technologies unattractive. The main problem was that only pre-established changes, hard-coded by humans, would be detected by the systems; a sketch follows below.</p>
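<p>Frame differencing of this kind can be reproduced in a few lines of OpenCV, as in the sketch below; the file name and both thresholds are illustrative assumptions. The example also makes the weakness visible: any pixel change above the threshold, whether a burglar or waving leaves, counts as “motion”.</p>
<pre><code># Sketch of classic video motion detection (VMD): compare consecutive
# frames pixel by pixel and flag large changes. Thresholds are illustrative.
import cv2

capture = cv2.VideoCapture("feed.mp4")   # placeholder file name
ok, previous = capture.read()
previous = cv2.cvtColor(previous, cv2.COLOR_BGR2GRAY)
while True:
    ok, frame = capture.read()
    if not ok:
        break
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    diff = cv2.absdiff(gray, previous)              # per-pixel difference
    _, mask = cv2.threshold(diff, 25, 255, cv2.THRESH_BINARY)
    # A crude heuristic filter: ignore changes affecting few pixels.
    if cv2.countNonZero(mask) &gt; 5000:
        print("motion detected")   # waving leaves trigger this too
    previous = gray
capture.release()</code></pre>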
<p><strong>Machine learning.</strong> Machine learning revolutionised image-based biometric identification. Machine learning is an automated process through which a software application is programmed to recognise particular patterns, based on a dataset it is “trained” on. There are three ways in which this configuration of the machine learning model can be controlled: supervised, semi-supervised or unsupervised. <strong>Supervised machine learning</strong> consists of teaching the system to recognise people, cars, guns, or any other object by feeding it an annotated dataset of such objects. It is supervised because humans “supervise” how the computer learns, by annotating the dataset (“this is a car”, “this is a gun”, etc.). The categories of the annotations (cars, guns, etc.) will thus be the only ones that the system will be able to recognise (if only cars and guns are annotated, the system won’t, in such a case, recognise cats). Most video surveillance systems use supervised machine learning (IPVM Team 2021a, 11). <strong>Unsupervised machine learning</strong> lets the system cluster objects by itself. The advantage is the open-endedness of such systems (meaning they can generate categories of objects not anticipated in the training dataset), but the disadvantage is that algorithms can potentially cluster objects along criteria irrelevant to the task (for example clustering red motorcycles, cars, and trucks in one group and green ones in another, as opposed to creating one cluster for all motorcycles, one for cars and one for trucks). For this reason, <strong>semi-supervised machine learning</strong>, where only a small part of the data is labelled, can be used. Currently not widely in use, unsupervised machine learning is a growing trend in the video surveillance sector (IPVM Team 2021a, 12–13). The toy sketch below contrasts the supervised and unsupervised regimes.</p>
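<p>The contrast can be made concrete with a few lines of scikit-learn: a supervised classifier can only ever reproduce the annotated categories it was trained on, while a clustering algorithm invents its own groups, which may or may not be the relevant ones. The random vectors below are stand-ins for image features, purely for illustration.</p>
<pre><code># Toy contrast between supervised and unsupervised learning with
# scikit-learn. Random vectors stand in for image feature vectors.
import numpy as np
from sklearn.cluster import KMeans
from sklearn.neighbors import KNeighborsClassifier

rng = np.random.default_rng(1)
features = np.vstack([rng.normal(0, 1, (20, 8)), rng.normal(5, 1, (20, 8))])
labels = np.array([0] * 20 + [1] * 20)   # human annotations, e.g. car / gun

# Supervised: the model can only predict the annotated categories.
clf = KNeighborsClassifier(n_neighbors=3).fit(features, labels)
print(clf.predict(features[:2]))         # predictions within known labels

# Unsupervised: the model clusters by whatever structure dominates the
# data, which may not match the categories a human would care about.
km = KMeans(n_clusters=2, n_init=10).fit(features)
print(km.labels_[:5], km.labels_[-5:])</code></pre>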
<p>Both supervised and unsupervised learning exist in many shapes and sizes. For example, the Viola-Jones object detection algorithm<a href="#fn7" class="footnote-ref" id="fnref7" role="doc-noteref"><sup>7</sup></a> from 2001, which made real-time face detection viable, is a supervised algorithm. Contemporary developments in video processing focus on using various kinds of artificial <strong>neural networks</strong> (i.e., <strong>convolutional neural networks</strong>, <strong>recurrent neural networks</strong>) to classify images and videos. These networks can be trained in a <strong>supervised</strong>, <strong>semi-supervised</strong> or <strong>unsupervised</strong> manner, depending on their configuration.</p>
</section>
<section id="machine-learning-and-operational-datasets" class="level3">
<h3><strong>Machine learning</strong> and operational datasets</h3>
<p>Remote biometric identification and classification relies in large part on datasets, for two key but distinct moments of their operation.</p>
<p><strong>Machine learning datasets.</strong> These are the datasets used to train models through <strong>machine learning</strong>. We find three categories of such datasets. <strong>Publicly available datasets</strong> for object detection, such as COCO, ImageNet and Pascal VOC, include varying numbers of images labelled in a range of categories; these can be used to train algorithms to detect, for example, people in an image (IPVM Team 2021a, 27). The most used open-source datasets for surveillance technologies are Celeb 500k, MS-Celeb-1Million-Cleaned, Labeled Faces in the Wild, VGG Face 2, DeepGlint Asian Celeb, IMDB-Face, IMDB-Wiki, CelebA, Diveface, Flickr faces and the IARPA Janus Benchmark (IPVM Team 2021b, 7). Many of these datasets also function as a public benchmark, against which the performance and accuracy of various algorithms is measured. For example, Labeled Faces in the Wild, the COCO dataset and <a class="maplink" data-title="NIST">NIST</a> present such leaderboards on their websites<a href="#fn8" class="footnote-ref" id="fnref8" role="doc-noteref"><sup>8</sup></a>. <strong>Government datasets</strong> are generally collections of images available to a government for other purposes (driver’s license, passport, or criminal record photo datasets). While in Europe most of these datasets are not accessible to the public, in China and in the US they are made available for testing and training purposes to private companies, such as the Multiple Encounter Dataset (NIST, 2010). Finally, <strong>proprietary datasets</strong> may be developed by providers for their specific applications.</p>
<p><strong>Machine learning models.</strong> In the machine learning process, an algorithm gets iteratively configured for the optimal output, based on the particular dataset that it is fed with. This can be a neural network, but also, e.g., the aforementioned Viola-Jones object detector algorithm. The <strong>model</strong> is the final configuration of this learning process. As such, it does not contain the images of the dataset themselves. Rather, it represents the abstractions the algorithm “learned” over time. In other words, the model operationalises the machine learning dataset. For example, the YOLO object detection algorithm yields different results when it is trained on the COCO dataset than when it is trained on another dataset. It is the model (in conjunction with the algorithm) which determines the translation of an image into a category, or of the image of a face into its embedding.</p>
<p><strong>Operational datasets, or image databases.</strong> Datasets used in training machine learning models should be distinguished from matching or operational datasets which are the “watchlists” of for example criminals, persons of interest or other lists of individuals against which facial recognition searches will be performed whether these are in real time or post hoc. These datasets contain pre-processed images of individuals on the watchlist, and store the numerical representations of these faces, their feature vectors or <em>embedding</em>, in an index for fast retrieval and comparison with the queried features (using for example k-Nearest Neighbour or Support Vector Machines). Face or object detection models do not use such a dataset.</p>
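<p>The retrieval step against such an operational dataset can be sketched as an indexed nearest-neighbour search, as below with scikit-learn. The stored vectors are random stand-ins for pre-processed watchlist embeddings; a real system would map the returned row numbers back to watchlist records.</p>
<pre><code># Sketch: an operational "watchlist" as an index of embeddings,
# queried with k-Nearest Neighbours. Vectors are random stand-ins.
import numpy as np
from sklearn.neighbors import NearestNeighbors

rng = np.random.default_rng(2)
watchlist = rng.normal(size=(1000, 128))    # pre-computed embeddings
index = NearestNeighbors(n_neighbors=3).fit(watchlist)

query = rng.normal(size=(1, 128))           # embedding of a probe face
distances, rows = index.kneighbors(query)
print(rows[0], distances[0])   # candidate rows and their distances</code></pre>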
</section>
<section id="availability" class="level3">
<h3>Availability</h3>
<p>Facial recognition algorithms can be developed in-house, taken from an open-source repository, or purchased (IPVM Team 2021b, 14). Popular <strong>open-source facial recognition</strong> implementations include OpenCV, Face_pytorch, OpenFace and Insightface. Many of these software libraries are developed at universities or implement algorithms and neural network architectures presented in academic papers. They are free and allow for a great deal of customisation, but require substantial programming skills to be implemented in a surveillance system. Moreover, when using such software, the algorithms run on one’s own hardware, which provides the developer with more control but also requires more maintenance.</p>
<p><strong>Proprietary facial recognition.</strong> There are three possible routes for the use of proprietary systems. <strong>“Turnkey”</strong> systems, sold by manufacturers such as <strong><a class="maplink" data-title="Hikvision">Hikvision</a></strong>, <strong><a class="maplink" data-title="Dahua Technologies">Dahua</a></strong>, <strong><a class="maplink" data-title="AnyVision">AnyVision</a></strong> or <strong><a class="maplink" data-title="Briefcam Ltd">Briefcam</a></strong>, integrate the software and hardware and as such can be directly deployed by the client. <strong>Algorithm developers</strong> such as <strong>Amazon AWS Rekognition</strong> (USA), <strong><a class="maplink" data-title="NEC">NEC</a></strong> (Japan), <strong><a class="maplink" data-title="Ntech Lab">NTechlab</a></strong> (Russia) and <strong><a class="maplink" data-title="Paravision">Paravision</a></strong> (USA) allow clients to implement their algorithms and customise them to their needs. Finally, there are <strong>“cloud” API systems</strong>, a sub-set of the former category, where the algorithm is hosted in a datacentre and accessed remotely (IPVM Team 2021b, 16). The latter type of technology bears important legal ramifications, as the data may travel outside of national or European jurisdictions. It should be noted that many of the proprietary products are based on similar algorithms and network architectures as their open-source counterparts (OpenCV, 2021). Contrary to the open-source software, it is generally unclear which datasets of images have been used to train the proprietary algorithms.</p>
</section>
</section>
<section id="technical-limits-problems-and-challenges-of-facial-recognition" class="level2">
<h2>Technical limits, problems, and challenges of facial recognition</h2>
<p>Contrary to what can often be read in dystopian accounts of <strong>remote biometric identification</strong> technologies, these systems are neither “entirely inefficient”, nor “all powerful”. They are subjected to technical challenges and limitations, which should be considered in the broader analysis of their <strong>ethical, legal, and political implications</strong>.</p>
<section id="data-capture-challenges" class="level3">
<h3>Data capture challenges</h3>
<p>Facial recognition’s accuracy can easily be compromised by a number of factors in the capture of the data to be analysed, in particular when dealing with “non-cooperative” image capture. The <strong>resolution of the camera</strong>, and in particular the key variable of Pixels per Meter (a minimum of 300 PPM is generally required), is instrumental in ensuring that enough information is provided to the algorithm. <strong>Lighting conditions</strong> are similarly important. Although cameras increasingly add an infra-red channel to the RGB channels in order to recover detail in low-light conditions, inadequately illuminated faces will generate a high number of errors. <strong>Orientation of the face</strong> in relation to the camera is one more key factor to take into account, especially because a camera will rarely be mounted at face level (more likely overhead), and thus difficult angles will often result in partial representations of faces (Fernandez et al. 2020, 29). <strong>Vision can often be blocked</strong> by other factors, such as other individuals in large crowds, sunglasses, or masks (in particular in times of COVID-19). <strong>Obstruction can be voluntary</strong>, when individuals for example look down to avoid surveillance. Finally, not all systems have a <strong>liveness detection</strong> mechanism, meaning that they can be tricked by a photograph of a face instead of a real face (IPVM Team 2020, 12–13).</p>
</section>
<section id="dataset-related-challenges" class="level3">
<h3>Dataset-related challenges</h3>
<p>Datasets also face a number of technical challenges. For machine learning systems, <strong>small datasets</strong> will inadequately train the algorithms, simply because there are not enough different instances of the type of face or object that is supposed to be recognised. This is a challenge for gait recognition algorithms for example, for which there is a dearth of large datasets. <strong>Changes in features</strong> (such as hair, facial hair, beard, earrings) in the dataset can lead to a poorly trained algorithm. <strong>Datasets are often labelled with a specific purpose</strong>, and thus training an algorithm on a dataset that is not representative of the use-case can provide counter-productive results.</p>
<p>More problematically, a <strong>lack of diversity</strong>, in particular when it comes to ethnicity, age, or gender, leads to bias in the algorithm. This issue has been at the core of the US-based discussion on the banning of facial recognition. Public databases such as VGGFace2 (based on faces from Google Images) and MS-Celeb-1M (celebrity faces) are often used to train facial recognition algorithms, yet are far from representative of everyday populations; this is called <strong>representation bias</strong> (Fernandez et al. 2020, 30). The main goal of the project <em>Gender Shades</em>, led by <strong>Joy Buolamwini</strong>, was both to show the lack of representativeness of existing datasets and to address the consequent discrepancy between the error rates for light-skinned men and dark-skinned women (Fernandez et al. 2020, 30–31).</p>
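<p>The discrepancy highlighted by <em>Gender Shades</em> can be made concrete in a few lines of code: instead of a single aggregate score, error rates are computed per demographic group. A minimal sketch with invented toy labels and predictions (illustrative only, not drawn from the study):</p>
<pre><code>from collections import defaultdict

# Toy records: (group, ground truth, prediction). All values invented.
records = [
    ("light-skinned male", 1, 1), ("light-skinned male", 0, 0),
    ("light-skinned male", 1, 1), ("light-skinned male", 0, 0),
    ("dark-skinned female", 1, 0), ("dark-skinned female", 0, 1),
    ("dark-skinned female", 1, 1), ("dark-skinned female", 0, 0),
]

errors, totals = defaultdict(int), defaultdict(int)
for group, truth, prediction in records:
    totals[group] += 1
    if truth != prediction:
        errors[group] += 1

# The aggregate score hides the disparity that per-group rates reveal.
overall = sum(errors.values()) / len(records)
print(f"overall error rate: {overall:.0%}")
for group in totals:
    print(f"{group}: error rate {errors[group] / totals[group]:.0%}")
</code></pre>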
<p>However, a representational dataset is not always a desirable dataset, because actual <strong>structural biases</strong> often do not match the values of society. Illustrative of this is that a Google image search for the term “CEO” would originally return primarily photographs of white men. While this was representative of the CEO population (and thus accurate), the results reinforced the vision of a world that does not align with progressive societal values (Suresh, 2019). Because of the gap between ideals of equality and actual societal structural inequalities, datasets <strong>can be either representative of an unequal society, or representative of desired equality, but never both at the same time.</strong></p>
<p>The datasets upon which a computer algorithm learns to distinguish particular entities or behaviours are built through vast amounts of <strong>human labour</strong>. For example, the work that has gone into the image dataset ImageNet is equivalent to <strong>19 years of working 24 hours a day, 7 days a week</strong> (Malevé, 2020). Nevertheless, quantity does not necessarily equal quality. Many of the categories with which images are annotated are ambiguous: not in their dictionary definition per se, but in the way they enter the culture of the annotation workers. For example, the category “ratatouille” contains images of various stews, salads and even a character of the eponymous Pixar movie. Similarly, the category “Parisian” contains images of Paris Hilton (Malevé, 2020). This ambiguity of categories does not only haunt ImageNet. The aforementioned COCO dataset contains images of a birdhouse in the shape of a bird, which is tagged as “bird”, or a bare pizza bottom, which is tagged as “pizza” (Cochior and van de Ven, 2020). <strong>These examples show that even seemingly unambiguous concepts become fluid the moment they have to be strictly delineated in a dataset.</strong></p>
<p>Another important issue with ethical and political repercussions is <strong>unethically collected data</strong>, as in the case of <a class="maplink" data-title="Clearview AI">Clearview AI</a> detailed above. When it comes to <strong>operational datasets</strong>, i.e., datasets used in the actual process of facial authentication and/or identification, we have seen that possible deployments include the use of cloud-based services (either for the processing or the storage of the sensitive information). This increases the risks of data breaches and attacks by hackers (Fernandez et al. 2020, 34).</p>
</section>
<section id="algorithm-related-challenges" class="level3">
<h3>Algorithm-related challenges</h3>
<p>Finally, there are issues related to the quality and performance of the algorithms, and how to measure them. The National Institute of Standards and Technology (<a class="maplink" data-title="NIST">NIST</a>), an agency of the US Department of Commerce, offers vendors the possibility to test the efficacy of their algorithms on a standardised dataset, the “Ongoing Face Recognition Vendor Test” (FRVT).</p>
<p>As an IPVM study shows, brands often use single-number scores obtained from <a class="maplink" data-title="NIST">NIST</a> vendor tests (e.g., “our algorithm showed 98.6% accuracy”) (IPVM Team 2021b, 17). These scores are however obtained in very controlled conditions that <strong>do not match the real-world use</strong> of the algorithms, and there are thus important discrepancies between the two. Moreover, an accuracy score is not always representative of the desirable behaviour of a model. Data scientists therefore distinguish <strong>precision</strong> and <strong>recall</strong>, to better account for cases where positive classification is rare yet of high impact, for example when classifying individuals as high risk (Shung 2020, 202). These distinctions are often lost in commercial language and in the public debate.</p>
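<p>The gap between a headline accuracy figure and precision/recall is easy to demonstrate: when positives are rare, a system can be more than 99% “accurate” while missing most of its targets and producing mostly wrong alerts. A minimal sketch with invented numbers (not taken from any vendor test):</p>
<pre><code>def precision(tp, fp):
    # Of all the alerts raised, how many were correct?
    return tp / (tp + fp)

def recall(tp, fn):
    # Of all the persons of interest, how many were caught?
    return tp / (tp + fn)

# Toy scenario: 10 persons of interest among 10,000 faces.
# The system raises 8 alerts: 4 correct, 4 false.
tp, fp, fn, tn = 4, 4, 6, 9986

accuracy = (tp + tn) / (tp + fp + fn + tn)
print(f"accuracy:  {accuracy:.4f}")            # 0.9990 -- sounds excellent
print(f"precision: {precision(tp, fp):.2f}")   # 0.50   -- half the alerts are wrong
print(f"recall:    {recall(tp, fn):.2f}")      # 0.40   -- most targets are missed
</code></pre>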
<p>A final issue related to working with existing algorithms is what is known as <strong>observer bias or confirmation bias</strong>. The output of an algorithm reinforces the (subconscious) biases that went into producing it. It can occur both when creating the dataset and when training and running the algorithms. For example, the software used for predictive policing in Chicago helped determine where to send police officers on patrol. “Because these predictions are likely to overrepresent areas that were already known to police, officers become increasingly likely to patrol these same areas and observe new criminal acts that confirm their prior beliefs regarding the distributions of criminal activity. The newly observed criminal acts that police document as a result of these targeted patrols then feed into the predictive policing algorithm on subsequent days, generating increasingly biased predictions. This creates a feedback loop where the model becomes increasingly confident that the locations most likely to experience further criminal activity are exactly the locations they had previously believed to be high in crime” (Lum and Isaac, 2016). The example reveals that the different kinds of biases at play are hard to untangle, as the observer bias coincides with a historical bias of over-policing. It requires a lot of work to recognise such confirmation biases in the operation of automated classification software. The “black box” dimension of their operation, and the only just emerging efforts to build explainable AI, make it difficult to understand their categorisation process (Xie et al. 2020; Fernandez et al. 2020, 34).</p>
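<p>The feedback loop described by Lum and Isaac can be reproduced in a few lines of simulation: two districts with identical true crime rates, with patrols allocated in proportion to <em>recorded</em> crime, which is itself only generated where patrols are sent. A minimal sketch (all parameters are invented for illustration):</p>
<pre><code>TRUE_RATE = 0.3          # identical underlying crime rate in both districts
PATROLS = 20             # patrols available per day
recorded = [10.0, 12.0]  # district B starts with slightly more records

for day in range(50):
    total = sum(recorded)
    # Patrols are allocated in proportion to past *recorded* crime...
    patrols = [PATROLS * r / total for r in recorded]
    # ...and crime is only observed, and recorded, where officers patrol.
    for d in (0, 1):
        recorded[d] += patrols[d] * TRUE_RATE

print(f"records after 50 days: A = {recorded[0]:.0f}, B = {recorded[1]:.0f}")
print(f"gap grew from 2.0 to {recorded[1] - recorded[0]:.1f}")
</code></pre>
<p>Although both districts have exactly the same underlying crime rate, the records never capture the unpatrolled reality: district B's initial surplus of records is locked in, and the absolute gap between the two districts keeps growing day after day.</p>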
</section>
</section>
</section>
<section id="overview-of-deployments-in-europe" class="level1">
<h1>Overview of deployments in Europe</h1>
<div class="keypoints">
<p><strong>Key points</strong></p>
<ul>
<li><p>Current deployments of RBI technologies within Europe are primarily experimental and localised. However, the technology coexists with a broad range of algorithmic processing of security images being carried out on a scale which ranges from the individual level to what could be classed as biometric mass surveillance. Distinguishing the various characteristics of these deployments is not only important to inform the public debate, but also helps to focus the discussion on the most problematic uses of the technologies.</p></li>
<li><p>Image and sound-based security applications being used for authentication purposes do not currently pose a risk for biometric mass surveillance. However, it should be noted that an alteration to the legal framework could increase the risk of them being deployed for biometric mass surveillance especially as many of the databases being used contain millions of data subjects.</p></li>
<li><p>In addition to authentication, image and sound-based security applications are being deployed for surveillance. Surveillance applications include the deployment of RBI in public spaces.</p></li>
<li><p>Progress on two fronts makes the development of biometric mass surveillance more than a remote possibility. Firstly, the current creation and/or upgrading of biometric databases being used in civil and criminal registries. Secondly, the repeated piloting of live-feed systems connected to remote facial and biometric information search and recognition algorithms.  </p></li>
</ul>
</div> <!-- key points -->
<p>When looking at the map of actual deployments of image and sound-based security technologies in Europe, <strong>Remote Biometric Identification</strong> is, as this report is being written, so far mostly an <strong>experimental and localised</strong> application. It coexists alongside a broad range of algorithmic processing of security images in a spectrum that goes from individual, localised authentication systems to generalised law enforcement uses of authentication, to what can properly be defined as Biometric Mass Surveillance. <strong>Distinguishing the various characteristics</strong> of these deployments is not only important to inform the public debate, but it also <strong>helps focus the discussion on the most problematic uses of the technologies</strong>. It also highlights the risks of <strong>function creep</strong>: systems deployed for one use which is respectful of EU fundamental rights can in some cases very easily be upgraded to function as biometric mass surveillance.</p>
<p>The European map of image and sound-based security technologies can be divided into two broad categories: <strong>authentication applications</strong> and <strong>surveillance applications</strong>. <strong>Remote Biometric Identification is a sub-category of the latter</strong>.</p>
<section id="authentication" class="level2">
<h2>Authentication</h2>
<p>A broad range of deployments, which we consider in this first section, is not aimed at surveillance, but at authentication (see section 2.3 in this report), namely making sure that the person in front of the security camera is who they say they are.</p>
<section id="live-authentication" class="level3">
<h3>Live authentication</h3>
<p>As in the cases of the use of <a class="maplink" data-title="Cisco Systems">Cisco Systems</a>-powered FRT in two pilot projects in <strong><a class="maplink" data-title="Facial Recognition Pilot in High School (Nice)">high schools of Nice</a></strong> (see section 8.1) <strong>and <a class="maplink" data-title="Facial Recognition Pilot in High School (Marseille)">Marseille</a> (France)</strong><a href="#fn9" class="footnote-ref" id="fnref9" role="doc-noteref"><sup>9</sup></a>, or as in the case of the <strong><a class="maplink" data-title="Facial Recognition in Anderstorp Upper Secondary School (Skelleftea, Sweden)">Anderstorp Upper Secondary School</a> in Skelleftea (Sweden)</strong><a href="#fn10" class="footnote-ref" id="fnref10" role="doc-noteref"><sup>10</sup></a>, the aim of these projects was to identify students who were authorised to access the premises. School-wide biometric databases were generated and populated with students' portraits. Gates were fitted with cameras connected to facial recognition technology and allowed access only to recognised students. Another documented use has been for the <strong><a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App (Hungary)</a></strong>, in which telephone cameras are used by authorities to verify the identity of the persons logged into the app (see also section 10.1).</p>
<p>In these deployments, people must submit themselves to the camera in order to be identified and gain access. While these techniques of identification pose <strong>important threats to the privacy of the concerned small groups of users</strong> (in both high school cases, DPAs banned the use of FRTs) and run the risk of false positives (unauthorised people recognised as authorised) or false negatives (authorised people not recognised as such), <strong>the risk of biometric mass surveillance strictly speaking is low to non-existent because of the nature of the acquisition of images and other sensor-based data.</strong></p>
<p>However, other forms of live authentication tie in with surveillance practices, in particular various forms of <strong>blacklisting</strong>. With blacklisting, the face of every passer-by is compared to a list of faces of individuals who have been denied access to the premises. In such an instance, the listed people do not have to be identified by name, as long as an image of their face is available. This has been used in public places, for example in the case of the <a class="maplink" data-title="Korte Putstraat (Stopped)">Korte Putstraat</a> in the Dutch city of 's-Hertogenbosch: during the carnival festivities of 2019, two people were denied access to the street after they were singled out by the system (Gotink, 2019). It is unclear how many false positives were generated during this period. Other cases of blacklisting can be found, for example, in access control at various football stadiums in Europe (see also section 3.3). In many cases of blacklisting, individuals do not enrol voluntarily.</p>
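<p>Technically, blacklisting is a one-to-many comparison: each captured face is converted into a numerical template (an embedding) by a face recognition model and scored against the templates of the listed individuals, with an alert raised when the similarity exceeds a threshold. A minimal sketch, assuming embeddings have already been extracted upstream (names, vectors and the threshold are invented placeholders; production systems use templates with hundreds of dimensions):</p>
<pre><code>import math

def cosine_similarity(a, b):
    dot = sum(x * y for x, y in zip(a, b))
    norm_a = math.sqrt(sum(x * x for x in a))
    norm_b = math.sqrt(sum(y * y for y in b))
    return dot / (norm_a * norm_b)

# Invented 4-dimensional templates; real systems use hundreds of dimensions.
blacklist = {
    "listed_person_A": [0.1, 0.9, 0.3, 0.4],
    "listed_person_B": [0.8, 0.1, 0.5, 0.2],
}
THRESHOLD = 0.95  # lower thresholds catch more, at the cost of false positives

def check_face(embedding):
    """Compare one captured face against every blacklist entry."""
    for name, template in blacklist.items():
        score = cosine_similarity(embedding, template)
        if score &gt;= THRESHOLD:
            return name, score  # alert: possible match
    return None                 # passer-by ignored, but still analysed

print(check_face([0.12, 0.88, 0.31, 0.41]))  # near person_A: raises an alert
</code></pre>
<p>Note what the logic implies: although only the listed individuals are of interest, every single passer-by's face must be captured and analysed for the comparison to take place.</p>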
</section>
<section id="forensic-authentication" class="level3">
<h3>Forensic authentication</h3>
<p>Biometric systems for the purposes of authentication are also increasingly deployed for <strong>forensic applications</strong> among law-enforcement agencies in the European Union. The typical scenario for the use of such technologies is to match the photograph of a suspect (extracted, for example, from previous records or from CCTV footage) against an existing dataset of known individuals (e.g., a national biometric database or a driver's license database) (TELEFI, 2021). The development of these forensic authentication capabilities is particularly relevant to this study, because it entails making large databases ready for searches on the basis of biometric information.</p>
<p>To date, <strong>11 out of 27 member states of the <a class="maplink" data-title="European Union">European Union</a></strong> are using facial recognition against biometric databases for forensic purposes: <strong>Austria</strong> (<a class="maplink" data-title="EDE (AFR used by Austrian Criminal Intelligence Service)">EDE</a>)<a href="#fn11" class="footnote-ref" id="fnref11" role="doc-noteref"><sup>11</sup></a>, <strong>Finland</strong> (<a class="maplink" data-title="KASTU (Finland)">KASTU</a>)<a href="#fn12" class="footnote-ref" id="fnref12" role="doc-noteref"><sup>12</sup></a>, <strong>France</strong> (<a class="maplink" data-title="Deployment of TAJ">TAJ</a>)<a href="#fn13" class="footnote-ref" id="fnref13" role="doc-noteref"><sup>13</sup></a>, <strong>Germany</strong> (<a class="maplink" data-title="German central criminal information system INPOL">INPOL</a>)<a href="#fn14" class="footnote-ref" id="fnref14" role="doc-noteref"><sup>14</sup></a>, <strong>Greece</strong> (<a class="maplink" data-title="Facial Recognition in Greece (Law Enforcement)">Mugshot Database</a>)<a href="#fn15" class="footnote-ref" id="fnref15" role="doc-noteref"><sup>15</sup></a>, <strong>Hungary</strong> (<a class="maplink" data-title="NEC Face Recognition Search Engine in Hungary">Facial Image Registry</a>)<a href="#fn16" class="footnote-ref" id="fnref16" role="doc-noteref"><sup>16</sup></a>, <strong>Italy</strong> (<a class="maplink" data-title="AFIS (Deployment, Italy)">AFIS</a>)<a href="#fn17" class="footnote-ref" id="fnref17" role="doc-noteref"><sup>17</sup></a>, <strong>Latvia</strong> (<a class="maplink" data-title="BDAS Deployment (Latvia)">BDAS</a>)<a href="#fn18" class="footnote-ref" id="fnref18" role="doc-noteref"><sup>18</sup></a>, <strong>Lithuania</strong> (<a class="maplink" data-title="HDR (Deployment, Lithuania)">HDR</a>)<a href="#fn19" class="footnote-ref" id="fnref19" role="doc-noteref"><sup>19</sup></a>, <strong>Netherlands</strong> (<a class="maplink" data-title="CATCH">CATCH</a>)<a href="#fn20" class="footnote-ref" id="fnref20" role="doc-noteref"><sup>20</sup></a> and <strong>Slovenia</strong> (<a class="maplink" data-title="VeriLook (and Face Trace) in Slovenia">Record of Photographed Persons</a>)<a href="#fn21" class="footnote-ref" id="fnref21" role="doc-noteref"><sup>21</sup></a> (TELEFI 2021).</p>
<p><strong>Eight additional countries</strong> are expected to acquire such capabilities in the near future: <strong>Croatia</strong> (<a class="maplink" data-title="ABIS (Deployment, Croatia)">ABIS</a>)<a href="#fn22" class="footnote-ref" id="fnref22" role="doc-noteref"><sup>22</sup></a>, <strong>Czech Republic</strong> (<a class="maplink" data-title="CBIS (deployment, Czech Republic)">CBIS</a>)<a href="#fn23" class="footnote-ref" id="fnref23" role="doc-noteref"><sup>23</sup></a>, <strong>Portugal</strong> (<a class="maplink" data-title="AFIS (Deployment, Portugal)">AFIS</a>), <strong>Romania</strong> (<a class="maplink" data-title="Romanian Police use of Facial Recognition (NBIS)">NBIS</a>)<a href="#fn24" class="footnote-ref" id="fnref24" role="doc-noteref"><sup>24</sup></a>, <strong>Spain</strong> (<a class="maplink" data-title="ABIS (Deployment, Spain)">ABIS</a>), <strong><a class="maplink" data-title="National Forensic Center">Sweden</a></strong> (<a class="maplink" data-title="Facial Recognition National Mugshot Database (Sweden)">National Mugshot Database</a>), <strong>Cyprus</strong> (<a class="maplink" data-title="Facial Recognition in Cyprus (Law Enforcement)">ISIS Faces</a>)<a href="#fn25" class="footnote-ref" id="fnref25" role="doc-noteref"><sup>25</sup></a> and <strong><a class="maplink" data-title="Estonian Forensic Science Institute">Estonia</a></strong> (<a class="maplink" data-title="ABIS (Deployment, Estonia)">ABIS</a>)<a href="#fn26" class="footnote-ref" id="fnref26" role="doc-noteref"><sup>26</sup></a> (TELEFI 2021).</p>
<p>When it comes to international institutions, <strong><a class="maplink" data-title="Interpol">Interpol</a></strong> (2020) has a facial recognition system (<a class="maplink" data-title="IFRS (Interpol)">IFRS</a>)<a href="#fn27" class="footnote-ref" id="fnref27" role="doc-noteref"><sup>27</sup></a>, based on facial images received from more than 160 countries. <strong><a class="maplink" data-title="Europol">Europol</a></strong> has two sub-units which use the facial recognition search tool and database known as <a class="maplink" data-title="FACE Deployment by EUROPOL">FACE</a>: the European Counter Terrorism Centre (ECTC) and the European Cybercrime Centre (ECC) (TELEFI 2021, 149–153; Europol 2020).</p>
<p><strong>Only 9 countries in the EU so far have rejected or do not plan to implement</strong> FRT for forensic purposes: <strong>Belgium</strong> (see CHAPTER 6), <strong>Bulgaria</strong>, <strong>Denmark</strong>, <strong>Ireland</strong>, <strong>Luxembourg</strong>, <strong>Malta</strong>, <strong>Poland</strong>, <strong>Portugal</strong>, <strong>Slovakia</strong>.</p>
<p><img src="images/media/image1.png" style="width:4.62502in;height:3.28283in" alt="Map Description automatically generated" /></p>
<p>Figure 1. EU countries' use of FRT for forensic applications<a href="#fn28" class="footnote-ref" id="fnref28" role="doc-noteref"><sup>28</sup></a></p>
<p><strong>When it comes to databases</strong>, some countries limit the searches to <strong>criminal databases</strong> (Austria, Germany, France, Italy, Greece, Slovenia, Lithuania, UK), while other countries open the searches to <strong>civil databases</strong> (Finland, Netherlands, Latvia, Hungary).</p>
<p>This means that the <strong>person categories can vary substantially.</strong> In the case of criminal databases, they can range from suspects and convicts to asylum seekers, aliens, unidentified persons, immigrants and visa applicants. When <strong>civil databases</strong> are used as well, such as in Hungary, the database contains a broad range of “individuals of known identity from various document/civil proceedings” (TELEFI 2021, appendix 3).</p>
<p><strong>Finally, the database sizes</strong>, in comparison to the authentication databases mentioned in the previous section, are of a different magnitude. The databases of school students in France and Sweden mentioned in the previous section contain a few hundred entries each. National databases can instead contain several million. Criminal databases such as Germany's INPOL contain <strong>6,2 million individuals</strong>, France's <a class="maplink" data-title="Deployment of TAJ">TAJ</a> <strong>21 million individuals</strong> and <a class="maplink" data-title="AFIS (Deployment, Italy)">Italy's AFIS</a> <strong>9 million individuals.</strong> Civil databases, such as Hungary's Facial Image Registry, contain <strong>30 million templates</strong> (TELEFI, 2021 appendix 3).</p>
<p>Authentication has also been deployed as part of integrated “safe city” solutions, such as the <strong><a class="maplink" data-title="NEC">NEC</a> Technology <a class="maplink" data-title="NEC Technology in Lisbon">Bio-IDiom system in Lisbon</a> and London,</strong> deployed for forensic investigation purposes. For this specific product, authentication can occur via facial recognition, as well as other biometric authentication techniques such as <strong>ear acoustics, iris, voice, fingerprint, and finger vein recognition</strong>. We currently do not have public information on the use of <a class="maplink" data-title="NEC Technology in Lisbon">Bio-IDiom in Lisbon</a> or London. On <a class="maplink" data-title="NEC">NEC</a>'s website (2021), however, Bio-IDiom is advertised as a “multimodal” identification system that has been used, for example, by the Los Angeles County Sheriff's Department (LASD) for criminal investigations. The system “combines multiple biometric technologies including fingerprint, palm print, face, and iris recognition” and works “based on the few clues left behind at crime scenes”. In Los Angeles, “this system is also connected to the databases of federal and state law enforcement agencies such as the California Department of Justice and FBI, making it the world's largest-scale service-based biometrics system for criminal investigation”. We do not know whether the same holds for the Portugal and UK deployments.</p>
</section>
<section id="case-study-inpol-germany" class="level3">
<h3>Case study: INPOL (Germany)</h3>
<p>In order to give a concrete example of the forensic use of biometric technology, we can take the German case. Germany has been using <strong>automated facial recognition</strong> technologies to identify criminal activity since 2008, using a central criminal information system called <strong><a class="maplink" data-title="German central criminal information system INPOL">INPOL</a> (Informationssystem Polizei)</strong>, maintained by the <strong><a class="maplink" data-title="German Federal Criminal Police Office (Bundeskriminalamt)">Bundeskriminalamt</a> (BKA)</strong>, the federal criminal police office. INPOL uses <strong><a class="maplink" data-title="Oracle Corporation">Oracle software</a></strong> and includes the following information: name, aliases, date and place of birth, nationality, fingerprints, mugshots, appearance, information about an individual's criminal history such as prison sentences or violent offences, and DNA information. However, DNA information is not automatically recorded (TELEFI 2021).</p>
<p>The <a class="maplink" data-title="German central criminal information system INPOL">INPOL</a> database includes <strong>facial images of suspects, arrestees, missing persons, and convicted individuals</strong>. For the purpose of facial recognition, anatomical features of a person's face or head, as seen on video surveillance or images, are used as material to match against data in <a class="maplink" data-title="German central criminal information system INPOL">INPOL</a>. The facial recognition system compares templates and lists all the matches ordered by degree of accordance. The BKA has specific personnel visually analysing the system's choices and providing an assessment, defining the probability of identifying a person. This assessment can be used in a court of law if necessary (Bundeskriminalamt, n.d.). Searches in the database are conducted using <a class="maplink" data-title="Cognitec Systems">Cognitec</a>'s FaceVACS software (TELEFI 2021).</p>
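<p>The “list of matches ordered by degree of accordance” corresponds to a standard ranked one-to-many search: a probe template is scored against every template in the database, and the highest-scoring candidates are returned as a shortlist for human examination rather than as an automatic identification. A minimal sketch of this ranking step (record identifiers and vectors are invented, and cosine similarity stands in for the proprietary scoring used by the actual software):</p>
<pre><code>import math

def cosine_similarity(a, b):
    dot = sum(x * y for x, y in zip(a, b))
    return dot / (math.sqrt(sum(x * x for x in a)) *
                  math.sqrt(sum(y * y for y in b)))

def rank_candidates(probe, database, top_k=3):
    """Score the probe against every template; return a shortlist.

    The output is an ordered candidate list for a human examiner,
    not an automatic identification.
    """
    scores = sorted(
        ((cosine_similarity(probe, tpl), rec) for rec, tpl in database.items()),
        reverse=True,
    )
    return scores[:top_k]

# Invented records and 4-dimensional templates.
database = {
    "record_1042": [0.1, 0.9, 0.3, 0.4],
    "record_2310": [0.8, 0.1, 0.5, 0.2],
    "record_0007": [0.2, 0.7, 0.4, 0.5],
}
for score, record_id in rank_candidates([0.12, 0.88, 0.31, 0.41], database):
    print(f"{record_id}: similarity {score:.3f}")
</code></pre>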
<p>As of March 2020, <strong><a class="maplink" data-title="German central criminal information system INPOL">INPOL</a></strong> consists of <strong>5,8 million images of about 3,6 million individuals</strong>. All police stations in Germany have access to this database. The BKA stores this biometric data, which can be used by other ministries as well, for instance to identify asylum seekers. Furthermore, the data is shared on an international level in the context of the <strong>Prüm cooperation</strong> (mostly fingerprints and DNA patterns). The <strong>BKA</strong> also stores <strong>DNA analysis data as part of <a class="maplink" data-title="German central criminal information system INPOL">INPOL</a></strong>, accessible to all police stations in Germany. That database contains <strong>1,2 million data sets</strong> (Bundeskriminalamt, n.d.). Other recorded facial images, for instance from driver's licenses or passports, are not included in the search, and the database is mainly used for police work (TELEFI 2021).</p>
</section>
<section id="a-blurred-boundary-between-authentication-and-surveillance" class="level3">
<h3>A blurred boundary between authentication and surveillance</h3>
<p>In principle, because of the strict legal framework to which law enforcement agencies are subject, <strong>forensic biometric identification</strong> should not present a <strong>risk of biometric mass surveillance</strong>. The acquisition of images and the subsequent one-to-one searches are carried out as part of judicial investigations when a legal threshold of suspicion is met. The operation of the system by specialised forensic departments should follow the procedural limits set by the judicial process.</p>
<p><strong>Function creep is however particularly concerning.</strong> If the legal framework is altered to allow the acquisition of live video, and if live searches on these feeds are legally authorised against existing criminal and civil databases, then from a technical perspective it can be argued that there is <strong>potentially a risk of biometric mass surveillance. The main risk here is that the individuals whose identities are searched or tagged are not selected as a result of a judicial investigation, but indiscriminately. The system in place would then allow for searches of these individuals against huge databases.</strong> In other words, <strong>by creating new biometric databases or upgrading existing databases to be FRT-readable</strong>, and developing or acquiring the algorithmic capabilities to search them, law enforcement agencies across Europe are <strong>building an infrastructure which is technically capable of “switching” easily to a mode of operation akin to biometric mass surveillance.</strong></p>
</section>
</section>
<section id="surveillance" class="level2">
<h2>Surveillance</h2>
<p>A second broad use of image and audio-based security technologies is for surveillance purposes. Here again, it is important, we suggest, to distinguish between two broad categories.</p>
<section id="smart-surveillance-features" class="level3">
<h3>Smart surveillance features</h3>
<p>A first range of deployments of <strong>“smart” systems</strong> corresponds to what can broadly be defined as “smart surveillance”, yet <strong>does not collect or process biometric information per se</strong><a href="#fn29" class="footnote-ref" id="fnref29" role="doc-noteref"><sup>29</sup></a>. Smart systems can be used <strong>ex-post</strong> <strong>to assist CCTV camera operators</strong> in processing large amounts of <strong>recorded information</strong>, or can guide their attention when they have to monitor a large number of <strong>live video feeds</strong> simultaneously. Smart surveillance uses the following features:</p>
<p><strong>- Anomaly detection.</strong> <a class="maplink" data-title="IBM Smart CCTV deployment in Toulouse">In Toulouse</a> (France), the City Council commissioned <a class="maplink" data-title="IBM">IBM</a> to connect 30 video surveillance cameras to software able to “assist human decisions” by raising alerts when “abnormal events are detected” (Technopolice 2021). The request was justified by the “difficulties of processing the images generated daily by the 350 cameras and kept for 30 days (more than 10,000 images per second)”. The objective, according to the city's digital directorate, is “to optimise and structure the supervision of video surveillance operators by generating alerts through a system of intelligent analysis that facilitates the identification of detected anomalies, whether crowd movements, isolated luggage, the crossing of virtual barriers north of the Garonne, sudden movements, or searches for shapes and colours. All these detections are done in real time or delayed” (Technopolice 2021). In other words, anomaly detection is a way to <em>operationalise</em> the numerical output of various computer-vision-based recognition systems (a minimal sketch of this kind of statistical alerting follows the list below). Similar systems are used in the <strong>smart video surveillance deployment in Valenciennes (France)</strong> or in the <strong>Urban Surveillance Centre (Marseille)</strong>.</p>
<p><strong>- Object detection.</strong> In Amsterdam, around the <strong><a class="maplink" data-title="Johan Cruijff ArenA">Johan Cruijff ArenA</a></strong> (stadium), the city has been experimenting with a <strong><a class="maplink" data-title="Digitale Perimeter">Digitale Perimeter</a></strong> (digital perimeter) surveillance system. In addition to the usual features of facial recognition and crowd monitoring, the system includes the possibility of automatically detecting specific objects such as <strong>weapons, fireworks</strong> or <strong>drones</strong>. Similar features are found in <strong><a class="maplink" data-title="Inwebit">Inwebit</a>'s <a class="maplink" data-title="Smart Security Platform (SSP), Poland">Smart Security Platform</a> (SSP) in Poland.</strong></p>
<p><strong>- Feature search.</strong> In <a class="maplink" data-title="City of Marbella">Marbella</a> (Spain), <a class="maplink" data-title="Avigilon">Avigilon</a> deployed <a class="maplink" data-title="Avigilon deployment in Marbella">a smart camera system</a> aimed at providing “smart” functionalities without biometric data. Since regional law bans facial and biometric identification without consent, the software uses “appearance search”, which provides estimates for “unique facial traits, the colour of a person's clothes, age, shape, gender and hair colour”. This information is not considered biometric. The individual's features can be used to search for suspects fitting a particular profile. Similar technology has been <a class="maplink" data-title="Monitoring Kortrijk">deployed in Kortrijk</a> (Belgium), which provides search parameters for people, vehicles and animals (Verbeke 2019). During the Covid-19 pandemic, several initiatives emerged to automatically detect whether mask mandates were observed by the public, such as the <a class="maplink" data-title="Face mask recognition in Châtelet-Les Halles (Stopped)">aborted face mask recognition project in Châtelet-Les Halles</a> developed by the company <a class="maplink" data-title="Datakalab">Datakalab</a>.</p>
<p><strong>- Video summary.</strong> Some companies, such as <strong><a class="maplink" data-title="Briefcam Ltd">Briefcam</a></strong> with its product <strong>Briefcam Review</strong>, offer a related functionality, which promises to shorten the analysis of long hours of CCTV footage by identifying specific topics of interest (children, women, lighting changes) and making the footage searchable. The product combines face recognition, license plate recognition, and more mundane video analysis features such as the possibility to overlay selected scenes, thus highlighting recurrent points of activity in the image. Briefcam is deployed in several cities across Europe, including <a class="maplink" data-title="Briefcam deployment in Vannes">Vannes</a>, <a class="maplink" data-title="Eiffage / Briefcam deployment in Roubaix">Roubaix</a> (in partnership with <strong><a class="maplink" data-title="Eiffage">Eiffage</a></strong>, managed by the <strong><a class="maplink" data-title="City of Roubaix">City of Roubaix</a></strong> and the <strong><a class="maplink" data-title="Métropole Européenne de Lille">Métropole Européenne de Lille</a></strong>) and <a class="maplink" data-title="Smart Surveillance in Moirans">Moirans</a> in France (with equipment provided by <strong><a class="maplink" data-title="Nomadys">Nomadys</a></strong>).</p>
<p><strong>- Object detection and object tracking.</strong> As outlined in chapter 2, object detection is often the first step in the various digital detection applications for images. An object here can mean anything the computer is conditioned to search for: a suitcase, a vehicle, but also a person; some products further process the detected object to estimate particular features, such as the colour of a vehicle or the age of a person. However, on some occasions — often to address concerns over privacy — only the position of the object on the image is stored. This is for example the case with the test of the <strong><a class="maplink" data-title="Test of One and a half meter monitor">One-and-a-half-meter monitor</a> in Amsterdam (Netherlands)</strong>, <strong><a class="maplink" data-title="Intemo">Intemo</a>'s <a class="maplink" data-title="People counting in Nijmegen">people counting system in Nijmegen</a> (Netherlands)</strong>, the <strong><a class="maplink" data-title="ViSense at MINDBase">ViSense social distancing monitor</a> at MINDBase</strong>, a testing location of the <strong><a class="maplink" data-title="Dutch Defence Equipment Organisation">Dutch Defence Equipment Organization</a></strong>; the <strong><a class="maplink" data-title="Project KICK">KICK project</a></strong> in <strong><a class="maplink" data-title="Brugge Municipality">Brugge</a></strong>, <strong><a class="maplink" data-title="Kortrijk Municipality">Kortrijk</a></strong>, <strong><a class="maplink" data-title="Ieper Municipality">Ieper</a></strong>, <strong><a class="maplink" data-title="Roeselare Municipality">Roeselare</a></strong> and <strong><a class="maplink" data-title="Economisch Huis Oostende">Oostende</a></strong> (Belgium), the <strong><a class="maplink" data-title="ViSense - Mechelen">ViSense project in Mechelen</a> (Belgium)</strong> or the <strong><a class="maplink" data-title="Eco-Counter">Eco-counter</a> <a class="maplink" data-title="Tracking cameras pilot in Lannion">tracking cameras pilot project</a></strong> in <strong><a class="maplink" data-title="City of Lannion">Lannion</a> (France)</strong>.</p>
<p><strong>- Movement recognition.</strong> <a class="maplink" data-title="Avigilon">Avigilon</a>'s software deployed in Marbella (Spain) also detects unusual movement. “To avoid graffiti, we can calculate the time someone takes to pass a shop window,” explained Javier Martín, local chief of police in Marbella, to the Spanish newspaper El País. “If it takes them more than 10 seconds, the camera is activated to see if they are graffitiing. So far, it hasn't been activated.” (Colomé 2019) Similar movement recognition technology is used in the ViSense deployment at the Olympic Park London (UK) and the <a class="maplink" data-title="Security cameras Mechelen-Willebroek">security camera system in Mechelen-Willebroek</a> (Belgium). It should be noted that movement recognition can be done in two ways: projects such as the <strong><a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Data-lab Burglary-free Neighbourhood</a> in Rotterdam (Netherlands)</strong><a href="#fn30" class="footnote-ref" id="fnref30" role="doc-noteref"><sup>30</sup></a> are only based on tracking the trajectories of people through an image (see also object detection), while cases such as the <strong><a class="maplink" data-title="Living Lab Stratumseind">Living Lab Stratumseind</a></strong><a href="#fn31" class="footnote-ref" id="fnref31" role="doc-noteref"><sup>31</sup></a> <strong>in Eindhoven (Netherlands)</strong> also process the movements and gestures of individuals in order to estimate their behaviour.</p>
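<p>Operationally, several of the features above reduce to flagging statistical outliers in simple signals extracted from the video (amount of motion, people counts, dwell times) so that operators only review flagged moments. A minimal sketch of such alerting, using a rolling mean and standard deviation over an invented per-frame motion score (the detection logic of the deployed products is proprietary and not public):</p>
<pre><code>import statistics

def alerts(signal, window=30, z_threshold=3.0):
    """Yield indices whose value deviates strongly from recent history."""
    for i in range(window, len(signal)):
        history = signal[i - window:i]
        mean = statistics.mean(history)
        stdev = statistics.stdev(history) or 1e-9  # guard against zero
        z = (signal[i] - mean) / stdev
        if abs(z) &gt; z_threshold:
            yield i, z

# Invented per-frame motion score: a calm scene, then a surge at frame 60.
scores = [10 + (i % 3) for i in range(60)] + [45, 50, 48] + [11] * 20
for frame, z in alerts(scores):
    print(f"frame {frame}: z-score {z:+.1f}, flagged for operator review")
</code></pre>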
<section id="audio-recognition-1" class="level4">
<h4>Audio recognition</h4>
<p>- In addition to image (video) based products, some deployments use audio recognition to complement the decision-making process, for example in the <strong><a class="maplink" data-title="Serenecity">Serenecity</a> (a branch of <a class="maplink" data-title="Verney-Carron SA">Verney-Carron</a>) project in Saint-Etienne (France)</strong>, the <strong><a class="maplink" data-title="Smart CCTV with audio detection in Rouen public transportation">Smart CCTV deployment in public transportation</a> in <a class="maplink" data-title="City of Rouen">Rouen</a> (France)</strong> or the <strong><a class="maplink" data-title="Smart surveillance coupled with audio recognition (Strasbourg)">Smart CCTV system in Strasbourg</a> (France)</strong>. The <a class="maplink" data-title="Serenicity project (Saint-Etienne)">project piloted in Saint-Etienne</a>, for example, worked by placing “audio capture devices” (the term “microphone” was avoided) in strategic parts of the city. Sounds qualified as suspicious by an anomaly detection algorithm would then alert operators in the Urban Supervision Centre, prompting further investigation via CCTV or the deployment of the necessary services (healthcare or police, for example) (France 3 Auvergne-Rhône-Alpes 2019).</p>
</section>
<section id="emotion-recognition" class="level4">
<h4>Emotion recognition</h4>
<p>- <strong>Emotion recognition</strong> is a rare occurrence. We found evidence of its deployment only in a <strong><a class="maplink" data-title="Facial/Emotion Recognition Pilot in Tramway (Nice)">pilot project in Nice</a> (see section 8.1)</strong> and in the <strong><a class="maplink" data-title="Citybeacon">Citybeacon</a> project in Eindhoven</strong>, and even then the project was never actually tested. The original idea proposed by the company <a class="maplink" data-title="Two-I">Two-I</a> was “a ‘real-time emotional mapping’ capable of highlighting ‘potentially problematic or even dangerous situations’”. “A dynamic deployment of security guards in an area where tension and stress are felt is often a simple way to avoid any overflow,” also argues <a class="maplink" data-title="Two-I">Two-I</a>, whose “Security” software would be able to decipher some 10,000 faces per second (Binacchi 2019).</p>
</section>
<section id="gait-recognition-1" class="level4">
<h4>Gait recognition</h4>
<p><strong>Gait recognition is currently not deployed in Europe.</strong> To our knowledge, only one company, <strong>Watrix (a company based in China)</strong>, has commercialised gait recognition, but only in China (Segal 2020, 2).</p>
</section>
</section>
<section id="integrated-solutions" class="level3">
<h3>Integrated solutions </h3>
<section id="smart-cities" class="level4">
<h4>Smart cities</h4>
<p>While some cities or companies decide to implement some of the functionalities with their existing or updated CCTV systems, several chose to centralise several of these “smart” functions in <strong>integrated systems</strong>, often referred to as “safe city” solutions. These solutions do not necessarily process biometric information. This is the case for example for the deployments in <strong><a class="maplink" data-title="Gruppo TIM">TIM</a></strong>, <strong><a class="maplink" data-title="Insula Spa">Insula</a></strong> and <strong><a class="maplink" data-title="Venis Spa">Venis</a></strong>'s <strong><a class="maplink" data-title="Control Room (Venice)">Safe City Platform in Venice</a> (Italy)</strong>, <strong><a class="maplink" data-title="Huawei">Huawei</a>'s</strong> <strong><a class="maplink" data-title="Smart video surveillance in Valenciennes">Safe City in Valenciennes</a> (France)</strong>, <strong><a class="maplink" data-title="Dahua Deployment in Brienon-sur-Armançon">Dahua's integrated solution in Brienon-sur-Armançon</a></strong> <strong>(France)</strong>, <strong><a class="maplink" data-title="Thales">Thalès</a>' Safe City in <a class="maplink" data-title="Safe City Pilot (La Défense)">La Défense</a> and <a class="maplink" data-title="Safe City Pilot Project (Nice)">Nice</a> (France)</strong>, <strong>Engie Inéo's and <a class="maplink" data-title="Groupe SNEF">SNEF</a>'s <a class="maplink" data-title="SNEF Smart CCTVs in Marseille">integrated solution in Marseille</a> (France)</strong>, the <strong><a class="maplink" data-title="Center of Urban Supervision (Roubaix)">Center of Urban Supervision in Roubaix</a> (France)</strong>, <strong><a class="maplink" data-title="AI Mars (Potential)">AI Mars</a> (Madrid, in development)</strong><a href="#fn32" class="footnote-ref" id="fnref32" role="doc-noteref"><sup>32</sup></a> or <strong>NEC's platform in <a class="maplink" data-title="NEC Technology in Lisbon">Lisbon</a> and London</strong>.</p>
<p>The way “Smart/Safe City” solutions work is well exemplified by the <a class="maplink" data-title="Control Room (Venice)">“Control Room” deployed in Venice</a>, connected to an urban surveillance network. The system is composed of a central command and control room which aggregates cloud computing systems, together with smart cameras, artificial intelligence systems, antennas and hundreds of sensors distributed on a widespread network. The idea is to monitor what happens in the lagoon city in real time. The scope of the abilities of the centre is wide-ranging. It promises to:</p>
<ul>
<li><p>manage events and incoming tourist flows, something particularly relevant to a city which aims to implement a visiting fee for tourists;</p></li>
<li><p>predict and manage weather events in advance, such as the shifting of tides and high water, by defining alternative routes for transit in the city;</p></li>
<li><p>indicate to the population in real time the routes to avoid traffic and better manage mobility for time optimisation;</p></li>
<li><p>improve the management of public safety, allowing city agents to intervene in a more timely manner;</p></li>
<li><p>control and manage water and road traffic, also for sanctioning purposes, through specific video-analysis systems;</p></li>
<li><p>control the status of parking lots;</p></li>
<li><p>monitor the environmental and territorial situation;</p></li>
<li><p>collect and process data and information that allow for the creation of forecasting models and the allocation of resources more efficiently and effectively;</p></li>
<li><p>bring to life a physical “Smart Control Room” where law enforcement officers train and learn how to read data as well (LUMI 2020).</p></li>
</ul>
</section>
<section id="smartphone-apps" class="level4">
<h4>Smartphone apps</h4>
<p>Integrated solutions can entail smartphone apps used to connect citizens with the control and command centres. This is for example the case in Nice with the (failed) <strong><a class="maplink" data-title="Reporty App Nice (Stopped)">Reporty App</a></strong> project (see Chapter 5) and the <strong>Dragonfly project (Hungary) (see chapter 10)</strong>, and it was part of the original plan of <strong><a class="maplink" data-title="Safe City project in Marseille (Observatoire Big Data de la Tranquillité Publique)">Marseille's Safe City project</a></strong>.</p>
</section>
<section id="crowd-management" class="level4">
<h4>Crowd management</h4>
<p>Integrated solutions generally comprise a set of crowd management features, such as in the case of the systems <strong>in <a class="maplink" data-title="Smart video surveillance in Valenciennes">Valenciennes</a> and <a class="maplink" data-title="Urban Surveillance Center in Marseille">Marseille</a> (France), <a class="maplink" data-title="Mannheim public surveillance">Mannheim</a> (Germany), <a class="maplink" data-title="Control Room (Venice)">Venice</a> (Italy), Amsterdam, <a class="maplink" data-title="Citybeacons Eindhoven">Eindhoven</a> and Den Bosch with the <a class="maplink" data-title="Korte Putstraat (Stopped)">pilot in the Korte Putstraat</a> (using software by <a class="maplink" data-title="CrowdWatch">CrowdWatch</a>, Netherlands).</strong> Such crowd management software generally does not recognise individuals, but rather estimates the number of people in (a part of) the video frame. Sudden movements of groups or changes in density are then flagged for the attention of the security operator (Nishiyama 2018).</p>
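<p>A minimal sketch of such density-based flagging, assuming an upstream people-counting model that already provides a per-frame estimate for a camera zone (thresholds and counts are invented), illustrates the point that the software never identifies anyone; it reacts only to the aggregate count and its rate of change.</p>
<pre><code>def density_alerts(counts, max_density=80, max_jump=15):
    """Flag overcrowding and sudden changes in an estimated people count."""
    flagged = []
    for i, count in enumerate(counts):
        if count &gt; max_density:
            flagged.append((i, f"density {count} above limit {max_density}"))
        if i and abs(count - counts[i - 1]) &gt; max_jump:
            flagged.append((i, "sudden change in crowd density"))
    return flagged

# Invented per-frame people-count estimates for one camera zone.
counts = [40, 42, 45, 44, 85, 90, 60, 58]
for frame, message in density_alerts(counts):
    print(f"frame {frame}: {message}")
</code></pre>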
</section>
</section>
</section>
<section id="remote-biometric-identification" class="level2">
<h2>Remote Biometric Identification</h2>
<p>While all the deployments described above are variants of security applications of algorithmically processed images and sound, the number of deployments which match the narrow definition of <strong>Remote Biometric Identification (RBI)</strong>, namely the use of live camera feeds processed through search algorithms against pre-existing databases, is relatively small. They are often presented as “pilots”, limited in time, and have frequently been quickly interrupted for legal reasons.</p>
<section id="deployment-of-rbi-in-public-spaces" class="level3">
<h3>Deployment of RBI in public spaces</h3>
<p>Here are the documented cases of RBI in public spaces we could find through our research:</p>
<p>- <strong>Live Facial Recognition <a class="maplink" data-title="Facial Recognition in Brussels Airport (Stopped)">pilot project in Brussels International Airport / Zaventem</a></strong> (Belgium, see detailed case study, CHAPTER 6)</p>
<p>- <strong>Live Facial Recognition in Budapest</strong> (Hungary, see detailed case study, CHAPTER 10)</p>
<p>- <strong>Live Facial Recognition <a class="maplink" data-title="Facial Recognition Pilot Project during Carnival (Nice)">pilot project during the Carnival in Nice</a></strong> (France, see detailed case study, CHAPTER 8)</p>
<p>- <strong>Live Facial Recognition <a class="maplink" data-title="Pilot Project Südkreuz Berlin">Pilot Project Südkreuz Berlin</a></strong> (Germany, see detailed case study, CHAPTER 9)</p>
<p>As most of these cases are extensively discussed in the following chapters, we do not comment further on them here.</p>
<p>Additional cases are the <strong><a class="maplink" data-title="Korte Putstraat (Stopped)">Live Facial Recognition pilot during Carnival 2019</a> in 's-Hertogenbosch's Korte Putstraat</strong> (the Netherlands) and the pilot of <strong><a class="maplink" data-title="SARI Enterprise in Como">Live Facial Recognition in the city of Como</a></strong><a href="#fn33" class="footnote-ref" id="fnref33" role="doc-noteref"><sup>33</sup></a>, recently struck down by the Italian DPA (<a class="maplink" data-title="Garante per la Privacy">Garante per la Privacy</a>). The deployment of facial recognition in <strong><a class="maplink" data-title="Madrid Estacion Sur">Estacion Sur</a></strong> in Madrid (Spain) is also live.</p>
</section>
<section id="deployment-of-rbi-in-commercial-spaces" class="level3">
<h3>Deployment of RBI in commercial spaces</h3>
<p>The number of deployments of live facial recognition systems in commercial spaces hosting the public is much higher, but because of its commercial nature, difficult to document and trace. Our research found the following instances:</p>
<p>- <strong><a class="maplink" data-title="AFR at Brøndby IF">Live Facial Recognition project, Brøndby IF Football stadium</a></strong> (Denmark)</p>
<p>- <strong>Live <a class="maplink" data-title="Facial Recognition Pilot in Metz Stadium">Facial Recognition Pilot in Metz Stadium</a></strong> (France)</p>
<p>- <strong>Live <a class="maplink" data-title="Facial Recognition in Ifema">Facial Recognition in Ifema</a></strong> (Spain)</p>
<p>- <strong>Live <a class="maplink" data-title="Facial Recognition in Mercadona">Facial Recognition in Mercadona</a> stores in Mallorca, Zaragoza and Valencia</strong> (Spain)</p>
<p>The systems operate more or less in the same way as RBI in public spaces, or as forensic authentication systems connected to live cameras. In the <strong><a class="maplink" data-title="AFR at Brøndby IF">Brøndby IF Football stadium deployment</a></strong>, for example, developed in partnership with <strong><a class="maplink" data-title="Panasonic">Panasonic</a></strong> and the <strong><a class="maplink" data-title="National University of Singapore">National University of Singapore</a></strong>, football fans who want to access the game have to pass through a gate equipped with a camera connected to a facial recognition algorithm. The stadium administration has constituted a database of unwanted individuals, and if the software matches one of the incoming fans with a record in the database, it flags the match to the operators (Overgaard 2019).</p>
<p>There is however little to no information on the use of these technologies in commercial spaces, as there is no requirement to publicise the various components of these systems. The case studies of this report thus focus mostly on the deployment of RBI in public spaces. More research and more transparency would however be welcome in order to understand the data gathering practices and the impact of these deployments.</p>
</section>
</section>
<section id="conclusion" class="level2">
<h2>Conclusion</h2>
<p>To conclude the overview of the deployment of “smart” security applications in Europe, “actually existing” <strong>Remote Biometric Identification deployments are a rare occurrence</strong>, <strong>but</strong> they are part of a much broader infrastructure of automated biometric authentication and smart surveillance that is steadily maturing. The existence of this broader technical infrastructure means that while all the components necessary for biometric mass surveillance are not yet assembled, if given the legal authorisation, <strong>Remote Biometric Identification could potentially be deployed at a scale that could enact Biometric Mass Surveillance.</strong></p>
<p>That this is more than a remote possibility is evidenced by progress in two directions that are necessary pre-conditions for Biometric Mass Surveillance: 1) the creation of large, new biometric databases, or the upgrading of existing databases, <strong>both of civil and criminal registries</strong>, so that they can be searched by FRT and other biometric recognition algorithms <strong>on a broad scale</strong> by country-wide agencies; 2) the repeated <strong>piloting</strong> and <strong>experimentation</strong> of live-feed systems connected to remote facial and biometric information search and recognition algorithms. The evolution of these two developments (database integration and live deployment pilots), while carried out in general by different categories of actors (national law enforcement for the former, municipal police and city authorities for the latter), <strong>should however be analysed together</strong>: given a permissive legislative framework, they demonstrate the <strong>plausible characteristics of potential technical systems of Biometric Mass Surveillance.</strong> In the following chapter, we explore the current legal framework that limits the existing technological developments and the growing jurisprudence on the matter.</p>
</section>
</section>
<section id="legal-bases" class="level1">
<h1>Legal bases </h1>
<div class="keypoints">
<p><strong>Key points</strong></p>
<ul>
<li><p>The use of biometric tools for law enforcement purposes in public spaces raises a key issue of legal permissibility in relation to the collection, retention and processing of data, when considering individuals' fundamental rights to privacy and personal data protection. When viewed through this lens, RBI technologies could have a grave impact on the exercise of a range of fundamental rights.</p></li>
<li><p>The deployment of biometric surveillance in public spaces must be subject to strict scrutiny in order to avoid circumstances which could lead to mass surveillance. This includes targeted surveillance which has the potential for indiscriminate collection of data on any persons present in the surveilled location, not only that of the target data subject.</p></li>
<li><p>The normative legal framework for conducting biometric surveillance in public spaces can be found in the EU secondary legislation on data protection (GDPR and LED). The use of biometric data under this framework must be reviewed in light of the protection offered by fundamental rights.</p></li>
<li><p>The European Commission's April 2021 proposal for the Artificial Intelligence Act Regulation aims to harmonise regulatory rules for Member States on AI-based systems. The Proposed Regulation lays out rules focussed on three categories of risks (unacceptable, high, and low/minimal risk) and anticipates covering the use of RBI systems. It also aims to complement the rules and obligations set out in the GDPR and LED.</p></li>
</ul>
</div> <!-- key points -->
2021-10-07 08:43:56 +02:00
<p>The deployment of <strong>remote biometric identification</strong> in public spaces might have grave effects on the exercise of a range of <strong>fundamental rights of individuals</strong> (FRA 2019), such as the right to peaceful assembly and association (UNHRC 2019, para. 57) and the <strong>rights to liberty and security</strong>. Because <strong>the use of biometric tools for law enforcement purposes in public spaces involves the collection, retention and processing of biometric data</strong>, a key issue is raised concerning their legal permissibility in relation to the obligations under the fundamental rights to privacy and personal data protection. This section will thus consider remote biometric identification against the protection offered by the EU fundamental rights framework for the rights to privacy and personal data protection, as well as by EU data protection legislation.</p>
<section id="eu-fundamental-rights-framework-for-the-right-to-privacy-and-the-right-to-protection-of-personal-data" class="level2">
<h2>EU Fundamental Rights Framework for the Right to Privacy and the Right to Protection of Personal Data </h2>
<section id="the-scope-of-the-fundamental-right-to-protection-for-rbi" class="level3">
<h3>The scope of the fundamental right to protection for RBI </h3>
<p><strong>Article 7</strong> of the EU Charter of Fundamental Rights (Charter) sets out national and EU legislators' obligations to guarantee <strong>the right to private life, family life, and communications of individuals (the right to privacy) under EU law</strong>. The right to privacy can also be found in <strong>Article 8</strong> of the European Convention on Human Rights (ECHR), the scope of which has evolved over the years to cover issues relating to the processing of personal data. Because <strong>Article 7</strong> of the Charter mirrors closely <strong>Article 8 ECHR</strong>, its scope must be interpreted in line with the latter and its interpretation by the European Court of Human Rights (ECtHR), pursuant to Article 52(3) of the Charter. The Charter enshrines a separate right to protection of personal data in its Article 8, which is “distinct from Article 7 of the Charter” (C-203/15, <em>Tele2</em>, para. 129).</p>
<p>Biometric surveillance tools interfere with the <strong>fundamental rights to privacy</strong> and <strong>personal data protection</strong> as enshrined in each of these legal sources because they collect, retain, process and use personal data, including an intrinsically special category of biometric data, <strong>which is, as discussed below, personal data relating to the physical, physiological or behavioural characteristics of an individual that allows their unique identification (see section 4.2.1)</strong>. Notably, it may not be just physical biometric data such as fingerprints (<em>S and Marper v UK</em>; C-291/12, <em>Schwarz</em>) or facial images (<em>Gaughran v UK</em>) that benefits from the rights to privacy and personal data protection as enshrined in the ECHR and EU law. For example, the ECtHR has adopted an expansive approach in terms of recognising the protective scope of Article 8 ECHR (<em>S and Marper v UK</em>, para 67), which would afford protection to <strong>different categories of biometric data, including behavioural biometric data such as one's way of movement or voice</strong> (Venier and Mordini, 2010).</p>
</section>
<section id="privacy-and-data-protection-in-public-space-and-the-risk-of-mass-surveillance" class="level3">
<h3>Privacy and data protection in public space and the risk of mass surveillance</h3>
<p>The use of the wide range of biometric data discussed above engages individuals' <strong>rights to privacy and data protection</strong> even if the data are captured and used in public spaces while individuals go about their public life. The case law of the ECtHR (<em>PG and JH v UK</em>; <em>Peck v UK</em>) and the Court of Justice of the European Union (CJEU) (Opinion 1/15) shows that both courts have afforded privacy protection to information that is not inherently private. In fact, <strong>performing biometric surveillance in public spaces is inherently intrusive and amounts to mass surveillance</strong>, which in this context can simply be characterised as <strong>monitoring, tracking, or processing of personal data of individuals indiscriminately and in a generalised manner without a prior criminal suspicion</strong> (FRA 2018). Biometric surveillance in public spaces relies on generalised and indiscriminate collection, retention, use and sharing of individuals' biometric data. <strong>This is the case even if the intended purpose of the biometric surveillance is targeted</strong>, because in order to identify people on the watchlist in a crowd, every person in that particular space must be analysed and compared with the watchlist (Houwing 2020).</p>
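<p>To make this last point concrete, the following minimal sketch (in Python; the random template vectors, the similarity measure and the <code>scan_frame</code> function are illustrative assumptions, not any vendor's actual pipeline) shows the typical watchlist-matching loop: a biometric template of <em>every</em> face detected in the frame is compared against the watchlist, even though only matches are ever reported.</p>
<pre><code># Minimal sketch of watchlist-based remote biometric identification.
# Purely illustrative: random vectors stand in for the face embeddings
# a real system would extract from camera footage.
import numpy as np

def cosine_similarity(a, b):
    return float(np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b)))

def scan_frame(face_templates, watchlist, threshold=0.9):
    """Compare every detected face against every watchlist entry."""
    alerts = []
    for template in face_templates:               # every passer-by is processed...
        for identity, reference in watchlist.items():
            if cosine_similarity(template, reference) &gt;= threshold:
                alerts.append(identity)           # ...but only matches surface
    return alerts

rng = np.random.default_rng(0)
crowd = [rng.normal(size=128) for _ in range(3)]  # three passers-by
watchlist = {"listed person": crowd[1]}           # one of them is on the list
print(scan_frame(crowd, watchlist))               # ['listed person']
</code></pre>
<p>However narrow the watchlist, the matching loop necessarily runs over everyone in view, which is why even a “targeted” deployment entails generalised processing of biometric data.</p>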
<p>The grave consequences of this type of indiscriminate and generalised collection of personal data for the fundamental rights of individuals can be found across the case law of the ECtHR and the CJEU. The ECtHR has repeatedly warned that covert surveillance tools <strong>must not be used to undermine or even destroy democracy on the grounds of defending it</strong> (<em>Klass and others v Germany</em>, para 49). Particularly in considering the lawfulness of the collection of biometric data, the ECtHR recognised in <em>S and Marper v UK</em> that the use of biometric data that would allow identification of an individual, and that would carry the potential to deduce personal data classified as sensitive data, such as ethnic origin, would make the people concerned fundamentally vulnerable to stigmatisation and discrimination (paras 122-126). Because of the heightened level of protection afforded to such data, <strong>the ECtHR found that the generalised and indiscriminate collection and retention of biometric data did not comply with the ECHR requirements, as it amounted to a disproportionate interference with the right to privacy and thus constituted a violation of Article 8 ECHR.</strong></p>
<p>The CJEU considered in <em>Digital Rights Ireland</em> (Joined Cases C-293/12 and C-594/12, para 37) as well as in <em>Tele2</em> (C-203/15, para 100) that EU law <strong>precluded the</strong> <strong>mass retention of traffic and location data for law enforcement purposes</strong>, and only allowed for the targeted retention of such data. The deployment of biometric surveillance in public spaces must therefore be subject to <strong>strict scrutiny</strong>; in light of the case law of both courts, EU fundamental rights law as well as the ECHR preclude the deployment of biometric surveillance that leads to mass surveillance for law enforcement purposes in public spaces.</p>
</section>
<section id="the-ambiguities-of-targeted-biometric-surveillance" class="level3">
<h3>The ambiguities of “targeted” biometric surveillance</h3>
<p><strong>Targeted biometric</strong> surveillance may still be lawful provided that it is justified under <strong>Article 52(1)</strong> of the Charter, in light of the ECHR requirements for the Convention rights that are mirrored in the Charter. <strong>This type of surveillance is distinguishable from mass surveillance in that it is directed towards a person or group of persons based on a prior suspicion of their involvement in criminal activities.</strong> Recently, in its <em>La Quadrature du Net and others</em> decision, the CJEU added <strong>a geographical criterion as a satisfactory limitation for the targeted retention of traffic and location data</strong> (para 149). However, in the context of conducting biometric surveillance in public spaces, this might not function as a limitation at all. As mentioned above, by its nature this type of surveillance would amount to mass surveillance, since it would indiscriminately monitor and analyse everyone in that space to detect people on the watchlist. <strong>Accordingly, using biometric surveillance in a specific area (e.g., concert venues, football stadiums, public rallies) for law enforcement purposes might be considered expansive and intrusive to an extent that it would constitute a disproportionate interference with the rights to privacy and personal data protection</strong>.</p>
</section>
<section id="conditions-for-targeted-biometric-surveillance" class="level3">
<h3>Conditions for “targeted” biometric surveillance</h3>
<p>Even where biometric surveillance is performed in a targeted way, <strong>its lawfulness would turn on the legitimate aim</strong> for which it is conducted and an <strong>assessment of its proportionality</strong> in light of that aim. A key issue here is that conducting targeted biometric surveillance in public spaces would constitute a serious interference with the rights to privacy and personal data protection because of the special character of biometric data, which makes a person unique and identifiable and potentially carries the risk of revealing sensitive data. Thus, it should be conducted for an aim that is proportionate to the level of intrusiveness it causes (by analogy C-203/15, <em>Tele2</em>, para 102). In essence, this means that targeted biometric surveillance is only allowed if it is strictly necessary for the purpose of fighting terrorism or serious crime (by analogy C-203/15, <em>Tele2</em>). There must be appropriate safeguards protecting the people concerned from possible abusive uses of biometric surveillance. Moreover, there must be effective legal remedies available to people regarding the use of biometric surveillance. Authorisations for targeted biometric surveillance must be subject to effective review by a court or an independent administrative body with the power to issue legally binding decisions, in order to verify that a situation justifying recourse to the measure exists and that the conditions and safeguards are observed (C-511/18, C-512/18 and C-520/18, <em>La Quadrature du Net and others</em>, para 179).</p>
<p><strong>A fundamental rights assessment</strong> of conducting targeted biometric surveillance in public spaces must be carried out at each stage of the data lifecycle, <strong>including when the data is processed in near real-time before collection.</strong> Especially where the personal data captured in the public sphere in real-time involves the use of data that the individual may not foresee (<em>Uzun v Germany</em>, para 45), that real-time automated processing would trigger <strong>Article 8</strong> protection. Similarly, in the context of the right to personal data protection, the CJEU found in <em>La Quadrature du Net and others</em> (C-511/18, C-512/18 and C-520/18) that the <strong>automated analysis of personal data amounted to an interference with the right to protection of personal data as set out in Article 8 of the Charter</strong>, even though it did not initially involve the collection of the data (para 170). Based on the case law of both courts, the <strong>automated analysis of biometric data in and of itself amounts to an interference</strong> with the rights to privacy and personal data protection and <strong>must meet the fundamental rights requirements</strong> to be lawful. Accordingly, it must be subject to review by a court or an independent administrative body, and the pre-established tools or models used in the automated analysis must meet certain qualities (e.g., they must be non-discriminatory, specific, and reliable; any positive result must be subject to manual and individual re-examination) (C-511/18, C-512/18 and C-520/18, <em>La Quadrature du Net and others</em>, paras 180-182).</p>
</section>
</section>
<section id="eu-secondary-law-gdpr-led" class="level2">
<h2>EU Secondary Law: GDPR &amp; LED </h2>
<p>The normative legal framework for conducting biometric surveillance in public spaces can be found in EU secondary legislation on data protection. The use of biometric data under this framework must be reviewed in light of the protection offered by fundamental rights (see the previous section).</p>
<section id="biometric-data-in-gdpr-led" class="level3">
<h3>“Biometric data” in GDPR &amp; LED</h3>
<p><strong>The General Data Protection Regulation (GDPR)</strong> provides the rules relating to the processing of personal data for all purposes except where the processing is carried out for the prevention, investigation, detection, or prosecution of criminal offences, including the safeguarding against and the prevention of threats to public safety, pursuant to its <strong>Article 2(2)(d)</strong>. <strong>The Law Enforcement Directive (LED)</strong> complements the GDPR in this area, as it applies specifically to the processing of personal data by competent authorities for the prevention, investigation, detection, or prosecution of criminal offences, including the safeguarding against and the prevention of threats to public safety, pursuant to its <strong>Article 1</strong>.</p>
<p>Both instruments provide a specific framework for the processing of special categories of data (formerly known as “sensitive data”), including <strong>biometric data</strong>, which is defined as <strong>“personal data resulting from specific technical processing relating to the physical, physiological or behavioural characteristics of a natural person, which allow or confirm the unique identification of that natural person, such as facial images or dactyloscopic data”</strong> under <strong>Article 4(14) of the GDPR</strong> and <strong>Article 3(13) of the LED</strong>. The definition thus recognises expanding categories of biometric data that can capture and measure human characteristics, as it covers physical and physiological as well as behavioural biometric data. Notably, <strong>biometric data</strong> is granted <strong>higher protection</strong> than <strong>non-sensitive personal data</strong> irrespective of the fact that it may not reveal sensitive information such as <strong>racial or ethnic origin</strong>, <strong>health</strong>, or <strong>sexual orientation</strong>.</p>
</section>
<section id="distinguishing-personal-data-and-biometric-data" class="level3">
<h3>Distinguishing personal data and biometric data</h3>
<p>Two elements must be present for <strong>personal data</strong> to constitute <strong>biometric data</strong> and for its processing to be subject to the specific limitations imposed by the GDPR and the LED.</p>
<p><strong>- “Specific technical processing”.</strong> Neither instrument defines the concept of “specific technical processing”, but it should be understood as a special type of processing that captures the digital representation of biometric characteristics (e.g., facial images, fingerprints, voice) (Kindt 2013, 43; Jasserand 2016, 303). On this point, the European Data Protection Board (EDPB, 2019) notes that <strong>biometric data are the result of the measurement of physical, physiological, or behavioural characteristics of individuals, and thus it is the result of this special type of processing that is captured by the concept of biometric data.</strong> For example, the image of a person captured by video surveillance is personal data, but it would be classified as biometric data once it is subjected to a specific type of processing to deduce the characteristics of that person (Recital 51, GDPR).</p>
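<p>As a rough illustration of this two-step logic (a sketch only: <code>extract_template</code> below is a hypothetical stand-in for whatever feature extraction a real system would apply, not an actual library call), the raw camera frame is already personal data, while the derived measurement is what the GDPR and the LED treat as biometric data:</p>
<pre><code># Illustrative sketch: "specific technical processing" turns an image
# (personal data) into a biometric template (biometric data).
import numpy as np

def extract_template(image):
    """Hypothetical stand-in for the specific technical processing step:
    reduce an image to a fixed-length measurement of a person's
    characteristics. A real system would run a face-embedding model here;
    this toy version just averages pixel values per colour channel."""
    return image.mean(axis=(0, 1))

camera_frame = np.zeros((480, 640, 3))     # personal data: footage of a person
template = extract_template(camera_frame)  # biometric data: the measurement
                                           # resulting from that processing
</code></pre>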
<p>- “<strong>Unique identification of an individual</strong>”. Compared to the definition of <strong>personal data</strong>, it is unclear whether the element of identification for the purpose of defining biometric data requires a higher threshold (Jasserand 2013, 305-306). Both instruments define personal data broadly, as “any information relating to an identified or identifiable individual”. It has been confirmed both by the former Article 29 Data Protection Working Party (2007) and the CJEU (C-582/14, <em>Breyer</em>) that personal data is defined broadly so as to capture <strong>the concept of “identifiability”</strong>, whereby a person could be identifiable when the data is combined with other available information (including information retained by someone other than the data controller), <strong>even if the person is not <em>prima facie</em> identified</strong> (paras 39-49).</p>
<p>The element of identification in the definition of biometric data, on the other hand, may suggest that such data must relate to an <strong>identified individual.</strong> The fact that the person could be identifiable through possible means would not be sufficient for the personal data to be classified as biometric data (Jasserand 2013, 306). <strong>The EDPB (2019) supports this view, as it notes that if a video surveillance system is set up to detect the physical characteristics of individuals in order to classify them, rather than to uniquely identify them, the processing would not be subject to the framework reserved for the processing of sensitive data.</strong> Nevertheless, the data captured might still amount to personal data irrespective of the fact that it is not subject to any special type of processing.</p>
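<p>The EDPB's distinction can be pictured as two different operations on the same template (again a hedged sketch with hypothetical functions, not a description of any deployed system): classification assigns a category without linking the face to anyone, whereas identification matches the face against reference templates of known individuals, and only the latter engages the regime for sensitive data.</p>
<pre><code># Illustrative contrast: classification versus unique identification.
import numpy as np

def classify(template):
    """Detect-and-classify: estimates a category (here a toy binary label)
    without linking the face to an identity; on the EDPB's reading this
    is not 'unique identification'."""
    return "category A" if template[0] &gt;= 0 else "category B"

def identify(template, reference_db, threshold=0.9):
    """Identification: links the face to a known individual, which brings
    the processing within the framework reserved for sensitive data."""
    for name, reference in reference_db.items():
        similarity = float(np.dot(template, reference) /
                           (np.linalg.norm(template) * np.linalg.norm(reference)))
        if similarity &gt;= threshold:
            return name
    return None
</code></pre>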
</section>
<section id="sensitive-biometric-data-processing-conditions" class="level3">
<h3>Sensitive (biometric) data processing conditions </h3>
<p>Both the GDPR and the LED impose a special framework <strong>for the processing of sensitive data, including biometric data, as opposed to non-sensitive personal data</strong>. In essence, they impose limitations on the processing of sensitive data by setting out the exceptional conditions under which the data may be processed. The following section considers the conditions under the LED on the processing of biometric data in order to set out the regulatory obligations relevant to implementing biometric surveillance in public spaces for law enforcement purposes. The conditions for biometric data processing under the GDPR are excluded from the scope of this report because the GDPR does not apply to processing activities for law enforcement purposes.</p>
<section id="conditions-for-the-processing-of-biometric-data-under-the-led" class="level4">
<h4>Conditions for the processing of biometric data under the LED</h4>
<p>The LED imposes <strong>limitations on the processing of biometric data</strong> for the purpose of uniquely identifying an individual. Pursuant to its <strong>Article 10</strong>, competent authorities may process biometric data only where <strong>strictly necessary</strong> (which requires a stringent balancing analysis between the data processing and its purpose) and subject to <strong>appropriate safeguards</strong>, on three grounds:</p>
<ul>
<li><p>where authorised by EU or Member States national law</p></li>
<li><p>to protect the vital interests of the data subject or another person</p></li>
<li><p>where the data is manifestly made public by the data subject</p></li>
</ul>
<p>Clearly, the most relevant lawful ground for conducting biometric surveillance under the LED is where the processing is authorised by EU or a Member State's national law because, for example, processing for the protection of vital interests is limited to scenarios where the data subject or another person is physically or legally incapable of giving consent, or where there is a humanitarian emergency.</p>
<p>Accordingly, the EU legislator or Member States may adopt a law on conducting biometric surveillance for law enforcement purposes, but it would be subject to the EU fundamental rights requirements and would be <strong>unlawful</strong> <strong>if it affects the essence of the fundamental rights or if it amounts to a disproportionate interference</strong>.</p>
</section>
<section id="automated-decision-making-under-the-led" class="level4">
<h4>Automated decision-making under the LED</h4>
<p><strong>Member States have discretion to use biometric data in automated decision-making processes subject to certain conditions</strong>. According to <strong>Article 11(1) of the LED</strong>, <strong>automated decision-making is prohibited unless authorised by EU or Member State law</strong>, which “provides appropriate safeguards for the rights and freedoms of the data subject, at least the right to obtain human intervention on the part of the controller”. Particularly when that automated decision-making uses sensitive data, including biometric data, the law must provide suitable measures taking into account the nature of that data (Article 11(2), LED). However, there are <strong>restrictions on conducting biometric surveillance where it involves profiling,</strong> which is considered a form of automated decision-making that evaluates a person's personal aspects, pursuant to <strong>Article 3(4)</strong> of the LED. <strong>Article 11(3)</strong> of the LED provides <strong>an unconditional prohibition against profiling that has a discriminatory effect on individuals based on their sensitive data (including biometric data) under EU law</strong>. It is thus important to review whether biometric surveillance that involves profiling has such a discriminatory effect, because under the LED national law must provide for human intervention in this context.</p>
</section>
<section id="competent-authority-under-the-led" class="level4">
<h4>Competent authority under the LED </h4>
<p>Another issue with the deployment of biometric surveillance in public spaces for law enforcement purposes is that the <strong>LED only applies where the data controller is a “competent authority” processing the data for the relevant purpose</strong>. Determining what “competent authority” means is thus important for understanding, for example, whether, and if so when, a private actor may qualify as such an authority. Pursuant to Article 3(7)(a) and (b) of the LED, a <strong>competent authority is either a public authority that is entrusted with the power to prevent, investigate, detect, or prosecute criminal offences, or any other entity or body that exercises public authority and public powers for the relevant purpose based on national law.</strong> It remains open to discussion whether the latter reference indicates that a private actor must be entrusted by law to process personal data (including biometric data) for law enforcement purposes under the LED (Garstka 2018).</p>
</section>
</section>
</section>
<section id="eu-soft-law-convention-108" class="level2">
<h2>EU Soft law: Convention 108+</h2>
<p>The <strong>Council of Europe's Convention for the Protection of Individuals with regard to Automatic Processing of Personal Data (Convention 108),</strong> which is one of the bases for EU data protection legislation, was updated in 2018. The modernised Convention 108, known as <strong>Convention 108+, prohibits the processing of sensitive data (subject to certain conditions)</strong> in a similar, albeit arguably more modest (Greenleaf 2016), way to the GDPR. It lays out similar data subject rights, including the right not to be subjected to a solely automated decision-making process (Article 9). <strong>Although Article 11 of Convention 108+ permits the Signatory Parties to derogate from certain rules</strong> and obligations, including the purpose limitation (Article 5(4)) and the duty to inform about data breaches (Article 7(2)), on the basis of national security interests, it expressly requires <strong>the establishment of independent and effective review and supervision of data processing activities in a national security context (Article 11(3)).</strong> While Convention 108+ has yet to be ratified by all EU Member States, it provides a strong basis for establishing an oversight mechanism for surveillance measures.</p>
</section>
</section>
<section id="main-political-issues-and-debates" class="level1" >
<h1>Main political issues and debates</h1>
<div class="keypoints">
<p><strong>Key points</strong></p>
<ul>
<li><p>Four main positions on RBI systems have emerged among political actors as a result of both technical developments in the field and early legislative activity of EU institutions: 1) active promotion; 2) support with safeguards; 3) moratorium; and 4) outright ban.</p></li>
<li><p>Developments in the field of AI for governance, securitisation and law enforcement are widely encouraged and financed at an EU level through funding programmes such as the Digital Europe programme, the Connecting Europe Facility 2 and Horizon Europe.</p></li>
<li><p>Those who are in favour of support with safeguards argue that the deployment of RBI technologies should be strictly monitored because of the potential risks they pose, including the potential of FRT, for example, to contribute to the further criminalisation or stigmatisation of groups of people who already face discrimination.</p></li>
<li><p>The European Parliament passed a resolution on artificial intelligence in January 2021 in which they invite the Commission “to assess the consequences of a moratorium on the use of facial recognition systems”. If deemed necessary, such a moratorium could affect some existing uses of FRT, including its deployment in public spaces by public authorities.</p></li>
<li><p>A number of EU and national NGOs have called for an outright ban on the use of RBI, with some arguing that the mass processing of biometric data from public spaces creates a serious risk of mass surveillance that infringes on fundamental rights.</p></li>
<li><p>The European Commission's legislative proposal for an Artificial Intelligence Act (EC 2021b) is both a proposal for a regulatory framework on AI and a revised coordinated plan to support innovation. One feature of the Act is the establishment of risk-dependent restrictions which would apply to the various uses of AI systems.</p></li>
</ul>
</div> <!-- key points -->
<section id="the-emergence-of-remote-biometric-identification-as-a-policy-issue" class="level2">
<h2>The emergence of remote biometric identification as a policy issue</h2>
<p>The technological developments in the field of remote biometric identification and the early legislative activity of EU institutions have progressively consolidated <strong>four main positions</strong> in relation to Remote Biometric Identification: <strong>1) active promotion; 2) support with safeguards; 3) moratorium; and 4) outright ban.</strong> In this section we visit each of these positions and detail the logic of the arguments upon which they are based.</p>
<p>As detailed in the introduction, the European Commission and the European Council have so far generally supported the development of Remote Biometric Identification. In the <em>White Paper on AI</em>, the European Commission (2020b) proposes a set of rules and actions for excellence and trust in AI that guarantee the safety and fundamental rights of people and businesses, while strengthening investment and innovation across EU countries. The Commission's recent draft legislation takes these objectives a step further by proposing to turn Europe into “the global hub for trustworthy Artificial Intelligence (AI)” (European Commission 2021b). Biometric identification, and specifically FRT, has been central to many AI developments, ranging from smart city initiatives financed by the EU all the way to the use of video surveillance and FRTs by law enforcement.</p>
<p>The implementation of the GDPR and the LED in the EU and EEA in May 2018 set the scene for wide-ranging contestations over the use of surveillance technologies, specifically facial recognition technologies, in public spaces. A number of influential reports have been published (FRA 2018; FRA 2019; CNIL 2019b; EDRi 2020; Fernandez et al. 2020; González Fuster 2020) and online campaigns launched (e.g., #ReclaimYourFace) to warn about the risks posed by AI, while simultaneously putting pressure on the European Commission to address its impact on safety and fundamental rights. Although many of the issues put forward by these reports reflect an overarching concern with privacy issues and human rights violations, each organisation uses a different problem definition, ranging from the technical challenges and limitations of AI all the way to the risks involved in the implementation of biometric technologies. As a consequence, they also propose different mitigation strategies, such as promotion with safeguards, moratorium, or full ban. In what follows, we present this configuration of mobilisation and contestation.</p>
</section>
<section id="four-positions-in-the-policy-debates-1" class="level2">
<h2>Four positions in the policy debates</h2>
<section id="active-promotion-1" class="level3">
<h3>Active promotion</h3>
<p>A certain number of actors, both at the national and at the local level, are pushing for the development and extension of remote biometric identification. At the local level, the new technological developments meet a growing appetite for smart city initiatives and the ambitions of mayors who strive to develop digital platforms and employ technology-oriented solutions for governance and law enforcement. The intention of the mayor of Nice, Christian Estrosi, to make <strong>Nice a “laboratory” of crime prevention, despite repeated concerns of the French DPA,</strong> is a case in point (for a detailed analysis, see chapter 8 in this report; see also Barelli 2018). Law enforcement agencies across Europe also continue to press ahead with efforts to build <strong>digital and automated infrastructures, which benefit tech companies that push their facial recognition technologies under the banner of the smart city and tech innovation</strong> (e.g., <a class="maplink" data-title="Huawei">Huawei</a>, <a class="maplink" data-title="NEC">NEC</a>, etc.).</p>
<p><strong>At the national level, biometric systems for the purposes of authentication are increasingly deployed for forensic applications</strong> among law-enforcement agencies in the European Union. As we elaborate in Chapter 3, 11 out of 27 member states of the European Union are already using facial recognition against biometric databases for forensic purposes, and 7 additional countries are expected to acquire such capabilities in the near future. The map of European deployments of Biometric Identification Technologies (see Chapter 3) bears witness to a <strong>broad range of algorithmic processing of security images</strong>, on a spectrum that runs from individual, localised authentication systems, to generalised law enforcement uses of authentication, to Biometric Mass Surveillance.</p>
<p>Several states that have not yet adopted such technologies seem inclined to follow the trend, and to push further. Former Belgian Minister of the Interior Pieter De Crem, for example, recently declared he was in favour of the use of facial recognition not only for judicial inquiries but also for live facial recognition, a much rarer stance.</p>
<section id="the-use-of-facial-recognition-can-mean-increased-efficiency-for-security-services-the-police-are-interested-in-using-this-technology-in-several-of-their-missions.-first-of-all-within-the-framework-of-the-administrative-police-with-the-aim-of-guaranteeing-the-security-of-a-closed-place-accessible-to-the-public-it-would-allow-them-to-immediately-intercept-a-person-who-is-known-in-the-police-databases-and-who-constitutes-a-danger-for-public-security-but-this-technology-can-also-be-used-within-the-framework-of-the-judicial-police-with-the-aim-of-controlling-during-an-investigation-if-the-suspect-was-present-at-the-scene-of-the-crime-at-the-time-when-the-punishable-act-was-committed.-de-halleux-2020" class="level4 Quote">
<blockquote class="Quote">"The use of facial recognition can mean increased efficiency for security services […] The police are interested in using this technology in several of their missions. First of all, within the framework of the administrative police, with the aim of guaranteeing the security of a closed place accessible to the public, it would allow them to immediately intercept a person who is known in the police databases and who constitutes a danger for public security; but this technology can also be used within the framework of the judicial police, with the aim of controlling, during an investigation, if the suspect was present at the scene of the crime at the time when the punishable act was committed". <footer>(De Halleux 2020)</footer></blockquote>
<p>Such outspoken advocates of the use of RBI constitute an important voice, but do not find an echo in the EU mainstream discussions.</p>
</section>
</section>
<section id="support-with-safeguards-1" class="level3">
<h3>Support with safeguards</h3>
<p>A second category of actors has adopted the position that RBI technologies should be supported, on the condition that their development is monitored because of the risks they potentially pose. We find in this category the EU Commission, the EU Council, some EU political parties, as well as the Fundamental Rights Agency (FRA), national DPAs such as the <a class="maplink" data-title="CNIL">CNIL</a>, the CoE (Council of Europe), and a certain number of courts.</p>
<p>Developments in the field of AI for governance, security and law enforcement are widely encouraged and financially supported by EU institutions. In their communication <em><strong>Shaping Europe's Digital Future</strong></em>, accompanying the White Paper on AI, the European Commission set out its guidelines and strategies to create a “Europe fit for the digital age” (European Commission 2020a). In support of a “fair and competitive economy”, the Commission proposes a European Data Strategy (EDS) <strong>to make Europe a global leader in the data-agile economy</strong>. The EDS further aims to ensure Europe's technological sovereignty in a globalised world and to “<strong>unlock the enormous potential of new technologies like AI”</strong> (Newsroom 2020). The Commission therefore proposes, among other things, “building and deploying cutting-edge joint digital capacities in the areas of AI, cyber, super and quantum computing, quantum communication and blockchain” as well as “[r]einforcing EU governments' interoperability strategy to ensure coordination and common standards for secure and borderless public sector data flows and services” (European Commission 2020a, 4).</p>
<p>The financial support for these initiatives is planned to be channelled through the <strong>Digital Europe programme (DEP), the Connecting Europe Facility 2 and Horizon Europe.</strong> Through Horizon Europe, for instance, the Commission plans to invest €15 billion in the Digital, Industry and Space cluster, with AI as a key activity to be supported. The DEP would benefit from almost €2.5 billion for deploying data platforms and AI applications, while also supporting national authorities in making their high-value data sets interoperable (Newsroom 2020).</p>
<p>In the <strong>European Parliament</strong>, the EPP (European People's Party) aligns most closely with this approach. “We want to regulate facial recognition technologies, not ban them. We need clear rules where they can be used and where they must not be used”, declared, for example, Emil Radev MEP, EPP Group Member of the Legal Affairs Committee. As he puts it: “Without a doubt, we want to prevent mass surveillance and abuse. But this cannot mean banning facial recognition altogether. There are harmless and useful applications for facial recognition, which increase personal security” (European People's Party, 2021).</p>
<p>The <strong>FRA's</strong> 2019 report on facial recognition technologies (FRA 2019), which builds on several previous reports concerning biometrics, IT systems and fundamental rights (FRA 2018), big data and decision-making (FRA 2018), and data quality and artificial intelligence (FRA 2019), calls for a moderate approach. The FRA advocates a comprehensive understanding of how exactly facial recognition technologies work and what their impact on fundamental human rights is. Fundamental rights implications of using FRT, they argue, vary considerably depending on the purpose, scope and context. They highlight a number of issues based on the EU fundamental rights framework as well as EU data protection legislation. For example, according to Article 9 of the GDPR, processing of biometric data is allowed based on the data subject's <strong>explicit</strong> consent, which requires a higher threshold of precision and definitiveness, including for processing purposes. In terms of using <strong>biometric surveillance in public spaces</strong>, <strong>explicit consent</strong> would not provide a lawful ground for the relevant data processing because, as observed by the CJEU in its <em>Schwarz</em> decision, the data subject who is entering the premises <strong>would not have any choice of opting out of the data processing</strong>. If the processing of biometric data is based on substantial public interest, which is another lawful data processing ground under Article 9 of the GDPR, it must be “<strong>proportionate</strong> to the aim pursued, <strong>respect the essence of the right to data protection</strong> and provide for <strong>suitable and specific measures to safeguard the fundamental rights and interest</strong> of the data subjects” (Article 9(2)(g), GDPR). Finally, when emphasising that the processing must be based on a lawful ground as recognised under EU data protection legislation, the FRA was particularly vocal about “<strong>function creep”</strong> with regard to the use of facial recognition systems, and emphasised that the purpose of information collection must be strictly determined in light of the gravity of the intrusion upon people's fundamental rights (25).</p>
<p>The FRA therefore places the <strong>right to privacy and the protection of personal and sensitive data at the core of their problem definition</strong>, emphasising the potential danger of FRTs undermining the freedoms of expression, association and assembly. The FRA report also makes a case for <strong>the rights of special groups</strong> such as children, the elderly and people with disabilities, and addresses the issue of how the use of FRTs can contribute to the further criminalisation and stigmatisation of groups of people who already face discrimination (e.g., certain ethnic or racial minorities). In light of these considerations, they advocate for a clear and “sufficiently detailed” legal framework, close monitoring, and a thorough and continuous impact assessment of each deployment.</p>
<p>The French DPA, the <a class="maplink" data-title="CNIL">CNIL</a>, takes a similar position in its report “Facial Recognition. For a debate living up to the challenges” (CNIL 2019b). The <strong><a class="maplink" data-title="CNIL">CNIL</a></strong> report argues that the contactless and ubiquitous nature of the different FRTs can create an <strong>unprecedented potential for surveillance which</strong>, in the long run, could undermine societal choices. They also emphasise that biometric data is sensitive data, and that its collection is therefore never completely harmless: “Even legitimate and well-defined use can, in the event of a cyber-attack or a simple error, have particularly serious consequences. In this context, the question of securing biometric data is crucial and must be an overriding priority in the design of any project of this kind” (CNIL 2019b, 6). In their recommendations, while <strong>calling for special vigilance,</strong> they <strong>acknowledge the legitimacy and proportionality of <em>some</em> uses.</strong> The <a class="maplink" data-title="CNIL">CNIL</a> pointed out that GDPR-endangering applications are often presented as “pilot projects”, and thus requested the drawing of “some <strong>red lines even before any experimental use”.</strong> They call instead for “a genuinely experimental approach” that tests and perfects technical solutions that respect the legal framework (CNIL 2019b, 10).</p>
<p>The CoE's <strong>Practical Guide on the Use of Personal Data in the Police Sector</strong> (Council of Europe 2018), supplementing Convention 108+, puts great emphasis on implementing specific safeguards where an automated biometric system is introduced, and considers that, due to the high risk such systems pose to individuals' rights, data protection authorities should be consulted on their implementation (10). Also, as mentioned below, the Council of Europe's <strong>Guidelines on Facial Recognition</strong> (Council of Europe 2021), while considering <strong>a moratorium on live facial recognition technology,</strong> set out certain requirements to be met when implementing (possibly forensic) facial recognition technology.</p>
</section>
<section id="moratorium-1" class="level3">
<h3>Moratorium</h3>
<p>On 20 January 2021, the <strong>European Parliament</strong> (2021) passed a resolution on artificial intelligence in which they invite the Commission “to assess the consequences of a <strong>moratorium on the use of facial recognition systems</strong>, and, depending on the results of this assessment, to consider a moratorium on the use of these systems <strong>in public spaces by public authorities</strong> and in premises meant for <strong>education and healthcare</strong>, as well as on the use of facial recognition systems <strong>by law enforcement authorities in semi-public spaces</strong> such as airports, until the technical standards can be considered fully fundamental rights-compliant, the results derived are non-biased and non-discriminatory, and there are strict safeguards against misuse that ensure the necessity and proportionality of using such technologies” (European Parliament 2021).</p>
<p>Another authority calling for a <strong>moratorium on automated recognition technologies in public spaces</strong> is the European Data Protection Supervisor (<strong>EDPS</strong>), the independent supervisory authority responsible for monitoring the processing of personal data by EU institutions and bodies<strong>.</strong> In its 2020-2024 strategy “Shaping a Safer Digital Future” (EDPS 2020), released on 30 June 2020, the EDPS stresses its commitment to supporting the idea of a moratorium on “the deployment, in the EU, of automated recognition in public spaces of human features, <strong>not only of faces but also of gait, fingerprints, DNA, voice, keystrokes and other biometric or behavioural signals</strong>, so that an informed and democratic debate can take place” (EDPS 2020).</p>
<p>The EDPS was also among the first to react to the draft <em>Artificial Intelligence Act</em> of the European Commission. While they welcomed the EU's leadership in aiming to ensure that AI solutions are shaped according to the EU's values and legal principles, they nonetheless expressed regret that their call for a moratorium on the use of remote biometric identification systems - including facial recognition - in <strong>publicly accessible spaces</strong> had not been addressed by the Commission. A stricter approach is necessary, they argue, because “remote biometric identification, where AI may contribute to unprecedented developments, presents extremely high risks of deep and non-democratic intrusion into individuals' private lives” (EDPS 2021). As mentioned below, shortly after this first reaction, the EDPS called for a general ban on the use of remote biometric systems together with the European Data Protection Board (EDPB) (2021a).</p>
<p>A call for a moratorium, particularly on facial recognition systems, can also be found in Council of Europe documents. The <strong>Guidelines on Facial Recognition</strong> (Council of Europe, 2021), one of the instruments supplementing Convention 108+, call for a <strong>moratorium on live facial recognition technologies (5)</strong> and lay out certain conditions for the use of facial recognition technologies by law enforcement authorities (6). For example, the Guidelines call for <strong>clear parameters and criteria when creating databases such as watchlists</strong>, in light of specific, legitimate, and explicit law enforcement purposes (ibid.).</p>
</section>
<section id="outright-ban" class="level3">
<h3>Outright Ban</h3>
<p>Finally, a certain number of EU political parties and EU and national NGOs have argued that there is no acceptable deployment of RBI, because the danger of Biometric Mass Surveillance is too high. These actors include organisations such as EDRi, <a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a>, <a class="maplink" data-title="Algorithm Watch">Algorithm Watch</a> and the French Défenseur des Droits<a href="#fn34" class="footnote-ref" id="fnref34" role="doc-noteref"><sup>34</sup></a>.</p>
<p>In the European Parliament, the <strong>European Greens</strong> have most vocally promoted the position of a ban, and have gathered support across party lines. In a letter to the European Commission dated 15 April 2021, 40 MEPs from the European Greens, the Party of the European Left, the Party of European Socialists, Renew Europe, a few non-attached MEPs and one member of the far-right party Identity and Democracy expressed their concerns about the EU Commission proposal for the AI Regulation, which had leaked a few days earlier. As they argued:</p>
<section id="people-who-constantly-feel-watched-and-under-surveillance-cannot-freely-and-courageously-stand-up-for-their-rights-and-for-a-just-society.-surveillance-distrust-and-fear-risk-gradually-transforming-our-society-into-one-of-uncritical-consumers-who-believe-they-have-nothing-to-hide-and---in-a-vain-attempt-to-achieve-total-security---are-prepared-to-give-up-their-liberties.-that-is-not-a-society-worth-living-in-breyer-et-al.-2021" class="level4 Quote">
<blockquote class="Quote">People who constantly feel watched and under surveillance cannot freely and courageously stand up for their rights and for a just society. Surveillance, distrust and fear risk gradually transforming our society into one of uncritical consumers who believe they have “nothing to hide” and - in a vain attempt to achieve total security - are prepared to give up their liberties. That is not a society worth living in! <footer>(Breyer et al. 2021)</footer></blockquote>
<p>Taking particular issue with Article 4 and the possible exemptions to the regulation of AI “in order to safeguard public safety”, they urge the European Commission “to make sure that existing protections are upheld and <strong>a clear ban on biometric mass surveillance in public spaces is proposed</strong>. This is what a majority of citizens want” (Breyer et al. 2021).</p>
<p><strong><a class="maplink" data-title="European Digital Rights (EDRi)">European Digital Rights (EDRi)</a>, an umbrella organisation of</strong> 44 digital rights NGOs in Europe, takes a radical stance on the issue. They argue <strong>that the mass processing of biometric data in public spaces creates a serious risk of mass surveillance</strong> that infringes on fundamental rights, and they therefore call on the Commission to <strong>permanently stop all deployments that can lead to mass surveillance</strong>. In their report <em>Ban Biometric Mass Surveillance</em> (2020) they demand that the EDPB and national DPAs <strong>“publicly disclose all existing and planned activities and deployments that fall within this remit”</strong> (EDRi 2020, 5). Furthermore, they call for a cessation of all planned legislation establishing biometric processing, as well as of the funding for all such projects, amounting to an “immediate and indefinite ban on biometric processing”.</p>
<p><strong><a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a> (LQDN), one of EDRi's founding members</strong> (created in 2008 to “promote and defend fundamental freedoms in the digital world”), similarly called for a ban on <strong>any present and future use of facial recognition for security and surveillance purposes</strong>. Together with a number of other French NGOs monitoring legislation impacting digital freedoms, as well as other collectives, companies, associations and trade unions, <a class="maplink" data-title="La Quadrature du Net">LQDN</a> initiated a joint open letter calling on the French authorities to ban any security and surveillance use of facial recognition due to its <strong>uniquely invasive and dehumanising</strong> nature. In the letter they point to the fact that in France there is a “multitude of systems already installed, outside of any real legal framework, without transparency or public discussion”, referring, among others, to the PARAFE system and the use of FRTs by the civil and military police. As they put it:</p>
</section>
<section id="facial-recognition-is-a-uniquely-invasive-and-dehumanising-technology-which-makes-possible-sooner-or-later-constant-surveillance-of-the-public-space.-it-creates-a-society-in-which-we-are-all-suspects.-it-turns-our-face-into-a-tracking-device-rather-than-a-signifier-of-personality-eventually-reducing-it-to-a-technical-object.-it-enables-invisible-control.-it-establishes-a-permanent-and-inescapable-identification-regime.-it-eliminates-anonymity.-no-argument-can-justify-the-deployment-of-such-a-technology.-la-quadrature-du-net.-et-al.-2019" class="level4 Quote">
<blockquote class="Quote">“Facial recognition is a uniquely invasive and dehumanising technology, which makes possible, sooner or later, constant surveillance of the public space. It creates a society in which we are all suspects. It turns our face into a tracking device, rather than a signifier of personality, eventually reducing it to a technical object. It enables invisible control. It establishes a permanent and inescapable identification regime. It eliminates anonymity. No argument can justify the deployment of such a technology.” <footer>(La Quadrature du Net. et al. 2019)</footer></blockquote>
<p>Another prominent voice asking for a full ban on FRTs is the Berlin-based NGO <strong><a class="maplink" data-title="Algorithm Watch">Algorithm Watch</a></strong>. In their report <em><strong>Automating Society (2020)</strong></em>, the NGO similarly calls for a ban on all facial recognition technology that might amount to mass surveillance. Their analysis and recommendations place FRTs within a broader discussion of <strong>Automated Decision-Making (ADM) systems</strong>. They condemn any use of live facial recognition in public spaces and demand that public uses of FRTs that might amount to mass surveillance be decisively “<strong>banned until further notice, and urgently, at the EU level</strong>” (Algorithm Watch 2020, 10).</p>
<p>They further demand <strong>meaningful transparency</strong>, which not only means “disclosing information about a system's purpose, logic, and creator, as well as the ability to thoroughly analyse, and test a system's inputs and outputs. It also requires <strong>making training data and data results accessible</strong> to independent researchers, journalists, and civil society organisations for public interest research” (Algorithm Watch 2020, 11).</p>
<p>Parallel to these reports, various campaigns have proven effective in raising awareness and putting pressure on governmental bodies at both the national and the European level. In May 2020, EDRi launched the <strong>#ReclaimYourFace</strong> campaign, a European Citizens' Initiative (ECI) petition, which calls for a ban on all biometric mass surveillance practices. The campaign centres on the power <strong>imbalances inherent to surveillance</strong>. As of May 2021, the campaign had been supported by more than 50,000 individual signatures. #ReclaimYourFace is not the only campaign, though it is undoubtedly the most visible and influential in the European context. Other similar international initiatives are <strong>“Ban the Scan”, initiated by Amnesty International; “Ban Automated Recognition of Gender and Sexual Orientation”, led by the international NGO Access Now; and “Project Panopticon”, launched by the India-based Panoptic Tracker.</strong></p>
<p>In early June, a global coalition consisting of <strong>175 organisations from 55 countries</strong> was launched under the hashtag <strong>#BanBS</strong>. The coalition demands the halting of biometric surveillance practices. Drafted by Access Now, Amnesty International, <a class="maplink" data-title="European Digital Rights (EDRi)">European Digital Rights (EDRi)</a>, Human Rights Watch, Internet Freedom Foundation (IFF), and Instituto Brasileiro de Defesa do Consumidor (IDEC), the open letter, in which they call for an outright ban on uses of facial recognition and biometric technologies that enable mass surveillance and discriminatory targeted surveillance, has been signed by almost 200 organisations:</p>
</section>
<section id="these-uses-of-facial-and-remote-biometric-recognition-technologies-by-design-threaten-peoples-rights-and-have-already-caused-significant-harm.-no-technical-or-legal-safeguards-could-ever-fully-eliminate-the-threat-they-pose-and-we-therefore-believe-they-should-never-be-allowed-in-public-or-publicly-accessible-spaces-either-by-governments-or-the-private-sector.-access-now-2021" class="level4 Quote">
<blockquote class="Quote">“These uses of facial and remote biometric recognition technologies, by design, threaten peoples rights and have already caused significant harm. No technical or legal safeguards could ever fully eliminate the threat they pose, and we therefore believe they should never be allowed in public or publicly accessible spaces, either by governments or the private sector.” <footer>(Access Now 2021)</footer></blockquote>
</section>
</section>
</section>
<section id="eu-commission-proposal-on-the-regulation-for-the-artificial-intelligence-act" class="level2">
<h2>EU Commission Proposal on the Regulation for the Artificial Intelligence Act</h2>
<section id="the-eu-commission-proposal" class="level3">
<h3>The EU Commission Proposal</h3>
<p>In April 2021, the European Commission (2021b) published its proposal for the <em>Regulation for the Artificial Intelligence Act</em>, with the aim of setting out harmonised regulatory rules for Member States on AI-based systems. It responded in part to the many challenges posed by the rapid technological development of AI, as well as to pressure from watchdogs, regulatory bodies and civil society. If adopted in its current form, the proposed EU Artificial Intelligence Act will have important implications for the use of biometric identification systems for law enforcement purposes.</p>
<p>On the whole, the proposed EU Artificial Intelligence Act lays out those rules based on three categories of possible risks that the use of AI may create: <strong>(i) an unacceptable risk</strong> according to which the use of AI is prohibited (Article 5); <strong>(ii) a high-risk AI system</strong>, whose use is subject to certain conditions including an ex-ante conformity assessment (Article 6); and <strong>(iii) low or minimal risk</strong>, whose use is permitted without restrictions.</p>
<p>Notably for the purpose of this report, the proposed EU Artificial Intelligence Act covers “remote biometric identification systems”, defined as “an AI system for the purpose of identifying natural persons at a distance through the comparison of a person's biometric data with the biometric data contained in a reference database, and without prior knowledge of the user of the AI system whether the person will be present and can be identified” (Article 3(36)). In this way, the proposed EU Artificial Intelligence Act anticipates covering (AI-based) biometric video surveillance systems. In so doing, it differentiates between the use of “real-time” and “post” remote biometric identification systems in public spaces for law enforcement purposes.</p>
<p>On initial observation, the proposal <strong>prohibits</strong> the use of <strong>“real-time” (live) remote biometric identification systems</strong> in public spaces for law enforcement purposes because it classifies them as systems that create an <strong>unacceptable risk</strong>.</p>
<p>However, Article 5 of the proposed EU Artificial Intelligence Act reads more as a <strong>heavy regulation than a prohibition</strong>. This is because the use of real-time remote biometric identification systems is prohibited unless it is “strictly necessary” for: <strong>(i) a targeted search for specific potential victims of crime</strong>, including missing people; <strong>(ii) the prevention of a specific, substantial and imminent threat</strong> to the life or physical safety of natural persons or of a terrorist attack; or <strong>(iii) a criminal offence for which a European Arrest Warrant can be issued, provided</strong> that it is punishable by a custodial sentence or detention order of a minimum of three years. In determining the use of real-time remote biometric identification systems for one of those purposes, Member States should be subject to appropriate limits in time, space, and target person (Article 5(2)). A court or an independent administrative body should authorise the use of this type of biometric identification system, except in duly justified emergency situations (Article 5(3)). Member States may allow for the full or partial use of real-time biometric identification systems in public spaces for law enforcement purposes based on the requirements laid out in Article 5 of the Proposed Regulation (Article 5(4)).</p>
<p><strong>The use of “post” (forensic) remote identification systems</strong> for law enforcement purposes, on the other hand, is <strong>considered a high-risk AI system</strong>, whose developers have the obligation to ensure that the system meets the conditions set out in the proposed EU Artificial Intelligence Act (Chapter 2 and Annex III). As opposed to other high-risk AI systems, whose providers have to conduct internal control checks, post remote identification systems would be subject to third-party conformity assessment.</p>
<p><strong>The above provisions concerning the use of remote biometric identification systems have important implications for the</strong> protection of personal data and privacy, as those systems involve the processing of personal data. For this reason, they should be read alongside the rules and obligations set out in the GDPR and the LED. When operated for law enforcement purposes by competent authorities, remote biometric systems must comply with the relevant data protection legislation as well as the ECHR and Charter requirements, since they would involve the processing of sensitive personal data.</p>
</section>
<section id="reactions-to-the-proposal" class="level3">
<h3>Reactions to the proposal</h3>
<p>While <strong>data protection authorities</strong> and <strong>civil society</strong> generally welcomed the Commission's initiative, praising its horizontal approach and the broad scope of its application, several organisations expressed concern that the rules put forward are often far too lenient and do not do enough to safeguard fundamental rights. The EDPS (2021), for instance, voiced its concern that the Commission did not address its <strong>call for a moratorium on the use of biometric identification</strong>, and specifically on <strong>facial recognition systems</strong>. Notably, in their later opinion on the proposed EU Artificial Intelligence Act with the EDPB (EDPS and EDPB 2021), they called for a general ban on the use of biometric identification (as opposed to their earlier calls for a moratorium). Both institutions were particularly vocal about the heavy regulation, as opposed to prohibition, of the use of real-time remote biometric identification, and observed that the conditions under which the system could be implemented were so extensive that they would render the so-called prohibition meaningless (11).</p>
<p>They were also very critical of the distinction between real-time and ex-post (forensic) use of biometric identification systems, noting that the latter is as intrusive as the former because of its chilling effect on the freedoms of expression, of assembly, of association and of movement (12). Furthermore, they highlighted the inherently intrusive nature of all types of remote biometric identification systems, as they would involve the indiscriminate and disproportionate processing of the personal data of data subjects in public spaces in order to identify a few individuals (ibid). This would consequently impact people's reasonable expectation of anonymity in public spaces (ibid). For these reasons, the EDPS and the EDPB called for a general ban on any use of AI for the automated recognition of human features in publicly accessible spaces (ibid).</p>
<p>The EDRi called the proposed EU Artificial Intelligence Act “a glimmer of hope for civil society who have called for red lines on impermissible uses of AI, like facial recognition in public spaces and predictive policing”, but described the proposed prohibitions as “relatively feeble”. They criticised in particular the many exemptions under which law enforcement agencies are still allowed to use “<strong>real-time remote biometric identification systems</strong>” (like facial recognition) in public spaces. These exceptions are <strong>targeted searches for specific potential victims of crime</strong>, the prevention of a <strong>threat to life or terrorist attack, or the detection, localisation, identification or prosecution of a perpetrator or suspect of certain serious crimes</strong> (European Commission 2021b, 22). EDRi also points out that the Act “risks giving a green light to governments or public authorities to deploy discriminatory surveillance systems. Its rules for “high risk” AI like predictive policing, AI in asylum procedures and worker surveillance fall mainly on the developers themselves, not the public institutions actually deploying them, a cause for concern.” (EDRi 2021)</p>
<p>In <strong>June 2021, the EDPB and the EDPS also joined civil society in their call for a ban on automated facial recognition technologies</strong> (EDPB 2021a). In their joint opinion on the draft AI regulation (EDPB 2021a), they voiced their concern about the exclusion of international law enforcement cooperation from the scope of the AI Proposal. While the EDPB and the EDPS welcome the risk-based approach underpinning the Proposal, they consider that the concept of “risk to fundamental rights” should be aligned with the EU data protection framework, and that the societal risks for groups of individuals should also be assessed and mitigated. Therefore, they call for “<strong>a general ban on any use of AI for automated recognition of human features in publicly accessible spaces, such as recognition of faces, gait, fingerprints, DNA, voice, keystrokes and other biometric or behavioural signals, in any context”</strong> (EDPB 2021a).</p>
</section>
</section>
</section>
<section id="part-ii-case-studies" class="level1 Title">
<h1 class="Title">PART II: CASE STUDIES</h1>
</section>
<section id="facial-recognition-cameras-at-brussels-international-airport-belgium" class="level1 case" data-title="Facial Recognition in Brussels Airport (Stopped)">
<h1>Facial Recognition Cameras at Brussels International Airport (Belgium)</h1>
<div class="keypoints">
<p><strong>Key points</strong></p>
<ul>
<li><p>Belgium is one of two European countries that have not yet authorised the use of FRT. However, law enforcement is strongly advocating for its use, and the current legal obstacles to its implementation might not hold for very long given the political pressure.</p></li>
<li><p>In 2017, unbeknownst to the Belgian Supervisory Body for Police Information (COC), Brussels International Airport acquired 4 cameras connected to a facial recognition software for use by the airport police. Though the COC subsequently ruled that this use fell outside of the conditions for a lawful deployment, the legality of the airport experiment fell into a legal grey area because of the ways in which the technology was deployed.</p></li>
<li><p>One justification for the legality of the airport experiment from the General Commissioner of <a class="maplink" data-title="Belgian Federal Police">Federal Police</a> was to compare the technological deployment to that of the legal use of other intelligent technologies such as Automated Number Plate Recognition (ANPR). Although this argument was rejected at the time, such a system could be re-instated if the grounds for interruption are no longer present in the law.</p></li>
<li><p>Some civil society actors in Belgium contest the legitimacy of remote biometric identification. However, current legislative activity seems to point in the direction of more acceptance for remote biometric surveillance.</p></li>
</ul>
</div> <!-- key points -->
<p>Belgium is, with Spain, one of the few countries in Europe that <strong>has not authorised the use of facial recognition technology</strong>, neither for criminal investigations nor for mass surveillance (Vazquez 2020). This position may, however, change in the very near future. <strong>Law enforcement is indeed strongly advocating its use</strong>, and the current legal obstacles are not likely to hold for very long (Bensalem 2018). The pilot experiment that took place in Zaventem / Brussels International Airport, although aborted, occurred within a national context in which <strong>biometric systems are increasingly used and deployed</strong>.</p>
<p>Belgium will, for example, soon roll out the new biometric identity card “<strong>eID</strong>” at the national level, as Minister of the Interior Annelies Verlinden recently announced. The identification document, which relies on the constitution of a broad biometric database and is part of a broader <a class="maplink" data-title="European Union">European Union</a> initiative, is developed in partnership with the security multinational <strong><a class="maplink" data-title="Thales">Thales</a></strong> and has already been trialled with 53.000 citizens (Prins 2021; Thales Group 2020).<a href="#fn35" class="footnote-ref" id="fnref35" role="doc-noteref"><sup>35</sup></a></p>
<p>Municipalities in different parts of the country are experimenting with <strong>Automated Number Plate Recognition (ANPR) technology</strong>. A smaller number have started deploying “<strong>smart CCTV</strong>” cameras, which fall just short of using facial recognition technology. The city of Kortrijk has for example <a class="maplink" data-title="Monitoring Kortrijk">deployed</a> “<strong>body recognition</strong>” technology, which uses the walking style or clothing of individuals to track them across the city's CCTV network<a href="#fn36" class="footnote-ref" id="fnref36" role="doc-noteref"><sup>36</sup></a>. Facial recognition is possible with these systems but has not been activated as of yet, <strong>pending legal authorisation to do so</strong>. In the city of <a class="maplink" data-title="Roeselare Municipality">Roeselare</a>, “smart cameras” have been installed in one of the shopping streets. Deployed by telecom operator Citymesh, they could provide facial recognition services, but are currently used to count and estimate crowds, data which is shared with the police (van Brakel 2020). All the emerging initiatives of remote biometric identification are, however, pending a reversal of the decision to halt <a class="maplink" data-title="Facial Recognition in Brussels Airport (Stopped)">the experiment at Zaventem Brussels International Airport</a>.</p>
<section id="the-zaventem-pilot-in-the-context-of-face-recognition-technology-in-belgium" class="level2">
<h2>The Zaventem pilot in the context of Face Recognition Technology in Belgium</h2>
<p>The use of <strong>facial recognition technology</strong> at the Brussels International Airport was announced on 10 July 2019 in the Flemish weekly <em>Knack</em> by General Commissioner of Federal Police Marc De Mesmaeker (Lippens and Vandersmissen 2019). There is currently no publicly available information as to who provided the technical system. De Mesmaeker explained that an agreement had been found with the company managing the airport and the labour unions, and thus that the technology was already in use (Organe de Controle de l'Information Policière 2019, 3).</p>
<p>As part of the justification for the deployment of FRT in Zaventem, De Mesmaeker made a comparison with <strong>ANPR-enabled cameras</strong>, arguing that “They have already helped to solve investigations quickly, (…). Citizens understand this and have learned to live with their presence, but privacy remains a right”. (7sur7 2019)</p>
<p>The <strong>Belgian Supervisory Body for Police Information (COC)</strong><a href="#fn37" class="footnote-ref" id="fnref37" role="doc-noteref"><sup>37</sup></a>, in its advisory document, explained that it had no prior knowledge of the deployment and learned about the existence of the facial recognition systems through the interview with De Mesmaeker in the <em>Knack</em> magazine (Organe de Controle de l'Information Policière 2019, 3). On 10 July 2019, the COC thus invited the General Commissioner to communicate all the details of the deployment of this technology in the Brussels International Airport. On 18 July 2019, the COC received a summary of the system's main components. On 9 August 2019, it subsequently visited the premises of the <a class="maplink" data-title="Facial Recognition in Brussels Airport (Stopped)">federal police deployment in Zaventem airport</a> (Organe de Controle de l'Information Policière 2019, 3).</p>
<p>We know some technical details about the system through the public information shared by the COC. In early 2017, Brussels airport had acquired <strong>4 cameras connected to a facial recognition software for use by the airport police</strong> (Police Aéronautique, LPA) (Farge 2020, 15; Organe de Controle de l'Information Policière 2019, 3). The system works in two steps.</p>
<p>When provided with video feeds from the four cameras, the software first creates <strong>snapshots</strong>, generating individual records of the faces that appear in the frame. In a second step, these snapshots on record are compared and potentially matched to previously established “<strong>blacklists</strong>” created by the police itself (the reference dataset is thus not external to this particular deployment) (Organe de Controle de l'Information Policière 2019, 3).</p>
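<p>To make the two-step workflow concrete, the following minimal Python sketch reproduces its logic with the open-source <em>face_recognition</em> library. The library choice, file names and matching threshold are illustrative assumptions on our part; the vendor and the exact parameters of the Zaventem system are not publicly documented.</p>
<pre><code># Illustrative sketch only; not the actual Zaventem software.
import face_recognition

# Setup: a police-compiled "blacklist" of reference photographs
# (file names are hypothetical).
blacklist_files = ["suspect_01.jpg", "suspect_02.jpg"]
blacklist_encodings = [
    face_recognition.face_encodings(face_recognition.load_image_file(f))[0]
    for f in blacklist_files
]

def process_frame(frame):
    """Step 1: create a 'snapshot' record for each face in the frame.
    Step 2: compare each record against the pre-established blacklist."""
    locations = face_recognition.face_locations(frame)             # detect faces
    encodings = face_recognition.face_encodings(frame, locations)  # biometric records
    matched = []
    for location, encoding in zip(locations, encodings):
        # 'tolerance' is the matching threshold: higher values produce more
        # matches but also more false positives.
        hits = face_recognition.compare_faces(blacklist_encodings, encoding,
                                              tolerance=0.6)
        if any(hits):
            matched.append(location)
    return matched</code></pre>
<p>Note that even a transiently held face encoding, the “snapshot” of step one, is a biometric record; as discussed below, it was precisely this first step that the COC treated as the creation of a biometric database.</p>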
<p>The system did not, however, live up to its promise and generated a high number of <strong>false positives</strong>: features such as skin colour, glasses, moustaches, and beards led to false matches. The system was therefore partially disconnected in March 2017, and at the time of the COC's visit it was no longer fully in use (Organe de Controle de l'Information Policière 2019, 3). Yet while the second step (matching video feeds against pre-established blacklists of faces) had been de-activated, the first function of creating a biometric record of the video feeds was still in place (Organe de Controle de l'Information Policière 2019, 3).</p>
</section>
<section id="legal-bases-and-challenges" class="level2">
<h2>Legal bases and challenges</h2>
<p>The legality of the <a class="maplink" data-title="Facial Recognition in Brussels Airport (Stopped)">Zaventem airport experiment</a> fell into a legal grey area, but eventually the COC ruled that it fell outside of the conditions for a lawful deployment.</p>
<p>The right to privacy is enshrined in <strong>Article 22 of the Belgian Constitution</strong>, which reads: “everyone has the right to the respect of his private and family life, except in the cases and conditions determined by the law.” The <strong>ECHR</strong> and the case law of the <strong>ECtHR</strong> have had considerable influence over the interpretation of Article 22 of the Belgian Constitution (Lavrysen et al. 2017), and thus the right enshrined therein can be broadly construed to encompass the right to protection of personal data and to address risks associated with the use of new technologies (Kindt et al. 2008; De Hert 2017). <strong>Articles 7 and 8 of the Charter</strong> are also relevant where the legislator acts within the scope of EU law (Cour constitutionnelle, N° 2/2021, 14 January 2021).</p>
<p>Belgium adapted its data protection law to the GDPR by enacting the <strong>Act of 30 July 2018</strong> on the Protection of Natural Persons with regard to the Processing of Personal Data (the Data Protection Act). The same act also implements the LED.</p>
<p>In regard to the processing of sensitive data for non-law enforcement purposes, the Act sets out certain processing activities which would be regarded as necessary for reasons of substantial public interest, which is one of the lawful grounds listed in <strong>Article 9 of the GDPR</strong> to process said data. Overall, the relevant public interest purposes concern processing by human rights organisations in pursuit of their objective of defending and promoting human rights and fundamental freedoms, and processing in connection with offences concerning missing persons or sexual exploitation (Article 8, §1, the Data Protection Act). A separate data processing purpose for <strong>personal data concerning the sexual life of the data subject</strong> is introduced in relation to the statutory purpose of evaluating, supervising, and treating persons whose sexual behaviour may be qualified as a criminal offence (Article 8, §1, 3°, the Data Protection Act).</p>
<p><strong>Biometric data, however, cannot be processed</strong> by the respective associations for said public interest purposes unless specified in legal provisions (<strong>Article 8, §1, the Data Protection Act)</strong>. More importantly, <strong>when biometric data is processed</strong> (not limited to the reasons of public interest), <strong>there must be additional safeguards</strong> whereby the data controller designates the categories of people who have access to the data, keeps a record of people who have access to the data for the data protection authority, and makes sure that they are bound by a legal, statutory or contractual obligation of confidentiality with respect to the personal data that they process (Article 9, the Data Protection Act).</p>
<p>The Act further provides a number of lawful bases on which sensitive data may be processed for law enforcement purposes as specified in <strong>Article 10 of the GDPR</strong>. The legal bases include processing authorised by a law, decree, ordinance, EU law or international agreement; processing necessary to protect the vital interests of the data subject or of another person; and processing relating to data that is manifestly made public (Article 34, the Data Protection Act). Competent authorities are under the same obligations to compile a list of persons who have access to the data and to ensure that those persons are bound by obligations of confidentiality (Article 34, §2, the Data Protection Act).</p>
<p>Pursuant to Article 23 of the GDPR, the Act provides exceptions to the data subject's rights when personal data are processed by a range of authorities specified therein, including the police services (Title I, Chapter III, the Data Protection Act) and the intelligence and security services (Title III, the Data Protection Act). Particularly in the context of data processing for law enforcement purposes, pursuant to Article 35 of the Act, decisions based solely on automated processing are permitted only if the law, decree, ordinance, EU legislation, or international agreement provides for appropriate safeguards for the rights and freedoms of the data subject, including at least the right to human intervention on the part of the controller. However, profiling that leads to discrimination based on sensitive data is prohibited (Article 35, the Data Protection Act).</p>
<p>In March 2018, Belgium amended its law on the use of surveillance cameras, particularly the <strong>Police Act (<em>Loi sur la fonction de police</em>/<em>Wet op het politieambt</em>)</strong>, to regulate the use of cameras by the police. Accordingly, it provided new rules on the use of mobile cameras (<em>caméra mobile</em>/<em>mobiele camera</em>) and smart cameras (<em>caméra intelligente</em>/<em>intelligente camera</em>) equipped with technology that goes beyond the simple processing of images, such as <strong>facial recognition cameras</strong> or <strong>automatic number plate recognition</strong>, as acknowledged in the Parliamentary Document No. 54 2855/001 of 4 January.</p>
<p><strong>The amendment to the Act permits the use of real-time smart cameras by the police in carrying out their administrative and judicial duties</strong> (subject to conditions depending on whether they are used in public spaces or in enclosed places, whether or not freely accessible to the public). According to the amended Act, the personal data collected by cameras can be retained for a maximum period of 12 months (Article 25/5, Police Act). During this period, access to the data is allowed for a period of one month from their registration, subject to a written and reasoned decision of the public prosecutor (<em>Procureur du roi/Procureur des Konings</em>) (Article 25/6, Police Act). When cameras are used visibly in administrative as well as judicial missions of the police (e.g., maintaining public order, crowd management, etc.) (ibid), they should not be aimed at collecting information about a person's racial or ethnic origin, religious or philosophical beliefs, political views, trade union membership, health status, sex life or sexual orientation (Article 25/3, §3, Police Act). Interestingly, biometric data is not included in this list of information whose collection by means of cameras is prohibited.</p>
<p>The Police Act further sets out the police powers to collect information about people, and it provides that biometric data that categorises different data subjects, such as those who have committed an offence against maintaining public order or for whom there is a monitoring order, can be processed solely for the identification of those subjects (Article 44/1, the Police Act). If such processing is likely to generate a high risk to the rights and freedoms of the persons concerned, the police must consult the Belgian Supervisory Body for Police Information (ibid).</p>
</section>
<section id="mobilisations-and-contestations" class="level2">
<h2>Mobilisations and contestations </h2>
<p>Based on this legislative framework, the General Commissioner, in his letter to the COC dated 18 July 2019, justified a deployment without consulting either the COC or the Belgian DPA on the grounds that</p>
<section id="although-the-creation-of-a-technical-database-for-facial-recognition-is-not-possible-under-the-current-legislation-the-use-of-real-time-intelligent-technologies-other-than-automatic-number-plate-recognition-anpr-is-possible-under-article-253-of-the-lfp.-the-legislator-has-indeed-provided-that-a-camera-used-by-the-police-regardless-of-its-type-can-be-equipped-with-intelligent-technology.-the-introduction-of-real-time-facial-recognition-is-therefore-in-our-opinion-in-accordance-with-the-law.-organe-de-controle-de-linformation-policière-2019-4" class="level4 Quote">
<blockquote class="Quote">“although the creation of a technical database for facial recognition is not possible under the current legislation, the use of real-time intelligent technologies other than Automatic Number Plate Recognition (ANPR) is possible under Article 25/3 of the LFP. The legislator has indeed provided that a camera used by the police, regardless of its type, can be equipped with intelligent technology. The introduction of real-time facial recognition is therefore, in our opinion, in accordance with the law.” <footer>(Organe de Controle de l'Information Policière 2019, 4)</footer></blockquote>
<p>The COC was not convinced by the arguments of the General Commissioner and concluded that the LFP did not apply. It justified its decision as follows:</p>
</section>
<section id="as-the-case-stands-the-regulator-is-not-entirely-convinced-that-the-lfp-is-applicable.-it-is-true-that-the-definition-of-a-smart-camera-is-taken-in-a-very-broad-sense.-according-to-article-252-1-3-of-the-lfp-this-term-refers-to-a-camera-which-also-includes-components-and-software-which-whether-or-not-coupled-with-registers-or-files-can-process-the-images-collected-autonomously-or-not.-in-the-explanatory-memorandum-anpr-cameras-and-cameras-for-facial-recognition-are-mentioned-as-examples-organe-de-controle-de-linformation-policière-2019-4" class="level4 Quote">
<blockquote class="Quote">“As the case stands, the Regulator is not entirely convinced that the LFP is applicable. It is true that the definition of a "smart camera" is taken in a very broad sense. According to Article 25/2, §1, 3° of the LFP, this term refers to "a camera which also includes components and software which, whether or not coupled with registers or files, can process the images collected autonomously or not". In the explanatory memorandum, ANPR cameras and cameras for facial recognition are mentioned as examples <footer>(Organe de Controle de l'Information Policière 2019, 4)</footer></blockquote>
<p>It further added that</p>
</section>
<section id="the-possibility-of-testing-a-facial-recognition-system-first-raises-questions-about-the-exact-scope-of-the-processing.-when-determining-the-correct-legal-framework-it-is-not-possible-to-establish-from-the-outset-whether-the-processing-of-personal-data-in-the-context-of-research-and-prosecution-is-already-being-considered-in-the-test-environment-or-during-a-test-period---and-thus-whether-the-fpa-and-title-ii-of-the-dpa-apply.-the-answer-to-this-question-is-crucial-in-order-to-determine-the-legal-basis-the-level-of-decision-making-within-the-police-that-is-entitled-to-decide-to-use-facial-recognition-the-nature-of-the-storage-medium-and-the-duration-of-storage-and-the-level-of-information-security-to-be-observed-operational-or-not.-secondly-and-in-the-alternative-the-review-body-notes-that-the-lfp-if-applicable-does-describe-what-falls-under-the-definition-of-a-smart-camera-but-does-not-stipulate-in-what-circumstances-and-under-what-conditions-the-use-of-facial-recognition-cameras-is-permitted-let-alone-on-what-medium-the-images-canshould-be-recorded-and-what-data-should-at-least-be-stored.-in-the-current-state-of-the-legislation-the-legislator-only-wanted-to-regulate-the-creation-of-a-technical-database-for-anpr-images.-organe-de-controle-de-linformation-policière-2019-4" class="level4 Quote">
<blockquote class="Quote">The possibility of testing a facial recognition system first raises questions about the exact scope of the processing. When determining the correct legal framework, it is not possible to establish from the outset whether the processing of personal data in the context of research and prosecution is already being considered in the test environment or during a test period - and thus whether the FPA and Title II of the DPA apply. The answer to this question is crucial in order to determine the legal basis, the level of decision making within the police that is entitled to decide to use facial recognition, the nature of the storage medium and the duration of storage, and the level of information security to be observed (operational or not). Secondly, and in the alternative, the Review Body notes that the LFP, if applicable, does describe what falls under the definition of a smart camera, but does not stipulate in what circumstances and under what conditions the use of facial recognition cameras is permitted, let alone on what medium the images can/should be recorded and what data should at least be stored. In the current state of the legislation, the legislator only wanted to regulate the creation of a technical database for ANPR images. <footer>(Organe de Controle de l'Information Policière 2019, 4)</footer></blockquote>
<p>The COC thus counter-argued that, because the current CCTV law was voted on with ANPR rather than facial recognition in mind, and facial recognition is permitted only for commercial use (such as the check-in of passengers), <strong>it was not legal to set up a technical database</strong> containing <strong>biometric information</strong>, and the system therefore did not have a sound legal basis (7sur7 2019). The interesting technicality of the case is that <strong>the “snapshots”</strong> generated in the first phase of the system's workflow were in practice stored for only a fraction of a second. Yet according to the law, this still constitutes a biometric database, and is thus not allowed (L'Avenir 2019).</p>
<p>The reaction by the Belgian Supervisory Body for Police Information shows that a degree of uncertainty about the legal basis for conducting biometric surveillance persists. From a legislative perspective, <strong>such a system can easily be re-activated</strong> if the grounds for the interruption are removed from the law. The current legislative activity seems to point in this direction.</p>
</section>
</section>
<section id="effects-of-the-technologies" class="level2">
<h2>Effects of the technologies</h2>
<p>While the city of Brussels is the location of much EU-level activism, this has not yet translated into an equivalent mobilisation at the national level, perhaps due to the currently very restrictive legislative position on the matter and the institutional checks and balances described in this chapter, which de facto ban the use of such technologies.</p>
<p>The French campaign Technopolice has extended to Belgium and is raising awareness through a diversified strategy based on public forums, the mapping of surveillance technologies, and the organisation of events. The NGO <strong>Ligue des Droits Humains</strong> is a member of the <strong>Reclaim Your Face</strong> campaign, along with 40 other organisations<a href="#fn38" class="footnote-ref" id="fnref38" role="doc-noteref"><sup>38</sup></a>, yet it has not been as active as partner organisations in neighbouring France or Germany.</p>
</section>
</section>
<section id="the-burglary-free-neighbourhood-in-rotterdam-netherlands" class="level1 case" data-title="Data-lab Burglary-free Neighbourhood">
<h1>The Burglary Free Neighbourhood in Rotterdam (Netherlands)</h1>
<div class="keypoints">
<p><strong>Key points</strong></p>
<ul>
<li><p>The <a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Fieldlab Burglary Free Neighbourhood</a> is a public-private collaboration with two aims: to detect suspicious behaviour and to influence the behaviour of the suspect. While the system of smart streetlamps does collect some image and sound-based data, it does not record any characteristics specific to the individual.</p></li>
<li><p>From a legal perspective, there is a question as to whether or not the data processed by the Burglary Free Neighbourhood programme qualifies as personal data and thus would fall within the scope of data protection legislation.</p></li>
<li><p>It is contested whether forms of digital monitoring and signalling are actually the most efficient methods for preventing break-ins. Despite the aims of the programme, to date, the streetlights have only been used to capture data for the purposes of machine learning.</p></li>
<li><p>The infrastructure installed for the experiments can potentially be used for more invasive forms of monitoring. During the project, local police, for example, already voiced an interest in access to the cameras.</p></li>
<li><p>In March 2021, the Fieldlab trial ended. The data collected over the course of the project was not sufficient for the computer to distinguish suspicious trajectories. The infrastructure of cameras and microphones is currently disabled, yet remains in place.</p></li>
</ul>
</div> <!-- key points -->
<p>In October 2019, the Carlo Collodihof, a courtyard in the Rotterdam neighbourhood Lombardijen, was equipped with a new kind of streetlamp. The twelve new luminaires did not just illuminate the streets; they were <strong>fitted with cameras, microphones, speakers, and a computer which was connected to the internet</strong>. They are part of the so-called <strong><a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Fieldlab Burglary Free Neighbourhood</a></strong>: an experiment in the public space with technologies for computer sensing and data processing, aimed at the prevention of break-ins, robberies, and aggression, at increasing the chances of catching perpetrators, and at increasing a sense of safety for the inhabitants of the neighbourhood (Redactie Inbraakvrije Wijk 2019; Kokkeler et al. 2020b). The practical nature of a Fieldlab provides a way to examine concretely how the various technologies come together, and how they fit in with existing infrastructures and regulations.</p>
<section id="detection-and-decision-making-in-the-burglary-free-neighbourhood-fieldlab" class="level2">
<h2>Detection and decision-making in the “Burglary free neighbourhood” Fieldlab</h2>
<p>The national programme Burglary Free Neighbourhood was initiated and funded by the <strong>Dutch Ministry of Justice and Security</strong>. It is led by <strong><a class="maplink" data-title="Dutch Institute for Technology Safety and Security (DITSS)">DITSS</a></strong> (Dutch Institute for Technology, Safety &amp; Security), a non-profit organisation that has been involved in earlier computer sensing projects in the Netherlands, for example in <strong><a class="maplink" data-title="Living Lab Stratumseind">Stratumseind</a>, Eindhoven</strong> (The Hague Security Delta 2021). Other parties involved include the <a class="maplink" data-title="Rotterdam Municipality">municipality of Rotterdam</a>, the <a class="maplink" data-title="Rotterdam Municipality">police</a> (both on a local and national level), the Public Prosecutor's Office and the insurance company <a class="maplink" data-title="Interpolis">Interpolis</a>. Part of the research is carried out by the <a class="maplink" data-title="Rotterdam Municipality">University of Twente</a>, <a class="maplink" data-title="Avans Hogeschool">Avans Hogeschool</a>, the Network Institute of the Vrije Universiteit Amsterdam and the <a class="maplink" data-title="Max Planck Institute for the Study of Crime, Security and Law">Max Planck Institute for Foreign and International Criminal Law</a> (Freiburg, Germany).</p>
<p><img src="images/media/image2.jpg" style="width:6.25564in;height:3.51788in" alt="A picture containing roller coaster, ride Description automatically generated" /></p>
<p>Figure 2. Fieldlab in Rotterdam Lombardijen</p>
<p>From a technological perspective, the project has two aims: to <strong>detect suspicious behaviour</strong>, and in turn<strong>, to influence the behaviour of the suspect</strong>. As such, project manager Guido Delver, who agreed to be interviewed for this report, describes the project as being primarily a behavioural experiment (Delver 2021). The twelve luminaires are provided by <a class="maplink" data-title="Sustainder">Sustainder</a> (their Anne series (Sustainder 2021)). The processing of the video and audio is done on the spot by a computer embedded in the luminaire, using software from the Eindhoven-based company <strong><a class="maplink" data-title="ViNotion">ViNotion</a></strong> (ViNotion 2020). This software reads the video frames from the camera and estimates the presence and position of people, mapping the coordinates of the video frame to coordinates in the space. It then determines the direction they are facing. <strong>Only these values, position and direction, and no other characteristics nor any images,</strong> are sent over the internet to a datacentre somewhere in the Netherlands, where the position data is stored for further processing (Delver 2021).</p>
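<p>The sketch below illustrates this kind of data-minimising pipeline under stated assumptions: people are detected in a frame, image coordinates are projected onto the street plane, and only positions leave the device. OpenCV's stock HOG person detector and an invented calibration matrix stand in for ViNotion's proprietary software, whose internals are not public; the orientation estimate is omitted.</p>
<pre><code># Minimal sketch; not ViNotion's actual software.
import cv2
import numpy as np

hog = cv2.HOGDescriptor()
hog.setSVMDetector(cv2.HOGDescriptor_getDefaultPeopleDetector())

# Illustrative 3x3 homography mapping image pixels to street coordinates
# (in metres); in practice this would be calibrated per camera.
H = np.array([[0.010, 0.000, -3.2],
              [0.000, 0.012, -1.5],
              [0.000, 0.0005, 1.0]])

def positions_in_frame(frame):
    """Return ground-plane positions only: no images, no identities."""
    boxes, _ = hog.detectMultiScale(frame, winStride=(8, 8))
    positions = []
    for (x, y, w, h) in boxes:
        foot = np.array([x + w / 2.0, y + h, 1.0])  # bottom-centre of the box
        ground = H @ foot                           # project onto the street plane
        positions.append((ground[0] / ground[2], ground[1] / ground[2]))
    return positions  # only these values would be sent onwards</code></pre>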
<p>Currently, <strong>there is no immediate processing of the position data</strong> to classify behaviour as being suspicious or not. The proposed pipeline consists of two stages: first, an unsupervised machine-learning algorithm for <strong>anomaly (outlier) detection processes the gathered trajectories</strong>, in order to distinguish trajectories that statistically deviate from the norm. As an example, both children playing and burglars making a scouting round through the neighbourhood can potentially produce anomalous trajectories. Secondly, <strong>these anomalous trajectories are judged as being suspicious or not by a computer model</strong> that was trained with human supervision. In the Fieldlab's first data collection experiment 100.000 trajectories were collected, totalling 20.000.000 data points (Hamada 2020). It turned out, however, that this was still too few to draw any conclusions about the viability of the approach: the big data was still too small (Delver 2021).</p>
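<p>A hedged sketch of such a two-stage pipeline is given below: unsupervised outlier detection over simple trajectory features, followed by a supervised classifier standing in for the human-labelled “suspicious or not” judgement. The features, scikit-learn models and synthetic data are our own assumptions for illustration; the Fieldlab's actual models have not been published.</p>
<pre><code># Illustrative two-stage pipeline; not the Fieldlab's actual models.
import numpy as np
from sklearn.ensemble import IsolationForest, RandomForestClassifier

rng = np.random.default_rng(0)

def trajectory_features(traj):
    """traj: array of (t, x, y) points for one tracked person."""
    xy = traj[:, 1:]
    steps = np.diff(xy, axis=0)
    path_len = np.linalg.norm(steps, axis=1).sum()
    displacement = np.linalg.norm(xy[-1] - xy[0])
    duration = traj[-1, 0] - traj[0, 0]
    # A looping "scouting round" yields a long path but a small displacement.
    return [path_len, displacement, duration, path_len / (displacement + 1e-6)]

def random_walk(n=60):
    """Synthetic stand-in for one recorded trajectory."""
    t = np.arange(n).reshape(-1, 1)                  # monotonic timestamps
    xy = np.cumsum(rng.normal(size=(n, 2)), axis=0)  # random-walk positions
    return np.hstack([t, xy])

X = np.array([trajectory_features(random_walk()) for _ in range(2000)])

# Stage 1: flag statistically deviant trajectories. Children playing and
# burglars scouting may both end up among the anomalies.
stage1 = IsolationForest(contamination=0.01, random_state=0).fit(X)
anomalous = X[stage1.predict(X) == -1]

# Stage 2: a supervised model judges which anomalies are suspicious.
# Random labels stand in here for the human annotation step.
labels = rng.integers(0, 2, size=len(anomalous))
stage2 = RandomForestClassifier(random_state=0).fit(anomalous, labels)
suspicious = stage2.predict(anomalous)</code></pre>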
<p>Another input for detecting suspicious situations is the <strong>microphone with which some of the streetlamps are equipped</strong>. By recording two frequencies of sound, sounds can be categorised as coming from, for example, a conversation, shouting, a dog barking, or the breaking of glass. The two frequencies recorded provide too little information to distinguish the words in a conversation (Delver 2021).</p>
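<p>As a rough illustration of how little information two frequency bands carry, the sketch below computes a signal's energy in two assumed bands; such coarse band energies can help separate, say, breaking glass from a conversation, while being far too crude to reconstruct spoken words. The band edges are our own guesses, as the project has not published its audio specifications.</p>
<pre><code># Illustrative band-energy features; the real audio pipeline is not public.
import numpy as np

def band_energies(samples, rate, bands=((100, 300), (2000, 4000))):
    """Return the signal energy in each frequency band (band edges assumed)."""
    spectrum = np.abs(np.fft.rfft(samples)) ** 2
    freqs = np.fft.rfftfreq(len(samples), d=1.0 / rate)
    return [spectrum[(freqs &gt;= lo) &amp; (freqs &lt; hi)].sum() for lo, hi in bands]

# A downstream classifier would see only these two numbers per time window:
# energies = band_energies(window, rate=16000)</code></pre>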
<p>Aside from experimenting with the automated detection of suspicious behaviour, the Fieldlab experiments with various ways in which the detected situations can be played out. Project manager Guido Delver notes that the aim is not <em>per se</em> to involve the police. Instead, the suspect should be deterred before any crime is committed (Delver 2021). Various strategies are laid out: the yet-to-be-autonomous system can <strong>voice warnings through the speakers</strong> embedded in the streetlamps. Or, in line with the work of DITSS in Eindhovens Stratumseind street, the <strong>light intensity or colour of the streetlamps can be changed</strong> (Intelligent Lighting Institute, n.d.). Both strategies are aimed at signalling the subjects that their behaviour is noticed, which generally suffices to have burglars break off their scouting. Another option under consideration is to send a signal to the residents living nearby.</p>
<p>The process of data gathering in the Burglary Free Neighbourhood is quite similar to technologies that are deployed for anonymous people counting. One such application has been developed by <strong><a class="maplink" data-title="Numina">Numina</a></strong> and is <a class="maplink" data-title="People counting in Nijmegen">deployed in the Dutch city of Nijmegen</a>: individuals are <strong>traced through space and time, but not identified or categorised.</strong> This information is then used to provide statistics about the number of visitors in the city centre (Schouten and Bril 2019). Another Dutch deployment of technologically similar software is the <strong><a class="maplink" data-title="Test of One and a half meter monitor">One-and-a-half-meter monitor</a> developed by the <a class="maplink" data-title="Amsterdam Municipality">municipality of Amsterdam</a>,</strong> which is based on the YOLO5 object detection algorithm and trained on the COCO dataset. This data processing architecture can detect the presence of persons but is incapable of deducing any characteristics (Amsterdam-Amstelland safety region 2020). These implementations show that the presence of people can be detected without storing any biometric characteristics.</p>
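<p>Technically, such counting can be built directly on an off-the-shelf detector. The sketch below assumes the publicly released YOLOv5 model trained on COCO (the model family the Amsterdam monitor reportedly builds on) and keeps nothing but the count of “person” detections; it is an illustration, not the municipalities' actual code.</p>
<pre><code># Illustrative anonymous people counting with the public YOLOv5 model.
import torch

# Load the small YOLOv5 variant, pre-trained on the COCO dataset.
model = torch.hub.load("ultralytics/yolov5", "yolov5s", pretrained=True)

def count_people(image):
    """Return the number of detected persons; no crops, boxes or
    appearance features are retained, only the count."""
    detections = model(image).pandas().xyxy[0]
    return int((detections["name"] == "person").sum())

# e.g. count_people("street_scene.jpg")</code></pre>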
<p><img src="images/media/image3.png" style="width:5.35242in;height:3.07738in" alt="Two people holding umbrellas on a street Description automatically generated with low confidence" /></p>
<p>Figure 3. The one-and-a-half-meter monitor developed by the municipality of Amsterdam</p>
</section>
<section id="legal-bases-and-challenges-1" class="level2">
<h2>Legal bases and challenges</h2>
<p>The <a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Fieldlab Burglary Free Neighbourhood</a> programme shows how data can be used to <strong>conduct monitoring</strong> and <strong>nudging of individuals behaviours</strong>. From a legal point of view, the question is whether the data processed in the context of the programme qualifies as <strong>personal data</strong> and would thus fall within <strong>the scope of data protection legislation</strong>.</p>
<p>The Constitution for the Kingdom of the Netherlands provides for a general right to protection of privacy in Article 10, according to which restrictions to that right must be laid down by law. The GDPR Implementation Act (<em>Uitvoeringswet Algemene Verordening Gegevensbescherming</em>, UAVG), together with the Police Data Act (<em>Wet Politiegegevens</em>) and the Judicial Data and Criminal Records Act (<em>Wet Justitiële en Strafvorderlijke Gegevens</em>), which implement the GDPR and the LED, provides the legal framework regarding privacy and data protection.</p>
<p>The <strong>definition of personal data</strong> as enshrined in the GDPR and the LED is directly applicable under Dutch law. To qualify data as such, “any information” must relate to an identified or identifiable natural person. Based on the data that can be captured by the Fieldlab programme, two elements of this definition need further attention.</p>
<p><strong>-“Information “relating to” a natural person”</strong>. The former <strong>Article 29 Working Party</strong> (2007) substantiated this element by noting that information can relate to an individual based on its content (i.e., information is about the individual), its purpose (i.e., information is used or likely to be used to evaluate, treat in a way, or influence the status or behaviour of an individual), or its result (i.e., information is likely to have an impact on a certain person's rights and interests, taking into account all the circumstances surrounding the precise case). These three alternative notions to determine whether the information relates to an individual were endorsed by the <strong>CJEU</strong> in its <em>Nowak</em> decision (C-434/16), where it dealt with the purpose (i.e., it evaluates the candidate's competence) and the result (i.e., it is used to determine whether the candidate passes or fails, which can have an impact on the candidate's rights) of the information in question in determining whether the written answers to an exam would qualify as personal data. In brief, in determining whether the data captured by the Fieldlab programme qualify as personal data, the context in which the data is used or captured is important. Information about the level of crowding or sound could “relate” to an individual if it is used to evaluate or influence the behaviour of a person (based on its purpose), or to affect a person's rights (based on its result) (Galič and Gellert 2021).</p>
<p><strong>-“Identifiable Person”.</strong> The notion of identifiability covers circumstances where it is possible to distinguish the individual from a group of people by combining additional information (see 4.2.1). In situations where the person cannot be identified, the extent to which that person is identifiable depends on the possibilities of combining additional identifying information (Galič and Gellert 2021). However, where the system mainly operates on <strong>non-personal data because its aim is to influence the behaviour of a group of people</strong>, instead of an identified or identifiable person, the chances of having sufficient data to render the person identifiable would be lower (ibid).</p>
<p>The uncertainties around these two elements of personal data mean that <strong>a project that monitors and tracks the behaviour of individuals in public spaces may fall outside the scope of data protection legislation</strong> if there are uncertainties around whether the data it processes actually qualify as personal data. Notably, the Whitepaper on sensors and the role of municipalities (van Barneveld, Crover, and Yeh 2018), produced in collaboration with the Ministry of the Interior, makes reference to the definition of personal data and to the possibility of combining, for example, sound data with camera recordings to trigger the application of the data protection legislation, without giving further details. Unlike the relevant sections of the other case studies, this section will not explore further data processing conditions under the UAVG and the other relevant laws, because the first-instance issue from a data protection perspective with the Fieldlab programme, or any similar initiative, is whether it processes personal data at all.</p>
</section>
<section id="mobilisations-and-contestations-1" class="level2">
<h2>Mobilisations and contestations</h2>
<p>Despite visits from the mayor of Rotterdam and Secretary of State Sander Dekker, the <strong>Fieldlab of the Burglary Free Neighbourhood</strong> <strong>has not been discussed much in Dutch media</strong>. The most prominent discussion on the project has been in a TV broadcast and online video by Nieuwsuur, in which criminologist Marc Schuilenburg is sceptical about the technology deployed in the Fieldlab (Nieuwsuur 2020a, 5:38m):</p>
<section id="so-far-there-has-not-been-any-study-that-assesses-the-effectiveness-of-the-streetlamps.-we-know-what-works-best-against-burglary-looking-out-for-each-other-and-fitting-your-door-with-a-double-lock.-social-cohesion-is-known-to-work-best.-what-is-happening-now-is-that-social-cohesion-is-degrading-because-neighbours-can-trust-in-the-intelligent-streetlight.-any-responsibility-is-delegated-to-a-streetlight." class="level4 Quote">
<blockquote class="Quote">So far, there has not been any study that assesses the effectiveness of the streetlamps. We know what works best against burglary: looking out for each other and fitting your door with a double lock. Social cohesion is known to work best. […] What is happening now is that social cohesion is degrading, because neighbours can trust in the intelligent streetlight. Any responsibility is delegated to a streetlight.</blockquote>
<p>Schuilenburg frames the interest of cities in technologies such as those used in the Burglary Free Neighbourhood as being part <strong>of the well-marketed narrative of the “smart city” that is sold by technology companies</strong>: “no city wants to be dumb” (“Nieuwsuur” 2020b, 36m). To some extent, Guido Delver positions the project's <strong>privacy-by-design methodology</strong> <strong>in contrast to many of these commercial products for surveillance</strong>. In his conversations with various municipalities he recognises, and shares, the interest in “smart” surveillance technologies. However, Delver attempts to minimise the data gathering in the Burglary Free Neighbourhood. This proves to be a constant negotiation: for example, the police voiced an interest in access to the camera feeds in case suspicious behaviour was detected. However, access to the camera feeds has been deliberately kept outside of the scope of the project (Delver 2021).</p>
<p>While the project currently only stores the position of passers-by, there are also <strong>technical considerations for the capture of more information.</strong> For example, the video cameras cannot cover the entire area; therefore, as no characteristics of individuals are stored, <strong>tracking people from one camera to the next is problematic</strong>. This raises the question of whether biometric measurements such as a person's estimated volume, height, or colour of clothing should be recorded, as this would allow the computer to link the trace from one camera to another. This poses ethical and legal questions for the project: <strong>what are the legal ramifications of deducing and (temporarily) storing these characteristics, and for how long should they be stored (Delver 2021)?</strong> Even for projects that decide to consider privacy by design, it can be tempting to store and process biometric information. However, as mentioned above (see section 7.2.), the challenges in determining whether the Fieldlab or any other similar initiatives process personal data as defined in the GDPR raise questions about the extent to which these programmes fall within the scope of the data protection legislation, irrespective of the fact that they may be designed to affect the personal autonomy of individuals (as opposed to an identified or identifiable individual) by influencing and nudging their behaviours.</p>
<p>Finally, commentators have pointed out the <strong>discrepancy between what is expected of the technology and what it is actually doing.</strong> For example, the Algemeen Dagblad (Krol 2019) writes that the “smart streetlights” are able to “recognise behaviour” and to “sound the alarm” if necessary, <strong>whereas up until now the streetlights have only been used to capture data for machine learning.</strong></p>
<p>These observations raise the question as to whether the communication about the technologies used suffices. When entering the neighbourhood, a sign signals to the visitor that the Fieldlab is operative; however, much of the information discussed above could not be found on the website mentioned on the sign, as is indicated by the breadth of references used. This situation is substantially different from the way that, for example, the <a class="maplink" data-title="Amsterdam Municipality">city of Amsterdam</a> lays out its use of algorithms: one website presents the goals of the projects, the kinds of data processing that are happening, and the datasets on which the algorithms are trained, and in some cases the source code is shared (Amsterdam Algoritmeregister, 2021). <strong>The Dutch government is currently drafting regulations for a national register of cameras and sensors as deployed by municipalities (Nieuwsuur 2020b).</strong></p>
</section>
</section>
<section id="effects-of-the-technologies-1" class="level2">
<h2>Effects of the technologies</h2>
<p>Since March 2021, the experiment in the Fieldlab of the Burglary Free Neighbourhood in Rotterdam has been on hold. <strong>Researchers have not yet been able to have the computer distinguish suspicious trajectories or sounds.</strong> As such, the system has not been able to respond to any such situations with lights or sounds (Redactie LikeJeWijk 2021). Further research into this is happening in a Virtual Reality environment, as was discussed in the first section.</p>
<p>As part of the Fieldlab, research about the effects of the technologies deployed in the streets has been carried out by the <a class="maplink" data-title="Avans Hogeschool">Avans Hogeschool</a>, presenting five relevant observations. First, it is too early to draw any conclusions about the impact of the deployed technologies on the statistics for high-impact crime (e.g., break-ins, aggression, robberies) in the neighbourhood (Kokkeler et al. 2020b, 25). Moreover, no research has yet been done into the “waterbed effect” of crime: whether crime prevention in one block leads to an increase in crime in an adjacent neighbourhood (Kokkeler et al. 2020b, 9).</p>
<p>Secondly, in the Rotterdam neighbourhood that was examined, the streetlights equipped with cameras were by no means the only technological interventions to prevent break-ins. A breadth of technology is deployed (e.g., cameras and alarm systems) which is either privately owned, owned by the municipality or the police, or distributed by insurance companies. In this cacophony of technological appliances, <strong>it becomes unclear which data is collected and how it is processed.</strong> Furthermore, it is unclear <strong>who owns and manages these data collection and processing networks, whether they are private parties or law enforcement agencies</strong>. Kokkeler et al. argue that a better overview of these practices is crucial in order to assess the ethical, legal, and social impact of these deployments (Kokkeler et al. 2020b, 24).</p>
<p>Thirdly, after conducting interviews with the residents, Kokkeler et al. concluded that most were unaware that the newly placed streetlights were equipped with sensors. <strong>Moreover, when discussing the “sensors” in the streetlights, many residents could only imagine the use of cameras, not realising what data was being gathered (Kokkeler et al. 2020a, 21).</strong> While resident participation features prominently in the goals of the Fieldlab, the Coronavirus pandemic has hindered the planned involvement of the residents (Delver 2021).</p>
<p>Fourth, the moment residents were informed about the data gathering and processing taking place, they were optimistic about a potential use of the data by the local police and municipality, as long as the cameras <strong>were only directed at public space</strong>. Some residents voiced their concern that the information should only be used to address high-impact crime, and not for minor offences, in particular if these involve minors. On the other hand, some other residents suggested a broader use of the streetlights, for example in fighting litter and speeding (Kokkeler et al. 2020a, 21). Despite the fact that the direct sharing of the generated data with the police is contrary to the aims of the project (Delver 2021), the infrastructure that is deployed in the streets enables other engagements with the technology: the so-called <strong>function creep</strong>.</p>
<p>Finally, the residents of the Rotterdam neighbourhood are known to not properly use more “low-tech” security measures. A case in which someone went out to walk their dog while leaving the key in the door is an illustration of this. Moreover, when a break-in happens, it is not always reported, as often the culprit is directly or indirectly known (Kokkeler et al. 2020a, 22). This indicates that the technologies used in the Fieldlab <strong>might be unfit to address the primary issues in the neighbourhood</strong>.</p>
<p>All in all, the case of the Fieldlab Burglary Free Neighbourhood provides for interesting and relevant research into <strong>alternative means of behavioural monitoring and influencing</strong>. The direct “nudging” of behaviour theoretically removes the need for a centralised database of biometric data; the system also does not capture and process biometric information on individuals. Yet in order to increase detection capabilities, it can be a <strong>slippery slope to implement algorithmic deduction of individuals' traits</strong> while storing these for short amounts of time. For how long should this information be kept? In other words: <strong>what is the desired balance between increased detection performance and storage duration?</strong> In this light, the project's current setup of not storing, nor deducing, any biometric information is a clear, fixed guideline that avoids this grey zone and many of the issues with remote biometric identification that are addressed within this report.</p>
<p>It is however apparent that projects such as these <strong>require (legal) fail-safes for their usage</strong>. In the case of the <strong>Burglary Free Neighbourhood</strong>, it seems the <strong>project's privacy-by-design approach has been secured by project manager Delver</strong>. The fact that local police requested access to the cameras indicates the necessity for proper oversight in such deployments. Assuming that the data protection legislation applies to the <strong>Fieldlab</strong> or any similar initiatives (to the extent that they process “personal data”), it is unclear who processes which type of data and what the level of collaboration between the private and public sectors is. This uncertainty means that it may be <strong>hard to allocate responsibilities and obligations</strong> under the data protection legislation, since it may be complicated to determine who is the “competent authority” whose processing activities in a law enforcement context fall under the <strong>scope of the LED</strong>, as well as who is the <strong>authority responsible for processing personal data</strong> (i.e., the <strong>data controller</strong>) and who is responsible for processing the data on behalf of that authority (i.e., the <strong>data processor</strong>). Such uncertainties may complicate the effectiveness of the <strong>data protection legislation</strong> (Purtova, 2018).</p>
</section>
</section>
<section id="the-safe-city-projects-in-nice-france" class="level1 case" data-title="Safe City Pilot Project (Nice)">
<h1>The Safe City Projects in Nice (France)</h1>
<div class="keypoints">
<p><strong>Key points</strong></p>
<ul>
<li><p>Several French cities have launched “safe city” projects involving biometric technologies; however, Nice is arguably the national leader. The city currently has the highest CCTV coverage of any city in France and has more than double the police agents per capita of the neighbouring city of Marseille.</p></li>
<li><p>Through a series of public-private partnerships the city began a number of initiatives using RBI technologies (including emotion and facial recognition). These technologies were deployed for both authentication and surveillance purposes with some falling into the category of biometric mass surveillance.</p></li>
<li><p>One project which used FRT at a <a class="maplink" data-title="Facial Recognition Pilot in High School (Nice)">high school in Nice</a> and <a class="maplink" data-title="Facial Recognition Pilot in High School (Marseille)">one in Marseille</a> was eventually declared unlawful. The court determined that the required consent could not be obtained due to the power imbalance between the targeted public (students) and the public authority (public educational establishment). This case highlights important issues about the deployment of biometric technologies in public spaces.</p></li>
<li><p>The use of biometric mass surveillance by the mayor of Nice Christian Estrosi has put him on a collision course with the French Data Protection Authority (<a class="maplink" data-title="CNIL">CNIL</a>) as well as human rights and digital rights organisations (<a class="maplink" data-title="La Ligue des droits de l'Homme">Ligue des Droits de l'Homme</a>, <a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a>). His activities have raised both concern and criticism over the usage of the technologies and their potential impact on the privacy of personal data.</p></li>
</ul>
</div> <!-- key points -->
<p>Although several French cities such as Paris, <a class="maplink" data-title="City of Valenciennes">Valenciennes</a> or <a class="maplink" data-title="Mairie de Marseille">Marseille</a> have launched “safe city” pilot projects involving <strong>biometric technologies (facial, voice, sound recognition),</strong> the city of Nice is perhaps the <strong>national leader in the experimentation with such technologies at a local level</strong> (Nice Premium 2017). The mayor of Nice, Christian Estrosi (Les Républicains Party, right), a prominent political figure on the national political scene, has made clear his intention to make Nice a “laboratory” of crime prevention (Barelli 2018). Since 2010, more than <strong>1.962 surveillance cameras have been deployed throughout the city</strong>, making it the city with the <strong>highest CCTV coverage in France</strong> (27 cameras per square kilometre). Nice also has the most local police officers per inhabitant of any French city: 414 agents for a population of 340.000 (in comparison, the neighbouring city of Marseille has 450 agents for 861.000 inhabitants).</p>
<section id="the-various-facets-of-the-safe-city-project-in-nice" class="level2">
<h2>The various facets of the “Safe city” project in Nice</h2>
<p>Nice has experimented with various initiatives related to <strong>remote biometric identification</strong>, many of which fall into the category of biometric mass surveillance. In 2017, Christian Estrosi announced a partnership with the energy company <a class="maplink" data-title="Engie Ineo">Engie Ineo</a> for the development of an Urban Surveillance Centre (Centre de Surveillance Urbain, CSU). Based on a touch-interface technology, it centralises <strong>real-time data such as traffic accidents, patrol locations, as well as video feeds from CCTV</strong> cameras on the streets and in public transportation (Dudebout 2020, 1). The video feeds from the city tramways are connected to an <strong><a class="maplink" data-title="Facial/Emotion Recognition Pilot in Tramway (Nice)">emotion recognition algorithm</a></strong> to flag suspicious situations (Allix 2018).</p>
<p>In June 2018, an additional step was taken with the signing of a partnership agreement with a consortium of companies headed by <a class="maplink" data-title="Thales">Thales</a>, specialised in social network intelligence, geolocation, biometrics and crowd simulation<a href="#fn39" class="footnote-ref" id="fnref39" role="doc-noteref"><sup>39</sup></a> for a <strong>“Safe City” project</strong> (Dudebout 2020, 2). Established for three years (2018-2021) with a budget of EUR 10,9 million, the project is financed by the city council, subsidised in part by <a class="maplink" data-title="Bpifrance">BPI France</a><a href="#fn40" class="footnote-ref" id="fnref40" role="doc-noteref"><sup>40</sup></a>, and supported by the Committee for the Security Industrial Sector, an agency under the tutelage of the Prime Minister's office<a href="#fn41" class="footnote-ref" id="fnref41" role="doc-noteref"><sup>41</sup></a> (Allix 2018; BPI France 2018).</p>
<p>The first facial recognition test of the Safe City project took place from 16 February to 2 March 2019, during the Nice Carnival. The experiment was a simulation, matching faces collected through CCTV footage of the crowd attending the carnival against a fictitious set of databases (lost individuals, wanted individuals, or individuals with restraining orders). The fictitious datasets consisted of 50 volunteers, recruited mostly from the municipality, who provided existing pictures of themselves or were freshly photographed for the test. The system used <strong>live facial recognition software provided by the company Anyvision</strong>, operating on live feeds filmed during the carnival. Passers-by (approximately 1000 people were concerned) were informed of the ongoing test and asked to wear a bracelet if they consented to being filmed (Hassani 2019).</p>
<p>A second experiment took the form of a <strong>software application (app) named “Reporty”,</strong> rolled out in January 2018. The app, developed by the Israeli-American company <a class="maplink" data-title="Carbyne">Carbyne</a>, allows citizens to be in direct audio and video connection with the Urban Surveillance Centre and to share geolocation information with it, in order to report any incivility, offense, or crime that they might witness (Barelli 2018).</p>
<p>The third project, involving <strong>facial recognition</strong>, was tested in the education context. In February 2019, <strong><a class="maplink" data-title="Facial Recognition Pilot in High School (Nice)">a high school in Nice</a> and <a class="maplink" data-title="Facial Recognition Pilot in High School (Marseille)">a high school in Marseille</a> were fitted with facial recognition technology</strong> at their gates in order to grant or bar access to the premises. The official motivation behind the deployment was to “assist the personnel of the high schools and to fight against identity theft” (Dudebout 2020, 3-4).</p>
</section>
<section id="legal-bases-and-challenges-2" class="level2">
<h2>Legal bases and challenges</h2>
<p>The use of facial recognition systems in high schools in Nice and Marseille, which <strong>was declared unlawful by the Administrative Court of Marseille</strong>, raised important issues on the legality of deploying biometric technologies in public places.</p>
<p>There is no specific provision devoted to the right to privacy or data protection in the French Constitution of 1958, but constitutional safeguards for the interests protected under said rights exist. <strong>The French Constitutional Council</strong> (<em><a class="maplink" data-title="Conseil constitutionnel">Conseil Constitutionnel</a></em>) has recognised that the respect for privacy is protected by Article 2 of the 1789 Declaration of the Rights of Man and of the Citizen, which is incorporated in the <strong>French constitutionality bloc</strong> <strong>as a binding constitutional rule (bloc de constitutionnalité)</strong> (French Constitutional Council, Decision N° 2004-492 DC of 2 March 2004). Accordingly, the collection, retention, use and sharing of personal data attracts protection <strong>under the right to privacy</strong> (French Constitutional Council, Decision n° 2012-652 DC of 22 March 2012). Limitations to that right must thus be justified on grounds of general interest and implemented in an adequate manner, <strong>proportionate to this objective (ibid).</strong></p>
<p><strong>France has updated the Act N°78-17</strong> of 6 January 1978 on information technology, data files and civil liberties in various stages to incorporate the provisions of the <strong>GDPR</strong>, address the possible exemptions contained in the <strong>GDPR</strong>, and implement the <strong>LED</strong>.</p>
<p>The Act sets out the <strong>reserved framework for sensitive data including biometric data</strong> in its Article 6, which states that <strong>sensitive data</strong> can be processed for purposes listed in Article 9(2) of the GDPR as well as those listed in its Article 44. The latter includes the <strong>re-use of information contained in court rulings and decisions</strong>, provided that neither the purpose nor the outcome of such processing is the re-identification of the data subjects; and the processing of biometric data by employers or administrative bodies if it is strictly necessary to control access to workplaces, equipment, and applications used by employees, agents, trainees, or service providers in their assignments. </p>
<p>Pursuant to Article 6 of the Act N°78-17, <strong>processing of sensitive data can be justified for public interest if it is duly authorised</strong> in accordance with Articles 31 and 32 of the Act. Accordingly, an <strong>authorisation by decree of the <a class="maplink" data-title="Conseil d'État">Conseil d'État</a> (<em>State Council</em>) is required after reasoned opinion of <a class="maplink" data-title="CNIL">CNIL</a></strong>, for processing of biometric data on behalf of the State for the authentication of control of the identity of the individuals (Article 32, Act N°78-17). </p>
<p>In February 2020, the Administrative Court of Marseille considered the extent to which the <strong>data</strong> <strong>subject's explicit consent may provide an appropriate legal basis</strong> for the deployment of facial recognition systems to control access to high schools in Nice and Marseille (Administrative Court of Marseille, Decision N°1901249 of 27 February 2020). After recognising that data collected <strong>by</strong> <strong>facial recognition constitute biometric data</strong> (para 10), the Court held that the required consent could not be obtained simply by having the students, or their legal representatives in the case of minors, sign a form, due to the power imbalance between the targeted public and the public educational establishment as the public authority (para. 12). More importantly, <strong>the Court determined that the biometric data processing could not be justified based on a substantial public interest</strong> (i.e., controlling access to premises) envisioned in Article 9(2)(g) of the GDPR in the absence of evidence that the relevant aim could not be achieved by badge checks combined, where appropriate, with video surveillance (ibid).</p>
<p>Article 88 of the Act N°78-17 provides the specific limitations of the processing of sensitive data for law enforcement purposes, according to which their processing is prohibited unless it is strictly necessary, subject to appropriate safeguards for the data subjects rights and freedoms and based on any of the same three grounds listed in Article 10 of the <strong>LED</strong>, including where it is authorised by law.</p>
<p><strong>The Act N°78-17 provides data subjects with rights against the processing of their personal data</strong>, with restrictions on the exercise of those rights subject to certain conditions (e.g., the restriction, for protecting public security, of the right to access data processed for law enforcement purposes pursuant to Art 107 of Act N°78-17). An important data subject right in the context of biometric surveillance is <strong>the right not to be subjected to solely automated decision-making, including profiling, except if it is carried out in light of circumstances laid out in Article 22 of the GDPR</strong> and for individual administrative decisions taken in compliance with French legislation (Article 47 of Act N°78-17). That said, for the latter circumstance, the automated data processing must not involve sensitive data (Article 47(2), Act N°78-17). Regarding data processing operations relating to State security and defence (Article 120, Act N°78-17) and to the prevention, investigation, and prosecution of criminal offences (Article 95, Act N°78-17), the Act lays out an absolute prohibition against solely automated decision-making, according to which no decision producing legal effects or similarly significant effects can be based on such decision-making intended to predict or assess certain personal aspects of the person concerned. Particularly, with respect to data processing operations for law enforcement purposes, Article 95 of the Act prohibits any type of profiling that discriminates against natural persons based on sensitive data as laid out in Article 6.</p>
<p>In addition to the data protection legislation, <strong>the other legislation applicable to biometric surveillance is the Code of Criminal Procedure.</strong> Its Article R40-26 allows the national police and gendarmerie to retain in a criminal records database (<em><a class="maplink" data-title="Deployment of TAJ">Traitement des Antécédents Judiciaires</a></em> or <em><a class="maplink" data-title="Deployment of TAJ">TAJ</a></em>) photographs of people suspected of having participated in criminal offences as well as victims and persons being investigated for causes of death, serious injury or disappearance to make it possible to use a facial recognition device. According to a 2018 report by Parliament, <strong><a class="maplink" data-title="Deployment of TAJ">TAJ</a> contains between 7 and 8 million facial images (<em>Assemblée Nationale</em> N°1335, 2018, 64, f.n. 2).</strong> <a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a> lodged legal complaints against the retention of facial images before the <a class="maplink" data-title="Conseil d'État">Conseil d'État</a>, arguing that this practice does not comply with the strict necessity test required under Article 10 of LED and Article 88 of Act N°78-17 (La Quadrature du Net, 2020).</p>
</section>
<section id="mobilisations-and-contestations-2" class="level2">
<h2>Mobilisations and contestations</h2>
<p>The political agenda of Nice's mayor to be at the forefront of biometric mass surveillance technologies in France, and possibly in Europe, has put him on a collision course with two main actors: <strong>the French Data Protection Authority (<a class="maplink" data-title="CNIL">CNIL</a>) and human rights/digital rights organisations.</strong></p>
<p>The French digital rights organisation <strong><a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a> was quick to highlight the problems raised by the deployment of these technologies in Nice</strong>. “The safe city is the proliferation of tools from the intelligence community, with a logic of massive surveillance, identification of weak signals and suspicious behaviour”, commented Félix Tréguer, a Marseille-based leader of the association <a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a> and member of the campaign Technopolice<a href="#fn42" class="footnote-ref" id="fnref42" role="doc-noteref"><sup>42</sup></a>. “We do not find it reassuring that the municipal police will become the intelligence service of the urban public space and its digital double” (Allix 2018).</p>
<p><strong>The <a class="maplink" data-title="La Ligue des droits de l'Homme">Ligue des Droits de l'Homme</a> emphasised similar points, highlighting the political dangers involved.</strong> As Henri Busquet of the Ligue des Droits de l'Homme in Nice put it: “improving emergency services and traffic is legitimate, but the generalisation of video surveillance worries us, and scrutinising social networks is not the role of a mayor. Without any safeguards, such a tool cannot demonstrate the necessary neutrality [...] It is potentially a tool for political destruction, which puts opponents and journalists at particular risk” (Allix 2018).</p>
<p>In July 2019, the city of Nice hoped the <a class="maplink" data-title="CNIL">CNIL</a> would provide advice related to its first test experiment during the Carnival. The <a class="maplink" data-title="CNIL">CNIL</a> responded however that not enough information was provided by the municipality for the DPA to assess it. The French DPA pointed out in particular the lack of “quantified elements on the effectiveness of the technical device or the concrete consequences of a possible bias (related to gender, skin colour ...) of the software” (Dudebout 2020, 3).</p>
<p><strong>The launch of the smartphone application “Reporty” was the catalyst for mobilisation in Nice, united under the umbrella organisation “Collectif anti-Reporty”</strong>. The coalition was formed by local representatives from two left-wing parties (Parti Socialiste, Les Insoumis), Tous Citoyens, the <a class="maplink" data-title="CGT Union (Education) Alpes-Maritimes">union CGT</a> and the anti-discrimination NGO MRAP. The coalition appealed to two institutions to block the use of the application: <strong>the Defender of Rights</strong> (Défenseur des Droits) and the French DPA (<a class="maplink" data-title="CNIL">CNIL</a>). The coalition denounced “a risk of generalised denunciation and a serious breach of privacy”, calling to “put an end to the securitarian drift of Christian Estrosi” (Barelli 2018).</p>
<p><strong>On 15 March 2018, the <a class="maplink" data-title="CNIL">CNIL</a> stated that the application was too invasive and did not meet the criteria set out by the legislation</strong>. It did not meet the proportionality test; it failed to fall within the frame of the existing law on video-protection due to the integration of private citizens' terminals (smartphones) with a security database managed by the police; it was excessively intrusive due to the collection of images and voice of people in the public space; and finally, it covered a field of offenses that was too broad (CNIL 2018).</p>
<p><strong>The school experimentation further pushed the <a class="maplink" data-title="CNIL">CNIL</a> to take a position on the technological activism of Nice's mayor.</strong> On 29 October 2019, it expressed serious concerns over the experimentation, arguing that the technology clashed with the principles of proportionality and data minimisation enshrined in the GDPR. It pointed out that other methods, less intrusive for the privacy of the students, could be used to achieve the technology's stated goals, namely increasing the students' security and traffic fluidity (Dudebout 2020, 4).</p>
<p><strong>In a landmark opinion published on 15 November 2019, the <a class="maplink" data-title="CNIL">CNIL</a> clarified what it defined as guidelines related to facial recognition (CNIL 2019a).</strong> The French DPA expressed concerns over a blanket and indiscriminate use of the technologies, highlighting possible infringements of fundamental rights, because these technologies operate in the public space, where these freedoms (expression, reunion, protest) are exercised. It did not, however, suggest that they should be banned in all circumstances; it suggested instead that their use could be justified if properly regulated, on a case-by-case basis. <strong>Certain uses could be rejected a priori, such as in the case of minors, whose data are strictly protected</strong>. The question of data retention is also central: the opinion warns against excessive retention periods and excessive centralisation, suggesting instead citizens' control over their own data. But as the president of the <a class="maplink" data-title="CNIL">CNIL</a>, Marie-Laure Denis, explained, facial recognition technology “can have legitimate uses, and there is not a firm position of the <a class="maplink" data-title="CNIL">CNIL</a>'s board” (Untersinger 2019).</p>
<p><strong>The repeated rebukes of Nice's experimentation with facial recognition technology by the <a class="maplink" data-title="CNIL">CNIL</a> have, however, not tempered the enthusiasm of the mayor</strong>. Rather than caving in, Estrosi questioned the legitimacy of the <a class="maplink" data-title="CNIL">CNIL</a>'s decisions, arguing that the legal framework, and in particular the French law of 1978 regulating data collection in relation to digital technologies, was itself a limitation. In 2018, Estrosi asked: “I have the software that would allow us to apply facial recognition tomorrow morning and to identify registered individuals wherever they are in the city... Why should we prevent this? Do we want to take the risk of seeing people die in the name of individual freedoms, when we have the technology that would allow us to avoid it?” (Allix 2018) In December 2019, Estrosi reiterated his attacks on the <a class="maplink" data-title="CNIL">CNIL</a> and, together with the mayor of Gravelines Bertrand Ringot (Socialist Party), accused the institution of acting as a “permanent obstruction to the development of digital experiments” (Dudebout 2020, 5).</p>
</section>
<section id="effects-of-the-technologies-2" class="level2">
<h2>Effects of the technologies</h2>
<p>To our knowledge, there has not been any systematic ex-post impact assessment of the effects of these three experiments in the city of Nice.</p>
<p>The city of Nice asked the <a class="maplink" data-title="CNIL">CNIL</a> to provide an assessment of the Carnival experiment, but the <a class="maplink" data-title="CNIL">CNIL</a> refused to do so, arguing that not enough information had been communicated to them about the parameters of the experiment.</p>
<p>There are no systematic qualitative or quantitative studies of citizens' perception of these technologies in Nice. While the political opposition to these technologies has been documented, it would be erroneous to conclude that they are generally unpopular among the population. Surveys conducted at the national level, such as the one carried out by the organisation Renaissance Numérique, show that the public is generally supportive. While 51% of the polled citizens consider that the technologies are not transparent, do not sufficiently allow for consent and can potentially lead to mass surveillance, 84% consider their use justified for national security issues (kidnappings, terror attacks), 76% to secure important public events, and 72% to secure public spaces in general. Only when asked about their faith in private actors using the technologies properly do the confidence rates decline (38%) (Renaissance Numérique 2019).</p>
<p>As one press article reports, “For their part, many people in Nice do not seem to be hostile to this application”. The article further quotes a 72-year-old from Nice: “With terrorism, any measure that allows us to reinforce security seems desirable to me. On the condition that we don't give this application to just anyone” (Barelli 2018).</p>
</section>
</section>
<section id="facial-recognition-in-hamburg-mannheim-berlin-germany" class="level1 case" data-title="Pilot Project Südkreuz Berlin">
<h1>Facial Recognition in Hamburg, Mannheim &amp; Berlin (Germany)</h1>
<div class="keypoints">
<p><strong>Key points</strong></p>
<ul>
<li><p>The German federal police, in cooperation with the German railway company, conducted a project called “Sicherheitsbahnhof” at the Berlin railway station Südkreuz in 2017/18, which included 77 video cameras and a video management system.</p></li>
<li><p>The police in Hamburg used the facial recognition software <a class="maplink" data-title="Videmo">Videmo</a> 360 during the protests against the G20 summit in 2017. The database includes 100.000 individuals who were <a class="maplink" data-title="Facial Recognition during Hamburg G20 Summit">in Hamburg during the G20 summit</a> and whose profiles are saved in the police database. The technology allows for the determination of behaviour, participation in gatherings, preferences, and religious or political engagement.</p></li>
<li><p>Sixty-eight cameras were installed by local police on central squares and places in the German city of <a class="maplink" data-title="Mannheim Police">Mannheim</a> to record people's movement patterns. In this project, which started in 2018, the software is used to detect conspicuous behaviour.</p></li>
<li><p>Half of these deployments (Mannheim &amp; Berlin Südkreuz) took place as measures to test the effectiveness of facial recognition and behavioural analysis software. This “justification as a test” approach is often used in Germany to argue for a deviation from existing rules and societal expectations, and was similarly used to justify deviations from commonly agreed measures during the Coronavirus/COVID-19 pandemic.</p></li>
<li><p>Resistance to video surveillance is also in no small part a result of constant campaigning and protest by German civil society. The Chaos Computer Club and Digital Courage have consistently campaigned against video surveillance and any form of biometric or behavioural surveillance. The long-term effect of these “pilots” is to normalise surveillance.</p></li>
</ul>
</div> <!-- key points -->
<section id="rbi-deployments-in-germany" class="level2">
<h2>RBI Deployments in Germany</h2>
<p><strong>All the deployments of RBI we are aware of in Germany were conducted by law enforcement</strong>. The deployments range from using facial recognition software to analyse the German central criminal information system, to specific deployments in more targeted locations such as Berlin Südkreuz train station or Mannheim city centre, to deployments around specific events such as the G20 summit in Hamburg in 2017.</p>
<section id="pilot-project-südkreuz-berlin" class="level3">
<h3><a class="maplink" data-title="Pilot Project Südkreuz Berlin">Pilot Project Südkreuz Berlin</a></h3>
<p><strong><a class="maplink" data-title="German Federal Police (Bundespolizei)">The German federal police</a> (BPOL)</strong>, in cooperation with the <a class="maplink" data-title="Deutsche Bahn AG">Deutsche Bahn AG</a>, the German railway company, conducted a project called “Sicherheitsbahnhof” at the Berlin railway station Südkreuz in 2017/18. The project consisted of two parts: part one was done from August 2017 until January 2018 with 312 voluntary participants. Part two was carried out from February until July 2018, including 201 participants (Bundespolizeipräsidium Potsdam 2018).</p>
<p><strong>For the first project, 77 video cameras and a video management system were installed at the train station Berlin Südkreuz.</strong> Three cameras were used for <strong>biometric facial recognition during live monitoring</strong>. During the project, the systems BioSurveillance by the company <a class="maplink" data-title="Herta Security">Herta Security</a>, delivered by Dell EMC AG, Morpho Video Investigator (MVI) by <a class="maplink" data-title="IDEMIA">IDEMIA AG</a>, and <a class="maplink" data-title="AnyVision">AnyVision</a> by Anyvision were used and tested. To detect and identify faces, the systems worked based <strong>on neural networks using template-matching methods</strong>. For that purpose, images of the faces were recorded and converted into templates. Subsequently, the facial recognition software matched the unknown picture against the known templates saved in the reference database. As soon as a certain threshold of similarity was reached, the image was considered a match (see section 2.3 for a technical description). The reference database consisted of high-quality images of the participants. That means the photographs had to adhere to quality standards such as a neutral grey background, no shadow on the faces, enough lighting, low compression to avoid artefacts, high resolution, and a straightforward viewing direction (Bundespolizeipräsidium Potsdam 2018).</p>
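<p>For readers unfamiliar with the mechanics, the following is a minimal illustrative Python sketch (not the vendors' actual code) of the threshold-based template matching described above: face templates are treated as numeric feature vectors, and a probe face counts as a match only when its similarity to a stored template reaches a chosen threshold. The function names and the 0.6 threshold are assumptions for illustration.</p>
<pre><code>import numpy as np

def unit(v):
    # scale a template to unit length so the dot product is cosine similarity
    return v / np.linalg.norm(v)

def match_probe(probe_template, reference_db, threshold=0.6):
    """Return (person_id, score) for the best match, or (None, score) below threshold."""
    probe = unit(np.asarray(probe_template, dtype=float))
    best_id, best_score = None, -1.0
    for person_id, template in reference_db.items():
        score = float(np.dot(probe, unit(np.asarray(template, dtype=float))))
        if score > best_score:
            best_id, best_score = person_id, score
    if best_score >= threshold:
        return best_id, best_score   # reported as a hit
    return None, best_score         # below threshold: no match reported
</code></pre>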
<p><strong>For the first testing phase,</strong> the participants passed the designated area of the train station Berlin Südkreuz a total of 41.000 times. BioSurveillance had an average hit rate of 68,5%, MVI of 31,7%, and <a class="maplink" data-title="AnyVision">AnyVision</a> 63,1%. <strong>A combined hit rate by the interconnection of the three systems resulted in an increased total hit rate of 84,9%.</strong> <strong>The interconnection also increased the rate of false positives.</strong> The matches were logged but not saved (Bundespolizeipräsidium Potsdam 2018).</p>
<p><strong>For the second testing phase</strong>, the reference database consisted of participant images taken from the video stream of the first testing phase. For each participant, 2-5 images were extracted from the video stream. The images recorded during the second testing phase were generally of worse quality than those from the first phase. All systems used more than one picture as a reference to identify a person (Bundespolizeipräsidium Potsdam 2018). During the second phase, the interconnected systems had <strong>an average hit rate of 91,2%.</strong> BioSurveillance resulted in an average hit rate of 82,8%, MVI in 31,2%, and <a class="maplink" data-title="AnyVision">AnyVision</a> in 76,2%. The performance increased as the systems had more images as a reference (Bundespolizeipräsidium Potsdam 2018).</p>
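<p>A back-of-the-envelope sketch can clarify why interconnecting the three systems raises the combined hit rate but also the false-positive rate: in an OR-fusion, a person is flagged if any one system fires. The snippet below applies this logic to the phase-one hit rates quoted above under a simplifying independence assumption (our illustration, not the Bundespolizei's methodology).</p>
<pre><code>def or_fusion(rates):
    # probability that at least one system fires, assuming independence
    miss_all = 1.0
    for r in rates:
        miss_all *= (1.0 - r)   # probability that every system misses
    return 1.0 - miss_all

hit_rates = [0.685, 0.317, 0.631]   # BioSurveillance, MVI, AnyVision (phase one)
print(or_fusion(hit_rates))         # ~0.92 if the systems erred independently
# The observed combined rate of 84,9% is lower, indicating that the systems'
# errors are correlated (they tend to miss the same faces). The same union
# logic explains why the interconnection also raised the false-positive rate.
</code></pre>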
<p><strong>The <a class="maplink" data-title="Deutsche Bahn AG">Deutsche Bahn AG</a> used the existing infrastructure at the railway station Berlin Südkreuz for an experiment on behavioural analysis starting in June 2019.</strong> The tests were done twice a week during the day. Volunteers acted out situations that the system was meant to recognise and identify. After scanning people's behaviour, the software would alert the police or the railway company (Henning 2019). <strong>The police assembled a list of behaviours that should be recognised by the system: people lying down or entering certain zones of the train station (such as construction areas), groups of people or streams of people, objects that were set down such as luggage, and the positions of persons and objects</strong>. Furthermore, the system would count the number of people in certain areas and allow the analysis of the video data by the police. The software used in the tests is provided by <a class="maplink" data-title="IBM Germany Gmbh">IBM Germany GmbH</a>, the <a class="maplink" data-title="Hitachi Consortium">Hitachi Consortium</a> (<a class="maplink" data-title="Hitachi">Hitachi</a>, <a class="maplink" data-title="Conef">Conef</a>, <a class="maplink" data-title="MIG GA">MIG</a>), <a class="maplink" data-title="Funkwerk video systems Gmbh">Funkwerk video systems GmbH</a> and <a class="maplink" data-title="G2K Group Gmbh">G2K Group GmbH</a> (Bundespolizei 2019).</p>
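<p>Conceptually, this kind of behavioural alerting reduces to rules evaluated over per-frame detections produced by an upstream video pipeline. The sketch below is a minimal, hypothetical rendering of the behaviour list above; the data structure, zone names, and thresholds are our assumptions, not the specification of the deployed systems.</p>
<pre><code>from dataclasses import dataclass

@dataclass
class Detection:
    kind: str            # "person" or "object"
    posture: str         # e.g., "standing", "lying"
    zone: str            # e.g., "platform", "construction_area"
    stationary_s: float  # seconds without movement

RESTRICTED_ZONES = {"construction_area"}
LUGGAGE_LIMIT_S = 60.0   # assumed dwell time before an object is flagged

def alerts_for_frame(detections, crowd_limit=50):
    """Check one frame's detections against the behaviour list."""
    alerts = []
    people = [d for d in detections if d.kind == "person"]
    if len(people) > crowd_limit:
        alerts.append("crowd above limit")   # counting people in an area
    for d in detections:
        if d.kind == "person" and d.posture == "lying":
            alerts.append("person lying down")
        if d.kind == "person" and d.zone in RESTRICTED_ZONES:
            alerts.append("person in restricted zone")
        if d.kind == "object" and d.stationary_s >= LUGGAGE_LIMIT_S:
            alerts.append("object set down and left")   # e.g., luggage
    return alerts
</code></pre>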
</section>
<section id="hamburg-g20-summit" class="level3">
<h3>Hamburg G20 Summit </h3>
<p><strong>The police in Hamburg used the facial recognition software <a class="maplink" data-title="Videmo">Videmo</a> 360 (by <a class="maplink" data-title="Videmo">Videmo</a>)</strong> during the protests against the <a class="maplink" data-title="Facial Recognition during Hamburg G20 Summit">G20 summit in 2017</a> (Bröckling 2019). The database, comprising 100 TB of data, consists of material the police assembled while recording identities during investigations, and of data from external sources such as surveillance cameras in train stations, the BKA's online portal called “Boston Infrastruktur”, the internet, and the media. <strong>“Boston Infrastruktur” is a web portal made accessible to the public in July 2017, where people could upload images and videos</strong>. <strong>All data concerning the time and place of the G20 summit were included.</strong> Furthermore, data were assembled in 2017 during investigations of the special commission “Schwarzer Block” in the context of the G20 summit protests. The images were first detected and identified, meaning templates of the faces were made. <strong>Subsequently, experts checked the material manually (Caspar 2018). The database includes 100.000 individuals who were in Hamburg during the G20 summit and whose profiles are saved in the police database.</strong> The technology allows for the determination of behaviour, participation in gatherings, preferences, and religious or political engagement (Bröckling 2019).</p>
</section>
<section id="mannheim-public-surveillance" class="level3">
<h3><a class="maplink" data-title="Mannheim public surveillance">Mannheim public surveillance</a></h3>
<p><strong>68 cameras were installed by local police on central squares and places in the German city of Mannheim to record people's movement patterns.</strong> In this project, which started in 2018, software developed by the <strong><a class="maplink" data-title="Frauenhofer Institute Karlsruhe">Fraunhofer Institute of Optronics</a> in Karlsruhe</strong> is used to detect conspicuous behaviour. The police are alerted by the cameras and further investigate the situation they have observed on camera (Reuter 2018). The cameras were placed in areas with increased incidences of criminal activity. On average, only two minutes pass between the system's alert and the police intervention. <strong>As the software is learning, it is increasingly able to detect criminal or violent activity. However, the alerts are sometimes incorrect; for instance, the system cannot recognise that a hug is not dangerous (heise online 2020).</strong> The software is continuously tested and adapted to be suitable for public spaces. Twenty cameras are used to test the software (Ministerium für Inneres 2020).</p>
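<p>The hug example illustrates the threshold trade-off inherent in such systems: an alert fires when a behaviour classifier's score crosses a cut-off, and visually similar actions (a hug versus a grapple) can land on the wrong side of it. The toy sketch below, with entirely invented scores, shows how moving the threshold trades false alarms against missed incidents.</p>
<pre><code># Toy illustration only: scores are invented, not from the Mannheim system.
events = [
    ("fight breaking out", 0.91),
    ("hug between friends", 0.74),   # visually similar to a grapple
    ("person jogging", 0.12),
]

def alerts(events, threshold):
    return [name for name, score in events if score >= threshold]

print(alerts(events, 0.60))  # flags the hug too: a false positive
print(alerts(events, 0.80))  # fewer false alarms, but borderline
                             # incidents may now be missed
</code></pre>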
</section>
</section>
<section id="legal-bases-and-challenges-3" class="level2">
<h2>Legal bases and challenges</h2>
<p><strong>The question of the legal permissibility of the examples of biometric video surveillance described above requires a brief description of the constitutional and legislative framework</strong> for the protection of privacy and personal data, and of the police powers granted under German law in relation to the use and processing of personal data.</p>
<p>The general right of personality based on <strong>Articles 2(1) and 1(1) of the German Constitution</strong> <strong>protects individuals against the collection, storage, and use of their personal data by public authorities (Eichenhofer and Gusy, 2017).</strong> The basic right to informational self-determination guarantees the authority to decide on the disclosure and also on the type of use of one's personal data (BVerfG, judgment of 15 December 1983 - 1 BvR 209/83, para. 149).</p>
<p><strong>Germany adopted a new Federal Data Protection Act (BDSG) to make use of the discretionary powers and national-law openings contained in the GDPR</strong>. The <strong>BDSG</strong> also contains data protection provisions on the processing of personal data by activities of public federal bodies which do not fall within the scope of Union law (e.g., intelligence services, Federal Armed Forces) (Part 4, BDSG) and implements the <strong>LED</strong> (Part 3, BDSG).</p>
<p><strong>Paragraph 22 of the BDSG sets out lawful purposes additional to those listed in Article 9 of the GDPR</strong> for which sensitive data may be processed. For the purposes of this report, the lawful purposes that are relevant for public bodies' processing operations are the following: (i) processing is <strong>urgently necessary for reasons of substantial public interest</strong>; (ii) processing is necessary to <strong>prevent substantial threats to public security</strong>; (iii) processing is urgently necessary to <strong>prevent substantial harm to the common good or to safeguard substantial concerns of the common good</strong>; (iv) processing is necessary for <strong>urgent reasons of defence or to fulfil supra- or intergovernmental obligations of a public body</strong>. In each case, the interests sought with any of these purposes must outweigh the data subject's interests. <strong>Paragraph 22 of the BDSG</strong> further imposes obligations, such as access restriction and encryption, in relation to implementing appropriate safeguards to protect the data subject's interests when the processing is carried out based on the above purposes. Furthermore, <strong>§27 of the BDSG</strong> envisages the <strong>processing of sensitive data for scientific or historical research purposes or statistical purposes subject to certain conditions</strong>.</p>
<p>In regard to the processing of sensitive data for law enforcement purposes, <strong>§48 of the BDSG</strong> permits the processing only where it is <strong>strictly necessary for the performance of the competent authority's task</strong>, and subject to the <strong>existence of certain safeguards such as those in relation to data security and encryption</strong>.</p>
<p>In terms of the further use of the data, <strong>§23 of the BDSG</strong> designates purposes for which personal data <strong>may be processed other than the initial intended purpose</strong>, such as where it is necessary to prevent substantial harm to the common good, a threat to public security, defence, or national security, or where it is necessary to prevent serious harm to others' rights. <strong>§49 of the BDSG</strong> lays out the rules for the <strong>processing of personal data for law-enforcement purposes</strong> other than the initial intended law enforcement purpose.</p>
<p>Moreover, the <strong>BDSG devotes a specific section to the processing of personal data while conducting video surveillance</strong>. Pursuant to <strong>§4 of the BDSG</strong>, video surveillance of public spaces is <strong>permitted only as far as it is necessary</strong> (i) for public bodies to perform their tasks; (ii) to exercise the right to determine who shall be allowed or denied access, or (iii) to safeguard legitimate interests for specifically defined purposes. There should be nothing to indicate that the data subject's legitimate interest overrides the interest protected by any of the respective purposes, and protecting the lives, health and freedom of people should be considered a very important interest (§4, BDSG). More importantly, the <strong>data collected through the use of video surveillance can be further processed if it is necessary to prevent threats to state and public security and to prosecute crimes (§4(4), BDSG).</strong> The same section further provides conditions for notification at the earliest possible moment about the surveillance, for informing the data subject whose personal data may be collected as a result of the surveillance, and for the deletion of the data if it is no longer necessary.</p>
<p><strong>The BDSG restricts the application of certain data subject rights</strong> enshrined in the GDPR, such as the right to be informed (§33) and the right to request access (§34). §37 of the Act provides a sectoral exception to the prohibition against solely automated decision-making in relation to the provision of services pursuant to an insurance contract. In relation to the processing of personal data for law enforcement purposes, <strong>the BDSG permits solely automated decision-making if it is authorised by law (§55)</strong>. Nevertheless, the decision cannot be based on sensitive data unless there are suitable safeguards for the data subject (§55(2)). In any case, it <strong>provides an explicit prohibition against conducting profiling that may discriminate against people based on their sensitive data (§55(3)).</strong></p>
<p><strong>The collection of personal data in general, and facial images in particular, in criminal investigation proceedings is authorised under German law by the Federal Police Act (<em>Gesetz über die Bundespolizei</em>) (BPolG),</strong> by the Act on the Federal Criminal Police Office and the Cooperation of the Federal and State Governments in Criminal Police Matters (<em><a class="maplink" data-title="German Federal Criminal Police Office (Bundeskriminalamt)">Bundeskriminalamtgesetz</a></em>) (BKAG), the Code of Criminal Procedure (<em>Strafprozessordnung</em>) (StPO), and the police acts of the <em>Länder</em>.</p>
<p><strong>§24 of the BPolG grants the Federal Police the authority to take photographs, including image recordings, of a person subject to specific conditions</strong>. Moreover, <strong>§26 of the BPolG</strong> entrusts the Federal Police with the power to collect personal data by making <strong>picture and sound recordings of participants in public events or gatherings</strong> if facts justify the assumption that there are significant risks to border security or to categories of people or objects. §27 of the BPolG further authorises the use of <strong>automatic image recording</strong>, albeit in relation to security risks at the border or to categories of people or objects. Each section provides the obligations for the deletion of the data after a specific timeframe.</p>
<p><strong>The BKAG provides the rules for information collection by the Federal Criminal Police Office in its information system</strong>, established pursuant to §13 of the BKAG. §12 of the Act allows the processing of personal data by the Office for purposes other than those for which they were collected in order to <strong>prevent, investigate, and prosecute serious crimes</strong>. Additionally, the personal data of people who are convicted of, accused of, or suspected of committing a crime, and for whom there are factual indications that they may commit crimes of considerable importance in the near future, may be processed to identify that person (§12, para. 5, BKAG). The same Article states that personal data obtained by taking photos or image recordings of a person by means of covert use of technical means in or outside of homes may not be further processed for law enforcement purposes.</p>
<p><strong>§81b of the StPO grants the police the authority to obtain the photographs and fingerprints of a suspect and to take their measurements in order to conduct criminal proceedings</strong>. §100h of the StPO covers the police power to conduct <strong>covert surveillance measures,</strong> which includes the recording of photographs and other images of the person concerned outside of private premises where other means of establishing the facts or determining an accused's whereabouts would offer less prospect of success or would be more difficult. <strong>In terms of the investigative powers of the police</strong> to use personal data in general, §98c of the StPO <strong>grants the authority to automatically match personal data from criminal proceedings with other data stored for the purposes of criminal prosecution or the enforcement of a sentence, or in order to avert a danger.</strong> This is, however, subject to the specific rules under federal law or <em>Länder</em> law. §483 of the StPO authorises a number of authorities to process personal data where necessary for the purposes of criminal proceedings, including for criminal proceedings other than the one for which the data were collected. §484 of the StPO allows for the processing of personal data for future criminal proceedings.</p>
</section>
<section id="mobilisations-and-contestations-3" class="level2">
<h2>Mobilisations and contestations</h2>
<p>What is notable about these deployments is that two thirds of them (Mannheim &amp; Berlin Südkreuz) <strong>took place as measures to test the effectiveness of facial recognition and behavioural analysis software</strong>. This “justification as a test” approach is often used in Germany to argue for a <strong>deviation from existing rules and societal expectations, and was similarly used to justify deviations from commonly agreed measures during the Coronavirus/COVID-19 pandemic</strong>. Similar special justifications were used for <strong>biometric surveillance around the G20 in Hamburg</strong>, which was justified by referencing the <strong>exceptional security threats associated with these large summits</strong>. Thus, three out of four implementations of biometric or behavioural surveillance in Germany - and all of those using video data - <strong>require special justifications in order to be able to take place at all</strong>. Notably, German civil society actors such as the Chaos Computer Club - a central civil society watchdog promoting fundamental rights in the digital age - have criticised these “tests” as not being very scientific in nature (Chaos Computer Club 2018).</p>
<p><strong>The Berlin experiments were criticised as being unscientific in the handling of the data</strong>, the low number of participants, and the use of high-quality pictures in the database. Moreover, the Chaos Computer Club asserted that the <strong>results would not justify using the technology on a bigger scale, as it did not function very well due to its low hit rate</strong> (Chaos Computer Club 2018). The fact that such special justifications are even needed in order to conduct biometric or behavioural surveillance in Germany suggests that it is highly unpopular in society. Both the <strong>German public and civil society have argued strongly against all forms of video surveillance,</strong> which is itself already uncommon compared to many other places in Europe. In this context, <strong>biometric or behavioural surveillance has been very difficult to justify.</strong> Even when behavioural surveillance projects receive approval from data protection authorities (Wazulin 2019a), these projects are still criticised for not taking privacy sufficiently seriously (Wazulin 2019b).</p>
<p><strong>However, outside of Mannheim, German DPAs have been one of the central actors contesting biometric and behavioural video surveillance</strong>. Once given the opportunity to analyse the usage of biometric video surveillance during the G20, <strong>the Hamburg data protection</strong> <strong>authority</strong> (<em>Hamburgische Beauftragte für Datenschutz und Informationsfreiheit</em>) <strong>found this use to be in breach of data protection laws</strong> (Schemm 2018). It considered that the StPO <strong>did not provide the legal basis to authorise that surveillance, on the grounds that the facial recognition took place independently of the initiation of a specific investigation</strong> (DPA Hamburg, 12). The Hamburg DPA further argued that §98c of the StPO did not <strong>provide the legal basis for authorisation because it covers only the comparison of the data and assumes the legality of the rest of the data processing cycle (e.g., collection and storage)</strong>. A criminal suspicion is not a pre-requisite for conducting the comparison, and, on that basis, the Hamburg DPA argued that §98c authorised only minor interferences with fundamental rights (25). Following these arguments on the legality of the biometric video surveillance at the G20 summit, <strong>the Hamburg DPA ordered the database of data collected by the police during that surveillance to be deleted</strong> (Caspar 2018), but was unsuccessful in a legal battle with the Hamburg police to compel them to delete the database (Bröckling 2019).</p>
<p><strong>Resistance against video surveillance is also in no small part a result of constant campaigning and protest by German civil society.</strong> The <strong>Chaos Computer Club and Digital Courage</strong> have consistently campaigned against video surveillance and any form of biometric or behavioural surveillance. <strong>The widely popular online blog Netzpolitik.org</strong> has also reported extensively on video surveillance technologies, as have other leading German media outlets like Sueddeutsche Zeitung or Die Zeit. As a result, it is <strong>difficult to implement biometric or behavioural surveillance in Germany without being noticed by civil society and engaging in a public debate about whether these forms of surveillance are appropriate</strong>. Therefore, biometric or behavioural surveillance can only be found in a limited set of cases in Germany, for which purported tests or exceptional justifications are typically required.</p>
</section>
<section id="effects-of-the-technologies-normalising-surveillance" class="level2">
<h2>Effects of the technologies: normalising surveillance</h2>
<p><strong>As there have only been a few implementations of behavioural or biometric surveillance in Germany, many of which have been as part of tests or for “exceptional circumstances”, their effects are relatively hard to measure.</strong> In some cases this can lead to a normalisation of video surveillance, as was the case in Hamburg (Gröhn 2017). The video surveillance cameras that were installed for the G20 summit remain in use and additional video surveillance cameras have since been installed.</p>
<p><strong>All of the video data stored by the Hamburg police during the G20 remains stored by the police and even if the Hamburg data protection authority believes that it should be removed, deletion is not currently possible.</strong> This video data includes several days of footage from central Hamburg from 6-10 July 2017 and includes many people going about their daily lives without any indication of committing a crime (Monroy 2018).</p>
<p><strong>Another element of normalisation concerns the integration of biometric facial recognition for historical data using the <a class="maplink" data-title="German central criminal information system INPOL">German central criminal information system INPOL</a>.</strong> Historical usage data shows a systematic year-on-year increase in the number of requests made to the system by the German police (Monroy 2020), even though the number of criminal offences has gone down steadily over the past decade (Statista 2021).</p>
<p><img src="images/media/image4.png" style="width:6.07974in;height:3.33433in" alt="Bar chart showing the growth in police requests to the INPOL system" /></p>
<p>Figure 4. Growth in police requests to INPOL system<a href="#fn43" class="footnote-ref" id="fnref43" role="doc-noteref"><sup>43</sup></a></p>
</section>
</section>
<section id="the-dragonfly-project-hungary" class="level1 case" data-title="Dragonfly Project">
<h1>The Dragonfly project (Hungary)</h1>
<div class="keypoints">
<p><strong>Key points</strong></p>
<ul>
<li><p>The Hungarian Government led by Prime Minister Viktor Orbán has long been on a collision course with EU Institutions over the rule of law and the undermining of the country's judicial independence and democratic institutions.</p></li>
<li><p>Hungary is a frontrunner in Europe when it comes to authorising law enforcement's use of Facial Recognition Technology, developing a nationwide and centralised database (the <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a>), and using the <a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App</a> as part of the Government's Coronavirus measures.</p></li>
<li><p>The infrastructure in place that potentially allows for a centralised deployment of biometric mass surveillance technologies in Hungary has reached an unprecedented scale while the legal and ethical scrutiny of these technologies lags dangerously behind.</p></li>
<li><p>This is due to (1) the overlap between the private and public sectors, specifically government institutions, and (2) the complex entanglements biometric systems have with other information systems (such as car registries, traffic management, public transport monitoring and surveillance, etc.).</p></li>
<li><p>Although the latter are not concerned with the traces of the human body, they can nonetheless be used for and facilitate biometric mass surveillance. These entanglements create grey zones of biometric mass surveillance where the development and deployment of such technologies is hidden from visibility and critical scrutiny.</p></li>
<li><p>The <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a> has elicited numerous warnings regarding data protection and the right to privacy from both public and private organisations. However, the lack of contestation and social debate around the issues of privacy and human rights in relation to projects such as the Hungarian Government's Dragonfly is striking.</p></li>
</ul>
</div> <!-- key points -->
<p>Under the Government of Prime Minister Viktor Orbán, Hungary has been on a collision course with EU Institutions. It has centralised and consolidated its power by marginalising civil society and curtailing the autonomy of Hungarian media, cultural and higher education institutions (Csaky 2020; Gehrke 2020; Verseck 2020). Orbán's continued <strong>erosion of the country's democratic institutions</strong> was further advanced with the 2020 adoption of an emergency law which allows the government to rule by decree (Schlagwein 2020; Stolton 2020). In this context, the latest developments in the use of biometric identification technologies in Hungary raise serious concerns regarding the rule of law, human rights and civil liberties.</p>
<p>Hungary is a frontrunner in Europe when it comes to authorising law enforcement's use of Facial Recognition Technology, developing a nationwide and centralised database, and using the <a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App</a> as part of the Government's Coronavirus measures. The infrastructure in place that potentially allows for a <strong>centralised deployment of biometric mass surveillance technologies</strong> in Hungary has reached an unprecedented scale, while the legal and ethical scrutiny of these technologies lags dangerously behind. This is due to (1) <strong>the overlap between the private and public sectors</strong>, specifically government institutions, and (2) the <strong>complex entanglements biometric systems have with other information systems (such as car registries, traffic management, public transport monitoring and surveillance, etc.).</strong> Although the latter are not concerned with the traces of the human body, they can nonetheless be used for and facilitate biometric mass surveillance. These entanglements create <strong>grey zones</strong> of biometric mass surveillance where the development and deployment of such technologies is hidden from visibility and critical scrutiny.</p>
<h2 id="remote-biometric-identification-in-hungary">Remote Biometric Identification in Hungary</h2>
<h3 id="the-hungarian-polices-use-of-facial-recognition">The <a class="maplink" data-title="Hungarian Police">Hungarian Police</a>'s use of Facial Recognition</h3>
<p>On 10 December 2019, the Hungarian Parliament passed a package of amendments to acts governing the work of law enforcement in Hungary. Entitled “the simplification and digitisation of some procedures”, this adjustment legalised <strong>the use of forensic but also live FRT by the <a class="maplink" data-title="Hungarian Police">Hungarian Police</a></strong> (Hungarian Parliament 2019). In cases where a person identified by the police cannot present an ID document, the police agents can take a photograph of the individual on location, take fingerprints, and record the biometric data based on the “perception and measurement” of external characteristics. The photo taken on location can be instantly verified against the database of the national registry of citizens. The <strong>automatic search</strong> is performed by a face recognition algorithm, and the five closest matches are returned to the police agent who, based on these photos, proceeds with identifying the person (1994. Évi XXXIV. Törvény, para 29/4(a)). This application of FRT does not fall under the category of mass surveillance; however, it is only possible due to <strong>a central system which collects and centralises the national and other biometric databases</strong> and also provides the technical support for accessing them in a quick and effective way by various operational units - in this instance, by the patrolling police.</p>
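<p>The “five closest matches” workflow described above is, in essence, a top-k nearest-neighbour search over face templates. The following is a hedged, illustrative sketch of that retrieval step (all names, the embedding representation, and the use of cosine similarity are our assumptions, not details of the Hungarian system); the final identification is made by the police agent from the returned candidates, not by the algorithm.</p>
<pre><code>import numpy as np

def top_k_candidates(probe_embedding, registry, k=5):
    """registry: dict mapping citizen_id to a unit-length template vector."""
    probe = np.asarray(probe_embedding, dtype=float)
    probe = probe / np.linalg.norm(probe)
    scored = [(float(np.dot(probe, tpl)), cid) for cid, tpl in registry.items()]
    scored.sort(reverse=True)   # highest cosine similarity first
    return [(cid, score) for score, cid in scored[:k]]
</code></pre>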
<section id="the-dragonfly-szitakötő-project" class="level3">
<h3>The Dragonfly (Szitakötő) Project</h3>
<p>In 2018 the <a class="maplink" data-title="Ministry of Interior (Hungary)">Ministry of Interior</a> presented a bill in the Hungarian Government that proposed a <strong>centralised CCTV system with data stored in one centralised database called the Governmental Data Centre</strong> (Kormányzati Adatközpont, abbreviated as KAK). All governmental operations aimed at developing this centralised database run under the name <strong>Szitakötő (Dragonfly)</strong>. This central storage facility collects surveillance data of public spaces (streets, squares, parks, parking facilities, etc.), the Centre for Budapest Transport (BKK), bank security, and the Hungarian Public Road PLC. The project, with an estimated budget of 50 billion forints (160 million euros), proposes to centralise about <strong>35.000 CCTV cameras and 25.000 terabytes of monitoring data</strong> from across the country (NAIH 2018). While the project - and notably the response of Dr. Attila Péterfalvi, head of the Hungarian Data Protection Authority (Hungarian National Authority for Data Protection and Freedom of Information, NAIH), who warned of the lack of data protection considerations in the bill - has been widely covered in the media, this has done little to halt the Project, which has already been rolled out. In 2015 the Hungarian company GVSX Ltd had already been contracted (NISZ-GVSX 2019) to implement an Integrated Traffic Management and Control System called IKSZR (Integrált Közlekedésszervezési és Szabályozási Rendszer) that centralises data from various systems such as ANPR cameras, car parks, traffic monitoring, meteorological data, etc. The <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a> has been designed as an expansion of this system, <strong>centralising the data flowing from the IKSZR system, the databases of the National Infocommunication Services (NISZ) and also CCTV data from other public and private surveillance systems</strong> such as those operated by local governments, public transport companies and banks.</p>
<p>The technical description of the <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a> does not make any explicit reference to (live) facial recognition technology; the system does, however, <strong>collect, store and search, in real time, video surveillance footage from 35.000 CCTV cameras</strong>. From the reports of the <a class="maplink" data-title="HCLU">Hungarian Civil Liberties Union</a> (HCLU, or TASZ in Hungarian) and the DPA, it is known (NAIH 2019, 139) that <strong>to some extent FRT has been used by the Secret Service for National Security (SSNS)</strong>, one of the national security services of Hungary. According to the DPAs investigation, all the cases in which FRT has been used concerned <strong>concrete (criminal) cases, such as searches for a missing person or someone under warrant</strong>. These cases were also <strong>limited to specific geographic locations</strong> (NAIH 2019). According to the DPAs investigation, in 2019 the FRT system operated by the SSNS found 6.000 matches, which resulted in around 250 instances of stop-and-search and 4 arrests (NAIH 2019). The numbers for 2020 (3 matches, 28 instances of stop-and-search, an unknown number of arrests) are inconsistent with those given for 2019; however, this is probably due to the fact that <strong>the system has since been moved primarily to the jurisdiction of the <a class="maplink" data-title="Hungarian Police">Hungarian Police</a></strong>.</p>
<p>While the legal framework for police checks does refer to the use of facial recognition technologies, the national security act does not mention it. This is even more striking as the SSNS is <strong>known to be using FRT to provide the national security services, the police, and other authorised institutions (e.g., the prosecutors office or the tax office) with classified information</strong>.</p>
<p>Two interrelated companies are responsible for the development, maintenance, and administration of this single central system: <strong>NISZ and <a class="maplink" data-title="IdomSoft">IdomSoft</a> Ltd., both owned by the state.</strong> NISZ, or National Infocommunication Services, is a 100% state-owned company that in 2020 alone signed six contracts to purchase the necessary <strong>hardware, storage, and other IT equipment for implementing the <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a></strong>. While public procurement documents (Közbeszerzési Hatóság, 2020) bear witness to the ongoing investments in and development of the <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a> by the Hungarian Government, a comprehensive overview of the project, the stages of its implementation or its budget is nowhere to be found.</p>
<p>The other company responsible for the administration of the <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a> is <a class="maplink" data-title="IdomSoft">IdomSoft</a>, a member of the so-called NISZ group. IdomSoft is a 100% indirectly state-owned company (indirect ownership meaning that the state holds its shares not directly but through authorised state institutions or other organisations) that, according to its website, “plays a leading role in the <strong>development, integration, installation and operation of IT systems of national importance</strong>”. Apart from administering the national Dragonfly database, IdomSoft also ensures the <strong>interoperability of the various national databases</strong>, such as the citizens registry, passport and visa databases, car registries, and police alerts, and it connects the Hungarian databases to the <strong>Schengen Information System</strong> (SIS II).</p>
<p>Since the implementation of the <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a>, the Hungarian government has been collecting video surveillance data that is centralised in the <strong>Governmental Data Centre</strong> (Kormányzati Adatközpont), in the same location and by the same institutions that administer the national registry of citizens, visa entries, police databases, and other e-governmental databases, such as those related to social security, tax or health records.</p>
<p>While the COVID-19 pandemic brought a temporary halt to movement in public spaces, it also facilitated the <strong>introduction of new tracking technologies.</strong> Hungary is one of two countries in Europe (the other being Poland) to introduce a <strong><a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App</a></strong> which uses automated face recognition technology to verify that people stay in quarantine for the required time.</p>
</section>
<section id="the-normalisation-of-biometric-surveillance-at-home-the-hungarian-home-quarantine-app" class="level3">
<h3>The normalisation of biometric surveillance at home: The Hungarian <a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App</a></h3>
<p>In May 2020 the Hungarian authorities rolled out two digital applications: the contact-tracing app called <strong>VirusRadar</strong> (Kaszás 2020) and the <strong><a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App</a></strong> (Házi Karantén Rendszer, abbreviated HKR). Both are centralised tracing apps, meaning that they send contact logs with pseudonymised personal data to a central (government) back-end server (Council of Europe 2020, 28). While VirusRadar uses only Bluetooth data on the proximity of other devices, the <strong>HKR processes biometric data</strong> when comparing facial images of its users.</p>
<p>Those who, according to the COVID-19 regulations in Hungary, are confined to home quarantine are offered the option of using the app instead of being checked by the police. For those who return from abroad, the use of the app is compulsory. But even those who can choose are encouraged by the authorities to make use of the HKR app; otherwise they will be subjected to frequent visits by police agents. <strong>Once a person downloads the app, its use becomes compulsory</strong>, and failure to use it or attempts to evade its tracking are considered an administrative offence. From a data protection law point of view, this is a clear case where the data subjects consent (and in the case of biometric data, their explicit consent) cannot provide the lawful ground for the processing of data through the app (see section 4.2.2). Even if the processing can be based on another lawful ground, such as public interest, the punitive nature of non-compliance may raise issues in terms of adhering to the necessity test, which requires a balancing act between the objective pursued and the data subjects interests.</p>
<p>The HKR app is <strong>developed by <a class="maplink" data-title="Asura Technology">Asura Technologies</a> and implemented by <a class="maplink" data-title="IdomSoft">IdomSoft</a> Ltd.</strong>, the same company that provides the software and technical implementation for the nation-wide <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a>. The HKR application works with <strong>face recognition technology combined with location verification</strong>. The application sends notifications at random times prompting the user to <strong>upload a facial image</strong> while retrieving the location data of the mobile device. The user must respond within 15 minutes, and the location data must match the address registered for quarantine. In order for the <a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App</a> to work, the user first needs to upload a facial image, which is compared by a police officer with the photo of the same individual stored in the central database. After this <strong>facial verification</strong>, the app creates <strong>a biometric template on the mobile phone of the user</strong> and the photo is deleted. Subsequent photos are compared only to this biometric template, so neither the photos nor the template leave the personal device. If there is suspicion about the identity or whereabouts of the user, a police officer visits the address to make sure that the person is adhering to the quarantine rules.</p>
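<p>The check-in flow described in the preceding paragraph can be summarised in a short sketch. All names and the location tolerance are hypothetical illustrations of our own; only the 15-minute response deadline and the on-device template comparison come from the description above.</p>
<pre><code># Hypothetical sketch of one HKR check-in, based on the description
# above; this is not the app's actual code. The biometric comparison is
# passed in as a boolean because it happens on-device, against a locally
# stored template that never leaves the phone.
import math
import time

RESPONSE_DEADLINE_S = 15 * 60  # the user must respond within 15 minutes

def near_quarantine_address(location, home, max_metres=100.0):
    """Crude equirectangular distance check (the tolerance is our assumption)."""
    dx = (location[1] - home[1]) * math.cos(math.radians(home[0])) * 111_320
    dy = (location[0] - home[0]) * 111_320
    return max_metres >= math.hypot(dx, dy)

def evaluate_checkin(prompt_sent_at, face_matches_template, location, home):
    if time.time() - prompt_sent_at > RESPONSE_DEADLINE_S:
        return "missed"   # no reply in time: treated as a failed check
    if face_matches_template and near_quarantine_address(location, home):
        return "ok"
    return "flagged"      # doubt about identity or location: police visit
</code></pre>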
<p>Interestingly, the HKR app, just like the contact-tracing app VirusRadar (which was developed by Nextsense), has been <strong>“donated” to the Hungarian Government by <a class="maplink" data-title="Asura Technology">Asura Technologies</a> “free of charge”</strong>.</p>
<p><img src="images/media/image5.png" style="width:3.1203in;height:1.7525in" alt="Graphical user interface, application, chat or text message Description automatically generated" /><img src="images/media/image6.png" style="width:3.10526in;height:1.74405in" alt="A picture containing text, screenshot, monitor Description automatically generated" /></p>
<p>Figure 5. Snapshots from the video “Home Quarantine System Short Presentation” by Asura Technologies<a href="#fn44" class="footnote-ref" id="fnref44" role="doc-noteref"><sup>44</sup></a></p>
</section>
<section id="legal-bases-and-challenges-4" class="level2">
<h2>Legal bases and challenges</h2>
<p>The creation of a <strong>nation-wide and centralised database</strong> that uses facial recognition technology may raise important legal questions about its compliance with the constitutional framework and the data protection legislation. <strong>Article 6 of the Fundamental Law of Hungary</strong> affirms the right to privacy and the right to protection of personal data. These rights are implemented by the Act on the <strong>Right to Informational Self-Determination and Freedom of Information</strong> (<em>2011. évi CXII. Törvény az információs önrendelkezési jogról és az információszabadságról</em>) (<em>Infotv</em>), which was amended in 2018 to exercise the discretionary powers, and apply the national-law options, contained in the GDPR. With the amendments, the Act also provides rules for the data processing activities that fall outside the scope of the GDPR and implements the LED. The sectoral laws on the processing of personal data have been amended as of 2019 to comply with the GDPR.</p>
<p>The Infotv <strong>permits the processing of sensitive data where:</strong> (i) the processing is <strong>necessary and proportionate</strong> to protect the vital interest of the data subject or another person; (ii) the data is made <strong>publicly available</strong> by the data subject; (iii) the processing is absolutely necessary and proportionate for the implementation of an <strong>international treaty, or is required by law for the enforcement of fundamental rights, national security, or the prevention, detection or prosecution of criminal offences</strong> (§5). Furthermore, (non-sensitive) “personal criminal data” (<em>bűnügyi személyes adat</em>), which is personal data obtained during criminal justice proceedings, can only be processed by state or municipal bodies for the prevention, detection and prosecution of criminal offences and for administrative and judicial tasks, as well as in criminal, civil and non-judicial matters (§5(4)).</p>
<p>With regard to the data subjects rights, notably, the Infotv permits <strong>solely automated decision-making</strong>: a decision based solely on automated processing may be taken if this is permitted by national or EU law and subject to certain conditions (§6). Such a decision <strong>can be based on sensitive data if it is authorised by national or EU law</strong> (§6(c)).</p>
<p>Recently, the Hungarian Government issued a Decree <strong>(Decree No. 179/2020 of 4 May)</strong> in response to the COVID-19 pandemic, for which it had declared <strong>a “state of emergency” (Stolton 2020)</strong>. The Decree <strong>restricts the scope of a number of the data subjects rights</strong>, such as the right to be informed. The EDPB (2021b) was highly critical of those restrictions. It considered in particular that although a state of emergency adopted in the context of a pandemic may serve as a circumstance triggering Article 23 of the GDPR, according to which EU Member States can restrict the scope of the data subject rights and certain data protection principles (see section 4.2.2), those states must nevertheless adhere to the guarantees enshrined in the same Article for the restrictions to be legal under the GDPR (ibid). It further emphasised the fundamental rights requirements that must be observed, and noted that <strong>a general blanket restriction on the scope of the data subjects rights would infringe upon the essence of fundamental rights</strong> (ibid).</p>
<p>In terms of the <strong>public authorities power to use sensitive data in relation to criminal proceedings</strong>, § 269 of the Criminal Procedure Act (<em>2017. évi XC. Törvény a büntetőeljárásról</em>) authorises the prosecutor's office, the investigating authority, and the crime prevention, detection and counter-terrorism bodies of the police <strong>to request the existing biometric data</strong> held in accordance with the Act on the criminal registry system, the registry of judgments against Hungarian citizens passed by the courts of Member States of the European Union, and the registry of criminal and police biometric data (<em>2009. évi XLVII. Törvény a bűnügyi nyilvántartási rendszerről, az Európai Unió tagállamainak bíróságai által Magyar állampolgárokkal szemben hozott ítéletek nyilvántartásáról, valamint a bűnügyi és rendészeti biometrikus adatok nyilvántartásáról</em>), and to <strong>request facial image analysis from the body responsible for the management and operation of the facial image register.</strong></p>
<p>The Act on the Facial Image Analysis Registry and the Facial Image Analysis System (<em>2015. évi CLXXXVIII. Törvény az arcképelemzési nyilvántartásról és az arcképelemző rendszerről</em>) creates <strong>a registry for the processing of biometric data</strong> in relation to identifications at border crossings and for law enforcement purposes (§3 and §4), and it provides <strong>a list of a number of authorities that can request facial image analysis</strong> (§9). It is reported that the <strong>Special Service for National Security</strong>, which carries out secret surveillance operations under the National Security Services Act (<em>1995. évi CXXV. Törvény a nemzetbiztonsági szolgálatokról</em>), has <strong>broad powers to request data from the registry</strong> (Hidvégi and Zágoni, 2016).</p>
<p>As mentioned repeatedly in this section, the Dragonfly Project was introduced through legal amendments to a range of laws (see section 10.1) and was subject to criticism by the NAIH (see section 10.3). It thus remains to be seen whether the legal basis of the project would also satisfy the ECHR and Charter requirements.</p>
</section>
<section id="mobilisations-and-contestations-4" class="level2">
<h2>Mobilisations and contestations</h2>
<p>The <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a> has elicited <strong>numerous warnings</strong> regarding data protection and the rights to privacy from both public and private organisations (TASZ 2021). The <strong>Hungarian National Authority for Data Protection and Freedom of Information (NAIH)</strong>, in October 2018 filed a communique (NAIH 2018) in which it stresses the problems raised by the centralisation and storing of visual data from as many as 35.000 CCTV cameras from all over the country and public transport facilities resulting in 25.000 terabytes of surveillance data.</p>
<p>The main concerns, according to the NAIH, stemmed from the fact that <strong>once the surveillance data is centralised, the collecting bodies stop being the official administrators of these databases</strong>. Moreover, they wont even know how and by whom the data is collected, accessed and utilised, or for what purposes. What is even more worrisome, according to this communiqué, is that the <strong>centralised database (the Governmental Data Centre) would not administer the data either; it would only process it</strong>. Therefore, while the database can be accessed and more or less freely “used” by a number of clients (such as government organisations, law enforcement and the secret services), there is <strong>no legal body that is responsible for applying the data protection measures or that would be liable in case of transgressions.</strong> Eventually the government incorporated some of the suggestions: the uploading bodies remain the owners of the data, and it is to them that the various authorised bodies (e.g., the <a class="maplink" data-title="Hungarian Police">Hungarian Police</a>) have to address their requests for access to the database.</p>
<p>Independent Hungarian media has also picked up the news. For instance, Hungarys <strong>leading independent economic and political weekly HVG</strong> published an article outlining the bill and citing the head of the NAIH (Dercsényi 2018). Interestingly, the article starts with a correction in which HVG expresses its regret for having violated the good reputation of the Ministry of the Interior by claiming that the bill had not incorporated the suggestions from the NAIH, which is not true (Dercsényi 2018); the body of the article nevertheless still makes that claim. <strong>Other liberal online news sites and magazines</strong> such as Magyar Narancs (Szalai 2019), 444.hu (Herczeg 2019) and 24.hu (Kerékgyártó 2018; Spirk 2019) also report on the case. However, <strong>the main pro-government newspapers, such as Magyar Nemzet, remain silent.</strong></p>
<p>More recently, in January 2021, <strong>INCLO, an international network of civil liberties NGOs,</strong> published a report (INCLO 2021) discussing the Hungarian case, and specifically the <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a>, as an example of how the employment of FRT is at odds with the right to privacy and civil liberties. Their main concern is that, <strong>due to inadequate regulation, FRT can be used in conjunction with a CCTV network that is being developed at an alarming rate.</strong></p>
<p>In an interview, one of the authors of the INCLO case study, legal expert Ádám Remport, explains:</p>
<section id="regarding-secret-surveillance-in-general-the-problem-is-the-lack-of-adequate-supervision-and-an-effective-remedial-system.-the-legal-provisions-governing-national-security-agencies-are-mostly-satisfactory.-however-they-are-not-necessarily-enforced-or-if-they-are-breached-theres-no-way-to-find-out.-not-via-the-court-which-is-what-our-latest-cases-show-not-via-parliaments-national-security-committee-due-to-the-quorum-in-order-for-the-national-security-committee-to-be-operational-the-majority-of-its-members-must-be-present.-given-that-the-ruling-fidesz-and-kdnp-parties-hold-more-than-half-of-the-seats-if-they-decide-to-boycott-the-committee-they-can-prevent-it-from-performing-its-job.-this-has-already-happened-on-several-occasions-when-the-committee-was-supposed-to-look-into-surveillance-cases-which-would-potentially-have-been-politically-unfeasible-for-the-government.-interview-by-author-with-ádám-remport-2021" class="level4 Quote">
<blockquote class="Quote">Regarding secret surveillance in general the problem is the <strong>lack of adequate supervision and an effective remedial system</strong>. The legal provisions governing national security agencies are mostly satisfactory. However, they are not necessarily enforced, or <strong>if they are breached, theres no way to find ou</strong>t. Not via the court —which is what our latest cases show— not via Parliaments national security committee, due to the quorum: in order for the national security committee to be operational, the majority of its members must be present. Given that the ruling Fidesz and KDNP parties hold more than half of the seats, if they decide to boycott the committee, they can prevent it from performing its job. This has already happened on several occasions when the committee was supposed to look into surveillance cases which would potentially have been politically unfeasible for the government.” <footer>(Interview by author with Ádám Remport 2021)</footer></blockquote>
<p>The lack of contestation and social debate around the issues of privacy and human rights in relation to projects such as the Hungarian Governments Dragonfly is striking. While information about the <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a> has sporadically reached the wider public, <strong>any discussion of the face recognition technology employed by the HKR app has been missing</strong>.</p>
</section>
</section>
<section id="effects-of-the-technologies-3" class="level2">
<h2>Effects of the technologies</h2>
<p><strong>State-operated and centralised mass surveillance systems</strong>, such as the <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a> currently under development in Hungary, raise at least two sets of questions with regard to their societal and political effects. The first set of questions concerns <strong>visibility and the (lack of) possibility for societal debate and contestation</strong>. The second concerns the <strong>grey areas of legislation and regulation</strong>. When the development and employment of such novel technologies as biometric video surveillance and (live) facial recognition becomes <strong>entangled with the national interest</strong> of reinforcing public order, preventing terrorism, and fighting criminality, or, as with the <a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App</a>, enforcing coronavirus measures, the ability of oversight bodies to carry out effective scrutiny might be seriously compromised. The Hungarian governmental decree of 16 March 2020 is a case in point. The decree authorises the Minister for Innovation and Technology and an operational body consisting of representatives of the Ministry of Interior, the police, and health authorities to “<strong>acquire and process any kind of personal data from private or public entities,</strong> including traffic and location data from telecommunication providers, <strong>with a very broad definition of the purpose for which the data can be used</strong>” (Council of Europe 2020, 12); at the same time, ordinary courts have been suspended, thus preventing the Constitutional Court from reviewing the proportionality of measures introduced under emergency conditions (ibid., 10).</p>
<p>Using such technologies for the so-called public good can even attract the <strong>support of residents</strong> who want to live in safe and predictable environments. The fact that these public environments are “secured” at the expense of <strong>curtailing the human rights to privacy and to ones face and biometric data</strong> is often overlooked by the public. As the human rights NGO <a class="maplink" data-title="HCLU">Hungarian Civil Liberties Union</a> has put it in a recent publication:</p>
<section id="the-introduction-of-facial-recognition-systems-is-almost-never-preceded-by-social-debate.-its-widespread-application-and-the-omission-of-public-consultation-can-lead-to-the-normalisation-of-continuous-surveillance-and-a-violation-of-rights-where-states-possess-the-ability-of-knowing-where-we-go-whom-we-meet-what-pubs-or-churches-we-visit-inclo-2021." class="level4 Quote">
<blockquote class="Quote">“[…] the introduction of facial recognition systems is almost never preceded by social debate. Its widespread application and the omission of public consultation can lead to the normalisation of continuous surveillance and a violation of rights, where states possess the ability of knowing where we go, whom we meet, what pubs or churches we visit.” <footer>(INCLO 2021)</footer> </blockquote>
<p>To bring awareness to these issues, there is a need for a <strong>strong civil society and independent media</strong> which, if seriously compromised, as in the case of Hungary, can do little to educate the general public. Talking about the <strong>lack of a legal framework</strong> for the use of face recognition technologies by the Hungarian secret services, Ádám Remport explained:</p>
</section>
<section id="if-there-was-oversight-i-think-that-the-use-of-these-technologies-would-be-probably-more-accepted.-theres-certainly-a-possibility-for-abuses.-this-doesnt-necessarily-mean-that-these-abuses-happen-first-of-all-because-its-impossible-to-prove-them-and-second-we-have-no-direct-evidence-of-them.-this-needs-to-be-emphasised.-but-in-reality-it-only-depends-on-the-personal-good-will-of-the-secret-services-not-to-breach-individuals-privacy-rights.-because-in-the-end-theres-no-viable-or-independent-oversight-over-their-workings.-they-can-go-by-the-rules-and-most-of-the-times-they-probably-do.-unless-they-dont.-but-then-we-will-never-find-out." class="level4 Quote">
<blockquote class="Quote">“If there was oversight, I think that the use of these technologies would be probably more accepted. Theres certainly a <strong>possibility for abuses</strong>. This doesnt necessarily mean that these abuses happen, first of all because its impossible to prove them, and second, we have no direct evidence of them. This needs to be emphasised. But <strong>in reality, it only depends on the personal good will of the secret services not to breach individuals privacy rights</strong>. Because in the end theres no viable or independent oversight over their workings. They can go by the rules, and most of the times they probably do. Unless they dont. But then, <strong>we will never find out</strong>.”</blockquote>
</section>
</section>
</section>
<section id="recommendations" class="level1">
<h1>Recommendations</h1>
<p><strong>1. The EU should prohibit the deployment of both indiscriminate and “targeted” Remote Biometric and Behavioural Identification (RBI) technologies in public spaces (real-time RBI), as well as ex-post identification (or forensic RBI). Our analysis shows that both practices, even when used for “targeted surveillance”, amount to mass surveillance.</strong></p>
<ul>
<li><p>In line with similar recommendations made by the EDPB and the EDPS,<a href="#fn45" class="footnote-ref" id="fnref45" role="doc-noteref"><sup>45</sup></a> the EU should <strong>prohibit the deployment of Remote Biometric and Behavioural Identification technologies in public spaces</strong>.</p></li>
<li><p>In line with the position of EDRi regarding the EU Artificial Intelligence Act<a href="#fn46" class="footnote-ref" id="fnref46" role="doc-noteref"><sup>46</sup></a>, our research supports the notion that the <strong>distinction between “real-time” and “ex-post” is irrelevant</strong> when it comes to the impact of these technologies on fundamental rights. Ex-post identification in fact carries a higher potential for harm, as more data can be pooled from different sources to proceed to the identification. The use of such technologies for <strong>“targeted surveillance” is thus equally harmful</strong>, as the practice might be considered expansive and intrusive to an extent that would constitute a disproportionate interference with the rights to privacy and personal data protection.</p></li>
<li><p>This concerns not only the acquisition and processing of <strong>faces, but also gait, voice and other biometric or behavioural signals.</strong></p></li>
</ul>
<p><strong>2. The EU should strengthen the transparency and accountability of biometric and behavioural recognition technologies.</strong></p>
<ul>
<li><p>Our research found that the majority of surveillance systems remain opaque. There is very <strong>little information on how citizens' data is processed</strong> when they enter surveilled public spaces. Rarely are <strong>concrete alternatives</strong> provided for those who do not wish to be surveilled. In some extreme cases, such as the deployment of FRT trials in London, citizens who deliberately avoided surveillance by covering their faces were <strong>subjected to fines</strong>. This poses considerable challenges to citizens rights, as well as to the transparency and accountability of these systems.</p></li>
<li><p><strong>It thus seems necessary to expand the existing transparency and accountability requirements</strong> in the new EU Artificial Intelligence Act for biometric technologies. These requirements should be expanded to include external independent accountability, transparency and oversight for any implementations of biometric technologies that are not already prohibited by the Act.</p></li>
<li><p>In particular, it seems imperative to increase the transparency of such systems by conditioning their operation on the publication of <strong>key characteristics and features</strong> (type of data acquisition, type of machine learning algorithm, nature of data collected in the database) necessary for <strong>effective public oversight</strong> of their operation. These details should be disclosed even when deployments are used for national security or law enforcement purposes, and the public should be informed about planned and ongoing projects.</p></li>
</ul>
<p><strong>3. The EU should promote the reinforcement of robust accountability mechanisms for biometric surveillance systems.</strong></p>
<ul>
<li>
<p>The current legislative framework remains <strong>unclear</strong> as to which institutions may <strong>review or authorise biometric surveillance</strong> systems. In light of the GDPR and the LED, the Data Protection Authorities (DPAs) in some member states enforce the relevant data protection legislation and oversee the processing of biometric data, while in others a separate authority is tasked with the responsibility to review the compatibility with the relevant legislation insofar as personal data processing by law enforcement authorities is concerned (such as Belgium, see case study).</p>
</li>
<li>
<p>The EU should work toward developing a <strong>centralised authorisation process for biometric surveillance</strong>, within which all relevant authorities are included and are able to veto the authorisation.</p>
</li>
<li><p>Although the proposed EU Artificial Intelligence Act limits prior authorisation by a court or independent administrative authority to real-time biometric surveillance, it is necessary to underline that ex-post biometric identification systems must also be subject to supervision or authorisation, taking into account the standards under the ECHR and the Charter.</p></li>
</ul>
<p><strong>4. The EU should promote individual rights under the GDPR through digital-rights-by-design technologies.</strong></p>
<ul>
<li><p>More attention could be given to protecting <strong>individuals rights under the GDPR</strong> when it comes to data collection and processing mechanisms, as well as to a <strong>fundamental rights assessment</strong> both ex ante and <strong>ex post.</strong></p></li>
<li><p>This could be implemented technically through <strong>data minimisation or digital rights-by-design</strong> methods, either through <strong>technical solutions that do not collect biometric information</strong>, or through systems which incorporate automated forms of <strong>notification</strong>, <strong>immutable</strong> <strong>transparency and accountability logging</strong>, and <strong>control of data</strong>, or ideally by a combination of both approaches (a minimal sketch of such tamper-evident logging follows this list).</p></li>
</ul>
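<p>As a sketch of what the “immutable transparency and accountability logging” mentioned in the list above could look like in practice, the following hypothetical example chains each record of access to biometric data to the previous one with a hash, so that later tampering with the log becomes detectable. It is one possible realisation under our own assumptions, not a prescribed standard, and all names are illustrative.</p>
<pre><code># Illustrative sketch of tamper-evident "transparency logging" for
# accesses to biometric data: each entry is chained to the previous one
# by a hash, so alteration or deletion of any record breaks the chain.
import hashlib
import json
import time

class AccessLog:
    def __init__(self):
        self.entries = []
        self._last_hash = "0" * 64          # genesis value

    def record(self, actor, purpose, data_subject):
        entry = {
            "time": time.time(),
            "actor": actor,                  # which authority queried the data
            "purpose": purpose,              # stated purpose / legal basis
            "data_subject": data_subject,    # pseudonymous subject identifier
            "prev": self._last_hash,         # link to the previous entry
        }
        digest = hashlib.sha256(json.dumps(entry, sort_keys=True).encode()).hexdigest()
        entry["hash"] = digest
        self._last_hash = digest
        self.entries.append(entry)
        return entry

    def verify(self):
        """Recompute the chain; any edited or missing entry is detected."""
        prev = "0" * 64
        for e in self.entries:
            body = {k: v for k, v in e.items() if k != "hash"}
            if e["prev"] != prev:
                return False
            if hashlib.sha256(json.dumps(body, sort_keys=True).encode()).hexdigest() != e["hash"]:
                return False
            prev = e["hash"]
        return True
</code></pre>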
<p><strong>5. The EU should ensure effective enforcement of GDPR purpose limitation.</strong></p>
<ul>
<li><p><strong>Purpose limitation</strong> is one of the key principles of the GDPR. As our report shows, the <strong>re-purposing of biometric data</strong> is not always kept sufficiently in check.</p></li>
<li><p>From a technical perspective, <strong>biometric mass surveillance can easily emerge by connecting different elements of a technical infrastructure</strong> (video acquisition capacities, processing algorithms, biometric datasets) <strong>developed in other contexts.</strong></p></li>
<li><p>For example, while the <strong>forensic use of facial recognition</strong> is not a form of <strong>remote biometric identification</strong> per se, the adoption of such systems has allowed for the creation of biometrically searchable national datasets. These datasets are one piece of a potential <strong>biometric mass surveillance</strong> infrastructure, which can become a technical reality if live camera feeds, processed through live facial recognition software, are connected to them.</p></li>
<li><p>In order to maintain democratic oversight over the uses of the infrastructure, and <strong>avoid the risk of function creep</strong> (i.e. when a technology is being used beyond its initial purpose) it is thus imperative that the principle of <strong>purpose limitation</strong> is systematically enforced and strictly regulated with regard to the <strong>type of data</strong> (criminal or civilian datasets, datasets generated from social media, as in the <a class="maplink" data-title="Clearview AI">Clearview AI</a> controversy) against which biometric searches can be performed.</p></li>
</ul>
<p><strong>6. The EU should support voices and organisations which are mobilised for the respect of EU fundamental rights.</strong></p>
<ul>
<li><p>Our research showed that, in addition to state oversight agencies, many institutions from <strong>civil society</strong> are active in making sure that <strong>EU fundamental rights are respected</strong> in the field of biometric security technologies.</p></li>
<li><p>While in some countries they benefit from a <strong>dense network of civil society funding</strong>, in others they are subjected to <strong>heavy scrutiny and financial restrictions</strong> (see for example the Hungary case study in this report).</p></li>
<li><p><strong>Supporting civil society organisations</strong> that operate in the <strong>sector of digital rights</strong> is therefore instrumental for a healthy democratic debate and oversight. Civil society needs to be able to participate in all relevant legislative and other decision-making procedures.</p></li>
<li><p>Particularly in the area of <strong>litigation</strong>, support for civil society and for EU citizens access to rights could be extremely helpful. We found numerous areas in our study where sufficient legal clarity was lacking and would likely only be achieved through the courts. We would thus advise that the EU support existing <strong>digital rights litigation initiatives</strong> and create additional mechanisms to support this approach.</p></li>
</ul>
<p><strong>7. The EU should take into account the global dimension of the biometric and behavioural analysis technology industry.</strong></p>
<ul>
<li><p>The technologies used for FRT in Europe come from <strong>vendors across the world</strong>. Technologies for biometric or behavioural analysis are often tested in one country before they are implemented in another.</p></li>
<li><p>EU policy on the biometric or behavioural analysis technology industry thus needs to consider its impact both <strong>inside and outside of Europe</strong>. Here, the recently revised <strong>EU Export Control framework</strong>, which may cover biometric and behavioural technologies, can play a role.</p></li>
</ul>
</section>
<section id="references" class="level1 list-paragraph">
<h1 class="list-paragraph nocount">REFERENCES</h1>
<p>1994. Évi XXXIV. Törvény - Nemzeti Jogszabálytár. 1994. <a target="_blank" href="https://njt.hu/jogszabaly/1994-34-00-00">https://njt.hu/jogszabaly/1994-34-00-00</a>.</p>
<p>2015. Évi CLXXXVIII. Törvény - Nemzeti Jogszabálytár. 2015. <a target="_blank" href="https://njt.hu/jogszabaly/2015-188-00-00">https://njt.hu/jogszabaly/2015-188-00-00</a>.</p>
<p>7sur7. 2019. “Des caméras avec reconnaissance faciale à Brussels Airport.” <a target="_blank" href="https://www.7sur7.be/belgique/des-cameras-avec-reconnaissance-faciale-a-brussels-airport~a46f7a4c/">https://www.7sur7.be/belgique/des-cameras-avec-reconnaissance-faciale-a-brussels-airport~a46f7a4c/</a>.</p>
<p>Access Now. 2021. Spotifys Speech-Recognition Patent Tech: Global Coalition Says Dont Spy. <em>Access Now</em>. <a target="_blank" href="https://www.accessnow.org/spotify-spy-tech-coalition/">https://www.accessnow.org/spotify-spy-tech-coalition/</a> (September 29, 2021).</p>
<p>Access Now. 2021. “Open letter calling for a global ban on biometric recognition technologies that enable mass and discriminatory surveillance.” <a target="_blank" href="https://www.accessnow.org/cms/assets/uploads/2021/06/BanBS-Statement-English.pdf">https://www.accessnow.org/cms/assets/uploads/2021/06/BanBS-Statement-English.pdf</a>.</p>
<p>Al-Kawaz, Hiba, Nathan Clarke, Steven Furnell, Fudong Li, and Abdulrahman Alruban. 2018. “<em>Advanced Facial Recognition for Digital Forensics.</em>” In ECCWS 2018 17th European Conference on Cyber Warfare and Security V2, Oslo: Academic Conferences and publishing limited, 11-19</p>
<p>Algorithm Watch. 2020. <em>Automating Society Report 2020</em>. <a target="_blank" href="https://automatingsociety.algorithmwatch.org/wp-content/uploads/2020/12/Automating-Society-Report-2020.pdf">https://automatingsociety.algorithmwatch.org/wp-content/uploads/2020/12/Automating-Society-Report-2020.pdf</a></p>
<p>Allix, Grégoire. 2018. “Comment des villes « hyper connectées » contrôlent lespace public.” <em>Le Monde</em>, 19 December 2018. https://www.lemonde.fr/economie/article/2018/12/19/au-nom-de-la-smart-city-des-villes-sous-surveillance_5399527_3234.html.</p>
<p>Amsterdam Algoritmeregister. 2021. https://algoritmeregister.amsterdam.nl/en/ai-register/</p>
<p>Amsterdam-Amstelland safety region. 2020. “One and a Half Meter Monitor.” Amsterdam: City of Amsterdam Algorithm Register. https://algoritmeregister.amsterdam.nl/en/one-and-a-half-meter-monitor/</p>
<p>Andraško, Jozef, Matúš Mesarčík and Ondrej Hamuľák. 2021. “The regulatory intersections between artificial intelligence, data protection and cyber security: challenges and opportunities for the EU legal framework.” <em>AI &amp; Soc (2021)</em>. doi: 10.1007/s00146-020-01125-5.</p>
<p>Article 29 Data Protection Working Party. 2007. “Opinion 4/2007 on the concept of personal data.” <a target="_blank" href="https://ec.europa.eu/justice/article-29/documentation/opinion-recommendation/files/2007/wp136_en.pdf">https://ec.europa.eu/justice/article-29/documentation/opinion-recommendation/files/2007/wp136_en.pdf</a></p>
<p>Assemblée Nationale. 2018.  “Rapport N°1335 : Rapport dInformation.” https://www.assemblee-nationale.fr/dyn/15/rapports/cion_lois/l15b1335_rapport-information.pdf</p>
<p>Barelli, Paul. 2018. “A Nice, lapplication sécuritaire Reporty divise les habitants.” <em>Le Monde</em>, 6 February 2018. <a target="_blank" href="https://www.lemonde.fr/societe/article/2018/02/06/a-nice-l-application-securitaire-reporty-divise-les-habitants_5252467_3224.html">https://www.lemonde.fr/societe/article/2018/02/06/a-nice-l-application-securitaire-reporty-divise-les-habitants_5252467_3224.html</a>.</p>
<p>Bensalem, Nawal. 2018. “La police belge mise gros sur la reconnaissance faciale : découvrez les techniques scientifiques de demain.” <em>La Dernière Heure</em>, 24 September 2018. https://www.dhnet.be/actu/faits/la-police-belge-mise-gros-sur-la-reconnaissance-faciale-decouvrez-les-techniques-scientifiques-de-demain-5ba7ff06cd70a16d81022de6.</p>
<p>Binacchi, Fabien. 2019. “Vos émotions analysées pour des raisons de sécurité? Un test proposé à Nice.” <em>20 Minutes</em>, 15 January 2019. <a target="_blank" href="https://www.20minutes.fr/high-tech/2423167-20190115-nice-si-emotions-analysees-raisons-securite-ville-etudie-question-opposition-offusque">https://www.20minutes.fr/high-tech/2423167-20190115-nice-si-emotions-analysees-raisons-securite-ville-etudie-question-opposition-offusque</a></p>
<p>BPI France. 2018. “Le projet innovant SafeCity, pour renforcer la sécurisation des villes intelligentes sur le territoire, obtient un financement du Programme d'Investissements d'Avenir (PIA).” https://presse.bpifrance.fr/investissements-davenir-le-projet-innovant-safecity-pour-renforcer-la-securisation-des-villes-intelligentes-sur-le-territoire-obtient-un-financement-du-programme-dinvestissements-davenir-pia/</p>
<p>Breyer, Patrick, et al. 2021. "MEPs Letter to the Commission on Artificial Intelligence and Biometric Surveillance." Brussels, 15 April. <a target="_blank" href="https://www.patrick-breyer.de/wp-content/uploads/2021/04/MEP-Letter-to-the-Commission-on-Artificial-Intelligence-and-Biometric-Surveillance.pdf">https://www.patrick-breyer.de/wp-content/uploads/2021/04/MEP-Letter-to-the-Commission-on-Artificial-Intelligence-and-Biometric-Surveillance.pdf</a> (July 23, 2021).</p>
<p>Bröckling, Marie. 2019. “Gerichtsurteil zu Gesichtserkennung: Datenschützer scheitert an Löschung biometrischer G20-Datenbank.” <em>Netzpolitik.org</em>, 24 October 2019. <a target="_blank" href="https://netzpolitik.org/2019/datenschuetzer-scheitert-an-loeschung-biometrischer-g20-datenbank/">https://netzpolitik.org/2019/datenschuetzer-scheitert-an-loeschung-biometrischer-g20-datenbank/</a>.</p>
<p>Bundeskriminalamt. n.d. “<em>Gesichtserkennung.</em><a target="_blank" href="https://www.bka.de/DE/UnsereAufgaben/Ermittlungsunterstuetzung/Kriminaltechnik/Biometrie/Gesichtserkennung/gesichtserkennung_node.html">https://www.bka.de/DE/UnsereAufgaben/Ermittlungsunterstuetzung/Kriminaltechnik/Biometrie/Gesichtserkennung/gesichtserkennung_node.html</a></p>
<p>Bundespolizei. 2019. “Test intelligenter Videoanalyse-Technik.” <a target="_blank" href="https://www.bundespolizei.de/Web/DE/04Aktuelles/01Meldungen/2019/06/190607_videoanalyse.html">https://www.bundespolizei.de/Web/DE/04Aktuelles/01Meldungen/2019/06/190607_videoanalyse.html</a>.</p>
<p>Bundespolizeipräsidium Potsdam. 2018. “Teilprojekt 1. Abschlussbericht.” <a target="_blank" href="https://www.bundespolizei.de/Web/DE/04Aktuelles/01Meldungen/2018/10/181011_abschlussbericht_gesichtserkennung_down.pdf;jsessionid=B00C5E4B9341D9F8733EF8508A6D9C46.2_cid324?__blob=publicationFile&amp;v=1">https://www.bundespolizei.de/Web/DE/04Aktuelles/01Meldungen/2018/10/181011_abschlussbericht_gesichtserkennung_down.pdf;jsessionid=B00C5E4B9341D9F8733EF8508A6D9C46.2_cid324?__blob=publicationFile&amp;v=1</a>.</p>
<p>Buolamwini, Joy and Timnit Gebru. 2018. “Gender Shades: Intersectional Accuracy Disparities in Commercial Gender Classification.” <em>Proceedings of Machine Learning Research</em> 81.</p>
<p>Caspar, Johannes. 2018. “Einsatz der Gesichtserkennungssoftware „Videmo 360“ durch die Polizei Hamburgzur Aufklärung von Straftaten im Zusammenhang mit dem in Hamburg stattgefundenen G20-Gipfel.” <em>Der Hamburgische Beauftragte für Datenschutz und Informationsfreiheit</em>, 18 December 2018. https://datenschutz-hamburg.de/assets/pdf/Anordnung_HmbBfDI_2018-12-18.pdf</p>
<p>Chaos Computer Club. 2018. “Biometrische Videoüberwachung: Der Südkreuz-Versuch war kein Erfolg.” <a target="_blank" href="https://www.ccc.de/de/updates/2018/debakel-am-suedkreuz">https://www.ccc.de/de/updates/2018/debakel-am-suedkreuz</a></p>
<p>Chin, Josh and Clément Bürge. 2017. “Twelve Days in Xinjiang: How Chinas Surveillance State Overwhelms Daily Life.” <em>Wall Street Journal</em>, 19 December 2017. https://www.wsj.com/articles/twelve-days-in-xinjiang-how-chinas-surveillance-state-overwhelms-daily-life-1513700355, checked on 4/24/2021.</p>
<p>CNIL. 2018. “Mise en œuvre expérimentale de lapplication « REPORTY » par la ville de NICE : quelle est la position de la CNIL ?” https://www.cnil.fr/fr/mise-en-oeuvre-experimentale-de-lapplication-reporty-par-la-ville-de-nice-quelle-est-la-position-de.</p>
<p>CNIL. 2019a. “Délibération n° 2019-001du 10 janvier 2019 portant règlement type relatif à la mise en œuvre de dispositifs ayant pour finalité le contrôle d'accès par authentification biométrique aux locaux, aux appareils et aux applications informatiques sur les lieux de travail.” <a target="_blank" href="https://www.cnil.fr/sites/default/files/atoms/files/deliberation-2019-001-10-01-2019-reglement-type-controle-dacces-biometrique.pdf">https://www.cnil.fr/sites/default/files/atoms/files/deliberation-2019-001-10-01-2019-reglement-type-controle-dacces-biometrique.pdf</a></p>
<p>CNIL. 2019b. <em>Reconnaissance faciale - pour un débat à la hauteur des enjeux</em>. Paris: Commission Nationale de l'Informatique et des Libertés.</p>
<p>Cochior, Cristina, and Ruben van de Ven. 2020. “Plotting Data: Acts of Collection and Omission.” <a target="_blank" href="http://plottingd.at/a/introduction.html">http://plottingd.at/a/introduction.html</a></p>
<p>Colomé, Jordi Pérez. 2019. “Marbella, the biggest video surveillance lab in Spain.” <em>EL PAÍS</em>. 29 November 2019. <a target="_blank" href="https://english.elpais.com/elpais/2019/11/27/inenglish/1574849134_892168.html">https://english.elpais.com/elpais/2019/11/27/inenglish/1574849134_892168.html</a>.</p>
<p>Council of Europe. 2018. “The Practical Guide on the Use of Personal Data in the Police Sector.” https://rm.coe.int/t-pd-201-01-practical-guide-on-the-use-of-personal-data-in-the-police-/16807927d5</p>
<p>Council of Europe. 2020. “Digital Solutions to Fight Covid-19. 2020 Data Protection Report.” <a target="_blank" href="https://rm.coe.int/prems-120820-gbr-2051-digital-solutions-to-fight-covid-19-text-a4-web-/16809fe49c">https://rm.coe.int/prems-120820-gbr-2051-digital-solutions-to-fight-covid-19-text-a4-web-/16809fe49c</a></p>
<p>Council of Europe. 2021. “Guidelines on Facial Recognition.” <a target="_blank" href="https://rm.coe.int/guidelines-on-facial-recognition/1680a134f3">https://rm.coe.int/guidelines-on-facial-recognition/1680a134f3</a></p>
<p>Csaky, Zselyke. 2020. “Nations in Transit 2020: Dropping the Democratic Facade.” <em>Freedom House.</em> <a target="_blank" href="https://freedomhouse.org/sites/default/files/2020-04/05062020_FH_NIT2020_vfinal.pdf">https://freedomhouse.org/sites/default/files/2020-04/05062020_FH_NIT2020_vfinal.pdf</a>.</p>
<p>De Halleux, Françoise. 2020. Reconnaissance faciale: le ministre de lIntérieur, Pieter De Crem, ny renonce pas! (29/06/2020). <em>Édition digitale de Liège</em>. <a target="_blank" href="https://lameuse.sudinfo.be/591608/article/2020-06-29/reconnaissance-faciale-le-ministre-de-linterieur-pieter-de-crem-ny-renonce-pas">https://lameuse.sudinfo.be/591608/article/2020-06-29/reconnaissance-faciale-le-ministre-de-linterieur-pieter-de-crem-ny-renonce-pas</a> (September 29, 2021).</p>
<p>De Hert, Paul. 2017. “Courts, privacy and data protection in Belgium: Fundamental rights that might as well be struck from the Constitution.” In <em>Courts, Privacy and Data Protection in the Digital Environment</em>, eds. Maja Brkan and Evangelia Psychogiopoulou. Maastricht: Edward Elgar, 63-81.</p>
<p>Défenseur des Droits. 2021. <em>Technologies Biométriques: Limpératif Respect Des Droits Fondamentaux</em>. Paris: Défenseur des Droits. <a target="_blank" href="https://www.defenseurdesdroits.fr/sites/default/files/atoms/files/rap-biometr-num-08.07.21.pdf">https://www.defenseurdesdroits.fr/sites/default/files/atoms/files/rap-biometr-num-08.07.21.pdf</a> (September 29, 2021).</p>
<p>Delver, Guido. 2021. Phone interview on 29-03-2021. <em>Interviewer: Ruben van de Ven</em></p>
<p>Dercsényi, Dávid. 2018. "Totális megfigyelés 50 milliárdért - Pintérék terve kiakasztotta az adatvédelmi biztost". <em>hvg.hu</em>. 2 December 2018. <a target="_blank" href="https://hvg.hu/itthon/20181202_NAIH_50_milliardos_totalis_megfigyeles_lenne_Pinterek_kamerarendszere">https://hvg.hu/itthon/20181202_NAIH_50_milliardos_totalis_megfigyeles_lenne_Pinterek_kamerarendszere</a>.</p>
<p>DPA Hamburg. 2018. “Datenschutzrechtliche Prüfung des Einsatzes einer Gesichtserkennungssoftware zur Aufklärung von Straftaten im Zusammenhang mit dem G20-Gipfel durch die Polizei Hamburg.” Hamburg: DPA Hamburg. <a target="_blank" href="https://datenschutz-hamburg.de/assets/pdf/Pruefbericht_Gesichtserkennungssoftware.pdf">https://datenschutz-hamburg.de/assets/pdf/Pruefbericht_Gesichtserkennungssoftware.pdf</a></p>
<p>Dudebout, Camille. 2020. “Safe City Project in Nice: Testing Facial Recognition.” <a target="_blank" href="https://ai-regulation.eu/safe-city-projectin-nice-testing-facial-recognition/">https://ai-regulation.eu/safe-city-projectin-nice-testing-facial-recognition/</a>.</p>
<p>EDPB. 2019. “Guidelines 3/2019 on processing of personal data through video devices.” <em>edpb.europa.eu.</em> <a target="_blank" href="https://edpb.europa.eu/sites/edpb/files/consultation/edpb_guidelines_201903_videosurveillance.pdf">https://edpb.europa.eu/sites/edpb/files/consultation/edpb_guidelines_201903_videosurveillance.pdf</a>.</p>
<p>EDPB. 2021a. “EDPB &amp; EDPS Call for Ban on Use of AI for Automated Recognition of Human Features in Publicly Accessible Spaces, and Some Other Uses of AI That Can Lead to Unfair Discrimination | European Data Protection Board.” <a target="_blank" href="https://edpb.europa.eu/news/news/2021/edpb-edps-call-ban-use-ai-automated-recognition-human-features-publicly-accessible_en">https://edpb.europa.eu/news/news/2021/edpb-edps-call-ban-use-ai-automated-recognition-human-features-publicly-accessible_en</a>.</p>
<p>EDPB. 2021b. “Thirtieth Plenary Session: EDPB response to NGOs on Hungarian Decrees and statement on Article 23 GDPR.” https://edpb.europa.eu/news/news/2020/thirtieth-plenary-session-edpb-response-ngos-hungarian-decrees-and-statement-article_en.</p>
<p>EDPS and EDPB. 2021. “Joint Opinion on the Proposal for a Regulation of the European Parliament and of the Council laying down harmonised rules on artificial intelligence (Artificial Intelligence Act)” <a target="_blank" href="https://edps.europa.eu/data-protection/our-work/publications/opinions/joint-opinion-edps-edps-proposal-regulation-european_en">https://edps.europa.eu/data-protection/our-work/publications/opinions/joint-opinion-edps-edps-proposal-regulation-european_en</a></p>
<p>EDPS. 2020. Shaping a Safer Digital Future: a New Strategy for a New Decade. Released 30 June 2020. <a target="_blank" href="https://edps.europa.eu/press-publications/publications/strategy/shaping-safer-digital-future_en">https://edps.europa.eu/press-publications/publications/strategy/shaping-safer-digital-future_en</a>. Accessed on 16 March 2021.</p>
<p>EDPS. 2021. “Artificial Intelligence Act: a welcomed initiative, but ban on remote biometric identification in public space is necessary.” <a target="_blank" href="https://edps.europa.eu/press-publications/press-news/press-releases/2021/artificial-intelligence-act-welcomed-initiative_en">https://edps.europa.eu/press-publications/press-news/press-releases/2021/artificial-intelligence-act-welcomed-initiative_en</a></p>
<p>EDRi. 2020. “Ban Biometric Mass Surveillance!” https://edri.org/our-work/blog-ban-biometric-mass-surveillance/</p>
<p>EDRi. 2021. “EUs AI law needs major changes to prevent discrimination and mass surveillance.” <a target="_blank" href="https://edri.org/our-work/eus-ai-law-needs-major-changes-to-prevent-discrimination-and-mass-surveillance/">https://edri.org/our-work/eus-ai-law-needs-major-changes-to-prevent-discrimination-and-mass-surveillance/</a></p>
<p>Eichenhofer, Johannes, and Christoph Gusy. 2017. “Court, privacy and data protection in Germany: Informational self-determination in the digital environment.” In <em>Courts, Privacy and Data Protection in the Digital Environment</em>, eds. Maja Brkan and Evangelia Psychogiopoulou, 101-119. Edward Elgar.</p>
<p>European Commission. 2020a. <em>Shaping Europes Digital Future</em>. Brussels: European Commission. <a target="_blank" href="https://ec.europa.eu/info/sites/default/files/communication-shaping-europes-digital-future-feb2020_en_4.pdf">https://ec.europa.eu/info/sites/default/files/communication-shaping-europes-digital-future-feb2020_en_4.pdf</a></p>
<p>European Commission. 2018a. “Communication from the Commission to the European Parliament, the European Council, the Council, the European Economic and Social Committee and the Committee of the Regions: Artificial Intelligence for Europe. {SWD(2018) 137 final}” https://ec.europa.eu/transparency/regdoc/rep/1/2018/EN/COM-2018-237-F1-EN-MAIN-PART-1.PDF.</p>
<p>European Commission. 2018b. “Coordinated plan on artificial intelligence (COM(2018) 795 final).” https://ec.europa.eu/knowledge4policy/publication/coordinated-plan-artificial-intelligence-com2018-795-final_en.</p>
<p>European Commission. 2020b. <em>White Paper on Artificial Intelligence: A European Approach to Excellence and Trust</em>. Brussels: European Commission. <a target="_blank" href="https://ec.europa.eu/info/publications/white-paper-artificial-intelligence-european-approach-excellence-and-trust_en">https://ec.europa.eu/info/publications/white-paper-artificial-intelligence-european-approach-excellence-and-trust_en</a></p>
<p>European Commission. 2021a. <em>Fostering a European Approach to Artificial Intelligence</em>. COM(2021)205 (21 April)<br />
<a target="_blank" href="https://digital-strategy.ec.europa.eu/en/library/communication-fostering-european-approach-artificial-intelligence">https://digital-strategy.ec.europa.eu/en/library/communication-fostering-european-approach-artificial-intelligence</a>.</p>
<p>European Commission. 2021b. <em>Proposal for a Regulation of the European Parliament and the Council Laying down Harmonised Rules on Artificial Intelligence (Artificial Intelligence Act) COM(2021) 206 Final</em>. Brussels: European Commission. <a target="_blank" href="https://eur-lex.europa.eu/legal-content/EN/TXT/?uri=CELEX%3A52021PC0206">https://eur-lex.europa.eu/legal-content/EN/TXT/?uri=CELEX%3A52021PC0206</a></p>
<p>European Economic and Social Committee. 2018a. “Trust, privacy and security for consumers and businesses in the Internet of Things (IoT).” https://www.eesc.europa.eu/en/our-work/opinions-information-reports/opinions/trust-privacy-and-consumer-security-internet-things-iot-own-initiative-opinion.</p>
<p>European Economic and Social Committee. 2018b. “Artificial intelligence: anticipating its impact on work to ensure a fair transition.” https://www. eesc.europa.eu/en/our-work/opinions-information-reports/opini ons/artificial-intelligence-anticipating-its-impact-jobs-ensure-fairtransition-own-initiative-opinion.</p>
<p>European Economic and Social Committee. 2018c. “Artificial intelligence—the consequences of artificial intelligence on the (digital) single market, production, consumption, employment and society.” https://www.eesc. europa.eu/en/our-work/opinions-information-reports/opinions/ artificial-intelligence-consequences-artificial-intelligence-digit al-single-market-production-consumption-employment-and.</p>
<p>European Parliament. 2017. “European Parliament resolution of 16 February 2017 with recommendations to the Commission on Civil Law Rules on Robotics.” https://www. europarl.europa.eu/doceo/document/TA-8-2017-0051EN.html. Accessed 16 March 2020.</p>
<p>European Parliament. 2021. “European Parliament resolution of 20 January 2021 on artificial intelligence: questions of interpretation and application of international law in so far as the EU is affected in the areas of civil and military uses and of state authority outside the scope of criminal justice.” <a target="_blank" href="https://www.europarl.europa.eu/doceo/document/TA-9-2021-0009_EN.html">https://www.europarl.europa.eu/doceo/document/TA-9-2021-0009_EN.html</a>. Accessed 16 March 2021.</p>
<p>European People's Party. 2021. “Facial recognition software: regulation instead of ban.” <a target="_blank" href="https://www.eppgroup.eu/newsroom/news/facial-recognition-software-regulation-instead-of-ban">https://www.eppgroup.eu/newsroom/news/facial-recognition-software-regulation-instead-of-ban</a>.</p>
<p>Europol. 2020. “Europol reply to written questions from MEP Chinnici and MEP Breyer to the Joint Parliamentary Scrutiny Group (JPSG).” <a target="_blank" href="https://web.archive.org/web/20201101141435/https://secure.ipex.eu/IPEXL-WEB/dossier/files/download/8a8629a87398b8340173b84ac84115eb.do">https://web.archive.org/web/20201101141435/https://secure.ipex.eu/IPEXL-WEB/dossier/files/download/8a8629a87398b8340173b84ac84115eb.do</a></p>
<p>Farge, Rémy. 2020. “Police du futur et nouvelles technologies du profilage ethnique.” <em>La Chronique de la Ligue des Droits Humains</em> (191): 13-16.</p>
<p>Fernandez, Valérie, Jessica Galissaire, Léo Laugier, Guillaume Morat, Marine Pouyat, and Annabelle Richard. 2020. <em>Facial Recognition: Embodying European Values</em>. Paris: Renaissance Numérique. <a target="_blank" href="https://www.renaissancenumerique.org/ckeditor_assets/attachments/548/report_facial_recognition.pdf">https://www.renaissancenumerique.org/ckeditor_assets/attachments/548/report_facial_recognition.pdf</a></p>
<p>FRA. 2018. <em>Preventing Unlawful Profiling Today and in the Future: A Guide</em>. Luxembourg: Publications Office of the European Union. <a target="_blank" href="https://fra.europa.eu/sites/default/files/fra_uploads/fra-2018-preventing-unlawful-profiling-guide_en.pdf">https://fra.europa.eu/sites/default/files/fra_uploads/fra-2018-preventing-unlawful-profiling-guide_en.pdf</a></p>
<p>FRA. 2019. <em>Facial Recognition Technology: Fundamental Rights Considerations in the Context of Law Enforcement.</em> <a target="_blank" href="https://op.europa.eu/publication/manifestation_identifier/PUB_TK0320019ENN">https://op.europa.eu/publication/manifestation_identifier/PUB_TK0320019ENN</a></p>
<p>France 3 Auvergne-Rhône-Alpes. 2019. <em>St Etienne: Des Capteurs Sonores à l'écoute de La Ville</em>. <a target="_blank" href="https://www.youtube.com/watch?v=KyCIOCiTqkU">https://www.youtube.com/watch?v=KyCIOCiTqkU</a></p>
<p>Fussey, Pete and Daragh Murray. 2019. “Independent Report on the London Metropolitan Police Service's Trial of Live Facial Recognition Technology.” Colchester, UK: Human Rights Centre, University of Essex. <a target="_blank" href="http://repository.essex.ac.uk/24946/1/London-Met-Police-Trial-of-Facial-Recognition-Tech-Report-2.pdf">http://repository.essex.ac.uk/24946/1/London-Met-Police-Trial-of-Facial-Recognition-Tech-Report-2.pdf</a></p>
<p>Galič, Maša and Raphaël Gellert. 2021. "Data Protection Law beyond Identifiability? Atmospheric Profiles, Nudging and the Stratumseind Living Lab". <em>Computer Law &amp; Security Review</em> 40: 105486. <a target="_blank" href="https://doi.org/10.1016/j.clsr.2020.105486">https://doi.org/10.1016/j.clsr.2020.105486</a></p>
<p>Garstka, Krzysztof. 2018. “Between Security and Data Protection: Searching for a Model Big Data Surveillance Scheme within the European Union Data Protection Framework.” <em>HRBDT Occasional Paper Series</em>. <a target="_blank" href="https://www.hrbdt.ac.uk/download/between-security-and-data-protection-searching-for-a-model-big-data-surveillance-scheme-within-the-european-union-data-protection-framework/">https://www.hrbdt.ac.uk/download/between-security-and-data-protection-searching-for-a-model-big-data-surveillance-scheme-within-the-european-union-data-protection-framework/</a></p>
<p>Gehrke, Laurenz. 2020. “Hungary No Longer a Democracy: Report.” <em>Politico</em>, 6 May. <a target="_blank" href="https://www.politico.eu/article/hungary-no-longer-a-democracy-report/">https://www.politico.eu/article/hungary-no-longer-a-democracy-report/</a>.</p>
<p>Gonzalez Fuster, Gloria. 2020. <em>Artificial Intelligence and Law Enforcement. Impact on Fundamental Rights (PE 656.295)</em>. Brussels: European Parliament. <a target="_blank" href="https://www.europarl.europa.eu/RegData/etudes/STUD/2020/656295/IPOL_STU(2020)656295_EN.pdf">https://www.europarl.europa.eu/RegData/etudes/STUD/2020/656295/IPOL_STU(2020)656295_EN.pdf</a></p>
<p>Gotink, Bart. 2019. “Slimme camera's herkennen elke carnavalsvierder in Korte Putstraat: Wie er niet in mag, hebben we er zo uitgepikt.” <em>bd.nl</em>, March 6. <a target="_blank" href="https://www.bd.nl/den-bosch-vught/slimme-camera-s-herkennen-elke-carnavalsvierder-in-korte-putstraat-wie-er-niet-in-mag-hebben-we-er-zo-uitgepikt~a55f6fdd/">https://www.bd.nl/den-bosch-vught/slimme-camera-s-herkennen-elke-carnavalsvierder-in-korte-putstraat-wie-er-niet-in-mag-hebben-we-er-zo-uitgepikt~a55f6fdd/</a></p>
<p>Greenleaf, Graham. 2016. <em>Renewing Convention 108: The CoE's 'GDPR Lite' Initiatives</em>. Rochester, NY: Social Science Research Network. SSRN Scholarly Paper. <a target="_blank" href="https://papers.ssrn.com/abstract=2892947">https://papers.ssrn.com/abstract=2892947</a> (July 25, 2021).</p>
<p>Gröhn, Anna. 2017. “G20-Überwachungstechnik filmt weiter: Großer Bruder Telemichel.” <em>Die Tageszeitung: Taz</em>. <a target="_blank" href="https://taz.de/!5457108/">https://taz.de/!5457108/</a>.</p>
<p>Hamada, Wael. 2020. “Data-Lab.” <em>Inbraakvrije Wijk</em>. <a target="_blank" href="https://inbraakvrijewijk.nl/big-data/">https://inbraakvrijewijk.nl/big-data/</a></p>
<p>Hassani, Jamal E. 2019. “Expérimentation de reconnaissance faciale : Nice ravie, la Cnil sceptique.” <em>JDN</em>, August 28. <a target="_blank" href="https://www.journaldunet.com/economie/services/1443319-reconnaissance-faciale-nice-ravie-la-cnil-sceptique/">https://www.journaldunet.com/economie/services/1443319-reconnaissance-faciale-nice-ravie-la-cnil-sceptique/</a></p>
<p>Henning, Maximilian. 2019. “Überwachung am Südkreuz soll jetzt Situationen und Verhalten scannen.” <em>Netzpolitik.org</em>. <a target="_blank" href="https://netzpolitik.org/2019/ueberwachung-am-suedkreuz-soll-jetzt-situationen-und-verhalten-scannen/">https://netzpolitik.org/2019/ueberwachung-am-suedkreuz-soll-jetzt-situationen-und-verhalten-scannen/</a></p>
<p>Herczeg, Márk. 2019. “A Totális Megfigyelés Ellen Senki Sem Tüntet, Pedig Jó Úton Haladunk Felé.” <em>444</em>, 24 April. <a target="_blank" href="https://444.hu/2019/04/24/a-totalis-megfigyeles-ellen-senki-sem-tuntet-pedig-jo-uton-haladunk-fele">https://444.hu/2019/04/24/a-totalis-megfigyeles-ellen-senki-sem-tuntet-pedig-jo-uton-haladunk-fele</a>.</p>
<p>Het Parool. 2017. “Camera's in billboards op Centraal Station voorlopig uit.” <em>Het Parool</em>. <a target="_blank" href="https://www.parool.nl/gs-bd97c612">https://www.parool.nl/gs-bd97c612</a></p>
<p>Hidvégi, Fanny and Rita Zágoni. 2016. “How Technology Enhances the Right to Privacy - A Case Study on the Right to Hide Project of the Hungarian Civil Liberties Union.” <em>Journal of National Security Law &amp; Freedom</em>, Vol. 8, 531.</p>
<p>Hillman, Jonathan and Maesea McCalpin. 2019. “Watching Huawei's Safe Cities.” <em>CSIS Briefs</em>. Washington: Center for Strategic and International Studies.</p>
<p>Houwing, Lotte. 2020. “Stop the Creep of Biometric Surveillance Technology.” <em>European Data Protection Law Review</em>, Vol. 2, 174.</p>
<p>Hungarian Parliament. 2019. <em>Bill T/7690</em>. <a target="_blank" href="https://www.parlament.hu/irom41/07690/07690.pdf">https://www.parlament.hu/irom41/07690/07690.pdf</a>.</p>
<p>INCLO. 2021. “In Focus: Facial Recognition Tech Stories and Rights Harms from Around the World.” Geneva &amp; Buenos Aires: INCLO. <a target="_blank" href="https://www.inclo.net/pdf/in-focus-facial-recognition-tech-stories.pdf">https://www.inclo.net/pdf/in-focus-facial-recognition-tech-stories.pdf</a>.</p>
<p>Intelligent Lighting Institute. n.d. “Stratumseind.” Eindhoven University of Technology. <a target="_blank" href="https://www.tue.nl/en/research/research-institutes/top-research-groups/intelligent-lighting-institute/infrastructure/stratumseind/">https://www.tue.nl/en/research/research-institutes/top-research-groups/intelligent-lighting-institute/infrastructure/stratumseind/</a></p>
<p>Interpol. 2020. <em>Facial Recognition Fact Sheet</em>. Lyon: Interpol. <a target="_blank" href="https://www.interpol.int/en/content/download/15013/file/FS-04_Facial%20R_Factsheets_EN_2020-03.pdf">https://www.interpol.int/en/content/download/15013/file/FS-04_Facial%20R_Factsheets_EN_2020-03.pdf</a></p>
<p>IPVM Team. 2020. <em>Facial Recognition 101</em>. Bethlehem, PA (USA): IPVM.</p>
<p>IPVM Team. 2021a. <em>Video Analytics Fundamentals Guide</em>. Bethlehem, PA (USA): IPVM.</p>
<p>IPVM Team. 2021b. <em>Facial Recognition Guide</em>. Bethlehem, PA (USA): IPVM.</p>
<p>Jasserand, Catherine. 2016. "Legal Nature of Biometric Data: From Generic Personal Data to Sensitive Data". <em>European Data Protection Law Review (EDPL)</em> 2: 297.</p>
<p>Jiaquan, Zhou. 2018. “Drones, facial recognition and a social credit system: 10 ways China watches its citizens.” <em>South China Morning Post</em>. <a target="_blank" href="https://www.scmp.com/news/china/society/article/2157883/drones-facial-recognition-and-social-credit-system-10-ways-china">https://www.scmp.com/news/china/society/article/2157883/drones-facial-recognition-and-social-credit-system-10-ways-china</a>. Accessed 24 April 2021.</p>
<p>Kaszás, Fanni. 2020. “Coronavirus: New App to Track Nearby Positive Cases Available to Download.” <em>Hungary Today</em>, 14 May 2020. <a target="_blank" href="https://hungarytoday.hu/coronavirus-hungary-app-virusradar/">https://hungarytoday.hu/coronavirus-hungary-app-virusradar/</a></p>
<p>Kerékgyártó, István. 2018. “Ami most épül, ahhoz képest Sztálin és Hitler titkosrendőrsége vaktában lövöldözött.” <a target="_blank" href="http://24.hu"><em>24.hu</em></a>. <a target="_blank" href="https://24.hu/poszt-itt/2018/12/09/kerekgyarto-ami-most-epul-ahhoz-kepest-sztalin-es-hitler-titkosrendorsege-vaktaban-lovoldozott/">https://24.hu/poszt-itt/2018/12/09/kerekgyarto-ami-most-epul-ahhoz-kepest-sztalin-es-hitler-titkosrendorsege-vaktaban-lovoldozott/</a>.</p>
<p>Kindt, Els, Eva Lievens, Eleni Kosta, Thomas Leys, and Paul De Hert. 2008. “Constitutional Rights and New Technologies in Belgium.” In <em>Constitutional Rights and New Technologies: A Comparative Study</em>, eds. Ronald Leenes, Bert-Jaap Koops, Paul de Hert, and Susan W. Brenner, 11-55. The Hague: T.M.C. Asser Press; distributed by Cambridge University Press.</p>
<p>Kindt, Els. 2013. <em>Privacy and Data Protection Issues of Biometric Applications: A Comparative Legal Analysis</em>. Dordrecht: Springer Netherlands.</p>
<p>Kokkeler, Ben, Steven van den Oord, Steven van der Minne, Ilona Danen, Jason van Erve, and Joelle van der Laan. 2020a. “De rol van sensoring bij coproductie van sociale veiligheid in een wijk: Een conceptueel model op basis van literatuurstudie en een analytische aanpak om digitale coproductie van sociale veiligheid in de wijk te toetsen.” Breda: Avans Hogeschool. <a target="_blank" href="https://www.hbo-kennisbank.nl/details/sharekit_av:oai:surfsharekit.nl:ae9b7c0f-9a34-419a-b409-5b1792d805b3?q=ben+kokkeler&amp;has-link=yes&amp;has-link=yes&amp;re-1-k=avanshogeschool&amp;p=2">https://www.hbo-kennisbank.nl/details/sharekit_av:oai:surfsharekit.nl:ae9b7c0f-9a34-419a-b409-5b1792d805b3?q=ben+kokkeler&amp;has-link=yes&amp;has-link=yes&amp;re-1-k=avanshogeschool&amp;p=2</a></p>
<p>Kokkeler, Ben, Steven van den Oord, Steven van der Minne, Ilona Danen, Jason van Erve, and Joelle van der Laan. 2020b. “Het Fieldlab Inbraakvrije Wijk Rotterdam: Een empirische verkenning naar de impact van sensoring ter bevordering van sociale veiligheid in de wijk Lombardijen.” Breda: Avans Hogeschool. <a target="_blank" href="https://hbo-kennisbank.nl/details/sharekit_av:oai:surfsharekit.nl:c4baf1cd-4df6-4779-a330-43184094de95">https://hbo-kennisbank.nl/details/sharekit_av:oai:surfsharekit.nl:c4baf1cd-4df6-4779-a330-43184094de95</a></p>
<p>Közbeszerzési Hatóság. 2020. “Táj. az elj. eredményéről-Szitakötőr. fejlesztése.” Budapest: Közbeszerzési Hatóság. <a target="_blank" href="http://www.kozbeszerzes.hu/ertesito/2020/0/targy/portal_403/megtekint/portal_9112_2020/">http://www.kozbeszerzes.hu/ertesito/2020/0/targy/portal_403/megtekint/portal_9112_2020/</a>.</p>
<p>Krol, Folkert van der. 2019. “Rotterdam Lombardijen is walhalla voor inbrekers.” <em>AD.nl</em>. <a target="_blank" href="https://www.ad.nl/rotterdam/rotterdam-lombardijen-is-walhalla-voor-inbrekers~aba6ac9b/">https://www.ad.nl/rotterdam/rotterdam-lombardijen-is-walhalla-voor-inbrekers~aba6ac9b/</a></p>
<p>L'Avenir. 2019. “La police fédérale doit mettre un terme à son projet de reconnaissance faciale à Zaventem.” <em>L'Avenir</em>, September 20. <a target="_blank" href="https://www.lavenir.net/cnt/dmf20190920_01382727/la-police-federale-doit-mettre-un-terme-a-son-projet-de-reconnaissance-faciale-a-zaventem">https://www.lavenir.net/cnt/dmf20190920_01382727/la-police-federale-doit-mettre-un-terme-a-son-projet-de-reconnaissance-faciale-a-zaventem</a>.</p>
<p>La Quadrature du Net. 2020. “Our Legal Action against the Use of Facial Recognition by the French Police.” <em>La Quadrature du Net</em>. <a target="_blank" href="https://www.laquadrature.net/en/2020/09/21/our-legal-action-against-the-use-of-facial-recognition-by-the-french-police/">https://www.laquadrature.net/en/2020/09/21/our-legal-action-against-the-use-of-facial-recognition-by-the-french-police/</a></p>
<p>La Quadrature du Net et al. 2019. “Open Letter: Ban Security and Surveillance Facial Recognition.” 19 December 2019. <a target="_blank" href="https://www.laquadrature.net/en/2019/12/19/joint-letter-from-80-organisations-ban-security-and-surveillance-facial-recognition/">https://www.laquadrature.net/en/2019/12/19/joint-letter-from-80-organisations-ban-security-and-surveillance-facial-recognition/</a></p>
<p>Lavrysen, Luc, Jan Theunis, Jurgen Goossens, Pieter Cannoot and Viviane Meerschaert. 2017. “Developments in Belgian Constitutional Law: The Year 2016 in Review.” <em>International Journal of Constitutional Law</em> 15(3): 774-84. <a target="_blank" href="https://doi.org/10.1093/icon/mox060">https://doi.org/10.1093/icon/mox060</a></p>
<p>Lippens, Jan, and Michel Vandersmissen. 2019. “Topman federale politie: 'We gaan camera's met gezichtsherkenning inzetten in Zaventem'.” <em>Knack</em>, July 10. <a target="_blank" href="https://www.knack.be/nieuws/belgie/topman-federale-politie-we-gaan-camera-s-met-gezichtsherkenning-inzetten-in-zaventem/article-longread-1485633.html">https://www.knack.be/nieuws/belgie/topman-federale-politie-we-gaan-camera-s-met-gezichtsherkenning-inzetten-in-zaventem/article-longread-1485633.html</a>.</p>
<p>Lum, Kristian, and William Isaac. 2016. “To Predict and Serve?” <em>Significance</em> 13(5): 14-19. <a target="_blank" href="https://doi.org/10.1111/j.1740-9713.2016.00960.x">https://doi.org/10.1111/j.1740-9713.2016.00960.x</a></p>
<p>Lumi. 2020. “La control room di Venezia e la rete di videosorveglianza urbana.” <em>Lumi</em>. <a target="_blank" href="https://www.lumi4innovation.it/control-room-venezia-videosorveglianza-urbana/">https://www.lumi4innovation.it/control-room-venezia-videosorveglianza-urbana/</a></p>
<p>Ma, Alexandra. 2018. “China is building a vast civilian surveillance network — here are 10 ways it could be feeding its creepy social credit system.” <em>Business Insider Nederland</em>. <a target="_blank" href="https://www.businessinsider.nl/how-china-is-watching-its-citizens-in-a-modern-surveillance-state-2018-4/">https://www.businessinsider.nl/how-china-is-watching-its-citizens-in-a-modern-surveillance-state-2018-4/</a></p>
<p>Mac, Ryan, Caroline Haskins, and Logan McDonald. 2020. “Clearview's Facial Recognition App Has Been Used By The Justice Department, ICE, Macy's, Walmart, And The NBA.” <em>BuzzFeed News</em>. <a target="_blank" href="https://www.buzzfeednews.com/article/ryanmac/clearview-ai-fbi-ice-global-law-enforcement">https://www.buzzfeednews.com/article/ryanmac/clearview-ai-fbi-ice-global-law-enforcement</a></p>
<p>Malevé, Nicolas. 2020. “On the Data Set's Ruins.” <em>AI &amp; Society</em>. <a target="_blank" href="https://doi.org/10.1007/s00146-020-01093-w">https://doi.org/10.1007/s00146-020-01093-w</a></p>
<p>Ministerium für Inneres, Digitalisierung und Migration. 2020. <em>Antwort Auf Eine Kleine Anfrage Im Landtag von Baden-Württemberg: Zwischenergebnisse Des Pilotprojekts Zur Intelligenten Videoüberwachung in Mannheim</em>. Stuttgart, Germany. <a target="_blank" href="https://www.landtag-bw.de/files/live/sites/LTBW/files/dokumente/WP16/Drucksachen/8000/16_8128_D.pdf">https://www.landtag-bw.de/files/live/sites/LTBW/files/dokumente/WP16/Drucksachen/8000/16_8128_D.pdf</a>.</p>
<p>Monroy, Matthias. 2018. “Kritik an G20-Gesichtserkennung: Neue Dimension staatlicher Ermittlungs- und Kontrolloptionen”. <a target="_blank" href="https://netzpolitik.org/2018/kritik-an-g20-gesichtserkennung-als-neue-dimension-staatlicher-ermittlungs-und-kontrolloptionen/">https://netzpolitik.org/2018/kritik-an-g20-gesichtserkennung-als-neue-dimension-staatlicher-ermittlungs-und-kontrolloptionen/</a>.</p>
<p>Monroy, Matthias. 2020. “INPOL-Datei: Deutlich mehr Gesichtserkennung bei Bundespolizei und Kriminalämtern”, <em>Netzpolitik.org.</em> <a target="_blank" href="https://netzpolitik.org/2020/deutlich-mehr-gesichtserkennung-bei-bundespolizei-und-kriminalaemtern/">https://netzpolitik.org/2020/deutlich-mehr-gesichtserkennung-bei-bundespolizei-und-kriminalaemtern/</a>.</p>
<p>Mozur, Paul. 2018. “Inside China's Dystopian Dreams: A.I., Shame and Lots of Cameras.” <em>New York Times</em>, 8 July. <a target="_blank" href="https://www.nytimes.com/2018/07/08/business/china-surveillance-technology.html">https://www.nytimes.com/2018/07/08/business/china-surveillance-technology.html</a></p>
<p>NAIH. 2018. “GDPR Communique to Hungarian Government.” Accessed 7 April 2021. <a target="_blank" href="https://www.naih.hu/files/NAIH-5578-3-2018-J-181001.PDF">https://www.naih.hu/files/NAIH-5578-3-2018-J-181001.PDF</a>.</p>
<p>NAIH. 2019. “A Nemzeti Adatvédelmi és Információszabadság Hatóság Beszámolója a 2018. évi tevékenységéről B/4542”. <a target="_blank" href="https://www.naih.hu/eves-beszamolok">https://www.naih.hu/eves-beszamolok</a></p>
<p>Najibi, Alex. 2020. “Racial Discrimination in Face Recognition Technology.” <em>Science in the News</em>, 24 October 2020. <a target="_blank" href="https://sitn.hms.harvard.edu/flash/2020/racial-discrimination-in-face-recognition-technology/">https://sitn.hms.harvard.edu/flash/2020/racial-discrimination-in-face-recognition-technology/</a></p>
<p>NEC. 2021. “Bio-IDiom — NEC's Biometric Authentication Brand.” <em>NEC</em>. <a target="_blank" href="https://www.nec.com/en/global/techrep/journal/g18/n02/180203.html">https://www.nec.com/en/global/techrep/journal/g18/n02/180203.html</a>. Accessed 14 July 2021.</p>
<p>Nesterova, Irena. 2020. "Mass Data Gathering and Surveillance: The Fight against Facial Recognition Technology in the Globalized World". <em>SHS Web of Conferences</em> 74: 03006.</p>
<p>Newsroom. 2020. “Shaping Europe's Digital Future: What You Need to Know.” <em>Modern Diplomacy</em>, 22 February. <a target="_blank" href="https://moderndiplomacy.eu/2020/02/22/shaping-europes-digital-future-what-you-need-to-know/">https://moderndiplomacy.eu/2020/02/22/shaping-europes-digital-future-what-you-need-to-know/</a>.</p>
<p>Nice Premium. 2017. “La Smart City au service de la Safe City.” <em>Nice Premium</em>, July 7. <a target="_blank" href="https://www.nice-premium.com/actualite,42/local,5/nice-la-smartcity-au-service-de-la-safe-city,21769.html">https://www.nice-premium.com/actualite,42/local,5/nice-la-smartcity-au-service-de-la-safe-city,21769.html</a>.</p>
<p>Nieuwsuur. 2020a. “Hoe Algoritmes de Stad Besturen.” <a target="_blank" href="https://www.youtube.com/watch?v=gJDA4t6lIgY">https://www.youtube.com/watch?v=gJDA4t6lIgY</a></p>
<p>Nieuwsuur. 2020b. “Nieuwsuur #307, 9-11-2020.” <em>NOS-NTR</em>. <a target="_blank" href="https://www.npostart.nl/nieuwsuur/09-11-2020/VPWON_1310969">https://www.npostart.nl/nieuwsuur/09-11-2020/VPWON_1310969</a></p>
<p>Nishiyama, Hidefumi. 2018. “Crowd Surveillance: The (in)Securitization of the Urban Body.” <em>Security Dialogue</em> 49(3): 200-216. <a target="_blank" href="https://doi.org/10.1177/0967010617741436">https://doi.org/10.1177/0967010617741436</a></p>
<p>NIST. 2010. <em>Special Database 32—Multiple Encounter Dataset (MEDS)</em>. NIST. <a target="_blank" href="https://www.nist.gov/itl/iad/image-group/special-database-32-multiple-encounter-dataset-meds">https://www.nist.gov/itl/iad/image-group/special-database-32-multiple-encounter-dataset-meds</a></p>
<p>OpenCV. 2021. <em>About</em>. OpenCV. <a target="_blank" href="https://opencv.org/about/">https://opencv.org/about/</a></p>
<p>Organe de Contrôle de l'Information Policière. 2019. “Rapport de Visite et de Surveillance - Synthèse version publique. DIO19005.” Brussels: Organe de Contrôle de l'Information Policière.</p>
<p>Organe de Contrôle de l'Information Policière. 2021. “Supervisory Body for Police Information.” Brussels: Organe de Contrôle de l'Information Policière. <a target="_blank" href="https://www.controleorgaan.be/en/">https://www.controleorgaan.be/en/</a>.</p>
<p>Overgaard, S. 2019. “A Soccer Team In Denmark Is Using Facial Recognition To Stop Unruly Fans.” <em><a target="_blank" href="http://npr.org/">NPR.org</a></em>. <a target="_blank" href="https://www.npr.org/2019/10/21/770280447/a-soccer-team-in-denmark-is-using-facial-recognition-to-stop-unruly-fans">https://www.npr.org/2019/10/21/770280447/a-soccer-team-in-denmark-is-using-facial-recognition-to-stop-unruly-fans</a></p>
<p>Parliamentary Assembly of the Council of Europe. 2017. <em>Recommendation 2102 (2017) on Technological Convergence, Artificial Intelligence and Human Rights, adopted on 28 April 2017</em>. Strasbourg: Council of Europe.</p>
<p>Prins, Aliou. 2021. “Collecte des empreintes digitales, reconnaissance faciale… Notre vie privée en danger ?” <em>Moustique</em>. <a target="_blank" href="https://www.moustique.be/28152/collecte-des-empreintes-digitales-reconnaissance-faciale-notre-vie-privee-en-danger">https://www.moustique.be/28152/collecte-des-empreintes-digitales-reconnaissance-faciale-notre-vie-privee-en-danger</a></p>
<p>Purtova, Nadezhda. 2018. “Between the GDPR and the Police Directive: Navigating through the maze of information sharing in public-private partnerships.” <em>International Data Privacy Law</em>, Vol. 8, 52.</p>
<p>Quevillon, Joey. 2012. “Video Motion Detection and Tracking for Surveillance Applications.” Thesis, University of Victoria. <a target="_blank" href="https://dspace.library.uvic.ca/handle/1828/4145">https://dspace.library.uvic.ca/handle/1828/4145</a></p>
<p>Redactie Inbraakvrije Wijk. 2019. “Sensoren in Het Carlo Collodihof.” <em>Inbraakvrije Wijk</em>. <a target="_blank" href="https://inbraakvrijewijk.nl/sensoren-op-het-carlo-collodihof/">https://inbraakvrijewijk.nl/sensoren-op-het-carlo-collodihof/</a></p>
<p>Redactie LikeJeWijk. 2021. “Update Fieldlab Inbraakvrije Wijk.” <em>Wijkgids Lombardijen</em>. <a target="_blank" href="https://www.likejewijk.nl/lombardijen/update-fieldlab-inbraakvrije-wijk/">https://www.likejewijk.nl/lombardijen/update-fieldlab-inbraakvrije-wijk/</a></p>
<p>Renaissance Numérique. 2019. <em>Reconnaissance Faciale: Quel regard des français?</em> Paris: Renaissance Numérique. <a target="_blank" href="https://www.renaissancenumerique.org/ckeditor_assets/attachments/449/rn-sondage_reconnaissancefaciale.pdf">https://www.renaissancenumerique.org/ckeditor_assets/attachments/449/rn-sondage_reconnaissancefaciale.pdf</a></p>
<p>Rollet, Charles. 2021. <em>EU Parliament Removes Hikvision, Citing Human Rights Abuses</em>. Bethlehem, PA (USA): IPVM. <a target="_blank" href="https://ipvm.com/reports/hik-eu">https://ipvm.com/reports/hik-eu</a></p>
<p>Schemm, Martin. 2018. “Einführung Der Automatisierten Gesichtserkennung Beanstandet.” Hamburg: Hamburgische Beauftragte für Datenschutz und Informationsfreiheit der Freien und Hansestadt. <a target="_blank" href="https://datenschutz-hamburg.de/pressemitteilungen/2018/08/2018-09-31-polhh-g20-videmo360">https://datenschutz-hamburg.de/pressemitteilungen/2018/08/2018-09-31-polhh-g20-videmo360</a>.</p>
<p>Schlagwein, Felix. 2020. “Hungary Is No Longer a Democracy Says Hungarian Legal Scholar.” <em>Deutsche Welle</em>. Accessed 19 July 2021. <a target="_blank" href="https://www.dw.com/en/hungary-is-no-longer-a-democracy-says-hungarian-legal-scholar/a-53442394">https://www.dw.com/en/hungary-is-no-longer-a-democracy-says-hungarian-legal-scholar/a-53442394</a>.</p>
<p>Schouten, Socrates, and Teuntje Bril. 2019. “Volg Jij Nog Waar Je Gevolgd Wordt?” Smart Society Case nr. 1. Amsterdam: Waag; Den Haag: Vereniging van Nederlandse Gemeenten. <a target="_blank" href="https://waag.org/sites/waag/files/2020-07/VNG-SSC-1-Beslissen-over-slimme-technologie.pdf">https://waag.org/sites/waag/files/2020-07/VNG-SSC-1-Beslissen-over-slimme-technologie.pdf</a></p>
<p>Segal, Zach. 2020. <em>Gait Recognition Examined</em>. Bethlehem, PA (USA): IPVM. <a target="_blank" href="https://ipvm.com/reports/gait-recognition-surveilance">https://ipvm.com/reports/gait-recognition-surveilance</a></p>
<p>Shung, Koo Ping. 2020. “Accuracy, Precision, Recall or F1?” <em>Medium</em>. <a target="_blank" href="https://towardsdatascience.com/accuracy-precision-recall-or-f1-331fb37c5cb9">https://towardsdatascience.com/accuracy-precision-recall-or-f1-331fb37c5cb9</a></p>
<p>Snow, Jacob. 2018. “Amazon's Face Recognition Falsely Matched 28 Members of Congress With Mugshots.” <em>American Civil Liberties Union</em>. <a target="_blank" href="https://www.aclu.org/blog/privacy-technology/surveillance-technologies/amazons-face-recognition-falsely-matched-28">https://www.aclu.org/blog/privacy-technology/surveillance-technologies/amazons-face-recognition-falsely-matched-28</a></p>
<p>Spirk, József. 2019. “A Parkolóautomatákból Is Pintér Kamerái Pásztázhatják Az Arcokat.” <em>24.hu</em>, 7 January 2019. Accessed 21 April 2021. <a target="_blank" href="https://24.hu/belfold/2019/01/07/terfigyelo-kamerak-belugyminiszterium-pinter-sandor-szitakoto/">https://24.hu/belfold/2019/01/07/terfigyelo-kamerak-belugyminiszterium-pinter-sandor-szitakoto/</a>.</p>
<p>Statista. 2021. “Straftaten in Deutschland bis 2020”. <em>Statista</em>, <a target="_blank" href="https://de.statista.com/statistik/daten/studie/197/umfrage/straftaten-in-deutschland-seit-1997/">https://de.statista.com/statistik/daten/studie/197/umfrage/straftaten-in-deutschland-seit-1997/</a>.</p>
<p>Stojkovski, Bojan. 2019. “Big Brother Comes to Belgrade.” <em>Foreign Policy</em>. <a target="_blank" href="https://foreignpolicy.com/2019/06/18/big-brother-comes-to-belgrade-huawei-china-facial-recognition-vucic/">https://foreignpolicy.com/2019/06/18/big-brother-comes-to-belgrade-huawei-china-facial-recognition-vucic/</a></p>
<p>Stolton, Samuel. 2020. “EU Data Watchdog 'Very Worried' by Hungary's GDPR Suspension.” <em>Euractiv</em>, 18 May 2020. <a target="_blank" href="https://www.euractiv.com/section/data-protection/news/eu-data-watchdog-very-worried-by-hungarys-gdpr-suspension/">https://www.euractiv.com/section/data-protection/news/eu-data-watchdog-very-worried-by-hungarys-gdpr-suspension/</a>.</p>
<p>Suresh, Harini. 2019. “The Problem with 'Biased Data'.” <em>Medium</em>. <a target="_blank" href="https://harinisuresh.medium.com/the-problem-with-biased-data-5700005e514c">https://harinisuresh.medium.com/the-problem-with-biased-data-5700005e514c</a></p>
<p>Sustainder. 2021. “Anne.” <a target="_blank" href="https://sustainder.com/en/products/sustainder-anne">https://sustainder.com/en/products/sustainder-anne</a>.</p>
<p>Szalai, Anna. 2019. “A NER-testvér szemmel tart: jön a totális megfigyelés?” <em>Magyarnarancs.hu</em>, 24 March 2019. Accessed 7 April 2021. <a target="_blank" href="https://magyarnarancs.hu/belpol/a-ner-testver-szemmel-tart-117270">https://magyarnarancs.hu/belpol/a-ner-testver-szemmel-tart-117270</a>.</p>
<p>TASZ. 2021. “Surveilled but not consulted: Citizens living under constant technological surveillance.” <em>TASZ</em>. <a target="_blank" href="https://hclu.hu/en/articles/surveilled-but-not-consulted">https://hclu.hu/en/articles/surveilled-but-not-consulted</a></p>
<p>Technopolice. 2021. “Toulouse.” <em>Technopolice</em>. <a target="_blank" href="https://technopolice.fr/toulouse/">https://technopolice.fr/toulouse/</a></p>
<p>TELEFI Project. 2021. Summary Report of the project “Towards the European Level Exchange of Facial Images”. <a target="_blank" href="https://www.telefi-project.eu/sites/default/files/TELEFI_SummaryReport.pdf">https://www.telefi-project.eu/sites/default/files/TELEFI_SummaryReport.pdf</a></p>
<p>Thales Group. 2020. “Electronic ID cards in Belgium: the keystone of eGovernment.” <a target="_blank" href="https://www.thalesgroup.com/en/markets/digital-identity-and-security/government/customer-cases/belgium">https://www.thalesgroup.com/en/markets/digital-identity-and-security/government/customer-cases/belgium</a>.</p>
<p>The Hague Security Delta. 2021. “Stratumseind.” <a target="_blank" href="https://www.thehaguesecuritydelta.com/innovation/living-labs/lab/3-stratumseind">https://www.thehaguesecuritydelta.com/innovation/living-labs/lab/3-stratumseind</a>.</p>
<p>UNHRC. 2019. 41<sup>st</sup> Session, UN Doc. A/HRC/41/41 (17 May 2019).</p>
<p>Untersinger, Martin. 2019. “La CNIL plaide pour un « code de la route » de la reconnaissance faciale.” <em>Le Monde</em>, November 15.</p>
<p>Van Amelsvoort, Adri, and John Riemen. 2018. “Meerluikfoto's van verdachten.” <em>Het Tijdschrift voor de Politie</em>, 6 November. <a target="_blank" href="https://www.websitevoordepolitie.nl/meerluikfotos-van-verdachten/">https://www.websitevoordepolitie.nl/meerluikfotos-van-verdachten/</a></p>
<p>van Barneveld, D., D. Crover, and A. Yeh. 2018. <em>Sensoren En de Rol van Gemeenten</em>. VNG Realisatie Whitepaper. Den Haag: VNG Realisatie. <a target="_blank" href="https://www.vngrealisatie.nl/sites/default/files/2019-03/Whitepaper%20Sensordata%E2%80%93pdf.pdf">https://www.vngrealisatie.nl/sites/default/files/2019-03/Whitepaper%20Sensordata%E2%80%93pdf.pdf</a> (March 24, 2021).</p>
<p>van Brakel, Rosamunde. 2020. “ADM Systems in the COVID-19 Pandemic: Belgium.” In AlgorithmWatch, <em>Automating Society Report 2020</em>. <a target="_blank" href="https://algorithmwatch.org/en/automating-society-2020-COVID19/belgium">https://algorithmwatch.org/en/automating-society-2020-COVID19/belgium</a>.</p>
<p>van de Ven, Ruben. 2017. “Choose how you feel; you have seven options”. <em>Institute of Network Cultures</em>. <a target="_blank" href="https://networkcultures.org/longform/2017/01/25/choose-how-you-feel-you-have-seven-options/">https://networkcultures.org/longform/2017/01/25/choose-how-you-feel-you-have-seven-options/</a></p>
<p>Vazquez, Coline. 2020. “Reconnaissance faciale : comment les forces de police y ont-elles recours en Europe?” <em>L'Express</em>, February 19. <a target="_blank" href="https://lexpansion.lexpress.fr/high-tech/reconnaissance-faciale-comment-les-forces-de-police-y-ont-elles-recours-en-europe_2118639.html">https://lexpansion.lexpress.fr/high-tech/reconnaissance-faciale-comment-les-forces-de-police-y-ont-elles-recours-en-europe_2118639.html</a>.</p>
<p>Venier, Silvia, and Emilio Mordini. 2010. “Second-generation biometrics.” In Finn, Rachel and David Wright, <em>PRESCIENT Deliverable 2: Privacy and emerging fields of science and technology: Towards a common framework for privacy and ethical assessment</em>. <a target="_blank" href="https://prescient-project.eu/prescient/inhalte/download/PRESCIENT_D2.pdf">https://prescient-project.eu/prescient/inhalte/download/PRESCIENT_D2.pdf</a></p>
<p>Verbeke, Hans. 2019. “De slimste camera's hangen in Kortrijk: Politie vindt de man met de blauwe trui in mum van tijd.” <em>HLN</em>. <a target="_blank" href="https://www.hln.be/kortrijk/de-slimste-cameras-hangen-in-kortrijk-politie-vindt-de-man-met-de-blauwe-trui-in-mum-van-tijd~af252dfc/">https://www.hln.be/kortrijk/de-slimste-cameras-hangen-in-kortrijk-politie-vindt-de-man-met-de-blauwe-trui-in-mum-van-tijd~af252dfc/</a></p>
<p>Verseck, Keno. 2020. “Hungary and the EU: Viktor Orban's Battle with the Rule of Law.” <em>Deutsche Welle</em>. <a target="_blank" href="https://www.dw.com/en/hungary-viktor-orban-rule-of-law-eu-budget/a-55581020">https://www.dw.com/en/hungary-viktor-orban-rule-of-law-eu-budget/a-55581020</a>.</p>
<p>Vincent, James. 2021. “Automatic gender recognition tech is dangerous, say campaigners: It's time to ban it.” <em>The Verge</em>, 14 April. <a target="_blank" href="https://www.theverge.com/2021/4/14/22381370/automatic-gender-recognition-sexual-orientation-facial-ai-analysis-ban-campaign">https://www.theverge.com/2021/4/14/22381370/automatic-gender-recognition-sexual-orientation-facial-ai-analysis-ban-campaign</a></p>
<p>ViNotion. 2020. “TV Program Nieuwsuur about Luminaires with ViNotion Surveillance Software.” <a target="_blank" href="http://vinotion.nl/en/press-releases/tv-program-nieuwsuur-about-luminaires-with-vinotion-surveillance-software/">http://vinotion.nl/en/press-releases/tv-program-nieuwsuur-about-luminaires-with-vinotion-surveillance-software/</a></p>
<p>Wagner, Ben. 2021. “Whose Politics? Whose Rights? Transparency, Capture and Dual-Use Export Controls.” <em>Security and Human Rights</em>: 1-12. <a target="_blank" href="https://doi.org/10.1163/18750230-31010006">https://doi.org/10.1163/18750230-31010006</a></p>
<p>Wang, Maya. 2018. <em>“Eradicating Ideological Viruses”: China's Campaign of Repression Against Xinjiang's Muslims</em>. New York: Human Rights Watch. <a target="_blank" href="https://www.hrw.org/report/2018/09/09/eradicating-ideological-viruses/chinas-campaign-repression-against-xinjiangs">https://www.hrw.org/report/2018/09/09/eradicating-ideological-viruses/chinas-campaign-repression-against-xinjiangs</a>.</p>
<p>Wazulin, Lisa. 2019a. “Klarer Vorteil für den Bürger”. <em>Mannheimer Morgen</em>. <a target="_blank" href="https://www.mannheimer-morgen.de/orte/mannheim_artikel,-mannheim-klarer-vorteil-fuer-den-buerger-_arid,1511931.html">https://www.mannheimer-morgen.de/orte/mannheim_artikel,-mannheim-klarer-vorteil-fuer-den-buerger-_arid,1511931.html</a>.</p>
<p>Wazulin, Lisa. 2019b. “Erschreckend gleichgültig Kommentare”. <em>Mannheimer Morgen</em> <a target="_blank" href="https://www.mannheimer-morgen.de/meinung/kommentare_artikel,-kommentar-erschreckend-gleichgueltig-_arid,1512275.html">https://www.mannheimer-morgen.de/meinung/kommentare_artikel,-kommentar-erschreckend-gleichgueltig-_arid,1512275.html</a>.</p>
<p>Xie, Ning, Gabrielle Ras, Marcel van Gerven, and Derek Doran. 2020. “Explainable Deep Learning: A Field Guide for the Uninitiated.” <em>arXiv:2004.14545 [cs, stat]</em>. <a target="_blank" href="http://arxiv.org/abs/2004.14545">http://arxiv.org/abs/2004.14545</a></p>
</section>
<section id="annex-cases" class="level1 list-paragraph">
<h1 class="list-paragraph nocount">ANNEX: CASES</h1>
<section id="cjeu-decisions" class="level2">
<h2>CJEU Decisions</h2>
<ul>
<li><p>C-291/12 <em>Michael Schwarz v Stadt Bochum </em>ECLI:EU:C:2013:670.</p></li>
<li><p>Joined Cases C-293/12 and C-594/12 <em>Digital Rights Ireland v Minister for Communications, Marine and Natural Resources and Others</em> ECLI:EU:C:2014:238</p></li>
<li><p>C-582/14, <em>Patrick Breyer v Bundesrepublik Deutschland</em> ECLI:EU:C:2016:779</p></li>
<li><p>C-203/15 <em>Tele2 Sverige AB v Post och telestyrelsen and Secretary of State for the Home Department v Tom Watson and Others</em> ECLI:EU:C:2016:970</p></li>
<li><p>Opinion 1/15 of the Court (Grand Chamber) ECLI:EU:C:2017:592</p></li>
<li><p>C-434/16 <em>Peter Nowak v Data Protection Commissioner</em> ECLI:EU:C:2017:994</p></li>
<li><p>Joined Cases C-511/18, C-512/18 and C-520/18 <em>La Quadrature du Net and Others v Premier Ministre and Others</em> ECLI:EU:C:2020:791.</p></li>
</ul>
</section>
<section id="ecthr-decisions" class="level2">
<h2>ECtHR decisions</h2>
<ul>
<li><p><em>Klass and others v Germany</em> (1979-80) 2 EHRR 214</p></li>
<li><p><em>Peck v UK</em> (2003) 36 EHRR 41 </p></li>
<li><p><em>PG and JH v UK </em>(2008) 46 EHRR 51</p></li>
<li><p><em>S and Marper v UK</em> (2009) 48 EHRR 50</p></li>
<li><p><em>Uzun v Germany</em> (2011) 53 EHRR 24</p></li>
<li><p><em>Gaughran v The United Kingdom </em>Appl No 45245/15 (13.06.2020)</p></li>
</ul>
</section>
<section id="decisions-of-national-courts" class="level2">
<h2>Decisions of National Courts</h2>
<ul>
<li><p>French Constitutional Council, Decision N° 2004-492 DC of 2 March 2004</p></li>
<li><p>French Constitutional Council, Decision n° 2012-652 DC of 22 March 2012</p></li>
<li><p>Administrative Court of Marseille, Decision N°1901249 of 27 February 2020</p></li>
<li><p>Cour constitutionnelle, N° 2/2021, 14 January 2021</p></li>
</ul>
</section>
</section>
<section class="footnotes" role="doc-endnotes">
<hr />
<ol>
<li id="fn1" role="doc-endnote"><p><a href="https://edps.europa.eu/press-publications/press-news/press-releases/2021/edpb-edps-call-ban-use-ai-automated-recognition_en" target="_blank">https://edps.europa.eu/press-publications/press-news/press-releases/2021/edpb-edps-call-ban-use-ai-automated-recognition_en</a><a href="#fnref1" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn2" role="doc-endnote"><p><a href="https://edri.org/wp-content/uploads/2021/08/European-Digital-Rights-EDRi-submission-to-European-Commission-adoption-consultation-on-the-Artificial-Intelligence-Act-August-2021.pdf" target="_blank">https://edri.org/wp-content/uploads/2021/08/European-Digital-Rights-EDRi-submission-to-European-Commission-adoption-consultation-on-the-Artificial-Intelligence-Act-August-2021.pdf</a><a href="#fnref2" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn3" role="doc-endnote"><p>The one-and-a half meter monitor is trained on the COCO dataset, published by <strong>Microsoft</strong> and <strong><a class="maplink" data-title="Facebook AI Research">Facebook AI</a></strong><a href="#fnref3" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn4" role="doc-endnote"><p>Relatedly, see the Spotify controversy (Access Now 2021)<a href="#fnref4" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn5" role="doc-endnote"><p>Partners in the <a class="maplink" data-title="Living Lab International Zone">Living Lab International Zone</a> include: <a class="maplink" data-title="Municipality of The Hague">Municipality of The Hague</a>, <a class="maplink" data-title="Municipality of The Hague">The Hague Police Region</a>, <a class="maplink" data-title="TNO">TNO</a>, <a class="maplink" data-title="Thales">Thales</a>, <a class="maplink" data-title="Thales">Sorama</a>, <a class="maplink" data-title="Connection Systems">Connection Systems</a>, Crowd Sense, The Hague Security Region, <a class="maplink" data-title="Europol">Europol</a>, <a class="maplink" data-title="Eurojust">Eurojust</a>, <a class="maplink" data-title="OPCW">OPCW</a>, <a class="maplink" data-title="IRMCT">IRMCT</a>, <a class="maplink" data-title="Peace Palace">Peace Palace</a>, <a class="maplink" data-title="Catshuis">Catshuis</a>, Government Buildings Agency, <a class="maplink" data-title=" Dutch Ministry of Foreign Affairs">Ministry of Foreign Affairs</a> and <a class="maplink" data-title="The Hague Security Delta (HSD)">The Hague Security Delta</a>.<a href="#fnref5" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn6" role="doc-endnote"><p>For example, a in a 4K UHD image, composed of 3840 × 2160 pixels, a face occupying 300 x 300 pixels would need to occupy approximately 1/100<sup>th</sup> of the screens surface. In a HD image composed of 1920 x 1080 pixels, the same 300 x 300 pixel face would occupy about 1/25<sup>th</sup> of the screens surface.<a href="#fnref6" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn7" role="doc-endnote"><p>“The ViolaJones object detection framework is an object detection framework which was proposed in 2001 by Paul Viola and Michael Jones. Although it can be trained to detect a variety of object classes, it was motivated primarily by the problem of face detection.” Wikipedia, “ViolaJones object detection framework” <a target="_blank" href="https://en.wikipedia.org/wiki/Viola%E2%80%93Jones_object_detection_framework">https://en.wikipedia.org/wiki/Viola%E2%80%93Jones_object_detection_framework</a><a href="#fnref7" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn8" role="doc-endnote"><p>See: <a target="_blank" href="http://vis-www.cs.umass.edu/lfw/results.html">http://vis-www.cs.umass.edu/lfw/results.html</a>, <a target="_blank" href="https://cocodataset.org/#detection-leaderboard">https://cocodataset.org/#detection-leaderboard</a> and <a target="_blank" href="https://www.nist.gov/programs-projects/face-challenges">https://www.nist.gov/programs-projects/face-challenges</a>.<a href="#fnref8" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn9" role="doc-endnote"><p>Both projects were shut down by the <strong>CNIL,</strong> the French DPA.<a href="#fnref9" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn10" role="doc-endnote"><p>The project was shut down by the <strong><a class="maplink" data-title="Swedish Authority for Privacy Protection (IMY)">Swedish Authority for Privacy Protection (IMY)</a></strong><a href="#fnref10" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn11" role="doc-endnote"><p>Criminal identification database, used by the <strong><a class="maplink" data-title="Austrian Criminal Intelligence Service">Austrian Criminal Intelligence Service</a></strong>, managed by the <strong><a class="maplink" data-title="Ministry of the Interior (Austria)">Austrian Ministry of Interior</a>.</strong><a href="#fnref11" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn12" role="doc-endnote"><p>The KASTU system interrogates two datasets: the <strong>Registered persons identifying features database (RETU)</strong> and <strong>Aliens database.</strong> It is managed by the <a class="maplink" data-title="National Bureau of Investigation (NBI)">National Bureau of Investigation (NBI)</a>, and can be used by the <strong><a class="maplink" data-title="Finnish Police">Finnish Police</a></strong>, the <strong><a class="maplink" data-title="Finnish Border Guard">Finnish Border Guard</a></strong> and the <strong><a class="maplink" data-title="Finnish Customs">Finnish Customs</a></strong>.<a href="#fnref12" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn13" role="doc-endnote"><p>Criminal case history database, managed by the <strong><a class="maplink" data-title="Ministry of Interior (France)">French Ministry of Interior</a></strong><a href="#fnref13" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn14" role="doc-endnote"><p>Criminal case management system, managed by the <strong><a class="maplink" data-title="German Federal Criminal Police Office (Bundeskriminalamt)">German Federal Criminal Police Office</a></strong> (Bundeskriminalamt)<a href="#fnref14" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn15" role="doc-endnote"><p>Managed by the <strong><a class="maplink" data-title="Video and Image Laboratory (Greek Police)">Video and Image Laboratory</a></strong> of the Audiovisual Evidence of the Department of Photography and Modus Operandi of the <a class="maplink" data-title="Hellenic Police Forensic Science Division">Hellenic Police Forensic Science Division</a><a href="#fnref15" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn16" role="doc-endnote"><p>The Facial Image registry is interrogated through a search engine developed by <a class="maplink" data-title="NEC">NEC</a>, and accessible to <strong>the</strong> <strong><a class="maplink" data-title="National Investigation Agency (Hungary)">National Investigation Agency</a></strong>, <strong>the</strong> <strong><a class="maplink" data-title="Criminal Courts (Hungary)">Criminal Courts</a></strong>, <strong>the</strong> <strong><a class="maplink" data-title="National Protective Service (Hungary)">National Protective Service</a></strong>, <strong>the</strong> <strong><a class="maplink" data-title="Counter-Terrorism Centre (Hungary)">Counter-Terrorism Centre</a></strong>, <strong>the</strong> <strong><a class="maplink" data-title="Hungarian Prison Service (Hungary)">Hungarian Prison Service</a></strong>, <strong>the <a class="maplink" data-title="The Prosecution Service of Hungary">Prosecution Service of Hungary</a></strong>, <strong>the <a class="maplink" data-title="The Public Administration (Hungary)">Public Administration</a></strong>, <strong>the</strong> <strong><a class="maplink" data-title="Special Service for National Security (Hungary)">Special Service for National Security</a></strong>, <strong>the</strong> <strong><a class="maplink" data-title="Intelligence Agencies (Hungary)">Intelligence Agencies</a></strong>, <strong>the</strong> <strong><a class="maplink" data-title="Hungarian Police">Hungarian Police</a></strong>, <strong>the <a class="maplink" data-title="The Hungarian Parliamentary Guard">Hungarian Parliamentary Guard</a></strong>, <strong><a class="maplink" data-title="Hungarian Ministry of Justice">Hungarian Ministry of Justice</a></strong>, <strong><a class="maplink" data-title="Witness Protection Service (Hungary)">Witness Protection Service</a></strong>, <strong>the</strong> <strong><a class="maplink" data-title="National Directorate-General for Aliens Policing (Hungary)">National Directorate-General for Aliens Policing</a> and <a class="maplink" data-title="Institution of the President of the Republic (Hungary)">Institution of the President of the Republic</a>.</strong> As of September 2020 the <a class="maplink" data-title="NOVA.mobil FR in Hungary">NOVA.Mobil application</a> has been launched for police officers to identify people on the streets who do not have identity documents with them (TELEFI 2021, 86).<a href="#fnref16" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn17" role="doc-endnote"><p>Automated Fingerprint Identification System. The system, managed by the <strong><a class="maplink" data-title="Ministry of Interior (Italy)">Italian ministry of interior</a></strong> can be interrogated via a software developed by the company <strong><a class="maplink" data-title="Reco 3.26">Reco 3.26</a></strong>, a subsidiary of <strong><a class="maplink" data-title="Parsec 3.26">Parsec 3.26</a></strong>. Another software used is provided by the japanese company <strong><a class="maplink" data-title="NEC">NEC</a></strong>.<a href="#fnref17" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn18" role="doc-endnote"><p>Biometric Data Processing System (criminal data array), supported by database software from <strong><a class="maplink" data-title="RIX Technologies">RIX Technologies</a></strong>, a search engine (<a class="maplink" data-title="MorphoTrust_ABIS_Search_Engine">MorphoTrust</a>)</strong> provided by <strong><a class="maplink" data-title="IDEMIA">Idemia</a> and <a class="maplink" data-title="Safran Group">Safran Group</a></strong> managed by the <strong><a class="maplink" data-title="Ministry of Interior (Latvia)">Latvian ministry of interior</a></strong>.<a href="#fnref18" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn19" role="doc-endnote"><p><strong><a class="maplink" data-title="HDR (Lithuania)">Habitoscopic Data Register</a></strong>, managed by the <strong><a class="maplink" data-title="Ministry of Interior (Lithuania)">Ministry of Interior (Lithuania)</a></strong><a href="#fnref19" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn20" role="doc-endnote"><p>Central Automatic TeChnology for Recognition of Persons, managed by the <strong><a class="maplink" data-title="Centrum voor Biometrie">Centrum voor Biometrie</a></strong>, connected to the <strong><a class="maplink" data-title="Dutch Judicial Information Service (Justid)">Dutch Judicial Information Service (Justid)</a>.</strong><a href="#fnref20" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn21" role="doc-endnote"><p>The database uses <strong>VeriLook</strong> and <strong>Face Trace</strong> software from the Lithuanian company <strong><a class="maplink" data-title="Neurotechnology">Neurotechnology</a>. It is managed by the <strong><a class="maplink" data-title="Ministry of Interior (Slovenia)">Ministry of Interior (Slovenia)</a>.</strong><a href="#fnref21" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn22" role="doc-endnote"><p>Automated Biometric Identification System, searchable by the <strong>IntellQ</strong> software from the company <strong><a class="maplink" data-title="IntellByte">IntellByte</a>,</strong> managed by the <strong><a class="maplink" data-title="Ministry of Interior (Croatia)">Ministry of the Interior</a> (Croatia).</strong><a href="#fnref22" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn23" role="doc-endnote"><p>Central Biometric Information System<a href="#fnref23" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn24" role="doc-endnote"><p>National Biometric Identification System, managed by the <strong><a class="maplink" data-title="Ministry of Interior (Romania)">Ministry of Interior (Romania)</a></strong><a href="#fnref24" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn25" role="doc-endnote"><p>Managed by the <strong><a class="maplink" data-title="The Photographic and Graphic Laboratory of Criminalistic Services">Photographic and Graphic Laboratory of Criminalistic Services</a>,</strong> using search software by the company <strong><a class="maplink" data-title="Unidas">Unidas</a></strong><a href="#fnref25" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn26" role="doc-endnote"><p>Managed by the <strong>Estonian Ministry of Interior</strong><a href="#fnref26" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn27" role="doc-endnote"><p><a class="maplink" data-title="IFRS (Interpol)">Interpol Facial Recognition System</a><a href="#fnref27" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn28" role="doc-endnote"><p>Source: TELEFI Report p.23.<a href="#fnref28" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn29" role="doc-endnote"><p>As detailed in CHAPTER 4. However, that does not mean that it is not subjected to similar legal frameworks.<a href="#fnref29" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn30" role="doc-endnote"><p>Developed as a partnership between the <strong><a class="maplink" data-title="Dutch Ministry of Justice & Security">Dutch Ministry of Justice &amp; Security</a></strong>, the <strong><a class="maplink" data-title="Dutch Institute for Technology Safety and Security (DITSS)">Dutch Institute for Technology Safety and Security (DITSS)</a></strong>, the <strong><a class="maplink" data-title="Rotterdam Municipality">Rotterdam Municipality</a></strong>, insurance company <strong><a class="maplink" data-title="Interpolis">Interpolis</a></strong>, the <strong><a class="maplink" data-title="Dutch Police">Dutch Police</a></strong>, the <strong><a class="maplink" data-title="ViNotion">ViNotion</a></strong>, the <strong><a class="maplink" data-title="Avans Hogeschool">Avans Hogeschool</a></strong>, the <strong><a class="maplink" data-title="Munisense">Munisense</a></strong>, the <strong><a class="maplink" data-title="Sustainder">Sustainder</a></strong>, the <strong><a class="maplink" data-title="Twente University">Twente University</a></strong>, the <strong><a class="maplink" data-title="Max Planck Institute for the Study of Crime, Security and Law">Max Planck Institute for the Study of Crime, Security and Law</a></strong>, and <strong><a class="maplink" data-title="The Network Institute">The Network Institute</a> (Vrije Universiteit Amsterdam).</strong><a href="#fnref30" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn31" role="doc-endnote"><p>Developed in partnership between the <strong><a class="maplink" data-title="Dutch Institute for Technology Safety and Security (DITSS)">Dutch Institute for Technology Safety and Security (DITSS)</a></strong>, <strong><a class="maplink" data-title="Atos">Atos</a></strong>, <strong>the <a class="maplink" data-title="Municipality of Eindhoven">Municipality of Eindhoven</a></strong>, <strong><a class="maplink" data-title="Tilburg University">Tilburg University</a></strong>, <strong><a class="maplink" data-title="Eindhoven University of Technology">Eindhoven University of Technology</a></strong>, <strong><a class="maplink" data-title="Intel">Intel</a></strong>, <strong><a class="maplink" data-title="Sorama">Sorama</a></strong>, and <strong>Axis Communications</strong>; it uses search software from <strong><a class="maplink" data-title="Oddity.ai">Oddity.ai</a></strong> (a spinout of <a class="maplink" data-title="Utrecht University">Utrecht University</a>) and <strong><a class="maplink" data-title="ViNotion">ViNotion</a></strong>.<a href="#fnref31" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn32" role="doc-endnote"><p>Developed by <a class="maplink" data-title="Retevision">Retevision</a>, <a class="maplink" data-title="Instituto Tecnológico de Castilla y León (ITCL)">Instituto Tecnológico de Castilla y León (ITCL)</a>, <a class="maplink" data-title="Centro para el Desarrollo Tecnológico Industrial">Centro para el Desarrollo Tecnológico Industrial</a>, <a class="maplink" data-title="Cellnex">Cellnex</a>, <a class="maplink" href="Herta Security">Herta Security</a>, <a class="maplink" href="Sngular">Sngular</a>, <a class="maplink" href="Emergya">Emergya</a>, SHS, <a class="maplink" href="Televés">Televés</a>, <a class="maplink" href="Universidad de Granada">Universidad de Granada</a>, <a class="maplink" href="Universidad Politecnica de Madrid">Universidad Politecnica de Madrid</a>, <a class="maplink" href="Universidad Carlos III.">Universidad Carlos III</a>.<a href="#fnref32" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn33" role="doc-endnote"><p>Using the software <strong><a class="maplink" data-title="SARI (Deployment)">SARI</a></strong> by the company <a class="maplink" data-title="Parsec 3.26">Parsec 3.26</a>, developed in partnership with <strong><a class="maplink" data-title="Telecom Italia">Telecom Italia</a></strong><a href="#fnref33" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn34" role="doc-endnote"><p>The Defenseur des Droits is a governmental watchdog on civil rights and liberties in France. See Defenseur des Droits (2021) for the call for a ban on facial recognition.<a href="#fnref34" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn35" role="doc-endnote"><p>While the <strong>eID project</strong> is not specific to Belgium, the country stands out for having piloted the project ahead of other EU member states. eID is a form of authentication rather than surveillance system - yet the constitution of a database of machine-readable identities participates to the construction of a digital infrastructure of surveillance that can be misuedmisused for biometric mass surveillance., as argued in chapter 3<a href="#fnref35" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn36" role="doc-endnote"><p>The technology is provided by <a class="maplink" data-title="RTS">RTS</a>, a security technology reseller.<a href="#fnref36" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn37" role="doc-endnote"><p>The COC, or Supervisory Body for Police Information is « the autonomous federal parliamentary body in charge of monitoring the management of police information and also the data controller for the integrated police service, the Passenger Information Unit and the General Inspectorate of the Federal and the Local Police. » (Organe de Controle de l'Information Policière 2021).<a href="#fnref37" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn38" role="doc-endnote"><p><a href="https://reclaimyourface.eu/" target="_blank">https://reclaimyourface.eu/</a><a href="#fnref38" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn39" role="doc-endnote"><p>The other companies are: Arclan Systems, Business Card Associates, <a class="maplink" data-title="Deveryware">Deveryware</a>, Egidium, Gemalto, Geol Semantics, Igo, Inria, Luceor, Onhys, <a class="maplink" data-title="IDEMIA">IDEMIA</a>, Sys, Sysnav and Yncrea.<a href="#fnref39" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn40" role="doc-endnote"><p>Banque Publique dInvestissement: French Public Investment Bank<a href="#fnref40" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn41" role="doc-endnote"><p>Comité de la Filière industrielle de la sécurité<a href="#fnref41" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn42" role="doc-endnote"><p>For the campaign, see: <a href="http://www.technopolice.fr" target="_blank">http://www.technopolice.fr</a><a href="#fnref42" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn43" role="doc-endnote"><p>Image from <a href="https://netzpolitik.org/2020/deutlich-mehr-gesichtserkennung-bei-bundespolizei-und-kriminalaemtern/" target="_blank">https://netzpolitik.org/2020/deutlich-mehr-gesichtserkennung-bei-bundespolizei-und-kriminalaemtern/</a><a href="#fnref43" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn44" role="doc-endnote"><p><a href="https://youtu.be/5WD9b6tWC0Q" target="_blank">https://youtu.be/5WD9b6tWC0Q</a><a href="#fnref44" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn45" role="doc-endnote"><p><a target="_blank" href="https://edps.europa.eu/press-publications/press-news/press-releases/2021/edpb-edps-call-ban-use-ai-automated-recognition_en">https://edps.europa.eu/press-publications/press-news/press-releases/2021/edpb-edps-call-ban-use-ai-automated-recognition_en</a><a href="#fnref45" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
<li id="fn46" role="doc-endnote"><p><a target="_blank" href="https://edri.org/wp-content/uploads/2021/08/European-Digital-Rights-EDRi-submission-to-European-Commission-adoption-consultation-on-the-Artificial-Intelligence-Act-August-2021.pdf">https://edri.org/wp-content/uploads/2021/08/European-Digital-Rights-EDRi-submission-to-European-Commission-adoption-consultation-on-the-Artificial-Intelligence-Act-August-2021.pdf</a><a href="#fnref46" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
</ol>
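<p>Two of the technical notes above can be made concrete with short sketches. First, the resolution arithmetic in note 6 can be checked directly; the following minimal Python sketch (the 300 × 300 pixel face crop is the assumption carried over from the note) reproduces the approximate 1/100 and 1/25 ratios:</p>
<pre><code>def face_area_fraction(frame_w, frame_h, face_px=300):
    """Fraction of the frame's surface covered by a square face crop."""
    return (face_px * face_px) / (frame_w * frame_h)

for label, (w, h) in {"4K UHD": (3840, 2160), "HD": (1920, 1080)}.items():
    ratio = 1 / face_area_fraction(w, h)
    # Prints "1/92" for 4K UHD and "1/23" for HD, i.e. roughly the
    # 1/100th and 1/25th of the frame stated in note 6.
    print(f"{label}: a 300 x 300 px face covers about 1/{round(ratio)} of the frame")</code></pre>
<p>Second, for the Viola–Jones framework quoted in note 7, the sketch below shows how such a detector is typically invoked, using the pretrained Haar cascade distributed with the OpenCV library (OpenCV 2021, cited above). This is an illustration, not a description of any system discussed in this report: the input file name is hypothetical, and the <code>scaleFactor</code> and <code>minNeighbors</code> values are common starting points rather than settings from any deployment.</p>
<pre><code>import cv2  # OpenCV (see OpenCV 2021 in the references)

# Load the pretrained frontal-face Haar cascade shipped with OpenCV.
cascade = cv2.CascadeClassifier(
    cv2.data.haarcascades + "haarcascade_frontalface_default.xml"
)

image = cv2.imread("street_scene.jpg")  # hypothetical input frame
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

# The classifier is slid across the image at multiple scales; each hit
# is returned as an (x, y, width, height) bounding box.
faces = cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5)
print(f"detected {len(faces)} candidate face(s)")</code></pre>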
</section>
<hr>
</article>
</main>
</body>
</html>