forked from security_vision/semantic_graph
WIP adding links to map
This commit is contained in:
parent
f0476ed2a1
commit
c81ec7bf18
2 changed files with 99 additions and 57 deletions
|
@ -588,8 +588,13 @@ h1.Title{
|
||||||
color: var(--color-bg-secondary);
|
color: var(--color-bg-secondary);
|
||||||
padding: 1rem;
|
padding: 1rem;
|
||||||
margin: 0 -1rem;
|
margin: 0 -1rem;
|
||||||
|
font-weight: bold;
|
||||||
}
|
}
|
||||||
.keypoints > p > strong{
|
.keypoints > p > strong{
|
||||||
margin-left:2.5rem;
|
margin-left:2.5rem;
|
||||||
text-transform: uppercase;
|
text-transform: uppercase;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
a.maplink{
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
|
@ -52,7 +52,43 @@
|
||||||
intersectionObserver.observe(caseEl);
|
intersectionObserver.observe(caseEl);
|
||||||
}
|
}
|
||||||
|
|
||||||
})
|
|
||||||
|
const linkEls = document.getElementsByClassName('maplink');
|
||||||
|
for (let linkEl of linkEls) {
|
||||||
|
linkEl.addEventListener('click', (ev) => {
|
||||||
|
const toSelect = typeof linkEl.dataset.title == 'undefined' || linkEl.dataset.title == 'none' ? null : frameEl.contentWindow.getIdForTitle(linkEl.dataset.title);
|
||||||
|
|
||||||
|
if(toSelect === null) {
|
||||||
|
frameEl.contentWindow.mapGraph.deselectNode();
|
||||||
|
frameEl.contentWindow.mapGraph.resetZoom();
|
||||||
|
} else {
|
||||||
|
const node = frameEl.contentWindow.mapGraph.graph.nodes.filter(n => n.id == toSelect)[0]
|
||||||
|
frameEl.contentWindow.mapGraph.selectNode(node);
|
||||||
|
}
|
||||||
|
|
||||||
|
})
|
||||||
|
linkEl.addEventListener('mouseover', (ev) => {
|
||||||
|
const toSelect = typeof linkEl.dataset.title == 'undefined' || linkEl.dataset.title == 'none' ? null : frameEl.contentWindow.getIdForTitle(linkEl.dataset.title);
|
||||||
|
if(toSelect){
|
||||||
|
|
||||||
|
const node = frameEl.contentWindow.mapGraph.graph.nodes.filter(n => n.id == toSelect)[0]
|
||||||
|
frameEl.contentWindow.mapGraph.hoverNode(false, node);
|
||||||
|
}
|
||||||
|
|
||||||
|
})
|
||||||
|
linkEl.addEventListener('mouseout', (ev) => {
|
||||||
|
const toSelect = typeof linkEl.dataset.title == 'undefined' || linkEl.dataset.title == 'none' ? null : frameEl.contentWindow.getIdForTitle(linkEl.dataset.title);
|
||||||
|
if(toSelect){
|
||||||
|
const node = frameEl.contentWindow.mapGraph.graph.nodes.filter(n => n.id == toSelect)[0]
|
||||||
|
frameEl.contentWindow.mapGraph.endHoverNode(node);
|
||||||
|
}
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// frame.contentWindow;
|
// frame.contentWindow;
|
||||||
|
@ -96,7 +132,7 @@
|
||||||
<li><a href="#the-dragonfly-project-hungary">The Dragonfly project (Hungary)</a>
|
<li><a href="#the-dragonfly-project-hungary">The Dragonfly project (Hungary)</a>
|
||||||
</li>
|
</li>
|
||||||
<li class="space"><a href="#recommendations">Recommendations</a></li>
|
<li class="space"><a href="#recommendations">Recommendations</a></li>
|
||||||
<li><a href="#references">REFERENCES</a></li>
|
<li class="space"><a href="#references">REFERENCES</a></li>
|
||||||
<li><a href="#annex-cases">ANNEX: CASES</a>
|
<li><a href="#annex-cases">ANNEX: CASES</a>
|
||||||
</li>
|
</li>
|
||||||
</ul>
|
</ul>
|
||||||
|
@ -191,7 +227,7 @@
|
||||||
</tr>
|
</tr>
|
||||||
<tr class="even">
|
<tr class="even">
|
||||||
<th>BPI</th>
|
<th>BPI</th>
|
||||||
<td>Public Investment Bank (France)</td>
|
<td><a class="maplink" data-title="Bpifrance">Public Investment Bank</a> (France)</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr class="odd">
|
<tr class="odd">
|
||||||
<th>BPOL</th>
|
<th>BPOL</th>
|
||||||
|
@ -247,7 +283,7 @@
|
||||||
</tr>
|
</tr>
|
||||||
<tr class="even">
|
<tr class="even">
|
||||||
<th>DITSS</th>
|
<th>DITSS</th>
|
||||||
<td>Dutch Institute for Technology, Safety & Security</td>
|
<td><a class="maplink" data-title="Dutch Institute for Technology Safety and Security (DITSS)">Dutch Institute for Technology, Safety & Security</a></td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr class="odd">
|
<tr class="odd">
|
||||||
<th>DPA</th>
|
<th>DPA</th>
|
||||||
|
@ -343,7 +379,7 @@
|
||||||
</tr>
|
</tr>
|
||||||
<tr class="even">
|
<tr class="even">
|
||||||
<th>INPOL</th>
|
<th>INPOL</th>
|
||||||
<td>Criminal Case Management System (Germany)</td>
|
<td><a class="maplink" data-title="German central criminal information system INPOL">Criminal Case Management System (Germany)</a></td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr class="odd">
|
<tr class="odd">
|
||||||
<th>KAK</th>
|
<th>KAK</th>
|
||||||
|
@ -439,7 +475,7 @@
|
||||||
</tr>
|
</tr>
|
||||||
<tr class="even">
|
<tr class="even">
|
||||||
<th>TAJ</th>
|
<th>TAJ</th>
|
||||||
<td>Criminal case history database (France)</td>
|
<td><a class="maplink" data-title="Deployment of TAJ">Criminal case history database (France)</a></td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr class="odd">
|
<tr class="odd">
|
||||||
<th>TASZ</th>
|
<th>TASZ</th>
|
||||||
|
@ -554,7 +590,7 @@
|
||||||
<ul>
|
<ul>
|
||||||
<li><p>Several French cities have launched “safe city” projects involving biometric technologies, however Nice is arguably the national leader. The city currently has the highest CCTV coverage of any city in France and has more than double the police agents per capita of the neighbouring city of Marseille.</p></li>
|
<li><p>Several French cities have launched “safe city” projects involving biometric technologies, however Nice is arguably the national leader. The city currently has the highest CCTV coverage of any city in France and has more than double the police agents per capita of the neighbouring city of Marseille.</p></li>
|
||||||
<li><p>Through a series of public-private partnerships the city began a number of initiatives using RBI technologies (including emotion and facial recognition). These technologies were deployed for both authentication and surveillance purposes with some falling into the category of biometric mass surveillance.</p></li>
|
<li><p>Through a series of public-private partnerships the city began a number of initiatives using RBI technologies (including emotion and facial recognition). These technologies were deployed for both authentication and surveillance purposes with some falling into the category of biometric mass surveillance.</p></li>
|
||||||
<li><p>One project which used FRT at a high school in Nice and one in Marseille was eventually declared unlawful. The court determined that the required consent could not be obtained due to the power imbalance between the targeted public (students) and the public authority (public educational establishment). This case highlights important issues about the deployment of biometric technologies in public spaces.</p></li>
|
<li><p>One project which used FRT at <a class="maplink" data-title="Facial Recognition Pilot in High School (Marseille)">a high school in Nice</a> and <a class="maplink" data-title="Facial Recognition Pilot in High School (Marseille)">one in Marseille</a> was eventually declared unlawful. The court determined that the required consent could not be obtained due to the power imbalance between the targeted public (students) and the public authority (public educational establishment). This case highlights important issues about the deployment of biometric technologies in public spaces.</p></li>
|
||||||
<li><p>The use of biometric mass surveillance by the mayor of Nice Christian Estrosi has put him on a collision course with the French Data Protection Authority (<a class="maplink" data-title="CNIL">CNIL</a>) as well as human rights/ digital rights organisations (Ligue des Droits de l’Homme, <a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a>). His activities have raised both concern and criticism over the usage of the technologies and their potential impact on the privacy of personal data.</p></li>
|
<li><p>The use of biometric mass surveillance by the mayor of Nice Christian Estrosi has put him on a collision course with the French Data Protection Authority (<a class="maplink" data-title="CNIL">CNIL</a>) as well as human rights/ digital rights organisations (Ligue des Droits de l’Homme, <a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a>). His activities have raised both concern and criticism over the usage of the technologies and their potential impact on the privacy of personal data.</p></li>
|
||||||
</ul>
|
</ul>
|
||||||
<p><strong>CHAPTER 9: Facial Recognition in Südkreuz Berlin, Hamburg G20 and Mannheim (Germany)</strong></p>
|
<p><strong>CHAPTER 9: Facial Recognition in Südkreuz Berlin, Hamburg G20 and Mannheim (Germany)</strong></p>
|
||||||
|
@ -660,8 +696,8 @@
|
||||||
|
|
||||||
<p>The intrusiveness of the system, and its impact on fundamental rights is best exemplified by its deployment in the Xinjiang province. The province capital, Urumqi, is chequered with <strong>checkpoints and identification stations</strong>. Citizens need to submit to facial recognition ID checks in supermarkets, hotels, train stations, highway stations and several other public spaces (Chin and Bürge 2017). The information collected through the cameras is centralised and matched against other <strong>biometric data</strong> such as <strong>DNA samples</strong> and <strong>voice samples</strong>. This allows the government to attribute <strong>trust-worthiness scores</strong> (trustworthy, average, untrustworthy) and thus generate a list of individuals that can become candidates for detention (Wang 2018).</p>
|
<p>The intrusiveness of the system, and its impact on fundamental rights is best exemplified by its deployment in the Xinjiang province. The province capital, Urumqi, is chequered with <strong>checkpoints and identification stations</strong>. Citizens need to submit to facial recognition ID checks in supermarkets, hotels, train stations, highway stations and several other public spaces (Chin and Bürge 2017). The information collected through the cameras is centralised and matched against other <strong>biometric data</strong> such as <strong>DNA samples</strong> and <strong>voice samples</strong>. This allows the government to attribute <strong>trust-worthiness scores</strong> (trustworthy, average, untrustworthy) and thus generate a list of individuals that can become candidates for detention (Wang 2018).</p>
|
||||||
|
|
||||||
<p>European countries’ deployments are far from the Chinese experience. But the companies involved in China’s pervasive digital surveillance network (such as <strong>Tencent</strong>, <strong>Dahua Technology</strong>, <strong><a class="maplink" data-title="Hikvision">Hikvision</a></strong>, <strong>SenseTime</strong>, <strong>ByteDance</strong> and <strong><a class="maplink" data-title="Huawei">Huawei</a></strong>) are exporting their know-how to Europe, under the form of “<strong>safe city” packages</strong>. <strong><a class="maplink" data-title="Huawei">Huawei</a></strong> is one of the most active in this regard. On the European continent, the city of Belgrade has for example deployed an extensive communication network of more than 1.000 cameras which collect up to 10 body and facial attributes (Stojkovski 2019). The cameras, deployed on poles, major traffic crossings and a large number of public spaces allow the Belgrade police to monitor large parts of the city centre, collect <strong>biometric information</strong> and communicate it directly to police officers deployed in the field. Belgrade has the most advanced deployment of <a class="maplink" data-title="Huawei">Huawei</a>’s surveillance technologies on the European continent, but similar projects are being implemented by other corporations – including the <strong>European companies <a class="maplink" data-title="Thales">Thales</a>, <a class="maplink" data-title="Engie Ineo">Engie Ineo</a> or <a class="maplink" data-title="IDEMIA">Idemia</strong> – in other European cities and many “Safe City” deployments are planned soon in EU countries such as France, Italy, Spain, Malta, and Germany (Hillman and McCalpin 2019). Furthermore, contrary to the idea China would be the sole exporter of Remote Biometric Identification technologies, EU companies have substantially developed their exports in this domain over the last years (Wagner 2021)</p>
|
<p>European countries’ deployments are far from the Chinese experience. But the companies involved in China’s pervasive digital surveillance network (such as <strong>Tencent</strong>, <strong><a class="maplink" data-title="Dahua Technologies">Dahua Technology</a></strong>, <strong><a class="maplink" data-title="Hikvision">Hikvision</a></strong>, <strong>SenseTime</strong>, <strong>ByteDance</strong> and <strong><a class="maplink" data-title="Huawei">Huawei</a></strong>) are exporting their know-how to Europe, under the form of “<strong>safe city” packages</strong>. <strong><a class="maplink" data-title="Huawei">Huawei</a></strong> is one of the most active in this regard. On the European continent, the city of Belgrade has for example deployed an extensive communication network of more than 1.000 cameras which collect up to 10 body and facial attributes (Stojkovski 2019). The cameras, deployed on poles, major traffic crossings and a large number of public spaces allow the Belgrade police to monitor large parts of the city centre, collect <strong>biometric information</strong> and communicate it directly to police officers deployed in the field. Belgrade has the most advanced deployment of <a class="maplink" data-title="Huawei">Huawei</a>’s surveillance technologies on the European continent, but similar projects are being implemented by other corporations – including the <strong>European companies <a class="maplink" data-title="Thales">Thales</a>, <a class="maplink" data-title="Engie Ineo">Engie Ineo</a> or <a class="maplink" data-title="IDEMIA">Idemia</strong> – in other European cities and many “Safe City” deployments are planned soon in EU countries such as France, Italy, Spain, Malta, and Germany (Hillman and McCalpin 2019). Furthermore, contrary to the idea China would be the sole exporter of Remote Biometric Identification technologies, EU companies have substantially developed their exports in this domain over the last years (Wagner 2021)</p>
|
||||||
<p>The turning point of public debates on facial recognition in Europe was probably <strong>the <a class="maplink" data-title="Clearview AI">Clearview AI</a> controversy</strong> in 2019-2020. <strong><a class="maplink" data-title="Clearview AI">Clearview AI</a></strong>, a company founded by Hoan Ton-That and Richard Schwartz in the United States, maintained a relatively secret profile until a New York Times article revealed in late 2019 that it was selling <strong>facial recognition technology</strong> to law enforcement. In February 2020, it was reported that the client list of <a class="maplink" data-title="Clearview AI">Clearview AI</a> had been stolen, and a few days later the details of the list were leaked (Mac, Haskins, and McDonald 2020). To the surprise of many in Europe, in addition to US government agencies and corporations, it appeared that the <strong>Metropolitan Police Service</strong> <strong>(London, UK)</strong>, as well as <strong>law enforcement from Belgian, Denmark, Finland, France, Ireland, Italy, Latvia, Lithuania, Malta, the Netherlands, Norway, Portugal, Serbia, Slovenia, Spain, Sweden, and Switzerland were on the client list.</strong> The controversy grew larger as it emerged that <a class="maplink" data-title="Clearview AI">Clearview AI</a> had (semi-illegally) harvested a large number of images from social media platforms such as <strong><a class="maplink" data-title="Facebook">Facebook</a>, YouTube</strong> and <strong>Twitter</strong> in order to constitute the datasets against which clients were invited to carry out searches (Mac, Haskins, and McDonald 2020).</p>
|
<p>The turning point of public debates on facial recognition in Europe was probably <strong>the <a class="maplink" data-title="Clearview AI">Clearview AI</a> controversy</strong> in 2019-2020. <strong><a class="maplink" data-title="Clearview AI">Clearview AI</a></strong>, a company founded by Hoan Ton-That and Richard Schwartz in the United States, maintained a relatively secret profile until a New York Times article revealed in late 2019 that it was selling <strong>facial recognition technology</strong> to law enforcement. In February 2020, it was reported that the client list of <a class="maplink" data-title="Clearview AI">Clearview AI</a> had been stolen, and a few days later the details of the list were leaked (Mac, Haskins, and McDonald 2020). To the surprise of many in Europe, in addition to US government agencies and corporations, it appeared that the <strong>Metropolitan Police Service</strong> <strong>(London, UK)</strong>, as well as <strong>law enforcement from Belgian, Denmark, Finland, France, Ireland, <a class="maplink" data-title="Carabinieri">Italy</a>, Latvia, Lithuania, Malta, the Netherlands, Norway, Portugal, Serbia, Slovenia, Spain, Sweden, and Switzerland were on the client list.</strong> The controversy grew larger as it emerged that <a class="maplink" data-title="Clearview AI">Clearview AI</a> had (semi-illegally) harvested a large number of images from social media platforms such as <strong><a class="maplink" data-title="Facebook">Facebook</a>, YouTube</strong> and <strong>Twitter</strong> in order to constitute the datasets against which clients were invited to carry out searches (Mac, Haskins, and McDonald 2020).</p>
|
||||||
|
|
||||||
<p>The news of the hacking strengthened a strong push-back movement against the development of facial recognition technology by companies such as <a class="maplink" data-title="Clearview AI">Clearview AI</a>, as well as their use by government agencies. In 2018, <strong>Massachusetts Institute of Technology</strong> (MIT) scholar and <strong><a class="maplink" data-title="Algorithmic Justice League">Algorithmic Justice League</a></strong> founder <strong>Joy Buolamwini</strong> together with <strong>Temnit Gebru</strong> had published the report <em>Gender Shades</em> (Buolamwini and Gebru 2018), in which they assessed the racial bias in the face recognition datasets and algorithms used by companies such as <a class="maplink" data-title="IBM">IBM</a> and Microsoft. Buolamwini and Gebru found that <strong>algorithms performed generally worse on darker-skinned faces, and in particular darker-skinned females, with error rates up to 34% higher than lighter-skinned males</strong> (Najibi 2020). <a class="maplink" data-title="IBM">IBM</a> and Microsoft responded by amending their systems, and a re-audit showed less bias. Not all companies responded equally. <strong>Amazon’s Rekognition</strong> system, which was included in the second study continued to show a 31% lower rate for darker-skinned females. The same year <strong>ACLU</strong> conducted another key study on Amazon’s Rekognition, using the pictures of <strong>members of congress against a dataset of mugshots from law enforcemen</strong>t. 28 members of Congress, <strong>largely people of colour were incorrectly matched</strong> (Snow 2018). Activists engaged lawmakers. In 2019, the Algorithmic Accountability Act allowed the Federal Trade Commission to regulate private companies’ uses of facial recognition. In 2020, several companies, including <a class="maplink" data-title="IBM">IBM</a>, Microsoft, and Amazon, announced a moratorium on the development of their facial recognition technologies. 
Several US cities, including <strong>Boston</strong>, <strong>Cambridge</strong> (Massachusetts) <strong>San Francisco</strong>, <strong>Berkeley</strong>, <strong>Portland</strong> (Oregon), have also banned their police forces from using the technology.</p>
|
<p>The news of the hacking strengthened a strong push-back movement against the development of facial recognition technology by companies such as <a class="maplink" data-title="Clearview AI">Clearview AI</a>, as well as their use by government agencies. In 2018, <strong>Massachusetts Institute of Technology</strong> (MIT) scholar and <strong><a class="maplink" data-title="Algorithmic Justice League">Algorithmic Justice League</a></strong> founder <strong>Joy Buolamwini</strong> together with <strong>Temnit Gebru</strong> had published the report <em>Gender Shades</em> (Buolamwini and Gebru 2018), in which they assessed the racial bias in the face recognition datasets and algorithms used by companies such as <a class="maplink" data-title="IBM">IBM</a> and Microsoft. Buolamwini and Gebru found that <strong>algorithms performed generally worse on darker-skinned faces, and in particular darker-skinned females, with error rates up to 34% higher than lighter-skinned males</strong> (Najibi 2020). <a class="maplink" data-title="IBM">IBM</a> and Microsoft responded by amending their systems, and a re-audit showed less bias. Not all companies responded equally. <strong>Amazon’s Rekognition</strong> system, which was included in the second study continued to show a 31% lower rate for darker-skinned females. The same year <strong>ACLU</strong> conducted another key study on Amazon’s Rekognition, using the pictures of <strong>members of congress against a dataset of mugshots from law enforcemen</strong>t. 28 members of Congress, <strong>largely people of colour were incorrectly matched</strong> (Snow 2018). Activists engaged lawmakers. In 2019, the Algorithmic Accountability Act allowed the Federal Trade Commission to regulate private companies’ uses of facial recognition. In 2020, several companies, including <a class="maplink" data-title="IBM">IBM</a>, Microsoft, and Amazon, announced a moratorium on the development of their facial recognition technologies. 
Several US cities, including <strong>Boston</strong>, <strong>Cambridge</strong> (Massachusetts) <strong>San Francisco</strong>, <strong>Berkeley</strong>, <strong>Portland</strong> (Oregon), have also banned their police forces from using the technology.</p>
|
||||||
|
|
||||||
|
@ -759,7 +795,7 @@
|
||||||
</section>
|
</section>
|
||||||
<section id="people-tracking-and-counting" class="level3">
|
<section id="people-tracking-and-counting" class="level3">
|
||||||
<h3>People tracking and counting </h3>
|
<h3>People tracking and counting </h3>
|
||||||
<p>This is perhaps the form of person tracking with which the least information about an individual is stored. An <strong>object detection algorithm</strong> estimates the presence and position of individuals on a camera image. These positions are stored or counted and used for further metrics. It is used to count <strong>passers-by in city centres</strong>, and for a <strong>one-and-a-half-meter social distancing monitor in Amsterdam</strong><a href="#fn2" class="footnote-ref" id="fnref2" role="doc-noteref"><sup>2</sup></a>. See also the case study in this document on the <a class="maplink" data-title="Burglary-Free Neighbourhood">Burglary-Free Neighbourhood</a> in Rotterdam (CHAPTER 7), which goes into more detail about the use of the recorded trajectories of individuals to label anomalous behaviour.</p>
|
<p>This is perhaps the form of person tracking with which the least information about an individual is stored. An <strong>object detection algorithm</strong> estimates the presence and position of individuals on a camera image. These positions are stored or counted and used for further metrics. It is used to count <strong>passers-by in city centres</strong>, and for a <strong>one-and-a-half-meter social distancing monitor in Amsterdam</strong><a href="#fn2" class="footnote-ref" id="fnref2" role="doc-noteref"><sup>2</sup></a>. See also the case study in this document on the <a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Burglary-Free Neighbourhood</a> in Rotterdam (CHAPTER 7), which goes into more detail about the use of the recorded trajectories of individuals to label anomalous behaviour.</p>
|
||||||
</section>
|
</section>
|
||||||
<section id="emotion-recognition." class="level3">
|
<section id="emotion-recognition." class="level3">
|
||||||
<h3>Emotion recognition. </h3>
|
<h3>Emotion recognition. </h3>
|
||||||
|
@ -771,7 +807,7 @@
|
||||||
</section>
|
</section>
|
||||||
<section id="audio-recognition" class="level3">
|
<section id="audio-recognition" class="level3">
|
||||||
<h3>Audio recognition </h3>
|
<h3>Audio recognition </h3>
|
||||||
<p>From a technological perspective, neural networks process audio relatively similarly to how video is processed: rather than feeding an image, a spectrogram is used as input for the network. However, under the GDPR, recording conversations, is illegal in the <a class="maplink" data-title="European Union">European Union</a> without informed consent of the participants. In order to adhere to these regulations, on some occasions, only particular frequencies are recorded and processed. For example, in the <a class="maplink" data-title="Burglary-Free Neighbourhood">Burglary-Free Neighbourhood</a> in Rotterdam (CHAPTER 7), only two frequencies are used to classify audio; making conversations indiscernible while being able to discern shouting or the breaking of glass<a href="#fn3" class="footnote-ref" id="fnref3" role="doc-noteref"><sup>3</sup></a>.</p>
|
<p>From a technological perspective, neural networks process audio relatively similarly to how video is processed: rather than feeding an image, a spectrogram is used as input for the network. However, under the GDPR, recording conversations, is illegal in the <a class="maplink" data-title="European Union">European Union</a> without informed consent of the participants. In order to adhere to these regulations, on some occasions, only particular frequencies are recorded and processed. For example, in the <a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Burglary-Free Neighbourhood</a> in Rotterdam (CHAPTER 7), only two frequencies are used to classify audio; making conversations indiscernible while being able to discern shouting or the breaking of glass<a href="#fn3" class="footnote-ref" id="fnref3" role="doc-noteref"><sup>3</sup></a>.</p>
|
||||||
</section>
|
</section>
|
||||||
</section>
|
</section>
|
||||||
<section id="how-does-image-based-remote-biometric-identification-work" class="level2">
|
<section id="how-does-image-based-remote-biometric-identification-work" class="level2">
|
||||||
|
@ -799,7 +835,7 @@
|
||||||
<section id="availability" class="level3">
|
<section id="availability" class="level3">
|
||||||
<h3>Availability</h3>
|
<h3>Availability</h3>
|
||||||
<p>Facial recognition algorithms can be developed in-house, taken from an open-source repository, or purchased (IPVM Team 2021b, 14). Popular <strong>open-source facial recognition</strong> implementations include OpenCV, Face_pytorch, OpenFace and Insightface. Many of these software libraries are developed at universities or implement algorithms and neural network architectures presented in academic papers. They are free, and allow for a great detail of customisation, but require substantial programming skills to be implemented in a surveillance system. Moreover, when using such software, the algorithms run on one’s own hardware which provides the developer with more control, but also requires more maintenance.</p>
|
<p>Facial recognition algorithms can be developed in-house, taken from an open-source repository, or purchased (IPVM Team 2021b, 14). Popular <strong>open-source facial recognition</strong> implementations include OpenCV, Face_pytorch, OpenFace and Insightface. Many of these software libraries are developed at universities or implement algorithms and neural network architectures presented in academic papers. They are free, and allow for a great detail of customisation, but require substantial programming skills to be implemented in a surveillance system. Moreover, when using such software, the algorithms run on one’s own hardware which provides the developer with more control, but also requires more maintenance.</p>
|
||||||
<p><strong>Proprietary facial recognition.</strong> There are three possible routes for the use of proprietary systems: There are <strong>“turnkey”</strong> systems sold by manufacturers such as <strong><a class="maplink" data-title="Hikvision">Hikvision</a></strong>, <strong>Dahua</strong>, <strong>Anyvision</strong> or <strong>Briefcam</strong>. Those integrate the software and hardware, and as such can be directly deployed by the client. <strong>Algorithm developers</strong> such as <strong>Amazon AWS Rekognition</strong> (USA), <strong><a class="maplink" data-title="NEC">NEC</a></strong> (Japan), <strong>NTechlab</strong> (Russia), <strong><a class="maplink" data-title="Paravision">Paravision</a></strong> (USA) allow to implement their algorithms and customise them to one’s needs, and finally there are <strong>“cloud” API systems</strong>, a sub-set of the former category, where the algorithm is hosted in a datacentre and is accessed remotely (IPVM Team 2021b, 16). The latter type of technology bears important legal ramifications, as the data may travel outside of national or European jurisdictions. It should be noted that many of the proprietary products are based on similar algorithms and network architectures as their open-source counterparts (OpenCV, 2021). Contrary to the open-source software, it is generally unclear which datasets of images have been used to train the proprietary algorithms.</p>
|
<p><strong>Proprietary facial recognition.</strong> There are three possible routes for the use of proprietary systems: There are <strong>“turnkey”</strong> systems sold by manufacturers such as <strong><a class="maplink" data-title="Hikvision">Hikvision</a></strong>, <strong><a class="maplink" data-title="Dahua Technologies">Dahua</a></strong>, <strong><a class="maplink" data-title="AnyVision">AnyVision</a></strong> or <strong><a class="maplink" data-title="Briefcam Ltd">Briefcam</a></strong>. Those integrate the software and hardware, and as such can be directly deployed by the client. <strong>Algorithm developers</strong> such as <strong>Amazon AWS Rekognition</strong> (USA), <strong><a class="maplink" data-title="NEC">NEC</a></strong> (Japan), <strong>NTechlab</strong> (Russia), <strong><a class="maplink" data-title="Paravision">Paravision</a></strong> (USA) allow to implement their algorithms and customise them to one’s needs, and finally there are <strong>“cloud” API systems</strong>, a sub-set of the former category, where the algorithm is hosted in a datacentre and is accessed remotely (IPVM Team 2021b, 16). The latter type of technology bears important legal ramifications, as the data may travel outside of national or European jurisdictions. It should be noted that many of the proprietary products are based on similar algorithms and network architectures as their open-source counterparts (OpenCV, 2021). Contrary to the open-source software, it is generally unclear which datasets of images have been used to train the proprietary algorithms.</p>
|
||||||
</section>
|
</section>
|
||||||
</section>
|
</section>
|
||||||
<section id="technical-limits-problems-and-challenges-of-facial-recognition" class="level2">
|
<section id="technical-limits-problems-and-challenges-of-facial-recognition" class="level2">
|
||||||
|
@ -843,29 +879,29 @@
|
||||||
<p>A broad range of deployments, which we consider in this first section, is not aimed at surveillance, but at authentication (see section 2.3 in this report), namely making sure that the person in front of the security camera is who they say they are.</p>
|
<p>A broad range of deployments, which we consider in this first section, is not aimed at surveillance, but at authentication (see section 2.3 in this report), namely making sure that the person in front of the security camera is who they say they are.</p>
|
||||||
<section id="live-authentication" class="level3">
|
<section id="live-authentication" class="level3">
|
||||||
<h3>Live authentication</h3>
|
<h3>Live authentication</h3>
|
||||||
<p>As in the cases of the use of Cisco systems powered FRT in two pilot projects in <strong>high schools of Nice</strong> (see section 8.1) <strong>and Marseille (France)</strong><a href="#fn7" class="footnote-ref" id="fnref7" role="doc-noteref"><sup>7</sup></a>, or as in the case of the <strong>Anderstorp Upper Secondary School in Skelleftea (Sweden)</strong><a href="#fn8" class="footnote-ref" id="fnref8" role="doc-noteref"><sup>8</sup></a>, the aim of these projects was to identify students who could have access to the premises. School-wide biometric databases were generated and populated with students’ portraits. Gates were fitted with cameras connected to facial recognition technology and allowed access only to recognised students. Another documented use has been for the <strong><a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App (Hungary)</a></strong>, in which telephone cameras are used by authorities to verify the identity of the persons logged into the app (see also section 10.1).</p>
|
<p>As in the cases of the use of <a class="maplink" data-title="Cisco Systems">Cisco systems</a> powered FRT in two pilot projects in <strong><a class="maplink" data-title="Facial Recognition Pilot in High School (Nice)">high schools of Nice</a></strong> (see section 8.1) <strong>and <a class="maplink" data-title="Facial Recognition Pilot in High School (Marseille)">Marseille</a> (France)</strong><a href="#fn7" class="footnote-ref" id="fnref7" role="doc-noteref"><sup>7</sup></a>, or as in the case of the <strong><a class="maplink" data-title="Facial Recognition Pilot in High School (Skelleftea)">Anderstorp Upper Secondary School</a> in Skelleftea (Sweden)</strong><a href="#fn8" class="footnote-ref" id="fnref8" role="doc-noteref"><sup>8</sup></a>, the aim of these projects was to identify students who could have access to the premises. School-wide biometric databases were generated and populated with students’ portraits. Gates were fitted with cameras connected to facial recognition technology and allowed access only to recognised students. Another documented use has been for the <strong><a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App (Hungary)</a></strong>, in which telephone cameras are used by authorities to verify the identity of the persons logged into the app (see also section 10.1).</p>
|
||||||
<p>In these deployments, people must submit themselves to the camera in order to be identified and gain access. While these techniques of identification pose <strong>important threats to the privacy of the concerned small groups of users</strong> (in both high school cases, DPAs banned the use of FRTs), and run the risk of false positives (unauthorised people recognised as authorised) or false negatives (authorised people not recognised as such) <strong>the risk of biometric mass surveillance strictly speaking is low to non-existent because of the nature of the acquisition of images and other sensor-based data.</strong></p>
|
<p>In these deployments, people must submit themselves to the camera in order to be identified and gain access. While these techniques of identification pose <strong>important threats to the privacy of the concerned small groups of users</strong> (in both high school cases, DPAs banned the use of FRTs), and run the risk of false positives (unauthorised people recognised as authorised) or false negatives (authorised people not recognised as such) <strong>the risk of biometric mass surveillance strictly speaking is low to non-existent because of the nature of the acquisition of images and other sensor-based data.</strong></p>
|
||||||
<p>However, other forms of live authentication tie in with surveillance practices, in particular various forms of <strong>blacklisting</strong>. With blacklisting the face of every passer-by is compared to a list of faces of individuals who have been rejected access to the premises. In such an instance, people do not have to be identified, as long as an image of their face is provided. This has been used in public places, for example in the case of the Korte Putstraat in the Dutch city of 's-Hertogenbosch: during the carnival festivities of 2019 two people were rejected access to the street after they were singled out by the system (Gotink, 2019). It is unclear how many false positives were generated during this period. Other cases of blacklisting can be found at, for example, access control at various football stadiums in Europe, see also section 3.3. In many cases of blacklisting, individuals do not enrol voluntarily.</p>
|
<p>However, other forms of live authentication tie in with surveillance practices, in particular various forms of <strong>blacklisting</strong>. With blacklisting the face of every passer-by is compared to a list of faces of individuals who have been rejected access to the premises. In such an instance, people do not have to be identified, as long as an image of their face is provided. This has been used in public places, for example in the case of the <a class="maplink" data-title="Korte Putstraat (Stopped)">Korte Putstraat</a> in the Dutch city of 's-Hertogenbosch: during the carnival festivities of 2019 two people were rejected access to the street after they were singled out by the system (Gotink, 2019). It is unclear how many false positives were generated during this period. Other cases of blacklisting can be found at, for example, access control at various football stadiums in Europe, see also section 3.3. In many cases of blacklisting, individuals do not enrol voluntarily.</p>
|
||||||
</section>
|
</section>
|
||||||
<section id="forensic-authentication" class="level3">
|
<section id="forensic-authentication" class="level3">
|
||||||
<h3>Forensic authentication</h3>
|
<h3>Forensic authentication</h3>
|
||||||
<p>Biometric systems for the purposes of authentication are also increasingly deployed for <strong>forensic applications</strong> among law-enforcement agencies in the European Union. The typical scenario for the use of such technologies is to match the photograph of a suspect (extracted, for example, from previous records or from CCTV footage) against an existing dataset of known individuals (e.g., a national biometric database, a driver’s license database, etc.). (TELEFI, 2021). The development of these forensic authentication capabilities is particularly relevant to this study, because it entails making large databases ready for searches on the basis of biometric information.</p>
|
<p>Biometric systems for the purposes of authentication are also increasingly deployed for <strong>forensic applications</strong> among law-enforcement agencies in the European Union. The typical scenario for the use of such technologies is to match the photograph of a suspect (extracted, for example, from previous records or from CCTV footage) against an existing dataset of known individuals (e.g., a national biometric database, a driver’s license database, etc.). (TELEFI, 2021). The development of these forensic authentication capabilities is particularly relevant to this study, because it entails making large databases ready for searches on the basis of biometric information.</p>
|
||||||
<p>To date, <strong>11 out of 27 member states of the <a class="maplink" data-title="European Union">European Union</a></strong> are using facial recognition against biometric databases for forensic purposes: <strong>Austria</strong> (EDE)<a href="#fn9" class="footnote-ref" id="fnref9" role="doc-noteref"><sup>9</sup></a>, <strong>Finland</strong> (KASTU)<a href="#fn10" class="footnote-ref" id="fnref10" role="doc-noteref"><sup>10</sup></a>, <strong>France</strong> (TAJ)<a href="#fn11" class="footnote-ref" id="fnref11" role="doc-noteref"><sup>11</sup></a>, <strong>Germany</strong> (INPOL)<a href="#fn12" class="footnote-ref" id="fnref12" role="doc-noteref"><sup>12</sup></a>, <strong>Greece</strong> (Mugshot Database)<a href="#fn13" class="footnote-ref" id="fnref13" role="doc-noteref"><sup>13</sup></a>, <strong>Hungary</strong> (Facial Image Registry)<a href="#fn14" class="footnote-ref" id="fnref14" role="doc-noteref"><sup>14</sup></a>, <strong>Italy</strong> (AFIS)<a href="#fn15" class="footnote-ref" id="fnref15" role="doc-noteref"><sup>15</sup></a>, <strong>Latvia</strong> (BDAS)<a href="#fn16" class="footnote-ref" id="fnref16" role="doc-noteref"><sup>16</sup></a>, <strong>Lithuania</strong> (HDR)<a href="#fn17" class="footnote-ref" id="fnref17" role="doc-noteref"><sup>17</sup></a>, <strong>Netherlands</strong> (<a class="maplink" data-title="CATCH">CATCH</a>)<a href="#fn18" class="footnote-ref" id="fnref18" role="doc-noteref"><sup>18</sup></a> and <strong>Slovenia</strong> (Record of Photographed Persons)<a href="#fn19" class="footnote-ref" id="fnref19" role="doc-noteref"><sup>19</sup></a> (TELEFI 2021).</p>
|
<p>To date, <strong>11 out of 27 member states of the <a class="maplink" data-title="European Union">European Union</a></strong> are using facial recognition against biometric databases for forensic purposes: <strong>Austria</strong> (<a class="maplink" data-title="EDE (AFR used by Austrian Criminal Intelligence Service)">EDE</a>)<a href="#fn9" class="footnote-ref" id="fnref9" role="doc-noteref"><sup>9</sup></a>, <strong>Finland</strong> (<a class="maplink" data-title="KASTU (Finland)">KASTU</a>)<a href="#fn10" class="footnote-ref" id="fnref10" role="doc-noteref"><sup>10</sup></a>, <strong>France</strong> (<a class="maplink" data-title="Deployment of TAJ">TAJ</a>)<a href="#fn11" class="footnote-ref" id="fnref11" role="doc-noteref"><sup>11</sup></a>, <strong>Germany</strong> (<a class="maplink" data-title="German central criminal information system INPOL">INPOL</a>)<a href="#fn12" class="footnote-ref" id="fnref12" role="doc-noteref"><sup>12</sup></a>, <strong>Greece</strong> (<a class="maplink" data-title="Facial Recognition in Greece (Law Enforcement)">Mugshot Database</a>)<a href="#fn13" class="footnote-ref" id="fnref13" role="doc-noteref"><sup>13</sup></a>, <strong>Hungary</strong> (<a class="maplink" data-title="NEC Face Recognition Search Engine in Hungary">Facial Image Registry</a>)<a href="#fn14" class="footnote-ref" id="fnref14" role="doc-noteref"><sup>14</sup></a>, <strong>Italy</strong> (<a class="maplink" data-title="AFIS (Deployment, Italy)">AFIS</a>)<a href="#fn15" class="footnote-ref" id="fnref15" role="doc-noteref"><sup>15</sup></a>, <strong>Latvia</strong> (<a class="maplink" data-title="BDAS Deployment (Latvia)">BDAS</a>)<a href="#fn16" class="footnote-ref" id="fnref16" role="doc-noteref"><sup>16</sup></a>, <strong>Lithuania</strong> (<a class="maplink" data-title="HDR (Deployment, Lithuania)">HDR</a>)<a href="#fn17" class="footnote-ref" id="fnref17" role="doc-noteref"><sup>17</sup></a>, <strong>Netherlands</strong> (<a class="maplink" 
data-title="CATCH">CATCH</a>)<a href="#fn18" class="footnote-ref" id="fnref18" role="doc-noteref"><sup>18</sup></a> and <strong>Slovenia</strong> (<a class="maplink" data-title="VeriLook (and Face Trace) in Slovenia">Record of Photographed Persons</a>)<a href="#fn19" class="footnote-ref" id="fnref19" role="doc-noteref"><sup>19</sup></a> (TELEFI 2021).</p>
|
||||||
<p><strong>Seven additional countries</strong> are expected to acquire such capabilities in the near future: <strong>Croatia</strong> (ABIS)<a href="#fn20" class="footnote-ref" id="fnref20" role="doc-noteref"><sup>20</sup></a>, <strong>Czech Republic</strong> (CBIS)<a href="#fn21" class="footnote-ref" id="fnref21" role="doc-noteref"><sup>21</sup></a>, <strong>Portugal</strong> (AFIS) <strong>Romania</strong> (NBIS)<a href="#fn22" class="footnote-ref" id="fnref22" role="doc-noteref"><sup>22</sup></a>, <strong>Spain</strong> (ABIS), <strong>Sweden</strong> (National Mugshot Database), <strong>Cyprus</strong> (ISIS Faces)<a href="#fn23" class="footnote-ref" id="fnref23" role="doc-noteref"><sup>23</sup></a>, <strong>Estonia</strong> (ABIS) (TELEFI 2021).</p>
|
<p><strong>Seven additional countries</strong> are expected to acquire such capabilities in the near future: <strong>Croatia</strong> (<a class="maplink" data-title="ABIS (Deployment, Croatia)">ABIS</a>)<a href="#fn20" class="footnote-ref" id="fnref20" role="doc-noteref"><sup>20</sup></a>, <strong>Czech Republic</strong> (<a class="maplink" data-title="CBIS (deployment, Czech Republic)">CBIS</a>)<a href="#fn21" class="footnote-ref" id="fnref21" role="doc-noteref"><sup>21</sup></a>, <strong>Portugal</strong> (<a class="maplink" data-title="AFIS (Deployment, Portugal)">AFIS</a>), <strong>Romania</strong> (<a class="maplink" data-title="Romanian Police use of Facial Recognition (NBIS)">NBIS</a>)<a href="#fn22" class="footnote-ref" id="fnref22" role="doc-noteref"><sup>22</sup></a>, <strong>Spain</strong> (<a class="maplink" data-title="ABIS (Deployment, Spain)">ABIS</a>), <strong>Sweden</strong> (<a class="maplink" data-title="Facial Recognition National Mugshot Database (Sweden)">National Mugshot Database</a>), <strong>Cyprus</strong> (<a class="maplink" data-title="Facial Recognition in Cyprus (Law Enforcement)">ISIS Faces</a>)<a href="#fn23" class="footnote-ref" id="fnref23" role="doc-noteref"><sup>23</sup></a>, <strong>Estonia</strong> (<a class="maplink" data-title="ABIS (Deployment, Estonia)">ABIS</a>) (TELEFI 2021).</p>
|
||||||
<p>When it comes to international institutions, <strong><a class="maplink" data-title="Interpol">Interpol</a></strong> (2020) has a facial recognition system (<a class="maplink" data-title="IFRS (Interpol)">IFRS</a>)<a href="#fn24" class="footnote-ref" id="fnref24" role="doc-noteref"><sup>24</sup></a>, based on facial images received from more than 160 countries. <strong><a class="maplink" data-title="Europol">Europol</a></strong> has <strong>t</strong>wo sub-units which use the facial recognition search tool and database known as FACE: the European Counter Terrorism Center (ECTC) and the European Cybercrime Center (ECC). (TELEFI, 2021 149-153) (Europol 2020)</p>
|
<p>When it comes to international institutions, <strong><a class="maplink" data-title="Interpol">Interpol</a></strong> (2020) has a facial recognition system (<a class="maplink" data-title="IFRS (Interpol)">IFRS</a>)<a href="#fn24" class="footnote-ref" id="fnref24" role="doc-noteref"><sup>24</sup></a>, based on facial images received from more than 160 countries. <strong><a class="maplink" data-title="Europol">Europol</a></strong> has two sub-units which use the facial recognition search tool and database known as <a class="maplink" data-title="FACE Deployment by EUROPOL">FACE</a>: the European Counter Terrorism Center (ECTC) and the European Cybercrime Center (ECC). (TELEFI 2021, 149-153) (Europol 2020)</p>
|
||||||
<p><strong>Only 9 countries in the EU so far have rejected or do not plan to implement</strong> FRT for forensic purposes: <strong>Belgium</strong> (see CHAPTER 6), <strong>Bulgaria</strong>, <strong>Denmark</strong>, <strong>Ireland</strong>, <strong>Luxembourg</strong>, <strong>Malta</strong>, <strong>Poland</strong>, <strong>Portugal</strong>, <strong>Slovakia</strong>.</p>
|
<p><strong>Only 9 countries in the EU so far have rejected or do not plan to implement</strong> FRT for forensic purposes: <strong>Belgium</strong> (see CHAPTER 6), <strong>Bulgaria</strong>, <strong>Denmark</strong>, <strong>Ireland</strong>, <strong>Luxembourg</strong>, <strong>Malta</strong>, <strong>Poland</strong>, <strong>Portugal</strong>, <strong>Slovakia</strong>.</p>
|
||||||
<p><img src="images/media/image1.png" style="width:4.62502in;height:3.28283in" alt="Map Description automatically generated" /></p>
|
<p><img src="images/media/image1.png" style="width:4.62502in;height:3.28283in" alt="Map Description automatically generated" /></p>
|
||||||
<p>Figure 1. EU Countries use of FRT for forensic applications<a href="#fn25" class="footnote-ref" id="fnref25" role="doc-noteref"><sup>25</sup></a></p>
|
<p>Figure 1. EU Countries use of FRT for forensic applications<a href="#fn25" class="footnote-ref" id="fnref25" role="doc-noteref"><sup>25</sup></a></p>
|
||||||
<p><strong>When it comes to databases</strong>, some countries limit the searches to <strong>criminal databases</strong> (Austria, Germany, France, Italy, Greece, Slovenia, Lithuania, UK), while other countries open the searches to <strong>civil databases</strong> (Finland, Netherlands, Latvia, Hungary).</p>
|
<p><strong>When it comes to databases</strong>, some countries limit the searches to <strong>criminal databases</strong> (Austria, Germany, France, Italy, Greece, Slovenia, Lithuania, UK), while other countries open the searches to <strong>civil databases</strong> (Finland, Netherlands, Latvia, Hungary).</p>
|
||||||
<p>This means that the <strong>person categories can vary substantially.</strong> In the case of criminal databases it can range from suspects and convicts, to asylum seekers, aliens, unidentified persons, immigrants, visa applicants. When <strong>civil databases</strong> are used as well, such as in Hungary, the database contains a broad range of “individuals of known identity from various document/civil proceedings” (TELEFI 2021, appendix 3).</p>
|
<p>This means that the <strong>person categories can vary substantially.</strong> In the case of criminal databases it can range from suspects and convicts, to asylum seekers, aliens, unidentified persons, immigrants, visa applicants. When <strong>civil databases</strong> are used as well, such as in Hungary, the database contains a broad range of “individuals of known identity from various document/civil proceedings” (TELEFI 2021, appendix 3).</p>
|
||||||
<p><strong>Finally, the database sizes</strong>, in comparison to the authentication databases mentioned in the previous section, are of a different magnitude. The databases of school students in France and Sweden, mentioned in the previous section contains a few hundred entries. National databases can contain instead several millions. Criminal databases such as Germany’s INPOL contains <strong>6,2 million individuals</strong>, France’s TAJ <strong>21 million individuals</strong> and Italy’s AFIS <strong>9 million individuals.</strong> Civil databases, such as Hungary’s Facial Image Registry contain <strong>30 million templates</strong> (TELEFI, 2021 appendix 3).</p>
|
<p><strong>Finally, the database sizes</strong>, in comparison to the authentication databases mentioned in the previous section, are of a different magnitude. The databases of school students in France and Sweden, mentioned in the previous section, contain a few hundred entries. National databases can contain instead several millions. Criminal databases such as Germany’s <a class="maplink" data-title="German central criminal information system INPOL">INPOL</a> contains <strong>6,2 million individuals</strong>, France’s <a class="maplink" data-title="Deployment of TAJ">TAJ</a> <strong>21 million individuals</strong> and Italy’s <a class="maplink" data-title="AFIS (Deployment, Italy)">AFIS</a> <strong>9 million individuals.</strong> Civil databases, such as Hungary’s Facial Image Registry contain <strong>30 million templates</strong> (TELEFI, 2021 appendix 3).</p>
|
||||||
<p>Authentication has also been deployed as part of integrated “safe city” solutions, such as the <strong><a class="maplink" data-title="NEC">NEC</a> Technology <a class="maplink" data-title="NEC Technology in Lisbon">Bio-IDiom system in Lisbon</a> and London,</strong> deployed for forensic investigation purposes. For this specific product, authentication can occur via facial recognition, as well as other biometric authentication techniques such as <strong>ear acoustics, iris, voice, fingerprint, and finger vein recognition</strong>. We currently do not have public information on the use of <a class="maplink" data-title="NEC Technology in Lisbon">Bio-IDiom in Lisbon</a> nor in London. On <a class="maplink" data-title="NEC">NEC</a>’s Website (2021) however, Bio-IDiom is advertised as a “multimodal” identification system, that has been used for example by the Los Angeles County Sheriff’s Department (LASD) for criminal investigations. The system “combines multiple biometric technologies including fingerprint, palm print, face, and iris recognition” and works “based on the few clues left behind at crime scenes. In Los Angeles, “this system is also connected to the databases of federal and state law enforcement agencies such as the California Department of Justice and FBI, making it the world’s largest-scale service-based biometrics system for criminal investigation”. We don’t know if that is the case in Portugal and in the UK deployments.</p>
|
<p>Authentication has also been deployed as part of integrated “safe city” solutions, such as the <strong><a class="maplink" data-title="NEC">NEC</a> Technology <a class="maplink" data-title="NEC Technology in Lisbon">Bio-IDiom system in Lisbon</a> and London,</strong> deployed for forensic investigation purposes. For this specific product, authentication can occur via facial recognition, as well as other biometric authentication techniques such as <strong>ear acoustics, iris, voice, fingerprint, and finger vein recognition</strong>. We currently do not have public information on the use of <a class="maplink" data-title="NEC Technology in Lisbon">Bio-IDiom in Lisbon</a> nor in London. On <a class="maplink" data-title="NEC">NEC</a>’s Website (2021) however, Bio-IDiom is advertised as a “multimodal” identification system, that has been used for example by the Los Angeles County Sheriff’s Department (LASD) for criminal investigations. The system “combines multiple biometric technologies including fingerprint, palm print, face, and iris recognition” and works “based on the few clues left behind at crime scenes”. In Los Angeles, “this system is also connected to the databases of federal and state law enforcement agencies such as the California Department of Justice and FBI, making it the world’s largest-scale service-based biometrics system for criminal investigation”. We don’t know if that is the case in Portugal and in the UK deployments.</p>
|
||||||
</section>
|
</section>
|
||||||
<section id="case-study-inpol-germany" class="level3">
|
<section id="case-study-inpol-germany" class="level3">
|
||||||
<h3>Case study: INPOL (Germany)</h3>
|
<h3>Case study: INPOL (Germany)</h3>
|
||||||
<p>In order to give a concrete example of the forensic use of biometric technology, we can take the German case. Germany has been using <strong>automated facial recognition</strong> technologies to identify criminal activity since 2008 using a central criminal information system called <strong>INPOL (Informationssystem Polizei)</strong>, maintained by the <strong>Bundeskriminalamt (BKA)</strong>, which is the federal criminal police office. INPOL uses <strong>Oracle Software</strong> and includes the following information: name, aliases, date and place of birth, nationality, fingerprints, mugshots, appearance, information about criminal histories such as prison sentences or violence of an individual, and DNA information. However, DNA information is not automatically recorded (TELEFI 2021).</p>
|
<p>In order to give a concrete example of the forensic use of biometric technology, we can take the German case. Germany has been using <strong>automated facial recognition</strong> technologies to identify criminal activity since 2008 using a central criminal information system called <strong><a class="maplink" data-title="German central criminal information system INPOL">INPOL</a> (Informationssystem Polizei)</strong>, maintained by the <strong>Bundeskriminalamt (BKA)</strong>, which is the federal criminal police office. INPOL uses <strong>Oracle Software</strong> and includes the following information: name, aliases, date and place of birth, nationality, fingerprints, mugshots, appearance, information about criminal histories such as prison sentences or violence of an individual, and DNA information. However, DNA information is not automatically recorded (TELEFI 2021).</p>
|
||||||
<p>The INPOL database includes <strong>facial images of suspects, arrestees, missing persons, and convicted individuals</strong>. For the purpose of facial recognition, anatomical features of a person's face or head as seen on video surveillance or images are used as a material to match with data in INPOL. The facial recognition system compares templates and lists all the matches ordered by degree of accordance. The BKA has specific personnel visually analysing the system's choices and providing an assessment, defining the probability of identifying a person. This assessment can be used in a court of law if necessary (Bundeskriminalamt, n.d.). Searches in the database are conducted by using Cognitec Face VACS software (TELEFI 2021).</p>
|
<p>The <a class="maplink" data-title="German central criminal information system INPOL">INPOL</a> database includes <strong>facial images of suspects, arrestees, missing persons, and convicted individuals</strong>. For the purpose of facial recognition, anatomical features of a person's face or head as seen on video surveillance or images are used as a material to match with data in <a class="maplink" data-title="German central criminal information system INPOL">INPOL</a>. The facial recognition system compares templates and lists all the matches ordered by degree of accordance. The BKA has specific personnel visually analysing the system's choices and providing an assessment, defining the probability of identifying a person. This assessment can be used in a court of law if necessary (Bundeskriminalamt, n.d.). Searches in the database are conducted by using <a class="maplink" data-title="Cognitec Systems">Cognitec</a> Face VACS software (TELEFI 2021).</p>
|
||||||
<p>As of March 2020, <strong>INPOL</strong> consists of <strong>5,8 million images of about 3,6 million individuals</strong>. All police stations in Germany have access to this database. The BKA saves biometric data and can be used by other ministries as well, for instance, to identify asylum seekers. Furthermore, the data is shared in the context of the <strong>Prüm cooperation</strong> on an international level (mostly fingerprints and DNA patterns). Furthermore, the <strong>BKA</strong> saves <strong>DNA analysis data as part of INPOL</strong>, accessible for all police stations in Germany. That database contains <strong>1,2 million data sets</strong> (Bundeskriminalamt, n.d.). Other recorded facial images, for instance, driver’s licenses or passports, are not included in the search, and the database is mainly used for police work (TELEFI 2021).</p>
|
<p>As of March 2020, <strong><a class="maplink" data-title="German central criminal information system INPOL">INPOL</a></strong> consists of <strong>5,8 million images of about 3,6 million individuals</strong>. All police stations in Germany have access to this database. The BKA saves biometric data and can be used by other ministries as well, for instance, to identify asylum seekers. Furthermore, the data is shared in the context of the <strong>Prüm cooperation</strong> on an international level (mostly fingerprints and DNA patterns). Furthermore, the <strong>BKA</strong> saves <strong>DNA analysis data as part of <a class="maplink" data-title="German central criminal information system INPOL">INPOL</a></strong>, accessible for all police stations in Germany. That database contains <strong>1,2 million data sets</strong> (Bundeskriminalamt, n.d.). Other recorded facial images, for instance, driver’s licenses or passports, are not included in the search, and the database is mainly used for police work (TELEFI 2021).</p>
|
||||||
</section>
|
</section>
|
||||||
<section id="a-blurred-boundary-between-authentication-and-surveillance" class="level3">
|
<section id="a-blurred-boundary-between-authentication-and-surveillance" class="level3">
|
||||||
<h3>A blurred boundary between authentication and surveillance</h3>
|
<h3>A blurred boundary between authentication and surveillance</h3>
|
||||||
|
@ -881,17 +917,17 @@
|
||||||
<p>A first range of deployments of <strong>“smart” systems</strong> correspond to what can broadly be defined as “smart surveillance” yet <strong>do not collect or process biometric information per se</strong><a href="#fn26" class="footnote-ref" id="fnref26" role="doc-noteref"><sup>26</sup></a>. Smart systems can be used <strong>ex-post</strong>, <strong>to assist CCTV camera operators</strong> in processing large amounts of <strong>recorded information</strong>, or can guide their attention when they have to monitor a large number of <strong>live video feeds</strong> simultaneously. Smart surveillance uses the following features:</p>
|
<p>A first range of deployments of <strong>“smart” systems</strong> correspond to what can broadly be defined as “smart surveillance” yet <strong>do not collect or process biometric information per se</strong><a href="#fn26" class="footnote-ref" id="fnref26" role="doc-noteref"><sup>26</sup></a>. Smart systems can be used <strong>ex-post</strong>, <strong>to assist CCTV camera operators</strong> in processing large amounts of <strong>recorded information</strong>, or can guide their attention when they have to monitor a large number of <strong>live video feeds</strong> simultaneously. Smart surveillance uses the following features:</p>
|
||||||
<p><strong>- Anomaly detection. In Toulouse (France), the City Council commissioned <a class="maplink" data-title="IBM">IBM</a> to connect 30 video surveillance cameras to software able to "assist human decisions" by raising alerts when "abnormal events are detected." (Technopolice 2021) The request was justified by the “difficulties of processing the images generated daily by the 350 cameras and kept for 30 days (more than 10,000 images per second)”. The objective, according to the digital direction is "to optimise and structure the supervision of video surveillance operators by generating alerts through a system of intelligent analysis that facilitates the identification of anomalies detected, whether: movements of crowds, isolated luggage, crossing virtual barriers north of the Garonne, precipitous movement, research of shapes and colour. All these detections are done in real time or delayed (Technopolice 2021). In other words, the anomaly detection is a way to <em>operationalise</em> the numerical output of various computer vision based recognition systems. Similar systems are used</strong> in the <strong>Smart video surveillance deployment in Valenciennes (France)</strong> or in the <strong>Urban Surveillance Centre (Marseille).</strong></p>
|
<p><strong>- Anomaly detection. In Toulouse (France), the City Council commissioned <a class="maplink" data-title="IBM">IBM</a> to connect 30 video surveillance cameras to software able to "assist human decisions" by raising alerts when "abnormal events are detected." (Technopolice 2021) The request was justified by the “difficulties of processing the images generated daily by the 350 cameras and kept for 30 days (more than 10,000 images per second)”. The objective, according to the digital direction is "to optimise and structure the supervision of video surveillance operators by generating alerts through a system of intelligent analysis that facilitates the identification of anomalies detected, whether: movements of crowds, isolated luggage, crossing virtual barriers north of the Garonne, precipitous movement, research of shapes and colour. All these detections are done in real time or delayed (Technopolice 2021). In other words, the anomaly detection is a way to <em>operationalise</em> the numerical output of various computer vision based recognition systems. Similar systems are used</strong> in the <strong>Smart video surveillance deployment in Valenciennes (France)</strong> or in the <strong>Urban Surveillance Centre (Marseille).</strong></p>
|
||||||
<p><strong>- Object Detection.</strong> In Amsterdam, around the <strong><a class="maplink" data-title="Johan Cruijff ArenA">Johan Cruijff ArenA</a></strong> (Stadium), the city has been experimenting with a <strong><a class="maplink" data-title="Digitale Perimeter">Digitale Perimeter</a></strong> (digital perimeter) surveillance system. In addition to the usual features of facial recognition, and crowd monitoring, the system includes the possibility of automatically detecting specific objects such as <strong>weapons, fireworks</strong> or <strong>drones</strong>. Similar features are found in <strong><a class="maplink" data-title="Inwebit">Inwebit</a>’s Smart Security Platform (SSP) in Poland.</strong></p>
|
<p><strong>- Object Detection.</strong> In Amsterdam, around the <strong><a class="maplink" data-title="Johan Cruijff ArenA">Johan Cruijff ArenA</a></strong> (Stadium), the city has been experimenting with a <strong><a class="maplink" data-title="Digitale Perimeter">Digitale Perimeter</a></strong> (digital perimeter) surveillance system. In addition to the usual features of facial recognition, and crowd monitoring, the system includes the possibility of automatically detecting specific objects such as <strong>weapons, fireworks</strong> or <strong>drones</strong>. Similar features are found in <strong><a class="maplink" data-title="Inwebit">Inwebit</a>’s Smart Security Platform (SSP) in Poland.</strong></p>
|
||||||
<p><strong>- Feature search. In Marbella (Spain), <a class="maplink" data-title="Avigilon">Avigilon</a> deployed a smart camera system aimed at providing “smart” functionalities without biometric data. Since regional law bans facial and biometric identification without consent, the software uses “appearance search”. “Appearance search” provides estimates for “unique facial traits, the colour of a person’s clothes, age, shape, gender and hair colour”. This information is not considered biometric. The individual’s features can be used to search for suspects fitting a particular profile. Similar technology has been deployed in Kortrijk (Belgium), which provides search parameters for people, vehicles and animals (</strong>Verbeke 2019)<strong>.</strong></p>
|
<p><strong>- Feature search. In <a class="maplink" data-title="City of Marbella">Marbella</a> (Spain), <a class="maplink" data-title="Avigilon">Avigilon</a> deployed <a class="maplink" data-title="Avigilon deployment in Marbella">a smart camera system</a> aimed at providing “smart” functionalities without biometric data. Since regional law bans facial and biometric identification without consent, the software uses “appearance search”. “Appearance search” provides estimates for “unique facial traits, the colour of a person’s clothes, age, shape, gender and hair colour”. This information is not considered biometric. The individual’s features can be used to search for suspects fitting a particular profile. Similar technology has been deployed in Kortrijk (Belgium), which provides search parameters for people, vehicles and animals (</strong>Verbeke 2019)<strong>.</strong></p>
|
||||||
<p>- <strong>Video summary.</strong> Some companies, such as <strong>Briefcam</strong> and their product <strong>Briefcam Review</strong>, offer a related product, which promises to shorten the analysis of long hours of CCTV footage, by identifying specific topics of interest (children, women, lighting changes) and making the footage searchable. The product combines face recognition, license plate recognition, and more mundane video analysis features such as the possibility to overlay selected scenes, thus highlighting recurrent points of activity in the image. Briefcam is deployed in several cities across Europe, including Vannes, Roubaix (in partnership with <strong><a class="maplink" data-title="Eiffage">Eiffage</a></strong>) and Moirand in France.</p>
|
<p>- <strong>Video summary.</strong> Some companies, such as <strong><a class="maplink" data-title="Briefcam Ltd">Briefcam</a></strong> and their product <strong>Briefcam Review</strong>, offer a related product, which promises to shorten the analysis of long hours of CCTV footage, by identifying specific topics of interest (children, women, lighting changes) and making the footage searchable. The product combines face recognition, license plate recognition, and more mundane video analysis features such as the possibility to overlay selected scenes, thus highlighting recurrent points of activity in the image. Briefcam is deployed in several cities across Europe, including <a class="maplink" data-title="Briefcam deployment in Vannes">Vannes</a>, <a class="maplink" data-title="Center of Urban Supervision (Roubaix)">Roubaix</a> (in partnership with <strong><a class="maplink" data-title="Eiffage">Eiffage</a></strong>) and <a class="maplink" data-title="City of Moirans">Moirans</a> in France.</p>
|
||||||
<p><strong>- Object detection and object tracking. As outlined in chapter 2, object detection is often the first step in the various digital detection applications for images. An ‘object’ here can mean anything the computer is conditioned to search for: a suitcase, a vehicle, but also a person; while some products further process the detected object to estimate particular features, such as the colour of a vehicle, the age of a person. However, on some occasions — often to address concerns over privacy — only the position of the object on the image is stored. This is for example the case with the</strong> test of the <strong>One-and-a-half-meter monitor in Amsterdam (Netherlands), <a class="maplink" data-title="Intemo">Intemo</a>’s people counting system in Nijmegen (Netherlands),</strong> the <strong>KICK project</strong> in <strong>Brugge</strong>, <strong>Kortrijk</strong>, <strong>Ieper</strong>, <strong>Roeselare</strong> and <strong>Oostende</strong> in Belgium or the <strong>Eco-counter</strong> <strong>tracking cameras pilot project</strong> in <strong>Lannion</strong> (France).</p>
|
<p><strong>- Object detection and object tracking. As outlined in chapter 2, object detection is often the first step in the various digital detection applications for images. An ‘object’ here can mean anything the computer is conditioned to search for: a suitcase, a vehicle, but also a person; while some products further process the detected object to estimate particular features, such as the colour of a vehicle, the age of a person. However, on some occasions — often to address concerns over privacy — only the position of the object on the image is stored. This is for example the case with the</strong> test of the <strong><a class="maplink" data-title="Test of One and a half meter monitor">One-and-a-half-meter monitor</a> in Amsterdam (Netherlands), <a class="maplink" data-title="Intemo">Intemo</a>’s people counting system in Nijmegen (Netherlands),</strong> the <strong><a class="maplink" data-title="Project KICK">KICK project</a></strong> in <strong><a class="maplink" data-title="Brugge Municipality">Brugge</a></strong>, <strong><a class="maplink" data-title="Kortrijk Municipality">Kortrijk</a></strong>, <strong><a class="maplink" data-title="Kortrijk Municipality">Ieper</a></strong>, <strong><a class="maplink" data-title="Roeselare Municipality">Roeselare</a></strong> and <strong><a class="maplink" data-title="Economisch Huis Oostende">Oostende</a></strong> in Belgium or the <strong><a class="maplink" data-title="Eco-Counter">Eco-counter</a></strong> <strong><a class="maplink" data-title="Tracking cameras pilot in Lannion">tracking cameras pilot project</a></strong> in <strong><a class="maplink" data-title="City of Lannion">Lannion</a></strong> (France).</p>
|
||||||
<p><strong>- Movement recognition. <a class="maplink" data-title="Avigilon">Avigilon</a>’s software that is deployed in Marbella (Spain) also detects unusual movement. “To avoid graffiti, we can calculate the time someone takes to pass a shop window, “explained Javier Martín, local chief of police in Marbella to the Spanish newspaper El País. “If it takes them more than 10 seconds, the camera is activated to see if they are graffitiing. So far, it hasn’t been activated.” (Colomé 2019) Similar movement recognition technology is used in, the ViSense deployment at the Olympic Park London (UK) and the security camera system in Mechelen-Willebroek (Belgium). It should be noted that movement</strong> recognition can be done in two ways: where projects such as the <strong><a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Data-lab Burglary-free Neighbourhood</a> in Rotterdam (Netherlands)</strong><a href="#fn27" class="footnote-ref" id="fnref27" role="doc-noteref"><sup>27</sup></a> are only based on the tracking of trajectories of people through an image (see also ‘Object detection’), cases such as <strong>the <a class="maplink" data-title="Living Lab Stratumseind">Living Lab Stratumseind</a></strong><a href="#fn28" class="footnote-ref" id="fnref28" role="doc-noteref"><sup>28</sup></a> <strong>in Eindhoven (Netherlands)</strong> also process the movements and gestures of individuals in order to estimate their behaviour.</p>
|
<p><strong>- Movement recognition. <a class="maplink" data-title="Avigilon">Avigilon</a>’s software that is deployed in Marbella (Spain) also detects unusual movement. “To avoid graffiti, we can calculate the time someone takes to pass a shop window, “explained Javier Martín, local chief of police in Marbella to the Spanish newspaper El País. “If it takes them more than 10 seconds, the camera is activated to see if they are graffitiing. So far, it hasn’t been activated.” (Colomé 2019) Similar movement recognition technology is used in, the ViSense deployment at the Olympic Park London (UK) and the security camera system in Mechelen-Willebroek (Belgium). It should be noted that movement</strong> recognition can be done in two ways: where projects such as the <strong><a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Data-lab Burglary-free Neighbourhood</a> in Rotterdam (Netherlands)</strong><a href="#fn27" class="footnote-ref" id="fnref27" role="doc-noteref"><sup>27</sup></a> are only based on the tracking of trajectories of people through an image (see also ‘Object detection’), cases such as <strong>the <a class="maplink" data-title="Living Lab Stratumseind">Living Lab Stratumseind</a></strong><a href="#fn28" class="footnote-ref" id="fnref28" role="doc-noteref"><sup>28</sup></a> <strong>in Eindhoven (Netherlands)</strong> also process the movements and gestures of individuals in order to estimate their behaviour.</p>
|
||||||
<section id="audio-recognition-1" class="level4">
|
<section id="audio-recognition-1" class="level4">
|
||||||
<h4>Audio recognition</h4>
|
<h4>Audio recognition</h4>
|
||||||
<p>- In addition to image (video) based products, some deployments use audio recognition to complement the decision-making process, for example used in the <strong><a class="maplink" data-title="Serenecity">Serenecity</a> (a branch of Verney-Carron) Project in Saint-Etienne (France)</strong>, the <strong>Smart CCTV deployment in public transportation in Rouen (France)</strong> or the <strong>Smart CCTV system in Strasbourg (France)</strong>. The project piloted in Saint-Etienne for example, worked by placing “audio capture devices” - the term microphone was avoided- in strategic parts of the city. Sounds qualified by an anomaly detection algorithm as suspicious would then alert operators in the Urban Supervision Center, prompting further investigation via CCTV or deployment of the necessary services (healthcare or police for example) (France 3 Auvergne-Rhône-Alpes 2019.)</p>
|
<p>- In addition to image (video) based products, some deployments use audio recognition to complement the decision-making process, for example used in the <strong><a class="maplink" data-title="Serenecity">Serenecity</a> (a branch of <a class="maplink" data-title="Verney-Carron SA">Verney-Carron</a>) Project in Saint-Etienne (France)</strong>, the <strong><a class="maplink" data-title="Smart CCTV with audio detection in Rouen public transportation">Smart CCTV deployment in public transportation</a> in <a class="maplink" data-title="City of Rouen">Rouen</a> (France)</strong> or the <strong><a class="maplink" data-title="Smart surveillance coupled with audio recognition (Strasbourg)">Smart CCTV system in Strasbourg</a> (France)</strong>. The <a class="maplink" data-title="Serenicity project (Saint-Etienne)">project piloted in Saint-Etienne</a> for example, worked by placing “audio capture devices” - the term microphone was avoided- in strategic parts of the city. Sounds qualified by an anomaly detection algorithm as suspicious would then alert operators in the Urban Supervision Center, prompting further investigation via CCTV or deployment of the necessary services (healthcare or police for example) (France 3 Auvergne-Rhône-Alpes 2019.)</p>
|
||||||
</section>
|
</section>
|
||||||
<section id="emotion-recognition" class="level4">
|
<section id="emotion-recognition" class="level4">
|
||||||
<h4>Emotion recognition</h4>
|
<h4>Emotion recognition</h4>
|
||||||
<p>- <strong>Emotion recognition</strong> is a rare occurrence. We found evidence of its deployment only in a <strong>pilot project in Nice (see section 8.1)</strong> and in the <strong><a class="maplink" data-title="Citybeacon">Citybeacon</a> project in Eindhoven, but even then, the project was never actually tested. The original idea proposed by the company <a class="maplink" data-title="Two-I">Two-I</a> was “a "real-time emotional mapping" capable of highlighting "potentially problematic or even dangerous situations". "A dynamic deployment of security guards in an area where tension and stress are felt, is often a simple way to avoid any overflow," also argues <a class="maplink" data-title="Two-I">Two-I</a>, whose "Security" software would be able to decipher some 10,000 faces per second. (Binacchi 2019)</strong></p>
|
<p>- <strong>Emotion recognition</strong> is a rare occurrence. We found evidence of its deployment only in a <strong><a class="maplink" data-title="Facial/Emotion Recognition Pilot in Tramway (Nice)">pilot project in Nice</a> (see section 8.1)</strong> and in the <strong><a class="maplink" data-title="Citybeacon">Citybeacon</a> project in Eindhoven, but even then, the project was never actually tested. The original idea proposed by the company <a class="maplink" data-title="Two-I">Two-I</a> was “a "real-time emotional mapping" capable of highlighting "potentially problematic or even dangerous situations". "A dynamic deployment of security guards in an area where tension and stress are felt, is often a simple way to avoid any overflow," also argues <a class="maplink" data-title="Two-I">Two-I</a>, whose "Security" software would be able to decipher some 10,000 faces per second. (Binacchi 2019)</strong></p>
|
||||||
</section>
|
</section>
|
||||||
<section id="gait-recognition-1" class="level4">
|
<section id="gait-recognition-1" class="level4">
|
||||||
<h4>Gait recognition</h4>
|
<h4>Gait recognition</h4>
|
||||||
|
@ -902,16 +938,17 @@
|
||||||
<h3>Integrated solutions </h3>
|
<h3>Integrated solutions </h3>
|
||||||
<section id="smart-cities" class="level4">
|
<section id="smart-cities" class="level4">
|
||||||
<h4>Smart cities</h4>
|
<h4>Smart cities</h4>
|
||||||
<p>While some cities or companies decide to implement some of the functionalities with their existing or updated CCTV systems, several chose to centralise several of these “smart” functions in <strong>integrated systems</strong> often referred to as “safe city” solutions. These solutions do not necessarily process biometric information. This is the case for example for the deployments in <strong>TIM’s</strong>, <strong>Insula</strong> and <strong>Venis’</strong> <strong>Safe City Platform in Venice (Italy)</strong>, <strong><a class="maplink" data-title="Huawei">Huawei</a>’s</strong> <strong>Safe City in Valenciennes (France)</strong>, <strong>Dahua’s integrated solution in Brienon-sur-Armançon</strong> <strong>(France)</strong>, <strong>Thalès’ Safe City in La Défense and Nice (France)</strong>, <strong>Engie Inéo’s and SNEF’s integrated solution in Marseille (France)</strong>, the <strong>Center of Urban Supervision in Roubaix (France)</strong>, <strong>AI Mars (Madrid, in development)</strong> or <strong>NEC’s platform in <a class="maplink" data-title="NEC Technology in Lisbon">Lisbon</a> and London</strong>.</p>
|
<p>While some cities or companies decide to implement some of the functionalities with their existing or updated CCTV systems, several chose to centralise several of these “smart” functions in <strong>integrated systems</strong> often referred to as “safe city” solutions. These solutions do not necessarily process biometric information. This is the case for example for the deployments in <strong>TIM’s</strong>, <strong>Insula</strong> and <strong>Venis’</strong> <strong><a class="maplink" data-title="Control Room (Venice)">Safe City Platform in Venice</a> (Italy)</strong>, <strong><a class="maplink" data-title="Huawei">Huawei</a>’s</strong> <strong><a class="maplink" data-title="Smart video surveillance in Valenciennes">Safe City in Valenciennes</a> (France)</strong>, <strong><a class="maplink" data-title="Dahua Deployment in Brienon-sur-Armançon">Dahua’s integrated solution in Brienon-sur-Armançon</a></strong> <strong>(France)</strong>, <strong><a class="maplink" data-title="Thales">Thalès</a>’ Safe City in <a class="maplink" data-title="Safe City Pilot (La Défense)">La Défense</a> and <a class="maplink" data-title="Safe City Pilot Project (Nice)"
|
||||||
<p>The way “Smart/Safe City” solutions work is well exemplified by the “Control room” deployed in Venice, connected to an urban surveillance network. The system is composed of a central command and control room which aggregates cloud computing systems, together with smart cameras, artificial intelligence systems, antennas and hundreds of sensors distributed on a widespread network. The idea is to monitor what happens in the lagoon city in real time. The scope of the abilities of the centre is wide-ranging. It promises to: manage events and incoming tourist flows, something particularly relevant to a city which aims to implement a visiting fee for tourists; predict and manage weather events in advance, such as the shifting of tides and high water, by defining alternative routes for transit in the city; indicating to the population in real time the routes to avoid traffic and better manage mobility for time optimisation; improve the management of public safety allowing city agents to intervene in a more timely manner; control and manage water and road traffic, also for sanctioning purposes, through specific video-analysis systems; control the status of parking lots; monitor the environmental and territorial situation; collect, process data and information that allow for the creation of forecasting models and the allocation of resources more efficiently and effectively; bring to life a physical "Smart Control Room" where law enforcement officers train and learn how to read data as well. (LUMI 2020)</p>
|
>Nice</a> (France)</strong>, <strong>Engie Inéo’s and SNEF’s <a class="maplink" data-title="SNEF Smart CCTVs in Marseille">integrated solution in Marseille</a> (France)</strong>, the <strong><a class="maplink" data-title="Center of Urban Supervision (Roubaix)">Center of Urban Supervision in Roubaix</a> (France)</strong>, <strong><a class="maplink" data-title="AI Mars (Potential)">AI Mars</a> (Madrid, in development)</strong> or <strong>NEC’s platform in <a class="maplink" data-title="NEC Technology in Lisbon">Lisbon</a> and London</strong>.</p>
|
||||||
|
<p>The way “Smart/Safe City” solutions work is well exemplified by the <a class="maplink" data-title="Control Room (Venice)">“Control room” deployed in Venice</a>, connected to an urban surveillance network. The system is composed of a central command and control room which aggregates cloud computing systems, together with smart cameras, artificial intelligence systems, antennas and hundreds of sensors distributed on a widespread network. The idea is to monitor what happens in the lagoon city in real time. The scope of the abilities of the centre is wide-ranging. It promises to: manage events and incoming tourist flows, something particularly relevant to a city which aims to implement a visiting fee for tourists; predict and manage weather events in advance, such as the shifting of tides and high water, by defining alternative routes for transit in the city; indicating to the population in real time the routes to avoid traffic and better manage mobility for time optimisation; improve the management of public safety allowing city agents to intervene in a more timely manner; control and manage water and road traffic, also for sanctioning purposes, through specific video-analysis systems; control the status of parking lots; monitor the environmental and territorial situation; collect, process data and information that allow for the creation of forecasting models and the allocation of resources more efficiently and effectively; bring to life a physical "Smart Control Room" where law enforcement officers train and learn how to read data as well. (LUMI 2020)</p>
|
||||||
</section>
|
</section>
|
||||||
<section id="smartphone-apps" class="level4">
|
<section id="smartphone-apps" class="level4">
|
||||||
<h4>Smartphone apps</h4>
|
<h4>Smartphone apps</h4>
|
||||||
<p>Integrated solutions can entail smartphone apps, used to connect citizens with the control and command centres. This is for example the case in Nice with the (failed) <strong>Reporty App</strong> project (See Chapter 5), the <strong>Dragonfly project (Hungary) (See chapter 10)</strong> and was part of the original plan of <strong>Marseille’s Safe City project</strong>.</p>
|
<p>Integrated solutions can entail smartphone apps, used to connect citizens with the control and command centres. This is for example the case in Nice with the (failed) <strong><a class="maplink" data-title="Reporty App Nice (Stopped)">Reporty App</a></strong> project (See Chapter 5), the <strong>Dragonfly project (Hungary) (See chapter 10)</strong> and was part of the original plan of <strong><a class="maplink" data-title="Safe City project in Marseille (Observatoire Big Data de la Tranquillité Publique)">Marseille’s Safe City project</a></strong>.</p>
|
||||||
</section>
|
</section>
|
||||||
<section id="crowd-management" class="level4">
|
<section id="crowd-management" class="level4">
|
||||||
<h4>Crowd management</h4>
|
<h4>Crowd management</h4>
|
||||||
<p>Integrated solutions are generally comprised of a set of crowd management features, such as in the case of the systems <strong>in Valenciennes and Marseille (France), Mannheim (Germany), Venice (Italy), Amsterdam, Eindhoven and Den Bosch with the Crowdwatch project (Netherlands).</strong> Such crowd management software generally does not recognise individuals, but rather estimates the number of people on (a part of) the video frame. Sudden movements of groups or changes in density are then flagged for attention of the security operator (Nishiyama 2018).</p>
|
<p>Integrated solutions are generally comprised of a set of crowd management features, such as in the case of the systems <strong>in <a class="maplink" data-title="Smart video surveillance in Valenciennes">Valenciennes</a> and <a class="maplink" data-title="Urban Surveillance Center in Marseille">Marseille</a> (France), <a class="maplink" data-title="Mannheim public surveillance">Mannheim</a> (Germany), <a class="maplink" data-title="Control Room (Venice)">Venice</a> (Italy), Amsterdam, <a class="maplink" data-title="Citybeacons Eindhoven">Eindhoven</a> and Den Bosch with the <a class="maplink" data-title="CrowdWatch">Crowdwatch</a> project (Netherlands).</strong> Such crowd management software generally does not recognise individuals, but rather estimates the number of people on (a part of) the video frame. Sudden movements of groups or changes in density are then flagged for attention of the security operator (Nishiyama 2018).</p>
|
||||||
</section>
|
</section>
|
||||||
</section>
|
</section>
|
||||||
</section>
|
</section>
|
||||||
|
@ -921,12 +958,12 @@
|
||||||
<section id="deployment-of-rbi-in-public-spaces" class="level3">
|
<section id="deployment-of-rbi-in-public-spaces" class="level3">
|
||||||
<h3>Deployment of RBI in public spaces</h3>
|
<h3>Deployment of RBI in public spaces</h3>
|
||||||
<p>Here are the documented cases of RBI in public spaces we could find through our research:</p>
|
<p>Here are the documented cases of RBI in public spaces we could find through our research:</p>
|
||||||
<p>- <strong>Live Facial Recognition pilot project in Brussels International Airport / Zaventem</strong> (Belgium, see detailed case study, CHAPTER 6)</p>
|
<p>- <strong>Live Facial Recognition <a class="maplink" data-title="Facial Recognition in Brussels Airport (Stopped)">pilot project in Brussels International Airport / Zaventem</a></strong> (Belgium, see detailed case study, CHAPTER 6)</p>
|
||||||
<p><strong>- Live Facial Recognition in Budapest</strong> (Hungary, see detailed case study, CHAPTER 10)</p>
|
<p>- <strong>Live Facial Recognition in Budapest</strong> (Hungary, see detailed case study, CHAPTER 10)</p>
|
||||||
<p>- <strong>Live Facial Recognition pilot project during the Carnival in Nice</strong> (France, see detailed case study, CHAPTER 8)</p>
|
<p>- <strong>Live Facial Recognition <a class="maplink" data-title="Facial Recognition Pilot Project during Carnival (Nice)">pilot project during the Carnival in Nice</a></strong> (France, see detailed case study, CHAPTER 8)</p>
|
||||||
<p><strong>- Live Facial Recognition <a class="maplink" data-title="Pilot Project Südkreuz Berlin">Pilot Project Südkreuz Berlin</a></strong> (Germany, see detailed case study, CHAPTER 9)</p>
|
<p>- <strong>Live Facial Recognition <a class="maplink" data-title="Pilot Project Südkreuz Berlin">Pilot Project Südkreuz Berlin</a></strong> (Germany, see detailed case study, CHAPTER 9)</p>
|
||||||
<ul>
|
<ul>
|
||||||
<li><p>Live Facial Recognition during Carnival 2019 in 's-Hertogenbosch’s Lange Putstraat (the Netherlands)</p></li>
|
<li><p>Live Facial Recognition during <a class="maplink" data-title="Korte Putstraat (Stopped)">Carnival 2019 in 's-Hertogenbosch’s Korte Putstraat</a> (the Netherlands)</p></li>
|
||||||
</ul>
|
</ul>
|
||||||
<p>As most of these cases are extensively discussed in the following chapters, we do not comment further on them here.</p>
|
<p>As most of these cases are extensively discussed in the following chapters, we do not comment further on them here.</p>
|
||||||
</section>
|
</section>
|
||||||
|
@ -1093,7 +1130,7 @@
|
||||||
<p>Taking in particular issue with Article 4 and the possible exemptions to regulation of AI “in order to safeguard public safety”, they urge the European Commission “to make sure that existing protections are upheld and <strong>a clear ban on biometric mass surveillance in public spaces is proposed</strong>. This is what a majority of citizens want” (Breyer et al. 2021)</p>
|
<p>Taking in particular issue with Article 4 and the possible exemptions to regulation of AI “in order to safeguard public safety”, they urge the European Commission “to make sure that existing protections are upheld and <strong>a clear ban on biometric mass surveillance in public spaces is proposed</strong>. This is what a majority of citizens want” (Breyer et al. 2021)</p>
|
||||||
|
|
||||||
<p><strong><a class="maplink" data-title="European Digital Rights (EDRi)">European Digital Rights (EDRi)</a>, an umbrella organisation of</strong> 44 digital rights NGOs in Europe takes a radical stance on the issue. They argue <strong>that mass processing of biometric data in public spaces creates a serious risk of mass surveillance</strong> that infringes on fundamental rights, and therefore they call on the Commission to <strong>permanently stop all deployments that can lead to mass surveillance</strong>. In their report <em>Ban Biometric Mass Surveillance</em> (2020) they demand that the EDPB and national DPAs <strong>“publicly disclose all existing and planned activities and deployments that fall within this remit.</strong>” (EDRi 2020, 5). Furthermore, they call for ceasing all planned legislation which establishes biometric processing as well as the funding for all such projects, amounting to an “immediate and indefinite ban on biometric processing”.</p>
|
<p><strong><a class="maplink" data-title="European Digital Rights (EDRi)">European Digital Rights (EDRi)</a>, an umbrella organisation of</strong> 44 digital rights NGOs in Europe takes a radical stance on the issue. They argue <strong>that mass processing of biometric data in public spaces creates a serious risk of mass surveillance</strong> that infringes on fundamental rights, and therefore they call on the Commission to <strong>permanently stop all deployments that can lead to mass surveillance</strong>. In their report <em>Ban Biometric Mass Surveillance</em> (2020) they demand that the EDPB and national DPAs <strong>“publicly disclose all existing and planned activities and deployments that fall within this remit.</strong>” (EDRi 2020, 5). Furthermore, they call for ceasing all planned legislation which establishes biometric processing as well as the funding for all such projects, amounting to an “immediate and indefinite ban on biometric processing”.</p>
|
||||||
<p><strong><a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a> (LQDN) one of EDRi’s founding members</strong> (created in 2008 to “promote and defend fundamental freedoms in the digital world") similarly called for a ban on <strong>any present and future use of facial recognition for security and surveillance purposes</strong>. Together with a number of other French NGOs monitoring legislation impacting digital freedoms, as well as other collectives, companies, associations and trade unions, the LQDN initiated a joint open letter in which they call on French authorities to ban any security and surveillance use of facial recognition due to their <strong>uniquely invasive and dehumanising</strong> nature. In their letter they point to the fact that in France there are a “multitude of systems already installed, outside of any real legal framework, without transparency or public discussion” referring, among others, to the PARAFE system and the use of FRTs by civil and military police. As they put it:</p>
|
<p><strong><a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a> (LQDN) one of EDRi’s founding members</strong> (created in 2008 to “promote and defend fundamental freedoms in the digital world") similarly called for a ban on <strong>any present and future use of facial recognition for security and surveillance purposes</strong>. Together with a number of other French NGOs monitoring legislation impacting digital freedoms, as well as other collectives, companies, associations and trade unions, the <a class="maplink" data-title="La Quadrature du Net">LQDN</a> initiated a joint open letter in which they call on French authorities to ban any security and surveillance use of facial recognition due to their <strong>uniquely invasive and dehumanising</strong> nature. In their letter they point to the fact that in France there are a “multitude of systems already installed, outside of any real legal framework, without transparency or public discussion” referring, among others, to the PARAFE system and the use of FRTs by civil and military police. As they put it:</p>
|
||||||
</section>
|
</section>
|
||||||
<section id="facial-recognition-is-a-uniquely-invasive-and-dehumanising-technology-which-makes-possible-sooner-or-later-constant-surveillance-of-the-public-space.-it-creates-a-society-in-which-we-are-all-suspects.-it-turns-our-face-into-a-tracking-device-rather-than-a-signifier-of-personality-eventually-reducing-it-to-a-technical-object.-it-enables-invisible-control.-it-establishes-a-permanent-and-inescapable-identification-regime.-it-eliminates-anonymity.-no-argument-can-justify-the-deployment-of-such-a-technology.-la-quadrature-du-net.-et-al.-2019" class="level4 Quote">
|
<section id="facial-recognition-is-a-uniquely-invasive-and-dehumanising-technology-which-makes-possible-sooner-or-later-constant-surveillance-of-the-public-space.-it-creates-a-society-in-which-we-are-all-suspects.-it-turns-our-face-into-a-tracking-device-rather-than-a-signifier-of-personality-eventually-reducing-it-to-a-technical-object.-it-enables-invisible-control.-it-establishes-a-permanent-and-inescapable-identification-regime.-it-eliminates-anonymity.-no-argument-can-justify-the-deployment-of-such-a-technology.-la-quadrature-du-net.-et-al.-2019" class="level4 Quote">
|
||||||
<blockquote class="Quote">“Facial recognition is a uniquely invasive and dehumanising technology, which makes possible, sooner or later, constant surveillance of the public space. It creates a society in which we are all suspects. It turns our face into a tracking device, rather than a signifier of personality, eventually reducing it to a technical object. It enables invisible control. It establishes a permanent and inescapable identification regime. It eliminates anonymity. No argument can justify the deployment of such a technology.” <footer>(La Quadrature du Net. et al. 2019)</footer></blockquote>
|
<blockquote class="Quote">“Facial recognition is a uniquely invasive and dehumanising technology, which makes possible, sooner or later, constant surveillance of the public space. It creates a society in which we are all suspects. It turns our face into a tracking device, rather than a signifier of personality, eventually reducing it to a technical object. It enables invisible control. It establishes a permanent and inescapable identification regime. It eliminates anonymity. No argument can justify the deployment of such a technology.” <footer>(La Quadrature du Net. et al. 2019)</footer></blockquote>
|
||||||
|
@ -1143,25 +1180,25 @@
|
||||||
<ul>
|
<ul>
|
||||||
<li><p>Belgium is one of two European countries that has not yet authorised the use of FRT, however, law enforcement is strongly advocating for its use and the current legal obstacles to its implementation might not hold for very long given the political pressure.</p></li>
|
<li><p>Belgium is one of two European countries that has not yet authorised the use of FRT, however, law enforcement is strongly advocating for its use and the current legal obstacles to its implementation might not hold for very long given the political pressure.</p></li>
|
||||||
<li><p>In 2017, unbeknownst to the Belgian Supervisory Body for Police Information (COC), Brussels International Airport acquired 4 cameras connected to a facial recognition software for use by the airport police. Though the COC subsequently ruled that this use fell outside of the conditions for a lawful deployment, the legality of the airport experiment fell into a legal grey area because of the ways in which the technology was deployed.</p></li>
|
<li><p>In 2017, unbeknownst to the Belgian Supervisory Body for Police Information (COC), Brussels International Airport acquired 4 cameras connected to a facial recognition software for use by the airport police. Though the COC subsequently ruled that this use fell outside of the conditions for a lawful deployment, the legality of the airport experiment fell into a legal grey area because of the ways in which the technology was deployed.</p></li>
|
||||||
<li><p>One justification for the legality of the airport experiment from the General Commissioner of Federal Police was to compare the technological deployment to that of the legal use of other intelligent technologies such as Automated Number Plate Recognition (ANPR). Although this argument was rejected at the time, such a system could be re-instated if the grounds for interruption are no longer present in the law.</p></li>
|
<li><p>One justification for the legality of the airport experiment from the General Commissioner of <a class="maplink" data-title="Belgian Federal Police">Federal Police</a> was to compare the technological deployment to that of the legal use of other intelligent technologies such as Automated Number Plate Recognition (ANPR). Although this argument was rejected at the time, such a system could be re-instated if the grounds for interruption are no longer present in the law.</p></li>
|
||||||
<li><p>Some civil society actors in Belgium contest the legitimacy of remote biometric identification. However, current legislative activity seems to point in the direction of more acceptance for remote biometric surveillance.</p></li>
|
<li><p>Some civil society actors in Belgium contest the legitimacy of remote biometric identification. However, current legislative activity seems to point in the direction of more acceptance for remote biometric surveillance.</p></li>
|
||||||
</ul>
|
</ul>
|
||||||
</div> <!-- key points -->
|
</div> <!-- key points -->
|
||||||
<p>Belgium is, with Spain, one of the few countries in Europe that <strong>has not authorised the use of facial recognition technology</strong>, neither for criminal investigations nor for mass surveillance (Vazquez 2020). This does not mean that it is unlikely to change its position in the very near future. <strong>Law enforcement is indeed strongly advocating its use</strong>, and the current legal obstacles are not likely to hold for very long (Bensalem 2018). The pilot experiment that took place in Zaventem / Brussels International Airport, although aborted, occurred within a national context in which <strong>biometric systems are increasingly used and deployed</strong>.</p>
|
<p>Belgium is, with Spain, one of the few countries in Europe that <strong>has not authorised the use of facial recognition technology</strong>, neither for criminal investigations nor for mass surveillance (Vazquez 2020). This does not mean that it is unlikely to change its position in the very near future. <strong>Law enforcement is indeed strongly advocating its use</strong>, and the current legal obstacles are not likely to hold for very long (Bensalem 2018). The pilot experiment that took place in Zaventem / Brussels International Airport, although aborted, occurred within a national context in which <strong>biometric systems are increasingly used and deployed</strong>.</p>
|
||||||
<p>Belgium will, for example, soon roll out at the national level the new biometric identity card “<strong>eID</strong>”, the Minister of Interior Annelies Verlinden has recently announced. The identification document, which will rely on the constitution of a broad biometric database and is part of a broader <a class="maplink" data-title="European Union">European Union</a> initiative, is developed in partnership with security multinational <strong><a class="maplink" data-title="Thales">Thales</a></strong>, and was already trialled with 53.000 citizens (Prins 2021; Thales Group 2020).<a href="#fn30" class="footnote-ref" id="fnref30" role="doc-noteref"><sup>30</sup></a></p>
|
<p>Belgium will, for example, soon roll out at the national level the new biometric identity card “<strong>eID</strong>”, the Minister of Interior Annelies Verlinden has recently announced. The identification document, which will rely on the constitution of a broad biometric database and is part of a broader <a class="maplink" data-title="European Union">European Union</a> initiative, is developed in partnership with security multinational <strong><a class="maplink" data-title="Thales">Thales</a></strong>, and was already trialled with 53.000 citizens (Prins 2021; Thales Group 2020).<a href="#fn30" class="footnote-ref" id="fnref30" role="doc-noteref"><sup>30</sup></a></p>
|
||||||
<p>Municipalities in different parts of the country are experimenting with <strong>Automated Number Plate Recognition (ANPR) technology</strong>. A smaller number have started deploying “<strong>smart CCTV</strong>” cameras, which fall just short of using facial recognition technology. The city of Kortrijk has for example deployed “<strong>body recognition</strong>” technology, which uses walking style or clothing of individuals to track them across the city’s CCTV network. Facial recognition is possible with these systems, but has not been activated as of yet <strong>pending legal authorisation to do so</strong>. In the city of Roeselare, “smart cameras” have been installed in one of the shopping streets. Deployed by telecom operator Citymesh, they could provide facial recognition services, but are currently used to count and estimate crowds, data which is shared with the police (van Brakel 2020). All the emerging initiatives of remote biometric identification are however pending a reversal of the decision to halt the experiment at Zaventem Brussels International Airport.</p>
|
<p>Municipalities in different parts of the country are experimenting with <strong>Automated Number Plate Recognition (ANPR) technology</strong>. A smaller number have started deploying “<strong>smart CCTV</strong>” cameras, which fall just short of using facial recognition technology. The city of Kortrijk has for example deployed “<strong>body recognition</strong>” technology, which uses walking style or clothing of individuals to track them across the city’s CCTV network. Facial recognition is possible with these systems, but has not been activated as of yet <strong>pending legal authorisation to do so</strong>. In the city of Roeselare, “smart cameras” have been installed in one of the shopping streets. Deployed by telecom operator Citymesh, they could provide facial recognition services, but are currently used to count and estimate crowds, data which is shared with the police (van Brakel 2020). All the emerging initiatives of remote biometric identification are however pending a reversal of the decision to halt <a class="maplink" data-title="Facial Recognition in Brussels Airport (Stopped)">the experiment at Zaventem Brussels International Airport</a>.</p>
|
||||||
<section id="the-zaventem-pilot-in-the-context-of-face-recognition-technology-in-belgium" class="level2">
|
<section id="the-zaventem-pilot-in-the-context-of-face-recognition-technology-in-belgium" class="level2">
|
||||||
<h2>The Zaventem pilot in the context of Face Recognition Technology in Belgium</h2>
|
<h2>The Zaventem pilot in the context of Face Recognition Technology in Belgium</h2>
|
||||||
<p>The use of <strong>facial recognition technology</strong> at the Brussels International Airport was announced on 10 July 2019 in the Flemish weekly <em>Knack</em> by General Commissioner of Federal Police Marc De Mesmaeker (Lippens and Vandersmissen 2019). There is currently no publicly available information as to who provided the technical system. De Mesmaeker explained that an agreement had been found with the company managing the airport and the labour unions, and thus that the technology was already in use (Organe de Controle de l'Information Policière 2019, 3).</p>
|
<p>The use of <strong>facial recognition technology</strong> at the Brussels International Airport was announced on 10 July 2019 in the Flemish weekly <em>Knack</em> by General Commissioner of Federal Police Marc De Mesmaeker (Lippens and Vandersmissen 2019). There is currently no publicly available information as to who provided the technical system. De Mesmaeker explained that an agreement had been found with the company managing the airport and the labour unions, and thus that the technology was already in use (Organe de Controle de l'Information Policière 2019, 3).</p>
|
||||||
<p>As part of the justification for the deployment of FRT in Zaventem, De Mesmaeker made a comparison with <strong>ANPR-enabled cameras</strong>, arguing that “They have already helped to solve investigations quickly, (…). Citizens understand this and have learned to live with their presence, but privacy remains a right”. (7sur7 2019)</p>
|
<p>As part of the justification for the deployment of FRT in Zaventem, De Mesmaeker made a comparison with <strong>ANPR-enabled cameras</strong>, arguing that “They have already helped to solve investigations quickly, (…). Citizens understand this and have learned to live with their presence, but privacy remains a right”. (7sur7 2019)</p>
|
||||||
<p>The <strong>Belgian Supervisory Body for Police Information (COC)</strong><a href="#fn31" class="footnote-ref" id="fnref31" role="doc-noteref"><sup>31</sup></a>, in its advisory document, explained that it had no prior knowledge of the deployment and learned about the existence of the facial recognition systems through the interview of De Mesmaeker in the <em>Knack</em> magazine (Organe de Controle de l'Information Policière 2019, 3). On 10 July 2019, the COC thus invited the General Commissioner to communicate all the details of the deployment of this technology in the Brussels International Airport. On 18 July 2019, COC received a summary of the system’s main components. On 9 August 2019, it subsequently visited the premises of the federal police deployment in Zaventem airport (Organe de Controle de l'Information Policière 2019, 3).</p>
|
<p>The <strong>Belgian Supervisory Body for Police Information (COC)</strong><a href="#fn31" class="footnote-ref" id="fnref31" role="doc-noteref"><sup>31</sup></a>, in its advisory document, explained that it had no prior knowledge of the deployment and learned about the existence of the facial recognition systems through the interview of De Mesmaeker in the <em>Knack</em> magazine (Organe de Controle de l'Information Policière 2019, 3). On 10 July 2019, the COC thus invited the General Commissioner to communicate all the details of the deployment of this technology in the Brussels International Airport. On 18 July 2019, COC received a summary of the system’s main components. On 9 August 2019, it subsequently visited the premises of the <a class="maplink" data-title="Facial Recognition in Brussels Airport (Stopped)">federal police deployment in Zaventem airport</a> (Organe de Controle de l'Information Policière 2019, 3).</p>
|
||||||
<p>We know some technical details about the system through the public information shared by the COC. In early 2017, Brussels airport had acquired <strong>4 cameras connected to a facial recognition software for use by the airport police</strong> (Police Aéronautique, LPA) (Farge 2020, 15; Organe de Controle de l'Information Policière 2019, 3). The system works in two steps.</p>
|
<p>We know some technical details about the system through the public information shared by the COC. In early 2017, Brussels airport had acquired <strong>4 cameras connected to a facial recognition software for use by the airport police</strong> (Police Aéronautique, LPA) (Farge 2020, 15; Organe de Controle de l'Information Policière 2019, 3). The system works in two steps.</p>
|
||||||
<p>When provided with video feeds from the four cameras, the software first creates <strong>snapshots</strong>, generating individual records with the faces that appear in the frame. These snapshots on record are then in a second step compared and potentially matched to previously established “<strong>blacklists</strong>” created by the police itself (the reference dataset is thus not external to this particular deployment) (Organe de Controle de l'Information Policière 2019, 3).</p>
|
<p>When provided with video feeds from the four cameras, the software first creates <strong>snapshots</strong>, generating individual records with the faces that appear in the frame. These snapshots on record are then in a second step compared and potentially matched to previously established “<strong>blacklists</strong>” created by the police itself (the reference dataset is thus not external to this particular deployment) (Organe de Controle de l'Information Policière 2019, 3).</p>
|
||||||
<p>The system did however not live up to its promise and generated a high number of <strong>false positives</strong>. Many features such as skin colour, glasses, moustaches, and beards led to false matches. The system was thus partially disconnected in March 2017, and at the time of the visit of the COC, the system was no longer fully in use (Organe de Controle de l'Information Policière 2019, 3). Yet while the second step had been de-activated (matching video feeds against pre-established blacklists of faces), the first function of creating a biometric record of the video feeds was still in place (Organe de Controle de l'Information Policière 2019, 3).</p>
|
<p>The system did however not live up to its promise and generated a high number of <strong>false positives</strong>. Many features such as skin colour, glasses, moustaches, and beards led to false matches. The system was thus partially disconnected in March 2017, and at the time of the visit of the COC, the system was no longer fully in use (Organe de Controle de l'Information Policière 2019, 3). Yet while the second step had been de-activated (matching video feeds against pre-established blacklists of faces), the first function of creating a biometric record of the video feeds was still in place (Organe de Controle de l'Information Policière 2019, 3).</p>
|
||||||
</section>
|
</section>
|
||||||
<section id="legal-bases-and-challenges" class="level2">
|
<section id="legal-bases-and-challenges" class="level2">
|
||||||
<h2>Legal bases and challenges</h2>
|
<h2>Legal bases and challenges</h2>
|
||||||
<p>The legality of the Zaventem airport experiment fell into a legal grey area, but eventually the COC ruled that it fell outside of the conditions for a lawful deployment.</p>
|
<p>The legality of the <a class="maplink" data-title="Facial Recognition in Brussels Airport (Stopped)">Zaventem airport experiment</a> fell into a legal grey area, but eventually the COC ruled that it fell outside of the conditions for a lawful deployment.</p>
|
||||||
<p>The right to privacy is enshrined in <strong>Article 22 of the Belgian Constitution</strong>, which reads as “everyone has the right to the respect of his private and family life, except in the cases and conditions determined by the law.” The <strong>ECHR</strong> and the case law of the <strong>ECtHR</strong> have had considerable influence over the interpretation of Article 22 of the Belgian Constitution (Lavrysen et al. 2017) and thus the right enshrined therein can be broadly construed to encompass the right to protection of personal data and to address risks associated with the use of new technologies (Kindt et al. 2008; De Hert 2017). <strong>Articles 7 and 8 of the Charter</strong> are also relevant where the legislator acts within the scope of EU law (Cour constitutionnelle, N° 2/2021, 14 January 2021).</p>
|
<p>The right to privacy is enshrined in <strong>Article 22 of the Belgian Constitution</strong>, which reads as “everyone has the right to the respect of his private and family life, except in the cases and conditions determined by the law.” The <strong>ECHR</strong> and the case law of the <strong>ECtHR</strong> have had considerable influence over the interpretation of Article 22 of the Belgian Constitution (Lavrysen et al. 2017) and thus the right enshrined therein can be broadly construed to encompass the right to protection of personal data and to address risks associated with the use of new technologies (Kindt et al. 2008; De Hert 2017). <strong>Articles 7 and 8 of the Charter</strong> are also relevant where the legislator acts within the scope of EU law (Cour constitutionnelle, N° 2/2021, 14 January 2021).</p>
|
||||||
<p>Belgium adapted its data protection law to the GDPR by enacting the <strong>Act of 30 July 2018</strong> on the Protection of Natural Persons with regard to the Processing of Personal data (the Data Protection Act). The same act implements the LED, as well. </p>
|
<p>Belgium adapted its data protection law to the GDPR by enacting the <strong>Act of 30 July 2018</strong> on the Protection of Natural Persons with regard to the Processing of Personal data (the Data Protection Act). The same act implements the LED, as well. </p>
|
||||||
<p>In regard to processing of sensitive data for non-law enforcement purposes, the Act sets out certain processing activities which would be regarded as necessary for reasons of substantial public interest, which is one of the lawful grounds listed in <strong>Article 9 of the GDPR</strong> to process said data. Overall, the relevant public interest purposes relate to processing by human rights organisations in relation to their objective of defending and promoting human rights and fundamental freedoms and in relation to an offence in relation to missing persons or sexual exploitation (Article 8, §1, the Data Protection Act). A separate data processing purpose for <strong>personal data of sexual life of the data subject</strong> is introduced in relation to the statutory purpose of evaluating, supervising, and treating persons whose sexual behaviour may be qualified as a criminal offence (Article 8, §1, 3°, the Data Protection Act).</p>
|
<p>In regard to processing of sensitive data for non-law enforcement purposes, the Act sets out certain processing activities which would be regarded as necessary for reasons of substantial public interest, which is one of the lawful grounds listed in <strong>Article 9 of the GDPR</strong> to process said data. Overall, the relevant public interest purposes relate to processing by human rights organisations in relation to their objective of defending and promoting human rights and fundamental freedoms and in relation to an offence in relation to missing persons or sexual exploitation (Article 8, §1, the Data Protection Act). A separate data processing purpose for <strong>personal data of sexual life of the data subject</strong> is introduced in relation to the statutory purpose of evaluating, supervising, and treating persons whose sexual behaviour may be qualified as a criminal offence (Article 8, §1, 3°, the Data Protection Act).</p>
|
||||||
|
@ -1198,30 +1235,30 @@
|
||||||
<div class="keypoints">
|
<div class="keypoints">
|
||||||
<p><strong>Key points</strong></p>
|
<p><strong>Key points</strong></p>
|
||||||
<ul>
|
<ul>
|
||||||
<li><p>The Fieldlab Burglary Free Neighbourhood is a public-private collaboration with two aims: to detect suspicious behaviour and to influence the behaviour of the suspect. While the system of smart streetlamps does collect some image and sound-based data, it does not record any characteristics specific to the individual.</p></li>
|
<li><p>The <a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Fieldlab Burglary Free Neighbourhood</a> is a public-private collaboration with two aims: to detect suspicious behaviour and to influence the behaviour of the suspect. While the system of smart streetlamps does collect some image and sound-based data, it does not record any characteristics specific to the individual.</p></li>
|
||||||
<li><p>From a legal perspective, there is a question as to whether or not the data processed by the Burglary Free Neighbourhood programme qualifies as personal data and thus would fall within the scope of data protection legislation.</p></li>
|
<li><p>From a legal perspective, there is a question as to whether or not the data processed by the Burglary Free Neighbourhood programme qualifies as personal data and thus would fall within the scope of data protection legislation.</p></li>
|
||||||
<li><p>It is contested whether forms of digital monitoring and signalling are actually the most efficient methods for preventing break ins. Despite the aims of the programme, to date, the streetlights have only been used to capture data for the purposes of machine learning.</p></li>
|
<li><p>It is contested whether forms of digital monitoring and signalling are actually the most efficient methods for preventing break ins. Despite the aims of the programme, to date, the streetlights have only been used to capture data for the purposes of machine learning.</p></li>
|
||||||
<li><p>The infrastructure installed for the experiments can potentially be used for more invasive forms of monitoring. During the project, local police, for example, already voiced an interest in access to the cameras.</p></li>
|
<li><p>The infrastructure installed for the experiments can potentially be used for more invasive forms of monitoring. During the project, local police, for example, already voiced an interest in access to the cameras.</p></li>
|
||||||
<li><p>In March 2021, the Fieldlab trial ended. The data collected over the course of the project was not sufficient to have the computer distinguish suspicious trajectories. The infrastructure of cameras and microphones is currently disabled, yet remains in place.</p></li>
|
<li><p>In March 2021, the Fieldlab trial ended. The data collected over the course of the project was not sufficient to have the computer distinguish suspicious trajectories. The infrastructure of cameras and microphones is currently disabled, yet remains in place.</p></li>
|
||||||
</ul>
|
</ul>
|
||||||
</div> <!-- key points -->
|
</div> <!-- key points -->
|
||||||
<p>In October 2019, the Carlo Collodihof, a courtyard in the Rotterdam neighbourhood Lombardijen, was equipped with a new kind of streetlamp. The twelve new luminaires did not just illuminate the streets; they were <strong>fitted with cameras, microphones, speakers, and a computer which was connected to the internet</strong>. They are part of the so called <strong>Fieldlab Burglary Free Neighbourhood</strong>: an experiment in the public space with technologies for computer sensing and data processing, aimed at the prevention of break-ins, robberies, and aggression; increasing the chances of catching and increasing a sense of safety for the inhabitants of the neighbourhood (Redactie Inbraakvrije Wijk 2019; Kokkeler et al. 2020b). The practical nature of a Fieldlab provides a way to examine concretely how the various technologies come together, and how they fit in with existing infrastructures and regulations.</p>
|
<p>In October 2019, the Carlo Collodihof, a courtyard in the Rotterdam neighbourhood Lombardijen, was equipped with a new kind of streetlamp. The twelve new luminaires did not just illuminate the streets; they were <strong>fitted with cameras, microphones, speakers, and a computer which was connected to the internet</strong>. They are part of the so called <strong><a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Fieldlab Burglary Free Neighbourhood</a></strong>: an experiment in the public space with technologies for computer sensing and data processing, aimed at the prevention of break-ins, robberies, and aggression; increasing the chances of catching and increasing a sense of safety for the inhabitants of the neighbourhood (Redactie Inbraakvrije Wijk 2019; Kokkeler et al. 2020b). The practical nature of a Fieldlab provides a way to examine concretely how the various technologies come together, and how they fit in with existing infrastructures and regulations.</p>
|
||||||
<section id="detection-and-decision-making-in-the-burglary-free-neighbourhood-fieldlab" class="level2">
|
<section id="detection-and-decision-making-in-the-burglary-free-neighbourhood-fieldlab" class="level2">
|
||||||
<h2>Detection and decision-making in the “Burglary free neighbourhood” Fieldlab</h2>
|
<h2>Detection and decision-making in the “Burglary free neighbourhood” Fieldlab</h2>
|
||||||
<p>The national programme Burglary Free Neighbourhood was initiated and funded by the <strong>Dutch Ministry of Justice and Security</strong>. It is led by <strong>DITSS</strong> (Dutch Institute for Technology, Safety &amp; Security), a non-profit organisation, that has been involved in earlier computer sensing projects in the Netherlands – for example in <strong>Stratumseind, Eindhoven</strong> (The Hague Security Delta 2021). Other parties involved include the municipality of Rotterdam, the police –both on a local and national level– the Public Prosecutor’s Office and insurance company <a class="maplink" data-title="Interpolis">Interpolis</a>. Part of the research is carried out by University of Twente, <a class="maplink" data-title="Avans Hogeschool">Avans Hogeschool</a>, the Network Institute of the Vrije Universiteit Amsterdam and the Max Planck Institute for Foreign and International Criminal Law (Freiburg, Germany).</p>
|
<p>The national programme Burglary Free Neighbourhood was initiated and funded by the <strong>Dutch Ministry of Justice and Security</strong>. It is led by <strong><a class="maplink" data-title="Dutch Institute for Technology Safety and Security (DITSS)">DITSS</a></strong> (Dutch Institute for Technology, Safety &amp; Security), a non-profit organisation, that has been involved in earlier computer sensing projects in the Netherlands – for example in <strong>Stratumseind, Eindhoven</strong> (The Hague Security Delta 2021). Other parties involved include the municipality of Rotterdam, the police –both on a local and national level– the Public Prosecutor’s Office and insurance company <a class="maplink" data-title="Interpolis">Interpolis</a>. Part of the research is carried out by University of Twente, <a class="maplink" data-title="Avans Hogeschool">Avans Hogeschool</a>, the Network Institute of the Vrije Universiteit Amsterdam and the Max Planck Institute for Foreign and International Criminal Law (Freiburg, Germany).</p>
|
||||||
<p><img src="images/media/image2.jpg" style="width:6.25564in;height:3.51788in" alt="A picture containing roller coaster, ride Description automatically generated" /></p>
|
<p><img src="images/media/image2.jpg" style="width:6.25564in;height:3.51788in" alt="A picture containing roller coaster, ride Description automatically generated" /></p>
|
||||||
<p>Figure 2. Fieldlab in Rotterdam Lombardijen</p>
|
<p>Figure 2. Fieldlab in Rotterdam Lombardijen</p>
|
||||||
<p>From a technological perspective, the project has two aims: to <strong>detect suspicious behaviour</strong>, and in turn<strong>, to influence the behaviour of the suspect</strong>. As such, project manager Guido Delver, who agreed to be interviewed for this report, describes the project as being primarily a behavioural experiment (Delver 2021). The twelve luminaires are provided by <a class="maplink" data-title="Sustainder">Sustainder</a> (their Anne series (Sustainder 2021)). The processing of the video and audio is done on the spot by a computer embedded in the luminaire, using software from the Eindhoven based company <strong><a class="maplink" data-title="ViNotion">ViNotion</a></strong> (ViNotion 2020). This software reads the video frames from the camera and estimates the presence and position of people – thereby mapping the coordinates of the video frame to coordinates in the space. It then determines the direction they are facing. <strong>Only these values –position and direction– and no other characteristics nor any images,</strong> are sent over the internet to a datacentre somewhere in the Netherlands, where the position data is stored for further processing (Delver 2021).</p>
|
<p>From a technological perspective, the project has two aims: to <strong>detect suspicious behaviour</strong>, and in turn<strong>, to influence the behaviour of the suspect</strong>. As such, project manager Guido Delver, who agreed to be interviewed for this report, describes the project as being primarily a behavioural experiment (Delver 2021). The twelve luminaires are provided by <a class="maplink" data-title="Sustainder">Sustainder</a> (their Anne series (Sustainder 2021)). The processing of the video and audio is done on the spot by a computer embedded in the luminaire, using software from the Eindhoven based company <strong><a class="maplink" data-title="ViNotion">ViNotion</a></strong> (ViNotion 2020). This software reads the video frames from the camera and estimates the presence and position of people – thereby mapping the coordinates of the video frame to coordinates in the space. It then determines the direction they are facing. <strong>Only these values –position and direction– and no other characteristics nor any images,</strong> are sent over the internet to a datacentre somewhere in the Netherlands, where the position data is stored for further processing (Delver 2021).</p>
|
||||||
<p>Currently, <strong>there is no immediate processing of the position data</strong> to classify behaviour as being suspicious or not. The proposed pipeline consists of two stages: first, an unsupervised machine learning algorithm for <strong>anomaly (outlier) detection processes the gathered trajectories</strong>, in order to distinguish trajectories that statistically deviate from the norm. As an example, both children playing, as well as burglars making a scouting round through the neighbourhood can potentially produce anomalous trajectories. Secondly, <strong>these anomalous trajectories are judged as being suspicious or not by a computer model</strong> that was trained with human supervision. In the Fieldlab’s first data collection experiment 100.000 trajectories were collected, totalling 20.000.000 data points (Hamada 2020). It turned out however that this was still too few to draw any conclusions about viability of the approach; the big data was still too small (Delver 2021).</p>
|
<p>Currently, <strong>there is no immediate processing of the position data</strong> to classify behaviour as being suspicious or not. The proposed pipeline consists of two stages: first, an unsupervised machine learning algorithm for <strong>anomaly (outlier) detection processes the gathered trajectories</strong>, in order to distinguish trajectories that statistically deviate from the norm. As an example, both children playing, as well as burglars making a scouting round through the neighbourhood can potentially produce anomalous trajectories. Secondly, <strong>these anomalous trajectories are judged as being suspicious or not by a computer model</strong> that was trained with human supervision. In the Fieldlab’s first data collection experiment 100.000 trajectories were collected, totalling 20.000.000 data points (Hamada 2020). It turned out however that this was still too few to draw any conclusions about viability of the approach; the big data was still too small (Delver 2021).</p>
|
||||||
<p>Another input for detecting suspicious situations is the <strong>microphone with which some of the streetlamps are equipped</strong>. By recording two frequencies of sound, sounds can be categorised as coming from for example a conversation, shouting, dog barking, or the breaking of glass. The two frequencies recorded provide too little information to distinguish the words in a conversation (Delver 2021).</p>
|
<p>Another input for detecting suspicious situations is the <strong>microphone with which some of the streetlamps are equipped</strong>. By recording two frequencies of sound, sounds can be categorised as coming from for example a conversation, shouting, dog barking, or the breaking of glass. The two frequencies recorded provide too little information to distinguish the words in a conversation (Delver 2021).</p>
|
||||||
<p>Aside from experimenting with the automated detection of suspicious behaviour, the Fieldlab experiments with various ways in which the detected situations can be played out. Project manager Guido Delver notes that the aim is not <em>per se</em> to involve the police. Instead, the suspect should be deterred before any crime is committed (Delver 2021). Various strategies are laid out: the yet-to-be-autonomous system can <strong>voice warnings through the speakers</strong> embedded in the streetlamps. Or, in line with the work of DITSS in Eindhoven’s Stratumseind street, the <strong>light intensity or colour of the streetlamps can be changed</strong> (Intelligent Lighting Institute, n.d.). Both strategies are aimed at signalling the subjects that their behaviour is noticed, which generally suffices to have burglars break off their scouting. Another option under consideration is to send a signal to the residents living nearby.</p>
|
<p>Aside from experimenting with the automated detection of suspicious behaviour, the Fieldlab experiments with various ways in which the detected situations can be played out. Project manager Guido Delver notes that the aim is not <em>per se</em> to involve the police. Instead, the suspect should be deterred before any crime is committed (Delver 2021). Various strategies are laid out: the yet-to-be-autonomous system can <strong>voice warnings through the speakers</strong> embedded in the streetlamps. Or, in line with the work of DITSS in Eindhoven’s Stratumseind street, the <strong>light intensity or colour of the streetlamps can be changed</strong> (Intelligent Lighting Institute, n.d.). Both strategies are aimed at signalling the subjects that their behaviour is noticed, which generally suffices to have burglars break off their scouting. Another option under consideration is to send a signal to the residents living nearby.</p>
|
||||||
<p>The process of data gathering in the Burglary Free Neighbourhood is quite similar to technologies that are deployed for anonymous people counting. One such application has been developed by <strong><a class="maplink" data-title="Numina">Numina</a></strong> and is deployed in the Dutch city of Nijmegen: individuals are <strong>traced through space and time, but not identified or categorised.</strong> This information is then used to provide statistics about the number of visitors in the city centre (Schouten and Bril 2019). Another Dutch deployment of technologically similar software is the <strong>One-and-a-half-meter monitor developed by the municipality of Amsterdam,</strong> which is based on the YOLO5 object detection algorithm and trained on the COCO dataset. This data processing architecture can detect the presence of persons but is incapable of deducing any characteristics (Amsterdam-Amstelland safety region 2020). These implementations show biometrics can be used to detect the presence of people, while refraining from storing these characteristics.</p>
|
<p>The process of data gathering in the Burglary Free Neighbourhood is quite similar to technologies that are deployed for anonymous people counting. One such application has been developed by <strong><a class="maplink" data-title="Numina">Numina</a></strong> and is deployed in the Dutch city of Nijmegen: individuals are <strong>traced through space and time, but not identified or categorised.</strong> This information is then used to provide statistics about the number of visitors in the city centre (Schouten and Bril 2019). Another Dutch deployment of technologically similar software is the <strong><a class="maplink" data-title="Test of One and a half meter monitor">One-and-a-half-meter monitor</a> developed by the <a class="maplink" data-title="Amsterdam Municipality">municipality of Amsterdam</a>,</strong> which is based on the YOLO5 object detection algorithm and trained on the COCO dataset. This data processing architecture can detect the presence of persons but is incapable of deducing any characteristics (Amsterdam-Amstelland safety region 2020). These implementations show biometrics can be used to detect the presence of people, while refraining from storing these characteristics.</p>
|
||||||
<p><img src="images/media/image3.png" style="width:5.35242in;height:3.07738in" alt="Two people holding umbrellas on a street Description automatically generated with low confidence" /></p>
|
<p><img src="images/media/image3.png" style="width:5.35242in;height:3.07738in" alt="Two people holding umbrellas on a street Description automatically generated with low confidence" /></p>
|
||||||
<p>Figure 3. The one-and-a-half-meter monitor developed by the municipality of Amsterdam</p>
|
<p>Figure 3. The one-and-a-half-meter monitor developed by the municipality of Amsterdam</p>
|
||||||
</section>
|
</section>
|
||||||
<section id="legal-bases-and-challenges-1" class="level2">
|
<section id="legal-bases-and-challenges-1" class="level2">
|
||||||
<h2>Legal bases and challenges</h2>
|
<h2>Legal bases and challenges</h2>
|
||||||
<p>The Fieldlab Burglary Free Neighbourhood programme shows how data can be used to <strong>conduct monitoring</strong> and <strong>nudging of individuals’ behaviours</strong>. From a legal point of view, the question is whether the data processed in the context of the programme qualifies as <strong>personal data</strong> and would thus fall within <strong>the scope of data protection legislation</strong>.</p>
|
<p>The <a class="maplink" data-title="Data-lab Burglary-free Neighbourhood">Fieldlab Burglary Free Neighbourhood</a> programme shows how data can be used to <strong>conduct monitoring</strong> and <strong>nudging of individuals’ behaviours</strong>. From a legal point of view, the question is whether the data processed in the context of the programme qualifies as <strong>personal data</strong> and would thus fall within <strong>the scope of data protection legislation</strong>.</p>
|
||||||
<p>The Constitution for the Kingdom of the Netherlands provides for a general right to protection for privacy in Article 10, according to which restrictions to that right must be laid down by law. The GDPR Implementation Act (<em>Uitvoeringswet Algemene Verordening Gegevens-bescherming</em>) (UAVG), as well as the Police Data Act (<em>Wet Politiegegevens</em>) or the Judicial Data and Criminal Records Act (<em>Wet Justitiele en Strafvorderlijke Gegevens</em>) which implement the GDPR and the LED, provides the legal framework regarding privacy and data protection.</p>
|
<p>The Constitution for the Kingdom of the Netherlands provides for a general right to protection for privacy in Article 10, according to which restrictions to that right must be laid down by law. The GDPR Implementation Act (<em>Uitvoeringswet Algemene Verordening Gegevens-bescherming</em>) (UAVG), as well as the Police Data Act (<em>Wet Politiegegevens</em>) or the Judicial Data and Criminal Records Act (<em>Wet Justitiele en Strafvorderlijke Gegevens</em>) which implement the GDPR and the LED, provides the legal framework regarding privacy and data protection.</p>
|
||||||
<p>The <strong>definition of personal data</strong> as enshrined in the GDPR and the LED is directly applicable under the Dutch law. To qualify data as such, “any information” must relate to an identified or identifiable natural person. Based on the data that can be captured by the Fieldlab programme, two elements of this definition need further attention.</p>
|
<p>The <strong>definition of personal data</strong> as enshrined in the GDPR and the LED is directly applicable under the Dutch law. To qualify data as such, “any information” must relate to an identified or identifiable natural person. Based on the data that can be captured by the Fieldlab programme, two elements of this definition need further attention.</p>
|
||||||
<p><strong>-“Information “relating to” a natural person”</strong>. The former <strong>Article 29 Working Party</strong> (2007) substantiated this element by noting that information can relate to an individual based on its content (i.e., information is about the individual), its purpose (i.e., information is used or likely to be used to evaluate, treat in a way, or influence the status or behaviour of an individual), or its result (i.e., information is likely to have an impact on a certain person’s rights and interests, taking into account all the circumstances surrounding the precise case). These three alternative notions to determine whether the information relates to an individual was endorsed by the <strong>CJEU</strong> in its <em>Nowak</em> decision (C-434/16), where it dealt with the purpose (i.e., it evaluates the candidate’s competence) and the result (i.e., it is used to determine whether the candidate passes or fails, which can have an impact on the candidate’s rights) of the information in question in determining whether the written answers to an exam would qualify as personal data. In brief, in determining whether the data captured by the Fieldlab programme qualify as personal data, the context for which the data is used or captured is important. Information about the level of crowding or sound could “relate” to an individual if it is used to evaluate or influence the behaviour of a person (based on its purpose), or to affect a person’s rights (based on its result) (Galič and Gellert 2021).</p>
|
<p><strong>-“Information “relating to” a natural person”</strong>. The former <strong>Article 29 Working Party</strong> (2007) substantiated this element by noting that information can relate to an individual based on its content (i.e., information is about the individual), its purpose (i.e., information is used or likely to be used to evaluate, treat in a way, or influence the status or behaviour of an individual), or its result (i.e., information is likely to have an impact on a certain person’s rights and interests, taking into account all the circumstances surrounding the precise case). These three alternative notions to determine whether the information relates to an individual was endorsed by the <strong>CJEU</strong> in its <em>Nowak</em> decision (C-434/16), where it dealt with the purpose (i.e., it evaluates the candidate’s competence) and the result (i.e., it is used to determine whether the candidate passes or fails, which can have an impact on the candidate’s rights) of the information in question in determining whether the written answers to an exam would qualify as personal data. In brief, in determining whether the data captured by the Fieldlab programme qualify as personal data, the context for which the data is used or captured is important. Information about the level of crowding or sound could “relate” to an individual if it is used to evaluate or influence the behaviour of a person (based on its purpose), or to affect a person’s rights (based on its result) (Galič and Gellert 2021).</p>
|
||||||
|
@ -1237,7 +1274,7 @@
|
||||||
<p>Schuilenburg frames the interest of cities in technologies such as those used in the Burglary Free Neighbourhood as being part <strong>of the well-marketed narrative of the “smart city” that is sold by technology companies</strong>: “no city wants to be dumb” (“Nieuwsuur” 2020b, 36m). To some extent, Guido Delver positions the project’s <strong>privacy-by-design methodology</strong> <strong>in contrast to many of these commercial products for surveillance</strong>. In his conversations with various municipalities he recognises, and shares, the interest for “smart” surveillance technologies. However, Delver attempts to minimise the data gathering in the Burglary Free Neighbourhood. This proves to be a constant negotiation, for example the police have voiced an interest in access to the camera feeds in case suspicious behaviour was detected. However, access to the camera feeds has been deliberately kept outside of the scope of the project (Delver 2021).</p>
|
<p>Schuilenburg frames the interest of cities in technologies such as those used in the Burglary Free Neighbourhood as being part <strong>of the well-marketed narrative of the “smart city” that is sold by technology companies</strong>: “no city wants to be dumb” (“Nieuwsuur” 2020b, 36m). To some extent, Guido Delver positions the project’s <strong>privacy-by-design methodology</strong> <strong>in contrast to many of these commercial products for surveillance</strong>. In his conversations with various municipalities he recognises, and shares, the interest for “smart” surveillance technologies. However, Delver attempts to minimise the data gathering in the Burglary Free Neighbourhood. This proves to be a constant negotiation, for example the police have voiced an interest in access to the camera feeds in case suspicious behaviour was detected. However, access to the camera feeds has been deliberately kept outside of the scope of the project (Delver 2021).</p>
|
||||||
<p>While the project currently only stores the position of passers-by, there are also <strong>technical considerations for the capture of more information.</strong> For example, the video cameras cannot cover the entire area, therefore, as no characteristics of individuals are stored, <strong>tracking people from one camera to the next is problematic</strong>. It raises the question of whether biometric measurements such as a person’s estimated volume, length, or colour of clothing should be recorded, this would allow the computer to link the trace of one camera to another. Posing ethical and legal questions for the project: <strong>what are the legal ramifications of deducing and (temporarily) storing these characteristics, and for how long should they be stored (Delver 2021)?</strong> Even for projects that decide to consider privacy by design, it can be tempting to store and process biometric information. However, as mentioned above (see section 7.2.), the challenges in determining whether the Fieldlab or any other similar initiatives process personal data as defined in the GDPR raises questions on the extent to which these programmes fall within the scope of the data protection legislation, irrespective of the fact that they may be designed to affect the personal autonomy of individuals (as opposed to an identified or identifiable individual) by influencing and nudging their behaviours.</p>
|
<p>While the project currently only stores the position of passers-by, there are also <strong>technical considerations for the capture of more information.</strong> For example, the video cameras cannot cover the entire area, therefore, as no characteristics of individuals are stored, <strong>tracking people from one camera to the next is problematic</strong>. It raises the question of whether biometric measurements such as a person’s estimated volume, length, or colour of clothing should be recorded, this would allow the computer to link the trace of one camera to another. Posing ethical and legal questions for the project: <strong>what are the legal ramifications of deducing and (temporarily) storing these characteristics, and for how long should they be stored (Delver 2021)?</strong> Even for projects that decide to consider privacy by design, it can be tempting to store and process biometric information. However, as mentioned above (see section 7.2.), the challenges in determining whether the Fieldlab or any other similar initiatives process personal data as defined in the GDPR raises questions on the extent to which these programmes fall within the scope of the data protection legislation, irrespective of the fact that they may be designed to affect the personal autonomy of individuals (as opposed to an identified or identifiable individual) by influencing and nudging their behaviours.</p>
|
||||||
<p>Finally, commentators have pointed out the <strong>discrepancy between what is expected of the technology, and what it is actually doing.</strong> For example, the Algemeen Dagblad (Krol 2019) writes that the “smart streetlights” are actually able to “recognise behaviour” and to “sound the alarm” if necessary. <strong>Whereas up until now, the streetlights have only been used to capture data for machine learning.</strong></p>
|
<p>Finally, commentators have pointed out the <strong>discrepancy between what is expected of the technology, and what it is actually doing.</strong> For example, the Algemeen Dagblad (Krol 2019) writes that the “smart streetlights” are actually able to “recognise behaviour” and to “sound the alarm” if necessary. <strong>Whereas up until now, the streetlights have only been used to capture data for machine learning.</strong></p>
|
||||||
<p>These observations raise the question as to whether or not the communication about the technologies used suffices. When entering the neighbourhood, a sign signals to the visitor that the Fieldlab is operative, however, much of the information discussed above could not be found on the website that is mentioned on the sign – as is indicated by the breadth of references used. This situation is substantially different from the way that, for example, the city of Amsterdam lays out its use of algorithms: one website presents the goals of the projects, the kinds of data processing that is happening, the datasets on which the algorithms are trained, and in some cases the source code is shared (Amsterdam Algoritmeregister, 2021). <strong>The Dutch government is currently drafting regulations for a national register of cameras and sensors as deployed by municipalities (Nieuwsuur 2020b).</strong></p>
|
<p>These observations raise the question as to whether or not the communication about the technologies used suffices. When entering the neighbourhood, a sign signals to the visitor that the Fieldlab is operative, however, much of the information discussed above could not be found on the website that is mentioned on the sign – as is indicated by the breadth of references used. This situation is substantially different from the way that, for example, the <a class="maplink" data-title="Amsterdam Municipality">city of Amsterdam</a> lays out its use of algorithms: one website presents the goals of the projects, the kinds of data processing that is happening, the datasets on which the algorithms are trained, and in some cases the source code is shared (Amsterdam Algoritmeregister, 2021). <strong>The Dutch government is currently drafting regulations for a national register of cameras and sensors as deployed by municipalities (Nieuwsuur 2020b).</strong></p>
|
||||||
</section>
|
</section>
|
||||||
</section>
|
</section>
|
||||||
<section id="effects-of-the-technologies-1" class="level2">
|
<section id="effects-of-the-technologies-1" class="level2">
|
||||||
|
@ -1259,23 +1296,23 @@
|
||||||
<ul>
|
<ul>
|
||||||
<li><p>Several French cities have launched “safe city” projects involving biometric technologies, however Nice is arguably the national leader. The city currently has the highest CCTV coverage of any city in France and has more than double the police agents per capita of the neighbouring city of Marseille.</p></li>
|
<li><p>Several French cities have launched “safe city” projects involving biometric technologies, however Nice is arguably the national leader. The city currently has the highest CCTV coverage of any city in France and has more than double the police agents per capita of the neighbouring city of Marseille.</p></li>
|
||||||
<li><p>Through a series of public-private partnerships the city began a number of initiatives using RBI technologies (including emotion and facial recognition). These technologies were deployed for both authentication and surveillance purposes with some falling into the category of biometric mass surveillance.</p></li>
|
<li><p>Through a series of public-private partnerships the city began a number of initiatives using RBI technologies (including emotion and facial recognition). These technologies were deployed for both authentication and surveillance purposes with some falling into the category of biometric mass surveillance.</p></li>
|
||||||
<li><p>One project which used FRT at a high school in Nice and one in Marseille was eventually declared unlawful. The court determined that the required consent could not be obtained due to the power imbalance between the targeted public (students) and the public authority (public educational establishment). This case highlights important issues about the deployment of biometric technologies in public spaces.</p></li>
|
<li><p>One project which used FRT at a <a class="maplink" data-title="Facial Recognition Pilot in High School (Nice)">high school in Nice</a> and <a class="maplink" data-title="Facial Recognition Pilot in High School (Marseille)">one in Marseille</a> was eventually declared unlawful. The court determined that the required consent could not be obtained due to the power imbalance between the targeted public (students) and the public authority (public educational establishment). This case highlights important issues about the deployment of biometric technologies in public spaces.</p></li>
|
||||||
<li><p>The use of biometric mass surveillance by the mayor of Nice Christian Estrosi has put him on a collision course with the French Data Protection Authority (<a class="maplink" data-title="CNIL">CNIL</a>) as well as human rights/ digital rights organisations (Ligue des Droits de l’Homme, <a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a>). His activities have raised both concern and criticism over the usage of the technologies and their potential impact on the privacy of personal data.</p></li>
|
<li><p>The use of biometric mass surveillance by the mayor of Nice Christian Estrosi has put him on a collision course with the French Data Protection Authority (<a class="maplink" data-title="CNIL">CNIL</a>) as well as human rights/ digital rights organisations (Ligue des Droits de l’Homme, <a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a>). His activities have raised both concern and criticism over the usage of the technologies and their potential impact on the privacy of personal data.</p></li>
|
||||||
</ul>
|
</ul>
|
||||||
</div> <!-- key points -->
|
</div> <!-- key points -->
|
||||||
<p>Although several French cities such as Paris, Valenciennes or Marseille have launched pilot projects for “safe city” projects involving <strong>biometric technologies (facial, voice, sound recognition),</strong> the city of Nice is perhaps the <strong>national leader in the experimentation with such technologies at a local level</strong> (Nice Premium 2017). The mayor of Nice, Christian Estrosi (Les Républicains Party, right) a prominent political figure on the national political scene, has made clear his intention was to make Nice a “laboratory” of crime prevention (Barelli 2018). Since 2010, more than <strong>1.962 surveillance cameras have been deployed throughout the city</strong>, making it the city with the <strong>highest CCTV coverage in France</strong> (27 cameras per square kilometre). Nice also possesses the most local police in France per inhabitant: 414 agents, for a population of 340.000 (in comparison, the neighbouring city of Marseille has 450 agents for 861.000 inhabitants).</p>
|
<p>Although several French cities such as Paris, Valenciennes or Marseille have launched pilot projects for “safe city” projects involving <strong>biometric technologies (facial, voice, sound recognition),</strong> the city of Nice is perhaps the <strong>national leader in the experimentation with such technologies at a local level</strong> (Nice Premium 2017). The mayor of Nice, Christian Estrosi (Les Républicains Party, right) a prominent political figure on the national political scene, has made clear his intention was to make Nice a “laboratory” of crime prevention (Barelli 2018). Since 2010, more than <strong>1.962 surveillance cameras have been deployed throughout the city</strong>, making it the city with the <strong>highest CCTV coverage in France</strong> (27 cameras per square kilometre). Nice also possesses the most local police in France per inhabitant: 414 agents, for a population of 340.000 (in comparison, the neighbouring city of Marseille has 450 agents for 861.000 inhabitants).</p>
|
||||||
<section id="the-various-facets-of-the-safe-city-project-in-nice" class="level2">
|
<section id="the-various-facets-of-the-safe-city-project-in-nice" class="level2">
|
||||||
<h2>The various facets of the “Safe city” project in Nice</h2>
|
<h2>The various facets of the “Safe city” project in Nice</h2>
|
||||||
<p>Nice has experimented with various initiatives related to <strong>remote biometric identification</strong> – many of which fall into the category of biometric mass surveillance. In 2017, Christian Estrosi announced a partnership with the energy company <a class="maplink" data-title="Engie Ineo">Engie Ineo</a> for the development of an Urban Surveillance Centre (Centre de Surveillance Urbain, CSU). Based on a touch-interface technology, it centralises a platform of <strong>real-time data such as traffic accidents, patrol locations, as well as video feeds from CCTV</strong>s on the streets and in public transportation. (Dudebout 2020, 1). The video feeds from the city tramways are connected to an <strong>emotion recognition algorithm</strong> to flag suspicious situations (Allix 2018).</p>
|
<p>Nice has experimented with various initiatives related to <strong>remote biometric identification</strong> – many of which fall into the category of biometric mass surveillance. In 2017, Christian Estrosi announced a partnership with the energy company <a class="maplink" data-title="Engie Ineo">Engie Ineo</a> for the development of an Urban Surveillance Centre (Centre de Surveillance Urbain, CSU). Based on a touch-interface technology, it centralises a platform of <strong>real-time data such as traffic accidents, patrol locations, as well as video feeds from CCTV</strong>s on the streets and in public transportation. (Dudebout 2020, 1). The video feeds from the city tramways are connected to an <strong><a class="maplink" data-title="Facial/Emotion Recognition Pilot in Tramway (Nice)">emotion recognition algorithm</a></strong> to flag suspicious situations (Allix 2018).</p>
|
||||||
<p>In June 2018, an additional step was taken with the signing of a partnership agreement with a consortium of companies headed by <a class="maplink" data-title="Thales">Thales</a>, specialised in social network intelligence, geolocation, biometrics and crowd simulation<a href="#fn33" class="footnote-ref" id="fnref33" role="doc-noteref"><sup>33</sup></a> for a <strong>“Safe City” project</strong> (Dudebout 2020, 2). Established for three years (2018-2021) with a budget of EUR 10,9 million, the project is financed by the city council, subsidised in part by BPI France<a href="#fn34" class="footnote-ref" id="fnref34" role="doc-noteref"><sup>34</sup></a>, and supported by the Committee for the Security Industrial Sector, an agency under the tutelage of the Prime Minister’s office<a href="#fn35" class="footnote-ref" id="fnref35" role="doc-noteref"><sup>35</sup></a> (Allix 2018; BPI France 2018)</p>
|
<p>In June 2018, an additional step was taken with the signing of a partnership agreement with a consortium of companies headed by <a class="maplink" data-title="Thales">Thales</a>, specialised in social network intelligence, geolocation, biometrics and crowd simulation<a href="#fn33" class="footnote-ref" id="fnref33" role="doc-noteref"><sup>33</sup></a> for a <strong>“Safe City” project</strong> (Dudebout 2020, 2). Established for three years (2018-2021) with a budget of EUR 10,9 million, the project is financed by the city council, subsidised in part by <a class="maplink" data-title="Bpifrance">BPI France</a><a href="#fn34" class="footnote-ref" id="fnref34" role="doc-noteref"><sup>34</sup></a>, and supported by the Committee for the Security Industrial Sector, an agency under the tutelage of the Prime Minister’s office<a href="#fn35" class="footnote-ref" id="fnref35" role="doc-noteref"><sup>35</sup></a> (Allix 2018; BPI France 2018)</p>
|
||||||
<p>The first facial recognition test of the Safe city project took place from 16 February to 2 March 2019, during the Nice Carnival. The experiment was a simulation, involving matching faces collected through CCTV footage of the crowd attending the carnival with a fictitious set of databases (lost individuals, wanted individuals, or individuals with restraining orders). The fictitious datasets were constituted by 50 volunteers, recruited mostly among the municipality, who provided their pictures, or were freshly photographed for the test. The system used <strong>live facial recognition software provided by the company Anyvision</strong>. The live feeds were filmed during the carnival. Passers-by (approximately 1000 people were concerned) were informed of the ongoing test and asked to wear a bracelet if they consented to being filmed (Hassani 2019).</p>
|
<p>The first facial recognition test of the Safe city project took place from 16 February to 2 March 2019, during the Nice Carnival. The experiment was a simulation, involving matching faces collected through CCTV footage of the crowd attending the carnival with a fictitious set of databases (lost individuals, wanted individuals, or individuals with restraining orders). The fictitious datasets were constituted by 50 volunteers, recruited mostly among the municipality, who provided their pictures, or were freshly photographed for the test. The system used <strong>live facial recognition software provided by the company Anyvision</strong>. The live feeds were filmed during the carnival. Passers-by (approximately 1000 people were concerned) were informed of the ongoing test and asked to wear a bracelet if they consented to being filmed (Hassani 2019).</p>
|
||||||
<p>A second experiment took the form of a <strong>software application (app) named “Reporty”,</strong> rolled out in January 2018. The app, developed by the Israeli American company <a class="maplink" data-title="Carbyne">Carbyne</a>, allows citizens to be in direct audio and video connection and share geolocation information with the Urban Supervision Centre in order to report any incivility, offense, or crime that they might witness (Barelli 2018).</p>
|
<p>A second experiment took the form of a <strong>software application (app) named “Reporty”,</strong> rolled out in January 2018. The app, developed by the Israeli American company <a class="maplink" data-title="Carbyne">Carbyne</a>, allows citizens to be in direct audio and video connection and share geolocation information with the Urban Supervision Centre in order to report any incivility, offense, or crime that they might witness (Barelli 2018).</p>
|
||||||
<p>The third project, involving <strong>facial recognition</strong> was tested in the education context. In February 2019, <strong>a high school in Nice and a high school in Marseille were fitted with facial recognition technology</strong> at their gates in order to grant or bar access to the premises. The official motivation behind the deployment was to "assist the personnel of the high schools and to fight against identity theft" (Dudebout 2020, 3–4).</p>
|
<p>The third project, involving <strong>facial recognition</strong> was tested in the education context. In February 2019, <strong><a class="maplink" data-title="Facial Recognition Pilot in High School (Nice)">a high school in Nice</a> and <a class="maplink" data-title="Facial Recognition Pilot in High School (Marseille)">a high school in Marseille</a> were fitted with facial recognition technology</strong> at their gates in order to grant or bar access to the premises. The official motivation behind the deployment was to "assist the personnel of the high schools and to fight against identity theft" (Dudebout 2020, 3–4).</p>
|
||||||
</section>
|
</section>
|
||||||
<section id="legal-bases-and-challenges-2" class="level2">
|
<section id="legal-bases-and-challenges-2" class="level2">
|
||||||
<h2>Legal bases and challenges</h2>
|
<h2>Legal bases and challenges</h2>
|
||||||
<p>The use of facial recognition systems in high schools in Nice and Marseille, which <strong>was declared unlawful by the Administrative Court of Marseille</strong>, raised important issues on the legality of deploying biometric technologies in public places.</p>
|
<p>The use of facial recognition systems in high schools in Nice and Marseille, which <strong>was declared unlawful by the Administrative Court of Marseille</strong>, raised important issues on the legality of deploying biometric technologies in public places.</p>
|
||||||
<p>There is no specific provision devoted to the right to privacy or data protection in the French Constitution of 1958, but constitutional safeguards for the interests protected under said rights exists. <strong>The French Constitutional Council</strong> (<em>Conseil Constitutionnel</em>) has recognised that the respect for privacy is protected by Article 2 of the 1789 Declaration of the Rights of Man and of the Citizen, which is incorporated in the <strong>French constitutionality bloc</strong> <strong>as binding constitutional rule (bloc de constitutionnalité)</strong> (French Constitutional Council, Decision N° 2004-492 DC of 2 March 2004). Accordingly, the collection, retention, use and sharing of personal data attracts protection <strong>under the right to privacy</strong> (French Constitutional Council, Decision n° 2012-652 DC of 22 March 2012). The limitations to that right must thus be justified on grounds of general interest and implemented in an adequate manner, <strong>proportionate to this objective (ibid).</strong></p>
|
<p>There is no specific provision devoted to the right to privacy or data protection in the French Constitution of 1958, but constitutional safeguards for the interests protected under said rights exists. <strong>The French Constitutional Council</strong> (<em><a class="maplink" data-title="Conseil constitutionnel">Conseil Constitutionnel</a></em>) has recognised that the respect for privacy is protected by Article 2 of the 1789 Declaration of the Rights of Man and of the Citizen, which is incorporated in the <strong>French constitutionality bloc</strong> <strong>as binding constitutional rule (bloc de constitutionnalité)</strong> (French Constitutional Council, Decision N° 2004-492 DC of 2 March 2004). Accordingly, the collection, retention, use and sharing of personal data attracts protection <strong>under the right to privacy</strong> (French Constitutional Council, Decision n° 2012-652 DC of 22 March 2012). The limitations to that right must thus be justified on grounds of general interest and implemented in an adequate manner, <strong>proportionate to this objective (ibid).</strong></p>
|
||||||
|
|
||||||
<p><strong>France has updated the Act N°78-17</strong> of 6 January 1978 on information technology, data files and civil liberties in various stages to incorporate the provisions of the <strong>GDPR</strong>, address the possible exemptions contained in the <strong>GDPR</strong>, and implement the <strong>LED</strong>.</p>
|
<p><strong>France has updated the Act N°78-17</strong> of 6 January 1978 on information technology, data files and civil liberties in various stages to incorporate the provisions of the <strong>GDPR</strong>, address the possible exemptions contained in the <strong>GDPR</strong>, and implement the <strong>LED</strong>.</p>
|
||||||
|
|
||||||
|
@ -1288,7 +1325,7 @@
|
||||||
|
|
||||||
<p><strong>The Act N°78-17 provides the data subject rights against the processing of their personal data</strong> with restrictions to the exercise of those rights subject to certain conditions (e.g., the restriction for protecting public security to the right to access the data processed for law enforcement purposes pursuant to Art 107 of Act N°78-17). An important data subject’s right in the context of biometric surveillance is <strong>the data subject’s right not to be subjected to solely automated decision-making, including profiling, except if it is carried out in light of circumstances laid out in Article 22 of the GDPR</strong> and for individual administrative decisions taken in compliance with French legislation (Article 47 of Act N°78-17). That said, for the latter circumstance, the automated data processing must not involve sensitive data (Article 47(2), Act N°78-17). Regarding the data processing operations relating to State security and defence (Article 120, Act N°78-17) and to the prevention, investigation, and prosecution of criminal offences (Article 95, Act N°78-17), the Act lays out an absolute prohibition against solely automated decision-making, according to which no decision producing legal effects or similarly significant effects can be based on said decision-making intended to predict or assess certain personal aspects of the person concerned. Particularly, with respect to data processing operations for law enforcement purposes, Article 95 of the Act prohibits any type of profiling that discriminates against natural persons based on sensitive data as laid out in Article 6.</p>
|
<p><strong>The Act N°78-17 provides the data subject rights against the processing of their personal data</strong> with restrictions to the exercise of those rights subject to certain conditions (e.g., the restriction for protecting public security to the right to access the data processed for law enforcement purposes pursuant to Art 107 of Act N°78-17). An important data subject’s right in the context of biometric surveillance is <strong>the data subject’s right not to be subjected to solely automated decision-making, including profiling, except if it is carried out in light of circumstances laid out in Article 22 of the GDPR</strong> and for individual administrative decisions taken in compliance with French legislation (Article 47 of Act N°78-17). That said, for the latter circumstance, the automated data processing must not involve sensitive data (Article 47(2), Act N°78-17). Regarding the data processing operations relating to State security and defence (Article 120, Act N°78-17) and to the prevention, investigation, and prosecution of criminal offences (Article 95, Act N°78-17), the Act lays out an absolute prohibition against solely automated decision-making, according to which no decision producing legal effects or similarly significant effects can be based on said decision-making intended to predict or assess certain personal aspects of the person concerned. Particularly, with respect to data processing operations for law enforcement purposes, Article 95 of the Act prohibits any type of profiling that discriminates against natural persons based on sensitive data as laid out in Article 6.</p>
|
||||||
|
|
||||||
<p>In addition to the data protection legislation, <strong>the other legislation applicable to biometric surveillance is the Code of Criminal Procedure.</strong> Its Article R40-26 allows the national police and gendarmerie to retain in a criminal records database (<em>Traitement des Antécédents Judiciaires</em> or <em>TAJ</em>) photographs of people suspected of having participated in criminal offences as well as victims and persons being investigated for causes of death, serious injury or disappearance to make it possible to use a facial recognition device. According to a 2018 report by Parliament, <strong>TAJ contains between 7 and 8 million facial images (<em>Assemblée Nationale</em> N°1335, 2018, 64, f.n. 2).</strong> <a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a> lodged legal complaints against the retention of facial images before the <a class="maplink" data-title="Conseil d'État">Conseil d'État</a>, arguing that this practice does not comply with the strict necessity test required under Article 10 of LED and Article 88 of Act N°78-17 (La Quadrature du Net, 2020).</p>
|
<p>In addition to the data protection legislation, <strong>the other legislation applicable to biometric surveillance is the Code of Criminal Procedure.</strong> Its Article R40-26 allows the national police and gendarmerie to retain in a criminal records database (<em><a class="maplink" data-title="Deployment of TAJ">Traitement des Antécédents Judiciaires</a></em> or <em><a class="maplink" data-title="Deployment of TAJ">TAJ</a></em>) photographs of people suspected of having participated in criminal offences as well as victims and persons being investigated for causes of death, serious injury or disappearance to make it possible to use a facial recognition device. According to a 2018 report by Parliament, <strong><a class="maplink" data-title="Deployment of TAJ">TAJ</a> contains between 7 and 8 million facial images (<em>Assemblée Nationale</em> N°1335, 2018, 64, f.n. 2).</strong> <a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a> lodged legal complaints against the retention of facial images before the <a class="maplink" data-title="Conseil d'État">Conseil d'État</a>, arguing that this practice does not comply with the strict necessity test required under Article 10 of LED and Article 88 of Act N°78-17 (La Quadrature du Net, 2020).</p>
|
||||||
</section>
|
</section>
|
||||||
<section id="mobilisations-and-contestations-2" class="level2">
|
<section id="mobilisations-and-contestations-2" class="level2">
|
||||||
<h2>Mobilisations and contestations</h2>
|
<h2>Mobilisations and contestations</h2>
|
||||||
|
@ -1296,7 +1333,7 @@
|
||||||
<p>The French digital rights organisation <strong><a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a> was quick to highlight the problems raised by the deployment of these technologies in Nice</strong>. “The safe city is the proliferation of tools from the intelligence community, with a logic of massive surveillance, identification of weak signals and suspicious behaviour," commented Félix Tréguer, a Marseilles-based leader of the association <a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a> and member of the campaign Technopolice<a href="#fn36" class="footnote-ref" id="fnref36" role="doc-noteref"><sup>36</sup></a>. “We do not find it reassuring that the municipal police will become the intelligence service of the urban public space and its digital double" (Allix 2018).</p>
|
<p>The French digital rights organisation <strong><a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a> was quick to highlight the problems raised by the deployment of these technologies in Nice</strong>. “The safe city is the proliferation of tools from the intelligence community, with a logic of massive surveillance, identification of weak signals and suspicious behaviour," commented Félix Tréguer, a Marseilles-based leader of the association <a class="maplink" data-title="La Quadrature du Net">La Quadrature du Net</a> and member of the campaign Technopolice<a href="#fn36" class="footnote-ref" id="fnref36" role="doc-noteref"><sup>36</sup></a>. “We do not find it reassuring that the municipal police will become the intelligence service of the urban public space and its digital double" (Allix 2018).</p>
|
||||||
<p><strong>The Ligue des Droits de l’Homme emphasised similar points, highlighting the political dangers involved.</strong> As Henri Busquet of the Ligue des Droits de l'Homme in Nice put “improving emergency services and traffic is legitimate, but the generalisation of video surveillance worries us, and scrutinising social networks is not the role of a mayor. Without any safeguards, such a tool cannot demonstrate the necessary neutrality [...] It is potentially a tool for political destruction, which puts opponents and journalists at particular risk” (Allix 2018).</p>
|
<p><strong>The Ligue des Droits de l’Homme emphasised similar points, highlighting the political dangers involved.</strong> As Henri Busquet of the Ligue des Droits de l'Homme in Nice put “improving emergency services and traffic is legitimate, but the generalisation of video surveillance worries us, and scrutinising social networks is not the role of a mayor. Without any safeguards, such a tool cannot demonstrate the necessary neutrality [...] It is potentially a tool for political destruction, which puts opponents and journalists at particular risk” (Allix 2018).</p>
|
||||||
<p>In July 2019, the city of Nice hoped the <a class="maplink" data-title="CNIL">CNIL</a> would provide advice related to its first test experiment during the Carnival. The <a class="maplink" data-title="CNIL">CNIL</a> responded however that not enough information was provided by the municipality for the DPA to assess it. The French DPA pointed out in particular the lack of “quantified elements on the effectiveness of the technical device or the concrete consequences of a possible bias (related to gender, skin colour ...) of the software” (Dudebout 2020, 3).</p>
|
<p>In July 2019, the city of Nice hoped the <a class="maplink" data-title="CNIL">CNIL</a> would provide advice related to its first test experiment during the Carnival. The <a class="maplink" data-title="CNIL">CNIL</a> responded however that not enough information was provided by the municipality for the DPA to assess it. The French DPA pointed out in particular the lack of “quantified elements on the effectiveness of the technical device or the concrete consequences of a possible bias (related to gender, skin colour ...) of the software” (Dudebout 2020, 3).</p>
|
||||||
<p><strong>The launch of the smartphone application “Reporty” was the catalyst for mobilisation in Nice, united under the umbrella organisation “Collectif anti-Reporty"</strong>. The coalition was formed by local representatives from two left-wing parties (Parti Socialiste, Les Insoumis), Tous Citoyens, the union CGT and the anti-discrimination NGO MRAP. The coalition appealed to two institutions to block the use of the application: <strong>The Defender of Rights</strong> (Défenseur des Droits) and the French DPA (<a class="maplink" data-title="CNIL">CNIL</a>). The coalition denounced “a risk of generalised denunciation and a serious breach of privacy”, calling to “put an end to the securitarian drift of Christian Estrosi” (Barelli 2018).</p>
|
<p><strong>The launch of the smartphone application “Reporty” was the catalyst for mobilisation in Nice, united under the umbrella organisation “Collectif anti-Reporty"</strong>. The coalition was formed by local representatives from two left-wing parties (Parti Socialiste, Les Insoumis), Tous Citoyens, the <a class="maplink" data-title="CGT Union (Education) Alpes-Maritimes">union CGT</a> and the anti-discrimination NGO MRAP. The coalition appealed to two institutions to block the use of the application: <strong>The Defender of Rights</strong> (Défenseur des Droits) and the French DPA (<a class="maplink" data-title="CNIL">CNIL</a>). The coalition denounced “a risk of generalised denunciation and a serious breach of privacy”, calling to “put an end to the securitarian drift of Christian Estrosi” (Barelli 2018).</p>
|
||||||
<p><strong>On 15 March 2018, the <a class="maplink" data-title="CNIL">CNIL</a> stated that the application was too invasive and did not meet the criteria set out by the legislation</strong>. It did not meet the proportionality test; it failed to fall within the frame of existing law on video-protection due to the integration of private citizens terminals (smartphones) with a security database managed by the police; it was excessively intrusive due to the collection of images and voice of people in the public space and finally it covered a field of offenses that was too broad (CNIL 2018).</p>
|
<p><strong>On 15 March 2018, the <a class="maplink" data-title="CNIL">CNIL</a> stated that the application was too invasive and did not meet the criteria set out by the legislation</strong>. It did not meet the proportionality test; it failed to fall within the frame of existing law on video-protection due to the integration of private citizens terminals (smartphones) with a security database managed by the police; it was excessively intrusive due to the collection of images and voice of people in the public space and finally it covered a field of offenses that was too broad (CNIL 2018).</p>
|
||||||
<p><strong>The school experimentation further pushed the <a class="maplink" data-title="CNIL">CNIL</a> to take a position on the technological activism of Nice’s mayor.</strong> On 29 October 2019, it expressed serious concerns over the experimentation, arguing that the technology was clashing with the principles of proportionality and data collection minimisation enshrined in the principles of the GDPR. It pointed out that other methods, less intrusive for the privacy of the students, could be used to achieve the technology’s stated goal, namely increasing the student’s security and traffic fluidity (Dudebout 2020, 4).</p>
|
<p><strong>The school experimentation further pushed the <a class="maplink" data-title="CNIL">CNIL</a> to take a position on the technological activism of Nice’s mayor.</strong> On 29 October 2019, it expressed serious concerns over the experimentation, arguing that the technology was clashing with the principles of proportionality and data collection minimisation enshrined in the principles of the GDPR. It pointed out that other methods, less intrusive for the privacy of the students, could be used to achieve the technology’s stated goal, namely increasing the student’s security and traffic fluidity (Dudebout 2020, 4).</p>
|
||||||
<p><strong>In a landmark opinion published on 15 November 2019, the <a class="maplink" data-title="CNIL">CNIL</a> clarified what it defined as guidelines related to facial recognition (CNIL 2019a).</strong> The French DPA expressed concerns over a blanket and indiscriminate use of the technologies, highlighting possible infringements to fundamental rights, because these technologies operate in the public space, where these freedoms (expression, reunion, protest) are expressed. It however did not suggest that they should be banned in all circumstances – it suggested instead that its uses could be justified if properly regulated, on a case-by-case basis. <strong>Certain uses could be rejected a priori – such as in the case of minors, whose data are strictly protected</strong>. The question of data retention is also central, warning against excessive data duration and excessive centralisation, suggesting instead citizen’s control over their own data. But as the president of the <a class="maplink" data-title="CNIL">CNIL</a>, Marie-Laure Denis explained, facial recognition technology “can have legitimate uses, and there is a not firm position of the <a class="maplink" data-title="CNIL">CNIL</a>’s board” (Untersinger 2019).</p>
|
<p><strong>In a landmark opinion published on 15 November 2019, the <a class="maplink" data-title="CNIL">CNIL</a> clarified what it defined as guidelines related to facial recognition (CNIL 2019a).</strong> The French DPA expressed concerns over a blanket and indiscriminate use of the technologies, highlighting possible infringements to fundamental rights, because these technologies operate in the public space, where these freedoms (expression, reunion, protest) are expressed. It however did not suggest that they should be banned in all circumstances – it suggested instead that its uses could be justified if properly regulated, on a case-by-case basis. <strong>Certain uses could be rejected a priori – such as in the case of minors, whose data are strictly protected</strong>. The question of data retention is also central, warning against excessive data duration and excessive centralisation, suggesting instead citizen’s control over their own data. But as the president of the <a class="maplink" data-title="CNIL">CNIL</a>, Marie-Laure Denis explained, facial recognition technology “can have legitimate uses, and there is a not firm position of the <a class="maplink" data-title="CNIL">CNIL</a>’s board” (Untersinger 2019).</p>
|
||||||
|
@ -1424,9 +1461,9 @@
|
||||||
<p>In May 2020 Hungarian Authorities rolled out two digital applications, the contact-tracing app called <strong>VirusRadar</strong> (Kaszás 2020) and the <strong><a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App</a></strong> (Házi Karantén Rendszer, abbreviated HKR). Both of these apps are centralised tracing apps, meaning that they send contact logs with pseudonymised personal data to a central (government) back-end server (Council of Europe 2020, 28). While the VirusRadar only uses Bluetooth data and proximity of other devices, the <strong>HKR processes biometric data</strong> when comparing facial images of its users.</p>
|
<p>In May 2020 Hungarian Authorities rolled out two digital applications, the contact-tracing app called <strong>VirusRadar</strong> (Kaszás 2020) and the <strong><a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App</a></strong> (Házi Karantén Rendszer, abbreviated HKR). Both of these apps are centralised tracing apps, meaning that they send contact logs with pseudonymised personal data to a central (government) back-end server (Council of Europe 2020, 28). While the VirusRadar only uses Bluetooth data and proximity of other devices, the <strong>HKR processes biometric data</strong> when comparing facial images of its users.</p>
|
||||||
<p>Those who, according to the COVID-19 regulations in Hungary, are confined to home quarantine are offered the option to use the app instead of being checked by the police. For those who return from abroad, the use of the app is compulsory. But even those who can choose are encouraged by the authorities to make use of the HKR app, otherwise they will be subjected to frequent visits by police agents. <strong>Once a person downloads the app, its use becomes compulsory</strong> and failure to do so or attempts to evade its tracking is considered an administrative offense. From a data protection law point of view, this is a clear case where the data subject’s consent (and in the case of biometric data, their explicit consent) cannot provide the lawful ground for the processing of data through the app (see section 4.2.2). Even if the processing can be based on another lawful ground such as public interest, the punitive nature of non-compliance may raise issues in terms of adhering to the necessity test, which requires a balancing act between the objective pursued and the data subject’s interests.</p>
|
<p>Those who, according to the COVID-19 regulations in Hungary, are confined to home quarantine are offered the option to use the app instead of being checked by the police. For those who return from abroad, the use of the app is compulsory. But even those who can choose are encouraged by the authorities to make use of the HKR app, otherwise they will be subjected to frequent visits by police agents. <strong>Once a person downloads the app, its use becomes compulsory</strong> and failure to do so or attempts to evade its tracking is considered an administrative offense. From a data protection law point of view, this is a clear case where the data subject’s consent (and in the case of biometric data, their explicit consent) cannot provide the lawful ground for the processing of data through the app (see section 4.2.2). Even if the processing can be based on another lawful ground such as public interest, the punitive nature of non-compliance may raise issues in terms of adhering to the necessity test, which requires a balancing act between the objective pursued and the data subject’s interests.</p>
|
||||||
|
|
||||||
<p>The HKR app is <strong>developed by Asura Technologies and implemented by <a class="maplink" data-title="IdomSoft">IdomSoft</a> Ltd</strong>., the same company that provides the software and technical implementation for the nation-wide <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a>. The HKR application works with <strong>face recognition technology combined with location verification</strong>. The application sends notifications at random times prompting the user to <strong>upload a facial image</strong> while retrieving the location data of the mobile device. The user must respond within 15 minutes and the location data must match the address registered for quarantine. In order for the <a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App</a> to work, the user first needs to upload a facial image which is compared by a police officer with the photo of the same individual stored in the central database. After this <strong>facial verification</strong>, the app creates <strong>a biometric template on the mobile phone of the user</strong> and the photo is deleted. The consecutive photos are only compared to this biometric template, so neither the photos nor the template leave the personal device. If there is suspicion about the identity or whereabouts of the user, a police officer visits the address to make sure that the person is adhering to the quarantine rules.</p>
|
<p>The HKR app is <strong>developed by <a class="maplink" data-title="Asura Technology">Asura Technologies</a> and implemented by <a class="maplink" data-title="IdomSoft">IdomSoft</a> Ltd</strong>., the same company that provides the software and technical implementation for the nation-wide <a class="maplink" data-title="Dragonfly Project">Dragonfly Project</a>. The HKR application works with <strong>face recognition technology combined with location verification</strong>. The application sends notifications at random times prompting the user to <strong>upload a facial image</strong> while retrieving the location data of the mobile device. The user must respond within 15 minutes and the location data must match the address registered for quarantine. In order for the <a class="maplink" data-title="Home Quarantine App Hungary">Home Quarantine App</a> to work, the user first needs to upload a facial image which is compared by a police officer with the photo of the same individual stored in the central database. After this <strong>facial verification</strong>, the app creates <strong>a biometric template on the mobile phone of the user</strong> and the photo is deleted. The consecutive photos are only compared to this biometric template, so neither the photos nor the template leave the personal device. If there is suspicion about the identity or whereabouts of the user, a police officer visits the address to make sure that the person is adhering to the quarantine rules.</p>
|
||||||
|
|
||||||
<p>Interestingly, the HKR app, — just like the contact tracing app VirusRadar, which was developed by Nextsense — has been <strong>“donated” to the Hungarian Government by Asura Technologies “free of charge”</strong>. </p>
|
<p>Interestingly, the HKR app, — just like the contact tracing app VirusRadar, which was developed by Nextsense — has been <strong>“donated” to the Hungarian Government by <a class="maplink" data-title="Asura Technology">Asura Technologies</a> “free of charge”</strong>. </p>
|
||||||
|
|
||||||
<p><img src="images/media/image5.png" style="width:3.1203in;height:1.7525in" alt="Graphical user interface, application, chat or text message Description automatically generated" /><img src="images/media/image6.png" style="width:3.10526in;height:1.74405in" alt="A picture containing text, screenshot, monitor Description automatically generated" /></p>
|
<p><img src="images/media/image5.png" style="width:3.1203in;height:1.7525in" alt="Graphical user interface, application, chat or text message Description automatically generated" /><img src="images/media/image6.png" style="width:3.10526in;height:1.74405in" alt="A picture containing text, screenshot, monitor Description automatically generated" /></p>
|
||||||
<p>Figure 5. Snapshots from the video Home Quarantine System Short Presentation by Asura Technologies<a href="#fn38" class="footnote-ref" id="fnref38" role="doc-noteref"><sup>38</sup></a></p>
|
<p>Figure 5. Snapshots from the video Home Quarantine System Short Presentation by Asura Technologies<a href="#fn38" class="footnote-ref" id="fnref38" role="doc-noteref"><sup>38</sup></a></p>
|
||||||
|
@ -1756,13 +1793,13 @@
|
||||||
<li id="fn11" role="doc-endnote"><p>Criminal case history database, managed by the <strong>French Ministry of Interior</strong><a href="#fnref11" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
<li id="fn11" role="doc-endnote"><p>Criminal case history database, managed by the <strong>French Ministry of Interior</strong><a href="#fnref11" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
||||||
<li id="fn12" role="doc-endnote"><p>Criminal case management system, managed by the <strong>German Federal Criminal Police Office</strong> (Bundeskriminalamt)<a href="#fnref12" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
<li id="fn12" role="doc-endnote"><p>Criminal case management system, managed by the <strong>German Federal Criminal Police Office</strong> (Bundeskriminalamt)<a href="#fnref12" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
||||||
<li id="fn13" role="doc-endnote"><p>Managed by the <strong>Video and Image Laboratory</strong> of the Audiovisual Evidence of the Department of Photography and Modus Operandi of the <a class="maplink" data-title="Hellenic Police Forensic Science Division">Hellenic Police Forensic Science Division</a><a href="#fnref13" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
<li id="fn13" role="doc-endnote"><p>Managed by the <strong>Video and Image Laboratory</strong> of the Audiovisual Evidence of the Department of Photography and Modus Operandi of the <a class="maplink" data-title="Hellenic Police Forensic Science Division">Hellenic Police Forensic Science Division</a><a href="#fnref13" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
||||||
<li id="fn14" role="doc-endnote"><p>The Facial Image registry is interrogated through a search engine developed by <a class="maplink" data-title="NEC">NEC</a>, and accessible to <strong>the</strong> <strong>National Investigation Agency</strong>, <strong>the</strong> <strong>Criminal Courts</strong>, <strong>the</strong> <strong>National Protective Service</strong>, <strong>the</strong> <strong>Counter-Terrorism Centre</strong>, <strong>the</strong> <strong>Hungarian Prison Service</strong>, <strong>the Prosecution Service of Hungary</strong>, <strong>the Public Administration</strong>, <strong>the</strong> <strong>Special Service for National Security</strong>, <strong>the</strong> <strong>Intelligence Agencies</strong>, <strong>the</strong> <strong><a class="maplink" data-title="Hungarian Police">Hungarian Police</a></strong>, <strong>the Hungarian Parliamentary Guard</strong>, <strong><a class="maplink" data-title="Hungarian Ministry of Justice">Hungarian Ministry of Justice</a></strong>, <strong>Witness Protection Service</strong>, <strong>the</strong> <strong>National Directorate-General for Aliens Policing and Institution of the President of the Republic.</strong><a href="#fnref14" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
<li id="fn14" role="doc-endnote"><p>The Facial Image registry is interrogated through a search engine developed by <a class="maplink" data-title="NEC">NEC</a>, and accessible to <strong>the</strong> <strong>National Investigation Agency</strong>, <strong>the</strong> <strong><a class="maplink" data-title="Criminal Courts (Hungary)">Criminal Courts</a></strong>, <strong>the</strong> <strong>National Protective Service</strong>, <strong>the</strong> <strong><a class="maplink" data-title="Counter-Terrorism Centre (Hungary)">Counter-Terrorism Centre</a></strong>, <strong>the</strong> <strong>Hungarian Prison Service</strong>, <strong>the Prosecution Service of Hungary</strong>, <strong>the Public Administration</strong>, <strong>the</strong> <strong>Special Service for National Security</strong>, <strong>the</strong> <strong>Intelligence Agencies</strong>, <strong>the</strong> <strong><a class="maplink" data-title="Hungarian Police">Hungarian Police</a></strong>, <strong>the Hungarian Parliamentary Guard</strong>, <strong><a class="maplink" data-title="Hungarian Ministry of Justice">Hungarian Ministry of Justice</a></strong>, <strong>Witness Protection Service</strong>, <strong>the</strong> <strong>National Directorate-General for Aliens Policing and Institution of the President of the Republic.</strong><a href="#fnref14" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
||||||
<li id="fn15" role="doc-endnote"><p>Automated Fingerprint Identification System. The system can be interrogated via a software developed by the company <strong><a class="maplink" data-title="Reco 3.26">Reco 3.26</a></strong>, a subsidiary of <strong><a class="maplink" data-title="Parsec 3.26">Parsec 3.26</a></strong>. Another software used is provided by the Japanese company <strong><a class="maplink" data-title="NEC">NEC</a></strong>.<a href="#fnref15" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
<li id="fn15" role="doc-endnote"><p>Automated Fingerprint Identification System. The system can be interrogated via a software developed by the company <strong><a class="maplink" data-title="Reco 3.26">Reco 3.26</a></strong>, a subsidiary of <strong><a class="maplink" data-title="Parsec 3.26">Parsec 3.26</a></strong>. Another software used is provided by the Japanese company <strong><a class="maplink" data-title="NEC">NEC</a></strong>.<a href="#fnref15" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
||||||
<li id="fn16" role="doc-endnote"><p>Biometric Data Processing System (criminal data array), supported by database software from <strong><a class="maplink" data-title="RIX Technologies">RIX Technologies</a></strong><a href="#fnref16" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
<li id="fn16" role="doc-endnote"><p>Biometric Data Processing System (criminal data array), supported by database software from <strong><a class="maplink" data-title="RIX Technologies">RIX Technologies</a></strong><a href="#fnref16" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
||||||
<li id="fn17" role="doc-endnote"><p>Habitoscopic Data Register<a href="#fnref17" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
<li id="fn17" role="doc-endnote"><p>Habitoscopic Data Register<a href="#fnref17" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
||||||
<li id="fn18" role="doc-endnote"><p>Central Automatic TeChnology for Recognition of Persons, managed by the <strong><a class="maplink" data-title="Centrum voor Biometrie">Centrum voor Biometrie</a></strong>, connected to the <strong><a class="maplink" data-title="Dutch Judicial Information Service (Justid)">Dutch Judicial Information Service (Justid)</a>.</strong><a href="#fnref18" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
<li id="fn18" role="doc-endnote"><p>Central Automatic TeChnology for Recognition of Persons, managed by the <strong><a class="maplink" data-title="Centrum voor Biometrie">Centrum voor Biometrie</a></strong>, connected to the <strong><a class="maplink" data-title="Dutch Judicial Information Service (Justid)">Dutch Judicial Information Service (Justid)</a>.</strong><a href="#fnref18" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
||||||
<li id="fn19" role="doc-endnote"><p>The database uses <strong>VeriLook</strong> and <strong>Face Trace</strong> software from the Lithuanian company <strong><a class="maplink" data-title="Neurotechnology">Neurotechnology</a>.</strong><a href="#fnref19" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
<li id="fn19" role="doc-endnote"><p>The database uses <strong>VeriLook</strong> and <strong>Face Trace</strong> software from the Lithuanian company <strong><a class="maplink" data-title="Neurotechnology">Neurotechnology</a>.</strong><a href="#fnref19" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
||||||
<li id="fn20" role="doc-endnote"><p>Automated Biometric Identification System, searchable by the <strong>IntellQ</strong> software from the company <strong><a class="maplink" data-title="IntellByte">IntellByte</a>,</strong> managed by the <strong>Ministry of the Interior (Croatia).</strong><a href="#fnref20" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
<li id="fn20" role="doc-endnote"><p>Automated Biometric Identification System, searchable by the <strong>IntellQ</strong> software from the company <strong><a class="maplink" data-title="IntellByte">IntellByte</a>,</strong> managed by the <strong><a class="maplink" data-title="Ministry of Interior (Croatia)">Ministry of the Interior</a> (Croatia).</strong><a href="#fnref20" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
||||||
<li id="fn21" role="doc-endnote"><p>Central Biometric Information System<a href="#fnref21" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
<li id="fn21" role="doc-endnote"><p>Central Biometric Information System<a href="#fnref21" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
||||||
<li id="fn22" role="doc-endnote"><p>National Biometric Identification System<a href="#fnref22" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
<li id="fn22" role="doc-endnote"><p>National Biometric Identification System<a href="#fnref22" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
||||||
<li id="fn23" role="doc-endnote"><p>Managed by the <strong>Photographic and Graphic Laboratory of Criminalistic Services,</strong> using search software by the company <strong><a class="maplink" data-title="Unidas">Unidas</a></strong><a href="#fnref23" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
<li id="fn23" role="doc-endnote"><p>Managed by the <strong>Photographic and Graphic Laboratory of Criminalistic Services,</strong> using search software by the company <strong><a class="maplink" data-title="Unidas">Unidas</a></strong><a href="#fnref23" class="footnote-back" role="doc-backlink">↩︎</a></p></li>
|
||||||
|
|
Loading…
Reference in a new issue