New items and some fixes

Ruben van de Ven 2018-10-12 14:47:58 +02:00
parent ab13e038c8
commit 6d3d7be124
4 changed files with 349 additions and 145 deletions


@ -46,10 +46,14 @@
<circle cx="40" cy="40" r="40" /> <circle cx="40" cy="40" r="40" />
</clipPath> </clipPath>
<pattern id="img1" patternUnits="userSpaceOnUse" width="100" height="100"> <pattern id="shade1" patternUnits="userSpaceOnUse" width="4" height="4">
<image xlink:href="testback.png" x="0" y="0" width="100" height="100" /> <path d="M 1,0 1,4" style="stroke:white;stroke-width:1.4px;"></path>
<path d="M 3,0 3,4" style="stroke:white;stroke-width:1.4px;"></path>
</pattern> </pattern>
</defs> </defs>
<g id='initPlaceholder' class="nodes">
<g class="node Person centeredNode visibleNode selectedNode" transform="translate(-0.468387442807449,0.0096912027826707)"><path id="nodePath82" d="M0 0 m -36, 0 a 36,36 0 1,0 72,0 a 36,36 0 1,0 -72,0"></path><circle r="40" class="nodeBg"></circle><circle r="43.2" class="highlightCircle"></circle><text class="nodeType">Person</text><text class="nodeYear" y="22"></text><text class="nodeTitle" y="5" transform="scale(0.7670250953737731)">Ruben van de Ven</text></g>
</g>
</svg> </svg>
<div id="nodeDetails"> <div id="nodeDetails">
</div> </div>


@ -15,9 +15,10 @@
{ {
"@id": "http://plottingd.at/a", "@id": "http://plottingd.at/a",
"@type": "PerformingGroup", "@type": "PerformingGroup",
"name": "PlottingD.at/a", "name": "Plotting Data: dramatisation as tactic",
"url": "http://plottingd.at/a", "url": "http://plottingd.at/a",
"description": "A collaboration between Ruben van de Ven and Cristina Cochior.", "foundingDate": "2018",
"description": "A research into <em>Data Dramatisation</em> as a tactic to make data visualisations more transparent in their underlying procedures of data collection. We advocate a form of data literacy that is not so much focussed on programming skill, but rather one that brings an understanding of data infrastructures: allowing for restistance against data driven governance.",
"member": [ "member": [
{"@id": "http://randomizer.info"} {"@id": "http://randomizer.info"}
] ]
@ -28,7 +29,7 @@
"@type": "MediaObject", "@type": "MediaObject",
"name": "Sustaining Gazes", "name": "Sustaining Gazes",
"dateCreated": "2018", "dateCreated": "2018",
"description": "...", "description": "In analytics and statistics, data visualisations, such as the heatmap, are used as tools bring forward patterns in data. These data visualisations, are often presented as objective tools of knowledge: data supposedly do not lie. What is often neglected however, are the subjective and political intricacies embedded within the datasets, and its method of visualisation. In collaboration with Cristina Cochior I am exploring the concept of data dramatisations, as an opposite to data visualisations. Rather than aiming for the objective, we explore the affective & performative aspects of data gathering and processing.\n\nSustaining Gazes is a data dramatisation which looks at the role between the visualisation and its subject. A heatmap is generated by looking at it, which directly influences the looking. Areas which are looked at turn into interesting shapes, inviting even more observation. The pattern of looking is reinforced though its visualisation. This cycle reveals both a hierarchy of power between algorithm and subject, as well as a point for intervention.",
"@reverse": { "@reverse": {
"workFeatured": [ "workFeatured": [
{ {
@ -37,8 +38,8 @@
"url": "http://routedunord.nl/portfolio-item/ruben-van-de-ven-2/", "url": "http://routedunord.nl/portfolio-item/ruben-van-de-ven-2/",
"location": { "location": {
"@type": "Place", "@type": "Place",
"name": "Rotterdam", "name": "ZOHO",
"address": "MuseumsQuartier /Vienna" "address": "Rotterdam"
}, },
"startDate": "2018-05", "startDate": "2018-05",
"endDate": "2018-05", "endDate": "2018-05",
@ -59,14 +60,23 @@
}, },
{ {
"@type": "CreativeWorkSeries", "@type": "CreativeWorkSeries",
"name": "MVP's", "name": "MVPs",
"about": "lorem ipsum etc.", "about": "November 2017 In4Art approached me to be part of their KickstART project. They commissioned three works to be part of their auction. The goal was to explicitly develop works that fitted the commercial art scene.\n\nRather than creating 'sellable' works myself, I took the start-up culture that forms the foundation of In4Art, as my object of my series. I augmented the works of three other participants with business models taken from the online/digital realm. This resulted in three <em>Minimum Viable Products</em>. Recontextualising these business models towards an arts context, results in new perspectives on both the arts as well as start-up culture.",
"dateCreated":"2018",
"hasPart": [ "hasPart": [
{ {
"@type": "MediaObject", "@type": "MediaObject",
"name": "MVP#1", "name": "MVP#1 Gathering viewing statistics for Donald Schenkel",
"dateCreated": "2018", "dateCreated": "2018",
"description": "....", "description": "The work of Donald Schenkel is augmented with a camera which keeps track of how long people look at the work. Like an electricity meter, the value increases the more the work is <em>used</em>. This data is then sent to a server, and provides the artist &mdash;in this case Donald Schenkel&mdash; with live statistics. But who owns this data generated by the work? The buyer, the artist, or the mediator &mdash; which is me?\n\nThis is the first augmentation that is part of the MVP series created for KickstART.",
"width": "45cm + 15cm",
"height": "55cm + 8cm",
"artworkSurface": "Oilpaint on wood + WiFi-connected RaspberryPi in ABS enclosure",
"contributor": {
"@type": "Person",
"name": "Donald Schenkel",
"url": "http://www.donaldschenkel.nl/"
},
"@reverse": { "@reverse": {
"workFeatured": [ "workFeatured": [
{ {
@ -96,13 +106,33 @@
] ]
}, },
"image": [ "image": [
{
"@type": "ImageObject",
"contentUrl": "assets\/image\/mvp1-1.jpg"
},
{
"@type": "ImageObject",
"contentUrl": "assets\/image\/mvp1-2.jpg"
},
{
"@type": "ImageObject",
"contentUrl": "assets\/image\/mvp1-4.jpg"
}
] ]
}, },
{ {
"@type": "MediaObject", "@type": "MediaObject",
"name": "MVP#2", "name": "MVP#2 Joseph Huot's Limited Edition",
"dateCreated": "2018", "dateCreated": "2018",
"description": "....", "description": "In September 2017, huricane Irma was racing towards Florida. Everybody tried to get away from there as quick as possible. Stuck were those with the electric Tesla Model S with the cheaper battery option (60kWh). Then, all of a sudden, Tesla send a software update to these cars. The cars could drive futher then ever before. Until 72 hours later, Tesla reversed the software update. Exactly the same car could drive shorter distances.\n\nThis is the business of software companies: just like Apple limits performance on old iPhones, Tesla used softare to limit the reach of its cars in order to make more money. While physically the exact same product, it can do less.\n\n<em>Minimum Viable Product #2</em> brings this business model to the art world. It provides a <em>limited</em> edition of a drawing by Joseph Huot.",
"width":"32.5cm",
"height":"50cm",
"artworkSurface": "Pencil drawing in metal LCD enclosure",
"contributor": {
"@type": "Person",
"name": "Joseph Huot",
"url": "http://www.joseph-huot.com/"
},
"@reverse": { "@reverse": {
"workFeatured": "workFeatured":
{ {
@ -110,13 +140,33 @@
} }
}, },
"image": [ "image": [
{
"@type": "ImageObject",
"contentUrl": "assets\/image\/mvp2-01.jpg"
},
{
"@type": "ImageObject",
"contentUrl": "assets\/image\/mvp2-02.jpg"
},
{
"@type": "ImageObject",
"contentUrl": "assets\/image\/mvp2-5.jpg"
}
] ]
}, },
{ {
"@type": "MediaObject", "@type": "MediaObject",
"name": "MVP#3", "name": "MVP#3 Customiseyour.art - Mikel Folgerts 1/3",
"dateCreated": "2018", "dateCreated": "2018",
"description": "....", "description": "On YouTube, Instagram, and many other online platforms, celebrities get paid to place consumer products in their videos. Not prominently, but hidden in plain sight. Can precarious artists finance their work by employing this product placement?\n\n<em>Minimum Viable Product</em> #3 allows buyers to customise the work they bought &mdash;Rotterdam, by Mikel Folgerts&mdash;, linking the status of the artist to their product.",
"width":"40cm",
"height":"40cm",
"artworkSurface": "Personalised print on perspex",
"contributor": {
"@type": "Person",
"name": "Mikel Folgerts",
"url": "https://www.instagram.com/mikelfolgerts/"
},
"@reverse": { "@reverse": {
"workFeatured": "workFeatured":
{ {
@ -124,6 +174,10 @@
} }
}, },
"image": [ "image": [
{
"@type": "ImageObject",
"contentUrl": "assets\/image\/mvp3-1.jpg"
}
] ]
} }
] ]
@ -132,32 +186,62 @@
"@type": "MediaObject", "@type": "MediaObject",
"name": "Spectacular Spectator Mood Meter", "name": "Spectacular Spectator Mood Meter",
"dateCreated": "2017", "dateCreated": "2017",
"description": "...", "description": "Commisioned by V2_ for Evening of the Black Box Concern. Researchers of digital culture often regard artificial intelligence as <em>black boxes</em>. However, developers of these systems often regard the humans that are analysed as black boxes.\n\nWhat happens when we use black boxes (AI) to analyse black boxes (humans) and present these back to black boxes (humans)?\n\nThis prototype used emotion recognition software by Affectiva to analyse the audience of the talks. It then highlighted the moments in the talks for which outliers in the data were found. Can we use this data to analyse either of the black boxes?",
"producer": {
"@id": "https://rubenvandeven.com/#v2_"
},
"@reverse": { "@reverse": {
"workFeatured": [ "workFeatured": [
{ {
"@type": "ExhibitionEvent", "@type": "ExhibitionEvent",
"name": "The Black Box Concern", "name": "Evening of the Black Box Concerns",
"url": "....", "url": "http://v2.nl/events/evening-of-the-black-box-concerns",
"location": { "location": {
"@id": "https://rubenvandeven.com/#v2_" "@id": "https://rubenvandeven.com/#v2_"
}, },
"startDate": "2017-12", "startDate": "2017-12-7",
"workFeatured": [] "workFeatured": []
} }
] ]
}, },
"image": [ "image": [
{
"@type": "ImageObject",
"contentUrl": "assets\/image\/moodmeter1-1.jpg"
},
{
"@type": "ImageObject",
"contentUrl": "assets\/image\/moodmeter2.jpg"
},
{
"@type": "ImageObject",
"contentUrl": "assets\/image\/moodmeter3.jpg"
},
{
"@type": "ImageObject",
"contentUrl": "assets\/image\/moodmeter4.jpg"
}
] ]
}, },
{ {
"@type": "MediaObject", "@type": "MediaObject",
"dateCreated": "2016",
"name": "Emotion Hero", "name": "Emotion Hero",
"description": "Emotion recognition software is being used both as a tool for \u2018objective\u2019 measurements as well as a tool for training one\u2019s facial expressions, eg. for job interviews. Emotion Hero is a literal translation of the paradoxical relation between these applications of the technology.\n\nEmotion Hero is a two-part artwork. On the one hand is a video-game that is freely downloadable for everybody with an Android device (see <a href=\"https://play.google.com\/store\/apps\/details?id=com.rubenvandeven.emotion_hero\">Google Play<\/a>). Inspired by Guitar Hero, the user scores points by following given cues. It provides detailed feedback on the mechanics of the face (eg. \u201cYou showed on 10% Joy when you had to show 100%, smile 99.32% more.\u201d), revealing that rather than being a window into the brain, the face is a controllable surface.\n\nThe second part is a projection that shows the aggregated scores of the game. In order to substantiate their discourse, companies in facial expression measurement employ a huge amount of data collection and processing. The results are displayed in a fixed grid, recalling historical practices that, trough extensive measurement and administration, also aimed to delineate something which is conceptually undelineated: think of Duchenne de Boulogne, Lombroso, and Charcot.\n\nEmotion Hero is a playful invitation to open up the box of expression analysis to reveal the assumptions that underlie this technology.\nThe game's emotional intelligence is powered by Affectiva (I was also <a href=\"http://blog.affectiva.com\/sdk-on-the-spot-emotion-hero-app-encourages-play-with-facial-expressions\">interviewed<\/a> by them). This project is produced as part of the <a href=\"http://summersessions.net\/17-projects\/projects-2016\/55-emotion-hero\">Summer Sessions Network for Talent Development<\/a> in a co-production of Arquivo 237 and V2_ Lab for the Unstable Media, with support of the Creative Industries Fund NL.\nIt has been exhibited at the <a href=\"http://www.statefestival.org\/2016\/program-entry\/2016\/emotion-hero-2016\">State Festival 2016<\/a> (Berlin, DE) and Digital &lt;Dis&gt;orders (Frankfurt, DE).", "description": "Emotion recognition software is being used both as a tool for \u2018objective\u2019 measurements as well as a tool for training one\u2019s facial expressions, eg. for job interviews. Emotion Hero is a literal translation of the paradoxical relation between these applications of the technology.\n\nEmotion Hero is a two-part artwork. On the one hand is a video-game that is freely downloadable for everybody with an Android device (see <a href=\"https://play.google.com\/store\/apps\/details?id=com.rubenvandeven.emotion_hero\">Google Play<\/a>). Inspired by Guitar Hero, the user scores points by following given cues. It provides detailed feedback on the mechanics of the face (eg. \u201cYou showed on 10% Joy when you had to show 100%, smile 99.32% more.\u201d), revealing that rather than being a window into the brain, the face is a controllable surface.\n\nThe second part is a projection that shows the aggregated scores of the game. In order to substantiate their discourse, companies in facial expression measurement employ a huge amount of data collection and processing. 
The results are displayed in a fixed grid, recalling historical practices that, through extensive measurement and administration, also aimed to delineate something which is conceptually undelineated: think of Duchenne de Boulogne, Lombroso, and Charcot.\n\nEmotion Hero is a playful invitation to open up the box of expression analysis to reveal the assumptions that underlie this technology.\nThe game's emotional intelligence is powered by Affectiva (I was also <a href=\"http://blog.affectiva.com\/sdk-on-the-spot-emotion-hero-app-encourages-play-with-facial-expressions\">interviewed<\/a> by them). This project is produced as part of the <a href=\"http://summersessions.net\/17-projects\/projects-2016\/55-emotion-hero\">Summer Sessions Network for Talent Development<\/a> in a co-production of Arquivo 237 and V2_ Lab for the Unstable Media, with support of the Creative Industries Fund NL.\nIt has been exhibited at the <a href=\"http://www.statefestival.org\/2016\/program-entry\/2016\/emotion-hero-2016\">State Festival 2016<\/a> (Berlin, DE) and Digital &lt;Dis&gt;orders (Frankfurt, DE).",
"url": "https://emotionhero.com", "url": "https://emotionhero.com",
"@reverse": { "@reverse": {
"workFeatured": [ "workFeatured": [
{
"@type": "ExhibitionEvent",
"name": "ECP Conference",
"location": {
"@type": "Place",
"name": "Fokker Terminal"
},
"startDate": "2018-11-15",
"endDate": "2018-11-15",
"workFeatured": []
},
{ {
"@type": "ExhibitionEvent", "@type": "ExhibitionEvent",
"name": "Mood Swings", "name": "Mood Swings",
@ -186,7 +270,7 @@
{ {
"@type": "ExhibitionEvent", "@type": "ExhibitionEvent",
"name": "Plan D", "name": "Plan D",
"url": "...", "url": "http://2017.pland.hr/vijesti/dodijeljena-nagrada-i-pocasne-diplome-festivala-plan-d/",
"location": "Zagreb", "location": "Zagreb",
"startDate": "2017-10", "startDate": "2017-10",
"endDate": "2018-10", "endDate": "2018-10",
@ -196,7 +280,7 @@
{ {
"@type": "ExhibitionEvent", "@type": "ExhibitionEvent",
"name": "Ars Electronica", "name": "Ars Electronica",
"url": "...", "url": "http://v2.nl/events/summer-sessions-at-ars-electronica-festival-2017",
"location": { "location": {
"@type": "Place", "@type": "Place",
"name": "Ars Electronica", "name": "Ars Electronica",
@ -211,7 +295,7 @@
{ {
"@type": "ExhibitionEvent", "@type": "ExhibitionEvent",
"name": "Microbites of Creativity", "name": "Microbites of Creativity",
"url": "...", "url": "http://microbites.me/",
"organiser": "ACM Creativity & Cognition", "organiser": "ACM Creativity & Cognition",
"location": { "location": {
"@type": "Place", "@type": "Place",
@ -252,6 +336,7 @@
{ {
"@type": "ExhibitionEvent", "@type": "ExhibitionEvent",
"name": "Manipulation: Emotion Hero", "name": "Manipulation: Emotion Hero",
"description": "I organised this exhibition as part of my Summer Sessions residency at Arquivo237. It was a modest exhibition covering my research on emotion recognition software.",
"location": { "location": {
"@type": "Place", "@type": "Place",
"name": "Arquivo237", "name": "Arquivo237",
@ -272,22 +357,15 @@
"@type": "ImageObject", "@type": "ImageObject",
"contentUrl": "assets\/image\/emotionhero2.jpg" "contentUrl": "assets\/image\/emotionhero2.jpg"
} }
],
"subjectOf": [
{
"@type": "BlogPosting",
"url": "http://blog.affectiva.com\/sdk-on-the-spot-emotion-hero-app-encourages-play-with-facial-expressions",
"dateCreated": "2017-01-1-18",
"publisher": "Affectiva",
"name": "SDK On the Spot: Emotion Hero Project Encourages Play with Facial Expressions"
}
] ]
}, },
{ {
"@type": "MediaObject", "@type": "MediaObject",
"name": "Choose How You Feel; You Have Seven Options", "name": "Choose How You Feel; You Have Seven Options",
"dateCreated": "2016", "dateCreated": "2016",
"description": "...", "duration":"9M9S (∞ loop)",
"artworkSurface": "3 projections",
"description": "What does it mean to feel 47% happy and 21% surprised? <em>Choose how you feel; you have seven options</em> is a video work that revolves around this question as it looks at software that derives emotional parameters from facial expressions. It combines human accounts and algorithmic processing to examine the intersection of highly cognitive procedures and ambiguous experiences. Born from a fascination with the technological achievements, the work interrogates the discursive apparatus the software is embedded in.\n\nThis work builds on my research into the workings of expression analysis technologies and the assumptions that underlie it, scrutinising the claims that are made by the companies developing the software.",
"@reverse": { "@reverse": {
"workFeatured": [ "workFeatured": [
{ {
@ -298,35 +376,35 @@
"name": "Nieuwe Vide", "name": "Nieuwe Vide",
"address": "Haarlem" "address": "Haarlem"
}, },
"startDate": "2018-02", "startDate": "2018-02-02",
"endDate": "2018-03", "endDate": "2018-03-04",
"workFeatured": [] "workFeatured": []
}, },
{ {
"@type": "ExhibitionEvent", "@type": "ExhibitionEvent",
"name": "Test_Lab the Graduation Edition", "name": "Test_Lab the Graduation Edition",
"url": "...", "url": "http://v2.nl/events/test_lab-the-graduation-edition-2015-1/",
"location": { "location": {
"@id": "https://rubenvandeven.com/#v2_", "@id": "https://rubenvandeven.com/#v2_",
"@type": "Place", "@type": "Place",
"name": "V2_", "name": "V2_",
"address": "Rotterdam" "address": "Rotterdam"
}, },
"startDate": "2016-07", "startDate": "2016-07-07 20:00",
"endDate": "2016-07", "endDate": "2016-07-07 23:00",
"workFeatured": [] "workFeatured": []
}, },
{ {
"@type": "ExhibitionEvent", "@type": "ExhibitionEvent",
"name": "Fuzzy Logic - Graduation Show", "name": "Fuzzy Logic - Graduation Show",
"url": "...", "url": "https://pzimediadesign.nl/2016.html",
"location": { "location": {
"@type": "Place", "@type": "Place",
"name": "Piet Zwart Institute", "name": "Piet Zwart Institute",
"address": "Rotterdam" "address": "Rotterdam"
}, },
"startDate": "2016-06", "startDate": "2016-06-17",
"endDate": "2016-06", "endDate": "2016-06-26",
"workFeatured": [] "workFeatured": []
} }
] ]
@ -346,7 +424,8 @@
"@type": "MediaObject", "@type": "MediaObject",
"name": "EYE Without A Face", "name": "EYE Without A Face",
"dateCreated": "2016", "dateCreated": "2016",
"description": "...", "duration": "11:32 (∞ loop)",
"description": "Whether the video frames are ordered by time or by emotion will not make a difference to a computer. For it, both orderings are just as logical. However, for the human spectator the reordered display of frames becomes a disruptive process. The human is positioned as a required agent for meaning making in an algorithmic procedure.\n\nIn collaboration with Cristina Cochior I went manually through the Eye's public collection, and catalogued faces by surrendering them to an emotion detection algorithm. Cutting from one face to another,its uncritical selection produced a new portrait of emotional gradients moving in-between anger and happiness.",
"author": { "author": {
"@id": "http://randomizer.info", "@id": "http://randomizer.info",
"@type": "Person", "@type": "Person",
@ -374,11 +453,11 @@
"name": "Video Vortex", "name": "Video Vortex",
"location": { "location": {
"@type": "Place", "@type": "Place",
"name": "...", "name": " Mill Hall / Kochi-Muzeris biennial",
"address": "Kochi (India)" "address": "Kochi (India)"
}, },
"startDate": "2017-04", "startDate": "2017-04-23",
"endDate": "2017-04" "endDate": "2017-04-26"
}, },
{ {
"@type": "ExhibitionEvent", "@type": "ExhibitionEvent",
@ -394,34 +473,56 @@
{ {
"@type": "ExhibitionEvent", "@type": "ExhibitionEvent",
"name": "Boundaries of the Archive", "name": "Boundaries of the Archive",
"description": "The Piet Zwart Institute at the Eye Film Museum in Amsterdam as part of its ResearchLab series. The exhibition focussed on the boundaries of the archive. Studying the structures and cultural impacts of our media technologies, it concentrated on the intricate and usually hidden aspects of EYE's extensive archive",
"location": { "location": {
"@type": "Place", "@type": "Place",
"name": "EYE Film Museum", "name": "EYE Film Museum",
"address": "Amsterdam" "address": "Amsterdam"
}, },
"startDate": "2016-04", "startDate": "2016-04-12",
"endDate": "2014-04" "endDate": "2014-04-24"
} }
] ]
} }
}, },
{
"@type": "ScholarlyArticle",
"name": "Article - Emotion Hero",
"pageStart": "422",
"pageEnd": "423",
"isPartOf": "C&C '17 Proceedings of the 2017 ACM SIGCHI Conference on Creativity and Cognition",
"description": "What does it mean to feel 62% joy, and 15% surprised? Over the past years the digitization of emotions is booming business: multimillion dollar investments are made in technologies of which is claimed that they give companies an objective view in their consumers' feelings. The video-game-artwork Emotion Hero challenges the user to investigate this claim and question the premise of the technology. Emotion Hero is a two-part artwork. On the one hand is a video-game that is freely downloadable for everybody with an Android device. Inspired by Guitar Hero, the user scores points by following given cues. It provides detailed feedback on the mechanics of the face. The second part is a projection that shows a grid with aggregated scores of the game, that updates live. In its design, the grid refers to 19th century positivist practices.",
"url": "https://doi.org/10.1145/3059454.3059490/",
"publisher": "ACM New York, NY, USA",
"datePublished": "2017-01-27"
},
{ {
"@type": "Report", "@type": "Report",
"name": "Choose How You Feel; You Have Seven Options", "name": "Longform - Choose How You Feel; You Have Seven Options",
"description": "What does it mean to feel 82% surprised or 93% joy? As part of their Longform series, the Institute of Network Cultures published my research into software that derives emotional scores from facial expressions.",
"url": "http://networkcultures.org/longform/2017/01/25/choose-how-you-feel-you-have-seven-options/", "url": "http://networkcultures.org/longform/2017/01/25/choose-how-you-feel-you-have-seven-options/",
"datePublished": "2017-01-27", "datePublished": "2017-01-27",
"publisher": "Institute of Network Cultures" "publisher": {
"@type": "Organization",
"name": "Institute of Network Cultures",
"url": "http://networkcultures.org"
}
}, },
{ {
"@type": "MediaObject", "@type": "MediaObject",
"name": "We know how you feel", "name": "We know how you feel",
"dateCreated": "2015", "dateCreated": "2015",
"description": "A two-part artwork and my first work on software that derives emotional parameters from facial expressions. The first part displays the <em>Mind Reading Emotions Library<\/em>, an interactive collection of videos, audio fragments and scenes depicting 412 distinct emotions &mdash;ranging from `angry' to `unsure'&mdash; grouped in 24 categories. The second part of the project is a tablet with a modified demo app by Affectiva, a major player in the field of emotion analysis software. This app acts as an interactive mirror which displays the various parameters that the Affectiva software derives from someone's facial expression, while a voice over reads a text extracted from the Dutch classic <em>Beyond Sleep<\/em> by W.F. Hermans (1966), concerning the impact of the mirror, photography and video on the human self-image.\n\nIt was part of the exhibition <em><a href='http://v2.nl\/events\/an-encyclopedia-of-media-objects'>Encyclopedia Of Media Objects<\/a><\/em>, organised by the Piet Zwart Institute and held at the V2_ Institute for unstable media in June 2015.", "description": "A two-part artwork and my first work on software that derives emotional parameters from facial expressions. The first part displays the <em>Mind Reading Emotions Library<\/em>, an interactive collection of videos, audio fragments and scenes depicting 412 distinct emotions &mdash;ranging from `angry' to `unsure'&mdash; grouped in 24 categories. The second part of the project is a tablet with a modified demo app by Affectiva, a major player in the field of emotion analysis software. This app acts as an interactive mirror which displays the various parameters that the Affectiva software derives from someone's facial expression, while a voice over reads a text extracted from the Dutch classic <em>Beyond Sleep<\/em> by W.F. Hermans (1966), concerning the impact of the mirror, photography and video on the human self-image.",
"image": [ "image": [
{ {
"@type": "ImageObject", "@type": "ImageObject",
"contentUrl": "assets\/image\/we_know_how_you_feel.jpg", "contentUrl": "assets\/image\/we_know_how_you_feel.jpg",
"caption": "Fragments of reality" "caption": "We know how you feel"
},
{
"@type": "ImageObject",
"contentUrl": "assets\/image\/we_know_how_you_feel-3.jpg",
"caption": "Mind Reading Emotions Library"
} }
], ],
"@reverse": { "@reverse": {
@ -434,10 +535,11 @@
}, },
{ {
"@type": "ExhibitionEvent", "@type": "ExhibitionEvent",
"name": "Encyclopedia of Media Objects", "name": "An Encyclopedia of Media Objects",
"url": "...", "url": "http://v2.nl/events/an-encyclopedia-of-media-objects",
"startDate": "2015-06", "startDate": "2015-06-26",
"endDate": "2015-06", "endDate": "2015-06-27",
"description": "What is an encyclopedia? What are media? What is an object? Students of the Piet Zwart Institute Media Design Masters programme invite you to look at these issues. Twelve installations and one choreography present a taxonomy of disappearing, human and non-human, introvert and collected media objects.\n\nThis exhibition was developed in a 'Thematic Seminar' run in the summer trimester 2015 taught by Florian Cramer.\n\nArtists: Lucas Battich [IT], Manetta Berends [NL], Julie Boschat Thorez [FR], Cihad Caner [TR], Joana Chicau [PT], Cristina Cochior [RO], Solange Frankort [NL], Arantxa Gonlag [NL], Anne Lamb [US], Benjamin Li [NL], Yuzhen Tang [CN], Ruben van de Ven [NL] & Thomas Walskaar [NO]",
"location": { "location": {
"@id": "https://rubenvandeven.com/#v2_" "@id": "https://rubenvandeven.com/#v2_"
} }
@ -524,8 +626,8 @@
"name": "Leidse Schouwburg", "name": "Leidse Schouwburg",
"address": "Leiden" "address": "Leiden"
}, },
"startDate": "2012", "startDate": "2012-11-14",
"endDate": "2012", "endDate": "2012-11-17",
"workFeatured": [] "workFeatured": []
} }
] ]
@ -572,8 +674,8 @@
"name": "Ward Goes", "name": "Ward Goes",
"url": "http://www.wardgoes.nl" "url": "http://www.wardgoes.nl"
}, },
"description": "Originally initiated as a graduation project (cum laude) by Ward Goes, <a href=\"http://spectacularspectacular.news\">The Spectacular Times<\/a> is an ongoing visual inquiry into the presentation of news. It investigates how news, rather than being subjective, is representational of its social, cultural, (geo)political and ideological contexts. The project sets out to lay bare these representations in currently circulating news.\n\nBy layering and contrasting different news elements The Spectacular Times re-contextualises news and makes explicit the intangible notions that lie beyond an increasingly universal guise of news reporting. Not in an effort to tell apart true from false, left from right, or right from wrong, but in order to accentuate a variety of articulations of news.\n\nThis web based project uses user defined variables to animate news headers, texts and images, which are directly sourced from different news websites world wide. By adjusting these parameters (among which region, scope and speed) the spectator actively perceives how news content is de- and reformed through its aesthetics.",
"url":"http://spectacularspectacular.news", "url":"http://spectacularspectacular.news",
"description": "Originally initiated as a graduation project (cum laude) by Ward Goes, <a href=\"http://spectacularspectacular.news\">The Spectacular Times<\/a> is an ongoing visual inquiry into the presentation of news. It investigates how news, rather than being subjective, is representational of its social, cultural, (geo)political and ideological contexts. The project sets out to lay bare these representations in currently circulating news.\n\nBy layering and contrasting different news elements The Spectacular Times re-contextualises news and makes explicit the intangible notions that lie beyond an increasingly universal guise of news reporting. Not in an effort to tell apart true from false, left from right, or right from wrong, but in order to accentuate a variety of articulations of news.\n\nThis web based project uses user defined variables to animate news headers, texts and images, which are directly sourced from different news websites world wide. By adjusting these parameters (among which region, scope and speed) the spectator actively perceives how news content is de- and reformed through its aesthetics.",
"image": [ "image": [
{ {
"@type": "ImageObject", "@type": "ImageObject",


@ -25,6 +25,18 @@ function getNodeYear(n){
if(typeof n['dateCreated'] !== 'undefined') { if(typeof n['dateCreated'] !== 'undefined') {
return n['dateCreated'].substr(0,4); return n['dateCreated'].substr(0,4);
} }
if(typeof n['datePublished'] !== 'undefined') {
return n['datePublished'].substr(0,4);
}
if(typeof n['startDate'] !== 'undefined') {
return n['startDate'].substr(0,4);
}
if(typeof n['endDate'] !== 'undefined') {
return n['endDate'].substr(0,4);
}
if(typeof n['foundingDate'] !== 'undefined') {
return n['foundingDate'].substr(0,4);
}
return null; return null;
} }
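For reference, with the added fallbacks the year label is simply the first four characters of whichever date-like field is found first (dateCreated, then datePublished, startDate, endDate, foundingDate). A minimal sketch of the expected results, using hypothetical node objects:
getNodeYear({"dateCreated": "2016"});      // "2016"
getNodeYear({"startDate": "2018-05"});     // "2018" — only the year part is kept
getNodeYear({"foundingDate": "2018"});     // "2018"
getNodeYear({});                           // null — no date-like property present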
/** /**
@ -255,6 +267,9 @@ showMoreTypeLinksEl.addEventListener('click', function () {
var svg = d3.select("svg"), var svg = d3.select("svg"),
width = +svg.attr("width"), width = +svg.attr("width"),
height = +svg.attr("height"); height = +svg.attr("height");
var container = svg.append("g")
.attr("id", "container")
;
var simulation = d3.forceSimulation() var simulation = d3.forceSimulation()
.force("link", d3.forceLink().id(function(d) { return d["id"]; }).strength(.005)) .force("link", d3.forceLink().id(function(d) { return d["id"]; }).strength(.005))
@ -268,7 +283,7 @@ var simulation = d3.forceSimulation()
; ;
var link = svg.append("g") var link = container.append("g")
.attr("class", "links") .attr("class", "links")
.selectAll(".relationship") .selectAll(".relationship")
.data(graph['links']) .data(graph['links'])
@ -288,7 +303,7 @@ var linkText = link
; ;
var node = svg.append("g") var node = container.append("g")
.attr("class", "nodes") .attr("class", "nodes")
.selectAll(".node") .selectAll(".node")
.data(graph.nodes) .data(graph.nodes)
@ -431,6 +446,7 @@ var setDetails = function(nodeDatum, nodeIdx) {
// TODO: replace relUp & relDown with linkMap // TODO: replace relUp & relDown with linkMap
let relUp = []; let relUp = [];
let relDown = []; let relDown = [];
let pageTitles = [];
let nodeDetailScalerEl = document.createElement('div'); let nodeDetailScalerEl = document.createElement('div');
// nodeDetailScalerEl.innerHTML = `<div id='scalarbar'></div>`; // nodeDetailScalerEl.innerHTML = `<div id='scalarbar'></div>`;
nodeDetailScalerEl.id = 'nodeDetailsScaler'; nodeDetailScalerEl.id = 'nodeDetailsScaler';
@ -464,8 +480,10 @@ var setDetails = function(nodeDatum, nodeIdx) {
} }
crumbWrapEl.appendChild(crumbEl); crumbWrapEl.appendChild(crumbEl);
breadcrumbsEl.appendChild(crumbWrapEl); breadcrumbsEl.appendChild(crumbWrapEl);
pageTitles.push(getNodeTitle(nodeMap[crumbNodeId]));
} }
nodeDetailEl.appendChild(breadcrumbsEl); nodeDetailEl.appendChild(breadcrumbsEl);
pageTitles.push(getNodeTitle(nodeDatum));
let titleAttr = getTitleAttribute(nodeDatum); let titleAttr = getTitleAttribute(nodeDatum);
let titleEl = document.createElement('h2'); let titleEl = document.createElement('h2');
@ -510,7 +528,8 @@ var setDetails = function(nodeDatum, nodeIdx) {
listEl.innerHTML += `<dt class='dt-${attr}'>${attr}</dt><dd class='dd-${attr}'><a href='${nodeAttr[i]}'>${nodeAttr[i]}</a></dd>`; listEl.innerHTML += `<dt class='dt-${attr}'>${attr}</dt><dd class='dd-${attr}'><a href='${nodeAttr[i]}'>${nodeAttr[i]}</a></dd>`;
listEl.innerHTML += `<dd class='dd-contentobject'><object data='${nodeAttr[i]}'></object></dd>`; listEl.innerHTML += `<dd class='dd-contentobject'><object data='${nodeAttr[i]}'></object></dd>`;
} else { } else {
listEl.innerHTML += `<dt class='dt-${attr}'>${attr}</dt><dd class='dd-${attr}'>${nodeAttr[i]}</dd>`; let valueHtml = nodeAttr[i].replace(/\n/g,"<br>");
listEl.innerHTML += `<dt class='dt-${attr}'>${attr}</dt><dd class='dd-${attr}'>${valueHtml}</dd>`;
} }
} }
} }
@ -581,6 +600,9 @@ var setDetails = function(nodeDatum, nodeIdx) {
nodeEls[nIdx].classList.remove('selectedNode'); nodeEls[nIdx].classList.remove('selectedNode');
} }
}); });
// TODO: update history & title
document.title = pageTitles.join(" :: ");
}; };
var closeDetails = function() { var closeDetails = function() {
document.body.classList.remove("detailsOpen"); document.body.classList.remove("detailsOpen");
@ -668,6 +690,7 @@ var setViewboxForceCenter = function() {
} }
setViewboxForceCenter(); // sets forceCx & forceCy setViewboxForceCenter(); // sets forceCx & forceCy
var graphInitialised = false;
simulation.force('centerActive', function force(alpha) { simulation.force('centerActive', function force(alpha) {
// let currentNode = node.selectAll('.detail'); // let currentNode = node.selectAll('.detail');
// console.log(currentNode); // console.log(currentNode);
@ -675,9 +698,18 @@ simulation.force('centerActive', function force(alpha) {
node.each(function(d, idx, nodes){ node.each(function(d, idx, nodes){
let n = d; let n = d;
let k = alpha * 0.1; let k = alpha * 0.1;
n.fx = null;
n.fy = null;
if(typeof nodePositions[idx] != 'undefined') { if(typeof nodePositions[idx] != 'undefined') {
if(graphInitialised == false) {
n.x = nodePositions[idx][0];
n.y = nodePositions[idx][1];
n.vx = 0;
n.vy = 0;
} else {
n.vx -= (n.x - nodePositions[idx][0]) * k * 5; n.vx -= (n.x - nodePositions[idx][0]) * k * 5;
n.vy -= (n.y - nodePositions[idx][1]) * k * 5; n.vy -= (n.y - nodePositions[idx][1]) * k * 5;
}
} else { } else {
// if it's not positioned, move it out of the circle // if it's not positioned, move it out of the circle
if(currentNodePositionRadius < 1) { if(currentNodePositionRadius < 1) {
@ -690,9 +722,16 @@ simulation.force('centerActive', function force(alpha) {
return; return;
} }
if(graphInitialised == false) {
// on init, fixate items outside of circle
n.fx = n.x + dx * (2+Math.random());
n.fy = n.y + dy * (2+Math.random());
} else {
// if initialised, gradually move them outwards
n.vx += dx * k*4; n.vx += dx * k*4;
n.vy += dy * k*4; n.vy += dy * k*4;
} }
}
}); });
}); });
@ -730,16 +769,34 @@ node.call(d3.drag()
// } // }
}); });
svg.call(d3.drag() // svg.call(d3.drag()
// .on("start", function(d){
// if(d3.event.sourceEvent.type == 'touchstart' && d3.event.sourceEvent.touches.length > 1) {
// } else {
// d3.event.sourceEvent.stopPropagation();
// svg.node().classList.add("dragging");
// }
// })
// .on("drag", function(){
// moveViewboxPx(d3.event.dx, d3.event.dy);
// })
// .on("end", function(){
// svg.node().classList.remove("dragging");
// }));
svg.call(d3.zoom()
.scaleExtent([0.3,3])
.on("start", function(){ .on("start", function(){
svg.node().classList.add("dragging"); svg.node().classList.add("dragging");
}) })
.on("drag", function(){
moveViewboxPx(d3.event.dx, d3.event.dy);
})
.on("end", function(){ .on("end", function(){
svg.node().classList.remove("dragging"); svg.node().classList.remove("dragging");
})); })
.on("zoom", function(a,b,c){
container.attr("transform", d3.event.transform);
})
);
// svg.call(d3.zoom.transform, d3.zoomIdentity);
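In this hunk the manual viewbox panning (d3.drag on the svg) is commented out in favour of d3's zoom behaviour, which writes its transform onto the new #container group. A minimal standalone sketch of that pattern, assuming d3 v4/v5 (where the current transform is exposed as d3.event.transform):
var svg = d3.select("svg");
var container = svg.append("g").attr("id", "container"); // nodes and links are appended to this group
svg.call(d3.zoom()
    .scaleExtent([0.3, 3]) // clamp zooming between 30% and 300%
    .on("zoom", function() {
        // the transform carries both translate and scale,
        // so panning and zooming move the whole graph at once
        container.attr("transform", d3.event.transform);
    })
);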
node.append('circle') node.append('circle')
.attr("r", nodeSize) .attr("r", nodeSize)
@ -763,16 +820,43 @@ node.append('text')
return getNodeYear(n); return getNodeYear(n);
}) })
; ;
node.append('text') let nodeTitle = node.append('text')
.attr("class", "nodeTitle") .attr("class", "nodeTitle")
.attr("y", "5") .attr("y", "5")
;
nodeTitle
// .append("textPath") // .append("textPath")
// .attr( "xlink:href",function(d, idx){return '#nodePath'+idx;}) // .attr( "xlink:href",function(d, idx){return '#nodePath'+idx;})
.text(getNodeTitle) // .text(getNodeTitle)
.each(function(){ .each(function(node, nodes){
let self = d3.select(this), let textLength;
textLength = self.node().getComputedTextLength() let self = d3.select(this);
let titleText = getNodeTitle(node);
if(titleText.length > 20 && titleText.indexOf(" ") > -1) {
let mid = Math.floor(titleText.length / 2);
mid = titleText.substr(0,mid).lastIndexOf(" ");
if(mid === -1) {
mid = titleText.indexOf(" ");
}
let titleText1 = titleText.substr(0, mid).trim();
let titleText2 = titleText.substr(mid).trim();
self.append("tspan")
.text(titleText1)
.attr("y", "-10")
.attr("x", "0")
; ;
let tspan = self.append("tspan")
.text(titleText2)
.attr("y", "10")
.attr("x", "0")
;
textLength = tspan.node().getComputedTextLength();
} else {
self.text(titleText);
textLength = self.node().getComputedTextLength();
}
// scale according to text length:
if(textLength > nodeSize * 2) { if(textLength > nodeSize * 2) {
self.attr('transform', `scale(${(nodeSize * 2) / textLength})`); self.attr('transform', `scale(${(nodeSize * 2) / textLength})`);
} }
@ -891,46 +975,6 @@ function ticked() {
} }
return "translate("+x+" "+y+") rotate("+deg+") translate(0, -10)"; return "translate("+x+" "+y+") rotate("+deg+") translate(0, -10)";
}); });
// linkPath.attr("d", function(d) {
// var x1 = d.source.x,
// y1 = d.source.y,
// x2 = d.target.x,
// y2 = d.target.y,
// dx = x2 - x1,
// dy = y2 - y1,
// dr = Math.sqrt(dx * dx + dy * dy),
// // Defaults for normal edge.
// drx = dr,
// dry = dr,
// xRotation = 0, // degrees
// largeArc = 0, // 1 or 0
// sweep = 1; // 1 or 0
// // Self edge.
// if ( x1 === x2 && y1 === y2 ) {
// // Fiddle with this angle to get loop oriented.
// xRotation = -45;
// // Needs to be 1.
// largeArc = 1;
// // Change sweep to change orientation of loop.
// //sweep = 0;
// // Make drx and dry different to get an ellipse
// // instead of a circle.
// drx = 30;
// dry = 20;
// // For whatever reason the arc collapses to a point if the beginning
// // and ending points of the arc are the same, so kludge it.
// x2 = x2 + 1;
// y2 = y2 + 1;
// }
// return "M" + x1 + "," + y1 + "A" + drx + "," + dry + " " + xRotation + "," + largeArc + "," + sweep + " " + x2 + "," + y2;
// });
node.attr("transform", function(d) { return "translate(" + d.x + "," + d.y + ")"; }); node.attr("transform", function(d) { return "translate(" + d.x + "," + d.y + ")"; });
} }
@ -974,6 +1018,18 @@ function moveViewboxPx(dx, dy){
// selectNode(currentNodeIdx+1); // selectNode(currentNodeIdx+1);
// positionNodesInCenter(currentNodeIdx); // positionNodesInCenter(currentNodeIdx);
selectNode(graph['nodes'].length - 1); selectNode(graph['nodes'].length - 1);
// closeDetails(); // hide details at first // closeDetails(); // hide details at first
// positionNodesInCenter(currentNodeIdx+1); // positionNodesInCenter(currentNodeIdx+1);
// setTimeout(function(){
// document.body.classList.add('graphInitialised');
// }, 10);
let initPlaceholder = document.getElementById('initPlaceholder');
svg.node().removeChild(initPlaceholder);
setTimeout(function(){
graphInitialised = true;
document.body.classList.add('graphInitialised');
}, 500);
} }


@ -3,13 +3,14 @@ $detailsWidth: 740px;
$detailSlide: -1 * ($detailsWidth); $detailSlide: -1 * ($detailsWidth);
$detailSlideMobile: -30vh; $detailSlideMobile: -30vh;
@import url('/assets/fonts/Bright/cmun-bright.css'); @import url('/assets/fonts/FluxischElse/webfonts/fluxischelse.css');
body{ body{
margin:0;overflow: hidden; margin:0;overflow: hidden;
font-family: "CMU Bright", "Computer Modern Bright", sans-serif; font-family: "Fluxisch Else", helvetica, sans-serif;
height: 100vh; height: 100vh;
background: black; background: black;
font-size: 12pt;
} }
a, a:link, a:visited{ a, a:link, a:visited{
@ -31,8 +32,8 @@ a:active{
100%{background-position:0% 50%} 100%{background-position:0% 50%}
}*/ }*/
svg{ svg{
width:100vw; width:100%;
height: calc(100vh - 40px); height: 100%;
cursor: grab; cursor: grab;
} }
svg.dragging{ svg.dragging{
@ -42,14 +43,24 @@ g.node{
cursor: pointer; cursor: pointer;
stroke: blue; stroke: blue;
stroke-width: 0; stroke-width: 0;
transition: stroke-width .5s; transition: stroke-width .5s, opacity 0s;
transition-delay: 0s, 1s;
opacity:0;
.graphInitialised &{
opacity: 1; opacity: 1;
// pointer-events: none; }
transition: opacity 1s;
&.centeredNode{
opacity: 1;
transition: stroke-width .5s, opacity 0s;
transition-delay: 0s, 0s;
}
&.visibleNode{ &.visibleNode{
opacity: 1; // opacity: 1;
pointer-events: auto; pointer-events: auto;
transition-delay: 0s, 0s;
*{ *{
// transform: scale(1); // transform: scale(1);
} }
@ -70,6 +81,7 @@ g.node{
stroke-width: 1px; stroke-width: 1px;
stroke: yellow; stroke: yellow;
} }
} }
&:hover{ &:hover{
@ -79,6 +91,7 @@ g.node{
} }
.nodeBg{ .nodeBg{
fill: yellow; fill: yellow;
stroke: yellow;
} }
} }
@ -100,14 +113,23 @@ g.node{
text.nodeTitle{ text.nodeTitle{
text-anchor: middle; text-anchor: middle;
// text-anchor: start; // text-anchor: start;
font-family: "CMU Bright", sans-serif; // font-family: "CMU Bright", sans-serif;
font-size: 10pt; font-size: 10pt;
tspan{
text-anchor:middle;
}
} }
} }
.relationship{ .relationship{
display:none; display:none;
// opacity: .2; opacity: 0;
transition: opacity .5s;
body.graphInitialised &{
opacity: 1;
}
&.visibleLink{ &.visibleLink{
display:block; display:block;
@ -124,7 +146,7 @@ g.node{
text{ text{
fill:black; fill:black;
font-family: "Noto Mono", monospace; // font-family: "Noto Mono", monospace;
font-size: 9pt; font-size: 9pt;
// font-size: 75%; // font-size: 75%;
display:none; display:none;
@ -154,10 +176,11 @@ g.node{
circle.nodeBg{ circle.nodeBg{
fill: white; fill: white;
// fill: url(#img1); // fill: url(#shade1);
// fill:url(#blueGrad); // fill:url(#blueGrad);
stroke-width: 3px; stroke-width: 3px;
stroke: black; stroke: white;
// stroke: black;
.visibleNode & { .visibleNode & {
stroke: yellow; stroke: yellow;
@ -282,15 +305,18 @@ text{
padding-top: 40px; padding-top: 40px;
font-size: 120%; font-size: 120%;
} }
dl:last-child{
margin-bottom: 250px;
}
dt{ dt{
float:left; float:left;
width: 120px; width: 120px;
font-weight:bold; font-weight:bold;
min-height:25px; min-height:25px;
clear:both;
} }
dd{ dd{
min-height:25px; min-height:30px;
} }
dd:not(.nodeTitleNr1) { dd:not(.nodeTitleNr1) {
margin-left: 130px; margin-left: 130px;
@ -361,10 +387,14 @@ svg#portfolioGraph {
text-align: right; text-align: right;
cursor: pointer; cursor: pointer;
&::before{ &::before{
content:"..."; content:"";
font-size:150%;
position: relative;
top: 4px;
} }
&:hover{ &:hover{
text-decoration:unline; color: red;
text-decoration: none;
} }
.showMoreLinks & { .showMoreLinks & {
pointer-events:none; pointer-events:none;
@ -396,7 +426,19 @@ svg#portfolioGraph {
@media (max-width: 1000px) { @media (max-width: 1000px) {
body{ body{
overflow:auto; overflow-y: auto;
overflow-x: hidden;
font-size: 16pt;
}
svg#portfolioGraph{
position: fixed;
top:0;
left: -35vw;
width:160vw;
height: 100vh;
z-index: -5;
// margin-left:-35vw;
} }
#nodeDetails{ #nodeDetails{
@ -408,17 +450,17 @@ svg#portfolioGraph {
padding: $detailsPadding; padding: $detailsPadding;
/* opacity: 0; */ /* opacity: 0; */
height:auto; height:auto;
margin-top:0; min-height: 100vh;
margin-top:100vh;
transition: margin 1s; transition: margin 1s;
} }
body.detailsOpen{ body.detailsOpen{
#nodeDetails{ #nodeDetails{
displaY:block; displaY:block;
margin-top: $detailSlideMobile; margin-top: calc(100vh + #{$detailSlideMobile});
position: relative; position: relative;
z-index: 1000; z-index: 1000;
min-height:$detailSlideMobile * -1; // min-height:$detailSlideMobile * -1;
} }
svg#portfolioGraph{ svg#portfolioGraph{
right: 0; right: 0;