Refactor for use with new reporting format
parent 81ca382fbf
commit f8a506bb77
4 changed files with 184 additions and 86 deletions
@@ -1,8 +1,9 @@
import csv
import json

node_names = set()
edges = []
# node_names = set()
movements = []
items = [] #edges

libraries = {}
locations = {}

@@ -30,25 +31,41 @@ with open("data/locaties.csv") as fp:
        libraries[library['name']] = library
        locations[location['code']] = location

def filter_date(date: str):

    date = date.replace('cop.', '©').removeprefix('[').removesuffix(']')
    if len(date) and date[-1] == '.':
        date = date[:-1]
    return date


with open("data/requests.csv") as fp:
    reader = csv.DictReader(fp, delimiter=";")
with open("data/batch2/Rapport_transit_1.csv", encoding='utf-8-sig') as fp:
    # items
    reader = csv.DictReader(fp, delimiter=",")
    for item in reader:
        node_names.add(item['Owning Library Name'])
        node_names.add(item['Pickup Location'])
        edges.append(item)
        item['Publication Date'] = filter_date(item['Publication Date'])
        item['Sort Date'] = item['Publication Date'][-4:] # some dates are ranges, only sort by last year
        items.append(item)


nodes = [{'name': n} for n in node_names]
with open("data/batch2/Rapport_transit_2.csv", encoding='utf-8-sig') as fp:
    # movements
    reader = csv.DictReader(fp, delimiter=",")
    for item in reader:
        movements.append(item)

print(f"{len(nodes)} nodes, {len(edges)} edges")

# nodes = [{'name': n} for n in node_names]

print(f"{len(libraries)} nodes, {len(movements)} movements of {len(items)} items")

data = {
    'nodes': list(libraries.values()), #nodes,
    'edges': edges
    'libraries': list(libraries.values()), #nodes,
    'movements': movements, #edges
    'items': items, # item bibliographical data
}

fn = 'data/parsed_requests.json'
fn = 'data/parsed_transits.json'
with open(fn, 'w') as fp:
    json.dump(data, fp)
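
For orientation (not part of the commit): a minimal sketch of the shape this script now writes to data/parsed_transits.json, inferred from the data dict above and from the fields the import helpers below read. The record keys mirror the raw CSV column headers; the field lists here are illustrative, not exhaustive.

// Sketch only: assumed shape of data/parsed_transits.json, inferred from the parse script.
type ParsedTransits = {
  libraries: Array<Record<string, string>>; // from data/locaties.csv: name, code, adres, lat, lon, ...
  movements: Array<Record<string, string>>; // from Rapport_transit_2.csv: "Barcode",
                                            // "Transit From Library Name", "Transit To Library Name",
                                            // "Event Start Date and Time", "Event End Date and Time", ...
  items: Array<Record<string, string>>;     // from Rapport_transit_1.csv: "Barcode", "Title",
                                            // "Publication Date", "Sort Date", "Subjects", ...
};
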
@@ -1,5 +1,5 @@
<script lang="ts">
  import parsed_requests from "/data/parsed_requests.json";
  import parsed_transits from "/data/parsed_transits.json";
  import { draw, slide, fade } from "svelte/transition";
  import { fps } from "@sveu/browser";
  import LibrariesSvg from "./LibrariesSvg.svelte";
@@ -14,6 +14,8 @@
    type Data,
    type VizData,
    type Log,
    importItem,
    importMovement,
  } from "./lib/types";

  // these are passed from main.ts (or vice versa)
@@ -48,21 +50,16 @@
    ]),
  );

  $: console.log(parsed_requests);
  $: console.log(parsed_transits);

  // preprocess data
  const _nodes: Location[] = parsed_requests.nodes;
  const _requests: Array<Object> = parsed_requests.edges;

  const items = new Map<string, Item>();
  const movements: Array<Movement> = [];

  // keep only nodes that have both Latitude and Longitude,
  // then map these coordinates to the canvas space
  const locations = new Map(
    _nodes
      .filter((n) => n.lat && n.lon)
      .map((node) => {
  const locations = new Map<string, Location>(
    parsed_transits.libraries
      .filter((n: Location) => n.lat && n.lon)
      .map((node: Location) => {
        node["x"] = node_positions[node.code][0];
        node["y"] = node_positions[node.code][1];
        return node;
@@ -70,38 +67,35 @@
      .map((d) => [d["name"], d]),
  );

  _requests
  const items = new Map<string, Item>(
    parsed_transits.items.map((i) => [
      i["Barcode"],
      importItem(i, locations),
    ]),
  );

  const movements: Array<Movement> = [];
  parsed_transits.movements
    // remove entries that stay at the same place
    .filter((n) => n["Owning Library Name"] != n["Pickup Location"])
    .filter(
      (l) =>
        locations.has(l["Owning Library Name"]) &&
        locations.has(l["Pickup Location"]),
    )
    // .filter((n) => n["Owning Library Name"] != n["Pickup Location"])
    // .filter(
    //   (l) =>
    //     locations.has(l["Owning Library Name"]) &&
    //     locations.has(l["Pickup Location"]),
    // )
    .forEach((r, idx) => {
      const identifier: String = r["Barcode"];
      if (!items.has(identifier)) {
        items.set(identifier, {
          title: r["Title (Complete)"],
          MMS: r["MMS Id"],
          Barcode: r["Barcode"],
          Publisher: r["Publisher"],
          _original: r,
        });
      let movement: Movement | null = importMovement(
        idx,
        r,
        locations,
        items,
      );
      if (movement === null) {
        return;
      }
      let movement: Movement = {
        source: locations.get(r["Owning Library Name"]),
        target: locations.get(r["Pickup Location"]),
        nr: idx,
        item: items.get(identifier),
        date: new Date(r["Request Completion Date"]), // TODO: validate unfortunate M/D/Y format
        _original: r,
        d: "", // bit of a hacky workaround, refactor in object
      };
      movement.d = get_path_d(movement);
      movements.push(movement);
    });
  console.log(items, movements);
  console.log(locations, items, movements);

  // const edgeIndexBarcode = buildIndex(edges, "Barcode");
  // console.log(edgeIndexBarcode);
@@ -166,7 +160,7 @@
  };

  let drawn_motions = writable(<Motion[]>[]); //.filter((m, i) => i < 100);
  $: opacity = Math.max(0.055, Math.min(.3, 70 / $drawn_motions.length));
  $: opacity = Math.max(0.055, Math.min(0.3, 70 / $drawn_motions.length));
  let overlay_motions = writable(<Motion[]>[]); //.filter((m, i) => i < 100);
  let events = writable(<Log[]>[]); //.filter((m, i) => i < 100);
  let current_item = writable(<Item | null>null); //.filter((m, i) => i < 100);
@@ -246,9 +240,10 @@
  </g>
  <g id="overlay_motions">
    {#each $overlay_motions as m}
      <path out:fade={{ duration: 1000 }}
        in:draw={{ duration: m.duration }}
        d={m.movement.d}
      <path
        out:fade={{ duration: 1000 }}
        in:draw={{ duration: m.duration }}
        d={m.movement.d}
      ></path>
    {/each}
  </g>
@@ -344,7 +339,12 @@
    fill: none;
    animation: highlight-on-insert 1s 1;
  }
  @keyframes highlight-on-insert{10% {opacity: .7; stroke:#ff89ff}}
  @keyframes highlight-on-insert {
    10% {
      opacity: 0.7;
      stroke: #ff89ff;
    }
  }
  #overlay_motions path {
    stroke: rgba(255, 255, 0, 0.641);
    stroke-width: 5;
@@ -357,6 +357,5 @@

  #events {
    max-height: 50px;

  }
</style>
@@ -1,6 +1,12 @@
import type { Writable } from "svelte/store";

export type Location = Object;
export type Location = {
  name: String,
  code: String,
  adres: String,
  lat: number,
  lon: number
};

// disambiguated (physical or online) library object
export type Item = {
@@ -8,9 +14,42 @@ export type Item = {
  MMS: String,
  Barcode: String, // String because of preceding 0
  Publisher: String,
  Date: String, // is not always simply a year, messy field!
  _Sort_Date: String, // in case of ranges this has the year to sort by
  Place: String,
  Subjects: String,
  owning_location: Location
  _original: Object
}

export function importItem(orig: Object, locations: Map<string, Location>): Item {
  // Barcode
  // Request Date
  // Request Time
  // Request Completion Time
  // Request Completion Date
  // Title
  // Author
  // Publication Date
  // Publication Place
  // Place of Publication - Country
  // Subjects
  // Creation Date
  // Owning Library Name (Active)
  return {
    Barcode: orig['Barcode'],
    title: orig['Title'],
    MMS: orig['MMS'],
    Publisher: orig['Publisher'],
    Date: orig['Publication Date'],
    _Sort_Date: orig['Sort Date'], //
    Place: orig['Publication Place'],
    Subjects: orig['Subjects'],
    owning_location: locations.get(orig['Owning Library Name (Active)']),
    _original: orig
  }
}

// A movement of the object (Request in Alma Analytics)
export type Movement = {
  nr: number, // unique identifier in the set
@@ -20,9 +59,46 @@ export type Movement = {
  _original: Object
  // also contains additional request data (see requests.csv)
  d: String, // the svg path
  date: Date,
  start_date: Date,
  end_date: Date,
};

export function importMovement(idx: number, orig: Object, locations: Map<string, Location>, items: Map<string, Item>): Movement | null {
  // Barcode
  // Transit From Library Name
  // Transit To Library Name
  // Event Start Date and Time
  // Event type description
  // Event End Date and Time
  const source = locations.get(orig["Transit From Library Name"])
  if (!source) {
    console.error("No valid source for movement", orig["Transit From Library Name"], orig)
    return null;
  }
  const target = locations.get(orig["Transit To Library Name"])
  if (!target) {
    console.error("No valid target for movement", orig["Transit To Library Name"], orig)
    return null;
  }
  const item = items.get(orig['Barcode'])
  if (!item) {
    console.error("No valid item for movement", orig['Barcode'], orig);
    return null;
  }
  const movement = {
    nr: idx,
    source: source,
    target: target,
    item: item,
    start_date: new Date(orig["Event Start Date and Time"]), // TODO: validate unfortunate M/D/Y format
    end_date: new Date(orig["Event End Date and Time"]), // TODO: validate unfortunate M/D/Y format
    _original: orig,
    d: "", // bit of a hacky workaround, refactor in object
  };
  movement.d = get_path_d(movement);
  return movement
}

export type Occurences = Map<Item, Movement[]>

export interface Data {
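
Both timestamps above go straight through new Date(...), which leaves the report's M/D/Y format to the JS engine, as the TODOs note. A hedged sketch of an explicit parser (not part of the commit; it assumes strings shaped like "M/D/YYYY h:mm:ss AM", which is only an assumption about the report export):

// Hypothetical helper, illustrative only: parse "M/D/YYYY h:mm:ss AM" explicitly
// instead of relying on locale-dependent Date string parsing.
export function parseMDY(value: string): Date | null {
  const m = value.match(/^(\d{1,2})\/(\d{1,2})\/(\d{4})(?:\s+(\d{1,2}):(\d{2})(?::(\d{2}))?\s*(AM|PM)?)?/i);
  if (!m) return null;
  const [, month, day, year, hour = "0", minute = "0", second = "0", meridiem] = m;
  let h = Number(hour);
  if (meridiem) {
    // 12-hour clock when a meridiem is present: 12 AM -> 0, 12 PM -> 12
    h = (h % 12) + (meridiem.toUpperCase() === "PM" ? 12 : 0);
  }
  return new Date(Number(year), Number(month) - 1, Number(day), h, Number(minute), Number(second));
}

Such a helper could then validate or replace the direct new Date(...) calls in importMovement.
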
@@ -13,14 +13,14 @@ export class Scene {
    this.nextScene = nextScene;
  }

  drawMovements(movements: Movement[], duration: number, in_overlay = false){
  drawMovements(movements: Movement[], duration: number, in_overlay = false) {
    let motions: Motion[] = movements.map((m) => ({ duration: duration, movement: m }));
    // TODO abstract in function drawMovements()
    // TODO in there, setTimeout for arrival effect on node

    const set = in_overlay ? this.viz_data.overlay_motions : this.viz_data.drawn_motions
    setTimeout(() => {
      set.update(items => ([...items, ...motions]))
      set.update(items => ([...items, ...motions]))
    }, 100);

    motions.forEach((motion) => {
@@ -40,7 +40,7 @@ export class Scene {
      }, duration - 250) // very brief to just trigger css anim
    })
  }


  stop() {
    console.log("TODO: stop timeout")
  }
@@ -51,11 +51,11 @@ export class All extends Scene {
  step: number = 0
  locationCounts = new Map<Location, { in: number, out: number }>()
  selected_movements: Movement[];


  options = {
    interval_days: .1,
    interval_days: 1,
    tick_interval: 1000, // ms
    items_per_tick: 1,
    items_per_tick: 1,
  }

  constructor(data: Data, viz_data: VizData, nextScene: CallableFunction) {
@@ -64,19 +64,24 @@ export class All extends Scene {
    // start setInterval to trigger additions per 100 or so to drawn_movements (rendered on map)
    // when done, trigger parent.done()
    // this.allMovements = data.movements;

    // sorted by date
    data.movements.sort((a,b) => a.date - b.date);
    const last_move_date = data.movements[data.movements.length-1].date

    const interval_ms = this.options.interval_days * 24 * 3600 * 1000

    const range = [new Date(last_move_date - interval_ms), last_move_date]
    console.log(range)
    this.selected_movements = data.movements.filter((movement) => movement.date > range[0] && movement.date <= range[1]);
    // sorted by date
    data.movements.sort((a, b) => a.end_date - b.end_date);
    const last_move_date = data.movements[data.movements.length - 1].end_date

    // start at midnight
    const interval_ms = (this.options.interval_days - 1) * 24 * 3600 * 1000
    const start_date = new Date(last_move_date - interval_ms)
    start_date.setHours(0)
    start_date.setMinutes(0)
    start_date.setSeconds(0)

    const range = [start_date, last_move_date]
    console.log('check for date', range)
    this.selected_movements = data.movements.filter((movement) => movement.end_date > range[0] && movement.end_date <= range[1]);
    console.log(`Draw ${this.selected_movements.length} movements, will take ${this.selected_movements.length / this.options.items_per_tick * this.options.tick_interval / 1000} seconds`)

    // TODO: group by hour and have it last nr-of-hours * interval
    // TODO: then, add a timeline
@@ -101,7 +106,7 @@ export class All extends Scene {
      return;
    }
    let movements: Movement[] = this.selected_movements.slice(this.step, this.step + n);


    // duration 5000 + Math.random() * 10000
    this.drawMovements(movements, 3500)
    this.step += n;
@@ -132,13 +137,13 @@ export class Timeline extends Scene {
    // when done, trigger parent.done()
    const min_occurences = 3;
    const pick = this.pickMovements(min_occurences);
    if( pick === null) {
    if (pick === null) {
      console.error(`No items which occur at least ${min_occurences} times`)
      setTimeout(this.nextScene.bind(this), 1000);


      return;
    }
    const [ item, movements ]= pick;
    const [item, movements] = pick;
    this.item = item
    this.viz_data.current_item.set(item);
    this.movements = movements
@@ -156,17 +161,17 @@ export class Timeline extends Scene {
   * @returns [Item, Movement[]]
   */
  pickMovements(min_occurences: number) {


    const item_movements = [...this.data.occurences]
      // TODO: variable length, prefer the item with the most steps
      .filter(([item, movements]) => movements.length >= min_occurences)

    if(!item_movements.length) {
      .filter(([item, movements]) => movements.length >= min_occurences)

    if (!item_movements.length) {
      return null
    }

    const pick = item_movements[Math.floor(Math.random() * item_movements.length)]
    pick[1].sort((a: Movement, b: Movement) => (a.date - b.date))
    pick[1].sort((a: Movement, b: Movement) => (a.end_date - b.end_date))
    return pick;
  }
@@ -177,12 +182,13 @@ export class Timeline extends Scene {
      this.nextScene()
      return;
    }


    // duration 5000 + Math.random() * 10000
    const mov = this.movements[this.step]
    this.drawMovements([mov], 2000, true)
    // const motion: Motion = { duration: 2000, movement: mov };
    const log: Log = { date: mov.date, title: `Transfer to ${mov.target.name}`, description: "" };
    // TODO: also consider end date
    const log: Log = { date: mov.start_date, title: `Transfer to ${mov.target.name}`, description: "" };
    // console.log(motion, motion.movement.source, motion.movement.target)
    // this.viz_data.overlay_motions.update(items => ([...items, motion]))
    this.viz_data.events.update(items => ([...items, log]))
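
Timeline.pickMovements draws from this.data.occurences, typed as Occurences = Map<Item, Movement[]> in lib/types; its construction is outside this diff. A hedged sketch of how such a map could be built from imported movements (assuming each Movement carries the item reference that importMovement sets, and that buildOccurences is a hypothetical helper name):

// Illustrative only: group movements per item so scenes can pick items
// that move at least min_occurences times.
function buildOccurences(movements: Movement[]): Map<Item, Movement[]> {
  const occurences = new Map<Item, Movement[]>();
  for (const movement of movements) {
    const list = occurences.get(movement.item) ?? [];
    list.push(movement);
    occurences.set(movement.item, list);
  }
  return occurences;
}

Each per-item list would then be what pickMovements sorts by end_date and returns as pick[1].
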