# 01_preprocessing_haum.R
#
# content: (1) Parse raw log files
#          (2) Create event logs
#          (3) Add meta data
#
# input:   raw log files from ../data/haum/*.log
#          ../data/metadata/feiertage.csv
#          ../data/metadata/schulferien_2016-2018_NI.csv
#          ../data/metadata/schulferien_2019-2025_NI.csv
# output:  raw_logfiles_<timestamp>.csv
#          event_logfiles_<timestamp>.csv
#          event_logfiles_glossar_<timestamp>.csv
#
# last mod: 2023-10-23, NW

# setwd("C:/Users/nwickelmaier/Nextcloud/Documents/MDS/2023ss/60100_master_thesis/code")

#library(mtt)
devtools::load_all("../../../../software/mtt")

now <- format(Sys.time(), "%Y-%m-%d_%H-%M-%S")
#now <- "2023-09-23_01-31-30"

#--------------- (1) Parse raw log files ---------------

#path <- "../data/haum/LogFiles/"
#folders <- dir(path)
#folders <- "2016"

#datraw <- parse_logfiles(folders, path)

datraw <- read.table("results/haum/raw_logfiles_2023-10-25_16-20-45.csv",
                     sep = ";", header = TRUE)

## Export data

#write.table(datraw, paste0("results/haum/raw_logfiles_small_", now, ".csv"),
#            sep = ";", row.names = FALSE)

#--------------- (2) Create event logs ---------------

datlogs <- create_eventlogs(datraw,
                            xmlpath = "../data/haum/ContentEyevisit/eyevisit_cards_light/",
                            glossar = TRUE, save = TRUE)

artworks <- unique(datlogs$artwork)

topics <- extract_topics(artworks, xmlfiles = paste0(artworks, ".xml"),
                         xmlpath = "../data/haum/ContentEyevisit/eyevisit_cards_light/")

datlogs_topics <- add_topic(datlogs, topics = topics)

#--------------- (3) Add meta data ---------------

## Read data for holidays

hd0 <- read.table("../data/metadata/feiertage.csv", sep = ";", header = TRUE)
hd0$X.br. <- NULL   # drop artifact column (presumably from a "<br>" field in the raw file)

hd <- hd0[hd0$Abkuerzung == "NI", ]
names(hd) <- c("state", "stateCode", "date", "holiday")
hd$date <- as.Date(hd$date)   # Date (not POSIXct) so it matches datlogs_topics$date in the merge below

## Read data for school vacations

# https://ferien-api.de/#holidaysPerStateAndYear
# Data extracted (on Linux) via:
# curl https://ferien-api.de/api/v1/holidays/NI > schulferien_NI.json

# library(jsonlite)
#
# dat <- read_json("data/metadata/schulferien_NI.json", simplify = TRUE)
# dat$slug <- NULL
#
# dat$name <- paste0(gsub("^(.*).niedersachsen.*", "\\1", dat$name),
#                    gsub("^.*niedersachsen [0-9]{4}(.*)", "\\1",
#                         dat$name))
#
# write.table(dat, "data/metadata/schulferien_2019-2025_NI.csv", sep = ";",
#             row.names = FALSE, quote = FALSE)

sf1 <- read.table("../data/metadata/schulferien_2016-2018_NI.csv", sep = ";",
                  header = TRUE)
sf2 <- read.table("../data/metadata/schulferien_2019-2025_NI.csv", sep = ";",
                  header = TRUE)

sf <- rbind(sf1, sf2)
sf$start <- as.Date(sf$start)
sf$end <- as.Date(sf$end)

sfdat <- NULL

for (i in seq_len(nrow(sf))) {
  date <- seq(sf$start[i], sf$end[i], by = 1)
  sfdat <- rbind(sfdat, data.frame(date, vacations = sf$name[i],
                                   stateCodeVacations = sf$stateCode[i]))
}
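
# An equivalent one-shot construction that avoids growing sfdat inside the
# loop (just a sketch; assumes the same columns and gives the same result):
#sfdat <- do.call(rbind,
#                 lapply(seq_len(nrow(sf)), function(i)
#                   data.frame(date = seq(sf$start[i], sf$end[i], by = 1),
#                              vacations = sf$name[i],
#                              stateCodeVacations = sf$stateCode[i])))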

# TODO: How to handle stateCode? Some data sets will contain several states;
#       not relevant here, since only NI is used.
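
# A minimal sketch of how this could be handled if several states were
# present (hypothetical; the files read above contain NI only):
#sfdat <- sfdat[sfdat$stateCodeVacations == "NI", ]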

## Merge data

datlogs_topics$date <- as.Date(datlogs_topics$date.start)

dat1 <- merge(datlogs_topics, hd, by = "date", all.x = TRUE)
dat2 <- merge(dat1, sfdat, by = "date", all.x = TRUE)
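
# Quick sanity check (a sketch; assumes holiday and vacation periods do not
# overlap, so the left joins should not duplicate any events):
#stopifnot(nrow(dat2) == nrow(datlogs_topics))
#table(is.na(dat2$holiday))
#table(is.na(dat2$vacations))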

## Export data

write.table(dat2, paste0("results/haum/event_logfiles_glossar_", now, ".csv"),
            sep = ";", row.names = FALSE)

# TODO: Maybe add information about the artworks?