Updated script for conformance checking

Nora Wickelmaier 2024-01-30 10:40:42 +01:00
parent b469ccfbcf
commit 6ade6444ac


@@ -8,11 +8,10 @@ from python_helpers import eval_pm, pn_infos_miner
###### Load data and create event logs ######
dat = pd.read_csv("results/haum/event_logfiles_2024-01-18_09-58-52.csv", sep = ";")
#dat = dat[dat["date.start"] < "2020-03-13"]
# --> only pre corona (before artworks were updated)
-event_log = pm4py.format_dataframe(dat, case_id='path', activity_key='event',
-timestamp_key='date.start')
+event_log = pm4py.format_dataframe(dat, case_id = "path",
+activity_key = "event",
+timestamp_key = "date.start")
###### Descriptives of log data ######
@@ -36,7 +35,7 @@ len(variants_no_move)
sorted_variants_no_move = dict(sorted(variants_no_move.items(), key=lambda item: item[1], reverse = True))
{k: sorted_variants_no_move[k] for k in list(sorted_variants_no_move)[:20]}
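The object variants_no_move is built earlier in the script and is not part of this diff. Judging by the name, it maps each trace variant of the log without "move" events to its frequency; a minimal sketch under that assumption (the "move" event label and the get_variants return type are assumptions; recent pm4py versions return variant counts):

import pm4py
# Hypothetical reconstruction, not part of the commit: drop the high-frequency
# "move" events and count the remaining trace variants.
event_log_no_move = event_log[event_log["event"] != "move"]
variants_no_move = pm4py.get_variants(event_log_no_move)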
-###### Read "conformative" Petri Net ######
+###### Check against "conformative" Petri Net ######
basenet, initial_marking, final_marking = pm4py.read_pnml("results/haum/conformative_petrinet_con.pnml")
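The replay that fills replayed_traces and index_broken (used in the next hunk) lies between the hunks and is not shown. A minimal sketch of how such a token-based replay is typically run with pm4py, assuming the script does roughly this (variable names follow the script; the reconstruction itself is an assumption):

import pm4py
# Replay every trace on the conformative net; each result records whether the
# trace fits and which tokens were missing or left over.
replayed_traces = pm4py.conformance_diagnostics_token_based_replay(
    event_log, basenet, initial_marking, final_marking)
# One way index_broken could be derived: indices of traces that do not fit the net.
index_broken = [i for i, t in enumerate(replayed_traces) if not t["trace_is_fit"]]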
@@ -65,14 +64,13 @@ l4.count([])
broken_traces = [replayed_traces[i] for i in index_broken]
-event_log[event_log['@@case_index'] == index_broken[0]].event
-event_log[event_log['@@case_index'] == index_broken[0]].path.unique().tolist()
-event_log[event_log['@@case_index'] == index_broken[0]].item.unique().tolist()
-event_log[event_log['@@case_index'] == index_broken[0]]["fileId.start"].unique().tolist()
+event_log[event_log["@@case_index"] == index_broken[0]].event
+event_log[event_log["@@case_index"] == index_broken[0]].path.unique().tolist()
+event_log[event_log["@@case_index"] == index_broken[0]].item.unique().tolist()
+event_log[event_log["@@case_index"] == index_broken[0]]["fileId.start"].unique().tolist()
# --> logging error in raw file
-# Footprints
+## Footprints
from pm4py.algo.discovery.footprints import algorithm as footprints_discovery
from pm4py.visualization.footprints import visualizer as fp_visualizer
fp_log = footprints_discovery.apply(event_log, variant=footprints_discovery.Variants.ENTIRE_EVENT_LOG)
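fp_visualizer is imported above but only used outside the shown lines. A typical continuation also derives the footprints of the conformative net and renders the matrix, to compare log and model behaviour (a sketch of a plausible continuation, not necessarily what the script does):

# Footprints of the conformative Petri net, for comparison with fp_log.
fp_net = footprints_discovery.apply(basenet, initial_marking, final_marking)
# Render the log footprints matrix (the model footprints can be viewed the same way).
gviz = fp_visualizer.apply(fp_log)
fp_visualizer.view(gviz)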
@@ -85,7 +83,7 @@ efg_graph = pm4py.discover_eventually_follows_graph(event_log)
## Directly-follows graph
dfg, start_activities, end_activities = pm4py.discover_dfg(event_log)
pm4py.view_dfg(dfg, start_activities, end_activities)
-pm4py.save_vis_dfg(dfg, start_activities, end_activities, 'results/processmaps/dfg_complete_python.png')
+pm4py.save_vis_dfg(dfg, start_activities, end_activities, "results/processmaps/dfg_complete_python.png")
## Fitting different miners
@@ -96,42 +94,39 @@ eval = pd.DataFrame(columns = ["fitness", "precision", "generalizability",
for miner in ["conformative", "alpha", "heuristics", "inductive", "ilp"]:
eval = pd.concat([eval, pn_infos_miner(event_log, miner)])
## Export for all miners
eval.to_csv("results/eval_all-miners_complete.csv", sep = ";")
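pn_infos_miner comes from python_helpers and is not shown in this diff. Given the columns of eval, it presumably discovers (or, for "conformative", loads) a Petri net for the named miner and scores it against the log. A rough stand-in under that assumption (illustrative only, not the helper's actual code; the soundness, net-size and variant columns are omitted here):

import pandas as pd
import pm4py

def pn_infos_miner_sketch(log, miner):
    # Hypothetical stand-in for python_helpers.pn_infos_miner.
    discover = {"alpha": pm4py.discover_petri_net_alpha,
                "heuristics": pm4py.discover_petri_net_heuristics,
                "inductive": pm4py.discover_petri_net_inductive,
                "ilp": pm4py.discover_petri_net_ilp}
    if miner == "conformative":
        net, im, fm = pm4py.read_pnml("results/haum/conformative_petrinet_con.pnml")
    else:
        net, im, fm = discover[miner](log)
    fitness = pm4py.fitness_token_based_replay(log, net, im, fm)["average_trace_fitness"]
    precision = pm4py.precision_token_based_replay(log, net, im, fm)
    return pd.DataFrame({"fitness": fitness, "precision": precision}, index = [miner])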
## Without broken trace
-event_log_clean = event_log[event_log['@@case_index'] != index_broken[0]]
+event_log_clean = event_log[event_log["@@case_index"] != index_broken[0]]
eval_clean = pd.DataFrame(columns = ["fitness", "precision", "generalizability",
"simplicity", "sound", "narcs", "ntrans",
"nplaces", "nvariants", "mostfreq"])
for miner in ["conformative", "alpha", "heuristics", "inductive", "ilp"]:
eval_clean = pd.concat([eval_clean, pn_infos_miner(event_log_clean, miner)])
eval_clean.to_csv("results/eval_all-miners_clean.csv", sep = ";")
-# Export petri nets
-h_net, h_im, h_fm = pm4py.discover_petri_net_heuristics(event_log_clean)
-a_net, a_im, a_fm = pm4py.discover_petri_net_alpha(event_log_clean)
-i_net, i_im, i_fm = pm4py.discover_petri_net_inductive(event_log_clean)
-ilp_net, ilp_im, ilp_fm = pm4py.discover_petri_net_ilp(event_log_clean)
-pm4py.vis.save_vis_petri_net(h_net, h_im, h_fm, "results/processmaps/petrinet_heuristics_clean.png")
-pm4py.vis.save_vis_petri_net(a_net, a_im, a_fm, "results/processmaps/petrinet_alpha_clean.png")
-pm4py.vis.save_vis_petri_net(i_net, i_im, i_fm, "results/processmaps/petrinet_inductive_clean.png")
-pm4py.vis.save_vis_petri_net(ilp_net, ilp_im, ilp_fm, "results/processmaps/petrinet_ilp_clean.png")
+## Export petri nets
+pm4py.vis.save_vis_petri_net(basenet, initial_marking, final_marking, "results/processmaps/petrinet_conformative.png")
+h_net, h_im, h_fm = pm4py.discover_petri_net_heuristics(event_log_clean)
+pm4py.vis.save_vis_petri_net(h_net, h_im, h_fm, "results/processmaps/petrinet_heuristics_clean.png")
+a_net, a_im, a_fm = pm4py.discover_petri_net_alpha(event_log_clean)
+pm4py.vis.save_vis_petri_net(a_net, a_im, a_fm, "results/processmaps/petrinet_alpha_clean.png")
+i_net, i_im, i_fm = pm4py.discover_petri_net_inductive(event_log_clean)
+pm4py.vis.save_vis_petri_net(i_net, i_im, i_fm, "results/processmaps/petrinet_inductive_clean.png")
+ilp_net, ilp_im, ilp_fm = pm4py.discover_petri_net_ilp(event_log_clean)
+pm4py.vis.save_vis_petri_net(ilp_net, ilp_im, ilp_fm, "results/processmaps/petrinet_ilp_clean.png")
# convert to BPMN
base_bpmn = pm4py.convert.convert_to_bpmn(basenet, initial_marking, final_marking)
pm4py.vis.save_vis_bpmn(base_bpmn, "results/processmaps/bpmn_conformative.png")
i_bpmn = pm4py.convert.convert_to_bpmn(i_net, i_im, i_fm)
pm4py.vis.save_vis_bpmn(i_bpmn, "results/processmaps/bpmn_inductive_clean.png")
ilp_bpmn = pm4py.convert.convert_to_bpmn(ilp_net, ilp_im, ilp_fm)
pm4py.vis.save_vis_bpmn(ilp_bpmn, "results/processmaps/bpmn_ilp_clean.png")
a_bpmn = pm4py.convert.convert_to_bpmn(a_net, a_im, a_fm)
pm4py.vis.save_vis_bpmn(a_bpmn, "results/processmaps/bpmn_alpha_clean.png")
h_bpmn = pm4py.convert.convert_to_bpmn(h_net, h_im, h_fm)
pm4py.vis.save_vis_bpmn(h_bpmn, "results/processmaps/bpmn_heuristics_clean.png")
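The saved PNGs can also be checked interactively before relying on them; an optional step that is not part of this commit:

# Optional interactive inspection of the conformative model and its BPMN version.
pm4py.view_petri_net(basenet, initial_marking, final_marking)
pm4py.view_bpmn(base_bpmn)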