mtt_haum/code/python_helpers.py

import pm4py
import pandas as pd

###### Extract metadata for petri nets on filtered logs ######

def eval_pm(data, net, initial_marking, final_marking):
    """Calculate fitness, precision, generalizability, and simplicity for a Petri net"""
    fitness = pm4py.fitness_token_based_replay(data, net, initial_marking, final_marking)
    precision = pm4py.precision_token_based_replay(data, net, initial_marking, final_marking)
    generalizability = pm4py.algo.evaluation.generalization.algorithm.apply(data, net,
            initial_marking, final_marking)
    simplicity = pm4py.algo.evaluation.simplicity.algorithm.apply(net)
    return [fitness['average_trace_fitness'], precision, generalizability, simplicity]

def pn_infos(log, colname, filter):
    """Create data frame with relevant infos for petri nets on filtered logs"""
    filtered_log = pm4py.filter_event_attribute_values(log, colname, [filter])
    net, im, fm = pm4py.discover_petri_net_inductive(filtered_log)
    eval = eval_append(filtered_log, net, im, fm)
    eval.index = [str(filter).zfill(3)]
    return eval
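
# Hypothetical usage sketch (not part of the original helpers): build an
# overview table by applying pn_infos() to every value of an event attribute,
# e.g. an "item" column, and concatenating the rows. The log path and column
# name are assumptions for illustration only.
#
#   log = pm4py.read_xes("results/haum/event_log.xes")
#   items = pm4py.get_event_attribute_values(log, "item")
#   overview = pd.concat([pn_infos(log, "item", i) for i in items]).sort_index()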

def pn_infos_miner(log, miner):
    """Create data frame with relevant infos for petri nets created with
    different miners"""
    if miner == "alpha":
        net, im, fm = pm4py.discover_petri_net_alpha(log)
    elif miner == "heuristics":
        net, im, fm = pm4py.discover_petri_net_heuristics(log)
    elif miner == "ilp":
        net, im, fm = pm4py.discover_petri_net_ilp(log)
    elif miner == "inductive":
        net, im, fm = pm4py.discover_petri_net_inductive(log)
    elif miner == "conformative":
        net, im, fm = pm4py.read_pnml("results/haum/conformative_petrinet_con.pnml")
    eval = eval_append(log, net, im, fm)
    eval.index = [miner]
    return eval

def eval_append(log, net, im, fm):
    """Append soundness, model size, and variant information to the evaluation metrics"""
    eval = eval_pm(log, net, im, fm)
    is_sound = pm4py.check_soundness(net, im, fm)
    eval.append(is_sound[0])
    eval.append(len(net.arcs))
    eval.append(len(net.transitions))
    eval.append(len(net.places))
    variants = pm4py.get_variants(log)
    eval.append(len(variants))
    # Keep only the most frequent variant
    sorted_variants = dict(sorted(variants.items(), key=lambda item: item[1], reverse=True))
    eval.append({k: sorted_variants[k] for k in list(sorted_variants)[:1]})
    eval = pd.DataFrame(eval).T
    eval.columns = ["fitness", "precision", "generalizability", "simplicity",
                    "sound", "narcs", "ntrans", "nplaces", "nvariants", "mostfreq"]
    return eval
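

# A minimal, hypothetical demo (not part of the original helpers): compare the
# discovery miners supported by pn_infos_miner() on a single event log. The XES
# path is an assumption for illustration and the block only runs when the file
# is executed directly, not on import.
if __name__ == "__main__":
    log = pm4py.read_xes("results/haum/event_log.xes")  # assumed path
    miners = ["alpha", "heuristics", "ilp", "inductive"]
    print(pd.concat([pn_infos_miner(log, m) for m in miners]))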