Added script to check on process models for case clusters; not sure this makes sense
This commit is contained in:
		
							parent
							
								
									b29790dfc1
								
							
						
					
					
						commit
						7eff903837
					
				
							
								
								
									
										45
									
								
								code/12_pm-case-clusters.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										45
									
								
								code/12_pm-case-clusters.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,45 @@
 | 
			
		||||
# 12_pm-case-clusters.py
#
# content: (1) Load data and create event log
#          (2) Infos for clusters
#          (3) Process maps for clusters
#
# input:  results/haum/eventlogs_2019_case-clusters.csv
# output: results/haum/pn_infos_clusters.csv
#         results/processmaps/petrinet_cluster<k>_cases.png
#         results/processmaps/bpmn_cluster<k>_cases.png
#
# last mod: 2024-03-06

import pm4py
import pandas as pd

from python_helpers import eval_pm, pn_infos

#--------------- (1) Load data and create event log ---------------

dat = pd.read_csv("results/haum/eventlogs_2019_case-clusters.csv", sep = ";")

# Event log keyed on case id, with items as activities.
# NOTE(review): assumes "date.start" parses as a timestamp — confirm format.
event_log = pm4py.format_dataframe(dat, case_id = "case", activity_key = "item",
                                   timestamp_key = "date.start")

#--------------- (2) Infos for clusters ---------------

# Fixed column order for the output CSV.
pn_cols = ["fitness", "precision", "generalizability", "simplicity", "sound",
           "narcs", "ntrans", "nplaces", "nvariants", "mostfreq"]

# Compute the Petri-net metrics per cluster, then assemble once: a single
# pd.concat avoids the quadratic grow-by-concat loop, and the result no
# longer shadows the builtin `eval`.
pn_eval = pd.concat([pn_infos(event_log, "cluster", cluster)
                     for cluster in event_log.cluster.unique().tolist()])
pn_eval = pn_eval.reindex(columns = pn_cols).sort_index()

pn_eval.to_csv("results/haum/pn_infos_clusters.csv", sep = ";")

#--------------- (3) Process maps for clusters ---------------

# One Petri net (and its BPMN rendering) per cluster.
for cluster in event_log.cluster.unique().tolist():
    subdata = event_log[event_log.cluster == cluster]
    subnet, subim, subfm = pm4py.discover_petri_net_inductive(subdata,
                                                              noise_threshold = .3)
    pm4py.save_vis_petri_net(subnet, subim, subfm,
       "results/processmaps/petrinet_cluster" + str(cluster) + "_cases.png")
    bpmn = pm4py.convert.convert_to_bpmn(subnet, subim, subfm)
    pm4py.vis.save_vis_bpmn(bpmn, "results/processmaps/bpmn_cluster" +
            str(cluster) + "_cases.png")
 | 
			
		||||
		Loading…
	
	
			
			x
			
			
		
	
		Reference in New Issue
	
	Block a user