from scheduling_environment.job import Job
from scheduling_environment.machine import Machine
from scheduling_environment.operation import Operation
from scheduling_environment.jobShop import JobShop

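# This script builds a custom flexible job shop instance with
# sequence-dependent setup times, loads it into the JobShop environment, and
# solves it with the solution methods from this repository (CP-SAT by default;
# a GA example is included below, commented out).
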
def parse(processing_info, instance_name="custom_problem_instance"):
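    """Build a JobShop environment from a processing_info dictionary.

    The expected structure of processing_info matches the example in the
    __main__ block below: the total number of machines, a list of jobs with
    their operations, per-machine processing times, operation predecessors,
    and a sequence-dependent setup-time matrix per machine.
    """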
    # Initialize JobShop
    jobShop = JobShop()
    jobShop.set_instance_name(instance_name)

    # Configure machines based on nr_machines in processing_info
    number_total_machines = processing_info["nr_machines"]
    for machine_id in range(number_total_machines):
        jobShop.add_machine(Machine(machine_id))
    jobShop.set_nr_of_machines(number_total_machines)

    # Configure jobs, operations, and processing times
    for job_info in processing_info["jobs"]:
        job = Job(job_id=job_info["job_id"])

        for operation_info in job_info["operations"]:
            operation = Operation(job, job_info["job_id"], operation_info["operation_id"])

            # Convert machine names (e.g., "machine_1") to numeric IDs for compatibility
            for machine_key, processing_time in operation_info["processing_times"].items():
                machine_id = int(machine_key.split("_")[1]) - 1
                operation.add_operation_option(machine_id, processing_time)

            job.add_operation(operation)
            jobShop.add_operation(operation)
        jobShop.add_job(job)
    jobShop.set_nr_of_jobs(len(processing_info["jobs"]))

    # Configure precedence relations between operations
    precedence_relations = {}
    for job_info in processing_info["jobs"]:
        for op_info in job_info["operations"]:
            if op_info["predecessor"] is not None:
                operation = jobShop.get_operation(op_info["operation_id"])
                predecessor_operation = jobShop.get_operation(op_info["predecessor"])
                operation.add_predecessors([predecessor_operation])
                precedence_relations[op_info["operation_id"]] = [predecessor_operation]
            else:
                precedence_relations[op_info["operation_id"]] = []

    # Configure sequence-dependent setup times for each machine and operation pair
    setup_times = processing_info["sequence_dependent_setup_times"]
    sequence_dependent_setup_times = {}

    for machine_key, setup_matrix in setup_times.items():
        machine_id = int(machine_key.split("_")[1]) - 1  # Convert e.g. "machine_1" to machine ID 0
        machine_setup_times = {}

        # Map the setup times for all pairs of distinct operations
        for i in range(len(setup_matrix)):
            for j in range(len(setup_matrix[i])):
                if i != j:  # Ignore setup times for the same operation (i == j)
                    if i not in machine_setup_times:
                        machine_setup_times[i] = {}
                    machine_setup_times[i][j] = setup_matrix[i][j]

        sequence_dependent_setup_times[machine_id] = machine_setup_times

    # Add the precedence relations and sequence-dependent setup times to the JobShop
    jobShop.add_precedence_relations_operations(precedence_relations)
    jobShop.add_sequence_dependent_setup_times(sequence_dependent_setup_times)

    return jobShop


if __name__ == "__main__":
    processing_info = {
        "instance_name": "custom_problem_instance",
        "nr_machines": 2,
        "jobs": [
            {"job_id": 0, "operations": [
                {"operation_id": 0, "processing_times": {"machine_1": 10, "machine_2": 20}, "predecessor": None},
                {"operation_id": 1, "processing_times": {"machine_1": 25, "machine_2": 19}, "predecessor": 0}
            ]},
            {"job_id": 1, "operations": [
                {"operation_id": 2, "processing_times": {"machine_1": 23, "machine_2": 21}, "predecessor": None},
                {"operation_id": 3, "processing_times": {"machine_1": 12, "machine_2": 24}, "predecessor": 2}
            ]},
            {"job_id": 2, "operations": [
                {"operation_id": 4, "processing_times": {"machine_1": 37, "machine_2": 21}, "predecessor": None},
                {"operation_id": 5, "processing_times": {"machine_1": 23, "machine_2": 34}, "predecessor": 4}
            ]}
        ],
        "sequence_dependent_setup_times": {
            "machine_1": [
                [0, 25, 30, 35, 40, 45],
                [25, 0, 20, 30, 40, 50],
                [30, 20, 0, 10, 15, 25],
                [35, 30, 10, 0, 5, 10],
                [40, 40, 15, 5, 0, 20],
                [45, 50, 25, 10, 20, 0]
            ],
            "machine_2": [
                [0, 21, 30, 35, 40, 45],
                [21, 0, 10, 25, 30, 40],
                [30, 10, 0, 5, 15, 25],
                [35, 25, 5, 0, 10, 20],
                [40, 30, 15, 10, 0, 25],
                [45, 40, 25, 20, 25, 0]
            ]
        }
    }

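    # Each setup-time matrix is indexed by operation_id (6 x 6 here, one row and
    # one column per operation across all jobs); the keys "machine_1" and
    # "machine_2" are mapped to machine IDs 0 and 1 by parse().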
    jobShopEnv = parse(processing_info)
    print('Job shop setup complete')

    # TEST GA:
    # from solution_methods.GA.src.initialization import initialize_run
    # from solution_methods.GA.run_GA import run_GA
    # import multiprocessing
    #
    # parameters = {"instance": {"problem_instance": "custom_problem_instance"},
    #               "algorithm": {"population_size": 8, "ngen": 10, "seed": 5, "indpb": 0.2, "cr": 0.7, "multiprocessing": True},
    #               "output": {"logbook": True}
    #               }
    #
    # pool = multiprocessing.Pool()
    # population, toolbox, stats, hof = initialize_run(jobShopEnv, pool, **parameters)
    # makespan, jobShopEnv = run_GA(jobShopEnv, population, toolbox, stats, hof, **parameters)

    # TEST CP_SAT:
    from solution_methods.CP_SAT.run_cp_sat import run_CP_SAT
    parameters = {"instance": {"problem_instance": "custom_fjsp_sdst"},
                  "solver": {"time_limit": 3600},
                  "output": {"logbook": True}
                  }

    jobShopEnv = parse(processing_info)
    results, jobShopEnv = run_CP_SAT(jobShopEnv, **parameters)
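    # The exact contents of `results` depend on the CP_SAT runner; printing it
    # (e.g., print(results)) shows the fields it returns for this instance.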