|
5 | 5 | import os |
6 | 6 | import shutil |
7 | 7 | import stat |
| 8 | +import copy_with_port_portname |
| 9 | +import numpy as np |
8 | 10 |
|
9 | 11 | MKCONCORE_VER = "22-09-18" |
10 | 12 |
|
|
nodes_text = soup.find_all('node')

# Store the edges and nodes in a dictionary.
# ZMQ edge labels look like "0x<hexport>_<portname>"; this pattern is used
# below to keep such edges out of the file-based edges_dict.
edge_label_regex = re.compile(r"0x([a-fA-F0-9]+)_(\S+)")
edges_dict = {}
nodes_dict = {}
node_id_to_label_map = {}  # GraphML node id -> clean label (text before ':')

for graph_node in nodes_text:
    try:
        data_tag = graph_node.find('data', recursive=False)
        if not data_tag:
            continue
        label_tag = data_tag.find('y:NodeLabel')
        if not label_tag:
            continue
        full_label = prefixedgenode + label_tag.text
        # Collapse all whitespace runs/newlines to single spaces.
        nodes_dict[graph_node['id']] = re.sub(r'(\s+|\n)', ' ', full_label)
        node_id_to_label_map[graph_node['id']] = full_label.split(':')[0]
    except (IndexError, AttributeError):
        logging.debug('A node with no valid properties encountered and ignored')
|
for graph_edge in edges_text:
    try:
        data_tag = graph_edge.find('data', recursive=False)
        if not data_tag:
            continue
        label_tag = data_tag.find('y:EdgeLabel')
        if not label_tag:
            continue
        raw_label = label_tag.text
        # Filter out ZMQ edges from the file-based edge dictionary by
        # checking the raw (un-prefixed) label.
        if edge_label_regex.match(raw_label):
            continue
        prefixed_label = prefixedgenode + raw_label
        if prefixed_label not in edges_dict:
            # Value layout: [source node label, list of target node labels].
            edges_dict[prefixed_label] = [nodes_dict[graph_edge['source']], []]
        edges_dict[prefixed_label][1].append(nodes_dict[graph_edge['target']])
    except (IndexError, AttributeError, KeyError):
        logging.debug('An edge with no valid properties or missing node encountered and ignored')
|
############## Mark's Docker
logging.info("Building graph adjacency matrix...")
node_count = len(nodes_dict)
nodes_num = {}
for index, label in enumerate(nodes_dict.values()):
    nodes_num[label] = index

# m[i][j] == 1 iff there is a file-based edge from node i to node j.
m = np.zeros((node_count, node_count))
for src_label, dest_labels in edges_dict.values():
    for dst_label in dest_labels:
        try:
            m[nodes_num[src_label]][nodes_num[dst_label]] = 1
        except KeyError as e:
            logging.error(f"KeyError while building matrix. Label '{e}' not found in node map.")
            continue

# Sum of powers m^1..m^n: a zero entry means no path of any length exists.
mp = np.eye(node_count)
ms = np.zeros((node_count, node_count))
for _ in range(node_count):
    mp = mp @ m
    ms += mp
if (ms == 0).any():
    logging.warning("Unreachable nodes detected")
|
# --- START: New logic for script specialization (Aggregation) ---
# Rewrites node scripts that participate in ZMQ edges so that each script is
# specialized with the concrete port numbers / port names of all its edges.
python_executable = sys.executable
mkconcore_dir = os.path.dirname(os.path.abspath(__file__))
copy_script_py_path = os.path.join(mkconcore_dir, "copy_with_port_portname.py")
if not os.path.exists(copy_script_py_path):
    copy_script_py_path = os.path.join(CONCOREPATH, "copy_with_port_portname.py")

if not os.path.exists(copy_script_py_path):
    # Plain string: the message has no placeholders, so the f-prefix was noise.
    logging.warning("copy_with_port_portname.py not found. Script specialization will be skipped.")
    copy_script_py_path = None

# Dictionary to aggregate edge parameters for each node that needs specialization
# Key: node_id (from GraphML), Value: list of edge parameter dicts
node_edge_params = {}
# BUGFIX: this redefinition previously used r"0x([^_]+)_(\S+)" while the
# edges_dict filter uses r"0x([a-fA-F0-9]+)_(\S+)".  A label with non-hex
# characters (e.g. "0xZZ_foo") would then be kept as a file-based edge AND
# treated as a ZMQ edge here.  Keep both patterns identical (hex-only).
edge_label_regex = re.compile(r"0x([a-fA-F0-9]+)_(\S+)")

logging.info("Aggregating ZMQ edge parameters for nodes...")
if copy_script_py_path:
    for edge_element in soup.find_all('edge'):
        try:
            edge_label_tag = edge_element.find('y:EdgeLabel')
            if not edge_label_tag or not edge_label_tag.text:
                continue

            raw_edge_label = edge_label_tag.text
            match = edge_label_regex.match(raw_edge_label)

            if match:
                hex_port_val, port_name_val = match.groups()
                # Convert hex port value to decimal string
                try:
                    decimal_port_str = str(int(hex_port_val, 16))
                except ValueError:
                    # Unreachable with the hex-only regex above; kept as a guard.
                    logging.error(f"Invalid hex value '{hex_port_val}' in edge label. Using as is.")
                    decimal_port_str = hex_port_val

                source_node_id = edge_element['source']
                target_node_id = edge_element['target']

                # Get clean labels for use in variable names
                source_node_label = node_id_to_label_map.get(source_node_id, "UNKNOWN_SOURCE")
                target_node_label = node_id_to_label_map.get(target_node_id, "UNKNOWN_TARGET")

                logging.info(f"Found ZMQ edge '{raw_edge_label}' from '{source_node_label}' to '{target_node_label}'")

                edge_param_data = {
                    "port": decimal_port_str,
                    "port_name": port_name_val,
                    "source_node_label": source_node_label,
                    "target_node_label": target_node_label
                }

                # Add this edge's data to both the source and target nodes for specialization
                if source_node_id in nodes_dict:
                    node_edge_params.setdefault(source_node_id, []).append(edge_param_data)
                if target_node_id in nodes_dict:
                    node_edge_params.setdefault(target_node_id, []).append(edge_param_data)
        except Exception as e:
            # Best-effort aggregation: a malformed edge must not abort the build.
            logging.warning(f"Error processing edge for parameter aggregation: {e}")

# --- Now, run the specialization for each node that has aggregated parameters ---
if node_edge_params:
    logging.info("Running script specialization process...")
    specialized_scripts_output_dir = os.path.abspath(os.path.join(outdir, "src"))
    os.makedirs(specialized_scripts_output_dir, exist_ok=True)

    for node_id, params_list in node_edge_params.items():
        current_node_full_label = nodes_dict[node_id]
        try:
            container_name, original_script = current_node_full_label.split(':', 1)
        except ValueError:
            continue  # Skip if label format is wrong

        if not original_script or "." not in original_script:
            continue  # Skip if not a script file

        template_script_full_path = os.path.join(sourcedir, original_script)
        if not os.path.exists(template_script_full_path):
            logging.error(f"Cannot specialize: Original script '{template_script_full_path}' not found in '{sourcedir}'.")
            continue

        new_script_basename = copy_with_port_portname.run_specialization_script(
            template_script_full_path,
            specialized_scripts_output_dir,
            params_list,
            python_executable,
            copy_script_py_path
        )

        if new_script_basename:
            # Update nodes_dict to point to the new comprehensive specialized script
            nodes_dict[node_id] = f"{container_name}:{new_script_basename}"
            logging.info(f"Node ID '{node_id}' ('{container_name}') updated to use specialized script '{new_script_basename}'.")
        else:
            logging.error(f"Failed to generate specialized script for node ID '{node_id}'. It will retain its original script.")
185 | 286 | #not right for PM2_1_1 and PM2_1_2 |
186 | 287 | volswr = len(nodes_dict)*[''] |
187 | 288 | i = 0 |
|
202 | 303 | volsro[nodes_num[dest]] += ' -v '+volIndirPair+':ro' |
203 | 304 | i += 1 |
204 | 305 |
|
# copy sourcedir into ./src
# --- Modified file copying loop ---
logging.info("Processing files for nodes...")
out_src_dir = os.path.join(outdir, "src")
for graphml_id in list(nodes_dict.keys()):
    label = nodes_dict[graphml_id]
    parts = label.split(':', 1)
    if len(parts) != 2:
        continue
    containername, sourcecode = parts
    if not sourcecode:
        continue

    if "." in sourcecode:
        # NOTE: splitext keeps everything before the LAST dot as dockername.
        dockername, langext = os.path.splitext(sourcecode)
    else:
        dockername, langext = sourcecode, ""

    # If the script was specialized, it is already in outdir/src; only
    # non-specialized nodes need their script copied from sourcedir.
    if graphml_id not in node_edge_params:
        script_src = os.path.join(sourcedir, sourcecode)
        if os.path.exists(script_src):
            shutil.copy2(script_src, os.path.join(out_src_dir, sourcecode))
        else:
            logging.error(f"Script '{sourcecode}' not found in sourcedir '{sourcedir}'")

    # The rest of the file handling (Dockerfiles, .dir) uses 'dockername',
    # which is derived from the (possibly specialized) script name.
    if concoretype == "docker":
        dockerfile_name = f"Dockerfile.{dockername}"
        dockerfile_src = os.path.join(sourcedir, dockerfile_name)
        if os.path.exists(dockerfile_src):
            shutil.copy2(dockerfile_src, os.path.join(out_src_dir, dockerfile_name))

    node_dir_name = f"{dockername}.dir"
    node_dir_src = os.path.join(sourcedir, node_dir_name)
    if os.path.isdir(node_dir_src):
        shutil.copytree(node_dir_src, os.path.join(out_src_dir, node_dir_name), dirs_exist_ok=True)

230 | 346 | #copy proper concore.py into /src |
231 | 347 | try: |
232 | 348 | if concoretype=="docker": |
|
346 | 462 | fcopy.write(fsource.read()) |
347 | 463 | fsource.close() |
348 | 464 |
|
# --- Generate iport and oport mappings ---
logging.info("Generating iport/oport mappings...")
node_port_mappings = {lbl: {'iport': {}, 'oport': {}} for lbl in nodes_dict.values()}

# 1. File-based inputs: indir is indexed in the same order as nodes_num.
index_to_label = {idx: lbl for lbl, idx in nodes_num.items()}
for idx, in_dirs in enumerate(indir):
    node_label = index_to_label.get(idx)
    if node_label is None:
        continue
    for pair in in_dirs:
        volname, portnum = pair.split(INDIRNAME)
        node_port_mappings[node_label]['iport'][volname] = int(portnum)

# 2. File-based outputs: number each outgoing edge of a node sequentially.
for edge_lbl, (src_lbl, _target_lbls) in edges_dict.items():
    if src_lbl in node_port_mappings:
        oport_map = node_port_mappings[src_lbl]['oport']
        oport_map[edge_lbl] = len(oport_map) + 1

# 3. Augment with bidirectional ZMQ connections
logging.info("Augmenting port maps with ZMQ connections...")
for edge_element in soup.find_all('edge'):
    try:
        label_tag = edge_element.find('y:EdgeLabel')
        if not label_tag or not label_tag.text:
            continue
        match = edge_label_regex.match(label_tag.text)
        if not match:
            continue
        hex_port_val, port_name_val = match.groups()
        # Convert hex port value to decimal string
        try:
            decimal_port_str = str(int(hex_port_val, 16))
        except ValueError:
            logging.error(f"Invalid hex value '{hex_port_val}' in edge label. Using as is.")
            decimal_port_str = hex_port_val

        src_lbl = nodes_dict.get(edge_element['source'])
        dst_lbl = nodes_dict.get(edge_element['target'])
        if src_lbl and dst_lbl:
            # NOTE(review): ZMQ port values are stored as strings while the
            # file-based iport entries above are ints -- confirm downstream
            # consumers accept both representations.
            for lbl in (src_lbl, dst_lbl):
                node_port_mappings[lbl]['iport'][port_name_val] = decimal_port_str
                node_port_mappings[lbl]['oport'][port_name_val] = decimal_port_str
            logging.info(f" - Added ZMQ port '{port_name_val}:{decimal_port_str}' to both iport/oport for nodes '{src_lbl}' and '{dst_lbl}'")
    except Exception as e:
        logging.warning(f"Error processing ZMQ edge for port map: {e}")

# 4. Write final iport/oport files
logging.info("Writing .iport and .oport files...")
for node_label, ports in node_port_mappings.items():
    try:
        containername, sourcecode = node_label.split(':', 1)
    except ValueError:
        continue
    if not sourcecode or "." not in sourcecode:
        continue
    dockername = os.path.splitext(sourcecode)[0]
    for kind in ('iport', 'oport'):
        port_path = os.path.join(outdir, "src", f"{dockername}.{kind}")
        with open(port_path, "w") as fport:
            # Strip the edge/node prefix from the serialized dict keys.
            fport.write(str(ports[kind]).replace("'" + prefixedgenode, "'"))
|
392 | 523 | #if docker, make docker-dirs, generate build, run, stop, clear scripts and quit |
393 | 524 | if (concoretype=="docker"): |
|
0 commit comments