From 5452bd5e72940da305e3a7ebc99746ef92f17573 Mon Sep 17 00:00:00 2001 From: ReillyMcK Date: Thu, 13 Feb 2025 02:55:29 -0700 Subject: [PATCH 01/17] Make sure all pins appear on carry4s in netlist --- bfasst/utils/netlist_cleanup.py | 65 ++++++++++++++------------------- bfasst/utils/rw_helpers.py | 19 ++++++++++ bfasst/utils/rw_phys_netlist.py | 2 + 3 files changed, 48 insertions(+), 38 deletions(-) diff --git a/bfasst/utils/netlist_cleanup.py b/bfasst/utils/netlist_cleanup.py index 6225d9a9..3f97a7a0 100644 --- a/bfasst/utils/netlist_cleanup.py +++ b/bfasst/utils/netlist_cleanup.py @@ -38,31 +38,25 @@ def __init__(self, build_path, netlist_in_path, netlist_out_path, logging_level) self.remove_unused_instances(top) self.write_netlist(netlist_ir) - def valid_assign_instance(self, instance): - """Check which instances are ASSIGN instances which need to be removed""" - if not instance.reference.name.startswith("SD"): - return False - try: - pin_out = next(pin for pin in instance.pins if pin.inner_pin.port.name != "i") - except StopIteration: - return False - if len(pin_out.wire.pins) > 1: - raise NotImplementedError("Multiple connections for ASSIGN instance not supported") - return True - def remove_assign_instances(self, top): """Remove all ASSIGN instances""" logging.info("Finding and removing all ASSIGN instances") t_begin = time.perf_counter() - - top_ref = top.reference - - instances_to_remove = { - instance for instance in top.get_instances() if self.valid_assign_instance(instance) - } - - top_ref.remove_children_from(instances_to_remove) - + for instance in top.get_instances(): + if instance.reference.name.startswith("SDN_VERILOG_ASSIGNMENT"): + pin_out = None + + for pin in instance.pins: + if pin.inner_pin.port.name == "i": + pass + else: + pin_out = pin + + for pin in pin_out.wire.pins: + if pin == pin_out: + continue + raise NotImplementedError + top.reference.remove_child(instance) logging.info("Total time to remove ASSIGN instances: %s", time.perf_counter() - t_begin) def remove_unused_instances(self, top): @@ -70,26 +64,21 @@ def remove_unused_instances(self, top): logging.info("Removing unused instances") unused_instance_types = {"LUT6_2": ("O5", "O6"), "IBUF": ("O",)} netlist_wrapper = SdnNetlistWrapper(top) - - wire_to_net = netlist_wrapper.wire_to_net - instances = netlist_wrapper.instances - t_begin = time.perf_counter() - - # Group instances by type so we don't have to iterate over instances multiple times - instances_by_type = {} - for instance in instances: - inst_type = instance.instance.reference.name - instances_by_type.setdefault(inst_type, []).append(instance) - for instance_type, pin_names in unused_instance_types.items(): - for instance in instances_by_type.get(instance_type, []): - if not any( - wire_to_net[instance.get_pin(pin_name).pin.wire].is_connected + for instance_wrapper in [ + instance_wrapper + for instance_wrapper in netlist_wrapper.instances + if instance_wrapper.instance.reference.name == instance_type + ]: + connected_pins = ( + netlist_wrapper.wire_to_net[ + instance_wrapper.get_pin(pin_name).pin.wire + ].is_connected for pin_name in pin_names - ): - top.reference.remove_child(instance.instance) - + ) + if not any(connected_pins): + top.reference.remove_child(instance_wrapper.instance) logging.info("Total time to remove unused instances: %s", time.perf_counter() - t_begin) def write_netlist(self, netlist_ir): diff --git a/bfasst/utils/rw_helpers.py b/bfasst/utils/rw_helpers.py index 77d3baa9..4916dd57 100644 --- a/bfasst/utils/rw_helpers.py +++ 
b/bfasst/utils/rw_helpers.py @@ -473,6 +473,25 @@ def cell_is_default_mapping(self, cell): return True + def ensure_connected(self, edif_cell_inst, net): + """ + Ensure that all ports on the cell are connected to the net. + + Sometimes Vivado leaves ports undriven, which can cause the port to not be + explicitly shown in the verilog netlist. This can cause issues since + spydrnet will not infer ground for these signals. Use this function to make + sure all ports are shown by connecting them. + """ + + type_name = edif_cell_inst.getCellType().getName() + port_names = self.CELL_PIN_MAP[type_name] + + for phys_name, log_name in port_names.items(): + port = edif_cell_inst.getPortInst(phys_name) + if port is None: + new_port = edif_cell_inst.getPort(log_name) + net.createPortInst(new_port, edif_cell_inst) + class _PinMap(MutableMapping): """ diff --git a/bfasst/utils/rw_phys_netlist.py b/bfasst/utils/rw_phys_netlist.py index 63321efd..170efde6 100644 --- a/bfasst/utils/rw_phys_netlist.py +++ b/bfasst/utils/rw_phys_netlist.py @@ -581,6 +581,8 @@ def __process_carry4(self, cell): logging.info(" Inputs not permuted, skipping") return [] + rw.PinMap.ensure_connected(cell.getEDIFCellInst(), self.gnd) + raise NotImplementedError def __process_bufg(self, bufg_cell): From fe69458d4750618bf203668c57d5ee8bd1f444da Mon Sep 17 00:00:00 2001 From: ReillyMcK Date: Thu, 13 Feb 2025 02:56:57 -0700 Subject: [PATCH 02/17] Add is_const prop to nets --- bfasst/utils/sdn_helpers.py | 3 +++ bfasst/utils/structural.py | 5 +++++ bfasst/utils/structural_helpers.py | 2 +- 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/bfasst/utils/sdn_helpers.py b/bfasst/utils/sdn_helpers.py index 7394ec2f..9791203d 100644 --- a/bfasst/utils/sdn_helpers.py +++ b/bfasst/utils/sdn_helpers.py @@ -171,6 +171,7 @@ def __init__(self, wire): self.driver_pin = None self.is_vdd = False self.is_gnd = False + self.is_const = False self.is_connected = None def add_alias_wire(self, wire): @@ -225,11 +226,13 @@ def find_driver(self): or self.wire.cable.name in SdnNetlistWrapper.GND_NAMES ): self.is_gnd = True + self.is_const = True elif ( self.wire.cable.name == r"\" or self.wire.cable.name in SdnNetlistWrapper.VCC_NAMES ): self.is_vdd = True + self.is_const = True def set_driver_pin(self, pin): """Set the driver pin""" diff --git a/bfasst/utils/structural.py b/bfasst/utils/structural.py index 84b25275..2af8f5b0 100644 --- a/bfasst/utils/structural.py +++ b/bfasst/utils/structural.py @@ -740,6 +740,11 @@ def check_for_potential_mapping(self, instance_name: str) -> set[str]: instances_matching_connections = self.eliminate_redundant_matches(instance_name) + logging.info( + " %s instance(s) after elimating matched instances", + len(instances_matching_connections), + ) + for pin in instance.pins: if pin.net not in self.net_mapping: continue diff --git a/bfasst/utils/structural_helpers.py b/bfasst/utils/structural_helpers.py index 9b6e82ab..5dd02c55 100644 --- a/bfasst/utils/structural_helpers.py +++ b/bfasst/utils/structural_helpers.py @@ -77,4 +77,4 @@ def create_cell_props() -> dict: def count_num_const(pins) -> int: - return sum(1 for pin in pins if pin.net and (pin.net.is_gnd or pin.net.is_vdd)) + return sum(1 for pin in pins if pin.net and pin.net.is_const) From 97096248ee8ed10db9b4cc8603b4ea02da486f00 Mon Sep 17 00:00:00 2001 From: ReillyMcK Date: Thu, 13 Feb 2025 03:06:07 -0700 Subject: [PATCH 03/17] update compare flow build count --- test/scripts/test_ninja_flow_manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/test/scripts/test_ninja_flow_manager.py b/test/scripts/test_ninja_flow_manager.py index 3583e453..986fb631 100644 --- a/test/scripts/test_ninja_flow_manager.py +++ b/test/scripts/test_ninja_flow_manager.py @@ -107,7 +107,7 @@ def test_run_vivado_phys_netlist_flow(self): self.__check_flow_run("vivado_phys_netlist", 15) def test_run_phys_compare_flow(self): - self.__check_flow_run("vivado_phys_netlist_cmp", 18) + self.__check_flow_run("vivado_phys_netlist_cmp", 19) def test_run_cmp_error_injection_flow(self): # There should be 200 injections and 200 comparisons for one flow From a4f9fc92d17f221ed7a7067ca8e1f71ddcc6a784 Mon Sep 17 00:00:00 2001 From: ReillyMcK Date: Thu, 13 Feb 2025 03:32:39 -0700 Subject: [PATCH 04/17] fix const logic --- bfasst/utils/sdn_helpers.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bfasst/utils/sdn_helpers.py b/bfasst/utils/sdn_helpers.py index 9791203d..d3b4fb5e 100644 --- a/bfasst/utils/sdn_helpers.py +++ b/bfasst/utils/sdn_helpers.py @@ -242,6 +242,7 @@ def set_driver_pin(self, pin): # Check for constant GND/VDD. Top-level I/O will not be GND/VDD if isinstance(pin, sdn.OuterPin) and self.driver_pin.instance.reference.name == "GND": self.is_gnd = True + self.is_const = True else: self.is_gnd = False if isinstance(pin, sdn.OuterPin) and self.driver_pin.instance.reference.name in ( @@ -249,6 +250,7 @@ def set_driver_pin(self, pin): "VCC", ): self.is_vdd = True + self.is_const = True else: self.is_vdd = False From cdf10adc0759e7782820e628ddacc67bf421858d Mon Sep 17 00:00:00 2001 From: ReillyMcK Date: Thu, 20 Feb 2025 15:33:38 -0700 Subject: [PATCH 05/17] change netlist_cleanup back to main --- bfasst/utils/netlist_cleanup.py | 65 +++++++++++++++++++-------------- 1 file changed, 38 insertions(+), 27 deletions(-) diff --git a/bfasst/utils/netlist_cleanup.py b/bfasst/utils/netlist_cleanup.py index 3f97a7a0..6225d9a9 100644 --- a/bfasst/utils/netlist_cleanup.py +++ b/bfasst/utils/netlist_cleanup.py @@ -38,25 +38,31 @@ def __init__(self, build_path, netlist_in_path, netlist_out_path, logging_level) self.remove_unused_instances(top) self.write_netlist(netlist_ir) + def valid_assign_instance(self, instance): + """Check which instances are ASSIGN instances which need to be removed""" + if not instance.reference.name.startswith("SD"): + return False + try: + pin_out = next(pin for pin in instance.pins if pin.inner_pin.port.name != "i") + except StopIteration: + return False + if len(pin_out.wire.pins) > 1: + raise NotImplementedError("Multiple connections for ASSIGN instance not supported") + return True + def remove_assign_instances(self, top): """Remove all ASSIGN instances""" logging.info("Finding and removing all ASSIGN instances") t_begin = time.perf_counter() - for instance in top.get_instances(): - if instance.reference.name.startswith("SDN_VERILOG_ASSIGNMENT"): - pin_out = None - - for pin in instance.pins: - if pin.inner_pin.port.name == "i": - pass - else: - pin_out = pin - - for pin in pin_out.wire.pins: - if pin == pin_out: - continue - raise NotImplementedError - top.reference.remove_child(instance) + + top_ref = top.reference + + instances_to_remove = { + instance for instance in top.get_instances() if self.valid_assign_instance(instance) + } + + top_ref.remove_children_from(instances_to_remove) + logging.info("Total time to remove ASSIGN instances: %s", time.perf_counter() - t_begin) def remove_unused_instances(self, top): @@ -64,21 +70,26 @@ def remove_unused_instances(self, top): logging.info("Removing unused instances") 
unused_instance_types = {"LUT6_2": ("O5", "O6"), "IBUF": ("O",)} netlist_wrapper = SdnNetlistWrapper(top) + + wire_to_net = netlist_wrapper.wire_to_net + instances = netlist_wrapper.instances + t_begin = time.perf_counter() + + # Group instances by type so we don't have to iterate over instances multiple times + instances_by_type = {} + for instance in instances: + inst_type = instance.instance.reference.name + instances_by_type.setdefault(inst_type, []).append(instance) + for instance_type, pin_names in unused_instance_types.items(): - for instance_wrapper in [ - instance_wrapper - for instance_wrapper in netlist_wrapper.instances - if instance_wrapper.instance.reference.name == instance_type - ]: - connected_pins = ( - netlist_wrapper.wire_to_net[ - instance_wrapper.get_pin(pin_name).pin.wire - ].is_connected + for instance in instances_by_type.get(instance_type, []): + if not any( + wire_to_net[instance.get_pin(pin_name).pin.wire].is_connected for pin_name in pin_names - ) - if not any(connected_pins): - top.reference.remove_child(instance_wrapper.instance) + ): + top.reference.remove_child(instance.instance) + logging.info("Total time to remove unused instances: %s", time.perf_counter() - t_begin) def write_netlist(self, netlist_ir): From 6fe7817b6d201d7da2d8e90fe987992a28e24120 Mon Sep 17 00:00:00 2001 From: Connor Young Date: Fri, 21 Feb 2025 03:33:40 +0000 Subject: [PATCH 06/17] add full cleaning time to log --- bfasst/utils/netlist_cleanup.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bfasst/utils/netlist_cleanup.py b/bfasst/utils/netlist_cleanup.py index 6225d9a9..3f609ad4 100644 --- a/bfasst/utils/netlist_cleanup.py +++ b/bfasst/utils/netlist_cleanup.py @@ -23,6 +23,7 @@ def __init__(self, build_path, netlist_in_path, netlist_out_path, logging_level) self.log_path = self.build_path / "log.txt" self.log_path.unlink(missing_ok=True) + self.start_time = time.perf_counter() logging.basicConfig( filename=self.log_path, @@ -97,6 +98,7 @@ def write_netlist(self, netlist_ir): t_begin = time.perf_counter() sdn.compose(netlist_ir, self.netlist_out, write_blackbox=False) logging.info("Total time to write out netlist: %s", time.perf_counter() - t_begin) + logging.info("Total time to clean netlist: %s", time.perf_counter() - self.start_time) if __name__ == "__main__": From 3a3a8e2030ff5fcaa29a12d2dfbd3231c63c1634 Mon Sep 17 00:00:00 2001 From: Connor Young Date: Fri, 21 Feb 2025 04:18:18 +0000 Subject: [PATCH 07/17] pylint --- bfasst/utils/rw_helpers.py | 5 ++-- bfasst/utils/rw_phys_netlist.py | 53 +++++++++++++++++++-------------- 2 files changed, 34 insertions(+), 24 deletions(-) diff --git a/bfasst/utils/rw_helpers.py b/bfasst/utils/rw_helpers.py index 4916dd57..89d1c6a0 100644 --- a/bfasst/utils/rw_helpers.py +++ b/bfasst/utils/rw_helpers.py @@ -211,8 +211,7 @@ def remove_and_disconnect_cell(cell, log=logging.info): def lut_move_net_to_new_cell( - old_edif_cell_inst, - new_edif_cell_inst, + edif_cell_insts, old_logical_pin, physical_pin, log=logging.info, @@ -225,6 +224,8 @@ def lut_move_net_to_new_cell( log(f" Processing logical pin {old_logical_pin}, physical pin {physical_pin}") + old_edif_cell_inst, new_edif_cell_inst = edif_cell_insts + port_inst = old_edif_cell_inst.getPortInst(old_logical_pin) logical_net = port_inst.getNet() assert logical_net diff --git a/bfasst/utils/rw_phys_netlist.py b/bfasst/utils/rw_phys_netlist.py index 170efde6..eed53acb 100644 --- a/bfasst/utils/rw_phys_netlist.py +++ b/bfasst/utils/rw_phys_netlist.py @@ -261,7 +261,10 @@ def 
__process_all_luts(self, cells_already_visited): lut5_cell = site_inst.getCell(lut5_bel) gnd_luts = self.__check_lut_const_nets( - lut6_cell, lut6_pin_out, lut5_cell, lut5_pin_out, gnd_nets, vcc_nets, site_inst + (lut6_cell, lut6_pin_out), + (lut5_cell, lut5_pin_out), + (gnd_nets, vcc_nets), + site_inst, ) if gnd_luts: cells_already_visited.update(gnd_luts) @@ -326,42 +329,47 @@ def __check_lutram_srl(self, lut6_cell, lut5_cell, lut_rams, cells_already_visit return True return False - def __check_lut_const_nets( - self, lut6_cell, lut6_pin_out, lut5_cell, lut5_pin_out, gnd_nets, vcc_nets, site_inst - ): + def __check_lut_const_nets(self, lut6, lut5, const_nets, site_inst): """ Check if the LUT6 or LUT5 are connected to a const net Covers O5/O6 being gnd Covers O5/O6 being vcc Covers one output being gnd and the other being vcc + + lut6: Tuple (lut6_cell, lut6_pin_out) + lut5: Tuple (lut5_cell, lut5_pin_out) + const_nets: Tuple (gnd_nets, vcc_nets) """ + + gnd_nets, vcc_nets = const_nets + const_generator_pins = [None, None] pin1_gnd = None pin2_gnd = None for is_gnd, const_net in ((True, gnd_nets), (False, vcc_nets)): - if lut6_pin_out in const_net: + if lut6[1] in const_net: # If a gnd net, then there can't be a cell there - assert lut6_cell is None - if lut5_cell is not None: - self.__process_lut5_and_const_lut(lut5_cell, lut6_pin_out, site_inst, is_gnd) + assert lut6[0] is None + if lut5[0] is not None: + self.__process_lut5_and_const_lut(lut5[0], lut6[1], site_inst, is_gnd) return { - lut5_cell, + lut5[0], } assert const_generator_pins[0] is None - const_generator_pins[0] = lut6_pin_out + const_generator_pins[0] = lut6[1] pin1_gnd = is_gnd - if lut5_pin_out in const_net: + if lut5[1] in const_net: # If a gnd net, then there can't be a cell there # This assumption is not true for LUTRAMs - assert lut5_cell is None - if lut6_cell is not None: - self.__process_lut5_and_const_lut(lut6_cell, lut5_pin_out, site_inst, is_gnd) + assert lut5[0] is None + if lut6[0] is not None: + self.__process_lut5_and_const_lut(lut6[0], lut5[1], site_inst, is_gnd) return { - lut6_cell, + lut6[0], } assert const_generator_pins[1] is None - const_generator_pins[1] = lut5_pin_out + const_generator_pins[1] = lut5[1] pin2_gnd = is_gnd if const_generator_pins[0] is not None or const_generator_pins[1] is not None: @@ -629,12 +637,14 @@ def __process_bufg(self, bufg_cell): return [bufg_cell] - def __check_carry4_const_net(self, site_inst, const_type, pin_out, new_net, is_gnd): + def __check_carry4_const_net(self, site_inst, const_info, pin_out, new_net): """ It seems that if the const lut output is routed to a carry4, the mux it routes through also doesn't have a cell, so you have to check the c4 input pins. 
""" + + const_type, is_gnd = const_info cell = site_inst.getCell("CARRY4") assert cell, f"{const_type} LUT routed to no cells" pin_in = "DI" if pin_out.endswith("O5") else "S" @@ -702,7 +712,7 @@ def __process_lut_const_net(self, site_inst, new_cell_inst, pin_out, is_gnd): new_net.createPortInst(routed_to_port_inst.getPort(), routed_to_cell_inst) if site_inst.getCell("CARRY4") is not None: - self.__check_carry4_const_net(site_inst, const_type, pin_out, new_net, is_gnd) + self.__check_carry4_const_net(site_inst, (const_type, is_gnd), pin_out, new_net) def __process_lut_const(self, site_inst, pins, pin1_gnd, pin2_gnd): """ @@ -788,7 +798,7 @@ def __process_lut(self, lut6_cell, lut5_cell, lut5_only=False): physical_pins_to_nets[physical_pin] = port_inst.getNet() rw.lut_move_net_to_new_cell( - lut6_edif_cell_inst, new_cell_inst, logical_pin, physical_pin + (lut6_edif_cell_inst, new_cell_inst), logical_pin, physical_pin ) # Now do the same for the other LUT @@ -805,8 +815,7 @@ def __process_lut(self, lut6_cell, lut5_cell, lut5_only=False): # Disconnect net from logical pin on old cell, # and connect to new logical pin (based on physical pin) of new cell rw.lut_move_net_to_new_cell( - lut5_edif_cell_inst, - new_cell_inst, + (lut5_edif_cell_inst, new_cell_inst), logical_pin, physical_pin, logging.info, @@ -878,7 +887,7 @@ def __process_lut5_and_const_lut(self, lut5, const_pin, site_inst, is_gnd): assert port_inst rw.lut_move_net_to_new_cell( - lut5_edif_cell_inst, new_cell_inst, logical_pin, physical_pin + (lut5_edif_cell_inst, new_cell_inst), logical_pin, physical_pin ) if lut5.isRoutethru(): From f5b01ee191f997098f152d1d552104102ff51760 Mon Sep 17 00:00:00 2001 From: Connor Young Date: Fri, 21 Feb 2025 22:38:58 +0000 Subject: [PATCH 08/17] add some more timing statements, memory dump, and update process_results.py --- bfasst/utils/general.py | 18 ++++++++++++++++++ bfasst/utils/netlist_cleanup.py | 3 +++ bfasst/utils/structural.py | 3 ++- scripts/process_results.py | 27 ++++++++++++--------------- 4 files changed, 35 insertions(+), 16 deletions(-) diff --git a/bfasst/utils/general.py b/bfasst/utils/general.py index 6d5f51bf..0fe03569 100644 --- a/bfasst/utils/general.py +++ b/bfasst/utils/general.py @@ -232,3 +232,21 @@ def get_family_from_part(part): will have to be changed if we start supporting more part families """ return "kintex7" if part[3] == "k" else "artix7" + +def get_size(obj, seen=None): + """Recursively finds size of objects""" + size = sys.getsizeof(obj) + if seen is None: + seen = set() + obj_id = id(obj) + if obj_id in seen: + return 0 + seen.add(obj_id) + if isinstance(obj, dict): + size += sum([get_size(v, seen) for v in obj.values()]) + size += sum([get_size(k, seen) for k in obj.keys()]) + elif hasattr(obj, '__dict__'): + size += get_size(obj.__dict__, seen) + elif hasattr(obj, '__iter__') and not isinstance(obj, (str, bytes, bytearray)): + size += sum([get_size(i, seen) for i in obj]) + return size diff --git a/bfasst/utils/netlist_cleanup.py b/bfasst/utils/netlist_cleanup.py index 3f609ad4..f21b5b71 100644 --- a/bfasst/utils/netlist_cleanup.py +++ b/bfasst/utils/netlist_cleanup.py @@ -24,6 +24,7 @@ def __init__(self, build_path, netlist_in_path, netlist_out_path, logging_level) self.log_path = self.build_path / "log.txt" self.log_path.unlink(missing_ok=True) self.start_time = time.perf_counter() + self.cleanup_time_log = self.build_path / "cleanup_time.txt" logging.basicConfig( filename=self.log_path, @@ -99,6 +100,8 @@ def write_netlist(self, netlist_ir): 
sdn.compose(netlist_ir, self.netlist_out, write_blackbox=False) logging.info("Total time to write out netlist: %s", time.perf_counter() - t_begin) logging.info("Total time to clean netlist: %s", time.perf_counter() - self.start_time) + with open(self.cleanup_time_log, "w") as f: + f.write(f"{time.perf_counter() - self.start_time}\n") if __name__ == "__main__": diff --git a/bfasst/utils/structural.py b/bfasst/utils/structural.py index 2af8f5b0..8f1c3bdb 100644 --- a/bfasst/utils/structural.py +++ b/bfasst/utils/structural.py @@ -15,7 +15,7 @@ from bfasst import jpype_jvm from bfasst.utils import convert_verilog_literal_to_int from bfasst.utils.structural_helpers import create_cell_props, count_num_const -from bfasst.utils.general import log_with_banner +from bfasst.utils.general import log_with_banner, get_size from bfasst.utils.sdn_helpers import SdnNetlistWrapper, SdnInstanceWrapper, SdnNet, SdnPinWrapper # pylint: disable=wrong-import-order @@ -343,6 +343,7 @@ def init_matching_instances(self) -> None: ) self.possible_matches[instance_name] = set(instances_matching) + logging.info("The size of the possible_matches dict is %d", get_size(self.possible_matches)) with open(self.possible_matches_cache_path, "wb") as f: pickle.dump(self.possible_matches, f) diff --git a/scripts/process_results.py b/scripts/process_results.py index b1da482b..5932d827 100644 --- a/scripts/process_results.py +++ b/scripts/process_results.py @@ -145,27 +145,20 @@ def transform_stats(designs_yaml): workbook.close() -def phys_cmp_results(flow): +def phys_cmp_results(designs): """ Gather results from phys_cmp flow and create a CSV file with utilization data, transformation times, comparison times, and success status. """ - root_dir = ROOT_PATH / "build" / flow - - results = root_dir / "results.json" + root_dir = ROOT_PATH / "build" out = "results.csv" - - with open(results, "r") as f: - data = json.load(f) - rows = [] - for design, status in data.items(): - status = "Success" if not status else status + + for design in designs: row = { "Design": design.split("/")[1], - "Status": status, "LUT": 0, "LUT_MEM": 0, "SRL": 0, @@ -173,6 +166,7 @@ def phys_cmp_results(flow): "CARRY4": 0, "BRAM": 0, "T_TIME": 0, + "C_TIME": 0, "S_TIME": 0, } utilization_file = root_dir / f"{design}/vivado_impl/utilization.txt" @@ -193,24 +187,27 @@ def phys_cmp_results(flow): elif "| Block RAM Tile" in line: row["BRAM"] = line.split("|")[2].strip() - with open(root_dir / f"{design}/xilinx_phys_netlist/transformation_time.txt", "r") as f: + with open(root_dir / design / "vivado_phys_netlist/transformation_time.txt", "r") as f: row["T_TIME"] = round(float(f.read().strip()), 2) - with open(root_dir / f"{design}/struct_cmp/comparison_time.txt", "r") as f: + with open(root_dir / design / "netlist_cleanup/cleanup_time.txt", "r") as f: + row["C_TIME"] = round(float(f.read().strip()), 2) + with open(root_dir / design / "struct_cmp/struct_comparison_time.txt", "r") as f: row["S_TIME"] = round(float(f.read().strip()), 2) rows.append(row) with open(out, "w", newline="") as f: fieldnames = [ "Design", - "Status", "LUT", "LUT_MEM", "SRL", "FF", "CARRY4", "BRAM", - "S_TIME", "T_TIME", + "C_TIME", + "S_TIME", + ] writer = csv.DictWriter(f, fieldnames=fieldnames) writer.writeheader() From feb6e82bd7c164c62f8c9ad28dd51a5fd29d20b5 Mon Sep 17 00:00:00 2001 From: Connor Young Date: Mon, 24 Feb 2025 15:57:10 -0700 Subject: [PATCH 09/17] add memory dump logic to structural.py and process_results.py --- bfasst/utils/structural.py | 7 ++++++- 
scripts/process_results.py | 5 ++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/bfasst/utils/structural.py b/bfasst/utils/structural.py index 8f1c3bdb..f67957b5 100644 --- a/bfasst/utils/structural.py +++ b/bfasst/utils/structural.py @@ -79,6 +79,8 @@ def __init__( self.comparison_time_log = ( str(log_path).split("_cmp.log", maxsplit=1)[0] + "_comparison_time.txt" ) + self.mem_dump_log = (str(log_path).split("_cmp.log", maxsplit=1)[0] + "_comparison_mem_dump.txt") + self.mem = 0 self.block_mapping = bidict() self.net_mapping = bidict() @@ -251,6 +253,8 @@ def compare_netlists(self) -> None: logging.info(total_time_msg) with open(self.comparison_time_log, "w") as f: f.write(f"{self.end_time - self.start_time}\n") + with open(self.mem_dump_log, "w") as f: + f.write(f"{self.mem}\n") def map_ports(self) -> None: """Map top-level ports""" @@ -343,7 +347,8 @@ def init_matching_instances(self) -> None: ) self.possible_matches[instance_name] = set(instances_matching) - logging.info("The size of the possible_matches dict is %d", get_size(self.possible_matches)) + self.mem = get_size(self.possible_matches) + logging.info("The size of the possible_matches dict is %d", self.mem) with open(self.possible_matches_cache_path, "wb") as f: pickle.dump(self.possible_matches, f) diff --git a/scripts/process_results.py b/scripts/process_results.py index 5932d827..9ce55e43 100644 --- a/scripts/process_results.py +++ b/scripts/process_results.py @@ -168,6 +168,7 @@ def phys_cmp_results(designs): "T_TIME": 0, "C_TIME": 0, "S_TIME": 0, + "S_MEM": 0, } utilization_file = root_dir / f"{design}/vivado_impl/utilization.txt" if not utilization_file.is_file(): @@ -193,6 +194,8 @@ def phys_cmp_results(designs): row["C_TIME"] = round(float(f.read().strip()), 2) with open(root_dir / design / "struct_cmp/struct_comparison_time.txt", "r") as f: row["S_TIME"] = round(float(f.read().strip()), 2) + with open(root_dir / design / "struct_cmp/struct_comparison_mem_dump.txt", "r") as f: + row["S_MEM"] = f.read().strip() rows.append(row) with open(out, "w", newline="") as f: @@ -207,7 +210,7 @@ def phys_cmp_results(designs): "T_TIME", "C_TIME", "S_TIME", - + "S_MEM" ] writer = csv.DictWriter(f, fieldnames=fieldnames) writer.writeheader() From 3c625e82d6f4e6cbe7ed22f10eb3d540a4b674ab Mon Sep 17 00:00:00 2001 From: Connor Young Date: Mon, 24 Feb 2025 16:00:23 -0700 Subject: [PATCH 10/17] pylint --- bfasst/utils/general.py | 13 +++++++------ bfasst/utils/structural.py | 4 +++- scripts/process_results.py | 5 ++--- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/bfasst/utils/general.py b/bfasst/utils/general.py index 0fe03569..eb235931 100644 --- a/bfasst/utils/general.py +++ b/bfasst/utils/general.py @@ -233,8 +233,9 @@ def get_family_from_part(part): """ return "kintex7" if part[3] == "k" else "artix7" + def get_size(obj, seen=None): - """Recursively finds size of objects""" + """Recursively finds size of objects using generators.""" size = sys.getsizeof(obj) if seen is None: seen = set() @@ -243,10 +244,10 @@ def get_size(obj, seen=None): return 0 seen.add(obj_id) if isinstance(obj, dict): - size += sum([get_size(v, seen) for v in obj.values()]) - size += sum([get_size(k, seen) for k in obj.keys()]) - elif hasattr(obj, '__dict__'): + size += sum(get_size(v, seen) for v in obj.values()) + size += sum(get_size(k, seen) for k in obj.keys()) + elif hasattr(obj, "__dict__"): size += get_size(obj.__dict__, seen) - elif hasattr(obj, '__iter__') and not isinstance(obj, (str, bytes, bytearray)): - size += 
sum([get_size(i, seen) for i in obj]) + elif hasattr(obj, "__iter__") and not isinstance(obj, (str, bytes, bytearray)): + size += sum(get_size(i, seen) for i in obj) return size diff --git a/bfasst/utils/structural.py b/bfasst/utils/structural.py index f67957b5..fd8be3f9 100644 --- a/bfasst/utils/structural.py +++ b/bfasst/utils/structural.py @@ -79,7 +79,9 @@ def __init__( self.comparison_time_log = ( str(log_path).split("_cmp.log", maxsplit=1)[0] + "_comparison_time.txt" ) - self.mem_dump_log = (str(log_path).split("_cmp.log", maxsplit=1)[0] + "_comparison_mem_dump.txt") + self.mem_dump_log = ( + str(log_path).split("_cmp.log", maxsplit=1)[0] + "_comparison_mem_dump.txt" + ) self.mem = 0 self.block_mapping = bidict() diff --git a/scripts/process_results.py b/scripts/process_results.py index 9ce55e43..7ea68346 100644 --- a/scripts/process_results.py +++ b/scripts/process_results.py @@ -5,7 +5,6 @@ from argparse import ArgumentParser import csv -import json from pathlib import Path import xlsxwriter @@ -156,7 +155,7 @@ def phys_cmp_results(designs): out = "results.csv" rows = [] - for design in designs: + for design in designs: row = { "Design": design.split("/")[1], "LUT": 0, @@ -210,7 +209,7 @@ def phys_cmp_results(designs): "T_TIME", "C_TIME", "S_TIME", - "S_MEM" + "S_MEM", ] writer = csv.DictWriter(f, fieldnames=fieldnames) writer.writeheader() From f7b75fe9d8ad21201cabc3105c5d7b2ecc43508a Mon Sep 17 00:00:00 2001 From: ReillyMcK Date: Thu, 27 Feb 2025 16:17:30 -0700 Subject: [PATCH 11/17] format --- bfasst/utils/rw_phys_netlist.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bfasst/utils/rw_phys_netlist.py b/bfasst/utils/rw_phys_netlist.py index 5afaf947..a088ebb6 100644 --- a/bfasst/utils/rw_phys_netlist.py +++ b/bfasst/utils/rw_phys_netlist.py @@ -669,7 +669,6 @@ def __check_carry4_const_net(self, site_inst, const_info, pin_out, new_net): new_net.createPortInst(new_port, routed_to_cell_inst) new_net.createPortInst(new_port, routed_to_cell_inst) - def __process_lut_const_net(self, site_inst, new_cell_inst, pin_out, is_gnd): """ Replace the global const net on an input with the const routethru LUT output. 
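
For reference, the get_size() helper added in PATCH 08 (and switched to generator expressions in PATCH 10) is what produces the possible_matches memory figure logged by structural.py and reported as S_MEM in process_results.py: it walks dicts, objects with __dict__, and other iterables while recording visited ids so shared objects are counted only once. A minimal standalone sketch of how it behaves follows; the sample possible_matches dict is purely illustrative and not taken from any real design.

    import sys

    def get_size(obj, seen=None):
        """Recursively finds size of objects using generators."""
        size = sys.getsizeof(obj)
        if seen is None:
            seen = set()
        obj_id = id(obj)
        if obj_id in seen:
            return 0
        seen.add(obj_id)
        if isinstance(obj, dict):
            size += sum(get_size(v, seen) for v in obj.values())
            size += sum(get_size(k, seen) for k in obj.keys())
        elif hasattr(obj, "__dict__"):
            size += get_size(obj.__dict__, seen)
        elif hasattr(obj, "__iter__") and not isinstance(obj, (str, bytes, bytearray)):
            size += sum(get_size(i, seen) for i in obj)
        return size

    # Hypothetical possible_matches mapping: named instance -> candidate reversed-netlist indices
    possible_matches = {
        "lut_a": {0, 3, 7},
        "lut_b": {1, 4},
        "carry_chain_0": {2},
    }
    print(get_size(possible_matches))  # bytes, counting keys, sets, and set members once each
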
From fc41f46cb9794e835de3b539d97520a4c28dfabd Mon Sep 17 00:00:00 2001 From: Connor Young Date: Thu, 27 Feb 2025 21:35:59 -0700 Subject: [PATCH 12/17] utilize set comprehension, enumeration, and dump mem size of possible_matches dict even if using cache --- bfasst/utils/structural.py | 30 +++++++++++++----------------- 1 file changed, 13 insertions(+), 17 deletions(-) diff --git a/bfasst/utils/structural.py b/bfasst/utils/structural.py index fd8be3f9..4150c311 100644 --- a/bfasst/utils/structural.py +++ b/bfasst/utils/structural.py @@ -295,24 +295,21 @@ def init_matching_instances(self) -> None: grouped_by_cell_type = defaultdict(list) grouped_by_cell_type_and_const = defaultdict(list) - for instance in self.reversed_netlist.instances: + for idx, instance in enumerate(self.reversed_netlist.instances): if instance.cell_type.startswith("SD"): continue num_const = count_num_const(instance.pins) - properties = set() - for prop in self.get_properties_for_type(instance.cell_type): - properties.add( - f"{prop}{convert_verilog_literal_to_int(instance.properties[prop])}" - ) + properties = { + f"{prop}{convert_verilog_literal_to_int(instance.properties[prop])}" + for prop in self.get_properties_for_type(instance.cell_type) + } grouped_by_cell_type_and_const[ (instance.cell_type, hash(frozenset(properties)), num_const) - ].append(self.reversed_netlist.instances.index(instance)) + ].append(idx) - grouped_by_cell_type[(instance.cell_type, hash(frozenset(properties)))].append( - self.reversed_netlist.instances.index(instance) - ) + grouped_by_cell_type[(instance.cell_type, hash(frozenset(properties)))].append(idx) for instance_name, _ in self.named_netlist.instances_to_map: ############################################################### @@ -322,11 +319,10 @@ def init_matching_instances(self) -> None: # Compute a hash of this instance's properties instance = self.named_instance_map[instance_name] num_const = count_num_const(instance.pins) - properties = set() - for prop in self.get_properties_for_type(instance.cell_type): - properties.add( - f"{prop}{convert_verilog_literal_to_int(instance.properties[prop])}" - ) + properties = { + f"{prop}{convert_verilog_literal_to_int(instance.properties[prop])}" + for prop in self.get_properties_for_type(instance.cell_type) + } my_hash = hash(frozenset(properties)) instances_matching = grouped_by_cell_type_and_const[ @@ -349,10 +345,10 @@ def init_matching_instances(self) -> None: ) self.possible_matches[instance_name] = set(instances_matching) - self.mem = get_size(self.possible_matches) - logging.info("The size of the possible_matches dict is %d", self.mem) with open(self.possible_matches_cache_path, "wb") as f: pickle.dump(self.possible_matches, f) + self.mem = get_size(self.possible_matches) + logging.info("The size of the possible_matches dict is %d", self.mem) def potential_mapping_wrapper(self, instance_tuple: tuple) -> bool: """Wrap check_for_potential_mapping some inital checks/postprocessing""" From 38ae684b303668fa46d3a1ebf052bb76744cd39c Mon Sep 17 00:00:00 2001 From: ReillyMcK Date: Fri, 28 Feb 2025 13:06:24 -0700 Subject: [PATCH 13/17] update logging and file dependency tree --- bfasst/tools/compare/structural/structural.py | 2 ++ bfasst/tools/transform/phys_netlist.py | 1 + bfasst/utils/rw_helpers.py | 5 +++-- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/bfasst/tools/compare/structural/structural.py b/bfasst/tools/compare/structural/structural.py index 4be757cb..a913cb1d 100644 --- a/bfasst/tools/compare/structural/structural.py +++ 
b/bfasst/tools/compare/structural/structural.py @@ -52,3 +52,5 @@ def _init_outputs(self): def add_ninja_deps(self, deps): self._add_ninja_deps_default(deps, __file__) deps.append(BFASST_UTILS_PATH / "structural.py") + deps.append(BFASST_UTILS_PATH / "structural_helpers.py") + deps.append(BFASST_UTILS_PATH / "sdn_helpers.py") diff --git a/bfasst/tools/transform/phys_netlist.py b/bfasst/tools/transform/phys_netlist.py index 8d6facd2..deebef08 100644 --- a/bfasst/tools/transform/phys_netlist.py +++ b/bfasst/tools/transform/phys_netlist.py @@ -76,4 +76,5 @@ def _init_outputs(self): def add_ninja_deps(self, deps): self._add_ninja_deps_default(deps, __file__) deps.append(BFASST_UTILS_PATH / "rw_phys_netlist.py") + deps.append(BFASST_UTILS_PATH / "rw_helpers.py") deps.append(NINJA_TRANSFORM_TOOLS_PATH / "checkpoint_to_v.tcl.mustache") diff --git a/bfasst/utils/rw_helpers.py b/bfasst/utils/rw_helpers.py index 89d1c6a0..2c3eac88 100644 --- a/bfasst/utils/rw_helpers.py +++ b/bfasst/utils/rw_helpers.py @@ -469,12 +469,12 @@ def cell_is_default_mapping(self, cell): l2p = cell.getPinMappingsL2P() for logical, physical in default_l2p_map.items(): if logical in l2p and list(l2p[logical]) != [physical]: - print(list(l2p[logical]), "<>", [physical]) + logging.warning(list(l2p[logical]), "<>", [physical]) return False return True - def ensure_connected(self, edif_cell_inst, net): + def ensure_connected(self, edif_cell_inst, net, log=logging.info): """ Ensure that all ports on the cell are connected to the net. @@ -490,6 +490,7 @@ def ensure_connected(self, edif_cell_inst, net): for phys_name, log_name in port_names.items(): port = edif_cell_inst.getPortInst(phys_name) if port is None: + log(f" Port {phys_name} not found on {edif_cell_inst.getName()}, connecting to net") new_port = edif_cell_inst.getPort(log_name) net.createPortInst(new_port, edif_cell_inst) From e926be66c6f4cfb371661169e791cf42e4ab664d Mon Sep 17 00:00:00 2001 From: ReillyMcK Date: Fri, 28 Feb 2025 13:08:15 -0700 Subject: [PATCH 14/17] fix bram cascade parse --- bfasst/utils/structural.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/bfasst/utils/structural.py b/bfasst/utils/structural.py index 4150c311..5dfbf1bb 100644 --- a/bfasst/utils/structural.py +++ b/bfasst/utils/structural.py @@ -660,12 +660,12 @@ def check_for_potential_bram_mapping(self, instance_name: str) -> set[str]: expected_properties = ( named_instance.properties["RAM_EXTENSION_A"] == '"NONE"' and named_instance.properties["RAM_EXTENSION_B"] == '"NONE"' - and named_instance.get_pin("ADDRARDADDR", 15).net.is_vdd - and named_instance.get_pin("ADDRBWRADDR", 15).net.is_vdd - and named_instance.get_pin("CASCADEINA", 0).net.is_vdd - and named_instance.get_pin("CASCADEINB", 0).net.is_vdd - and named_instance.get_pin("CASCADEOUTA", 0).net.is_gnd - and named_instance.get_pin("CASCADEOUTB", 0).net.is_gnd + and named_instance.get_pin("ADDRARDADDR", 15).net.is_const + and named_instance.get_pin("ADDRBWRADDR", 15).net.is_const + and named_instance.get_pin("CASCADEINA", 0).net.is_const + and named_instance.get_pin("CASCADEINB", 0).net.is_const + and named_instance.get_pin("CASCADEOUTA", 0).net.is_const + and named_instance.get_pin("CASCADEOUTB", 0).net.is_const ) if not expected_properties: From cabcb1bd1aa0ae60441641fe20207322eb97675f Mon Sep 17 00:00:00 2001 From: ReillyMcK Date: Fri, 28 Feb 2025 13:09:48 -0700 Subject: [PATCH 15/17] format --- bfasst/utils/rw_helpers.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git 
a/bfasst/utils/rw_helpers.py b/bfasst/utils/rw_helpers.py index 2c3eac88..c2506315 100644 --- a/bfasst/utils/rw_helpers.py +++ b/bfasst/utils/rw_helpers.py @@ -490,7 +490,9 @@ def ensure_connected(self, edif_cell_inst, net, log=logging.info): for phys_name, log_name in port_names.items(): port = edif_cell_inst.getPortInst(phys_name) if port is None: - log(f" Port {phys_name} not found on {edif_cell_inst.getName()}, connecting to net") + log( + f" Port {phys_name} not found on {edif_cell_inst.getName()}, connecting to net" + ) new_port = edif_cell_inst.getPort(log_name) net.createPortInst(new_port, edif_cell_inst) From 052d2da6e623544ba50ef4e76920f4f0f1a1c8cd Mon Sep 17 00:00:00 2001 From: ReillyMcK Date: Mon, 3 Mar 2025 14:21:07 -0700 Subject: [PATCH 16/17] Output physical interchange file of phy netlist --- bfasst/utils/rw_phys_netlist.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bfasst/utils/rw_phys_netlist.py b/bfasst/utils/rw_phys_netlist.py index a088ebb6..81c8b54e 100644 --- a/bfasst/utils/rw_phys_netlist.py +++ b/bfasst/utils/rw_phys_netlist.py @@ -22,7 +22,7 @@ from com.xilinx.rapidwright.device import SiteTypeEnum from com.xilinx.rapidwright.design import Design, Unisim from com.xilinx.rapidwright.edif import EDIFNet, EDIFPropertyValue, EDIFValueType -from com.xilinx.rapidwright.interchange import LogNetlistWriter +from com.xilinx.rapidwright.interchange import LogNetlistWriter, PhysNetlistWriter from java.lang import System from java.io import PrintStream, File @@ -252,6 +252,8 @@ def __run_rapidwright( LogNetlistWriter.writeLogNetlist( self.rw_netlist, str(self.stage_dir / "phys_logical_netlist.capnp") ) + logging.info("Writing capnp interchange physical netlist: %s", str(self.stage_dir / "phys_physical_netlist.capnp")) + PhysNetlistWriter.writePhysNetlist(self.rw_design, str(self.stage_dir / "phys_physical_netlist.capnp")) def __process_all_luts(self, cells_already_visited): """Visit all LUTs and replace them with LUT6_2 instances""" From d8841c877613937df979cd96f57085153d6b3b2b Mon Sep 17 00:00:00 2001 From: ReillyMcK Date: Mon, 3 Mar 2025 14:24:41 -0700 Subject: [PATCH 17/17] format --- bfasst/utils/rw_phys_netlist.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/bfasst/utils/rw_phys_netlist.py b/bfasst/utils/rw_phys_netlist.py index 81c8b54e..20dcc845 100644 --- a/bfasst/utils/rw_phys_netlist.py +++ b/bfasst/utils/rw_phys_netlist.py @@ -252,8 +252,13 @@ def __run_rapidwright( LogNetlistWriter.writeLogNetlist( self.rw_netlist, str(self.stage_dir / "phys_logical_netlist.capnp") ) - logging.info("Writing capnp interchange physical netlist: %s", str(self.stage_dir / "phys_physical_netlist.capnp")) - PhysNetlistWriter.writePhysNetlist(self.rw_design, str(self.stage_dir / "phys_physical_netlist.capnp")) + logging.info( + "Writing capnp interchange physical netlist: %s", + str(self.stage_dir / "phys_physical_netlist.capnp"), + ) + PhysNetlistWriter.writePhysNetlist( + self.rw_design, str(self.stage_dir / "phys_physical_netlist.capnp") + ) def __process_all_luts(self, cells_already_visited): """Visit all LUTs and replace them with LUT6_2 instances"""
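
The ensure_connected() helper from PATCH 01 is the piece that makes every CARRY4 pin appear in the emitted netlist: __process_carry4 calls rw.PinMap.ensure_connected(cell.getEDIFCellInst(), self.gnd), so ports Vivado left undriven are tied to the ground net instead of being omitted from the Verilog (where spydrnet would not infer ground for them). A rough standalone sketch of the same idea is below; it assumes a RapidWright EDIFCellInst and EDIFNet are already in hand, and the pin_map argument stands in for PinMap.CELL_PIN_MAP (the CARRY4 entry shown is a guessed shape, not the project's actual table).

    def ensure_connected(edif_cell_inst, net, pin_map, log=print):
        """Connect every port of the cell that is still undriven to the given net."""
        type_name = edif_cell_inst.getCellType().getName()
        for phys_name, log_name in pin_map[type_name].items():
            # getPortInst() returns None when the port was never hooked up in the EDIF netlist
            if edif_cell_inst.getPortInst(phys_name) is None:
                log(f"Tying undriven port {phys_name} on {edif_cell_inst.getName()} to {net.getName()}")
                net.createPortInst(edif_cell_inst.getPort(log_name), edif_cell_inst)

    # Hypothetical CARRY4 entry (physical pin -> logical port); the real mapping lives in CELL_PIN_MAP
    carry4_pins = {"CARRY4": {"CYINIT": "CYINIT", "DI0": "DI[0]", "S0": "S[0]"}}
    # ensure_connected(carry4_cell.getEDIFCellInst(), gnd_net, carry4_pins)
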
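
PATCH 16 additionally writes the FPGA Interchange physical netlist alongside the logical one. Stripped of the surrounding class, the write step amounts to roughly the sketch below (run under the project's jpype JVM initialization); only the two writer calls appear verbatim in the series, while Design.readCheckpoint() and getNetlist() are assumed entry points from the standard RapidWright API — the flow itself reuses self.rw_design and self.rw_netlist.

    from com.xilinx.rapidwright.design import Design
    from com.xilinx.rapidwright.interchange import LogNetlistWriter, PhysNetlistWriter

    design = Design.readCheckpoint("impl.dcp")  # assumed entry point for a standalone run
    LogNetlistWriter.writeLogNetlist(design.getNetlist(), "phys_logical_netlist.capnp")
    PhysNetlistWriter.writePhysNetlist(design, "phys_physical_netlist.capnp")
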