From e1cd038f06c7161b27afd0073fb507da2b8e5a9e Mon Sep 17 00:00:00 2001
From: Unai Martinez-Corral <38422348+umarcor@users.noreply.github.com>
Date: Tue, 4 Oct 2022 21:53:49 +0200
Subject: [PATCH] f4pga/flows: cleanup (#644)

---
 f4pga/flows/cache.py             | 15 +++++++-------
 f4pga/flows/flow_config.py       | 18 ++++++++---------
 f4pga/flows/inspector.py         | 20 +++++++++----------
 f4pga/flows/modules/analysis.py  | 24 +++++++++++-----------
 f4pga/flows/modules/io_rename.py | 34 ++++++++++++++++----------------
 f4pga/flows/modules/place.py     | 16 ++++-----------
 f4pga/flows/modules/route.py     |  6 +++---
 7 files changed, 61 insertions(+), 72 deletions(-)

diff --git a/f4pga/flows/cache.py b/f4pga/flows/cache.py
index 690e8db..96ad566 100755
--- a/f4pga/flows/cache.py
+++ b/f4pga/flows/cache.py
@@ -24,13 +24,6 @@ from json import dump as json_dump, load as json_load, JSONDecodeError
 from f4pga.flows.common import sfprint
 
 
-def _get_hash(path: Path):
-    if not path.is_dir():
-        with path.open("rb") as rfptr:
-            return zlib_adler32(rfptr.read())
-    return 0  # Directories always get '0' hash.
-
-
 class F4Cache:
     """
     `F4Cache` is used to track changes among dependencies and keep the status of the files on a persistent storage.
@@ -76,7 +69,13 @@ class F4Cache:
 
     def process_file(self, path: Path):
         """Process file for tracking with f4cache."""
-        hash = _get_hash(path)
+        if path.is_dir():
+            # Directories always get '0' hash.
+            hash = 0
+        else:
+            with path.open("rb") as rfptr:
+                hash = zlib_adler32(rfptr.read())
+
         self.current_hashes[path.as_posix()] = hash
 
     def update(self, path: Path, consumer: str):
diff --git a/f4pga/flows/flow_config.py b/f4pga/flows/flow_config.py
index f2977f8..0e1210f 100644
--- a/f4pga/flows/flow_config.py
+++ b/f4pga/flows/flow_config.py
@@ -31,7 +31,7 @@ def open_flow_cfg(path: str) -> dict:
         return json_load(rfptr)
 
 
-def _get_ovs_raw(dict_name: str, flow_cfg, part: "str | None", stage: "str | None"):
+def p_get_ovs_raw(dict_name: str, flow_cfg, part: "str | None", stage: "str | None"):
     vals = flow_cfg.get(dict_name)
     if vals is None:
         vals = {}
@@ -55,11 +55,6 @@ def verify_platform_name(platform: str, mypath: str):
     return False
 
 
-def _is_kword(w: str):
-    kwords = {"dependencies", "values", "default_platform", "default_target"}
-    return w in kwords
-
-
 class FlowDefinition:
     stages: "dict[str, Stage]"  # stage name -> module path mapping
     r_env: ResolutionEnv
@@ -80,6 +75,9 @@ class FlowDefinition:
         return self.stages.keys()
 
 
+KWORDS = {"dependencies", "values", "default_platform", "default_target"}
+
+
 class ProjectFlowConfig:
     flow_cfg: dict
     path: str
@@ -90,7 +88,7 @@
 
     def parts(self):
         for part in self.flow_cfg.keys():
-            if not _is_kword(part):
+            if part not in KWORDS:
                 yield part
 
     def get_default_part(self) -> "str | None":
@@ -103,13 +101,13 @@
         """
         Get dependencies without value resolution applied.
        """
-        return _get_ovs_raw("dependencies", self.flow_cfg, part, None)
+        return p_get_ovs_raw("dependencies", self.flow_cfg, part, None)
 
     def get_values_raw(self, part: "str | None" = None, stage: "str | None" = None):
         """
         Get values without value resolution applied.
         """
-        return _get_ovs_raw("values", self.flow_cfg, part, stage)
+        return p_get_ovs_raw("values", self.flow_cfg, part, stage)
 
     def get_stage_value_overrides(self, part: str, stage: str):
         stage_vals_ovds = {}
@@ -155,7 +153,7 @@ def override_prj_flow_cfg_by_cli(cfg: ProjectFlowConfig, cli_d: "dict[str, dict[
         p_dependencies.update(cli_p_dependencies)
 
         for stage_name, cli_stage_cfg in part_cfg.items():
-            if _is_kword(stage_name):
+            if stage_name in KWORDS:
                 continue
 
             stage_cfg = part_cfg.get(stage_name)
diff --git a/f4pga/flows/inspector.py b/f4pga/flows/inspector.py
index 7ed4ea6..7f8a83b 100644
--- a/f4pga/flows/inspector.py
+++ b/f4pga/flows/inspector.py
@@ -23,18 +23,18 @@ from f4pga.flows.module import Module
 from f4pga.flows.common import decompose_depname
 
 
-def _get_if_qualifier(deplist: "list[str]", qualifier: str):
+def p_get_if_qualifier(deplist: "list[str]", qualifier: str):
     for dep_name in deplist:
         name, q = decompose_depname(dep_name)
         if q == qualifier:
             yield f"● {Style.BRIGHT}{name}{Style.RESET_ALL}"
 
 
-def _list_if_qualifier(deplist: "list[str]", qualifier: str, indent: int = 4):
+def p_list_if_qualifier(deplist: "list[str]", qualifier: str, indent: int = 4):
     indent_str = "".join([" " for _ in range(0, indent)])
     r = ""
 
-    for line in _get_if_qualifier(deplist, qualifier):
+    for line in p_get_if_qualifier(deplist, qualifier):
         r += indent_str + line + "\n"
 
     return r
@@ -44,18 +44,18 @@ def get_module_info(module: Module) -> str:
     r = ""
     r += f"Module `{Style.BRIGHT}{module.name}{Style.RESET_ALL}`:\n"
     r += "Inputs:\n Required:\n Dependencies\n"
-    r += _list_if_qualifier(module.takes, "req", indent=6)
+    r += p_list_if_qualifier(module.takes, "req", indent=6)
     r += " Values:\n"
-    r += _list_if_qualifier(module.values, "req", indent=6)
+    r += p_list_if_qualifier(module.values, "req", indent=6)
     r += " Optional:\n Dependencies:\n"
-    r += _list_if_qualifier(module.takes, "maybe", indent=6)
+    r += p_list_if_qualifier(module.takes, "maybe", indent=6)
     r += " Values:\n"
-    r += _list_if_qualifier(module.values, "maybe", indent=6)
+    r += p_list_if_qualifier(module.values, "maybe", indent=6)
     r += "Outputs:\n Guaranteed:\n"
-    r += _list_if_qualifier(module.produces, "req", indent=4)
+    r += p_list_if_qualifier(module.produces, "req", indent=4)
     r += " On-demand:\n"
-    r += _list_if_qualifier(module.produces, "demand", indent=4)
+    r += p_list_if_qualifier(module.produces, "demand", indent=4)
     r += " Not guaranteed:\n"
-    r += _list_if_qualifier(module.produces, "maybe", indent=4)
+    r += p_list_if_qualifier(module.produces, "maybe", indent=4)
 
     return r
diff --git a/f4pga/flows/modules/analysis.py b/f4pga/flows/modules/analysis.py
index ea5ffc6..0f4fadd 100644
--- a/f4pga/flows/modules/analysis.py
+++ b/f4pga/flows/modules/analysis.py
@@ -23,19 +23,11 @@ from f4pga.flows.tools.vpr import vpr_specific_values, vpr, VprArgs
 from f4pga.flows.module import Module, ModuleContext
 
 
-def analysis_merged_post_implementation_file(ctx: ModuleContext):
-    return str(Path(ctx.takes.eblif).with_suffix("")) + "_merged_post_implementation.v"
-
-
-def analysis_post_implementation_file(ctx: ModuleContext):
-    return str(Path(ctx.takes.eblif).with_suffix("")) + "_post_synthesis.v"
-
-
 class analysisModule(Module):
     def map_io(self, ctx: ModuleContext):
         return {
-            "merged_post_implementation_v": analysis_merged_post_implementation_file(ctx),
-            "post_implementation_v": analysis_post_implementation_file(ctx),
+            "merged_post_implementation_v": p_analysis_merged_post_implementation_file(ctx),
+            "post_implementation_v": p_analysis_post_implementation_file(ctx),
         }
 
     def execute(self, ctx: ModuleContext):
@@ -59,10 +51,10 @@ class analysisModule(Module):
         )
 
         if ctx.is_output_explicit("merged_post_implementation_v"):
-            Path(analysis_merged_post_implementation_file(ctx)).rename(ctx.outputs.merged_post_implementation_v)
+            Path(p_analysis_merged_post_implementation_file(ctx)).rename(ctx.outputs.merged_post_implementation_v)
 
         if ctx.is_output_explicit("post_implementation_v"):
-            Path(analysis_post_implementation_file(ctx)).rename(ctx.outputs.post_implementation_v)
+            Path(p_analysis_post_implementation_file(ctx)).rename(ctx.outputs.post_implementation_v)
 
         yield "Saving log..."
         save_vpr_log("analysis.log", build_dir=build_dir)
@@ -76,3 +68,11 @@
 
 
 ModuleClass = analysisModule
+
+
+def p_analysis_merged_post_implementation_file(ctx: ModuleContext):
+    return str(Path(ctx.takes.eblif).with_suffix("")) + "_merged_post_implementation.v"
+
+
+def p_analysis_post_implementation_file(ctx: ModuleContext):
+    return str(Path(ctx.takes.eblif).with_suffix("")) + "_post_synthesis.v"
diff --git a/f4pga/flows/modules/io_rename.py b/f4pga/flows/modules/io_rename.py
index 25c5787..6043b3d 100644
--- a/f4pga/flows/modules/io_rename.py
+++ b/f4pga/flows/modules/io_rename.py
@@ -48,7 +48,7 @@ from f4pga.flows.module import Module, ModuleContext
 from f4pga.flows.runner import get_module
 
 
-def _switch_keys(d: "dict[str, ]", renames: "dict[str, str]") -> "dict[str, ]":
+def p_switch_keys(d: "dict[str, ]", renames: "dict[str, str]") -> "dict[str, ]":
     newd = {}
     for k, v in d.items():
         r = renames.get(k)
@@ -59,7 +59,7 @@
     return newd
 
 
-def _switchback_attrs(d: Namespace, renames: "dict[str, str]") -> SimpleNamespace:
+def p_switchback_attrs(d: Namespace, renames: "dict[str, str]") -> SimpleNamespace:
     newn = SimpleNamespace()
     for k, v in vars(d).items():
         setattr(newn, k, v)
@@ -71,7 +71,7 @@ def _switchback_attrs(d: Namespace, renames: "dict[str, str]") -> SimpleNamespac
     return newn
 
 
-def _switch_entries(l: "list[str]", renames: "dict[str, str]") -> "list[str]":
+def p_switch_entries(l: "list[str]", renames: "dict[str, str]") -> "list[str]":
     newl = []
     for e in l:
         r = renames.get(e)
@@ -83,7 +83,7 @@ def _switch_entries(l: "list[str]", renames: "dict[str, str]") -> "list[str]":
     return newl
 
 
-def _or_empty_dict(d: "dict | None"):
+def p_or_empty_dict(d: "dict | None"):
     return d if d is not None else {}
 
 
@@ -95,16 +95,16 @@ class IORenameModule(Module):
 
     def map_io(self, ctx: ModuleContext):
         newctx = ctx.shallow_copy()
-        newctx.takes = _switchback_attrs(ctx.takes, self.rename_takes)
-        newctx.values = _switchback_attrs(ctx.values, self.rename_values)
+        newctx.takes = p_switchback_attrs(ctx.takes, self.rename_takes)
+        newctx.values = p_switchback_attrs(ctx.values, self.rename_values)
         r = self.module.map_io(newctx)
-        return _switch_keys(r, self.rename_produces)
+        return p_switch_keys(r, self.rename_produces)
 
     def execute(self, ctx: ModuleContext):
         newctx = ctx.shallow_copy()
-        newctx.takes = _switchback_attrs(ctx.takes, self.rename_takes)
-        newctx.values = _switchback_attrs(ctx.values, self.rename_values)
-        newctx.outputs = _switchback_attrs(ctx.produces, self.rename_produces)
+        newctx.takes = p_switchback_attrs(ctx.takes, self.rename_takes)
+        newctx.values = p_switchback_attrs(ctx.values, self.rename_values)
+        newctx.outputs = p_switchback_attrs(ctx.produces, self.rename_produces)
         print(newctx.takes)
         return self.module.execute(newctx)
 
@@ -113,18 +113,18 @@ class IORenameModule(Module):
         module_class = get_module(mod_path)
         module: Module = module_class(params.get("params"))
 
-        self.rename_takes = _or_empty_dict(params.get("rename_takes"))
-        self.rename_produces = _or_empty_dict(params.get("rename_produces"))
-        self.rename_values = _or_empty_dict(params.get("rename_values"))
+        self.rename_takes = p_or_empty_dict(params.get("rename_takes"))
+        self.rename_produces = p_or_empty_dict(params.get("rename_produces"))
+        self.rename_values = p_or_empty_dict(params.get("rename_values"))
 
         self.module = module
         self.name = f"{module.name}-io_renamed"
         self.no_of_phases = module.no_of_phases
-        self.takes = _switch_entries(module.takes, self.rename_takes)
-        self.produces = _switch_entries(module.produces, self.rename_produces)
-        self.values = _switch_entries(module.values, self.rename_values)
+        self.takes = p_switch_entries(module.takes, self.rename_takes)
+        self.produces = p_switch_entries(module.produces, self.rename_produces)
+        self.values = p_switch_entries(module.values, self.rename_values)
         if hasattr(module, "prod_meta"):
-            self.prod_meta = _switch_keys(module.prod_meta, self.rename_produces)
+            self.prod_meta = p_switch_keys(module.prod_meta, self.rename_produces)
 
 
 ModuleClass = IORenameModule
diff --git a/f4pga/flows/modules/place.py b/f4pga/flows/modules/place.py
index 2f404a3..9136613 100644
--- a/f4pga/flows/modules/place.py
+++ b/f4pga/flows/modules/place.py
@@ -24,21 +24,13 @@ from f4pga.flows.tools.vpr import vpr_specific_values, vpr, VprArgs, save_vpr_lo
 from f4pga.flows.module import Module, ModuleContext
 
 
-def default_output_name(eblif):
-    return str(Path(eblif).with_suffix(".place"))
-
-
-def place_constraints_file(ctx: ModuleContext):
-    if ctx.takes.place_constraints:
-        return ctx.takes.place_constraints, False
-    if ctx.takes.io_place:
-        return ctx.takes.io_place, False
-    return str(Path(ctx.takes.eblif).with_suffix(".place"))
+def p_default_output_name(eblif):
+    return Path(eblif).with_suffix(".place")
 
 
 class PlaceModule(Module):
     def map_io(self, ctx: ModuleContext):
-        return {"place": default_output_name(ctx.takes.eblif)}
+        return {"place": str(p_default_output_name(ctx.takes.eblif))}
 
     def execute(self, ctx: ModuleContext):
         build_dir = ctx.takes.build_dir
@@ -72,7 +64,7 @@ class PlaceModule(Module):
         # modules may produce some temporary files with names that differ from
         # the ones in flow configuration.
         if ctx.is_output_explicit("place"):
-            Path(default_output_name(ctx.takes.eblif)).rename(ctx.outputs.place)
+            p_default_output_name(ctx.takes.eblif).rename(ctx.outputs.place)
 
         yield "Saving log..."
         save_vpr_log("place.log", build_dir=build_dir)
diff --git a/f4pga/flows/modules/route.py b/f4pga/flows/modules/route.py
index 5df4952..1938466 100644
--- a/f4pga/flows/modules/route.py
+++ b/f4pga/flows/modules/route.py
@@ -23,13 +23,13 @@ from f4pga.flows.tools.vpr import vpr_specific_values, vpr, VprArgs, save_vpr_lo
 from f4pga.flows.module import Module, ModuleContext
 
 
-def route_place_file(ctx: ModuleContext):
+def p_route_place_file(ctx: ModuleContext):
     return Path(ctx.takes.eblif).with_suffix(".route")
 
 
 class RouteModule(Module):
     def map_io(self, ctx: ModuleContext):
-        return {"route": str(route_place_file(ctx))}
+        return {"route": str(p_route_place_file(ctx))}
 
     def execute(self, ctx: ModuleContext):
         build_dir = Path(ctx.takes.eblif).parent
@@ -52,7 +52,7 @@
         )
 
         if ctx.is_output_explicit("route"):
-            route_place_file(ctx).rename(ctx.outputs.route)
+            p_route_place_file(ctx).rename(ctx.outputs.route)
 
         yield "Saving log..."
         save_vpr_log("route.log", build_dir=build_dir)