f4pga: cleanup and style

Signed-off-by: Unai Martinez-Corral <umartinezcorral@antmicro.com>

parent 26fb1d63b0
commit 636da72d32
@@ -1,2 +1,3 @@
 *.pyc
 *.sw*
+/f4pga/build/
@@ -1,31 +1,28 @@
 #!/usr/bin/env python3
 """
-sfbuild - Symbiflow Build System
+F4PGA Build System
 
-This tool allows for building FPGA targets (such as bitstreams) for any supported
-platform with just one simple command and a project file.
+This tool allows for building FPGA targets (such as bitstreams) for any supported platform with just one simple command
+and a project file.
 
-The idea is that sfbuild wraps all the tools needed by different platforms in
-"modules", which define inputs/outputs and various parameters. This allows
-sfbuild to resolve dependencies for any target provided that a "flow definition"
-file exists for such target. The flow defeinition file list modules available for
-that platform and may tweak some settings of those modules.
+The idea is that F4PGA wraps all the tools needed by different platforms in "modules", which define inputs/outputs and
+various parameters.
+This allows F4PGA to resolve dependencies for any target provided that a "flow definition" file exists for such target.
+The flow definition file lists modules available for that platform and may tweak some settings of those modules.
 
-A basic example of using sfbuild:
-$ sfbuild build --platform arty_35 -t bitstream
+A basic example of using F4PGA:
 
-This will make sfbuild attempt to create a bitstream for arty_35 platform.
-flow.json is a flow configuration file, which should be created for a project
-that uses sfbuild. Iontains project-specific definitions needed within the flow,
-such as list of source code files.
+$ f4pga build --platform arty_35 -t bitstream
+
+This will make F4PGA attempt to create a bitstream for the arty_35 platform.
+``flow.json`` is a flow configuration file, which should be created for a project that uses F4PGA.
+It contains project-specific definitions needed within the flow, such as the list of source code files.
 """
 
+from pathlib import Path
 from argparse import Namespace
-import os
+from sys import argv as sys_argv
+from os import environ
-import json
+from json import load as json_load, loads as json_loads
 from typing import Iterable
 from colorama import Fore, Style
@@ -34,11 +31,11 @@ from f4pga.common import (
     fatal,
     scan_modules,
     set_verbosity_level,
-    sfprint
+    sfprint,
+    sub as common_sub
 )
 from f4pga.module import *
 from f4pga.cache import SymbiCache
-import f4pga.ugly as ugly
 from f4pga.flow_config import (
     ProjectFlowConfig,
     FlowConfig,
@@ -54,10 +51,10 @@ from f4pga.argparser import setup_argparser, get_cli_flow_config
 
 SYMBICACHEPATH = '.symbicache'
 
-binpath = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(os.sys.argv[0])), '..'))
+binpath = str(Path(sys_argv[0]).resolve().parent.parent)
 mypath = str(Path(__file__).resolve().parent)
 
-share_dir_path = os.path.realpath(f"{environ.get('F4PGA_INSTALL_DIR', '/usr/local')}/xc7/install/share/symbiflow")
+share_dir_path = str(Path(f"{environ.get('F4PGA_INSTALL_DIR', '/usr/local')}/xc7/install/share/symbiflow").resolve())
 
 class DependencyNotProducedException(Exception):
     dep_name: str
@@ -86,8 +83,7 @@ def req_exists(r):
     """ Checks whether a dependency exists on a drive. """
 
     if type(r) is str:
-        if not os.path.isfile(r) and not os.path.islink(r) \
-                and not os.path.isdir(r):
+        if not Path(r).is_file() and not Path(r).is_symlink() and not Path(r).is_dir():
             return False
     elif type(r) is list:
         return not (False in map(req_exists, r))
@@ -471,9 +467,27 @@ def setup_resolution_env():
 
     r_env = ResolutionEnv({
         'shareDir': share_dir_path,
-        'binDir': os.path.realpath(os.path.join(share_dir_path, '../../bin'))
+        'binDir': str((Path(share_dir_path) / '../../bin').resolve())
     })
-    r_env.add_values(ugly.generate_values())
+
+    def _noisy_warnings():
+        """
+        Emit some noisy warnings.
+        """
+        environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings.log'
+        return 'noisy_warnings.log'
+
+    def _generate_values():
+        """
+        Generate initial values, available in configs.
+        """
+        return {
+            'prjxray_db': common_sub('prjxray-config').decode().replace('\n', ''),
+            'python3': common_sub('which', 'python3').decode().replace('\n', ''),
+            'noisyWarnings': _noisy_warnings()
+        }
+
+    r_env.add_values(_generate_values())
     return r_env
 
 def open_project_flow_config(path: str) -> ProjectFlowConfig:
@@ -509,7 +523,7 @@ def get_platform_name_for_part(part_name: str):
     differ only in a type of package they use.
     """
     with (Path(mypath) / 'part_db.json').open('r') as rfptr:
-        return json.load(rfptr).get(part_name.upper())
+        return json_load(rfptr).get(part_name.upper())
 
 def cmd_build(args: Namespace):
     """ sfbuild's `build` command implementation """
@@ -535,7 +549,7 @@ def cmd_build(args: Namespace):
         fatal(-1, 'No configuration was provided. Use `--flow`, `--platform` or '
                   '`--part` to configure flow..')
 
-    platform_path = os.path.join(mypath, 'platforms', platform + '.json')
+    platform_path = str(Path(mypath) / f'platforms/{platform}.json')
     platform_def = None
     try:
         with open(platform_path) as platform_file:
@@ -550,7 +564,7 @@ def cmd_build(args: Namespace):
     sfprint(2, 'Scanning modules...')
     scan_modules(mypath)
 
-    flow_definition_dict = json.loads(platform_def)
+    flow_definition_dict = json_loads(platform_def)
     flow_def = FlowDefinition(flow_definition_dict, r_env)
     flow_cfg = FlowConfig(project_flow_cfg, flow_def, platform)
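For illustration (not part of this commit): the `build` subcommand resolved above can be driven either by `--platform`, or by `--part`, which `get_platform_name_for_part` maps to a platform via `part_db.json`. A minimal sketch of both invocations; the part name below is hypothetical, not taken from this diff:

    $ f4pga build --platform arty_35 -t bitstream
    $ f4pga build --part XC7A35TCSG324-1 -t bitstream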
@@ -1,69 +1,158 @@
 from argparse import ArgumentParser, Namespace
-import re
+from re import finditer as re_finditer
 
 
 def _add_flow_arg(parser: ArgumentParser):
-    parser.add_argument('-f', '--flow', metavar='flow_path', type=str,
-                        help='Path to flow definition file')
+    parser.add_argument(
+        '-f',
+        '--flow',
+        metavar='flow_path',
+        type=str,
+        help='Path to flow definition file'
+    )
 
 
 def _setup_build_parser(parser: ArgumentParser):
     _add_flow_arg(parser)
-    parser.add_argument('-t', '--target', metavar='target_name', type=str,
-                        help='Perform stages necessary to acquire target')
-    parser.add_argument('--platform', metavar='platform_name',
-                        help='Target platform_name')
-    parser.add_argument('-P', '--pretend', action='store_true',
-                        help='Show dependency resolution without executing flow')
-    parser.add_argument('-i', '--info', action='store_true',
-                        help='Display info about available targets')
-    parser.add_argument('-c', '--nocache', action='store_true',
-                        help='Ignore caching and rebuild everything up to the '
-                             'target.')
-    parser.add_argument('-S', '--stageinfo', nargs=1, metavar='stage_name',
-                        help='Display info about stage')
-    parser.add_argument('-r', '--requirements', action='store_true',
-                        help='Display info about project\'s requirements.')
-    parser.add_argument('-p', '--part', metavar='part_name',
-                        help='Name of the target chip')
-    parser.add_argument('--dep', '-D', action='append', default=[])
-    parser.add_argument('--val', '-V', action='append', default=[])
+
+    parser.add_argument(
+        '-t',
+        '--target',
+        metavar='target_name',
+        type=str,
+        help='Perform stages necessary to acquire target'
+    )
+
+    parser.add_argument(
+        '--platform',
+        metavar='platform_name',
+        help='Target platform_name'
+    )
+
+    parser.add_argument(
+        '-P',
+        '--pretend',
+        action='store_true',
+        help='Show dependency resolution without executing flow'
+    )
+
+    parser.add_argument(
+        '-i',
+        '--info',
+        action='store_true',
+        help='Display info about available targets'
+    )
+
+    parser.add_argument(
+        '-c',
+        '--nocache',
+        action='store_true',
+        help='Ignore caching and rebuild everything up to the target.'
+    )
+
+    parser.add_argument(
+        '-S',
+        '--stageinfo',
+        nargs=1,
+        metavar='stage_name',
+        help='Display info about stage'
+    )
+
+    parser.add_argument(
+        '-r',
+        '--requirements',
+        action='store_true',
+        help='Display info about project\'s requirements.'
+    )
+
+    parser.add_argument(
+        '-p',
+        '--part',
+        metavar='part_name',
+        help='Name of the target chip'
+    )
+
+    parser.add_argument(
+        '--dep',
+        '-D',
+        action='append',
+        default=[]
+    )
+
+    parser.add_argument(
+        '--val',
+        '-V',
+        action='append',
+        default=[]
+    )
 
     # Currently unsupported
-    parser.add_argument('-M', '--moduleinfo', nargs=1,
-                        metavar='module_name_or_path',
-                        help='Display info about module. Requires `-p` option '
-                             'in case of module name')
-    parser.add_argument('-T', '--take_explicit_paths', nargs='+',
-                        metavar='<name=path, ...>', type=str,
-                        help='Specify stage inputs explicitely. This might be '
-                             'required if some files got renamed or deleted and '
-                             'symbiflow is unable to deduce the flow that lead '
-                             'to dependencies required by the requested stage')
+    parser.add_argument(
+        '-M',
+        '--moduleinfo',
+        nargs=1,
+        metavar='module_name_or_path',
+        help='Display info about module. Requires `-p` option in case of module name'
+    )
+
+    parser.add_argument(
+        '-T',
+        '--take_explicit_paths',
+        nargs='+',
+        metavar='<name=path, ...>',
+        type=str,
+        help='Specify stage inputs explicitly. This might be required if some files got renamed or deleted and '
+             'symbiflow is unable to deduce the flow that led to dependencies required by the requested stage'
+    )
 
 
 def _setup_show_dep_parser(parser: ArgumentParser):
-    parser.add_argument('-p', '--platform', metavar='platform_name', type=str,
-                        help='Name of the platform (use to display '
-                             'platform-specific values.')
-    parser.add_argument('-s', '--stage', metavar='stage_name', type=str,
-                        help='Name of the stage (use if you want to set the '
-                             'value only for that stage). Requires `-p`.')
+    parser.add_argument(
+        '-p',
+        '--platform',
+        metavar='platform_name',
+        type=str,
+        help='Name of the platform (use to display platform-specific values).'
+    )
+
+    parser.add_argument(
+        '-s',
+        '--stage',
+        metavar='stage_name',
+        type=str,
+        help='Name of the stage (use if you want to set the value only for that stage). Requires `-p`.'
+    )
+
     _add_flow_arg(parser)
 
-# Set up argument parser for the program. Pretty self-explanatory.
 
 def setup_argparser():
+    """
+    Set up argument parser for the program.
+    """
     parser = ArgumentParser(description='SymbiFlow Build System')
 
-    parser.add_argument('-v', '--verbose', action='count', default=0)
-    parser.add_argument('-s', '--silent', action='store_true')
+    parser.add_argument(
+        '-v',
+        '--verbose',
+        action='count',
+        default=0
+    )
+
+    parser.add_argument(
+        '-s',
+        '--silent',
+        action='store_true'
+    )
 
     subparsers = parser.add_subparsers(dest='command')
-    build = subparsers.add_parser('build')
-    _setup_build_parser(build)
-    show_dep = subparsers.add_parser('showd',
-                                     description='Show the value(s) assigned to a '
-                                                 'dependency')
+    _setup_build_parser(subparsers.add_parser('build'))
+    show_dep = subparsers.add_parser('showd', description='Show the value(s) assigned to a dependency')
     _setup_show_dep_parser(show_dep)
 
     return parser
 
 
 def _parse_depval(depvalstr: str):
     """
     Parse a dependency or value definition in form of:

@@ -94,6 +183,7 @@ def _parse_depval(depvalstr: str):
 
     return d
 
+
 def _unescaped_matches(regexp: str, s: str, escape_chr='\\'):
     """
     Find all occurrences of a pattern in a string that contains escape sequences.

@@ -109,8 +199,7 @@ def _unescaped_matches(regexp: str, s: str, escape_chr='\\'):
     offsets = []
     offset = 0
     for sl in s.split(escape_chr):
-        l = len(sl)
-        if l <= 1:
+        if len(sl) <= 1:
             continue
         noescape = sl[(1 if offset != 0 else 0):]
         for _ in noescape:

@@ -118,7 +207,7 @@ def _unescaped_matches(regexp: str, s: str, escape_chr='\\'):
             offset += 2
         noescapes += noescape
 
-    iter = re.finditer(regexp, noescapes)
+    iter = re_finditer(regexp, noescapes)
 
     for m in iter:
         start = m.start()

@@ -127,10 +216,13 @@ def _unescaped_matches(regexp: str, s: str, escape_chr='\\'):
         off2 = end + offsets[end]
         yield off1, off2
 
-def _unescaped_separated(regexp: str, s: str, escape_chr='\\'):
-    """ Yields substrings of a string that contains escape sequences. """
-
-    last_end = 0;
+
+def _unescaped_separated(regexp: str, s: str, escape_chr='\\'):
+    """
+    Yields substrings of a string that contains escape sequences.
+    """
+
+    last_end = 0
     for start, end in _unescaped_matches(regexp, s, escape_chr=escape_chr):
         yield s[last_end:start]
         last_end = end

@@ -139,6 +231,7 @@ def _unescaped_separated(regexp: str, s: str, escape_chr='\\'):
     else:
         yield ''
 
+
 def _parse_cli_value(s: str):
     """
     Parse a value/dependency passed to CLI

@@ -207,6 +300,7 @@ def _parse_cli_value(s: str):
     # String
     return s.replace('\\', '')
 
+
 def get_cli_flow_config(args: Namespace, platform: str):
     def create_defdict():
         return {
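For illustration (not part of this commit): `_parse_depval` and `_parse_cli_value` above consume the `--dep`/`-D` and `--val`/`-V` arguments collected with `action='append'`. A hedged sketch of such overrides on the command line, assuming the `name=value` form implied by the (truncated) `_parse_depval` docstring; the names are illustrative only:

    $ f4pga build --platform arty_35 -t bitstream -D eblif=build/top.eblif -V top=counter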
@@ -1,19 +1,13 @@
-import os
-import zlib
-import json
+from pathlib import Path
+from zlib import adler32 as zlib_adler32
+from json import dump as json_dump, load as json_load, JSONDecodeError
 
-def _get_file_hash(path: str):
-    with open(path, 'rb') as f:
-        b = f.read()
-        return str(zlib.adler32(b))
 
 class SymbiCache:
     """
-    `SymbiCache` is used to track changes among dependencies and keep
-    the status of the files on a persistent storage.
+    `SymbiCache` is used to track changes among dependencies and keep the status of the files on a persistent storage.
     Files which are tracked get their checksums calculated and stored in a file.
-    If file's checksum differs from the one saved in a file, that means, the file
-    has changed.
+    If a file's checksum differs from the one saved in a file, that means the file has changed.
     """
 
     hashes: 'dict[str, dict[str, str]]'

@@ -21,13 +15,14 @@ class SymbiCache:
     cachefile_path: str
 
     def __init__(self, cachefile_path):
-        """ `chachefile_path` - path to a file used for persistent storage of
-        checksums. """
+        """
+        `cachefile_path` - path to a file used for persistent storage of checksums.
+        """
 
         self.status = {}
         self.cachefile_path = cachefile_path
         self.load()
 
     def _try_pop_consumer(self, path: str, consumer: str):
         if self.status.get(path) and self.status[path].get(consumer):
             self.status[path].pop(consumer)

@@ -37,7 +32,7 @@ class SymbiCache:
             self.hashes[path].pop(consumer)
             if len(self.hashes[path]) == 0:
                 self.hashes.pop(path)
 
     def _try_push_consumer_hash(self, path: str, consumer: str, hash):
         if not self.hashes.get(path):
             self.hashes[path] = {}

@@ -46,43 +41,39 @@ class SymbiCache:
         if not self.status.get(path):
             self.status[path] = {}
         self.status[path][consumer] = status
 
-    def _get_last_hash(self, path: str, consumer: str):
-        last_hashes = self.hashes.get(path)
-        if last_hashes is None:
-            return None
-        return last_hashes.get(consumer)
-
     def update(self, path: str, consumer: str):
-        """ Add/remove a file to.from the tracked files, update checksum
-        if necessary and calculate status.
+        """ Add/remove a file to/from the tracked files, update checksum if necessary and calculate status.
 
         Multiple hashes are stored per file, one for each consumer module.
-        "__target" is used as a convention for a "fake" consumer in case the file
-        is requested as a target and not used by a module within the active flow.
+        "__target" is used as a convention for a "fake" consumer in case the file is requested as a target and not used
+        by a module within the active flow.
         """
 
-        isdir = os.path.isdir(path)
-        if not (os.path.isfile(path) or os.path.islink(path) or isdir):
+        isdir = Path(path).is_dir()
+        if not (Path(path).is_file() or Path(path).is_symlink() or isdir):
             self._try_pop_consumer(path, consumer)
             return True
         hash = 0 # Directories always get '0' hash.
         if not isdir:
-            hash = _get_file_hash(path)
-        last_hash = self._get_last_hash(path, consumer)
+            with Path(path).open('rb') as rfptr:
+                hash = str(zlib_adler32(rfptr.read()))
+
+        last_hashes = self.hashes.get(path)
+        last_hash = None if last_hashes is None else last_hashes.get(consumer)
+
         if hash != last_hash:
             self._try_push_consumer_status(path, consumer, 'changed')
             self._try_push_consumer_hash(path, consumer, hash)
             return True
-        else:
-            self._try_push_consumer_status(path, consumer, 'same')
-            return False
+
+        self._try_push_consumer_status(path, consumer, 'same')
+        return False
 
     def get_status(self, path: str, consumer: str):
         """ Get status for a file with a given path.
-        returns 'untracked' if the file is not tracked or hasn't been
-        treated with `update` procedure before calling `get_status`. """
-
+
+        returns 'untracked' if the file is not tracked or hasn't been treated with `update` procedure before calling
+        `get_status`.
+        """
         statuses = self.status.get(path)
         if not statuses:
             return 'untracked'

@@ -90,26 +81,23 @@ class SymbiCache:
         if not status:
             return 'untracked'
         return status
 
     def load(self):
         """Loads cache's state from the persistent storage"""
 
         try:
-            with open(self.cachefile_path, 'r') as f:
-                b = f.read()
-                self.hashes = json.loads(b)
-        except json.JSONDecodeError as jerr:
+            with Path(self.cachefile_path).open('r') as rfptr:
+                self.hashes = json_load(rfptr)
+        except JSONDecodeError as jerr:
             print('WARNING: .symbicache is corrupted! '
-                  'This will cause flow to re-execute from the beggining.')
+                  'This will cause flow to re-execute from the beginning.')
             self.hashes = {}
         except FileNotFoundError:
             print('Couldn\'t open .symbicache cache file. '
-                  'This will cause flow to re-execute from the beggining.')
+                  'This will cause flow to re-execute from the beginning.')
             self.hashes = {}
 
     def save(self):
-        """Saves cache's state to the persistent storage"""
-
-        with open(self.cachefile_path, 'w') as f:
-            b = json.dumps(self.hashes, indent=4)
-            f.write(b)
+        """Saves cache's state to the persistent storage."""
+        with Path(self.cachefile_path).open('w') as wfptr:
+            json_dump(self.hashes, wfptr, indent=4)
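For illustration (not part of this commit), a minimal usage sketch of the class above, based only on the methods shown in this diff; the file names are illustrative:

    cache = SymbiCache('.symbicache')                    # load persisted checksums, or start empty
    changed = cache.update('counter.v', '__target')      # '__target' is the "fake consumer" convention
    status = cache.get_status('counter.v', '__target')   # 'changed', 'same' or 'untracked'
    cache.save()                                         # persist checksums for the next run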
f4pga/common.py
@@ -1,9 +1,11 @@
+from pathlib import Path
+from os import environ, listdir as os_listdir
+from sys import argv as sys_argv
 from argparse import Namespace
-import subprocess
-import os
-import shutil
-import sys
-import re
+from shutil import move as sh_mv
+from subprocess import run
+from re import match as re_match, finditer as re_finditer
 
+
 def decompose_depname(name: str):
     spec = 'req'

@@ -16,6 +18,7 @@ def decompose_depname(name: str):
         name = name[:len(name) - 1]
     return name, spec
 
+
 def with_qualifier(name: str, q: str) -> str:
     if q == 'req':
         return decompose_depname(name)[0]
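For illustration (not part of this commit): `decompose_depname` splits a trailing qualifier character off a dependency name, and `with_qualifier` reattaches one. A hedged sketch of the round trip; the 'maybe'/'?' pairing is inferred from the `vpr_options?` entry later in this file, since that branch is elided from the hunks shown here:

    decompose_depname('eblif')          # ('eblif', 'req')
    decompose_depname('vpr_options?')   # ('vpr_options', 'maybe'), inferred
    with_qualifier('eblif', 'demand')   # 'eblif!'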
@@ -24,25 +27,33 @@ def with_qualifier(name: str, q: str) -> str:
     if q == 'demand':
         return decompose_depname(name)[0] + '!'
 
+
 _sfbuild_module_collection_name_to_path = {}
 
+
 def scan_modules(mypath: str):
     global _sfbuild_module_collection_name_to_path
 
     sfbuild_home = mypath
-    sfbuild_home_dirs = os.listdir(sfbuild_home)
+    sfbuild_home_dirs = os_listdir(sfbuild_home)
     sfbuild_module_dirs = \
-        [dir for dir in sfbuild_home_dirs if re.match('.*_modules$', dir)]
-    _sfbuild_module_collection_name_to_path = \
-        dict([(re.match('(.*)_modules$', moddir).groups()[0],
-               os.path.join(sfbuild_home, moddir))
-              for moddir in sfbuild_module_dirs])
+        [dir for dir in sfbuild_home_dirs if re_match('.*_modules$', dir)]
+    _sfbuild_module_collection_name_to_path = dict([
+        (
+            re_match('(.*)_modules$', moddir).groups()[0],
+            str(Path(sfbuild_home) / moddir)
+        )
+        for moddir in sfbuild_module_dirs
+    ])
 
 
-"""Resolves module location from modulestr"""
 def resolve_modstr(modstr: str):
+    """
+    Resolves module location from modulestr.
+    """
     sl = modstr.split(':')
     if len(sl) > 2:
-        raise Exception('Incorrect module sysntax. '
-                        'Expected one \':\' or one \'::\'')
+        raise Exception('Incorrect module syntax. Expected one \':\' or one \'::\'')
     if len(sl) < 2:
         return modstr
     collection_name = sl[0]

@@ -51,14 +62,13 @@ def resolve_modstr(modstr: str):
     col_path = _sfbuild_module_collection_name_to_path.get(collection_name)
     if not col_path:
         fatal(-1, f'Module collection {collection_name} does not exist')
-    return os.path.join(col_path, module_filename)
+    return str(Path(col_path) / module_filename)
 
 
 def deep(fun):
     """
-    Create a recursive string transform function for 'str | list | dict',
-    i.e a dependency
+    Create a recursive string transform function for 'str | list | dict', i.e. a dependency.
     """
 
     def d(paths, *args, **kwargs):
         if type(paths) is str:
             return fun(paths)

@@ -66,18 +76,13 @@ def deep(fun):
             return [d(p) for p in paths];
         elif type(paths) is dict:
             return dict([(k, d(p)) for k, p in paths.items()])
 
     return d
 
-def file_noext(path: str):
-    """ Return a file without it's extenstion"""
-    m = re.match('(.*)\\.[^.]*$', path)
-    if m:
-        path = m.groups()[0]
-    return path
 
 class VprArgs:
-    """ Represents argument list for VPR (Versatile Place and Route) """
+    """
+    Represents argument list for VPR (Versatile Place and Route).
+    """
 
     arch_dir: str
     arch_def: str

@@ -91,13 +96,13 @@ class VprArgs:
     def __init__(self, share: str, eblif, values: Namespace,
                  sdc_file: 'str | None' = None,
                  vpr_extra_opts: 'list | None' = None):
-        self.arch_dir = os.path.join(share, 'arch')
+        self.arch_dir = str(Path(share) / 'arch')
         self.arch_def = values.arch_def
         self.lookahead = values.rr_graph_lookahead_bin
         self.rr_graph = values.rr_graph_real_bin
         self.place_delay = values.vpr_place_delay
         self.device_name = values.vpr_grid_layout_name
-        self.eblif = os.path.realpath(eblif)
+        self.eblif = str(Path(eblif).resolve())
         if values.vpr_options is not None:
             self.optional = options_dict_to_list(values.vpr_options)
         else:

@@ -107,13 +112,17 @@ class VprArgs:
         if sdc_file is not None:
             self.optional += ['--sdc_file', sdc_file]
 
+
 class SubprocessException(Exception):
     return_code: int
 
-def sub(*args, env=None, cwd=None):
-    """ Execute subroutine """
 
-    out = subprocess.run(args, capture_output=True, env=env, cwd=cwd)
+def sub(*args, env=None, cwd=None):
+    """
+    Execute subroutine.
+    """
+
+    out = run(args, capture_output=True, env=env, cwd=cwd)
     if out.returncode != 0:
         print(f'[ERROR]: {args[0]} non-zero return code.\n'
               f'stderr:\n{out.stderr.decode()}\n\n'

@@ -121,8 +130,11 @@ def sub(*args, env=None, cwd=None):
         exit(out.returncode)
     return out.stdout
 
+
 def vpr(mode: str, vprargs: VprArgs, cwd=None):
-    """ Execute `vpr` """
+    """
+    Execute `vpr`.
+    """
 
     modeargs = []
     if mode == 'pack':

@@ -132,15 +144,17 @@ def vpr(mode: str, vprargs: VprArgs, cwd=None):
     elif mode == 'route':
         modeargs = ['--route']
 
-    return sub(*(['vpr',
-                  vprargs.arch_def,
-                  vprargs.eblif,
-                  '--device', vprargs.device_name,
-                  '--read_rr_graph', vprargs.rr_graph,
-                  '--read_router_lookahead', vprargs.lookahead,
-                  '--read_placement_delay_lookup', vprargs.place_delay] +
-                 modeargs + vprargs.optional),
-                cwd=cwd)
+    return sub(*([
+        'vpr',
+        vprargs.arch_def,
+        vprargs.eblif,
+        '--device', vprargs.device_name,
+        '--read_rr_graph', vprargs.rr_graph,
+        '--read_router_lookahead', vprargs.lookahead,
+        '--read_placement_delay_lookup', vprargs.place_delay
+    ] + modeargs + vprargs.optional), cwd=cwd)
 
 
 _vpr_specific_values = [
     'arch_def',

@@ -150,10 +164,13 @@ _vpr_specific_values = [
     'vpr_grid_layout_name',
     'vpr_options?'
 ]
 
+
 def vpr_specific_values():
     global _vpr_specific_values
     return _vpr_specific_values
 
+
 def options_dict_to_list(opt_dict: dict):
     """
     Converts a dictionary of named options for CLI program to a list.
@@ -167,36 +184,44 @@ def options_dict_to_list(opt_dict: dict):
             opts.append(str(val))
     return opts
 
+
 def noisy_warnings(device):
-    """ Emit some noisy warnings """
-    os.environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings-' + device + '_pack.log'
+    """
+    Emit some noisy warnings.
+    """
+    environ['OUR_NOISY_WARNINGS'] = f'noisy_warnings-{device}_pack.log'
 
+
 def my_path():
-    """ Get current PWD """
-    mypath = os.path.realpath(sys.argv[0])
-    return os.path.dirname(mypath)
+    """
+    Get current PWD.
+    """
+    return str(Path(sys_argv[0]).resolve().parent)
 
+
 def save_vpr_log(filename, build_dir=''):
-    """ Save VPR logic (moves the default output file into a desired path) """
-    shutil.move(os.path.join(build_dir, 'vpr_stdout.log'), filename)
+    """
+    Save VPR log (moves the default output file into a desired path).
+    """
+    sh_mv(str(Path(build_dir) / 'vpr_stdout.log'), filename)
 
+
 def fatal(code, message):
     """
-    Print a message informing about an error that has occured and terminate program
-    with a given return code.
+    Print a message informing about an error that has occurred and terminate program with a given return code.
     """
 
+    raise(Exception(f'[FATAL ERROR]: {message}'))
     exit(code)
 
+
 class ResolutionEnv:
     """
-    ResolutionEnv is used to hold onto mappings for variables used in flow and
-    perform text substitutions using those variables.
-    Variables can be referred in any "resolvable" string using the following
-    syntax: 'Some static text ${variable_name}'. The '${variable_name}' part
-    will be replaced by the value associated with name 'variable_name', is such
-    mapping exists.
+    ResolutionEnv is used to hold onto mappings for variables used in flow and perform text substitutions using those
+    variables.
+    Variables can be referred in any "resolvable" string using the following syntax: 'Some static text ${variable_name}'.
+    The '${variable_name}' part will be replaced by the value associated with name 'variable_name', if such mapping
+    exists.
 
     values: dict
     """
@@ -209,15 +234,14 @@ class ResolutionEnv:
     def resolve(self, s, final=False):
         """
         Perform resolution on `s`.
-        `s` can be a `str`, a `dict` with arbitrary keys and resolvable values,
-        or a `list` of resolvable values.
+        `s` can be a `str`, a `dict` with arbitrary keys and resolvable values, or a `list` of resolvable values.
         final=True - resolve any unknown variables into ''
         This is a hack and probably should be removed in the future
         """
 
         if type(s) is str:
-            match_list = list(re.finditer('\$\{([^${}]*)\}', s))
-            # Assumption: re.finditer finds matches in a left-to-right order
+            match_list = list(re_finditer('\$\{([^${}]*)\}', s))
+            # Assumption: re_finditer finds matches in a left-to-right order
             match_list.reverse()
             for match in match_list:
                 match_str = match.group(1)

@@ -242,24 +266,30 @@ class ResolutionEnv:
         return s
 
     def add_values(self, values: dict):
-        """ Add mappings from `values`"""
-
+        """
+        Add mappings from `values`.
+        """
         for k, v in values.items():
             self.values[k] = self.resolve(v)
 
+
 verbosity_level = 0
 
-def sfprint(verbosity: int, *args):
-    """ Print with regards to currently set verbosity level """
 
+def sfprint(verbosity: int, *args):
+    """
+    Print with regards to currently set verbosity level.
+    """
     global verbosity_level
     if verbosity <= verbosity_level:
         print(*args)
 
+
 def set_verbosity_level(level: int):
     global verbosity_level
     verbosity_level = level
 
+
 def get_verbosity_level() -> int:
     global verbosity_level
     return verbosity_level
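For illustration (not part of this commit), a minimal sketch of how the ResolutionEnv shown above performs substitution; the values are illustrative, not taken from this diff:

    r_env = ResolutionEnv({'shareDir': '/usr/local/xc7/install/share/symbiflow'})
    r_env.resolve('${shareDir}/arch')                  # -> '/usr/local/xc7/install/share/symbiflow/arch'
    r_env.add_values({'archDir': '${shareDir}/arch'})  # added values are themselves resolved on insertion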
@@ -1 +0,0 @@
-# This is only to make pydoc recognize this catalogue as a package
@@ -1,39 +1,20 @@
-#!/usr/bin/python3
+from pathlib import Path
+from shutil import move as sh_mv
 
-# Symbiflow Stage Module
+from f4pga.common import vpr_specific_values, VprArgs, get_verbosity_level, sub
+from f4pga.module import Module, ModuleContext
 
-# ----------------------------------------------------------------------------- #
-
-import os
-from f4pga.common import *
-from f4pga.module import *
-
-# ----------------------------------------------------------------------------- #
-
-def concat_fasm(fasm: str, fasm_extra: str, output: str):
-    fasm_data = None
-    fasm_extra_data = None
-    with open(fasm, 'r') as fasm_file, open(fasm_extra, 'r') as fasm_extra_file:
-        fasm_data = fasm_file.read()
-        fasm_extra_data = fasm_extra_file.read()
-    data = fasm_data + '\n' + fasm_extra_data
-
-    with open(output, 'w') as output_file:
-        output_file.write(data)
-
-def fasm_output_path(build_dir: str, top: str):
-    return f'{build_dir}/{top}.fasm'
 
 class FasmModule(Module):
 
     def map_io(self, ctx: ModuleContext):
-        build_dir = os.path.dirname(ctx.takes.eblif)
+        build_dir = str(Path(ctx.takes.eblif).parent)
         return {
-            'fasm': fasm_output_path(build_dir, ctx.values.top)
+            'fasm': f'{(Path(build_dir)/ctx.values.top)!s}.fasm'
         }
 
     def execute(self, ctx: ModuleContext):
-        build_dir = os.path.dirname(ctx.takes.eblif)
+        build_dir = str(Path(ctx.takes.eblif).parent)
 
         vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values)

@@ -43,10 +24,14 @@ class FasmModule(Module):
         if ctx.takes.sdc:
             optional += ['--sdc', ctx.takes.sdc]
 
-        s = ['genfasm', vprargs.arch_def,
-             os.path.realpath(ctx.takes.eblif),
-             '--device', vprargs.device_name,
-             '--read_rr_graph', vprargs.rr_graph
+        s = [
+            'genfasm',
+            vprargs.arch_def,
+            str(Path(ctx.takes.eblif).resolve()),
+            '--device',
+            vprargs.device_name,
+            '--read_rr_graph',
+            vprargs.rr_graph
         ] + vprargs.optional
 
         if get_verbosity_level() >= 2:

@@ -56,13 +41,17 @@ class FasmModule(Module):
 
         sub(*s, cwd=build_dir)
 
-        default_fasm_output_name = fasm_output_path(build_dir, ctx.values.top)
+        default_fasm_output_name = f'{(Path(build_dir)/ctx.values.top)!s}.fasm'
         if default_fasm_output_name != ctx.outputs.fasm:
-            shutil.move(default_fasm_output_name, ctx.outputs.fasm)
+            sh_mv(default_fasm_output_name, ctx.outputs.fasm)
 
         if ctx.takes.fasm_extra:
             yield 'Appending extra FASM...'
-            concat_fasm(ctx.outputs.fasm, ctx.takes.fasm_extra, ctx.outputs.fasm)
+            # Read both inputs fully before reopening the output for writing,
+            # so that the 'w' truncation cannot discard data still to be read.
+            with open(ctx.outputs.fasm, 'r') as fasm_file, open(ctx.takes.fasm_extra, 'r') as fasm_extra_file:
+                data = f"{fasm_file.read()}\n{fasm_extra_file.read()}"
+            with open(ctx.outputs.fasm, 'w') as wfptr:
+                wfptr.write(data)
         else:
             yield 'No extra FASM to append'
@@ -1,7 +1,3 @@
-#!/usr/bin/python3
-
-# Symbiflow Stage Module
-
 """
 This module is intended for wrapping simple scripts without rewriting them as
 an sfbuild module. This is mostly to maintain compatibility with workflows

@@ -12,53 +8,42 @@ Accepted module parameters:
 * `script` (string, mandatory): Path to the script to be executed
 * `interpreter` (string, optional): Interpreter for the script
 * `cwd` (string, optional): Current Working Directory for the script
-* `outputs` (dict[string -> dict[string -> string]],
-  mandatory):
-  A dict with output descriptions (dicts). Keys name output dependencies.
-  * `mode` (string, mandatory): "file" or "stdout". Describes how the output is
-    grabbed from the script.
-  * `file` (string, required if `mode` is "file"): Name of the file generated by the
-    script.
-  * `target` (string, required): Default name of the file of the generated
-    dependency. You can use all values available durng map_io stage. Each input
-    dependency alsogets two extra values associated with it:
-    `:dependency_name[noext]`, which contains the path to the dependency the
-    extension with anything after last "." removed and `:dependency_name[dir]` which
-    contains directory paths of the dependency. This is useful for deriving an output
-    name from the input.
+* `outputs` (dict[string -> dict[string -> string]], mandatory):
+  A dict with output descriptions (dicts).
+  Keys name output dependencies.
+  * `mode` (string, mandatory): "file" or "stdout".
+    Describes how the output is grabbed from the script.
+  * `file` (string, required if `mode` is "file"): Name of the file generated by the script.
+  * `target` (string, required): Default name of the file of the generated dependency.
+    You can use all values available during the map_io stage.
+    Each input dependency also gets two extra values associated with it:
+    `:dependency_name[noext]`, which contains the path to the dependency with the extension (anything after the last
+    ".") removed, and `:dependency_name[dir]`, which contains the directory path of the dependency.
+    This is useful for deriving an output name from the input.
   * `meta` (string, optional): Description of the output dependency.
 * `inputs` (dict[string -> string | bool], mandatory):
-  A dict with input descriptions. Key is either a name of a named argument or a
-  position of unnamed argument prefaced with "#" (eg. "#1"). Positions are indexed
-  from 1, as it's a convention that 0th argument is the path of the executed program.
-  Values are strings that can contains references to variables to be resolved
-  after the project flow configuration is loaded (that means they can reference
-  values and dependencies which are to be set by the user). All of modules inputs
-  will be determined by the references used. Thus dependency and value definitions
-  are implicit. If the value of the resolved string is empty and is associated with a
-  named argument, the argument in question will be skipped entirely. This allows
-  using optional dependencies. To use a named argument as a flag instead, set it to
-  `true`.
+  A dict with input descriptions.
+  Key is either a name of a named argument or a position of an unnamed argument prefaced with "#" (eg. "#1").
+  Positions are indexed from 1, as it's a convention that the 0th argument is the path of the executed program.
+  Values are strings that can contain references to variables to be resolved after the project flow configuration is
+  loaded (that means they can reference values and dependencies which are to be set by the user).
+  All of the module's inputs will be determined by the references used.
+  Thus dependency and value definitions are implicit.
+  If the value of the resolved string is empty and is associated with a named argument, the argument in question will
+  be skipped entirely.
+  This allows using optional dependencies.
+  To use a named argument as a flag instead, set it to `true`.
 """
 
 # TODO: `environment` input kind
 
-# ----------------------------------------------------------------------------- #
-
-import os
-import shutil
-import re
-
-from f4pga.common import *
-from f4pga.module import *
-
-# ----------------------------------------------------------------------------- #
+from pathlib import Path
+from shutil import move as sh_mv
+from re import match as re_match, finditer as re_finditer
 
+from f4pga.common import decompose_depname, deep, get_verbosity_level, sub
+from f4pga.module import Module, ModuleContext
 
-def _generate_stage_name(params):
-    stage_name = params.get('stage_name')
-    if stage_name is None:
-        stage_name = '<unknown>'
-    return f'{stage_name}-generic'
 
 def _get_param(params, name: str):
     param = params.get(name)

@@ -67,6 +52,7 @@ def _get_param(params, name: str):
                   f'missing `{name}` field')
     return param
 
+
 def _parse_param_def(param_def: str):
     if param_def[0] == '#':
         return 'positional', int(param_def[1:])

@@ -74,8 +60,6 @@ def _parse_param_def(param_def: str):
         return 'environmental', param_def[1:]
     return 'named', param_def
 
-_file_noext_deep = deep(file_noext)
-_realdirpath_deep = deep(lambda p: os.path.realpath(os.path.dirname(p)))
 
 class InputReferences:
     dependencies: 'set[str]'

@@ -89,48 +73,34 @@ class InputReferences:
         self.dependencies = set()
         self.values = set()
 
+
 def _get_input_references(input: str) -> InputReferences:
     refs = InputReferences()
     if type(input) is not str:
         return refs
-    matches = re.finditer('\$\{([^${}]*)\}', input)
-    for match in matches:
+    for match in re_finditer('\$\{([^${}]*)\}', input):
         match_str = match.group(1)
-        if match_str[0] == ':':
-            if len(match_str) < 2:
-                raise Exception('Dependency name must be at least 1 character '
-                                'long')
-            dep_name = re.match('([^\\[\\]]*)', match_str[1:]).group(1)
-            refs.dependencies.add(dep_name)
-        else:
+        if match_str[0] != ':':
             refs.values.add(match_str)
+            continue
+        if len(match_str) < 2:
+            raise Exception('Dependency name must be at least 1 character long')
+        refs.dependencies.add(re_match('([^\\[\\]]*)', match_str[1:]).group(1))
     return refs
 
+
+def _make_noop1():
+    def noop(_):
+        return
+    return noop
 
-def _tailcall1(self, fun):
-    def newself(arg, self=self, fun=fun):
-        fun(arg)
-        self(arg)
-    return newself
 
-def _add_extra_values_to_env(ctx: ModuleContext):
-    takes = dict(vars(ctx.takes).items())
-    for take_name, take_path in takes.items():
-        if take_path is None:
-            continue
-        attr_name = f':{take_name}[noext]'
-        ctx.r_env.values[attr_name] = _file_noext_deep(take_path)
-        attr_name = f':{take_name}[dir]'
-        dirname = _realdirpath_deep(take_path)
-        ctx.r_env.values[attr_name] = dirname
 
-def _make_noop1():
-    def noop(_):
-        return
-    return noop
 
 class GenericScriptWrapperModule(Module):
     script_path: str

@@ -139,8 +109,15 @@ class GenericScriptWrapperModule(Module):
     interpreter: 'None | str'
     cwd: 'None | str'
 
+    @staticmethod
+    def _add_extra_values_to_env(ctx: ModuleContext):
+        for take_name, take_path in vars(ctx.takes).items():
+            if take_path is not None:
+                ctx.r_env.values[f':{take_name}[noext]'] = deep(lambda p: str(Path(p).with_suffix('')))(take_path)
+                ctx.r_env.values[f':{take_name}[dir]'] = deep(lambda p: str(Path(p).parent.resolve()))(take_path)
+
     def map_io(self, ctx: ModuleContext):
-        _add_extra_values_to_env(ctx)
+        self._add_extra_values_to_env(ctx)
 
         outputs = {}
         for dep, _, out_path in self.file_outputs:

@@ -155,7 +132,7 @@ class GenericScriptWrapperModule(Module):
         return outputs
 
     def execute(self, ctx: ModuleContext):
-        _add_extra_values_to_env(ctx)
+        self._add_extra_values_to_env(ctx)
 
         cwd = ctx.r_env.resolve(self.cwd)
 

@@ -187,7 +164,7 @@ class GenericScriptWrapperModule(Module):
             file = ctx.r_env.resolve(file, final=True)
             target = ctx.r_env.resolve(target, final=True)
             if target != file:
-                shutil.move(file, target)
+                sh_mv(file, target)
 
     def _init_outputs(self, output_defs: 'dict[str, dict[str, str]]'):
         self.stdout_target = None

@@ -294,7 +271,8 @@ class GenericScriptWrapperModule(Module):
             self.values.append(val)
 
     def __init__(self, params):
-        self.name = _generate_stage_name(params)
+        stage_name = params.get('stage_name')
+        self.name = f"{'<unknown>' if stage_name is None else stage_name}-generic"
         self.no_of_phases = 2
         self.script_path = params.get('script')
         self.interpreter = params.get('interpreter')

@@ -307,4 +285,4 @@ class GenericScriptWrapperModule(Module):
         self._init_outputs(_get_param(params, 'outputs'))
         self._init_inputs(_get_param(params, 'inputs'))
 
 ModuleClass = GenericScriptWrapperModule
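Pulling the documented parameters together, a hypothetical `params` dict for this wrapper might look like the sketch below (not part of this commit; the stage name, script path and dependency names are illustrative only):

    params = {
        'stage_name': 'gen_constraints',
        'script': '${shareDir}/scripts/gen_constraints.py',
        'interpreter': '${python3}',
        'inputs': {
            '#1': '${:eblif}',                      # first positional argument
            'output': '${:eblif[noext]}.constr',    # named argument, skipped if it resolves to ''
            'verbose': True                         # named flag
        },
        'outputs': {
            'constraints': {
                'mode': 'file',
                'file': 'constraints.tmp',
                'target': '${:eblif[dir]}/constraints.final',
                'meta': 'Generated constraints'
            }
        }
    }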
@@ -1,7 +1,3 @@
-#!/usr/bin/python3
-
-# Symbiflow Stage Module
-
 """
 Rename (ie. change) dependencies and values of a module. This module wraps another
 module, whose name is specified in `params.module`, and changes the names of the

@@ -25,13 +21,10 @@ Accepted module parameters:
 
 """
 
-# ----------------------------------------------------------------------------- #
-
 from f4pga.common import *
-from f4pga.module import *
+from f4pga.module import Module, ModuleContext
 from f4pga.module_runner import get_module
 
-# ----------------------------------------------------------------------------- #
+
 
 def _switch_keys(d: 'dict[str, ]', renames: 'dict[str, str]') -> 'dict[str, ]':
     newd = {}

@@ -43,6 +36,7 @@ def _switch_keys(d: 'dict[str, ]', renames: 'dict[str, str]') -> 'dict[str, ]':
             newd[k] = v
     return newd
 
+
 def _switchback_attrs(d: Namespace, renames: 'dict[str, str]') -> SimpleNamespace:
     newn = SimpleNamespace()
     for k, v in vars(d).items():

@@ -54,6 +48,7 @@ def _switchback_attrs(d: Namespace, renames: 'dict[str, str]') -> SimpleNamespace:
             setattr(newn, k, v)
     return newn
 
+
 def _switch_entries(l: 'list[str]', renames: 'dict[str, str]') -> 'list[str]':
     newl = []
     for e in l:

@@ -65,12 +60,11 @@ def _switch_entries(l: 'list[str]', renames: 'dict[str, str]') -> 'list[str]':
             newl.append(r if r is not None else e)
     return newl
 
-def _generate_stage_name(name: str):
-    return f'{name}-io_renamed'
 
 def _or_empty_dict(d: 'dict | None'):
     return d if d is not None else {}
 
+
 class IORenameModule(Module):
     module: Module
     rename_takes: 'dict[str, str]'

@@ -102,7 +96,7 @@ class IORenameModule(Module):
         self.rename_values = _or_empty_dict(params.get("rename_values"))
 
         self.module = module
-        self.name = _generate_stage_name(module.name)
+        self.name = f'{module.name}-io_renamed'
         self.no_of_phases = module.no_of_phases
         self.takes = _switch_entries(module.takes, self.rename_takes)
         self.produces = _switch_entries(module.produces, self.rename_produces)
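For illustration (not part of this commit), a hypothetical `params` dict for IORenameModule, matching the `module` key named in the docstring and the `rename_takes`/`rename_produces`/`rename_values` fields read in `__init__`; the wrapped module and names are illustrative only:

    params = {
        'module': 'common:fasm',
        'rename_takes': {'eblif': 'synth_eblif'},
        'rename_produces': {'fasm': 'final_fasm'}
    }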
@@ -1,21 +1,14 @@
-#!/usr/bin/python3
+"""
+This module is used as a helper in a build chain to automate creating build directories.
+It's currently the only parametric module, meaning it can take user-provided input at an early stage in order to
+determine its take/produces I/O.
+This allows representing configurable directories, such as a build directory, as dependencies, and by doing so allows
+the dependency algorithm to lazily create the directories if they become necessary.
+"""
 
-# Symbiflow Stage Module
+from pathlib import Path
+from f4pga.module import Module, ModuleContext
 
-""" This module is used as a helper in a abuild chain to automate creating build
-directiores. It' currenty the only parametric module, meaning it can take
-user-provided input at an early stage in order todetermine its take/produces
-I/O. This allows other repesenting configurable directories, such as a build
-directory as dependencies and by doing so, allow the dependency algorithm to
-lazily create the directories if they become necessary. """
-
-# ----------------------------------------------------------------------------- #
-
-import os
-from f4pga.common import *
-from f4pga.module import *
-
-# ----------------------------------------------------------------------------- #
 
 class MkDirsModule(Module):
     deps_to_produce: 'dict[str, str]'

@@ -27,7 +20,7 @@ class MkDirsModule(Module):
         outputs = vars(ctx.outputs)
         for _, path in outputs.items():
             yield f'Creating directory {path}...'
-            os.makedirs(path, exist_ok=True)
+            Path(path).mkdir(parents=True, exist_ok=True)
 
     def __init__(self, params):
         self.name = 'mkdirs'
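For illustration (not part of this commit): since `execute` above iterates over `ctx.outputs` and creates one directory per entry, the module's parameters presumably map produced dependency names to directory paths. A hedged sketch; the key and path are illustrative only:

    params = {
        'build_dir': 'build/${platform}'   # produced dependency name -> directory path
    }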
@@ -1,54 +1,54 @@
-#!/usr/bin/python3
+from pathlib import Path
+from os import remove as os_remove
+from shutil import move as sh_mv