commit 636da72d32
parent 26fb1d63b0
Author: Unai Martinez-Corral <umartinezcorral@antmicro.com>
Date:   2022-03-04 05:13:42 +01:00

f4pga: cleanup and style

Signed-off-by: Unai Martinez-Corral <umartinezcorral@antmicro.com>

19 changed files with 779 additions and 617 deletions

.gitignore (vendored)

View file

@@ -1,2 +1,3 @@
 *.pyc
 *.sw*
+/f4pga/build/

View file

@@ -1,31 +1,28 @@
-#!/usr/bin/env python3
 """
-sfbuild - Symbiflow Build System
+F4PGA Build System

-This tool allows for building FPGA targets (such as bitstreams) for any supported
-platform with just one simple command and a project file.
+This tool allows for building FPGA targets (such as bitstreams) for any supported platform with just one simple command
+and a project file.

-The idea is that sfbuild wraps all the tools needed by different platforms in
-"modules", which define inputs/outputs and various parameters. This allows
-sfbuild to resolve dependencies for any target provided that a "flow definition"
-file exists for such target. The flow defeinition file list modules available for
-that platform and may tweak some settings of those modules.
+The idea is that F4PGA wraps all the tools needed by different platforms in "modules", which define inputs/outputs and
+various parameters.
+This allows F4PGA to resolve dependencies for any target provided that a "flow definition" file exists for such target.
+The flow defeinition file list modules available for that platform and may tweak some settings of those modules.

-A basic example of using sfbuild:
-$ sfbuild build --platform arty_35 -t bitstream
-This will make sfbuild attempt to create a bitstream for arty_35 platform.
-flow.json is a flow configuration file, which should be created for a project
-that uses sfbuild. Iontains project-specific definitions needed within the flow,
-such as list of source code files.
+A basic example of using F4PGA:
+
+$ f4pga build --platform arty_35 -t bitstream
+
+This will make F4PGA attempt to create a bitstream for arty_35 platform.
+``flow.json`` is a flow configuration file, which should be created for a project that uses F4PGA.
+Contains project-specific definitions needed within the flow, such as list of source code files.
 """

 from pathlib import Path
 from argparse import Namespace
-import os
+from sys import argv as sys_argv
 from os import environ
-import json
+from json import load as json_load, loads as json_loads
 from typing import Iterable
 from colorama import Fore, Style
@@ -34,11 +31,11 @@ from f4pga.common import (
     fatal,
     scan_modules,
     set_verbosity_level,
-    sfprint
+    sfprint,
+    sub as common_sub
 )
 from f4pga.module import *
 from f4pga.cache import SymbiCache
-import f4pga.ugly as ugly
 from f4pga.flow_config import (
     ProjectFlowConfig,
     FlowConfig,
@@ -54,10 +51,10 @@ from f4pga.argparser import setup_argparser, get_cli_flow_config

 SYMBICACHEPATH = '.symbicache'

-binpath = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(os.sys.argv[0])), '..'))
+binpath = str(Path(sys_argv[0]).resolve().parent.parent)
 mypath = str(Path(__file__).resolve().parent)

-share_dir_path = os.path.realpath(f"{environ.get('F4PGA_INSTALL_DIR', '/usr/local')}/xc7/install/share/symbiflow")
+share_dir_path = str(Path(f"{environ.get('F4PGA_INSTALL_DIR', '/usr/local')}/xc7/install/share/symbiflow").resolve())

 class DependencyNotProducedException(Exception):
     dep_name: str
@@ -86,8 +83,7 @@ def req_exists(r):
     """ Checks whether a dependency exists on a drive. """
     if type(r) is str:
-        if not os.path.isfile(r) and not os.path.islink(r) \
-                and not os.path.isdir(r):
+        if not Path(r).is_file() and not Path(r).is_symlink() and not Path(r).is_dir():
             return False
     elif type(r) is list:
         return not (False in map(req_exists, r))
@@ -471,9 +467,27 @@ def setup_resolution_env():

     r_env = ResolutionEnv({
         'shareDir': share_dir_path,
-        'binDir': os.path.realpath(os.path.join(share_dir_path, '../../bin'))
+        'binDir': str((Path(share_dir_path) / '../../bin').resolve())
     })
-    r_env.add_values(ugly.generate_values())
+
+    def _noisy_warnings():
+        """
+        Emit some noisy warnings.
+        """
+        environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings.log'
+        return 'noisy_warnings.log'
+
+    def _generate_values():
+        """
+        Generate initial values, available in configs.
+        """
+        return {
+            'prjxray_db': common_sub('prjxray-config').decode().replace('\n', ''),
+            'python3': common_sub('which', 'python3').decode().replace('\n', ''),
+            'noisyWarnings': _noisy_warnings()
+        }
+
+    r_env.add_values(_generate_values())
     return r_env

 def open_project_flow_config(path: str) -> ProjectFlowConfig:
@@ -509,7 +523,7 @@ def get_platform_name_for_part(part_name: str):
     differ only in a type of package they use.
     """
     with (Path(mypath) / 'part_db.json').open('r') as rfptr:
-        return json.load(rfptr).get(part_name.upper())
+        return json_load(rfptr).get(part_name.upper())

 def cmd_build(args: Namespace):
     """ sfbuild's `build` command implementation """
@@ -535,7 +549,7 @@ def cmd_build(args: Namespace):
         fatal(-1, 'No configuration was provided. Use `--flow`, `--platform` or '
                   '`--part` to configure flow..')

-    platform_path = os.path.join(mypath, 'platforms', platform + '.json')
+    platform_path = str(Path(mypath) / f'platforms/{platform}.json')
     platform_def = None
     try:
         with open(platform_path) as platform_file:
@@ -550,7 +564,7 @@ def cmd_build(args: Namespace):
     sfprint(2, 'Scanning modules...')
     scan_modules(mypath)

-    flow_definition_dict = json.loads(platform_def)
+    flow_definition_dict = json_loads(platform_def)
     flow_def = FlowDefinition(flow_definition_dict, r_env)
     flow_cfg = FlowConfig(project_flow_cfg, flow_def, platform)
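The docstring in this file refers to a ``flow.json`` project file. For orientation only, a minimal hypothetical flow configuration might look like the following (key names are illustrative and not taken from this commit):

    {
        "dependencies": {
            "sources": ["counter.v"]
        },
        "values": {
            "top": "counter"
        },
        "arty_35": {
            "default_target": "bitstream"
        }
    }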

View file

@@ -1,69 +1,158 @@
 from argparse import ArgumentParser, Namespace
-import re
+from re import finditer as re_finditer


 def _add_flow_arg(parser: ArgumentParser):
-    parser.add_argument('-f', '--flow', metavar='flow_path', type=str,
-                        help='Path to flow definition file')
+    parser.add_argument(
+        '-f',
+        '--flow',
+        metavar='flow_path',
+        type=str,
+        help='Path to flow definition file'
+    )
+

 def _setup_build_parser(parser: ArgumentParser):
     _add_flow_arg(parser)
-    parser.add_argument('-t', '--target', metavar='target_name', type=str,
-                        help='Perform stages necessary to acquire target')
-    parser.add_argument('--platform', metavar='platform_name',
-                        help='Target platform_name')
-    parser.add_argument('-P', '--pretend', action='store_true',
-                        help='Show dependency resolution without executing flow')
-    parser.add_argument('-i', '--info', action='store_true',
-                        help='Display info about available targets')
-    parser.add_argument('-c', '--nocache', action='store_true',
-                        help='Ignore caching and rebuild everything up to the '
-                             'target.')
-    parser.add_argument('-S', '--stageinfo', nargs=1, metavar='stage_name',
-                        help='Display info about stage')
-    parser.add_argument('-r', '--requirements', action='store_true',
-                        help='Display info about project\'s requirements.')
-    parser.add_argument('-p', '--part', metavar='part_name',
-                        help='Name of the target chip')
-    parser.add_argument('--dep', '-D', action='append', default=[])
-    parser.add_argument('--val', '-V', action='append', default=[])
+
+    parser.add_argument(
+        '-t',
+        '--target',
+        metavar='target_name',
+        type=str,
+        help='Perform stages necessary to acquire target'
+    )
+
+    parser.add_argument(
+        '--platform',
+        metavar='platform_name',
+        help='Target platform_name'
+    )
+
+    parser.add_argument(
+        '-P',
+        '--pretend',
+        action='store_true',
+        help='Show dependency resolution without executing flow'
+    )
+
+    parser.add_argument(
+        '-i',
+        '--info',
+        action='store_true',
+        help='Display info about available targets'
+    )
+
+    parser.add_argument(
+        '-c',
+        '--nocache',
+        action='store_true',
+        help='Ignore caching and rebuild everything up to the target.'
+    )
+
+    parser.add_argument(
+        '-S',
+        '--stageinfo',
+        nargs=1,
+        metavar='stage_name',
+        help='Display info about stage'
+    )
+
+    parser.add_argument(
+        '-r',
+        '--requirements',
+        action='store_true',
+        help='Display info about project\'s requirements.'
+    )
+
+    parser.add_argument(
+        '-p',
+        '--part',
+        metavar='part_name',
+        help='Name of the target chip'
+    )
+
+    parser.add_argument(
+        '--dep',
+        '-D',
+        action='append',
+        default=[]
+    )
+
+    parser.add_argument(
+        '--val',
+        '-V',
+        action='append',
+        default=[]
+    )

     # Currently unsupported
-    parser.add_argument('-M', '--moduleinfo', nargs=1,
-                        metavar='module_name_or_path',
-                        help='Display info about module. Requires `-p` option '
-                             'in case of module name')
-    parser.add_argument('-T', '--take_explicit_paths', nargs='+',
-                        metavar='<name=path, ...>', type=str,
-                        help='Specify stage inputs explicitely. This might be '
-                             'required if some files got renamed or deleted and '
-                             'symbiflow is unable to deduce the flow that lead '
-                             'to dependencies required by the requested stage')
+    parser.add_argument(
+        '-M',
+        '--moduleinfo',
+        nargs=1,
+        metavar='module_name_or_path',
+        help='Display info about module. Requires `-p` option in case of module name'
+    )
+
+    parser.add_argument(
+        '-T',
+        '--take_explicit_paths',
+        nargs='+',
+        metavar='<name=path, ...>',
+        type=str,
+        help='Specify stage inputs explicitely. This might be required if some files got renamed or deleted and '
+             'symbiflow is unable to deduce the flow that lead to dependencies required by the requested stage'
+    )
+

 def _setup_show_dep_parser(parser: ArgumentParser):
-    parser.add_argument('-p', '--platform', metavar='platform_name', type=str,
-                        help='Name of the platform (use to display '
-                             'platform-specific values.')
-    parser.add_argument('-s', '--stage', metavar='stage_name', type=str,
-                        help='Name of the stage (use if you want to set the '
-                             'value only for that stage). Requires `-p`.')
+    parser.add_argument(
+        '-p',
+        '--platform',
+        metavar='platform_name',
+        type=str,
+        help='Name of the platform (use to display platform-specific values.'
+    )
+
+    parser.add_argument(
+        '-s',
+        '--stage',
+        metavar='stage_name',
+        type=str,
+        help='Name of the stage (use if you want to set the value only for that stage). Requires `-p`.'
+    )
+
     _add_flow_arg(parser)

-# Set up argument parser for the program. Pretty self-explanatory.
+
 def setup_argparser():
+    """
+    Set up argument parser for the program.
+    """
     parser = ArgumentParser(description='SymbiFlow Build System')
-    parser.add_argument('-v', '--verbose', action='count', default=0)
-    parser.add_argument('-s', '--silent', action='store_true')
+
+    parser.add_argument(
+        '-v',
+        '--verbose',
+        action='count',
+        default=0
+    )
+
+    parser.add_argument(
+        '-s',
+        '--silent',
+        action='store_true'
+    )

     subparsers = parser.add_subparsers(dest='command')
-    build = subparsers.add_parser('build')
-    _setup_build_parser(build)
-    show_dep = subparsers.add_parser('showd',
-                                     description='Show the value(s) assigned to a '
-                                                 'dependency')
+    _setup_build_parser(subparsers.add_parser('build'))
+    show_dep = subparsers.add_parser('showd', description='Show the value(s) assigned to a dependency')
     _setup_show_dep_parser(show_dep)

     return parser

 def _parse_depval(depvalstr: str):
     """
     Parse a dependency or value definition in form of:
@@ -94,6 +183,7 @@ def _parse_depval(depvalstr: str):

     return d

+
 def _unescaped_matches(regexp: str, s: str, escape_chr='\\'):
     """
     Find all occurences of a pattern in a string that contains escape sequences.
@@ -109,8 +199,7 @@ def _unescaped_matches(regexp: str, s: str, escape_chr='\\'):
     offsets = []
     offset = 0
     for sl in s.split(escape_chr):
-        l = len(sl)
-        if l <= 1:
+        if len(sl) <= 1:
             continue
         noescape = sl[(1 if offset != 0 else 0):]
         for _ in noescape:
@@ -118,7 +207,7 @@ def _unescaped_matches(regexp: str, s: str, escape_chr='\\'):
             offset += 2
         noescapes += noescape

-    iter = re.finditer(regexp, noescapes)
+    iter = re_finditer(regexp, noescapes)

     for m in iter:
         start = m.start()
@@ -127,10 +216,13 @@ def _unescaped_matches(regexp: str, s: str, escape_chr='\\'):
         off2 = end + offsets[end]
         yield off1, off2

+
 def _unescaped_separated(regexp: str, s: str, escape_chr='\\'):
-    """ Yields substrings of a string that contains escape sequences. """
-    last_end = 0;
+    """
+    Yields substrings of a string that contains escape sequences.
+    """
+    last_end = 0
     for start, end in _unescaped_matches(regexp, s, escape_chr=escape_chr):
         yield s[last_end:start]
         last_end = end
@@ -139,6 +231,7 @@ def _unescaped_separated(regexp: str, s: str, escape_chr='\\'):
     else:
         yield ''

+
 def _parse_cli_value(s: str):
     """
     Parse a value/dependency passed to CLI
@@ -207,6 +300,7 @@ def _parse_cli_value(s: str):
     # String
     return s.replace('\\', '')

+
 def get_cli_flow_config(args: Namespace, platform: str):
     def create_defdict():
         return {
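A rough illustration of how the escape-aware helpers above compose (behavior inferred from the code in this hunk; these are private functions, so this is a sketch rather than a supported API):

    from f4pga.argparser import _parse_cli_value

    _parse_cli_value('plain\\,text')   # -> 'plain,text' (the escape character is stripped by the final replace)
    _parse_cli_value('[a,b,c]')        # expected to yield a list, e.g. ['a', 'b', 'c']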

View file

@@ -1,19 +1,13 @@
-import os
-import zlib
-import json
+from pathlib import Path
+from zlib import adler32 as zlib_adler32
+from json import dump as json_dump, load as json_load, JSONDecodeError

-def _get_file_hash(path: str):
-    with open(path, 'rb') as f:
-        b = f.read()
-        return str(zlib.adler32(b))

 class SymbiCache:
     """
-    `SymbiCache` is used to track changes among dependencies and keep
-    the status of the files on a persistent storage.
+    `SymbiCache` is used to track changes among dependencies and keep the status of the files on a persistent storage.
     Files which are tracked get their checksums calculated and stored in a file.
-    If file's checksum differs from the one saved in a file, that means, the file
-    has changed.
+    If file's checksum differs from the one saved in a file, that means, the file has changed.
     """

     hashes: 'dict[str, dict[str, str]]'
@@ -21,13 +15,14 @@ class SymbiCache:
     cachefile_path: str

     def __init__(self, cachefile_path):
-        """ `chachefile_path` - path to a file used for persistent storage of
-        checksums. """
+        """
+        `chachefile_path` - path to a file used for persistent storage of checksums.
+        """
+
         self.status = {}
         self.cachefile_path = cachefile_path
         self.load()

     def _try_pop_consumer(self, path: str, consumer: str):
         if self.status.get(path) and self.status[path].get(consumer):
             self.status[path].pop(consumer)
@@ -37,7 +32,7 @@ class SymbiCache:
             self.hashes[path].pop(consumer)
             if len(self.hashes[path]) == 0:
                 self.hashes.pop(path)

     def _try_push_consumer_hash(self, path: str, consumer: str, hash):
         if not self.hashes.get(path):
             self.hashes[path] = {}
@@ -46,43 +41,39 @@ class SymbiCache:
         if not self.status.get(path):
             self.status[path] = {}
         self.status[path][consumer] = status

-    def _get_last_hash(self, path: str, consumer: str):
-        last_hashes = self.hashes.get(path)
-        if last_hashes is None:
-            return None
-        return last_hashes.get(consumer)
-
     def update(self, path: str, consumer: str):
-        """ Add/remove a file to.from the tracked files, update checksum
-        if necessary and calculate status.
+        """ Add/remove a file to.from the tracked files, update checksum if necessary and calculate status.

         Multiple hashes are stored per file, one for each consumer module.
-        "__target" is used as a convention for a "fake" consumer in case the file
-        is requested as a target and not used by a module within the active flow.
+        "__target" is used as a convention for a "fake" consumer in case the file is requested as a target and not used
+        by a module within the active flow.
         """

-        isdir = os.path.isdir(path)
-        if not (os.path.isfile(path) or os.path.islink(path) or isdir):
+        isdir = Path(path).is_dir()
+        if not (Path(path).is_file() or Path(path).is_symlink() or isdir):
             self._try_pop_consumer(path, consumer)
             return True
         hash = 0 # Directories always get '0' hash.
         if not isdir:
-            hash = _get_file_hash(path)
-        last_hash = self._get_last_hash(path, consumer)
+            with Path(path).open('rb') as rfptr:
+                hash = str(zlib_adler32(rfptr.read()))
+        last_hashes = self.hashes.get(path)
+        last_hash = None if last_hashes is None else last_hashes.get(consumer)
         if hash != last_hash:
             self._try_push_consumer_status(path, consumer, 'changed')
             self._try_push_consumer_hash(path, consumer, hash)
             return True
-        else:
-            self._try_push_consumer_status(path, consumer, 'same')
-            return False
+        self._try_push_consumer_status(path, consumer, 'same')
+        return False

     def get_status(self, path: str, consumer: str):
         """ Get status for a file with a given path.
-        returns 'untracked' if the file is not tracked or hasn't been
-        treated with `update` procedure before calling `get_status`. """
+        returns 'untracked' if the file is not tracked or hasn't been treated with `update` procedure before calling
+        `get_status`.
+        """
         statuses = self.status.get(path)
         if not statuses:
             return 'untracked'
@@ -90,26 +81,23 @@ class SymbiCache:
         if not status:
             return 'untracked'
         return status

     def load(self):
         """Loads cache's state from the persistent storage"""
         try:
-            with open(self.cachefile_path, 'r') as f:
-                b = f.read()
-                self.hashes = json.loads(b)
-        except json.JSONDecodeError as jerr:
-            print('WARNING: .symbicache is corrupted! '
-                  'This will cause flow to re-execute from the beggining.')
+            with Path(self.cachefile_path).open('r') as rfptr:
+                self.hashes = json_load(rfptr)
+        except JSONDecodeError as jerr:
+            print("""WARNING: .symbicache is corrupted!
+This will cause flow to re-execute from the beggining.""")
             self.hashes = {}
         except FileNotFoundError:
-            print('Couldn\'t open .symbicache cache file. '
-                  'This will cause flow to re-execute from the beggining.')
+            print("""Couldn\'t open .symbicache cache file.
+This will cause flow to re-execute from the beggining.""")
             self.hashes = {}

     def save(self):
-        """Saves cache's state to the persistent storage"""
-        with open(self.cachefile_path, 'w') as f:
-            b = json.dumps(self.hashes, indent=4)
-            f.write(b)
+        """Saves cache's state to the persistent storage."""
+        with Path(self.cachefile_path).open('w') as wfptr:
+            json_dump(str(self.hashes), wfptr, indent=4)
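For reference, the hashing scheme inlined into `update` above is tiny; a standalone sketch of the same idea (not part of this commit):

    from pathlib import Path
    from zlib import adler32

    def file_checksum(path: str) -> str:
        # adler32 over the raw bytes, stored as a string; directories get a constant '0' instead.
        return str(adler32(Path(path).read_bytes()))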

View file

@@ -1,9 +1,11 @@
+from pathlib import Path
+from os import environ, listdir as os_listdir
+from sys import argv as sys_argv
 from argparse import Namespace
-import subprocess
-import os
-import shutil
-import sys
-import re
+from shutil import move as sh_mv
+from subprocess import run
+from re import match as re_match, finditer as re_finditer


 def decompose_depname(name: str):
     spec = 'req'
@@ -16,6 +18,7 @@ def decompose_depname(name: str):
         name = name[:len(name) - 1]
     return name, spec

+
 def with_qualifier(name: str, q: str) -> str:
     if q == 'req':
         return decompose_depname(name)[0]
@@ -24,25 +27,33 @@ def with_qualifier(name: str, q: str) -> str:
     if q == 'demand':
         return decompose_depname(name)[0] + '!'

+
 _sfbuild_module_collection_name_to_path = {}

+
 def scan_modules(mypath: str):
     global _sfbuild_module_collection_name_to_path
     sfbuild_home = mypath
-    sfbuild_home_dirs = os.listdir(sfbuild_home)
+    sfbuild_home_dirs = os_listdir(sfbuild_home)
     sfbuild_module_dirs = \
-        [dir for dir in sfbuild_home_dirs if re.match('.*_modules$', dir)]
-    _sfbuild_module_collection_name_to_path = \
-        dict([(re.match('(.*)_modules$', moddir).groups()[0],
-               os.path.join(sfbuild_home, moddir))
-              for moddir in sfbuild_module_dirs])
+        [dir for dir in sfbuild_home_dirs if re_match('.*_modules$', dir)]
+    _sfbuild_module_collection_name_to_path = dict([
+        (
+            re_match('(.*)_modules$', moddir).groups()[0],
+            str(Path(sfbuild_home) / moddir)
+        )
+        for moddir in sfbuild_module_dirs
+    ])

-"""Resolves module location from modulestr"""
+
 def resolve_modstr(modstr: str):
+    """
+    Resolves module location from modulestr.
+    """
     sl = modstr.split(':')
     if len(sl) > 2:
-        raise Exception('Incorrect module sysntax. '
-                        'Expected one \':\' or one \'::\'')
+        raise Exception('Incorrect module sysntax. Expected one \':\' or one \'::\'')
     if len(sl) < 2:
         return modstr
     collection_name = sl[0]
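The qualifier syntax handled by `decompose_depname` and `with_qualifier` above hangs off the last character of a dependency name. Expected behavior, assuming '?' marks a 'maybe' (optional) dependency, consistent with the 'req' and 'demand' branches visible in this hunk:

    from f4pga.common import decompose_depname, with_qualifier

    decompose_depname('eblif')         # -> ('eblif', 'req')
    decompose_depname('sdc?')          # -> ('sdc', 'maybe')
    decompose_depname('fasm!')         # -> ('fasm', 'demand')
    with_qualifier('fasm', 'demand')   # -> 'fasm!'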
@@ -51,14 +62,13 @@ def resolve_modstr(modstr: str):
     col_path = _sfbuild_module_collection_name_to_path.get(collection_name)
     if not col_path:
         fatal(-1, f'Module collection {collection_name} does not exist')
-    return os.path.join(col_path, module_filename)
+    return str(Path(col_path) / module_filename)
+

 def deep(fun):
     """
-    Create a recursive string transform function for 'str | list | dict',
-    i.e a dependency
+    Create a recursive string transform function for 'str | list | dict', i.e a dependency.
     """
     def d(paths, *args, **kwargs):
         if type(paths) is str:
             return fun(paths)
@@ -66,18 +76,13 @@ def deep(fun):
             return [d(p) for p in paths];
         elif type(paths) is dict:
             return dict([(k, d(p)) for k, p in paths.items()])
     return d

-def file_noext(path: str):
-    """ Return a file without it's extenstion"""
-    m = re.match('(.*)\\.[^.]*$', path)
-    if m:
-        path = m.groups()[0]
-    return path

 class VprArgs:
-    """ Represents argument list for VPR (Versatile Place and Route) """
+    """
+    Represents argument list for VPR (Versatile Place and Route).
+    """

     arch_dir: str
     arch_def: str
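`deep`, whose hunk ends just above, lifts a string transform over nested lists and dicts, which is how whole dependency trees get rewritten at once; a quick illustration:

    from f4pga.common import deep

    upper = deep(str.upper)
    upper('a.v')            # -> 'A.V'
    upper(['a.v', 'b.v'])   # -> ['A.V', 'B.V']
    upper({'top': 'a.v'})   # -> {'top': 'A.V'}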
@@ -91,13 +96,13 @@ class VprArgs:
     def __init__(self, share: str, eblif, values: Namespace,
                  sdc_file: 'str | None' = None,
                  vpr_extra_opts: 'list | None' = None):
-        self.arch_dir = os.path.join(share, 'arch')
+        self.arch_dir = str(Path(share) / 'arch')
         self.arch_def = values.arch_def
         self.lookahead = values.rr_graph_lookahead_bin
         self.rr_graph = values.rr_graph_real_bin
         self.place_delay = values.vpr_place_delay
         self.device_name = values.vpr_grid_layout_name
-        self.eblif = os.path.realpath(eblif)
+        self.eblif = str(Path(eblif).resolve())
         if values.vpr_options is not None:
             self.optional = options_dict_to_list(values.vpr_options)
         else:
@@ -107,13 +112,17 @@ class VprArgs:
         if sdc_file is not None:
             self.optional += ['--sdc_file', sdc_file]

+
 class SubprocessException(Exception):
     return_code: int

+
 def sub(*args, env=None, cwd=None):
-    """ Execute subroutine """
-    out = subprocess.run(args, capture_output=True, env=env, cwd=cwd)
+    """
+    Execute subroutine.
+    """
+    out = run(args, capture_output=True, env=env, cwd=cwd)
     if out.returncode != 0:
         print(f'[ERROR]: {args[0]} non-zero return code.\n'
               f'stderr:\n{out.stderr.decode()}\n\n'
@@ -121,8 +130,11 @@ def sub(*args, env=None, cwd=None):
         exit(out.returncode)
     return out.stdout

+
 def vpr(mode: str, vprargs: VprArgs, cwd=None):
-    """ Execute `vpr` """
+    """
+    Execute `vpr`.
+    """
     modeargs = []
     if mode == 'pack':
@@ -132,15 +144,17 @@ def vpr(mode: str, vprargs: VprArgs, cwd=None):
     elif mode == 'route':
         modeargs = ['--route']

-    return sub(*(['vpr',
-                  vprargs.arch_def,
-                  vprargs.eblif,
-                  '--device', vprargs.device_name,
-                  '--read_rr_graph', vprargs.rr_graph,
-                  '--read_router_lookahead', vprargs.lookahead,
-                  '--read_placement_delay_lookup', vprargs.place_delay] +
-                 modeargs + vprargs.optional),
-               cwd=cwd)
+    return sub(*([
+        'vpr',
+        vprargs.arch_def,
+        vprargs.eblif,
+        '--device', vprargs.device_name,
+        '--read_rr_graph', vprargs.rr_graph,
+        '--read_router_lookahead', vprargs.lookahead,
+        '--read_placement_delay_lookup', vprargs.place_delay
+    ] + modeargs + vprargs.optional), cwd=cwd)
+

 _vpr_specific_values = [
     'arch_def',
@@ -150,10 +164,13 @@ _vpr_specific_values = [
     'vpr_grid_layout_name',
     'vpr_options?'
 ]

+
 def vpr_specific_values():
     global _vpr_specific_values
     return _vpr_specific_values

+
 def options_dict_to_list(opt_dict: dict):
     """
     Converts a dictionary of named options for CLI program to a list.
@@ -167,36 +184,44 @@ def options_dict_to_list(opt_dict: dict):
         opts.append(str(val))
     return opts

+
 def noisy_warnings(device):
-    """ Emit some noisy warnings """
-    os.environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings-' + device + '_pack.log'
+    """
+    Emit some noisy warnings.
+    """
+    environ['OUR_NOISY_WARNINGS'] = f'noisy_warnings-{device}_pack.log'
+

 def my_path():
-    """ Get current PWD """
-    mypath = os.path.realpath(sys.argv[0])
-    return os.path.dirname(mypath)
+    """
+    Get current PWD.
+    """
+    return str(Path(sys_argv[0]).resolve().parent)
+

 def save_vpr_log(filename, build_dir=''):
-    """ Save VPR logic (moves the default output file into a desired path) """
-    shutil.move(os.path.join(build_dir, 'vpr_stdout.log'), filename)
+    """
+    Save VPR logic (moves the default output file into a desired path).
+    """
+    sh_mv(str(Path(build_dir) / 'vpr_stdout.log'), filename)
+

 def fatal(code, message):
     """
-    Print a message informing about an error that has occured and terminate program
-    with a given return code.
+    Print a message informing about an error that has occured and terminate program with a given return code.
     """
     raise(Exception(f'[FATAL ERROR]: {message}'))
     exit(code)

+
 class ResolutionEnv:
     """
-    ResolutionEnv is used to hold onto mappings for variables used in flow and
-    perform text substitutions using those variables.
-    Variables can be referred in any "resolvable" string using the following
-    syntax: 'Some static text ${variable_name}'. The '${variable_name}' part
-    will be replaced by the value associated with name 'variable_name', is such
-    mapping exists.
+    ResolutionEnv is used to hold onto mappings for variables used in flow and perform text substitutions using those
+    variables.
+    Variables can be referred in any "resolvable" string using the following syntax: 'Some static text ${variable_name}'.
+    The '${variable_name}' part will be replaced by the value associated with name 'variable_name', is such mapping
+    exists.

     values: dict
     """
@@ -209,15 +234,14 @@ class ResolutionEnv:
     def resolve(self, s, final=False):
         """
         Perform resolution on `s`.
-        `s` can be a `str`, a `dict` with arbitrary keys and resolvable values,
-        or a `list` of resolvable values.
+        `s` can be a `str`, a `dict` with arbitrary keys and resolvable values, or a `list` of resolvable values.
         final=True - resolve any unknown variables into ''
         This is a hack and probably should be removed in the future
         """
         if type(s) is str:
-            match_list = list(re.finditer('\$\{([^${}]*)\}', s))
-            # Assumption: re.finditer finds matches in a left-to-right order
+            match_list = list(re_finditer('\$\{([^${}]*)\}', s))
+            # Assumption: re_finditer finds matches in a left-to-right order
             match_list.reverse()
             for match in match_list:
                 match_str = match.group(1)
@@ -242,24 +266,30 @@ class ResolutionEnv:
         return s

     def add_values(self, values: dict):
-        """ Add mappings from `values`"""
+        """
+        Add mappings from `values`.
+        """
         for k, v in values.items():
             self.values[k] = self.resolve(v)

+
 verbosity_level = 0

+
 def sfprint(verbosity: int, *args):
-    """ Print with regards to currently set verbosity level """
+    """
+    Print with regards to currently set verbosity level.
+    """
     global verbosity_level
     if verbosity <= verbosity_level:
         print(*args)

+
 def set_verbosity_level(level: int):
     global verbosity_level
     verbosity_level = level

+
 def get_verbosity_level() -> int:
     global verbosity_level
     return verbosity_level
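To make the `${...}` substitution performed by `ResolutionEnv` concrete, a short sketch (paths made up):

    from f4pga.common import ResolutionEnv

    r_env = ResolutionEnv({'shareDir': '/usr/local/share/symbiflow'})
    r_env.add_values({'archDir': '${shareDir}/arch'})   # values are resolved as they are inserted
    r_env.resolve('${archDir}/arch.timing.xml')
    # -> '/usr/local/share/symbiflow/arch/arch.timing.xml'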

View file

@@ -1 +0,0 @@
-# This is only to make pydoc recognize this catalogue as a package

View file

@@ -1,39 +1,20 @@
-#!/usr/bin/python3
-
-# Symbiflow Stage Module
-
-# ----------------------------------------------------------------------------- #
-
-import os
-from f4pga.common import *
-from f4pga.module import *
-
-# ----------------------------------------------------------------------------- #
-
-def concat_fasm(fasm: str, fasm_extra: str, output: str):
-    fasm_data = None
-    fasm_extra_data = None
-    with open(fasm, 'r') as fasm_file, open(fasm_extra, 'r') as fasm_extra_file:
-        fasm_data = fasm_file.read()
-        fasm_extra_data = fasm_extra_file.read()
-    data = fasm_data + '\n' + fasm_extra_data
-    with open(output, 'w') as output_file:
-        output_file.write(data)
-
-def fasm_output_path(build_dir: str, top: str):
-    return f'{build_dir}/{top}.fasm'
+from pathlib import Path
+from shutil import move as sh_mv
+
+from f4pga.common import vpr_specific_values, VprArgs, get_verbosity_level, sub
+from f4pga.module import Module, ModuleContext
+

 class FasmModule(Module):

     def map_io(self, ctx: ModuleContext):
-        build_dir = os.path.dirname(ctx.takes.eblif)
+        build_dir = str(Path(ctx.takes.eblif).parent)
         return {
-            'fasm': fasm_output_path(build_dir, ctx.values.top)
+            'fasm': f'{(Path(build_dir)/ctx.values.top)!s}.fasm'
         }

     def execute(self, ctx: ModuleContext):
-        build_dir = os.path.dirname(ctx.takes.eblif)
+        build_dir = str(Path(ctx.takes.eblif).parent)

         vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values)
@@ -43,10 +24,14 @@ class FasmModule(Module):
         if ctx.takes.sdc:
             optional += ['--sdc', ctx.takes.sdc]

-        s = ['genfasm', vprargs.arch_def,
-             os.path.realpath(ctx.takes.eblif),
-             '--device', vprargs.device_name,
-             '--read_rr_graph', vprargs.rr_graph
+        s = [
+            'genfasm',
+            vprargs.arch_def,
+            str(Path(ctx.takes.eblif).resolve()),
+            '--device',
+            vprargs.device_name,
+            '--read_rr_graph',
+            vprargs.rr_graph
         ] + vprargs.optional

         if get_verbosity_level() >= 2:
@@ -56,13 +41,17 @@ class FasmModule(Module):
         sub(*s, cwd=build_dir)

-        default_fasm_output_name = fasm_output_path(build_dir, ctx.values.top)
+        default_fasm_output_name = f'{(Path(build_dir)/ctx.values.top)!s}.fasm'
         if default_fasm_output_name != ctx.outputs.fasm:
-            shutil.move(default_fasm_output_name, ctx.outputs.fasm)
+            sh_mv(default_fasm_output_name, ctx.outputs.fasm)

         if ctx.takes.fasm_extra:
             yield 'Appending extra FASM...'
-            concat_fasm(ctx.outputs.fasm, ctx.takes.fasm_extra, ctx.outputs.fasm)
+            with \
+                open(ctx.outputs.fasm, 'r') as fasm_file, \
+                open(ctx.takes.fasm_extra, 'r') as fasm_extra_file, \
+                open(ctx.outputs.fasm, 'w') as wfptr:
+                wfptr.write(f"{fasm_file.read()}\n{fasm_extra_file.read()}")
         else:
             yield 'No extra FASM to append'

View file

@@ -1,7 +1,3 @@
-#!/usr/bin/python3
-
-# Symbiflow Stage Module
-
 """
 This module is intended for wrapping simple scripts without rewriting them as
 an sfbuild module. This is mostly to maintain compatibility with workflows
@@ -12,53 +8,42 @@ Accepted module parameters:
 * `script` (string, mandatory): Path to the script to be executed
 * `interpreter` (string, optional): Interpreter for the script
 * `cwd` (string, optional): Current Working Directory for the script
-* `outputs` (dict[string -> dict[string -> string]],
-  mandatory):
-  A dict with output descriptions (dicts). Keys name output dependencies.
-  * `mode` (string, mandatory): "file" or "stdout". Describes how the output is
-    grabbed from the script.
-  * `file` (string, required if `mode` is "file"): Name of the file generated by the
-    script.
-  * `target` (string, required): Default name of the file of the generated
-    dependency. You can use all values available durng map_io stage. Each input
-    dependency alsogets two extra values associated with it:
-    `:dependency_name[noext]`, which contains the path to the dependency the
-    extension with anything after last "." removed and `:dependency_name[dir]` which
-    contains directory paths of the dependency. This is useful for deriving an output
-    name from the input.
+* `outputs` (dict[string -> dict[string -> string]], mandatory):
+  A dict with output descriptions (dicts).
+  Keys name output dependencies.
+  * `mode` (string, mandatory): "file" or "stdout".
+    Describes how the output is grabbed from the script.
+  * `file` (string, required if `mode` is "file"): Name of the file generated by the script.
+  * `target` (string, required): Default name of the file of the generated dependency.
+    You can use all values available durng map_io stage.
+    Each input dependency alsogets two extra values associated with it:
+    `:dependency_name[noext]`, which contains the path to the dependency the extension with anything after last "."
+    removed and `:dependency_name[dir]` which contains directory paths of the dependency.
+    This is useful for deriving an output name from the input.
   * `meta` (string, optional): Description of the output dependency.
 * `inputs` (dict[string -> string | bool], mandatory):
-  A dict with input descriptions. Key is either a name of a named argument or a
-  position of unnamed argument prefaced with "#" (eg. "#1"). Positions are indexed
-  from 1, as it's a convention that 0th argument is the path of the executed program.
-  Values are strings that can contains references to variables to be resolved
-  after the project flow configuration is loaded (that means they can reference
-  values and dependencies which are to be set by the user). All of modules inputs
-  will be determined by the references used. Thus dependency and value definitions
-  are implicit. If the value of the resolved string is empty and is associated with a
-  named argument, the argument in question will be skipped entirely. This allows
-  using optional dependencies. To use a named argument as a flag instead, set it to
-  `true`.
+  A dict with input descriptions.
+  Key is either a name of a named argument or a position of unnamed argument prefaced with "#" (eg. "#1").
+  Positions are indexed from 1, as it's a convention that 0th argument is the path of the executed program.
+  Values are strings that can contains references to variables to be resolved after the project flow configuration is
+  loaded (that means they can reference values and dependencies which are to be set by the user).
+  All of modules inputs will be determined by the references used.
+  Thus dependency and value definitions are implicit.
+  If the value of the resolved string is empty and is associated with a named argument, the argument in question will be
+  skipped entirely.
+  This allows using optional dependencies.
+  To use a named argument as a flag instead, set it to `true`.
 """

 # TODO: `environment` input kind

-# ----------------------------------------------------------------------------- #
-
-import os
-import shutil
-import re
+from pathlib import Path
+from shutil import move as sh_mv
+from re import match as re_match, finditer as re_finditer

-from f4pga.common import *
-from f4pga.module import *
-
-# ----------------------------------------------------------------------------- #
-
-def _generate_stage_name(params):
-    stage_name = params.get('stage_name')
-    if stage_name is None:
-        stage_name = '<unknown>'
-    return f'{stage_name}-generic'
+from f4pga.common import decompose_depname, deep, get_verbosity_level, sub
+from f4pga.module import Module, ModuleContext
+

 def _get_param(params, name: str):
     param = params.get(name)
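Putting the parameter description above together, a hypothetical `params` dict for this wrapper could look as follows (script, stage and dependency names are invented for illustration):

    params = {
        'stage_name': 'fasm_extra',           # hypothetical
        'script': 'scripts/make_extra.py',    # hypothetical
        'interpreter': 'python3',
        'inputs': {
            '#1': '${:eblif}',                # positional argument taken from the `eblif` dependency
            'top': '${top}',                  # named argument taken from the `top` value
        },
        'outputs': {
            'fasm_extra': {
                'mode': 'file',
                'file': 'extra.fasm',
                'target': '${:eblif[noext]}_extra.fasm',
                'meta': 'Extra FASM statements'
            }
        },
    }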
@@ -67,6 +52,7 @@ def _get_param(params, name: str):
                         f'missing `{name}` field')
     return param

+
 def _parse_param_def(param_def: str):
     if param_def[0] == '#':
         return 'positional', int(param_def[1:])
@@ -74,8 +60,6 @@ def _parse_param_def(param_def: str):
         return 'environmental', param_def[1:]
     return 'named', param_def

-_file_noext_deep = deep(file_noext)
-_realdirpath_deep = deep(lambda p: os.path.realpath(os.path.dirname(p)))

 class InputReferences:
     dependencies: 'set[str]'
@@ -89,48 +73,34 @@ class InputReferences:
         self.dependencies = set()
         self.values = set()

+
 def _get_input_references(input: str) -> InputReferences:
     refs = InputReferences()
     if type(input) is not str:
         return refs
-
-    matches = re.finditer('\$\{([^${}]*)\}', input)
-    for match in matches:
+    for match in re_finditer('\$\{([^${}]*)\}', input):
         match_str = match.group(1)
-        if match_str[0] == ':':
-            if len(match_str) < 2:
-                raise Exception('Dependency name must be at least 1 character '
-                                'long')
-            dep_name = re.match('([^\\[\\]]*)', match_str[1:]).group(1)
-            refs.dependencies.add(dep_name)
-        else:
+        if match_str[0] != ':':
             refs.values.add(match_str)
-
+            continue
+        if len(match_str) < 2:
+            raise Exception('Dependency name must be at least 1 character long')
+        refs.dependencies.add(re_match('([^\\[\\]]*)', match_str[1:]).group(1))
     return refs

-def _make_noop1():
-    def noop(_):
-        return
-    return noop

 def _tailcall1(self, fun):
     def newself(arg, self=self, fun=fun):
         fun(arg)
         self(arg)
     return newself

-def _add_extra_values_to_env(ctx: ModuleContext):
-    takes = dict(vars(ctx.takes).items())
-    for take_name, take_path in takes.items():
-        if take_path is None:
-            continue
-        attr_name = f':{take_name}[noext]'
-        ctx.r_env.values[attr_name] = _file_noext_deep(take_path)
-        attr_name = f':{take_name}[dir]'
-        dirname = _realdirpath_deep(take_path)
-        ctx.r_env.values[attr_name] = dirname
+
+def _make_noop1():
+    def noop(_):
+        return
+    return noop
+

 class GenericScriptWrapperModule(Module):
     script_path: str
@@ -139,8 +109,15 @@ class GenericScriptWrapperModule(Module):
     interpreter: 'None | str'
     cwd: 'None | str'

+    @staticmethod
+    def _add_extra_values_to_env(ctx: ModuleContext):
+        for take_name, take_path in vars(ctx.takes).items():
+            if take_path is not None:
+                ctx.r_env.values[f':{take_name}[noext]'] = deep(lambda p: str(Path(p).with_suffix('')))(take_path)
+                ctx.r_env.values[f':{take_name}[dir]'] = deep(lambda p: str(Path(p).parent.resolve()))(take_path)
+
     def map_io(self, ctx: ModuleContext):
-        _add_extra_values_to_env(ctx)
+        self._add_extra_values_to_env(ctx)

         outputs = {}
         for dep, _, out_path in self.file_outputs:
@@ -155,7 +132,7 @@ class GenericScriptWrapperModule(Module):
         return outputs

     def execute(self, ctx: ModuleContext):
-        _add_extra_values_to_env(ctx)
+        self._add_extra_values_to_env(ctx)

         cwd = ctx.r_env.resolve(self.cwd)
@@ -187,7 +164,7 @@ class GenericScriptWrapperModule(Module):
             file = ctx.r_env.resolve(file, final=True)
             target = ctx.r_env.resolve(target, final=True)
             if target != file:
-                shutil.move(file, target)
+                sh_mv(file, target)

     def _init_outputs(self, output_defs: 'dict[str, dict[str, str]]'):
         self.stdout_target = None
@@ -294,7 +271,8 @@ class GenericScriptWrapperModule(Module):
             self.values.append(val)

     def __init__(self, params):
-        self.name = _generate_stage_name(params)
+        stage_name = params.get('stage_name')
+        self.name = f"{'<unknown>' if stage_name is None else stage_name}-generic"
         self.no_of_phases = 2
         self.script_path = params.get('script')
         self.interpreter = params.get('interpreter')
@@ -307,4 +285,4 @@ class GenericScriptWrapperModule(Module):
         self._init_outputs(_get_param(params, 'outputs'))
         self._init_inputs(_get_param(params, 'inputs'))

 ModuleClass = GenericScriptWrapperModule

View file

@@ -1,7 +1,3 @@
-#!/usr/bin/python3
-
-# Symbiflow Stage Module
-
 """
 Rename (ie. change) dependencies and values of a module. This module wraps another,
 module whoose name is specified in `params.module` and changes the names of the
@@ -25,13 +21,10 @@ Accepted module parameters:

 """

-# ----------------------------------------------------------------------------- #
-
 from f4pga.common import *
-from f4pga.module import *
+from f4pga.module import Module, ModuleContext
 from f4pga.module_runner import get_module

-# ----------------------------------------------------------------------------- #

 def _switch_keys(d: 'dict[str, ]', renames: 'dict[str, str]') -> 'dict[str, ]':
     newd = {}
@@ -43,6 +36,7 @@ def _switch_keys(d: 'dict[str, ]', renames: 'dict[str, str]') -> 'dict[str, ]':
             newd[k] = v
     return newd

+
 def _switchback_attrs(d: Namespace, renames: 'dict[str, str]') -> SimpleNamespace:
     newn = SimpleNamespace()
     for k, v in vars(d).items():
@@ -54,6 +48,7 @@ def _switchback_attrs(d: Namespace, renames: 'dict[str, str]') -> SimpleNamespace:
             setattr(newn, k, v)
     return newn

+
 def _switch_entries(l: 'list[str]', renames: 'dict[str, str]') -> 'list[str]':
     newl = []
     for e in l:
@@ -65,12 +60,11 @@ def _switch_entries(l: 'list[str]', renames: 'dict[str, str]') -> 'list[str]':
         newl.append(r if r is not None else e)
     return newl

-def _generate_stage_name(name: str):
-    return f'{name}-io_renamed'

 def _or_empty_dict(d: 'dict | None'):
     return d if d is not None else {}

+
 class IORenameModule(Module):
     module: Module
     rename_takes: 'dict[str, str]'
@@ -102,7 +96,7 @@ class IORenameModule(Module):
         self.rename_values = _or_empty_dict(params.get("rename_values"))

         self.module = module
-        self.name = _generate_stage_name(module.name)
+        self.name = f'{module.name}-io_renamed'
         self.no_of_phases = module.no_of_phases
         self.takes = _switch_entries(module.takes, self.rename_takes)
         self.produces = _switch_entries(module.produces, self.rename_produces)
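For this wrapper, a hypothetical parameter set could be the following (module path and name mappings invented; the part of the docstring elided from this hunk defines the exact orientation of the rename dicts):

    params = {
        'module': 'common:mkdirs',
        'rename_takes': {'eblif': 'eblif_renamed'},
        'rename_produces': {'fasm': 'fasm_renamed'},
        'rename_values': {'top': 'top_renamed'},
    }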

View file

@@ -1,21 +1,14 @@
-#!/usr/bin/python3
-
-# Symbiflow Stage Module
-
-""" This module is used as a helper in a abuild chain to automate creating build
-directiores. It' currenty the only parametric module, meaning it can take
-user-provided input at an early stage in order todetermine its take/produces
-I/O. This allows other repesenting configurable directories, such as a build
-directory as dependencies and by doing so, allow the dependency algorithm to
-lazily create the directories if they become necessary. """
-
-# ----------------------------------------------------------------------------- #
-
-import os
-from f4pga.common import *
-from f4pga.module import *
-
-# ----------------------------------------------------------------------------- #
+"""
+This module is used as a helper in a abuild chain to automate creating build directiores.
+It's currenty the only parametric module, meaning it can take user-provided input at an early stage in order to
+determine its take/produces I/O.
+This allows other repesenting configurable directories, such as a build directory as dependencies and by doing so, allow
+the dependency algorithm to lazily create the directories if they become necessary.
+"""
+
+from pathlib import Path
+from f4pga.module import Module, ModuleContext
+

 class MkDirsModule(Module):
     deps_to_produce: 'dict[str, str]'
@@ -27,7 +20,7 @@ class MkDirsModule(Module):
         outputs = vars(ctx.outputs)
         for _, path in outputs.items():
             yield f'Creating directory {path}...'
-            os.makedirs(path, exist_ok=True)
+            Path(path).mkdir(parents=True, exist_ok=True)

     def __init__(self, params):
         self.name = 'mkdirs'
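Given the parametric design described in the docstring, instantiating this module might look like the following (dependency name and path invented; `params` is assumed to map produced dependency names to directory paths):

    module = MkDirsModule({'build_dir': 'build/arty_35'})
    # map_io would then expose `build_dir`, and execute would create build/arty_35 lazily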

View file

@@ -1,54 +1,54 @@
-#!/usr/bin/python3
-
-# Symbiflow Stage Module
-
-# ----------------------------------------------------------------------------- #
-
-import os
-import re
+from pathlib import Path
+from os import remove as os_remove
+from shutil import move as sh_mv

 from f4pga.common import *
-from f4pga.module import *
-
-# ----------------------------------------------------------------------------- #
+from f4pga.module import Module, ModuleContext

 DEFAULT_TIMING_RPT = 'pre_pack.report_timing.setup.rpt'
 DEFAULT_UTIL_RPT = 'packing_pin_util.rpt'

+
 class PackModule(Module):
     def map_io(self, ctx: ModuleContext):
-        p = file_noext(ctx.takes.eblif)
-        build_dir = os.path.dirname(p)
-
+        epath = Path(ctx.takes.eblif)
+        build_dir = epath.parent
         return {
-            'net': p + '.net',
-            'util_rpt': os.path.join(build_dir, DEFAULT_UTIL_RPT),
-            'timing_rpt': os.path.join(build_dir, DEFAULT_TIMING_RPT)
+            'net': str(epath.with_suffix('.net')),
+            'util_rpt': str(build_dir / DEFAULT_UTIL_RPT),
+            'timing_rpt': str(build_dir / DEFAULT_TIMING_RPT)
         }

     def execute(self, ctx: ModuleContext):
-        vpr_args = VprArgs(ctx.share, ctx.takes.eblif, ctx.values,
-                           sdc_file=ctx.takes.sdc)
-        build_dir = os.path.dirname(ctx.outputs.net)
-
         noisy_warnings(ctx.values.device)
+        build_dir = Path(ctx.outputs.net).parent

         yield 'Packing with VPR...'
-        vpr('pack', vpr_args, cwd=build_dir)
+        vpr(
+            'pack',
+            VprArgs(
+                ctx.share,
+                ctx.takes.eblif,
+                ctx.values,
+                sdc_file=ctx.takes.sdc
+            ),
+            cwd=str(build_dir)
+        )

-        og_log = os.path.join(build_dir, 'vpr_stdout.log')
+        og_log = str(build_dir / 'vpr_stdout.log')

         yield 'Moving/deleting files...'
         if ctx.outputs.pack_log:
-            shutil.move(og_log, ctx.outputs.pack_log)
+            sh_mv(og_log, ctx.outputs.pack_log)
         else:
-            os.remove(og_log)
+            os_remove(og_log)

         if ctx.outputs.timing_rpt:
-            shutil.move(os.path.join(build_dir, DEFAULT_TIMING_RPT),
-                        ctx.outputs.timing_rpt)
+            sh_mv(str(build_dir / DEFAULT_TIMING_RPT), ctx.outputs.timing_rpt)
         if ctx.outputs.util_rpt:
-            shutil.move(os.path.join(build_dir, DEFAULT_UTIL_RPT),
-                        ctx.outputs.util_rpt)
+            sh_mv(str(build_dir / DEFAULT_UTIL_RPT), ctx.outputs.util_rpt)

     def __init__(self, _):
         self.name = 'pack'
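For orientation, the `vpr('pack', VprArgs(...))` call above expands, via `vpr` and `sub` in common.py, into a command line roughly like the following (placeholders instead of resolved values; optional arguments appended last):

    vpr ${arch_def} ${eblif} --device ${vpr_grid_layout_name} --read_rr_graph ${rr_graph_real_bin} \
        --read_router_lookahead ${rr_graph_lookahead_bin} --read_placement_delay_lookup ${vpr_place_delay} \
        --pack [vpr_options...] --sdc_file ${sdc}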

View file

@@ -1,34 +1,28 @@
-#!/usr/bin/python3
-
-# Symbiflow Stage Module
-
-# ----------------------------------------------------------------------------- #
-
+from pathlib import Path
 import os
+from shutil import move as sh_mv
+from re import match as re_match

 from f4pga.common import *
-from f4pga.module import *
-
-# ----------------------------------------------------------------------------- #
+from f4pga.module import Module, ModuleContext
+

 def default_output_name(place_constraints):
     p = place_constraints
-    m = re.match('(.*)\\.[^.]*$', place_constraints)
+    m = re_match('(.*)\\.[^.]*$', place_constraints)
     if m:
-        p = m.groups()[0] + '.place'
-    else:
-        p += '.place'
-    return p
+        return m.groups()[0] + '.place'
+    return f'{p}.place'
+

 def place_constraints_file(ctx: ModuleContext):
-    dummy = False
     p = ctx.takes.place_constraints
-    if not p:
-        p = ctx.takes.io_place
-    if not p:
-        dummy = True
-        p = file_noext(ctx.takes.eblif) + '.place'
-
-    return p, dummy
+    if p:
+        return p, False
+    p = ctx.takes.io_place
+    if p:
+        return p, False
+    return f'{Path(ctx.takes.eblif).stem}.place', True

 class PlaceModule(Module):
     def map_io(self, ctx: ModuleContext):
@@ -45,7 +39,7 @@ class PlaceModule(Module):
         with open(place_constraints, 'wb') as f:
             f.write(b'')

-        build_dir = os.path.dirname(ctx.takes.eblif)
+        build_dir = str(Path(ctx.takes.eblif).parent)

         vpr_options = ['--fix_clusters', place_constraints]
@@ -63,7 +57,7 @@ class PlaceModule(Module):
         # the ones in flow configuration.
         if ctx.is_output_explicit('place'):
             output_file = default_output_name(place_constraints)
-            shutil.move(output_file, ctx.outputs.place)
+            sh_mv(output_file, ctx.outputs.place)

         yield 'Saving log...'
         save_vpr_log('place.log', build_dir=build_dir)

View file

@@ -1,24 +1,17 @@
-#!/usr/bin/python3
-
-# Symbiflow Stage Module
-
-# ----------------------------------------------------------------------------- #
-
-import os
+from pathlib import Path

 from f4pga.common import *
-from f4pga.module import *
-
-# ----------------------------------------------------------------------------- #
+from f4pga.module import Module, ModuleContext
+

 class PlaceConstraintsModule(Module):
     def map_io(self, ctx: ModuleContext):
         return {
-            'place_constraints': file_noext(ctx.takes.net) + '.preplace'
+            'place_constraints': f'{Path(ctx.takes.net).stem!s}.preplace'
         }

     def execute(self, ctx: ModuleContext):
-        arch_dir = os.path.join(ctx.share, 'arch')
-        arch_def = os.path.join(arch_dir, ctx.values.device, 'arch.timing.xml')
+        arch_dir = str(Path(ctx.share) / 'arch')
+        arch_def = str(Path(arch_dir) / ctx.values.device / 'arch.timing.xml')

         database = sub('prjxray-config').decode().replace('\n', '')

View file

@@ -1,41 +1,41 @@
-#!/usr/bin/python3
-
-# Symbiflow Stage Module
-
-# ----------------------------------------------------------------------------- #
-
-import os
-import shutil
+from pathlib import Path
+from shutil import move as sh_mv

 from f4pga.common import *
-from f4pga.module import *
-
-# ----------------------------------------------------------------------------- #
+from f4pga.module import Module, ModuleContext
+

-def route_place_file(eblif: str):
-    return file_noext(eblif) + '.route'
+def route_place_file(ctx: ModuleContext):
+    return str(Path(ctx.takes.eblif).with_suffix('.route'))
+

 class RouteModule(Module):
     def map_io(self, ctx: ModuleContext):
         return {
-            'route': route_place_file(ctx.takes.eblif)
+            'route': route_place_file(ctx)
         }

     def execute(self, ctx: ModuleContext):
-        build_dir = os.path.dirname(ctx.takes.eblif)
+        build_dir = str(Path(ctx.takes.eblif).parent)

         vpr_options = []
         if ctx.values.vpr_options:
             vpr_options = options_dict_to_list(ctx.values.vpr_options)

-        vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values,
-                          sdc_file=ctx.takes.sdc)
-
         yield 'Routing with VPR...'
-        vpr('route', vprargs, cwd=build_dir)
+        vpr(
+            'route',
+            VprArgs(
+                ctx.share,
+                ctx.takes.eblif,
+                ctx.values,
+                sdc_file=ctx.takes.sdc
+            ),
+            cwd=build_dir
+        )

         if ctx.is_output_explicit('route'):
-            shutil.move(route_place_file(ctx.takes.eblif), ctx.outputs.route)
+            sh_mv(route_place_file(ctx), ctx.outputs.route)

         yield 'Saving log...'
         save_vpr_log('route.log', build_dir=build_dir)

View file

@ -1,17 +1,12 @@
#!/usr/bin/python3
# Symbiflow Stage Module
# ----------------------------------------------------------------------------- #
import os import os
from f4pga.common import * from f4pga.common import *
from f4pga.module import * from f4pga.module import Module, ModuleContext
# ----------------------------------------------------------------------------- #
# Set up environment variables for YOSYS TCL scripts
def yosys_setup_tcl_env(tcl_env_def): def yosys_setup_tcl_env(tcl_env_def):
"""
Set up environment variables for YOSYS TCL scripts.
"""
env = {} env = {}
for key, value in tcl_env_def.items(): for key, value in tcl_env_def.items():
if value is None: if value is None:
@ -22,6 +17,7 @@ def yosys_setup_tcl_env(tcl_env_def):
env[key] = v env[key] = v
return env return env
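
The hunk above elides the middle of `yosys_setup_tcl_env`; the following is a hedged sketch of a helper with this interface, assuming `None` values are skipped and list values are flattened into the single string `v` assigned above:

# Hedged sketch of a yosys_setup_tcl_env-style helper; the list handling is
# an assumption inferred from the visible `env[key] = v` fragment.
def setup_tcl_env_sketch(tcl_env_def):
    env = {}
    for key, value in tcl_env_def.items():
        if value is None:
            continue  # unset entries are assumed to be skipped
        if isinstance(value, list):
            v = ' '.join(str(s) for s in value)  # flatten lists to one string
        else:
            v = str(value)
        env[key] = v
    return env
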
def yosys_synth(tcl, tcl_env, verilog_files=[], read_verilog_args=None, log=None): def yosys_synth(tcl, tcl_env, verilog_files=[], read_verilog_args=None, log=None):
# Set up environment for TCL weirdness # Set up environment for TCL weirdness
optional = [] optional = []
@ -41,19 +37,15 @@ def yosys_synth(tcl, tcl_env, verilog_files=[], read_verilog_args=None, log=None
verilog_files = [] verilog_files = []
# Execute YOSYS command # Execute YOSYS command
return sub(*(['yosys', '-p', tcl] + optional + verilog_files), return sub(*(['yosys', '-p', tcl] + optional + verilog_files), env=env)
env=env)
def yosys_conv(tcl, tcl_env, synth_json): def yosys_conv(tcl, tcl_env, synth_json):
# Set up environment for TCL weirdness # Set up environment for TCL weirdness
env = os.environ.copy() env = os.environ.copy()
env.update(tcl_env) env.update(tcl_env)
return sub('yosys', '-p', f'read_json {synth_json}; tcl {tcl}', env=env)
# Execute YOSYS command
return sub('yosys', '-p', 'read_json ' + synth_json + '; tcl ' + tcl,
env=env)
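
A usage sketch showing how the two helpers are presumably chained by a synthesis stage (the TCL script names, environment keys, and file names are hypothetical):

# Hypothetical two-step flow: synthesize to JSON, then run the conversion TCL.
tcl_env = yosys_setup_tcl_env({'TOP': 'top', 'OUT_JSON': 'top.json'})
yosys_synth('synth.tcl', tcl_env, verilog_files=['top.v'], log='synth.log')
yosys_conv('conv.tcl', tcl_env, 'top.json')
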
# ----------------------------------------------------------------------------- #
class SynthModule(Module): class SynthModule(Module):
extra_products: 'list[str]' extra_products: 'list[str]'

View file

@ -1,22 +1,21 @@
import os from pathlib import Path
import json
from f4pga.common import file_noext, ResolutionEnv, deep
from f4pga.stage import Stage
from copy import copy from copy import copy
from os import listdir as os_listdir
from json import dump as json_dump, load as json_load
from f4pga.common import ResolutionEnv, deep
from f4pga.stage import Stage
_realpath_deep = deep(os.path.realpath)
def open_flow_cfg(path: str) -> dict: def open_flow_cfg(path: str) -> dict:
flow_cfg_json: str with Path(path).open('r') as rfptr:
with open(path, 'r') as flow_cfg_file: return json_load(rfptr)
flow_cfg_json = flow_cfg_file.read()
return json.loads(flow_cfg_json)
def save_flow_cfg(flow: dict, path: str): def save_flow_cfg(flow: dict, path: str):
flow_cfg_json = json.dumps(flow, indent=4) with Path(path).open('w') as wfptr:
with open(path, 'w') as flow_cfg_file: json_dump(flow, wfptr, indent=4)
flow_cfg_file.write(flow_cfg_json)
def _get_lazy_dict(parent: dict, name: str): def _get_lazy_dict(parent: dict, name: str):
d = parent.get(name) d = parent.get(name)
@ -25,69 +24,96 @@ def _get_lazy_dict(parent: dict, name: str):
parent[name] = d parent[name] = d
return d return d
def _get_ov_dict(dname: str, flow: dict,
platform: 'str | None' = None, stage: 'str | None' = None):
d: dict
if platform:
platform_dict: dict = flow[platform]
if stage:
stage_dict: dict = _get_lazy_dict(platform_dict, stage)
d = _get_lazy_dict(stage_dict, dname)
else:
d = _get_lazy_dict(platform_dict, dname)
else:
d = _get_lazy_dict(flow, dname)
return d def _get_ov_dict(
dname: str,
flow: dict,
platform: 'str | None' = None,
stage: 'str | None' = None
):
if not platform:
return _get_lazy_dict(flow, dname)
platform_dict: dict = flow[platform]
if stage:
stage_dict: dict = _get_lazy_dict(platform_dict, stage)
return _get_lazy_dict(stage_dict, dname)
return _get_lazy_dict(platform_dict, dname)
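
The lookup above mirrors the nesting of the flow configuration itself; a sketch of the three override scopes it resolves (the platform, stage, and value names are hypothetical):

# Hypothetical flow configuration showing the scopes walked by _get_ov_dict.
flow = {
    'values': {'top': 'counter'},           # global scope
    'arty_35': {                            # platform scope
        'values': {'part': 'xc7a35t'},
        'synth': {                          # stage scope
            'values': {'extra_args': ''}
        }
    }
}
# _get_ov_dict('values', flow)                      -> the global dict
# _get_ov_dict('values', flow, 'arty_35')           -> the platform dict
# _get_ov_dict('values', flow, 'arty_35', 'synth')  -> the stage dict
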
def _get_dep_dict(flow: dict,
platform: 'str | None' = None, stage: 'str | None' = None): def _get_dep_dict(
flow: dict,
platform: 'str | None' = None,
stage: 'str | None' = None
):
return _get_ov_dict('dependencies', flow, platform, stage) return _get_ov_dict('dependencies', flow, platform, stage)
def _get_vals_dict(flow: dict,
platform: 'str | None' = None, stage: 'str | None' = None): def _get_vals_dict(
flow: dict,
platform: 'str | None' = None,
stage: 'str | None' = None
):
return _get_ov_dict('values', flow, platform, stage) return _get_ov_dict('values', flow, platform, stage)
def _add_ov(ov_dict_getter, failstr_constr, flow_cfg: dict, name: str,
values: list, platform: 'str | None' = None,
stage: 'str | None' = None) -> bool:
d = ov_dict_getter(flow_cfg, platform, stage)
deps = d.get(name)
if type(deps) is list:
deps += values
elif deps is None:
d[name] = values
else:
print(failstr_constr(name))
return False
def _add_ov(
ov_dict_getter,
failstr_constr,
flow_cfg: dict,
name: str,
values: list,
platform: 'str | None' = None,
stage: 'str | None' = None
) -> bool:
d = ov_dict_getter(flow_cfg, platform, stage)
deps = d.get(name)
if type(deps) is list:
deps += values
return True return True
def _rm_ov_by_values(ov_dict_getter, notset_str_constr, notlist_str_constr, if deps is None:
flow: dict, name: str, vals: list, d[name] = values
platform: 'str | None' = None, return True
stage: 'str | None' = None) -> bool:
values_to_remove = set(vals) print(failstr_constr(name))
return False
def _rm_ov_by_values(
ov_dict_getter,
notset_str_constr,
notlist_str_constr,
flow: dict,
name: str,
vals: list,
platform: 'str | None' = None,
stage: 'str | None' = None
) -> bool:
d = ov_dict_getter(flow, platform, stage) d = ov_dict_getter(flow, platform, stage)
vallist: list = d.get(name) vallist: list = d.get(name)
if type(vallist) is list: if type(vallist) is list:
d[name] = [val for val in vallist if val not in values_to_remove] d[name] = [val for val in vallist if val not in set(vals)]
elif vallist is None: return True
if vallist is None:
print(notset_str_constr(name)) print(notset_str_constr(name))
return False return False
else:
print(notlist_str_constr(name))
return False
return True print(notlist_str_constr(name))
return False
def _rm_ov_by_idx(ov_dict_getter, notset_str_constr, notlist_str_constr, def _rm_ov_by_idx(
flow: dict, name: str, idcs: list, ov_dict_getter,
platform: 'str | None' = None, notset_str_constr,
stage: 'str | None' = None) -> bool: notlist_str_constr,
flow: dict,
name: str,
idcs: list,
platform: 'str | None' = None,
stage: 'str | None' = None
) -> bool:
idcs.sort(reverse=True) idcs.sort(reverse=True)
if len(idcs) == 0: if len(idcs) == 0:
@ -103,17 +129,22 @@ def _rm_ov_by_idx(ov_dict_getter, notset_str_constr, notlist_str_constr,
for idx in idcs: for idx in idcs:
vallist.pop(idx) vallist.pop(idx)
elif vallist is None: return True
if vallist is None:
print(notset_str_constr(name)) print(notset_str_constr(name))
return False return False
else:
print(notlist_str_constr(name))
return False
return True print(notlist_str_constr(name))
return False
def _get_ovs_raw(dict_name: str, flow_cfg,
platform: 'str | None', stage: 'str | None'): def _get_ovs_raw(
dict_name: str,
flow_cfg,
platform: 'str | None',
stage: 'str | None'
):
vals = flow_cfg.get(dict_name) vals = flow_cfg.get(dict_name)
if vals is None: if vals is None:
vals = {} vals = {}
@ -128,48 +159,105 @@ def _get_ovs_raw(dict_name: str, flow_cfg,
return vals return vals
def _remove_dependencies_by_values(flow: dict, name: str, deps: list,
platform: 'str | None' = None, def _remove_dependencies_by_values(
stage: 'str | None' = None) -> bool: flow: dict,
name: str,
deps: list,
platform: 'str | None' = None,
stage: 'str | None' = None
) -> bool:
def notset_str_constr(dname): def notset_str_constr(dname):
return f'Dependency `{dname}` is not set. Nothing to remove.' return f'Dependency `{dname}` is not set. Nothing to remove.'
def notlist_str_constr(dname): def notlist_str_constr(dname):
return f'Dependency `{dname}` is not a list! Use unsetd instead.' return f'Dependency `{dname}` is not a list! Use unsetd instead.'
return _rm_ov_by_values(_get_dep_dict, notset_str_constr, notlist_str_constr, return _rm_ov_by_values(
flow, name, deps, platform, stage) _get_dep_dict,
notset_str_constr,
notlist_str_constr,
flow,
name,
deps,
platform,
stage
)
def _remove_dependencies_by_idx(flow: dict, name: str, idcs: list,
platform: 'str | None' = None, def _remove_dependencies_by_idx(
stage: 'str | None' = None) -> bool: flow: dict,
name: str,
idcs: list,
platform: 'str | None' = None,
stage: 'str | None' = None
) -> bool:
def notset_str_constr(dname): def notset_str_constr(dname):
return f'Dependency `{dname}` is not set. Nothing to remove.' return f'Dependency `{dname}` is not set. Nothing to remove.'
def notlist_str_constr(dname): def notlist_str_constr(dname):
return f'Dependency `{dname}` is not a list! Use unsetd instead.' return f'Dependency `{dname}` is not a list! Use unsetd instead.'
return _rm_ov_by_idx(_get_dep_dict, notset_str_constr, notlist_str_constr, return _rm_ov_by_idx(
flow, name, idcs, platform, stage) _get_dep_dict,
notset_str_constr,
notlist_str_constr,
flow,
name,
idcs,
platform,
stage
)
def _remove_values_by_values(flow: dict, name: str, deps: list,
platform: 'str | None' = None, def _remove_values_by_values(
stage: 'str | None' = None) -> bool: flow: dict,
name: str,
deps: list,
platform: 'str | None' = None,
stage: 'str | None' = None
) -> bool:
def notset_str_constr(vname): def notset_str_constr(vname):
return f'Value `{vname}` is not set. Nothing to remove.' return f'Value `{vname}` is not set. Nothing to remove.'
def notlist_str_constr(vname): def notlist_str_constr(vname):
return f'Value `{vname}` is not a list! Use unsetv instead.' return f'Value `{vname}` is not a list! Use unsetv instead.'
return _rm_ov_by_values(_get_vals_dict, notset_str_constr, notlist_str_constr, return _rm_ov_by_values(
flow, name, deps, platform, stage) _get_vals_dict,
notset_str_constr,
notlist_str_constr,
flow,
name,
deps,
platform,
stage
)
def _remove_values_by_idx(flow: dict, name: str, idcs: list,
platform: 'str | None' = None, def _remove_values_by_idx(
stage: 'str | None' = None) -> bool: flow: dict,
name: str,
idcs: list,
platform: 'str | None' = None,
stage: 'str | None' = None
) -> bool:
def notset_str_constr(vname): def notset_str_constr(vname):
return f'Value `{vname}` is not set. Nothing to remove.' return f'Value `{vname}` is not set. Nothing to remove.'
def notlist_str_constr(vname): def notlist_str_constr(vname):
return f'Value `{vname}` is not a list! Use unsetv instead.' return f'Value `{vname}` is not a list! Use unsetv instead.'
return _rm_ov_by_idx(_get_vals_dict, notset_str_constr, notlist_str_constr, return _rm_ov_by_idx(
flow, name, idcs, platform, stage) _get_vals_dict,
notset_str_constr,
notlist_str_constr,
flow,
name,
idcs,
platform,
stage
)
def unset_dependency(flow: dict, name: str,
platform: 'str | None', stage: 'str | None'): def unset_dependency(
flow: dict,
name: str,
platform: 'str | None',
stage: 'str | None'
):
d = _get_dep_dict(flow, platform, stage) d = _get_dep_dict(flow, platform, stage)
if d.get(name) is None: if d.get(name) is None:
print(f'Dependency `{name}` is not set!') print(f'Dependency `{name}` is not set!')
@ -177,22 +265,26 @@ def unset_dependency(flow: dict, name: str,
d.pop(name) d.pop(name)
return True return True
def verify_platform_name(platform: str, mypath: str): def verify_platform_name(platform: str, mypath: str):
for plat_def_filename in os.listdir(os.path.join(mypath, 'platforms')): for plat_def_filename in os_listdir(str(Path(mypath) / 'platforms')):
platform_name = file_noext(plat_def_filename) platform_name = str(Path(plat_def_filename).stem)
if platform == platform_name: if platform == platform_name:
return True return True
return False return False
def verify_stage(platform: str, stage: str, mypath: str): def verify_stage(platform: str, stage: str, mypath: str):
# TODO: Verify stage # TODO: Verify stage
return True return True
def _is_kword(w: str): def _is_kword(w: str):
return \ return \
(w == 'dependencies') | (w == 'values') | \ (w == 'dependencies') | (w == 'values') | \
(w == 'default_platform') | (w == 'default_target') (w == 'default_platform') | (w == 'default_target')
class FlowDefinition: class FlowDefinition:
# stage name -> module path mapping # stage name -> module path mapping
stages: 'dict[str, Stage]' stages: 'dict[str, Stage]'
@ -225,6 +317,7 @@ class FlowDefinition:
r_env.add_values(stage.value_overrides) r_env.add_values(stage.value_overrides)
return r_env return r_env
class ProjectFlowConfig: class ProjectFlowConfig:
flow_cfg: dict flow_cfg: dict
# r_env: ResolutionEnv # r_env: ResolutionEnv
@ -275,20 +368,26 @@ class ProjectFlowConfig:
return r_env return r_env
""" Get dependencies without value resolution applied """
def get_dependencies_raw(self, platform: 'str | None' = None): def get_dependencies_raw(self, platform: 'str | None' = None):
"""
Get dependencies without value resolution applied.
"""
return _get_ovs_raw('dependencies', self.flow_cfg, platform, None) return _get_ovs_raw('dependencies', self.flow_cfg, platform, None)
""" Get values without value resolution applied """ def get_values_raw(
def get_values_raw(self, platform: 'str | None' = None, self,
stage: 'str | None' = None): platform: 'str | None' = None,
stage: 'str | None' = None
):
"""
Get values without value resolution applied.
"""
return _get_ovs_raw('values', self.flow_cfg, platform, stage) return _get_ovs_raw('values', self.flow_cfg, platform, stage)
def get_stage_value_overrides(self, platform: str, stage: str): def get_stage_value_overrides(self, platform: str, stage: str):
stage_cfg = self.flow_cfg[platform].get(stage) stage_cfg = self.flow_cfg[platform].get(stage)
if stage_cfg is None: if stage_cfg is None:
return {} return {}
stage_vals_ovds = stage_cfg.get('values') stage_vals_ovds = stage_cfg.get('values')
if stage_vals_ovds is None: if stage_vals_ovds is None:
return {} return {}
@ -317,8 +416,7 @@ class FlowConfig:
raw_project_deps = project_config.get_dependencies_raw(platform) raw_project_deps = project_config.get_dependencies_raw(platform)
self.dependencies_explicit = \ self.dependencies_explicit = deep(lambda p: str(Path(p).resolve()))(self.r_env.resolve(raw_project_deps))
_realpath_deep(self.r_env.resolve(raw_project_deps))
for stage_name, stage in platform_def.stages.items(): for stage_name, stage in platform_def.stages.items():
project_val_ovds = \ project_val_ovds = \
@ -349,12 +447,9 @@ class FlowConfigException(Exception):
def __str__(self) -> str: def __str__(self) -> str:
return f'Error in config `{self.path}`: {self.message}' return f'Error in config `{self.path}`: {self.message}'
def open_project_flow_cfg(path: str) -> ProjectFlowConfig: def open_project_flow_cfg(path: str) -> ProjectFlowConfig:
cfg = ProjectFlowConfig(path) cfg = ProjectFlowConfig(path)
with Path(path).open('r') as rfptr:
flow_cfg_json: str cfg.flow_cfg = json_load(rfptr)
with open(path, 'r') as flow_cfg_file: return cfg
flow_cfg_json = flow_cfg_file.read()
cfg.flow_cfg = json.loads(flow_cfg_json)
return cfg

View file

@ -1,17 +1,22 @@
# Here are the things necessary to write a symbiflow Module """
Here are the things necessary to write an F4PGA Module.
"""
import abc
from types import SimpleNamespace from types import SimpleNamespace
from f4pga.common import * from abc import abstractmethod
from colorama import Fore, Style
from f4pga.common import (
decompose_depname,
ResolutionEnv
)
class Module: class Module:
""" """
A `Module` is a wrapper for whatever tool is used in a flow. A `Module` is a wrapper for whatever tool is used in a flow.
Modules can request dependencies, values and are guaranteed to have all the Modules can request dependencies, values and are guaranteed to have all the required ones present when entering
required ones present when entering `exec` mode. `exec` mode.
They also have to specify what dependencies they produce and create the files They also have to specify what dependencies they produce and create the files for these dependencies.
for these dependencies.
""" """
no_of_phases: int no_of_phases: int
@ -21,16 +26,16 @@ class Module:
values: 'list[str]' values: 'list[str]'
prod_meta: 'dict[str, str]' prod_meta: 'dict[str, str]'
@abc.abstractmethod @abstractmethod
def execute(self, ctx): def execute(self, ctx):
""" """
Executes the module. Use yield to print a message informing about the current Executes the module.
execution phase. Use yield to print a message informing about the current execution phase.
`ctx` is `ModuleContext`. `ctx` is `ModuleContext`.
""" """
pass pass
@abc.abstractmethod @abstractmethod
def map_io(self, ctx) -> 'dict[str, ]': def map_io(self, ctx) -> 'dict[str, ]':
""" """
Returns paths for outputs derived from given inputs. Returns paths for outputs derived from given inputs.
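
A minimal sketch of a custom module implementing this interface; the module name, the `source`/`copy` dependency names, and the constructor argument are hypothetical assumptions based on the attributes listed above:

from pathlib import Path
from shutil import copy as sh_copy
from f4pga.module import Module

class CopyModule(Module):
    def map_io(self, ctx):
        # Derive the output path from the `source` input.
        return {'copy': str(Path(ctx.takes.source).with_suffix('.copy'))}

    def execute(self, ctx):
        yield 'Copying...'  # one yield per phase, matching no_of_phases
        sh_copy(ctx.takes.source, ctx.outputs.copy)

    def __init__(self, _params):  # the constructor argument is an assumption
        self.name = 'copy'
        self.no_of_phases = 1
        self.takes = ['source']
        self.produces = ['copy']
        self.values = []
        self.prod_meta = {'copy': 'A verbatim copy of the input file'}

ModuleClass = CopyModule  # module files are expected to expose ModuleClass
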
@ -44,48 +49,50 @@ class Module:
self.name = '<BASE STAGE>' self.name = '<BASE STAGE>'
self.prod_meta = {} self.prod_meta = {}
class ModuleContext: class ModuleContext:
""" """
A class for objects holding mappings for dependencies and values as well as A class for objects holding mappings for dependencies and values as well as other information needed during module
other information needed during module execution. execution.
""" """
share: str # Absolute path to Symbiflow's share directory share: str # Absolute path to Symbiflow's share directory
bin: str # Absolute path to Symbiflow's bin directory bin: str # Absolute path to Symbiflow's bin directory
takes: SimpleNamespace # Maps symbolic dependency names to relative takes: SimpleNamespace # Maps symbolic dependency names to relative paths.
# paths. produces: SimpleNamespace # Contains mappings for explicitly specified dependencies.
produces: SimpleNamespace # Contains mappings for explicitly specified # Useful mostly for checking for on-demand optional outputs (such as logs) with
# dependencies. Useful mostly for checking for # `is_output_explicit` method.
# on-demand optional outputs (such as logs) outputs: SimpleNamespace # Contains mappings for all available outputs.
# with `is_output_explicit` method. values: SimpleNamespace # Contains all available requested values.
outputs: SimpleNamespace # Contains mappings for all available outputs. r_env: ResolutionEnv # `ResolutionEnvironment` object holding mappings for current scope.
values: SimpleNamespace # Contains all available requested values.
r_env: ResolutionEnv # `ResolutionEnvironment` object holding mappings
# for current scope.
module_name: str # Name of the module. module_name: str # Name of the module.
def is_output_explicit(self, name: str): def is_output_explicit(self, name: str):
""" True if user has explicitely specified output's path. """ """
o = getattr(self.produces, name) True if user has explicitely specified output's path.
return o is not None """
return getattr(self.produces, name) is not None
def _getreqmaybe(self, obj, deps: 'list[str]', deps_cfg: 'dict[str, ]'): def _getreqmaybe(self, obj, deps: 'list[str]', deps_cfg: 'dict[str, ]'):
""" """
Add attribute for a dependency or panic if a required dependency has not Add attribute for a dependency or panic if a required dependency has not been given to the module on its input.
been given to the module on its input.
""" """
for name in deps: for name in deps:
name, spec = decompose_depname(name) name, spec = decompose_depname(name)
value = deps_cfg.get(name) value = deps_cfg.get(name)
if value is None and spec == 'req': if value is None and spec == 'req':
fatal(-1, f'Dependency `{name}` is required by module ' fatal(-1, f'Dependency `{name}` is required by module `{self.module_name}` but wasn\'t provided')
f'`{self.module_name}` but wasn\'t provided')
setattr(obj, name, self.r_env.resolve(value)) setattr(obj, name, self.r_env.resolve(value))
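
The `spec` values returned by `decompose_depname` drive this check; only 'req' is visible in this hunk, so the following sketch of the convention is an assumption inferred from the '?'/'!' stripping in `get_mod_metadata` below:

# Assumed convention, not the actual implementation: a bare name is required,
# while trailing qualifiers mark other dependency kinds.
def decompose_depname_sketch(name: str):
    if name.endswith('?'):
        return name[:-1], 'maybe'   # placeholder spec string (assumption)
    if name.endswith('!'):
        return name[:-1], 'demand'  # placeholder spec string (assumption)
    return name, 'req'
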
# `config` should be a dictionary given as the module's input. # `config` should be a dictionary given as the module's input.
def __init__(self, module: Module, config: 'dict[str, ]', def __init__(
r_env: ResolutionEnv, share: str, bin: str): self,
module: Module,
config: 'dict[str, ]',
r_env: ResolutionEnv,
share: str,
bin: str
):
self.module_name = module.name self.module_name = module.name
self.takes = SimpleNamespace() self.takes = SimpleNamespace()
self.produces = SimpleNamespace() self.produces = SimpleNamespace()
@ -122,6 +129,7 @@ class ModuleContext:
return mycopy return mycopy
class ModuleRuntimeException(Exception): class ModuleRuntimeException(Exception):
info: str info: str
@ -131,14 +139,15 @@ class ModuleRuntimeException(Exception):
def __str__(self): def __str__(self):
return self.info return self.info
def get_mod_metadata(module: Module):
""" Get descriptions for produced dependencies. """
def get_mod_metadata(module: Module):
"""
Get descriptions for produced dependencies.
"""
meta = {} meta = {}
has_meta = hasattr(module, 'prod_meta') has_meta = hasattr(module, 'prod_meta')
for prod in module.produces: for prod in module.produces:
prod = prod.replace('?', '') prod = prod.replace('?', '').replace('!', '')
prod = prod.replace('!', '')
if not has_meta: if not has_meta:
meta[prod] = '<no description>' meta[prod] = '<no description>'
continue continue
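
An illustration of the qualifier stripping above, with a hypothetical `produces` list:

# Hypothetical produces list; qualifiers are stripped before metadata lookup.
produces = ['bitstream', 'log?', 'checkpoint!']
keys = [p.replace('?', '').replace('!', '') for p in produces]
# keys == ['bitstream', 'log', 'checkpoint']
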

View file

@ -1,14 +1,16 @@
""" Dynamically import and run sfbuild modules """ """
Dynamically import and run F4PGA modules.
"""
from contextlib import contextmanager from contextlib import contextmanager
import importlib import importlib.util as importlib_util
import importlib.util from pathlib import Path
import os
from colorama import Style
from f4pga.module import Module, ModuleContext, get_mod_metadata from f4pga.module import Module, ModuleContext, get_mod_metadata
from f4pga.common import ResolutionEnv, deep, sfprint from f4pga.common import ResolutionEnv, deep, sfprint
from colorama import Fore, Style
_realpath_deep = deep(os.path.realpath)
@contextmanager @contextmanager
def _add_to_sys_path(path: str): def _add_to_sys_path(path: str):
@ -20,17 +22,20 @@ def _add_to_sys_path(path: str):
finally: finally:
sys.path = old_syspath sys.path = old_syspath
def import_module_from_path(path: str): def import_module_from_path(path: str):
absolute_path = os.path.realpath(path) absolute_path = str(Path(path).resolve())
with _add_to_sys_path(path): with _add_to_sys_path(path):
spec = importlib.util.spec_from_file_location(absolute_path, absolute_path) spec = importlib_util.spec_from_file_location(absolute_path, absolute_path)
module = importlib.util.module_from_spec(spec) module = importlib_util.module_from_spec(spec)
spec.loader.exec_module(module) spec.loader.exec_module(module)
return module return module
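
A usage sketch (the path is hypothetical); as noted further down, every module file is expected to expose a `ModuleClass` symbol:

# Hypothetical usage: load a stage module from disk and grab its class.
mod = import_module_from_path('f4pga/modules/synth.py')
SynthModuleClass = mod.ModuleClass
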
# Once imported, a module will be added to that dict to avoid re-importing it # Once imported, a module will be added to that dict to avoid re-importing it
preloaded_modules = {} preloaded_modules = {}
def get_module(path: str): def get_module(path: str):
global preloaded_modules global preloaded_modules
@ -41,10 +46,10 @@ def get_module(path: str):
mod = import_module_from_path(path) mod = import_module_from_path(path)
preloaded_modules[path] = mod preloaded_modules[path] = mod
# All sfbuild modules should expose a `ModuleClass` type/alias which is a # All F4PGA modules should expose a `ModuleClass` type/alias which is a class implementing a Module interface
# class implementing a Module interface
return mod.ModuleClass return mod.ModuleClass
class ModRunCtx: class ModRunCtx:
share: str share: str
bin: str bin: str
@ -58,6 +63,7 @@ class ModRunCtx:
def make_r_env(self): def make_r_env(self):
return ResolutionEnv(self.config['values']) return ResolutionEnv(self.config['values'])
class ModuleFailException(Exception): class ModuleFailException(Exception):
module: str module: str
mode: str mode: str
@ -69,8 +75,11 @@ class ModuleFailException(Exception):
self.e = e self.e = e
def __str__(self) -> str: def __str__(self) -> str:
return f'ModuleFailException:\n Module `{self.module}` failed ' \ return f"""ModuleFailException:
f'MODE: \'{self.mode}\'\n\nException `{type(self.e)}`: {self.e}' Module `{self.module}` failed MODE: \'{self.mode}\'
Exception `{type(self.e)}`: {self.e}
"""
def module_io(module: Module): def module_io(module: Module):
return { return {
@ -80,32 +89,41 @@ def module_io(module: Module):
'meta': get_mod_metadata(module) 'meta': get_mod_metadata(module)
} }
def module_map(module: Module, ctx: ModRunCtx): def module_map(module: Module, ctx: ModRunCtx):
try: try:
mod_ctx = ModuleContext(module, ctx.config, ctx.make_r_env(), ctx.share, mod_ctx = ModuleContext(
ctx.bin) module,
ctx.config,
ctx.make_r_env(),
ctx.share,
ctx.bin
)
except Exception as e: except Exception as e:
raise ModuleFailException(module.name, 'map', e) raise ModuleFailException(module.name, 'map', e)
return _realpath_deep(vars(mod_ctx.outputs)) return deep(lambda p: str(Path(p).resolve()))(vars(mod_ctx.outputs))
def module_exec(module: Module, ctx: ModRunCtx): def module_exec(module: Module, ctx: ModRunCtx):
try: try:
mod_ctx = ModuleContext(module, ctx.config, ctx.make_r_env(), ctx.share, mod_ctx = ModuleContext(
ctx.bin) module,
ctx.config,
ctx.make_r_env(),
ctx.share,
ctx.bin
)
except Exception as e: except Exception as e:
raise ModuleFailException(module.name, 'exec', e) raise ModuleFailException(module.name, 'exec', e)
sfprint(1, 'Executing module ' sfprint(1, f'Executing module `{Style.BRIGHT + module.name + Style.RESET_ALL}`:')
f'`{Style.BRIGHT + module.name + Style.RESET_ALL}`:')
current_phase = 1 current_phase = 1
try: try:
for phase_msg in module.execute(mod_ctx): for phase_msg in module.execute(mod_ctx):
sfprint(1, f' {Style.BRIGHT}[{current_phase}/{module.no_of_phases}]' sfprint(1, f' {Style.BRIGHT}[{current_phase}/{module.no_of_phases}] {Style.RESET_ALL}: {phase_msg}')
f'{Style.RESET_ALL}: {phase_msg}')
current_phase += 1 current_phase += 1
except Exception as e: except Exception as e:
raise ModuleFailException(module.name, 'exec', e) raise ModuleFailException(module.name, 'exec', e)
sfprint(1, f'Module `{Style.BRIGHT + module.name + Style.RESET_ALL}` ' sfprint(1, f'Module `{Style.BRIGHT + module.name + Style.RESET_ALL}` has finished its work!')
'has finished its work!')

View file

@ -1,19 +0,0 @@
""" The "ugly" module is dedicated for some *ugly* workarounds """
import os
from f4pga.common import sub as common_sub
def noisy_warnings():
""" Emit some noisy warnings """
os.environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings.log'
return 'noisy_warnings.log'
def generate_values():
""" Generate initial values, available in configs """
return {
'prjxray_db': common_sub('prjxray-config').decode().replace('\n', ''),
'python3': common_sub('which', 'python3').decode().replace('\n', ''),
'noisyWarnings': noisy_warnings()
}
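
The `sub` helper these functions rely on is imported from `f4pga.common` and is not part of this diff; below is a minimal sketch of a wrapper with the observed interface — positional argv in, captured stdout as bytes out — under the assumption that it wraps `subprocess`:

from subprocess import PIPE, run

# Hedged sketch of a `sub`-like helper, inferred from call sites such as
# sub('prjxray-config').decode(); not the actual f4pga.common implementation.
def sub_sketch(*args, env=None, cwd=None) -> bytes:
    result = run(args, stdout=PIPE, env=env, cwd=cwd)
    if result.returncode != 0:
        raise RuntimeError(f'Command {args} exited with {result.returncode}')
    return result.stdout
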