f4pga: test pyF4PGA in CI

Signed-off-by: Unai Martinez-Corral <umartinezcorral@antmicro.com>
Unai Martinez-Corral 2022-03-01 02:52:59 +01:00
parent 22003c2f49
commit 920d1d5ec3
22 changed files with 196 additions and 309 deletions

View File

@ -6,7 +6,8 @@ on:
jobs:
Run-tests:
Test-pip:
runs-on: ubuntu-latest
strategy:
fail-fast: false
@ -39,14 +40,20 @@ jobs:
- name: Test pyF4PGA build
run: |
cd f4pga-examples
export INSTALL_DIR=/opt/f4pga
export PATH="$INSTALL_DIR/xc7/install/bin:$PATH";
source "$INSTALL_DIR/xc7/conda/etc/profile.d/conda.sh"
conda activate xc7
pip install -r ../f4pga/requirements.txt
cd f4pga
pip install --use-feature=in-tree-build .
cd ..
PYTHONPATH=$(pwd)/../f4pga python3 ../f4pga/sfbuild.py build --flow ../.github/sftest.json -t bitstream
cd f4pga-examples
f4pga build --flow ../.github/sftest.json -t bitstream
- name: Test pyF4PGA (PYTHONPATH)
run: |
PYTHONPATH=$(pwd) python3 f4pga/sfbuild.py
PYTHONPATH=$(pwd) python3 f4pga/sfbuild.py -h

View File

@ -1,146 +0,0 @@
# Installs sfbuild - experimental Symbiflow Build System
function(INSTALL_DIR)
# Create directory during installation phase
set(options)
set(one_value_args INSTALL_DIRECTORY)
set(multi_value_args)
cmake_parse_arguments(
INSTALL_DIR
"${options}"
"${one_value_args}"
"${multi_value_args}"
${ARGN}
)
set(make_dir_code "file(MAKE_DIRECTORY ${INSTALL_DIR_INSTALL_DIRECTORY})")
install(CODE ${make_dir_code})
endfunction()
function(INSTALL_DIR_CONTENT)
# Install files from the ROOT_DIRECTORY/FILES_DIRECTORY directory into the FILES_DIRECTORY subdirectory of DESTINATION
set(options)
set(one_value_args
ROOT_DIRECTORY
FILES_DIRECTORY
DESTINATION)
set(multi_value_args
FILES
INSTALL_OPTS)
cmake_parse_arguments(
INSTALL_DIR_CONTENT
"${options}"
"${one_value_args}"
"${multi_value_args}"
${ARGN}
)
if(NOT DEFINED INSTALL_DIR_CONTENT_ROOT_DIRECTORY)
set(INSTALL_DIR_CONTENT_ROOT_DIRECTORY .)
endif()
if(NOT DEFINED INSTALL_DIR_CONTENT_FILES_DIRECTORY)
set(INSTALL_DIR_CONTENT_FILES_DIRECTORY .)
endif()
set(file_paths)
foreach(file ${INSTALL_DIR_CONTENT_FILES})
list(APPEND file_paths ${INSTALL_DIR_CONTENT_ROOT_DIRECTORY}/${INSTALL_DIR_CONTENT_FILES_DIRECTORY}/${file})
endforeach()
install(FILES ${file_paths}
DESTINATION ${INSTALL_DIR_CONTENT_DESTINATION}/${INSTALL_DIR_CONTENT_FILES_DIRECTORY}
${INSTALL_DIR_CONTENT_INSTALL_OPTS})
endfunction()
find_package(Python3 COMPONENTS Interpreter REQUIRED)
get_target_property_required(VPR env VPR)
get_target_property_required(GENFASM env GENFASM)
set(SFBUILD_SUPPORTED_PLATFORMS
ql-eos-s3
xc7a50t
xc7a100t
xc7a200t
ql-k4n8_fast
ql-k4n8_slow)
# Create required directories
foreach(DIR_PATH ${SFBUILD_DIRECTORIES})
install_dir(INSTALL_DIRECTORY ${CMAKE_INSTALL_PREFIX}/bin/${DIR_PATH})
endforeach()
# Install sfbuild
install_dir_content(
FILES
__init__.py
sf_argparse.py
sf_cache.py
sf_flow_config.py
sf_module_inspector.py
sf_stage.py
sf_ugly.py
sfbuild.py
sfbuild
DESTINATION bin/sfbuild
INSTALL_OPTS
PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE)
install_dir_content(
FILES __init__.py
FILES_DIRECTORY sf_common
DESTINATION bin/sfbuild
INSTALL_OPTS
PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE)
install_dir_content(
FILES __init__.py
FILES_DIRECTORY sf_module
DESTINATION bin/sfbuild
INSTALL_OPTS
PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE)
install_dir_content(
FILES __init__.py
FILES_DIRECTORY sf_module_runner
DESTINATION bin/sfbuild
INSTALL_OPTS
PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE)
# Install common modules
install_dir_content(
FILES
fasm.py
generic_script_wrapper.py
io_rename.py
mkdirs.py
pack.py
place_constraints.py
place.py
route.py
synth.py
FILES_DIRECTORY sf_common_modules
DESTINATION bin/sfbuild
INSTALL_OPTS
PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE)
# Install platform flow definitions
set(sfbuild_supported_platform_defs)
foreach(SFBUILD_PLATFORM ${SFBUILD_SUPPORTED_PLATFORMS})
set(sfbuild_platform_def "${SFBUILD_PLATFORM}.json")
list(APPEND sfbuild_supported_platform_defs ${sfbuild_platform_def})
endforeach()
install_dir_content(
FILES ${sfbuild_supported_platform_defs}
FILES_DIRECTORY platforms
DESTINATION bin/sfbuild
INSTALL_OPTS
PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ GROUP_READ)
# Install part_db
install_dir_content(
FILES
parts.json
FILES_DIRECTORY part_db
DESTINATION bin/sfbuild
INSTALL_OPTS
PERMISSIONS WORLD_READ OWNER_WRITE OWNER_READ GROUP_READ
)

View File

@ -1 +0,0 @@
import sfbuild

View File

@ -19,6 +19,7 @@
# SPDX-License-Identifier: Apache-2.0
from pathlib import Path
from typing import List
from setuptools import setup as setuptools_setup
@ -27,6 +28,28 @@ F4PGA_FAM = environ.get('F4PGA_FAM', 'xc7')
packagePath = Path(__file__).resolve().parent
requirementsFile = packagePath / "requirements.txt"
# Read requirements file and add them to package dependency list
def get_requirements(file: Path) -> List[str]:
requirements = []
with file.open("r") as fh:
for line in fh.read().splitlines():
if line.startswith("#") or line == "":
continue
elif line.startswith("-r"):
# Remove the first word/argument (-r)
filename = " ".join(line.split(" ")[1:])
requirements += get_requirements(file.parent / filename)
elif line.startswith("https"):
# Convert 'URL#NAME' to 'NAME @ URL'
splitItems = line.split("#")
requirements.append("{} @ {}".format(splitItems[1], splitItems[0]))
else:
requirements.append(line)
return requirements
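As a quick illustration of how get_requirements handles plain names and 'URL#NAME' entries (the '-r' case simply recurses into the referenced file), here is a minimal sketch with made-up file contents, assuming the function above is in scope:

    from pathlib import Path
    from tempfile import TemporaryDirectory

    # Hypothetical requirements content, for illustration only.
    with TemporaryDirectory() as tmp:
        req = Path(tmp) / "requirements.txt"
        req.write_text(
            "# build deps\n"
            "colorama\n"
            "https://github.com/example/pkg/archive/main.zip#examplepkg\n"
        )
        print(get_requirements(req))
        # ['colorama', 'examplepkg @ https://github.com/example/pkg/archive/main.zip']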
sf = "symbiflow"
shwrappers = "f4pga.wrappers.sh.__init__"
@ -54,15 +77,21 @@ setuptools_setup(
description="F4PGA.",
url="https://github.com/chipsalliance/f4pga",
packages=[
"f4pga",
"f4pga.sf_common_modules",
"f4pga.wrappers.sh",
],
package_dir={"f4pga": "."},
package_data={
'f4pga': ['platforms/*.json'],
'f4pga.wrappers.sh': ['xc7/*.f4pga.sh', 'quicklogic/*.f4pga.sh']
},
classifiers=[],
python_requires='>=3.6',
install_requires=list(set(get_requirements(requirementsFile))),
entry_points={
"console_scripts": wrapper_entrypoints
"console_scripts": [
"f4pga = f4pga.sfbuild:main",
] + wrapper_entrypoints
},
)

View File

@ -27,7 +27,7 @@ def with_qualifier(name: str, q: str) -> str:
_sfbuild_module_collection_name_to_path = {}
def scan_modules(mypath: str):
global _sfbuild_module_collection_name_to_path
sfbuild_home = mypath
sfbuild_home_dirs = os.listdir(sfbuild_home)
sfbuild_module_dirs = \
@ -66,7 +66,7 @@ def deep(fun):
return [d(p) for p in paths]
elif type(paths) is dict:
return dict([(k, d(p)) for k, p in paths.items()])
return d
def file_noext(path: str):
@ -99,7 +99,7 @@ class VprArgs:
self.device_name = values.vpr_grid_layout_name
self.eblif = os.path.realpath(eblif)
if values.vpr_options is not None:
self.optional = options_dict_to_list(values.vpr_options)
else:
self.optional = []
if vpr_extra_opts is not None:
@ -159,7 +159,7 @@ def options_dict_to_list(opt_dict: dict):
Converts a dictionary of named options for a CLI program to a list.
Example: { "option_name": "value" } -> [ "--option_name", "value" ]
"""
opts = []
for key, val in opt_dict.items():
opts.append('--' + key)
@ -186,7 +186,7 @@ def fatal(code, message):
with a given return code.
"""
print(f'[FATAL ERROR]: {message}')
raise(Exception(f'[FATAL ERROR]: {message}'))
exit(code)
class ResolutionEnv:
@ -202,7 +202,7 @@ class ResolutionEnv:
def __init__(self, values={}):
self.values = values
def __copy__(self):
return ResolutionEnv(self.values.copy())
@ -251,7 +251,7 @@ verbosity_level = 0
def sfprint(verbosity: int, *args):
""" Print with regards to currently set verbosity level """
global verbosity_level
if verbosity <= verbosity_level:
print(*args)

View File

@ -5,8 +5,8 @@
# ----------------------------------------------------------------------------- #
import os
from sf_common import *
from sf_module import *
from f4pga.sf_common import *
from f4pga.sf_module import *
# ----------------------------------------------------------------------------- #
@ -31,10 +31,10 @@ class FasmModule(Module):
return {
'fasm': fasm_output_path(build_dir, ctx.values.top)
}
def execute(self, ctx: ModuleContext):
build_dir = os.path.dirname(ctx.takes.eblif)
vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values)
optional = []
@ -48,12 +48,12 @@ class FasmModule(Module):
'--device', vprargs.device_name,
'--read_rr_graph', vprargs.rr_graph
] + vprargs.optional
if get_verbosity_level() >= 2:
yield 'Generating FASM...\n ' + ' '.join(s)
else:
yield 'Generating FASM...'
sub(*s, cwd=build_dir)
default_fasm_output_name = fasm_output_path(build_dir, ctx.values.top)
@ -65,7 +65,7 @@ class FasmModule(Module):
concat_fasm(ctx.outputs.fasm, ctx.takes.fasm_extra, ctx.outputs.fasm)
else:
yield 'No extra FASM to append'
def __init__(self, _):
self.name = 'fasm'
self.no_of_phases = 2

View File

@ -4,7 +4,7 @@
"""
This module is intended for wrapping simple scripts without rewriting them as
an sfbuild module. This is mostly to maintain compatibility with workflows
that do not use sfbuild and instead rely on legacy scripts.
Accepted module parameters:
@ -24,7 +24,7 @@ Accepted module parameters:
dependency also gets two extra values associated with it:
`:dependency_name[noext]`, which contains the path to the dependency with the
extension (anything after the last ".") removed, and `:dependency_name[dir]`, which
contains the directory path of the dependency. This is useful for deriving an output
name from the input.
* `meta` (string, optional): Description of the output dependency.
* `inputs` (dict[string -> string | bool], mandatory):
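To make the accepted parameters more concrete, a hypothetical `params` dictionary for this wrapper could look as follows. The key names mirror the fields handled in the code below ('interpreter', 'inputs', and per-output 'mode', 'file', 'target', 'meta'); the exact nesting, the dependency name and all concrete values are invented for illustration:

    # Illustrative sketch only; not an actual stage configuration from this repository.
    params = {
        'interpreter': 'python3',        # optional interpreter prepended to the command
        'inputs': {                      # arguments/environment handed to the wrapped script
            'input': True,
        },
        'outputs': {
            'report': {
                'mode': 'file',          # 'file' or 'stdout'
                'file': 'report.txt',    # file produced by the wrapped script
                'target': 'report.txt',  # path where the dependency should end up
                'meta': 'Hypothetical report produced by the wrapped script',
            },
        },
    }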
@ -49,8 +49,8 @@ import os
import shutil
import re
from sf_common import *
from sf_module import *
from f4pga.sf_common import *
from f4pga.sf_module import *
# ----------------------------------------------------------------------------- #
@ -106,7 +106,7 @@ def _get_input_references(input: str) -> InputReferences:
refs.dependencies.add(dep_name)
else:
refs.values.add(match_str)
return refs
@ -146,14 +146,14 @@ class GenericScriptWrapperModule(Module):
for dep, _, out_path in self.file_outputs:
out_path_resolved = ctx.r_env.resolve(out_path, final=True)
outputs[dep] = out_path_resolved
if self.stdout_target:
out_path_resolved = \
ctx.r_env.resolve(self.stdout_target[1], final=True)
outputs[self.stdout_target[0]] = out_path_resolved
return outputs
def execute(self, ctx: ModuleContext):
_add_extra_values_to_env(ctx)
@ -163,9 +163,9 @@ class GenericScriptWrapperModule(Module):
+ self.get_args(ctx)
if self.interpreter:
sub_args = [ctx.r_env.resolve(self.interpreter, final=True)] + sub_args
sub_env = self.get_env(ctx)
# XXX: This may produce an incorrect string if the arguments contain whitespace
# characters
cmd = ' '.join(sub_args)
@ -174,7 +174,7 @@ class GenericScriptWrapperModule(Module):
yield f'Running script...\n {cmd}'
else:
yield f'Running an external script...'
data = sub(*sub_args, cwd=cwd, env=sub_env)
yield 'Writing outputs...'
@ -182,7 +182,7 @@ class GenericScriptWrapperModule(Module):
target = ctx.r_env.resolve(self.stdout_target[1], final=True)
with open(target, 'wb') as f:
f.write(data)
for _, file, target in self.file_outputs:
file = ctx.r_env.resolve(file, final=True)
target = ctx.r_env.resolve(target, final=True)
@ -199,15 +199,15 @@ class GenericScriptWrapperModule(Module):
meta = output_def.get('meta')
if type(meta) is str:
self.prod_meta[dname] = meta
mode = output_def.get('mode')
if type(mode) is not str:
raise Exception(f'Output mode for `{dep_name}` is not specified')
target = output_def.get('target')
if type(target) is not str:
raise Exception('`target` field is not specified')
if mode == 'file':
file = output_def.get('file')
if type(file) is not str:
@ -217,7 +217,7 @@ class GenericScriptWrapperModule(Module):
if self.stdout_target is not None:
raise Exception('stdout output is already specified')
self.stdout_target = dname, target
# A very functional approach
def _init_inputs(self, input_defs):
positional_args = []
@ -267,7 +267,7 @@ class GenericScriptWrapperModule(Module):
if val != '':
push_env(val)
get_env = _tailcall1(get_env, push_q)
def get_all_args(ctx: ModuleContext):
nonlocal get_args, positional_args, named_args
@ -277,14 +277,14 @@ class GenericScriptWrapperModule(Module):
pos = [ a for _, a in positional_args]
return named_args + pos
def get_all_env(ctx: ModuleContext):
nonlocal get_env, env_vars
get_env(ctx)
if len(env_vars.items()) == 0:
return None
return env_vars
setattr(self, 'get_args', get_all_args)
setattr(self, 'get_env', get_all_env)
@ -292,7 +292,7 @@ class GenericScriptWrapperModule(Module):
self.takes.append(dep)
for val in refs.values:
self.values.append(val)
def __init__(self, params):
self.name = _generate_stage_name(params)
self.no_of_phases = 2

View File

@ -27,9 +27,9 @@ Accepted module parameters:
# ----------------------------------------------------------------------------- #
from sf_common import *
from sf_module import *
from sf_module_runner import get_module
from f4pga.sf_common import *
from f4pga.sf_module import *
from f4pga.sf_module_runner import get_module
# ----------------------------------------------------------------------------- #
@ -64,7 +64,7 @@ def _switch_entries(l: 'list[str]', renames: 'dict[str, str]') -> 'list[str]':
else:
newl.append(r if r is not None else e)
return newl
def _generate_stage_name(name: str):
return f'{name}-io_renamed'
@ -83,7 +83,7 @@ class IORenameModule(Module):
newctx.values = _switchback_attrs(ctx.values, self.rename_values)
r = self.module.map_io(newctx)
return _switch_keys(r, self.rename_produces)
def execute(self, ctx: ModuleContext):
newctx = ctx.shallow_copy()
newctx.takes = _switchback_attrs(ctx.takes, self.rename_takes)
@ -91,7 +91,7 @@ class IORenameModule(Module):
newctx.outputs = _switchback_attrs(ctx.produces, self.rename_produces)
print(newctx.takes)
return self.module.execute(newctx)
def __init__(self, params):
mod_path = resolve_modstr(params["module"])
module_class = get_module(mod_path)

View File

@ -12,8 +12,8 @@ lazily create the directories if they become necessary. """
# ----------------------------------------------------------------------------- #
import os
from sf_common import *
from sf_module import *
from f4pga.sf_common import *
from f4pga.sf_module import *
# ----------------------------------------------------------------------------- #
@ -22,13 +22,13 @@ class MkDirsModule(Module):
def map_io(self, ctx: ModuleContext):
return ctx.r_env.resolve(self.deps_to_produce)
def execute(self, ctx: ModuleContext):
outputs = vars(ctx.outputs)
for _, path in outputs.items():
yield f'Creating directory {path}...'
os.makedirs(path, exist_ok=True)
def __init__(self, params):
self.name = 'mkdirs'
self.no_of_phases = len(params) if params else 0

View File

@ -6,8 +6,8 @@
import os
import re
from sf_common import *
from sf_module import *
from f4pga.sf_common import *
from f4pga.sf_module import *
# ----------------------------------------------------------------------------- #
@ -24,7 +24,7 @@ class PackModule(Module):
'util_rpt': os.path.join(build_dir, DEFAULT_UTIL_RPT),
'timing_rpt': os.path.join(build_dir, DEFAULT_TIMING_RPT)
}
def execute(self, ctx: ModuleContext):
vpr_args = VprArgs(ctx.share, ctx.takes.eblif, ctx.values,
sdc_file=ctx.takes.sdc)
@ -42,14 +42,14 @@ class PackModule(Module):
shutil.move(og_log, ctx.outputs.pack_log)
else:
os.remove(og_log)
if ctx.outputs.timing_rpt:
shutil.move(os.path.join(build_dir, DEFAULT_TIMING_RPT),
ctx.outputs.timing_rpt)
if ctx.outputs.util_rpt:
shutil.move(os.path.join(build_dir, DEFAULT_UTIL_RPT),
ctx.outputs.util_rpt)
def __init__(self, _):
self.name = 'pack'
self.no_of_phases = 2

View File

@ -5,8 +5,8 @@
# ----------------------------------------------------------------------------- #
import os
from sf_common import *
from sf_module import *
from f4pga.sf_common import *
from f4pga.sf_module import *
# ----------------------------------------------------------------------------- #
@ -27,33 +27,33 @@ def place_constraints_file(ctx: ModuleContext):
if not p:
dummy = True
p = file_noext(ctx.takes.eblif) + '.place'
return p, dummy
class PlaceModule(Module):
def map_io(self, ctx: ModuleContext):
mapping = {}
p, _ = place_constraints_file(ctx)
mapping['place'] = default_output_name(p)
return mapping
def execute(self, ctx: ModuleContext):
place_constraints, dummy = place_constraints_file(ctx)
place_constraints = os.path.realpath(place_constraints)
if dummy:
with open(place_constraints, 'wb') as f:
f.write(b'')
build_dir = os.path.dirname(ctx.takes.eblif)
vpr_options = ['--fix_clusters', place_constraints]
yield 'Running VPR...'
vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values,
sdc_file=ctx.takes.sdc, vpr_extra_opts=vpr_options)
vpr('place', vprargs, cwd=build_dir)
# VPR names the output on its own. If the user requested another name, the
# output file should be moved.
# TODO: This extends the set of names that would cause collisions.

View File

@ -5,8 +5,8 @@
# ----------------------------------------------------------------------------- #
import os
from sf_common import *
from sf_module import *
from f4pga.sf_common import *
from f4pga.sf_module import *
# ----------------------------------------------------------------------------- #
@ -19,9 +19,9 @@ class PlaceConstraintsModule(Module):
def execute(self, ctx: ModuleContext):
arch_dir = os.path.join(ctx.share, 'arch')
arch_def = os.path.join(arch_dir, ctx.values.device, 'arch.timing.xml')
database = sub('prjxray-config').decode().replace('\n', '')
yield 'Generating .place...'
extra_opts: 'list[str]'
@ -38,7 +38,7 @@ class PlaceConstraintsModule(Module):
'--db_root', database,
'--part', ctx.values.part_name]
+ extra_opts))
yield 'Saving place constraint data...'
with open(ctx.outputs.place_constraints, 'wb') as f:
f.write(data)

View File

@ -6,27 +6,27 @@
import os
import shutil
from sf_common import *
from sf_module import *
from f4pga.sf_common import *
from f4pga.sf_module import *
# ----------------------------------------------------------------------------- #
def route_place_file(eblif: str):
return file_noext(eblif) + '.route'
class RouteModule(Module):
def map_io(self, ctx: ModuleContext):
return {
'route': route_place_file(ctx.takes.eblif)
}
def execute(self, ctx: ModuleContext):
build_dir = os.path.dirname(ctx.takes.eblif)
vpr_options = []
if ctx.values.vpr_options:
vpr_options = options_dict_to_list(ctx.values.vpr_options)
vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values,
sdc_file=ctx.takes.sdc)
@ -39,7 +39,7 @@ class RouteModule(Module):
yield 'Saving log...'
save_vpr_log('route.log', build_dir=build_dir)
def __init__(self, _):
self.name = 'route'
self.no_of_phases = 2

View File

@ -5,8 +5,8 @@
# ----------------------------------------------------------------------------- #
import os
from sf_common import *
from sf_module import *
from f4pga.sf_common import *
from f4pga.sf_module import *
# ----------------------------------------------------------------------------- #
@ -39,7 +39,7 @@ def yosys_synth(tcl, tcl_env, verilog_files=[], read_verilog_args=None, log=None
for verilog in verilog_files:
tcl = f'read_verilog {args_str} {verilog}; {tcl}'
verilog_files = []
# Execute YOSYS command
return sub(*(['yosys', '-p', tcl] + optional + verilog_files),
env=env)
@ -63,7 +63,7 @@ class SynthModule(Module):
top = ctx.values.top
if ctx.takes.build_dir:
top = os.path.join(ctx.takes.build_dir, top)
mapping['eblif'] = top + '.eblif'
mapping['fasm_extra'] = top + '_fasm_extra.fasm'
mapping['json'] = top + '.json'
@ -84,7 +84,7 @@ class SynthModule(Module):
ctx.values.device + '_' + name + '.' + name)
return mapping
def execute(self, ctx: ModuleContext):
split_inouts = os.path.join(ctx.share, 'scripts/split_inouts.py')
synth_tcl = os.path.join(ctx.values.tcl_scripts, 'synth.tcl')
@ -92,26 +92,26 @@ class SynthModule(Module):
tcl_env = yosys_setup_tcl_env(ctx.values.yosys_tcl_env) \
if ctx.values.yosys_tcl_env else {}
if get_verbosity_level() >= 2:
yield f'Synthesizing sources: {ctx.takes.sources}...'
else:
yield f'Synthesizing sources...'
yosys_synth(synth_tcl, tcl_env, ctx.takes.sources,
ctx.values.read_verilog_args, ctx.outputs.synth_log)
yield f'Splitting in/outs...'
sub('python3', split_inouts, '-i', ctx.outputs.json, '-o',
ctx.outputs.synth_json)
if not os.path.isfile(ctx.produces.fasm_extra):
with open(ctx.produces.fasm_extra, 'w') as f:
f.write('')
yield f'Converting...'
yosys_conv(conv_tcl, tcl_env, ctx.outputs.synth_json)
def __init__(self, params):
self.name = 'synthesize'
self.no_of_phases = 3
@ -123,7 +123,7 @@ class SynthModule(Module):
extra_takes = params.get('takes')
if extra_takes:
self.takes += extra_takes
self.produces = [
'eblif',
'fasm_extra',
@ -138,7 +138,7 @@ class SynthModule(Module):
self.extra_products = extra_products
else:
self.extra_products = []
self.values = [
'top',
'device',

View File

@ -1,8 +1,8 @@
import os
import json
from sf_common import file_noext, ResolutionEnv, deep
from sf_stage import Stage
from f4pga.sf_common import file_noext, ResolutionEnv, deep
from f4pga.sf_stage import Stage
from copy import copy
_realpath_deep = deep(os.path.realpath)
@ -37,22 +37,22 @@ def _get_ov_dict(dname: str, flow: dict,
d = _get_lazy_dict(platform_dict, dname)
else:
d = _get_lazy_dict(flow, dname)
return d
def _get_dep_dict(flow: dict,
platform: 'str | None' = None, stage: 'str | None' = None):
return _get_ov_dict('dependencies', flow, platform, stage)
def _get_vals_dict(flow: dict,
platform: 'str | None' = None, stage: 'str | None' = None):
return _get_ov_dict('values', flow, platform, stage)
def _add_ov(ov_dict_getter, failstr_constr, flow_cfg: dict, name: str,
values: list, platform: 'str | None' = None,
stage: 'str | None' = None) -> bool:
d = ov_dict_getter(flow_cfg, platform, stage)
deps = d.get(name)
if type(deps) is list:
deps += values
@ -61,7 +61,7 @@ def _add_ov(ov_dict_getter, failstr_constr, flow_cfg: dict, name: str,
else:
print(failstr_constr(name))
return False
return True
def _rm_ov_by_values(ov_dict_getter, notset_str_constr, notlist_str_constr,
@ -70,7 +70,7 @@ def _rm_ov_by_values(ov_dict_getter, notset_str_constr, notlist_str_constr,
stage: 'str | None' = None) -> bool:
values_to_remove = set(vals)
d = ov_dict_getter(flow, platform, stage)
vallist: list = d.get(name)
if type(vallist) is list:
d[name] = [val for val in vallist if val not in values_to_remove]
@ -80,7 +80,7 @@ def _rm_ov_by_values(ov_dict_getter, notset_str_constr, notlist_str_constr,
else:
print(notlist_str_constr(name))
return False
return True
@ -93,14 +93,14 @@ def _rm_ov_by_idx(ov_dict_getter, notset_str_constr, notlist_str_constr,
if len(idcs) == 0:
print(f'Index list is empty!')
return False
d = ov_dict_getter(flow, platform, stage)
vallist: list = d.get(name)
if type(vallist) is list:
if idcs[0] >= len(vallist) or idcs[len(idcs) - 1] < 0:
print(f'Index out of range (max: {len(vallist)})!')
return False
for idx in idcs:
vallist.pop(idx)
elif vallist is None:
@ -109,7 +109,7 @@ def _rm_ov_by_idx(ov_dict_getter, notset_str_constr, notlist_str_constr,
else:
print(notlist_str_constr(name))
return False
return True
def _get_ovs_raw(dict_name: str, flow_cfg,
@ -125,7 +125,7 @@ def _get_ovs_raw(dict_name: str, flow_cfg,
stage_deps = flow_cfg[platform][stage].get(dict_name)
if stage_deps is not None:
vals.update(stage_deps)
return vals
def _remove_dependencies_by_values(flow: dict, name: str, deps: list,
@ -206,12 +206,12 @@ class FlowDefinition:
global_vals = flow_def.get('values')
if global_vals is not None:
self.r_env.add_values(global_vals)
stages_d = flow_def['stages']
modopts_d = flow_def.get('stage_options')
if modopts_d is None:
modopts_d = {}
for stage_name, modstr in stages_d.items():
opts = modopts_d.get(stage_name)
self.stages[stage_name] = Stage(stage_name, modstr, opts)
@ -241,7 +241,7 @@ class ProjectFlowConfig:
for platform, _ in self.flow_cfg.items():
if not _is_kword(platform):
yield platform
def add_platform(self, device: str) -> bool:
d = self.flow_cfg.get(device)
if d:
@ -264,7 +264,7 @@ class ProjectFlowConfig:
def get_default_target(self, platform: str) -> 'str | None':
return self.flow_cfg[platform].get('default_target')
def get_stage_r_env(self, platform: str, stage: str) -> ResolutionEnv:
r_env = self._cache_platform_r_env(platform)
@ -272,28 +272,28 @@ class ProjectFlowConfig:
stage_values = stage_cfg.get('values')
if stage_values:
r_env.add_values(stage_values)
return r_env
""" Get dependencies without value resolution applied """
def get_dependencies_raw(self, platform: 'str | None' = None):
return _get_ovs_raw('dependencies', self.flow_cfg, platform, None)
""" Get values without value resolution applied """
def get_values_raw(self, platform: 'str | None' = None,
stage: 'str | None' = None):
return _get_ovs_raw('values', self.flow_cfg, platform, stage)
def get_stage_value_overrides(self, platform: str, stage: str):
stage_cfg = self.flow_cfg[platform].get(stage)
if stage_cfg is None:
return {}
stage_vals_ovds = stage_cfg.get('values')
if stage_vals_ovds is None:
return {}
return stage_vals_ovds
def get_dependency_platform_overrides(self, platform: str):
platform_ovds = self.flow_cfg[platform].get('dependencies')
if platform_ovds is None:
@ -314,17 +314,17 @@ class FlowConfig:
self.r_env.add_values(platform_vals)
self.stages = platform_def.stages
self.platform = platform
raw_project_deps = project_config.get_dependencies_raw(platform)
self.dependencies_explicit = \
_realpath_deep(self.r_env.resolve(raw_project_deps))
for stage_name, stage in platform_def.stages.items():
project_val_ovds = \
project_config.get_stage_value_overrides(platform, stage_name)
stage.value_overrides.update(project_val_ovds)
def get_dependency_overrides(self):
return self.dependencies_explicit
@ -332,9 +332,9 @@ class FlowConfig:
stage = self.stages[stage_name]
r_env = copy(self.r_env)
r_env.add_values(stage.value_overrides)
return r_env
def get_stage(self, stage_name: str) -> Stage:
return self.stages[stage_name]
@ -345,7 +345,7 @@ class FlowConfigException(Exception):
def __init__(self, path: str, message: str):
self.path = path
self.message = message
def __str__(self) -> str:
return f'Error in config `{self.path}`: {self.message}'
@ -356,5 +356,5 @@ def open_project_flow_cfg(path: str) -> ProjectFlowConfig:
with open(path, 'r') as flow_cfg_file:
flow_cfg_json = flow_cfg_file.read()
cfg.flow_cfg = json.loads(flow_cfg_json)
return cfg
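For orientation, a project flow configuration accepted by this parser (the role played by .github/sftest.json in the CI job above) has roughly the following shape. This is a hypothetical sketch written as a Python literal mirroring the JSON, assembled only from the accessors in this file (platform names as top-level keys, 'default_target', platform-level 'dependencies' and 'values', and per-stage sections with their own 'values'); it is not the actual test file:

    # Hypothetical project flow config; field names follow the accessors above.
    flow_cfg = {
        'xc7a50t': {                      # platform name used as a top-level key
            'default_target': 'bitstream',
            'dependencies': {             # explicit dependency paths for this platform
                'sources': ['counter.v'],
            },
            'values': {                   # platform-wide values
                'top': 'top',
            },
            'synthesize': {               # stage-specific section
                'values': {},             # stage-level value overrides
            },
        },
    }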

View File

@ -2,7 +2,7 @@
import abc
from types import SimpleNamespace
from sf_common import *
from f4pga.sf_common import *
from colorama import Fore, Style
class Module:
@ -13,7 +13,7 @@ class Module:
They also have to specify what dependencies they produce and create the files
for these dependencies.
"""
no_of_phases: int
name: str
takes: 'list[str]'
@ -37,7 +37,7 @@ class Module:
`ctx` is `ModuleContext`.
"""
pass
def __init__(self, params: 'dict[str, ]'):
self.no_of_phases = 0
self.current_phase = 0
@ -49,7 +49,7 @@ class ModuleContext:
A class for objects holding mappings for dependencies and values, as well as
other information needed during module execution.
"""
share: str # Absolute path to Symbiflow's share directory
bin: str # Absolute path to Symbiflow's bin directory
takes: SimpleNamespace # Maps symbolic dependency names to relative
@ -59,11 +59,11 @@ class ModuleContext:
# on-demand optional outputs (such as logs)
# with `is_output_explicit` method.
outputs: SimpleNamespace # Contains mappings for all available outputs.
values: SimpleNamespace # Contains all available requested values.
r_env: ResolutionEnv # `ResolutionEnvironment` object holding mappings
# for current scope.
module_name: str # Name of the module.
def is_output_explicit(self, name: str):
""" True if user has explicitely specified output's path. """
o = getattr(self.produces, name)
@ -74,7 +74,7 @@ class ModuleContext:
Add attribute for a dependency or panic if a required dependency has not
been given to the module on its input.
"""
for name in deps:
name, spec = decompose_depname(name)
value = deps_cfg.get(name)
@ -120,7 +120,7 @@ class ModuleContext:
mycopy.share = self.share
mycopy.bin = self.bin
return mycopy
class ModuleRuntimeException(Exception):
info: str
@ -133,7 +133,7 @@ class ModuleRuntimeException(Exception):
def get_mod_metadata(module: Module):
""" Get descriptions for produced dependencies. """
meta = {}
has_meta = hasattr(module, 'prod_meta')
for prod in module.produces:

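To tie this interface together, here is a minimal module written in the style of the bundled sf_common_modules: map_io returns the paths of produced dependencies, execute is a generator that yields a message at the start of each phase, and __init__ declares the name, phase count and I/O lists. It is an illustrative sketch, not a module shipped in this commit:

    from f4pga.sf_module import Module, ModuleContext

    class CopyModule(Module):
        def map_io(self, ctx: ModuleContext):
            # Map each produced dependency name to a concrete path.
            return {'copy': ctx.takes.src + '.copy'}

        def execute(self, ctx: ModuleContext):
            # Each `yield` marks the start of a phase; the runner prints it.
            yield f'Copying {ctx.takes.src}...'
            with open(ctx.takes.src, 'rb') as src, open(ctx.outputs.copy, 'wb') as dst:
                dst.write(src.read())

        def __init__(self, _):
            self.name = 'copy'
            self.no_of_phases = 1
            self.takes = ['src']
            self.produces = ['copy']
            self.values = []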
View File

@ -1,5 +1,5 @@
from sf_module import Module
from sf_common import decompose_depname
from f4pga.sf_module import Module
from f4pga.sf_common import decompose_depname
from colorama import Style
def _get_if_qualifier(deplist: 'list[str]', qualifier: str):

View File

@ -4,8 +4,8 @@ from contextlib import contextmanager
import importlib
import importlib.util
import os
from sf_module import Module, ModuleContext, get_mod_metadata
from sf_common import ResolutionEnv, deep, sfprint
from f4pga.sf_module import Module, ModuleContext, get_mod_metadata
from f4pga.sf_common import ResolutionEnv, deep, sfprint
from colorama import Fore, Style
_realpath_deep = deep(os.path.realpath)

View File

@ -1,6 +1,6 @@
from sf_common import decompose_depname, resolve_modstr
from sf_module import Module
from sf_module_runner import get_module, module_io
from f4pga.sf_common import decompose_depname, resolve_modstr
from f4pga.sf_module import Module
from f4pga.sf_module_runner import get_module, module_io
class StageIO:
"""
@ -20,7 +20,7 @@ class StageIO:
"""
self.name, self.spec = decompose_depname(encoded_name)
def __repr__(self) -> str:
return 'StageIO { name: \'' + self.name + '\', spec: ' + \
self.spec + '}'
@ -34,17 +34,17 @@ class Stage:
name: str # Name of the stage (module's name)
takes: 'list[StageIO]' # List of symbolic names of dependencies used by
# the stage
produces: 'list[StageIO]' # List of symbolic names of dependencies
# produced by the stage
value_overrides: 'dict[str, ]' # Stage-specific values
module: Module
meta: 'dict[str, str]' # Stage's metadata extracted from module's
# output.
def __init__(self, name: str, modstr: str, mod_opts: 'dict[str, ] | None'):
if mod_opts is None:
mod_opts = {}
module_path = resolve_modstr(modstr)
ModuleClass = get_module(module_path)
self.module = ModuleClass(mod_opts.get('params'))
@ -54,20 +54,20 @@ class Stage:
self.value_overrides = values
else:
self.value_overrides = {}
mod_io = module_io(self.module)
self.name = name
self.takes = []
for input in mod_io['takes']:
io = StageIO(input)
self.takes.append(io)
self.produces = []
for input in mod_io['produces']:
io = StageIO(input)
self.produces.append(io)
self.meta = mod_io['meta']
def __repr__(self) -> str:

View File

@ -1,11 +1,11 @@
""" The "ugly" module is dedicated for some *ugly* workarounds """
import os
import sf_common
from f4pga.sf_common import sub as common_sub
def noisy_warnings():
""" Emit some noisy warnings """
os.environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings.log'
return 'noisy_warnings.log'
@ -13,7 +13,7 @@ def generate_values():
""" Generate initial values, available in configs """
return {
'prjxray_db': sf_common.sub('prjxray-config').decode().replace('\n', ''),
'python3': sf_common.sub('which', 'python3').decode().replace('\n', ''),
'prjxray_db': common_sub('prjxray-config').decode().replace('\n', ''),
'python3': common_sub('which', 'python3').decode().replace('\n', ''),
'noisyWarnings': noisy_warnings()
}

View File

@ -1,5 +0,0 @@
#!/bin/sh
MYDIR=`dirname $0`
python3 ${MYDIR}/sfbuild.py $@

View File

@ -21,30 +21,30 @@ that uses sfbuild. Contains project-specific definitions needed within the flow,
such as list of source code files.
"""
from pathlib import Path
from argparse import Namespace
import os
from os import environ
import json
from typing import Iterable
from colorama import Fore, Style
from sf_common import ResolutionEnv, fatal, scan_modules, set_verbosity_level, \
from f4pga.sf_common import ResolutionEnv, fatal, scan_modules, set_verbosity_level, \
sfprint
from sf_module import *
from sf_cache import SymbiCache
import sf_ugly
from sf_flow_config import ProjectFlowConfig, FlowConfig, FlowDefinition, \
from f4pga.sf_module import *
from f4pga.sf_cache import SymbiCache
import f4pga.sf_ugly as sf_ugly
from f4pga.sf_flow_config import ProjectFlowConfig, FlowConfig, FlowDefinition, \
open_project_flow_cfg, verify_platform_name, \
verify_stage
from sf_module_runner import *
from sf_module_inspector import get_module_info
from sf_stage import Stage
from sf_argparse import setup_argparser, get_cli_flow_config
from f4pga.sf_module_runner import *
from f4pga.sf_module_inspector import get_module_info
from f4pga.sf_stage import Stage
from f4pga.sf_argparse import setup_argparser, get_cli_flow_config
SYMBICACHEPATH = '.symbicache'
mypath = os.path.realpath(os.sys.argv[0])
mypath = os.path.dirname(mypath)
binpath = os.path.realpath(os.path.join(mypath, '..'))
binpath = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(os.sys.argv[0])), '..'))
mypath = str(Path(__file__).resolve().parent)
share_dir_path = os.path.realpath(f"{environ.get('INSTALL_DIR', '/usr/local')}/xc7/install/share/symbiflow")
@ -624,7 +624,7 @@ def cmd_show_dependencies(args: Namespace):
set_verbosity_level(-1)
if __name__ == '__main__':
def main():
parser = setup_argparser()
args = parser.parse_args()
@ -640,3 +640,6 @@ if __name__ == '__main__':
sfprint(0, 'Please use a command.\nUse `--help` flag to learn more.')
sfbuild_done()
if __name__ == '__main__':
main()