Merge pull request from antmicro/umarcor/add-pyFPGA

Add sfbuild|pyF4PGA
This commit is contained in:
Tomasz Michalak 2022-05-17 11:57:56 +02:00 committed by GitHub
commit a6f19c029f
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
51 changed files with 5243 additions and 14 deletions

View file

@ -62,7 +62,7 @@ echo '::endgroup::'
cd ..
echo '::group::🗑️ Remove the wrappers (pre-packaged from arch-defs) and add f4pga-env'
echo '::group::Add f4pga-env'
case "$F4PGA_FAM" in
xc7) F4PGA_DIR_ROOT='install';;
@ -71,6 +71,11 @@ esac
F4PGA_DIR_BIN="$F4PGA_INSTALL_DIR_FAM/$F4PGA_DIR_ROOT"/bin/
cp $(dirname "$0")/../../f4pga-env "$F4PGA_DIR_BIN"
echo '::endgroup::'
echo '::group::🗑️ Remove the wrappers (pre-packaged from arch-defs)'
cd "$F4PGA_DIR_BIN"
case "$F4PGA_FAM" in

43
.github/sftest.json vendored Normal file
View file

@ -0,0 +1,43 @@
{
"default_platform": "xc7a50t",
"values": {
"top": "top"
},
"dependencies": {
"sources": [
"xc7/counter_test/counter.v"
],
"synth_log": "synth.log",
"pack_log": "pack.log"
},
"xc7a200t": {
"default_target": "bitstream",
"dependencies": {
"xdc": [
"arty200.xdc"
],
"build_dir": "build/arty_200"
}
},
"xc7a100t": {
"default_target": "bitstream",
"dependencies": {
"xdc": [
"arty.xdc"
],
"build_dir": "build/arty100"
}
},
"xc7a50t": {
"default_target": "bitstream",
"dependencies": {
"build_dir": "build/arty_35",
"xdc": [
"arty.xdc"
]
},
"values": {
"part": "xc7a35tcpg236-1"
}
}
}

View file

@ -21,12 +21,13 @@ on:
jobs:
Docs:
runs-on: ubuntu-latest
name: '📓 Docs'
steps:
- name: '🧰 Checkout'
- name: 🧰 Checkout
uses: actions/checkout@v3
with:
submodules: recursive
@ -62,28 +63,28 @@ jobs:
git push -u origin +HEAD:gh-pages
Example:
Deprecated:
runs-on: ubuntu-latest
name: '🐍 Example'
strategy:
fail-fast: false
matrix:
include:
- { fam: xc7, example: counter_test }
- { fam: eos-s3, example: btn_counter }
name: '🚦 Example (deprecated sh) | ${{ matrix.fam }}'
env:
F4PGA_INSTALL_DIR: /opt/f4pga
F4PGA_FAM: ${{ matrix.fam }}
steps:
- name: '🧰 Checkout'
- name: 🧰 Checkout
uses: actions/checkout@v3
- name: '🔧 Prepare environment'
- name: 🔧 Prepare environment
run: ./.github/scripts/prepare_environment.sh
- name: '🐍 Install f4pga (pip)'
- name: 🐍 Install f4pga (pip)
run: |
. ./.github/scripts/activate.sh
@ -91,14 +92,14 @@ jobs:
pip install .
cd ..
- name: '🚧 Test f4pga-env'
- name: 🚧 Test f4pga-env
run: |
. ./.github/scripts/activate.sh
echo "F4PGA_ENV_BIN=$(f4pga-env bin)" >> "$GITHUB_ENV"
echo "F4PGA_ENV_SHARE=$(f4pga-env share)" >> "$GITHUB_ENV"
- name: '🚧 Test make example'
- name: 🚧 Test make example
run: |
. ./.github/scripts/activate.sh
@ -127,3 +128,91 @@ jobs:
name: eos-s3-Bitstream
path: f4pga-examples/eos-s3/btn_counter/build/top.bit
if-no-files-found: error
pyF4PGA:
runs-on: ubuntu-latest
name: '🐍 Example | xc7'
env:
F4PGA_INSTALL_DIR: /opt/f4pga
F4PGA_FAM: xc7
steps:
- name: 🧰 Checkout
uses: actions/checkout@v3
# with:
# submodules: recursive
- name: 🔧 Prepare environment
run: ./.github/scripts/prepare_environment.sh
- name: 🐍 Install f4pga (pip)
run: |
. ./.github/scripts/activate.sh
cd f4pga
pip install --use-feature=in-tree-build .
cd ..
- name: 🚧 Test f4pga build
run: |
. ./.github/scripts/activate.sh
cd f4pga-examples
f4pga build --flow ../.github/sftest.json -t bitstream
- name: '📤 Upload artifact: Arty 35 bitstream'
uses: actions/upload-artifact@v3
with:
name: arty_35-Bitstream-pyF4PGA
path: f4pga-examples/build/arty_35/top.bit
PYTHONPATH:
runs-on: ubuntu-latest
name: '🐍 PYTHONPATH'
env:
F4PGA_INSTALL_DIR: /opt/f4pga
F4PGA_FAM: xc7
steps:
- name: 🧰 Checkout
uses: actions/checkout@v3
- name: 🚧 Test pyF4PGA (PYTHONPATH)
run: |
PYTHONPATH=$(pwd) python3 f4pga/__init__.py
PYTHONPATH=$(pwd) python3 f4pga/__init__.py -h
pyWrappers:
runs-on: ubuntu-latest
name: '🐍 Python wrappers'
env:
F4PGA_INSTALL_DIR: /opt/f4pga
F4PGA_FAM: xc7
steps:
- name: 🧰 Checkout
uses: actions/checkout@v3
- name: 🔧 Prepare environment
run: ./.github/scripts/prepare_environment.sh
- name: 🐍 Install f4pga (pip)
run: |
. ./.github/scripts/activate.sh
cd f4pga
pip install --use-feature=in-tree-build .
cd ..
- name: 🚦 Test Python wrappers
run: |
. ./.github/scripts/activate.sh
pip3 install -r ./test/requirements.txt
pytest -vsrA --color=yes test/wrappers.py

1
.gitignore vendored
View file

@ -1,2 +1,3 @@
*.pyc
*.sw*
/f4pga/build/

View file

@ -47,16 +47,24 @@ extensions = [
'sphinx.ext.intersphinx',
'sphinx_verilog_domain',
'sphinxcontrib.bibtex',
'myst_parser'
]
bibtex_default_style = 'plain'
bibtex_bibfiles = ['refs.bib']
myst_enable_extensions = [
"colon_fence",
]
numfig = True
templates_path = ['_templates']
source_suffix = ['.rst', '.md']
source_suffix = {
'.rst': 'restructuredtext',
'.md': 'markdown'
}
master_doc = 'index'

103
docs/f4pga/DevNotes.md Normal file
View file

@ -0,0 +1,103 @@
# Developer's notes
##### Last update: 2022-05-06
:::{warning}
These notes are provided as-is and shouldn't be treated as full-blown, accurate
documentation, but rather as a helpful resource for those who want to get involved with
the development of _f4pga_. They are not updated regularly.
For more detailed, up-to-date information about the code, refer to the pydoc documentation.
:::
## Project's structure
* `__init__.py` contains the logic and entry point of the build system
* `argparser.py` contains boring code for CLI interface
* `cache.py` contains code needed for tracking modifications in the project.
* `common.py` contains code shared by the main utility and the modules
* `flow_config.py` contains code for reading and accessing flow definitions and configurations
* `module_inspector.py` contains utilities for inspecting I/O of modules
* `module_runner.py` contains code required to load modules at run-time
* `module.py` contains definitions required for writing and using f4pga modules
* `part_db.json` contains mappings from part names to platform names
* `setup.py` contains a package installation script
* `stage.py` contains classes relevant to stage representation
* `modules` contains loadable modules
* `platforms` contains platform flow definitions
:::{important}
Throughout the codebase, _f4pga_ (the tool) is often referred to as _sfbuild_.
Similarly, _F4PGA_ (the toolchain) may be called _Symbiflow_.
This is because the project was written back when _F4PGA_ was still called _Symbiflow_.
:::
## Different subsystems and where to find them
### Building and dependency resolution
All the code regarding dependency resolution is located in `__init__.py` file.
Take a look at the `Flow` class.
Most of the work is done in the `Flow._resolve_dependencies` method. Essentially, it
performs a _DFS_ with _stages_ (instances of _f4pga modules_) as its nodes,
linked using the symbolic names of the dependencies on their inputs and outputs.
It queries the modules for information regarding I/O (most importantly, the paths
at which they are going to produce their outputs), checks whether
their inputs are going to be satisfied, checks whether dependencies were modified, etc.
The actual building is done by the `Flow._build_dep` procedure, which uses a similar
_DFS_ approach to invoke modules and check their inputs and outputs.
### Modification tracking
Modification tracking is done by taking, comparing and keeping track of `adler32`
hashes of all dependencies. Each dependency has a set of hashes associated with it.
The reason for having multiple hashes is that a dependency may have multiple
"_consumers_", i.e. _stages_ which take it as input. Each hash is associated with a
particular consumer. This is necessary because the system tries to avoid rebuilds
when possible, and the status of each file (modified/unmodified) may differ with
regard to individual stages.
Keeping track of the status of each file is done using the `SymbiCache` class, which is
defined in the `cache.py` file. `SymbiCache` is used mostly inside `Flow`'s methods.
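The idea, stripped to its essentials, looks roughly like this (a hypothetical sketch, not the actual `SymbiCache` code):

```python
# Hypothetical sketch of per-consumer modification tracking.
from zlib import adler32

def file_hash(path: str) -> int:
    with open(path, 'rb') as f:
        return adler32(f.read())

# dependency path -> {consumer (stage name) -> hash last seen by that consumer}
hashes: 'dict[str, dict[str, int]]' = {}

def get_status(path: str, consumer: str) -> str:
    last = hashes.get(path, {}).get(consumer)
    if last is None:
        return 'untracked'
    return 'same' if last == file_hash(path) else 'changed'

def update(path: str, consumer: str):
    hashes.setdefault(path, {})[consumer] = file_hash(path)
```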
### Internal environmental variable system
_f4pga_ exposes some data to the user as well as reads some using internal
environmental variables. These can be referenced by users in
_platform flow definitions_ and _project flow configurations_ using the
`${variable_name}` syntax when defining values. They can also be read inside
_f4pga modules_ by accessing the `ctx.values` namespace.
The core of this system is the `ResolutionEnv` class, which can be found
inside the `common` module.
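The resolution itself boils down to substituting `${...}` references in value strings; a minimal sketch of the idea (not the actual `ResolutionEnv` code):

```python
# Minimal, hypothetical sketch of ${...} substitution for string values.
import re

def resolve(value: str, variables: 'dict[str, str]') -> str:
    return re.sub(r'\$\{([^}]+)\}', lambda m: variables[m.group(1)], value)

print(resolve('${shareDir}/arch/${device}',
              {'shareDir': '/opt/f4pga/share', 'device': 'xc7a50t_test'}))
# /opt/f4pga/share/arch/xc7a50t_test
```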
### Installation
Check `CMakeLists.txt`.
## TODO:
* Define a clear specification for entries in _platform flow definitions_ and
_platform flow configurations_. Which environmental variables can be accessed
where, and when?
* Force "_on-demand_" outputs if they are required by another stage.
This may require redesigning the "on-demand" feature, which currently works
by producing a dependency if and only if the user explicitly provides the
path. Otherwise the path is unknown.
* Make commenting style consistent
* Document writing flow definitions
* Extend the metadata system for modules, perhaps make it easier to use.
* Add missing metadata for module targets.
* (_suggestion_) Generate platform definitions using CMake.
### Out of the current scope
* Change the interfaces of some internal Python scripts. This could possibly lead to
merging some modules for XC7 and Quicklogic into one common module.

409
docs/f4pga/Usage.md Normal file
View file

@ -0,0 +1,409 @@
# Usage
## Getting started
To use `f4pga` you need a working Python 3 installation, which should be included as part of the conda virtual
environment set up during F4PGA installation.
`f4pga` is installed together with F4PGA, regardless of the version of the toolchain.
However, only _XC7_ architectures are currently supported; _Quicklogic_ support is a work in progress.
To get started with a project that already uses `f4pga`, go to the project's directory and run the following line to
generate a bitstream:
```bash
$ f4pga build -f flow.json
```
`flow.json` should be a *project flow configuration* file included with the project.
If you are unsure whether you have the right file, check it against the example of such a file shown in the
*Build a target* section below.
The location of the bitstream will be indicated by `f4pga` after the flow completes.
Look for a line like this one on stdout:
```bash
Target `bitstream` -> build/arty_35/top.bit
```
## Fundamental concepts
If you want to create a new project, it's highly recommended that you read this section first.
### f4pga
`f4pga` is a modular build system designed to handle various _Verilog-to-bitstream_ flows for FPGAs.
It works by wrapping the necessary tools in Python, which are called *f4pga modules*.
Modules are then referenced in *platform flow definition* files, together with configuration specific for a given
platform.
Flow definition files for the following platforms are included as a part of _f4pga_:
* **AMD Xilinx xc7a50t** (and architecturally equivalent devices, such as xc7a35t)
* **AMD Xilinx xc7a100t**
* **AMD Xilinx xc7a200t**
* **Quicklogic EOS-S3** (currently unsupported, provided only for development purposes)
* **Quicklogic K4N8** (currently unsupported, provided only for development purposes)
You can also write your own *platform flow definition* file if you want to bring support for a different device.
Each project that uses `f4pga` to perform any flow should include a _.json_ file describing the project.
The purpose of that file is to configure inputs for the flow and override configuration values if necessary.
### Modules
A *module* (also referred to as *f4pga module* in situations where there might be confusion between arbitrary Python
_modules_ and f4pga _modules_) is a Python script that wraps a tool used within the F4PGA ecosystem.
The main purpose of the wrappers is to provide a unified interface for `f4pga` to use and configure the tool,
as well as to provide information about the files required and produced by the tool.
### Dependencies
A *dependency* is any file, directory, or list of such, that a *module* takes as its input or produces as its output.
Modules specify their dependencies using symbolic names instead of file paths.
The files they produce are also given symbolic names; their paths are either set through the *project flow configuration*
file or derived from the paths of the dependencies taken by the module.
### Target
A *target* is a dependency that the user has asked F4PGA to produce.
### Flow
A *flow* is a set of *modules* executed in the right order to produce a *target*.
### .symbicache
All *dependencies* are tracked by a modification tracking system which stores hashes of the files
(directories always get a `'0'` hash) in a `.symbicache` file in the root of the project.
When F4PGA constructs a *flow*, it will try to omit the execution of modules which would receive the same data on their
input.
There is a strong _assumption_ that a *module*'s output remains unchanged if the input configuration isn't
changed, i.e. that *modules* are deterministic. This might not be true for some tools, and in case you really want to re-run
a stage, there's a `--nocache` option that treats the `.symbicache` file as if it were empty.
### Resolution
A *dependency* is said to be *resolved* if it meets one of the following criteria (see the sketch after this list):
* it exists on persistent storage and its hash matches the one stored in `.symbicache`
* there exists a *flow* in which all of the dependencies of its modules are *resolved* and which produces the *dependency* in
question.
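In pseudo-Python, the definition amounts to a recursive check along these lines (all names here are illustrative, not the actual API):

```python
# Illustrative recursion over the dependency graph; every name is hypothetical.
def is_resolved(dep, unchanged_on_disk, producer_of, takes_of) -> bool:
    if unchanged_on_disk(dep):         # exists and its hash matches .symbicache
        return True
    stage = producer_of(dep)           # a stage able to produce `dep`, if any
    if stage is None:
        return False
    return all(is_resolved(take, unchanged_on_disk, producer_of, takes_of)
               for take in takes_of(stage))
```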
### Platform's flow definition
A *platform flow definition* is a piece of data describing a space of flows for a given platform, serialized into _JSON_.
It's stored in a file named after the device under `f4pga/platforms`.
A *platform flow definition* contains a list of modules available for constructing flows and defines a set of values which
the modules can reference.
For some modules it may also define a set of parameters used during their construction.
The `mkdirs` module uses this to allow the production of multiple directories as separate dependencies.
This, however, is an experimental feature which may be removed in favor of having multiple instances of the same
module with renamable outputs.
Not all *dependencies* have to be *resolved* at this stage; a *platform's flow definition*, for example, won't be able to
provide a list of the source files needed in a *flow*.
### Project's flow configuration
Similarly to the *platform flow definition*, the *project's flow configuration* is a _JSON_ used to configure *modules*. There are, however, a few differences:
* The most obvious one is that this file is unique to a project and is provided by the user of `f4pga`.
* It doesn't list the *modules* available for the platform.
* All the values provided in the *project's flow configuration* will override those provided in the *platform flow definition*.
* It can contain sections with configurations for different platforms.
* Unlike the *platform flow definition*, it can give explicit paths to dependencies.
* At this stage all mandatory *dependencies* should be resolved.
Typically the *project's flow configuration* will be used to resolve dependencies for _HDL source code_ and _device constraints_.
## Build a target
### Using flow configuration file
To build a *target* `target_name`, use the following command:
```bash
$ f4pga build -f flow.json -p platform_device_name -t target_name
```
where `flow.json` is a path to the *project's flow configuration*.
For example, let's consider the following *project's flow configuration (flow.json)*:
```json
{
    "default_platform": "xc7a50t",
    "dependencies": {
        "sources": ["counter.v"],
        "xdc": ["arty.xdc"],
        "synth_log": "synth.log",
        "pack_log": "pack.log"
    },
    "values": {
        "top": "top"
    },
    "xc7a50t": {
        "default_target": "bitstream",
        "dependencies": {
            "build_dir": "build/arty_35"
        }
    }
}
```
It specifies a list of paths to Verilog source files as the `sources` dependency.
Similarly, it provides an `XDC` file with constraints (the `xdc` dependency).
It also names paths for the synthesis and packing logs (`synth_log`, `pack_log`).
These two are optional on-demand outputs, meaning they won't be produced unless their paths are explicitly set.
The `top` value is set in order to specify the name of the top Verilog module, which is required during synthesis.
`build_dir` is an optional helper dependency.
When available, modules will put their outputs into that directory.
It's also an _on-demand_ output of the `mkdirs` module in the _xc7a50t_ flow definition, which means that if the specified
directory does not exist, `mkdirs` will create it and provide it as the `build_dir` dependency.
With this flow configuration, you can build a bitstream for arty_35 using the
following command:
```
$ f4pga build -f flow.json -p XC7A35TCSG324-1 -t bitstream
```
Because we have `default_platform` defined, we can skip the `--platform` or `--part` argument.
We can also skip the `--target` argument because we have a `default_target` defined for the
chosen platform. This will default to the `bitstream` target of `xc7a50t` platform:
```
$ f4pga build -f flow.json
```
### Using Command-Line Interface
Alternatively, you can use the CLI to pass the configuration without creating a flow file:
```
$ f4pga build -p XC7A35TCSG324-1 -Dsources=[counter.v] -Dxdc=[arty.xdc] -Dsynth_log=synth.log -Dpack_log=pack.log -Dbuild_dir=build/arty_35 -Vtop=top -t bitstream
```
CLI flow configuration can be used alongside a flow configuration file and will override
conflicting dependencies/values from the file.
CLI configuration uses the following format:
`<dependency/value identifier>=<expression>`
`<dependency/value identifier>` is the name of a dependency or value, optionally prefixed by a stage
name and a dot (`.`). Using the notation with a stage name sets the dependency/value only for the
specified stage.
`<expression>` is a way of defining a dependency path or a value. Characters are interpreted
as strings unless they follow one of these formats:
* `[item1,item2,item3,...]` - this is a list of strings
* `{key1:value1,key2:value2,key3:value3,...}` - this is a dictionary
Nesting structures is currently unsupported in CLI.
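As an illustration, the `<expression>` part can be parsed roughly like this (a sketch only; the actual parsing lives in `f4pga/argparser.py` and may differ):

```python
# Rough sketch of parsing a -D/-V <expression>; not the real implementation.
def parse_expression(expr: str):
    if expr.startswith('[') and expr.endswith(']'):
        return expr[1:-1].split(',')                    # list of strings
    if expr.startswith('{') and expr.endswith('}'):
        return dict(pair.split(':', 1)                  # flat dictionary
                    for pair in expr[1:-1].split(','))
    return expr                                         # plain string

assert parse_expression('[counter.v,arty.xdc]') == ['counter.v', 'arty.xdc']
assert parse_expression('{top:top}') == {'top': 'top'}
assert parse_expression('synth.log') == 'synth.log'
```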
### Pretend mode
You can also add a `--pretend` (`-P`) option if you just want to see the results of dependency resolution for a
specified target without building it.
This is useful when you just want to know what files will be generated and where they will be stored.
### Info mode
Modules can include descriptions of the dependencies they produce.
Running `f4pga` with the `--info` (`-i`) flag allows you to see the descriptions of these dependencies.
This option doesn't require a target to be specified, but you still have to provide a flow configuration and a platform
name.
This is still an experimental option; most targets currently lack descriptions, and no information about whether an output
is _on-demand_ is currently displayed.
Example:
```bash
$ f4pga -v build flow.json --platform xc7a50t -i
```
```
Platform dependencies/targets:
build_dir: <no description>
module: `mk_build_dir`
eblif: Extended BLIF hierarchical sequential designs file
generated by YOSYS
module: `synth`
fasm_extra: <no description>
module: `synth`
json: JSON file containing a design generated by YOSYS
module: `synth`
synth_json: <no description>
module: `synth`
sdc: <no description>
module: `synth`
```
:::{important}
This is only a snippet of the entire output.
:::
### Summary of global options
| long | short | arguments | description |
|-----------|:-----:|--------------------------|----------------------------------------------------------------------------|
| --verbose | -v | - | Control verbosity level. 0 for no verbose output. 2 for maximum verbosity |
| --silent | -s | - | Suppress any output |
### Summary of all available sub-commands
| name | description |
|---------|-----------------------------|
| build | Build a project |
| showd   | Print value of a dependency |
### Summary of all options available for `build` sub-command
| long | short | arguments | description |
|-------------|:-----:|--------------------------|---------------------------------------------------------|
| --flow | -f | flow configuration file | Use flow configuration file |
| --platform  |       | platform name            | Specify target platform name (e.g. xc7a100t)              |
| --part | -p | part name | Specify target platform by part name |
| --target | -t | target dependency name | Specify target to produce |
| --info | -i | - | Display information about available targets |
| --pretend | -P | - | Resolve dependencies without executing the flow |
| --nocache   |       | -                        | Do not perform an incremental build (do a full build)    |
| --stageinfo | -S | stage name | Display information about a specified stage |
| --dep | -D | dependency_name=pathexpr | Add a dependency to configuration |
| --val | -V | value_name=valueexpr | Add a value to configuration |
### Summary of all options available for `showd` sub-command
| long | short | arguments | description |
|-------------|:-----:|--------------------------|--------------------------------------------------------------------------|
| --flow | -f | flow configuration file | Use flow configuration file |
| --platform | -p | platform name | Specify target platform name (to display platform-specific dependencies) |
| --stage     | -s    | stage name               | Specify stage name (to display stage-specific dependencies)               |
### Dependency resolution display
F4PGA displays some information about dependencies when requesting a target.
Here's an example of possible output when trying to build the `bitstream` target (use `-P`):
```
F4PGA Build System
Scanning modules...
Project status:
[R] bitstream: bitstream -> build/arty_35/top.bit
[O] build_dir: build/arty_35
[R] eblif: synth -> build/arty_35/top.eblif
[R] fasm: fasm -> build/arty_35/top.fasm
[R] fasm_extra: synth -> build/arty_35/top_fasm_extra.fasm
[R] io_place: ioplace -> build/arty_35/top.ioplace
[R] net: pack -> build/arty_35/top.net
[X] pcf: MISSING
[R] place: place -> build/arty_35/top.place
[R] place_constraints: place_constraints -> build/arty_35/top.preplace
[R] route: route -> build/arty_35/top.route
[R] sdc: synth -> build/arty_35/top.sdc
[N] sources: ['counter.v']
[O] xdc: ['arty.xdc']
f4pga: DONE
```
The letters in the boxes describe the status of a dependency whose name is next to the box.
* **X** - dependency unresolved.
Dependency is not present or cannot be produced.
This isn't always a bad sign; some dependencies are not required, such as `pcf`.
* **O** - dependency present, unchanged.
This dependency is already built and is confirmed to stay unchanged during flow execution.
* **N** - dependency present, new/changed.
This dependency is already present on persistent storage, but it was either missing earlier, or its contents
have changed since the last time it was used.
:::{warning}
It won't continue to be reported as "**N**" after a successful build of any target.
This may lead to some false "**O**"s in some complex scenarios.
This should be fixed in the future.
:::
* **S** - dependency not present, resolved.
This dependency is not currently available on persistent storage, however it will be produced during the flow's
execution.
* **R** - dependency present, resolved, requires rebuild.
This dependency is currently available on persistent storage, however it has to be rebuilt due to changes in
the project.
Additional info about a dependency will be displayed next to its name, after a colon:
* For dependencies that are to be built (**S**/**R**), the name of the module that will produce the
dependency is shown, followed by `->` and a path or list of paths to the file(s)/directory(ies) that will be produced as this
dependency.
* For dependencies which do not require the execution of any modules, only a path or list of paths to the
file(s)/directory(ies) will be displayed.
* For unresolved dependencies (**X**), which are not produced by any module, the text "`MISSING`" will be
displayed.
In the example above, the file `counter.v` has been modified and is now marked as "**N**".
This causes a number of other dependencies to be rebuilt ("**R**").
`build_dir` and `xdc` were already present, so they are marked as "**O**".
## Common targets and values
Targets and values follow certain naming conventions.
Below are lists of the target and value names along with their meanings.
### Need to be provided by the user
| Target name | list | Description |
|-------------|:----:|-------------|
| `sources` | yes | Verilog sources |
| `sdc` | no | Synopsys Design Constraints |
| `xdc` | yes | Xilinx Design Constraints (available only for Xilinx platforms) |
| `pcf` | no | Physical Constraints File |
### Available in most flows
| Target name | list | Description |
|--------------|:----:|-----------------------------------------------------------------|
| `eblif` | no | Extended blif file |
| `bitstream` | no | Bitstream |
| `net` | no | Netlist |
| `fasm` | no | Final FPGA Assembly |
| `fasm_extra` | no | Additional FPGA assembly that may be generated during synthesis |
| `build_dir` | no | A directory to put the output files in |
### Built-in values
| Value name | type | Description |
|-----------------|----------|---------------------------------------------------|
| `shareDir` | `string` | Path to f4pga's installation "share" directory |
| `python3` | `string` | Path to Python 3 executable |
| `noisyWarnings` | `string` | Path to noisy warnings log (should be deprecated) |
| `prjxray_db` | `string` | Path to Project X-Ray database |
### Used in flow definitions
| Value name | type | Description |
|---------------|------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------|
| `top` | `string` | Top module name |
| `build_dir` | `string` | Path to build directory (should be optional) |
| `device` | `string` | Name of the device |
| `vpr_options` | `dict[string -> string \| number]` | Named options passed to VPR. No `--` prefix included. |
| `part_name` | `string` | Name of the chip used. The distinction between `device` and `part_name` is ambiguous at the moment and should be addressed in the future. |
| `arch_def` | `string` | Path to an XML file containing architecture definition. |

7
docs/f4pga/browse_pydoc.sh Executable file
View file

@ -0,0 +1,7 @@
#!/bin/sh
MY_DIR=`dirname $0`
SFBUILD_DIR=${MY_DIR}/../../f4pga
SFBUILD_PY=${SFBUILD_DIR}/__init__.py
PYTHONPATH=${SFBUILD_DIR} pydoc -b

27
docs/f4pga/index.rst Normal file
View file

@ -0,0 +1,27 @@
Overview
########
Python F4PGA is a package containing multiple modules to facilitate the usage of all the tools integrated in the F4PGA
ecosystem, and beyond.
The scope of Python F4PGA is threefold:
* Provide a fine-grained *pythonic* interface to the tools and utilities available as either command-line interfaces
  (CLIs) or application programming interfaces (APIs) (either web or through shared libraries).
* Provide a CLI entrypoint covering the whole flows for end-users to produce bitstreams from HDL and/or software sources.
* Provide a CLI entrypoint for developers contributing to bitstream documentation and testing (continuous integration).
.. ATTENTION::
   This is a work in progress to adapt and organize the existing shell/bash based plumbing from multiple F4PGA repositories.
   Therefore, it's still *pre-alpha* and the codebase, commands and flows are subject to change.
   It is strongly suggested not to rely on Python F4PGA until this note is updated/removed.
References
==========
* :gh:`chipsalliance/fpga-tool-perf#390@issuecomment-1023487178 <chipsalliance/fpga-tool-perf/pull/390#issuecomment-1023487178>`
* :ghsharp:`2225`
* :ghsharp:`2371`
* :ghsharp:`2455`
* `F4PGA GSoC 2022 project ideas: Generalization of wrapper scripts for installed F4PGA toolchain and making them OS agnostic <https://github.com/f4pga/ideas/blob/master/gsoc-2022-ideas.md#generalization-of-wrapper-scripts-for-installed-f4pga-toolchain-and-making-them-OS-agnostic>`__
* :gh:`FuseSoc <olofk/fusesoc>` | :gh:`Edalize <olofk/edalize>`
* `Electronic Design Automation Abstraction (EDA²) <https://edaa-org.github.io/>`__

View file

@ -0,0 +1,18 @@
# fasm
The _fasm_ module generates FPGA assembly using `genfasm` (VPR-only).
The module should guarantee the following outputs:
* `fasm`
For detailed information about these targets, please refer to
`docs/common targets and variables.md`.
The setup of the `fasm` module follows these specifications:
## Values
The `fasm` module accepts the following values:
* `pnr_corner` (string, optional): PnR corner to use. Relevant only for Quicklogic's
eFPGAs.

View file

@ -0,0 +1,41 @@
# generic_script_wrapper
This module provides a way to integrate an external command into an f4pga flow.
Its inputs and outputs are fully defined by the author of the flow definition.
## Parameters
Parameters are everything when it comes to this module:
* `stage_name` (string, optional): Name describing the stage
* `script` (string, mandatory): Path to the script to be executed
* `interpreter` (string, optional): Interpreter for the script
* `cwd` (string, optional): Current Working Directory for the script
* `outputs` (dict[string -> dict[string -> string]], mandatory):
  A dict with output descriptions (dicts). Keys name the output dependencies.
  * `mode` (string, mandatory): "file" or "stdout". Describes how the output is
    grabbed from the script.
  * `file` (string, required if `mode` is "file"): Name of the file generated by the
    script.
  * `target` (string, required): Default name of the file of the generated
    dependency. You can use all values available during the map_io stage. Each input
    dependency also gets two extra values associated with it:
    `:dependency_name[noext]`, which contains the path to the dependency with the
    extension (anything after the last ".") removed, and `:dependency_name[dir]`, which
    contains the directory path of the dependency. This is useful for deriving an
    output name from the input.
  * `meta` (string, optional): Description of the output dependency.
* `inputs` (dict[string -> string | bool], mandatory):
  A dict with input descriptions. A key can be the name of a named argument, the
  position of an unnamed argument prefixed with "#" (e.g. "#1"), or the name of an
  environment variable prefixed with "$". Positions are indexed
  from 1, as by convention the 0th argument is the path of the executed program.
  Values are strings that can contain references to variables to be resolved
  after the project flow configuration is loaded (which means they can reference
  values and dependencies that are to be set by the user). All of the module's inputs
  will be determined by the references used; thus, dependency and value definitions
  are implicit. If the resolved string is empty and is associated with a
  named argument, the argument in question will be skipped entirely. This allows
  using optional dependencies. To use a named argument as a flag instead, set it to
  `true`.
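For illustration, a hypothetical `params` block for this module, wrapping a made-up `bit_to_bin.py` script, could look as follows (every path and name below is an assumption for the example, not taken from a real flow definition):

```json
{
    "stage_name": "bin",
    "script": "${shareDir}/scripts/bit_to_bin.py",
    "interpreter": "${python3}",
    "outputs": {
        "bin": {
            "mode": "file",
            "file": "top.bin",
            "target": "${:bitstream[noext]}.bin",
            "meta": "Binary bitstream"
        }
    },
    "inputs": {
        "#1": "${:bitstream}",
        "verbose": true
    }
}
```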

278
docs/f4pga/modules/index.md Normal file
View file

@ -0,0 +1,278 @@
# Modules
## Interface
This document contains all the information needed to configure modules for
your _**f4pga**_ project as well as some info about the API used to write
modules.
### Configuration interface
Modules are configured through an internal API by _**f4pga**_.
The basic requirement for a module script is to expose a class with `Module`
interface.
_**f4pga**_ reads its configuration from two different sources:
the **platform's flow definition**, a file that usually comes bundled with f4pga,
and the **project's flow configuration**, a set of configuration options provided by the user
through a JSON file or the CLI.
Those sources contain snippets of _module configurations_.
A _module configuration_ is a structure with the following fields:
* `takes` - a dictionary that contains keys which are names of the dependencies used by the module.
The values are paths to those dependencies.
They can be either singular strings or lists of strings.
* `produces` - a dictionary that contains keys which are names of the dependencies produced by the module.
The values are requested filenames for the files generated by the module.
They can be either singular strings or lists of strings.
* `values` - a dictionary that contains other values used to configure the module.
  The keys are the values' names and the values can have any type.
### Platform-level configuration
In the case of the **platform's flow definition**, a `values` dictionary can be defined
globally, and the values defined there will be passed to every module's config.
Those values can be overridden per module through the `module_options` dictionary.
Parameters used during a module's construction can also be defined in `module_options`
as `params` (those are not part of the _module configuration_; instead, they are used
during the actual construction of a module instance, before it declares any of its
inputs/outputs etc. This is typically used to achieve some parametrization over a module's
I/O).
Defining dictionaries for `takes` and `produces` is currently disallowed within
**platform's flow definition**.
For examples of the **platform flow definitions** described here, please have a look at the
`f4pga/platforms/` directory. It contains the **platform flow definitions** that come bundled
with f4pga.
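A trimmed, illustrative fragment of such a file might look like the following sketch (the field names follow the description above; treat it as an assumption and consult the bundled definitions for the authoritative structure):

```json
{
    "values": {
        "device": "xc7a50t_test"
    },
    "stages": {
        "mk_build_dir": "mkdirs",
        "synth": "synth",
        "pack": "pack"
    },
    "module_options": {
        "mk_build_dir": {
            "params": {
                "build_dir": "build/${device}"
            }
        },
        "synth": {
            "values": {
                "tcl_scripts": "${shareDir}/scripts/xc7"
            }
        }
    }
}
```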
### Project-level configuration
This section describes **project's flow configuration**.
Similarly to the **platform's flow definition**, a `values` dict can be provided.
The values provided there will override the values from the
**platform's flow definition** in case of a collision.
Unlike the **platform's flow definition**, the **project's flow configuration** may contain a
`dependencies` dict. This dictionary is used to map symbolic dependency
names to actual paths. Most dependencies can have their paths resolved implicitly,
without the need to provide explicit paths, through a mechanism described
in a later section of this document. However, some dependencies must be provided
explicitly, e.g. the paths to the project's Verilog source files. It should be noted that,
depending on the flow definition and the dependency in question, the path does not
necessarily have to point to an already existing file. If the dependency is a
product of a module within the flow, the path assigned to it will be used
by the module to build that dependency. This is also used in the case of _on-demand_
dependencies, which won't be produced unless the user explicitly provides a path
for them.
The **project's flow configuration** cannot specify `params` for modules and does not
use the `module_options` dictionary. Nor can it instantiate any extra stages.
Any entry, with a couple of _exceptions*_, is treated as a platform name.
Enabling support for a given platform within a **project's flow configuration** file
requires having an entry for that platform.
Each of those entries may contain `dependencies` and `values` fields which
override the `dependencies` and `values` defined in the global scope of the
**project's flow configuration**. Any other field under those platform entries
is treated as a _stage-specific configuration_. The key is the name of a stage within
a flow for the specified platform, and the values are dicts which may contain
`dependencies` and `values` fields that override `dependencies` and `values`
respectively, locally for the stage. Additionally, a `default_target` field can be
provided to specify the default target to build when the user does not specify it through
the CLI.
The aforementioned _*exceptions_ are:
* `dependencies` - dependencies shared by all platforms.
* `values` - values shared by all platforms
* `default_platform` - the default platform to choose in case it doesn't get specified
by the user
These exceptions apply only to the flow configuration file.
### Internal environmental variables
It's very useful to be able to refer to some data within the
**platform's flow definition** and **project's flow configuration**, to
either avoid redundant definitions or to store and access the results of certain operations.
_**f4pga**_ allows doing that through a special syntax for accessing internal
environmental variables.
The syntax is `${variable_name}`. Any string value within the
**platform's flow definition** and **project's flow configuration** that contains
such patterns will have them replaced with the values of the referenced variables,
provided those values are strings. E.g.:
With the following values defined:
```json
{
"a_value": "1234",
"another_value": "a_value: ${a_value}"
}
```
`another_value` will resolve to:
```json
"a_value: 1234"
```
If the value is a list, however, the result will be a list with one entry per item of
the referenced list, each entry being the original string with the variable reference
replaced by the successive item. E.g.:
With the following values defined:
```json
{
"list_of_values": ["a", "b", "c"],
"some_string": "item: ${list_of_values}"
}
```
`some_string` will resolve to
```json
["item: a", "item: b", "item: c"]
```
Be careful when using this kind of resolution, as its computational and memory
complexity grows exponentially with the number of list variables being
referenced.
The variables that can be referenced within a definition/configuration fall into 3
categories:
* **value references** - anything declared as a `value` can be accessed by its
name
* **dependency references** - any dependency path can be referenced using the name
of the dependency prefixed with a ':'. E.g.: `${:eblif}` will resolve
to the path of the `eblif` dependency. Make sure that the dependency can
actually be resolved when you are using this kind of reference. For example,
you can't use a reference to the `eblif` dependency in a module which does not
rely on it. An exception is the producer module, which can in fact reference its
own outputs, but these references cannot be used during the _mapping_ stage
(more on that later).
* **built-in references** - there are a couple of built-in variables which are very
handy:
* `shareDir` - path to f4pga's _share_ directory.
* `binDir` - path to f4pga's _bin_ directory.
* `prjxray_db` - Project X-Ray database path.
* `python3` - path to Python 3 interpreter.
* `noisyWarnings` - (this one should probably get removed)
### `Module` class
Each module is represented as a class derived from the `Module` class.
The class should implement the following methods:
* `execute(self, ctx: ModuleContext)` - executes the module in _exec_ mode
* `map_io(self, ctx: ModuleContext) -> 'dict[str, ]'` - executes the module in
_mapping_ mode
* `__init__(self, params: 'dict[str, ]')` - initializer. `params` is a dict with
  optional parameters for the module.
Each module script should expose the class by defining its name/type alias as
`ModuleClass`. f4pga tries to access a `ModuleClass` attribute within a package
when instantiating a module.
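A minimal, hypothetical module script following this interface could look like the sketch below (the copy behavior is invented purely to keep the example short, and the exact base-class requirements may differ):

```python
from pathlib import Path

from f4pga.module import Module, ModuleContext

class EchoModule(Module):
    def map_io(self, ctx: ModuleContext) -> 'dict[str, ]':
        # Mapping mode: promise an output path derived from the input path.
        return {'copy': str(Path(ctx.takes.source).with_suffix('.copy'))}

    def execute(self, ctx: ModuleContext):
        # Exec mode: produce the file promised during the mapping stage.
        Path(ctx.outputs.copy).write_text(Path(ctx.takes.source).read_text())

    def __init__(self, params: 'dict[str, ]'):
        self.name = 'echo'
        self.takes = ['source']
        self.produces = ['copy']
        self.values = []
        self.prod_meta = {'copy': 'Verbatim copy of the `source` input'}

ModuleClass = EchoModule
```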
### Module's execution modes
A module has essentially two execution modes:
* _mapping_ mode
* _exec_ mode
#### _mapping_ mode
In _mapping_ mode the module is provided with an incomplete configuration which
includes:
* `takes` namespace: this maps names of input dependencies to the paths of these
dependencies
* `values` namespace: this maps names of variables to the values of those
variables.
The module has to provide a dictionary that gives every output dependency
that's not _on-demand_ a default path. This is essentially a promise that, when
executed in _exec_ mode, the module will produce files at these paths.
Typically such paths are derived from the path of one of the module's input dependencies.
This mechanism allows the user to avoid specifying an explicit path for each
intermediate target.
It should be noted that variables referring to the output dependencies
can't be accessed at this stage, for the obvious reason that their values are yet
to be evaluated.
#### _exec_ mode
In _exec_ mode the module does the actual work.
The configuration passed into this mode is full and it includes:
* `takes` namespace: this maps names of input dependencies to the paths of these
dependencies
* `values` namespace: this maps names of variables to the values of those
variables.
* `produces` namespace: this maps names of output dependencies to explicit paths.
This should not really be used directly; it's mainly useful for the
`ModuleContext.is_output_explicit` method.
* `outputs` namespace: this maps names of output dependencies to their paths.
When the module finishes executing in _exec_ mode, all of the dependencies
described in `outputs` should be present.
### Module initialization/instantiation
In the `__init__` method of a module's class, the following fields should be
set:
* `takes` - a list of symbolic dependency names for dependencies used by the module
* `produces` - a list of symbolic dependency names for dependencies produced
by the module
* `values` - a list of names given to the variables used within the module
* `prod_meta` - a dictionary which maps product names to descriptions of these
products. These entries are optional and can be skipped.
#### Qualifiers/decorators
By default, the presence of all the dependencies and values is mandatory
(in the case of `produces`, this means that the module always has to produce the listed
dependencies). This can be changed by "decorating" a name in one of the following
ways:
* '`?`' _suffix_
* In `takes` - the dependency is not necessary for the module to execute
* In `produces` - the dependency may be produced, but it is not guaranteed.
* In `values` - the value is not required for the module to execute.
Referring to it through `ModuleContext.values.value_name` won't raise an
exception if the value is not present; instead, `None` will be returned.
* '`!`' _suffix_
* In `produces` - the dependency is going to be produced only if the user
provides an explicit path for it.
Currently it's impossible to combine '`!`' and '`?`'. This limitation
has no reason behind it other than the way the qualifier system
is implemented at the moment. It might be removed in the future.
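For example, a `pack`-like module could declare (the dependency and value names come from the common targets listed in the Usage document; the module itself is illustrative):

```python
from f4pga.module import Module

class PackLikeModule(Module):
    def __init__(self, params: 'dict[str, ]'):
        self.name = 'pack'
        self.takes = ['eblif', 'sdc?']         # `sdc` is optional
        self.produces = ['net', 'pack_log!']   # `pack_log` only on explicit path
        self.values = ['top', 'vpr_options?']  # absent `vpr_options` reads as None
```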
## Common modules
```{toctree}
fasm
generic_script_wrapper
io_rename
mkdirs
pack
place
place_constraints
route
synth
```

View file

@ -0,0 +1,25 @@
# io_rename
This module provides a way to rename (i.e. change) the dependencies and values of an
instance of a different module. It wraps another module, whose name is specified in `params.module`, and changes the names of the dependencies and values it relies on.
## Parameters
* `module` (string, required) - name of the wrapped module
* `params` (dict[string -> any], optional): parameters passed to the wrapped
module instance.
* `rename_takes` (dict[string -> string]) - mapping for inputs ("takes")
* `rename_produces` (dict[string -> string]) - mapping for outputs ("produces")
* `rename_values` (dict[string -> string]) - mapping for values
In the three mapping dicts, keys represent the names visible to the wrapped module
and values represent the names visible to the modules outside.
Not specifying a mapping for a given entry will leave it with its original name.
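An illustrative `params` block for a stage using `io_rename` to wrap the `synth` module might look like this (the renamed names are invented for the example):

```json
{
    "module": "synth",
    "rename_takes": {
        "sources": "rtl_sources"
    },
    "rename_produces": {
        "synth_log": "yosys_log"
    }
}
```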
## Values
All values specified for this module will be accessible by the wrapped module.
## Extra notes
This module might be removed in the future in favor of a native renaming support.

View file

@ -0,0 +1,9 @@
# mkdirs
This module creates the directories specified by the author of the flow definition
as its targets.
## Parameters
Each key serves as a name of a directory to be created, while the value is the
path for that directory.

View file

@ -0,0 +1,7 @@
# pack
:::{warning}
this page is under construction
:::
Pack circuit with VPR.

View file

@ -0,0 +1,7 @@
# place
:::{warning}
this page is under construction
:::
Place cells with VPR.

View file

@ -0,0 +1,11 @@
# place_constraints
:::{warning}
this page is under construction
:::
Move cell placement to satisfy constraints imposed by an architecture. (VPR-only)
:::{note}
This will be deprecated once VPR constraint system supports this functionality natively.
:::

View file

@ -0,0 +1,7 @@
# route
:::{warning}
this page is under construction
:::
Route a design with VPR.

View file

@ -0,0 +1,41 @@
# synth
The _synth_ module is meant to be used to execute YOSYS synthesis.
The module should guarantee the following outputs:
* `eblif`
* `fasm_extra` (can be empty)
* `json`
* `synth_json`
* `synth_log` (on demand)
For detailed information about these targets, please refer to
`docs/common targets and variables.md`.
Which files are generated, and how, depends on the TCL scripts executed
within YOSYS, and the scripts vary depending on the target platform. Due to this
design choice, it is required that the author of the flow definition parameterize
the `synth` module in a way that will **GUARANTEE** that the targets mentioned above
are generated upon a successful YOSYS run.
The setup of the `synth` module follows these specifications:
## Parameters
The `params` section of a stage configuration may contain a `produces` list.
The list should specify additional targets that will be generated
(`?` qualifier is allowed).
## Values
The `synth` module requires the following values:
* `tcl_scripts` (string, required): A path to a directory containing `synth.tcl`
and `conv.tcl` scripts that will be used by YOSYS.
* `read_verilog_args` (list[string | number], optional): If specified, the Verilog
  sources will be read using the `read_verilog` procedure with the options contained in
  this value.
* `yosys_tcl_env` (dict[string -> string | list[string]], required): A mapping that
  defines environment variables to be used within the TCL scripts. This
  should contain references to the module's inputs and outputs in order to guarantee
  the generation of the desired targets.
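Put together, a hypothetical `values` section for a `synth` stage could look like the sketch below (the paths and TCL variable names are assumptions):

```json
{
    "tcl_scripts": "${shareDir}/scripts/xc7",
    "read_verilog_args": ["-sv"],
    "yosys_tcl_env": {
        "TOP": "${top}",
        "OUT_EBLIF": "${:eblif}",
        "OUT_JSON": "${:json}"
    }
}
```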

View file

@ -10,6 +10,7 @@ The project aims to design tools that are highly extendable and multiplatform.
:align: center
.. toctree::
:caption: About F4PGA
@ -38,8 +39,18 @@ The project aims to design tools that are highly extendable and multiplatform.
.. toctree::
:caption: Development
development/building-docs
development/venv
contributing/building-docs
contributing/venv
.. toctree::
:caption: Python utils
:maxdepth: 2
f4pga/index
f4pga/Usage
f4pga/modules/index
f4pga/DevNotes
.. toctree::

View file

@ -1,3 +1,4 @@
myst-parser
sphinx>=4.5.0
sphinxcontrib-bibtex
https://github.com/f4pga/sphinx_f4pga_theme/archive/f4pga.zip#sphinx-f4pga-theme

666
f4pga/__init__.py Executable file
View file

@ -0,0 +1,666 @@
"""
F4PGA Build System
This tool allows for building FPGA targets (such as bitstreams) for any supported platform with just one simple command
and a project file.
The idea is that F4PGA wraps all the tools needed by different platforms in "modules", which define inputs/outputs and
various parameters.
This allows F4PGA to resolve dependencies for any target, provided that a "flow definition" file exists for such a target.
The flow definition file lists the modules available for that platform and may tweak some settings of those modules.
A basic example of using F4PGA:
$ f4pga build --platform arty_35 -t bitstream
This will make F4PGA attempt to create a bitstream for the arty_35 platform.
``flow.json`` is a flow configuration file, which should be created for a project that uses F4PGA.
It contains project-specific definitions needed within the flow, such as the list of source code files.
"""
from pathlib import Path
from argparse import Namespace
from sys import argv as sys_argv
from os import environ
from json import load as json_load, loads as json_loads
from typing import Iterable
from colorama import Fore, Style
from f4pga.common import (
ResolutionEnv,
fatal,
scan_modules,
set_verbosity_level,
sfprint,
sub as common_sub
)
from f4pga.module import *
from f4pga.cache import SymbiCache
from f4pga.flow_config import (
ProjectFlowConfig,
FlowConfig,
FlowDefinition,
open_project_flow_cfg,
verify_platform_name,
verify_stage
)
from f4pga.module_runner import *
from f4pga.module_inspector import get_module_info
from f4pga.stage import Stage
from f4pga.argparser import setup_argparser, get_cli_flow_config
SYMBICACHEPATH = '.symbicache'
binpath = str(Path(sys_argv[0]).resolve().parent.parent)
mypath = str(Path(__file__).resolve().parent)
share_dir_path = str(Path(f"{environ.get('F4PGA_INSTALL_DIR', '/usr/local')}/xc7/install/share/symbiflow").resolve())
class DependencyNotProducedException(Exception):
dep_name: str
provider: str
def __init__(self, dep_name: str, provider: str):
self.dep_name = dep_name
self.provider = provider
def __str__(self) -> str:
return f'Stage `{self.provider}` did not produce promised ' \
f'dependency `{self.dep_name}`'
def dep_value_str(dep: str):
return ':' + dep
def platform_stages(platform_flow, r_env):
""" Iterates over all stages available in a given flow. """
stage_options = platform_flow.get('stage_options')
for stage_name, modulestr in platform_flow['stages'].items():
mod_opts = stage_options.get(stage_name) if stage_options else None
yield Stage(stage_name, modulestr, mod_opts, r_env)
def req_exists(r):
""" Checks whether a dependency exists on a drive. """
if type(r) is str:
if not Path(r).is_file() and not Path(r).is_symlink() and not Path(r).is_dir():
return False
elif type(r) is list:
return not (False in map(req_exists, r))
else:
raise Exception(f'Requirements can be currently checked only for single '
f'paths, or path lists (reason: {r})')
return True
def map_outputs_to_stages(stages: 'list[Stage]'):
"""
Associates a stage with every possible output.
This is commonly referred to as `os_map` (output-stage map) throughout the code.
"""
os_map: 'dict[str, Stage]' = {} # Output-Stage map
for stage in stages:
for output in stage.produces:
if not os_map.get(output.name):
os_map[output.name] = stage
elif os_map[output.name] != stage:
raise Exception(f'Dependency `{output.name}` is generated by '
f'stage `{os_map[output.name].name}` and '
f'`{stage.name}`. Dependencies can have only one '
'provider at most.')
return os_map
def filter_existing_deps(deps: 'dict[str, ]', symbicache):
return [(n, p) for n, p in deps.items() \
if req_exists(p)] # and not dep_differ(p, symbicache)]
def get_stage_values_override(og_values: dict, stage: Stage):
values = og_values.copy()
values.update(stage.value_ovds)
return values
def prepare_stage_io_input(stage: Stage):
return { 'params': stage.params } if stage.params is not None else {}
def prepare_stage_input(stage: Stage, platform_name: str, values: dict,
dep_paths: 'dict[str, ]', config_paths: 'dict[str, ]'):
takes = {}
for take in stage.takes:
paths = dep_paths.get(take.name)
if paths: # Some takes may be not required
takes[take.name] = paths
produces = {}
for prod in stage.produces:
if dep_paths.get(prod.name):
produces[prod.name] = dep_paths[prod.name]
elif config_paths.get(prod.name):
produces[prod.name] = config_paths[prod.name]
stage_mod_cfg = {
'takes': takes,
'produces': produces,
'values': values,
'platform': platform_name,
}
return stage_mod_cfg
def update_dep_statuses(paths, consumer: str, symbicache: SymbiCache):
if type(paths) is str:
return symbicache.update(paths, consumer)
elif type(paths) is list:
for p in paths:
return update_dep_statuses(p, consumer, symbicache)
elif type(paths) is dict:
for _, p in paths.items():
return update_dep_statuses(p, consumer, symbicache)
fatal(-1, 'WRONG PATHS TYPE')
def dep_differ(paths, consumer: str, symbicache: SymbiCache):
"""
Check if a dependency differs from its last version, lack of dependency is
treated as "differs"
"""
if type(paths) is str:
s = symbicache.get_status(paths, consumer)
if s == 'untracked':
symbicache.update(paths, consumer)
return symbicache.get_status(paths, consumer) != 'same'
elif type(paths) is list:
return True in [dep_differ(p, consumer, symbicache) for p in paths]
elif type(paths) is dict:
return True in [dep_differ(p, consumer, symbicache) \
for _, p in paths.items()]
return False
def dep_will_differ(target: str, paths, consumer: str,
os_map: 'dict[str, Stage]', run_stages: 'set[str]',
symbicache: SymbiCache):
"""
Check if a dependency or any of the dependencies it depends on differ from
their last versions.
"""
provider = os_map.get(target)
if provider:
return (provider.name in run_stages) or \
dep_differ(paths, consumer, symbicache)
return dep_differ(paths, consumer, symbicache)
def _print_unreachable_stage_message(provider: Stage, take: str):
sfprint(0, ' Stage '
f'`{Style.BRIGHT + provider.name + Style.RESET_ALL}` is '
'unreachable due to unmet dependency '
f'`{Style.BRIGHT + take.name + Style.RESET_ALL}`')
def config_mod_runctx(stage: Stage, platform_name: str, values: 'dict[str, ]',
dep_paths: 'dict[str, str | list[str]]',
config_paths: 'dict[str, str | list[str]]'):
config = prepare_stage_input(stage, platform_name, values,
dep_paths, config_paths)
return ModRunCtx(share_dir_path, binpath, config)
class Flow:
""" Describes a complete, configured flow, ready for execution. """
# Dependency to build
target: str
# Values in global scope
cfg: FlowConfig
# dependency-producer map
os_map: 'dict[str, Stage]'
# Paths resolved for dependencies
dep_paths: 'dict[str, str | list[str]]'
# Explicit configs for dependency paths
# config_paths: 'dict[str, str | list[str]]'
# Stages that need to be run
run_stages: 'set[str]'
# Number of stages that relied on outdated version of a (checked) dependency
deps_rebuilds: 'dict[str, int]'
symbicache: 'SymbiCache | None'
flow_cfg: FlowConfig
def __init__(self, target: str, cfg: FlowConfig,
symbicache: 'SymbiCache | None'):
self.target = target
self.os_map = map_outputs_to_stages(cfg.stages.values())
explicit_deps = cfg.get_dependency_overrides()
# print(explicit_deps)
self.dep_paths = dict(filter_existing_deps(explicit_deps, symbicache))
self.run_stages = set()
self.symbicache = symbicache
self.cfg = cfg
self.deps_rebuilds = {}
self._resolve_dependencies(self.target, set())
def _dep_will_differ(self, dep: str, paths, consumer: str):
if not self.symbicache: # Handle --nocache mode
return True
return dep_will_differ(dep, paths, consumer,
self.os_map, self.run_stages,
self.symbicache)
def _resolve_dependencies(self, dep: str, stages_checked: 'set[str]'):
# Initialize the dependency status if necessary
if self.deps_rebuilds.get(dep) is None:
self.deps_rebuilds[dep] = 0
# Check if an explicit dependency is already resolved
paths = self.dep_paths.get(dep)
if paths and not self.os_map.get(dep):
return
# Check if a stage can provide the required dependency
provider = self.os_map.get(dep)
if not provider or provider.name in stages_checked:
return
# TODO: Check if the dependency is "on-demand" and force it in provider's
# config if it is.
for take in provider.takes:
self._resolve_dependencies(take.name, stages_checked)
# If any of the required dependencies is unavailable, then the
# provider stage cannot be run
take_paths = self.dep_paths.get(take.name)
# Add input path to values (dirty hack)
provider.value_overrides[dep_value_str(take.name)] = take_paths
if not take_paths and take.spec == 'req':
_print_unreachable_stage_message(provider, take)
return
if self._dep_will_differ(take.name, take_paths, provider.name):
sfprint(2, f'{take.name} is causing rebuild for {provider.name}')
self.run_stages.add(provider.name)
self.deps_rebuilds[take.name] += 1
stage_values = self.cfg.get_r_env(provider.name).values
modrunctx = config_mod_runctx(provider, self.cfg.platform,
stage_values, self.dep_paths,
self.cfg.get_dependency_overrides())
outputs = module_map(provider.module, modrunctx)
stages_checked.add(provider.name)
self.dep_paths.update(outputs)
for _, out_paths in outputs.items():
if (out_paths is not None) and not (req_exists(out_paths)):
self.run_stages.add(provider.name)
# Verify module's outputs and add paths as values.
outs = outputs.keys()
# print(outs)
for o in provider.produces:
if o.name not in outs:
if o.spec == 'req' or (o.spec == 'demand' and \
o.name in self.cfg.get_dependency_overrides().keys()):
fatal(-1, f'Module {provider.name} did not produce a mapping '
f'for a required output `{o.name}`')
else:
# Remove an on-demand/optional output that is not produced
# from os_map.
self.os_map.pop(o.name)
# Add a value for the output (dirty ack yet again)
o_path = outputs.get(o.name)
if o_path is not None:
provider.value_overrides[dep_value_str(o.name)] = \
outputs.get(o.name)
def print_resolved_dependencies(self, verbosity: int):
deps = list(self.deps_rebuilds.keys())
deps.sort()
for dep in deps:
status = Fore.RED + '[X]' + Fore.RESET
source = Fore.YELLOW + 'MISSING' + Fore.RESET
paths = self.dep_paths.get(dep)
if paths:
exists = req_exists(paths)
provider = self.os_map.get(dep)
if provider and provider.name in self.run_stages:
if exists:
status = Fore.YELLOW + '[R]' + Fore.RESET
else:
status = Fore.YELLOW + '[S]' + Fore.RESET
source = f'{Fore.BLUE + self.os_map[dep].name + Fore.RESET} ' \
f'-> {paths}'
elif exists:
if self.deps_rebuilds[dep] > 0:
status = Fore.GREEN + '[N]' + Fore.RESET
else:
status = Fore.GREEN + '[O]' + Fore.RESET
source = paths
elif self.os_map.get(dep):
status = Fore.RED + '[U]' + Fore.RESET
source = \
f'{Fore.BLUE + self.os_map[dep].name + Fore.RESET} -> ???'
sfprint(verbosity, f' {Style.BRIGHT + status} '
f'{dep + Style.RESET_ALL}: {source}')
def _build_dep(self, dep):
paths = self.dep_paths.get(dep)
provider = self.os_map.get(dep)
run = (provider.name in self.run_stages) if provider else False
if not paths:
sfprint(2, f'Dependency {dep} is unresolved.')
return False
if req_exists(paths) and not run:
return True
else:
assert(provider)
any_dep_differ = False if self.symbicache else True
for p_dep in provider.takes:
if not self._build_dep(p_dep.name):
assert (p_dep.spec != 'req')
continue
if self.symbicache:
any_dep_differ |= \
update_dep_statuses(self.dep_paths[p_dep.name],
provider.name, self.symbicache)
# If the dependencies remained the same, consider the dep up to date.
# For example, when changing a comment in Verilog source code,
# the initial dependency resolution will report the need for a complete
# rebuild; however, after the synthesis stage, the generated eblif
# will remain the same, making it unnecessary to continue the
# rebuild process.
if (not any_dep_differ) and req_exists(paths):
sfprint(2, f'Skipping rebuild of `'
f'{Style.BRIGHT + dep + Style.RESET_ALL}` because all '
f'of its dependencies remained unchanged')
return True
stage_values = self.cfg.get_r_env(provider.name).values
modrunctx = config_mod_runctx(provider, self.cfg.platform,
stage_values, self.dep_paths,
self.cfg.get_dependency_overrides())
module_exec(provider.module, modrunctx)
self.run_stages.discard(provider.name)
if not req_exists(paths):
raise DependencyNotProducedException(dep, provider.name)
return True
def execute(self):
self._build_dep(self.target)
if self.symbicache:
update_dep_statuses(self.dep_paths[self.target], '__target',
self.symbicache)
sfprint(0, f'Target `{Style.BRIGHT + self.target + Style.RESET_ALL}` '
f'-> {self.dep_paths[self.target]}')
def display_dep_info(stages: 'Iterable[Stage]'):
sfprint(0, 'Platform dependencies/targets:')
longest_out_name_len = 0
for stage in stages:
for out in stage.produces:
l = len(out.name)
if l > longest_out_name_len:
longest_out_name_len = l
desc_indent = longest_out_name_len + 7
nl_indentstr = '\n' + ' ' * desc_indent
for stage in stages:
for out in stage.produces:
pname = Style.BRIGHT + out.name + Style.RESET_ALL
indent = ' ' * (desc_indent - len(pname) + 3)
specstr = '???'
if out.spec == 'req':
specstr = f'{Fore.BLUE}guaranteed{Fore.RESET}'
elif out.spec == 'maybe':
specstr = f'{Fore.YELLOW}not guaranteed{Fore.RESET}'
elif out.spec == 'demand':
specstr = f'{Fore.RED}on-demand{Fore.RESET}'
pgen = f'{Style.DIM}stage: `{stage.name}`, '\
f'spec: {specstr}{Style.RESET_ALL}'
pdesc = stage.meta[out.name].replace('\n', nl_indentstr)
sfprint(0, f' {Style.BRIGHT + out.name + Style.RESET_ALL}:'
f'{indent}{pdesc}{nl_indentstr}{pgen}')
def display_stage_info(stage: Stage):
if stage is None:
sfprint(0, 'Stage does not exist')
sfbuild_fail()
return
sfprint(0, f'Stage `{Style.BRIGHT}{stage.name}{Style.RESET_ALL}`:')
sfprint(0, f' Module: `{Style.BRIGHT}{stage.module.name}{Style.RESET_ALL}`')
sfprint(0, '  Module info:')
mod_info = get_module_info(stage.module)
mod_info = '\n '.join(mod_info.split('\n'))
sfprint(0, f' {mod_info}')
sfbuild_done_str = Style.BRIGHT + Fore.GREEN + 'DONE'
sfbuild_silent = 0
def sfbuild_fail():
global sfbuild_done_str
sfbuild_done_str = Style.BRIGHT + Fore.RED + 'FAILED'
def sfbuild_done():
sfprint(1, f'f4pga: {sfbuild_done_str}'
f'{Style.RESET_ALL + Fore.RESET}')
exit(0)
def setup_resolution_env():
""" Sets up a ResolutionEnv with sfbuild's default built-ins. """
r_env = ResolutionEnv({
'shareDir': share_dir_path,
'binDir': str((Path(share_dir_path) / '../../bin').resolve())
})
def _noisy_warnings():
"""
Set up the noisy warnings log and return its filename.
"""
environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings.log'
return 'noisy_warnings.log'
def _generate_values():
"""
Generate initial values, available in configs.
"""
return {
'prjxray_db': common_sub('prjxray-config').decode().replace('\n', ''),
'python3': common_sub('which', 'python3').decode().replace('\n', ''),
'noisyWarnings': _noisy_warnings()
}
r_env.add_values(_generate_values())
return r_env
def open_project_flow_config(path: str) -> ProjectFlowConfig:
try:
flow_cfg = open_project_flow_cfg(path)
except FileNotFoundError:
fatal(-1, 'The provided flow configuration file does not exist')
return flow_cfg
def verify_platform_stage_params(flow_cfg: FlowConfig,
platform: 'str | None' = None,
stage: 'str | None' = None):
if platform:
if not verify_platform_name(platform, mypath):
sfprint(0, f'Platform `{platform}` is unsupported.')
return False
if platform not in flow_cfg.platforms():
sfprint(0, f'Platform `{platform}` is not in project.')
return False
if stage:
if not verify_stage(platform, stage, mypath):
sfprint(0, f'Stage `{stage}` is invalid.')
sfbuild_fail()
return False
return True
def get_platform_name_for_part(part_name: str):
"""
Get the name identifying the platform setup required for a specific chip.
The distinction exists because many chips with different names differ only
in the type of package they use.
"""
with (Path(mypath) / 'part_db.json').open('r') as rfptr:
return json_load(rfptr).get(part_name.upper())
def cmd_build(args: Namespace):
""" sfbuild's `build` command implementation """
project_flow_cfg: ProjectFlowConfig = None
platform = args.platform
if platform is None:
if args.part:
platform = get_platform_name_for_part(args.part)
if args.flow:
project_flow_cfg = open_project_flow_config(args.flow)
elif platform is not None:
project_flow_cfg = ProjectFlowConfig('.temp.flow.json')
project_flow_cfg.flow_cfg = get_cli_flow_config(args, platform)
if platform is None and project_flow_cfg is not None:
platform = project_flow_cfg.get_default_platform()
if platform is None:
fatal(-1, 'You have to specify a platform name or a part name, or '
'configure a default platform.')
if platform is None or project_flow_cfg is None:
fatal(-1, 'No configuration was provided. Use `--flow`, `--platform` or '
'`--part` to configure the flow.')
platform_path = str(Path(mypath) / f'platforms/{platform}.json')
platform_def = None
try:
with open(platform_path) as platform_file:
platform_def = platform_file.read()
except FileNotFoundError:
fatal(-1, f'The platform flow definition file {platform_path} for the platform '
f'{platform} referenced in flow definition file {args.flow} '
'cannot be found.')
r_env = setup_resolution_env()
sfprint(2, 'Scanning modules...')
scan_modules(mypath)
flow_definition_dict = json_loads(platform_def)
flow_def = FlowDefinition(flow_definition_dict, r_env)
flow_cfg = FlowConfig(project_flow_cfg, flow_def, platform)
if len(flow_cfg.stages) == 0:
fatal(-1, 'Platform flow does not define any stage')
if args.info:
display_dep_info(flow_cfg.stages.values())
sfbuild_done()
if args.stageinfo:
display_stage_info(flow_cfg.stages.get(args.stageinfo[0]))
sfbuild_done()
target = args.target
if target is None:
target = project_flow_cfg.get_default_target(platform)
if target is None:
fatal(-1, 'Please specify desired target using `--target` option '
'or configure a default target.')
flow = Flow(
target=target,
cfg=flow_cfg,
symbicache=SymbiCache(SYMBICACHEPATH) if not args.nocache else None
)
dep_print_verbosity = 0 if args.pretend else 2
sfprint(dep_print_verbosity, '\nProject status:')
flow.print_resolved_dependencies(dep_print_verbosity)
sfprint(dep_print_verbosity, '')
if args.pretend:
sfbuild_done()
try:
flow.execute()
except Exception as e:
sfprint(0, e)
sfbuild_fail()
if flow.symbicache:
flow.symbicache.save()
def cmd_show_dependencies(args: Namespace):
""" sfbuild's `showd` command implementation """
flow_cfg = open_project_flow_config(args.flow)
if not verify_platform_stage_params(flow_cfg, args.platform):
sfbuild_fail()
return
platform_overrides: 'set | None' = None
if args.platform is not None:
platform_overrides = \
set(flow_cfg.get_dependency_platform_overrides(args.platform).keys())
display_list = []
raw_deps = flow_cfg.get_dependencies_raw(args.platform)
for dep_name, dep_paths in raw_deps.items():
prstr: str
if (platform_overrides is not None) and (dep_name in platform_overrides):
prstr = f'{Style.DIM}({args.platform}){Style.RESET_ALL} ' \
f'{Style.BRIGHT + dep_name + Style.RESET_ALL}: {dep_paths}'
else:
prstr = f'{Style.BRIGHT + dep_name + Style.RESET_ALL}: {dep_paths}'
display_list.append((dep_name, prstr))
display_list.sort(key = lambda p: p[0])
for _, prstr in display_list:
sfprint(0, prstr)
set_verbosity_level(-1)
def main():
parser = setup_argparser()
args = parser.parse_args()
set_verbosity_level(args.verbose - (1 if args.silent else 0))
if args.command == 'build':
cmd_build(args)
sfbuild_done()
if args.command == 'showd':
cmd_show_dependencies(args)
sfbuild_done()
sfprint(0, 'Please use a command.\nUse `--help` flag to learn more.')
sfbuild_done()
if __name__ == '__main__':
main()

301
f4pga/argparser.py Normal file
View file

@ -0,0 +1,301 @@
from argparse import ArgumentParser, Namespace
from re import finditer as re_finditer
def _add_flow_arg(parser: ArgumentParser):
parser.add_argument(
'-f',
'--flow',
metavar='flow_path',
type=str,
help='Path to flow definition file'
)
def _setup_build_parser(parser: ArgumentParser):
_add_flow_arg(parser)
parser.add_argument(
'-t',
'--target',
metavar='target_name',
type=str,
help='Perform stages necessary to acquire target'
)
parser.add_argument(
'--platform',
metavar='platform_name',
help='Target platform_name'
)
parser.add_argument(
'-P',
'--pretend',
action='store_true',
help='Show dependency resolution without executing flow'
)
parser.add_argument(
'-i',
'--info',
action='store_true',
help='Display info about available targets'
)
parser.add_argument(
'-c',
'--nocache',
action='store_true',
help='Ignore caching and rebuild everything up to the target.'
)
parser.add_argument(
'-S',
'--stageinfo',
nargs=1,
metavar='stage_name',
help='Display info about stage'
)
parser.add_argument(
'-p',
'--part',
metavar='part_name',
help='Name of the target chip'
)
parser.add_argument(
'--dep',
'-D',
action='append',
default=[]
)
parser.add_argument(
'--val',
'-V',
action='append',
default=[]
)
def _setup_show_dep_parser(parser: ArgumentParser):
parser.add_argument(
'-p',
'--platform',
metavar='platform_name',
type=str,
help='Name of the platform (use to display platform-specific values.)'
)
parser.add_argument(
'-s',
'--stage',
metavar='stage_name',
type=str,
help='Name of the stage (use if you want to set the value only for that stage). Requires `-p`.'
)
_add_flow_arg(parser)
def setup_argparser():
"""
Set up argument parser for the program.
"""
parser = ArgumentParser(description='F4PGA Build System')
parser.add_argument(
'-v',
'--verbose',
action='count',
default=0
)
parser.add_argument(
'-s',
'--silent',
action='store_true'
)
subparsers = parser.add_subparsers(dest='command')
_setup_build_parser(subparsers.add_parser('build'))
show_dep = subparsers.add_parser('showd', description='Show the value(s) assigned to a dependency')
_setup_show_dep_parser(show_dep)
return parser
def _parse_depval(depvalstr: str):
"""
Parse a dependency or value definition of the form:
optional_stage_name.value_or_dependency_name=value
See `_parse_cli_value` for detail on how to pass different kinds of values.
"""
d = { 'name': None, 'stage': None, 'value': None }
splitted = list(_unescaped_separated('=', depvalstr))
if len(splitted) != 2:
raise Exception('Expected a single top-level `=` in the definition')
pathstr = splitted[0]
valstr = splitted[1]
path_components = pathstr.split('.')
if len(path_components) < 1:
raise Exception('Missing value')
d['name'] = path_components.pop()
if len(path_components) > 0:
d['stage'] = path_components.pop(0)
if len(path_components) > 0:
raise Exception('Too many path components')
d['value'] = _parse_cli_value(valstr)
return d
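# Illustrative examples of accepted definitions (names and paths below are
# hypothetical, chosen only to show the parsed shape):
#
#   _parse_depval('top=counter')
#     -> {'name': 'top', 'stage': None, 'value': 'counter'}
#   _parse_depval('synth.sources=[a.v,b.v]')
#     -> {'name': 'sources', 'stage': 'synth', 'value': ['a.v', 'b.v']}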
def _unescaped_matches(regexp: str, s: str, escape_chr='\\'):
"""
Find all occurrences of a pattern in a string that contains escape sequences.
Yields pairs of starting and ending indices of the pattern.
"""
noescapes = ''
# We remove all escape sequences from the string, so the pattern will match only with
# unescaped characters, but to map the results back to the string containing the
# escape sequences, we need to track the offsets by which the characters were
# shifted.
offsets = []
offset = 0
for sl in s.split(escape_chr):
if len(sl) <= 1:
continue
noescape = sl[(1 if offset != 0 else 0):]
for _ in noescape:
offsets.append(offset)
offset += 2
noescapes += noescape
iter = re_finditer(regexp, noescapes)
for m in iter:
start = m.start()
end = m.end()
off1 = start + offsets[start]
off2 = end + offsets[end]
yield off1, off2
def _unescaped_separated(regexp: str, s: str, escape_chr='\\'):
"""
Yield the substrings separated by unescaped matches of a pattern in a string that contains escape sequences.
"""
last_end = 0
for start, end in _unescaped_matches(regexp, s, escape_chr=escape_chr):
yield s[last_end:start]
last_end = end
if last_end < len(s):
yield s[last_end:]
else:
yield ''
def _parse_cli_value(s: str):
"""
Parse a value/dependency passed to CLI
CLI values are generated by the following context-free grammar:
S -> :str: (string/number value)
S -> [I]
S -> {D}
I -> I,I
I -> S
D -> D,D
D -> K:S
K -> :str:
Starting symbol = S
Terminal symbols: '[', ']', '{', '}', ':', ',', :str:
(:str: represents any string where terminals are escaped)
TODO: The current implementation of the parser cannot handle nested
structures. There is currently no real use case for nested values,
so this limitation is acceptable for now.
"""
if len(s) == 0:
return ''
# List
if s[0] == '[':
if len(s) < 2 or s[len(s)-1] != ']':
raise Exception('Missing \']\' delimiter')
inner = s[1:(len(s)-1)]
if inner == '':
return []
return [_parse_cli_value(v) for v in _unescaped_separated(',', inner)]
# Dictionary
if s[0] == '{':
if len(s) < 2 or s[len(s)-1] != '}':
raise Exception('Missing \'}\' delimiter')
d = {}
inner = s[1:(len(s)-1)]
if inner == '':
return {}
for kv in _unescaped_separated(',', inner):
k_v = list(_unescaped_separated(':', kv))
if len(k_v) < 2:
raise Exception('Missing value in dictionary entry')
if len(k_v) > 2:
raise Exception('Unexpected \':\' token')
key = k_v[0]
value = _parse_cli_value(k_v[1])
d[key] = value
return d
# Bool hack
if s == '\\True':
return True
if s == '\\False':
return False
# Number hack
if len(s) >= 3 and s[0:2] == '\\N':
return int(s[2:])
# String
return s.replace('\\', '')
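# Illustrative examples of the grammar above in practice:
#
#   _parse_cli_value('abc')           -> 'abc'
#   _parse_cli_value('[a,b]')         -> ['a', 'b']
#   _parse_cli_value('{k1:v1,k2:v2}') -> {'k1': 'v1', 'k2': 'v2'}
#   _parse_cli_value('\\True')        -> True
#   _parse_cli_value('\\N42')         -> 42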
def get_cli_flow_config(args: Namespace, platform: str):
def create_defdict():
return {
'dependencies': {},
'values': {},
}
platform_flow_config = create_defdict()
def add_entries(arglist: 'list[str]', dict_name: str):
for value_def in (_parse_depval(cliv) for cliv in arglist):
stage = value_def['stage']
if stage is None:
platform_flow_config[dict_name][value_def['name']] = \
value_def['value']
else:
if platform_flow_config.get(stage) is None:
platform_flow_config[stage] = create_defdict()
platform_flow_config[stage][dict_name][value_def['name']] = \
value_def['value']
add_entries(args.dep, 'dependencies')
add_entries(args.val, 'values')
return { platform: platform_flow_config }
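# For example (illustrative flags and paths), running:
#
#   f4pga build --platform xc7a50t -D build_dir=build/arty_35 -V top=top
#
# yields a config structure equivalent to:
#
#   {
#       'xc7a50t': {
#           'dependencies': {'build_dir': 'build/arty_35'},
#           'values': {'top': 'top'}
#       }
#   }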

103
f4pga/cache.py Executable file
View file

@ -0,0 +1,103 @@
from pathlib import Path
from zlib import adler32 as zlib_adler32
from json import dump as json_dump, load as json_load, JSONDecodeError
class SymbiCache:
"""
`SymbiCache` is used to track changes among dependencies and keep the status of the files on a persistent storage.
Files which are tracked get their checksums calculated and stored in a file.
If a file's checksum differs from the one saved previously, the file is considered changed.
"""
hashes: 'dict[str, dict[str, str]]'
status: 'dict[str, str]'
cachefile_path: str
def __init__(self, cachefile_path):
"""
`cachefile_path` - path to a file used for persistent storage of checksums.
"""
self.status = {}
self.cachefile_path = cachefile_path
self.load()
def _try_pop_consumer(self, path: str, consumer: str):
if self.status.get(path) and self.status[path].get(consumer):
self.status[path].pop(consumer)
if len(self.status[path]) == 0:
self.status.pop(path)
if self.hashes.get(path) and self.hashes[path].get(consumer):
self.hashes[path].pop(consumer)
if len(self.hashes[path]) == 0:
self.hashes.pop(path)
def _try_push_consumer_hash(self, path: str, consumer: str, hash):
if not self.hashes.get(path):
self.hashes[path] = {}
self.hashes[path][consumer] = hash
def _try_push_consumer_status(self, path: str, consumer: str, status):
if not self.status.get(path):
self.status[path] = {}
self.status[path][consumer] = status
def update(self, path: str, consumer: str):
""" Add/remove a file to.from the tracked files, update checksum if necessary and calculate status.
Multiple hashes are stored per file, one for each consumer module.
"__target" is used as a convention for a "fake" consumer in case the file is requested as a target and not used
by a module within the active flow.
"""
isdir = Path(path).is_dir()
if not (Path(path).is_file() or Path(path).is_symlink() or isdir):
self._try_pop_consumer(path, consumer)
return True
hash = 0 # Directories always get '0' hash.
if not isdir:
with Path(path).open('rb') as rfptr:
hash = str(zlib_adler32(rfptr.read()))
last_hashes = self.hashes.get(path)
last_hash = None if last_hashes is None else last_hashes.get(consumer)
if hash != last_hash:
self._try_push_consumer_status(path, consumer, 'changed')
self._try_push_consumer_hash(path, consumer, hash)
return True
self._try_push_consumer_status(path, consumer, 'same')
return False
def get_status(self, path: str, consumer: str):
""" Get status for a file with a given path.
Returns 'untracked' if the file is not tracked or hasn't been processed by the `update` procedure before calling
`get_status`.
"""
statuses = self.status.get(path)
if not statuses:
return 'untracked'
status = statuses.get(consumer)
if not status:
return 'untracked'
return status
def load(self):
"""Loads cache's state from the persistent storage"""
try:
with Path(self.cachefile_path).open('r') as rfptr:
self.hashes = json_load(rfptr)
except JSONDecodeError:
print("""WARNING: .symbicache is corrupted!
This will cause the flow to re-execute from the beginning.""")
self.hashes = {}
except FileNotFoundError:
print("""Couldn\'t open .symbicache cache file.
This will cause flow to re-execute from the beggining.""")
self.hashes = {}
def save(self):
"""Saves cache's state to the persistent storage."""
with Path(self.cachefile_path).open('w') as wfptr:
json_dump(self.hashes, wfptr, indent=4)
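# A minimal usage sketch (hypothetical paths):
#
#   cache = SymbiCache('.symbicache')
#   if cache.update('build/top.eblif', 'pack'):
#       print('top.eblif has changed since `pack` last consumed it')
#   cache.save()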

295
f4pga/common.py Normal file
View file

@ -0,0 +1,295 @@
from pathlib import Path
from os import environ, listdir as os_listdir
from sys import argv as sys_argv
from argparse import Namespace
from shutil import move as sh_mv
from subprocess import run
from re import match as re_match, finditer as re_finditer
def decompose_depname(name: str):
spec = 'req'
specchar = name[len(name) - 1]
if specchar == '?':
spec = 'maybe'
elif specchar == '!':
spec = 'demand'
if spec != 'req':
name = name[:len(name) - 1]
return name, spec
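# Examples of qualifier decomposition:
#   'eblif'      -> ('eblif', 'req')
#   'sdc?'       -> ('sdc', 'maybe')
#   'synth_log!' -> ('synth_log', 'demand')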
def with_qualifier(name: str, q: str) -> str:
if q == 'req':
return decompose_depname(name)[0]
if q == 'maybe':
return decompose_depname(name)[0] + '?'
if q == 'demand':
return decompose_depname(name)[0] + '!'
_sfbuild_module_collection_name_to_path = {}
def scan_modules(mypath: str):
global _sfbuild_module_collection_name_to_path
sfbuild_home = mypath
sfbuild_home_dirs = os_listdir(sfbuild_home)
sfbuild_module_dirs = \
[dir for dir in sfbuild_home_dirs if re_match('.*_modules$', dir)]
_sfbuild_module_collection_name_to_path = dict([
(
re_match('(.*)_modules$', moddir).groups()[0],
str(Path(sfbuild_home) / moddir)
)
for moddir in sfbuild_module_dirs
])
def resolve_modstr(modstr: str):
"""
Resolves module location from modulestr.
"""
sl = modstr.split(':')
if len(sl) > 2:
raise Exception('Incorrect module syntax. Expected one \':\' or one \'::\'')
if len(sl) < 2:
return modstr
collection_name = sl[0]
module_filename = sl[1] + '.py'
col_path = _sfbuild_module_collection_name_to_path.get(collection_name)
if not col_path:
fatal(-1, f'Module collection {collection_name} does not exist')
return str(Path(col_path) / module_filename)
def deep(fun):
"""
Create a recursive string transform function for 'str | list | dict', i.e. a dependency.
"""
def d(paths, *args, **kwargs):
if type(paths) is str:
return fun(paths)
elif type(paths) is list:
return [d(p) for p in paths]
elif type(paths) is dict:
return dict([(k, d(p)) for k, p in paths.items()])
return d
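# For example, a path transform wrapped with `deep` applies uniformly to any
# dependency shape (illustrative values):
#
#   to_upper = deep(lambda p: p.upper())
#   to_upper('a.v')            -> 'A.V'
#   to_upper(['a.v', 'b.v'])   -> ['A.V', 'B.V']
#   to_upper({'src': 'a.v'})   -> {'src': 'A.V'}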
class VprArgs:
"""
Represents argument list for VPR (Versatile Place and Route).
"""
arch_dir: str
arch_def: str
lookahead: str
rr_graph: str
place_delay: str
device_name: str
eblif: str
optional: list
def __init__(self, share: str, eblif, values: Namespace,
sdc_file: 'str | None' = None,
vpr_extra_opts: 'list | None' = None):
self.arch_dir = str(Path(share) / 'arch')
self.arch_def = values.arch_def
self.lookahead = values.rr_graph_lookahead_bin
self.rr_graph = values.rr_graph_real_bin
self.place_delay = values.vpr_place_delay
self.device_name = values.vpr_grid_layout_name
self.eblif = str(Path(eblif).resolve())
if values.vpr_options is not None:
self.optional = options_dict_to_list(values.vpr_options)
else:
self.optional = []
if vpr_extra_opts is not None:
self.optional += vpr_extra_opts
if sdc_file is not None:
self.optional += ['--sdc_file', sdc_file]
class SubprocessException(Exception):
return_code: int
def sub(*args, env=None, cwd=None):
"""
Execute a subprocess and return its stdout; on a non-zero return code, print its stderr and exit.
"""
out = run(args, capture_output=True, env=env, cwd=cwd)
if out.returncode != 0:
print(f'[ERROR]: {args[0]} non-zero return code.\n'
f'stderr:\n{out.stderr.decode()}\n\n'
)
exit(out.returncode)
return out.stdout
def vpr(mode: str, vprargs: VprArgs, cwd=None):
"""
Execute `vpr`.
"""
modeargs = []
if mode == 'pack':
modeargs = ['--pack']
elif mode == 'place':
modeargs = ['--place']
elif mode == 'route':
modeargs = ['--route']
return sub(*([
'vpr',
vprargs.arch_def,
vprargs.eblif,
'--device', vprargs.device_name,
'--read_rr_graph', vprargs.rr_graph,
'--read_router_lookahead', vprargs.lookahead,
'--read_placement_delay_lookup', vprargs.place_delay
] + modeargs + vprargs.optional), cwd=cwd)
_vpr_specific_values = [
'arch_def',
'rr_graph_lookahead_bin',
'rr_graph_real_bin',
'vpr_place_delay',
'vpr_grid_layout_name',
'vpr_options?'
]
def vpr_specific_values():
global _vpr_specific_values
return _vpr_specific_values
def options_dict_to_list(opt_dict: dict):
"""
Converts a dictionary of named options for CLI program to a list.
Example: { "option_name": "value" } -> [ "--option_name", "value" ]
"""
opts = []
for key, val in opt_dict.items():
opts.append('--' + key)
if not(type(val) is list and val == []):
opts.append(str(val))
return opts
def noisy_warnings(device):
"""
Set the noisy warnings log filename for the given device.
"""
environ['OUR_NOISY_WARNINGS'] = f'noisy_warnings-{device}_pack.log'
def my_path():
"""
Get the directory of the currently running script.
"""
return str(Path(sys_argv[0]).resolve().parent)
def save_vpr_log(filename, build_dir=''):
"""
Save the VPR log (moves the default output file to the desired path).
"""
sh_mv(str(Path(build_dir) / 'vpr_stdout.log'), filename)
def fatal(code, message):
"""
Print a message informing about an error that has occurred and abort by raising an exception.
The return code is kept for compatibility but is currently unused.
"""
raise Exception(f'[FATAL ERROR]: {message}')
class ResolutionEnv:
"""
ResolutionEnv is used to hold onto mappings for variables used in flow and perform text substitutions using those
variables.
Variables can be referred to in any "resolvable" string using the following syntax: 'Some static text ${variable_name}'.
The '${variable_name}' part will be replaced by the value associated with the name 'variable_name', if such a mapping
exists.
"""
values: dict
def __init__(self, values=None):
# Avoid a shared mutable default argument.
self.values = values if values is not None else {}
def __copy__(self):
return ResolutionEnv(self.values.copy())
def resolve(self, s, final=False):
"""
Perform resolution on `s`.
`s` can be a `str`, a `dict` with arbitrary keys and resolvable values, or a `list` of resolvable values.
final=True - resolve any unknown variables into ''
This is a hack and probably should be removed in the future
"""
if type(s) is str:
match_list = list(re_finditer('\$\{([^${}]*)\}', s))
# Assumption: re_finditer finds matches in a left-to-right order
match_list.reverse()
for match in match_list:
match_str = match.group(1)
match_str = match_str.replace('?', '')
v = self.values.get(match_str)
if not v:
if final:
v = ''
else:
continue
span = match.span()
if type(v) is str:
s = s[:span[0]] + v + s[span[1]:]
elif type(v) is list: # Assume it's a list of strings
ns = list([s[:span[0]] + ve + s[span[1]:] for ve in v])
s = ns
elif type(s) is list:
s = list(map(self.resolve, s))
elif type(s) is dict:
s = dict([(k, self.resolve(v)) for k, v in s.items()])
return s
def add_values(self, values: dict):
"""
Add mappings from `values`.
"""
for k, v in values.items():
self.values[k] = self.resolve(v)
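# A short illustration (hypothetical values):
#
#   r_env = ResolutionEnv({'shareDir': '/opt/f4pga/share'})
#   r_env.resolve('${shareDir}/arch')          -> '/opt/f4pga/share/arch'
#   r_env.resolve(['${shareDir}/a', 'plain'])  -> ['/opt/f4pga/share/a', 'plain']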
verbosity_level = 0
def sfprint(verbosity: int, *args):
"""
Print with regards to currently set verbosity level.
"""
global verbosity_level
if verbosity <= verbosity_level:
print(*args)
def set_verbosity_level(level: int):
global verbosity_level
verbosity_level = level
def get_verbosity_level() -> int:
global verbosity_level
return verbosity_level

View file

@ -0,0 +1,79 @@
from pathlib import Path
from shutil import move as sh_mv
from f4pga.common import vpr_specific_values, VprArgs, get_verbosity_level, sub
from f4pga.module import Module, ModuleContext
class FasmModule(Module):
def map_io(self, ctx: ModuleContext):
build_dir = str(Path(ctx.takes.eblif).parent)
return {
'fasm': f'{(Path(build_dir)/ctx.values.top)!s}.fasm'
}
def execute(self, ctx: ModuleContext):
build_dir = str(Path(ctx.takes.eblif).parent)
vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values)
optional = []
if ctx.values.pnr_corner is not None:
optional += ['--pnr_corner', ctx.values.pnr_corner]
if ctx.takes.sdc:
optional += ['--sdc', ctx.takes.sdc]
s = [
'genfasm',
vprargs.arch_def,
str(Path(ctx.takes.eblif).resolve()),
'--device',
vprargs.device_name,
'--read_rr_graph',
vprargs.rr_graph
] + vprargs.optional
if get_verbosity_level() >= 2:
yield 'Generating FASM...\n ' + ' '.join(s)
else:
yield 'Generating FASM...'
sub(*s, cwd=build_dir)
default_fasm_output_name = f'{(Path(build_dir)/ctx.values.top)!s}.fasm'
if default_fasm_output_name != ctx.outputs.fasm:
sh_mv(default_fasm_output_name, ctx.outputs.fasm)
if ctx.takes.fasm_extra:
yield 'Appending extra FASM...'
# Read both files before opening the output for writing, since opening
# `ctx.outputs.fasm` in 'w' mode truncates it.
with open(ctx.outputs.fasm, 'r') as fasm_file:
fasm = fasm_file.read()
with open(ctx.takes.fasm_extra, 'r') as fasm_extra_file:
fasm_extra = fasm_extra_file.read()
with open(ctx.outputs.fasm, 'w') as wfptr:
wfptr.write(f"{fasm}\n{fasm_extra}")
else:
yield 'No extra FASM to append'
def __init__(self, _):
self.name = 'fasm'
self.no_of_phases = 2
self.takes = [
'eblif',
'net',
'place',
'route',
'fasm_extra?',
'sdc?'
]
self.produces = [ 'fasm' ]
self.values = [
'device',
'top',
'pnr_corner?'
] + vpr_specific_values()
self.prod_meta = {
'fasm': 'FPGA assembly file'
}
ModuleClass = FasmModule

View file

@ -0,0 +1,288 @@
"""
This module is intended for wrapping simple scripts without rewriting them as
an sfbuild module. This is mostly to maintain compatibility with workflows
that do not use sfbuild and instead rely on legacy scripts.
Accepted module parameters:
* `stage_name` (string, optional): Name describing the stage
* `script` (string, mandatory): Path to the script to be executed
* `interpreter` (string, optional): Interpreter for the script
* `cwd` (string, optional): Current Working Directory for the script
* `outputs` (dict[string -> dict[string -> string]], mandatory):
A dict with output descriptions (dicts).
Keys name output dependencies.
* `mode` (string, mandatory): "file" or "stdout".
Describes how the output is grabbed from the script.
* `file` (string, required if `mode` is "file"): Name of the file generated by the script.
* `target` (string, required): Default name of the file of the generated dependency.
You can use all values available during the map_io stage.
Each input dependency also gets two extra values associated with it:
`:dependency_name[noext]`, which contains the path to the dependency with the extension (anything after the last ".")
removed, and `:dependency_name[dir]`, which contains the directory path of the dependency.
This is useful for deriving an output name from the input.
* `meta` (string, optional): Description of the output dependency.
* `inputs` (dict[string -> string | bool], mandatory):
A dict with input descriptions.
Key is either the name of a named argument or the position of an unnamed argument prefixed with "#" (e.g. "#1").
Positions are indexed from 1, as it's a convention that 0th argument is the path of the executed program.
Values are strings that can contain references to variables to be resolved after the project flow configuration is
loaded (that means they can reference values and dependencies which are to be set by the user).
All of the module's inputs are determined by the references used.
Thus dependency and value definitions are implicit.
If the value of the resolved string is empty and is associated with a named argument, the argument in question will be
skipped entirely.
This allows using optional dependencies.
To use a named argument as a flag instead, set it to `true`.
"""
# TODO: `environment` input kind
from pathlib import Path
from shutil import move as sh_mv
from re import match as re_match, finditer as re_finditer
from f4pga.common import decompose_depname, deep, get_verbosity_level, sub
from f4pga.module import Module, ModuleContext
def _get_param(params, name: str):
param = params.get(name)
if not param:
raise Exception(f'generic module wrapper parameters '
f'missing `{name}` field')
return param
def _parse_param_def(param_def: str):
if param_def[0] == '#':
return 'positional', int(param_def[1:])
elif param_def[0] == '$':
return 'environmental', param_def[1:]
return 'named', param_def
class InputReferences:
dependencies: 'set[str]'
values: 'set[str]'
def merge(self, other):
self.dependencies.update(other.dependencies)
self.values.update(other.values)
def __init__(self):
self.dependencies = set()
self.values = set()
def _get_input_references(input: str) -> InputReferences:
refs = InputReferences()
if type(input) is not str:
return refs
for match in re_finditer('\$\{([^${}]*)\}', input):
match_str = match.group(1)
if match_str[0] != ':':
refs.values.add(match_str)
continue
if len(match_str) < 2:
raise Exception('Dependency name must be at least 1 character long')
refs.dependencies.add(re_match('([^\\[\\]]*)', match_str[1:]).group(1))
return refs
def _make_noop1():
def noop(_):
return
return noop
def _tailcall1(self, fun):
def newself(arg, self=self, fun=fun):
fun(arg)
self(arg)
return newself
class GenericScriptWrapperModule(Module):
script_path: str
stdout_target: 'None | tuple[str, str]'
file_outputs: 'list[tuple[str, str, str]]'
interpreter: 'None | str'
cwd: 'None | str'
@staticmethod
def _add_extra_values_to_env(ctx: ModuleContext):
for take_name, take_path in vars(ctx.takes).items():
if take_path is not None:
ctx.r_env.values[f':{take_name}[noext]'] = deep(lambda p: str(Path(p).with_suffix('')))(take_path)
ctx.r_env.values[f':{take_name}[dir]'] = deep(lambda p: str(Path(p).parent.resolve()))(take_path)
def map_io(self, ctx: ModuleContext):
self._add_extra_values_to_env(ctx)
outputs = {}
for dep, _, out_path in self.file_outputs:
out_path_resolved = ctx.r_env.resolve(out_path, final=True)
outputs[dep] = out_path_resolved
if self.stdout_target:
out_path_resolved = \
ctx.r_env.resolve(self.stdout_target[1], final=True)
outputs[self.stdout_target[0]] = out_path_resolved
return outputs
def execute(self, ctx: ModuleContext):
self._add_extra_values_to_env(ctx)
cwd = ctx.r_env.resolve(self.cwd)
sub_args = [ctx.r_env.resolve(self.script_path, final=True)] \
+ self.get_args(ctx)
if self.interpreter:
sub_args = [ctx.r_env.resolve(self.interpreter, final=True)] + sub_args
sub_env = self.get_env(ctx)
# XXX: This may produce an incorrect string if arguments contain whitespace
# characters
cmd = ' '.join(sub_args)
if get_verbosity_level() >= 2:
yield f'Running script...\n {cmd}'
else:
yield 'Running an external script...'
data = sub(*sub_args, cwd=cwd, env=sub_env)
yield 'Writing outputs...'
if self.stdout_target:
target = ctx.r_env.resolve(self.stdout_target[1], final=True)
with open(target, 'wb') as f:
f.write(data)
for _, file, target in self.file_outputs:
file = ctx.r_env.resolve(file, final=True)
target = ctx.r_env.resolve(target, final=True)
if target != file:
sh_mv(file, target)
def _init_outputs(self, output_defs: 'dict[str, dict[str, str]]'):
self.stdout_target = None
self.file_outputs = []
for dep_name, output_def in output_defs.items():
dname, _ = decompose_depname(dep_name)
self.produces.append(dep_name)
meta = output_def.get('meta')
if type(meta) is str:
self.prod_meta[dname] = meta
mode = output_def.get('mode')
if type(mode) is not str:
raise Exception(f'Output mode for `{dep_name}` is not specified')
target = output_def.get('target')
if type(target) is not str:
raise Exception('`target` field is not specified')
if mode == 'file':
file = output_def.get('file')
if type(file) is not str:
raise Exception('Output file is not specified')
self.file_outputs.append((dname, file, target))
elif mode == 'stdout':
if self.stdout_target is not None:
raise Exception('stdout output is already specified')
self.stdout_target = dname, target
# A very functional approach
def _init_inputs(self, input_defs):
positional_args = []
named_args = []
env_vars = {}
refs = InputReferences()
get_args = _make_noop1()
get_env = _make_noop1()
for arg_code, input in input_defs.items():
param_kind, param = _parse_param_def(arg_code)
push = None
push_env = None
if param_kind == 'named':
def push_named(val: 'str | bool | int', param=param):
nonlocal named_args
if type(val) is bool:
named_args.append(f'--{param}')
else:
named_args += [f'--{param}', str(val)]
push = push_named
elif param_kind == 'environmental':
def push_environ(val: 'str | bool | int', param=param):
nonlocal env_vars
env_vars[param] = val
push_env = push_environ
else:
def push_positional(val: str, param=param):
nonlocal positional_args
positional_args.append((param, val))
push = push_positional
input_refs = _get_input_references(input)
refs.merge(input_refs)
if push is not None:
def push_q(ctx: ModuleContext, push=push, input=input):
val = ctx.r_env.resolve(input, final=True)
if val != '':
push(val)
get_args = _tailcall1(get_args, push_q)
else:
def push_q(ctx: ModuleContext, push_env=push_env, input=input):
val = ctx.r_env.resolve(input, final=True)
if val != '':
push_env(val)
get_env = _tailcall1(get_env, push_q)
def get_all_args(ctx: ModuleContext):
nonlocal get_args, positional_args, named_args
get_args(ctx)
positional_args.sort(key=lambda t: t[0])
pos = [ a for _, a in positional_args]
return named_args + pos
def get_all_env(ctx: ModuleContext):
nonlocal get_env, env_vars
get_env(ctx)
if len(env_vars.items()) == 0:
return None
return env_vars
setattr(self, 'get_args', get_all_args)
setattr(self, 'get_env', get_all_env)
for dep in refs.dependencies:
self.takes.append(dep)
for val in refs.values:
self.values.append(val)
def __init__(self, params):
stage_name = params.get('stage_name')
self.name = f"{'<unknown>' if stage_name is None else stage_name}-generic"
self.no_of_phases = 2
self.script_path = params.get('script')
self.interpreter = params.get('interpreter')
self.cwd = params.get('cwd')
self.takes = []
self.produces = []
self.values = []
self.prod_meta = {}
self._init_outputs(_get_param(params, 'outputs'))
self._init_inputs(_get_param(params, 'inputs'))
ModuleClass = GenericScriptWrapperModule

View file

@ -0,0 +1,107 @@
"""
Rename (i.e. change) dependencies and values of a module. This module wraps another
module, whose name is specified in `params.module`, and changes the names of the
dependencies and values it relies on. The parameters for the wrapped module can be
specified through the `params.params` dict. There are three mappings for the names:
* `params.rename_takes` - mapping for inputs ("takes")
* `params.rename_produces` - mapping for outputs ("products")
* `params.rename_values` - mapping for values
Keys represent the names visible to the wrapped module and values represent the
names visible to the modules outside.
Not specifying a mapping for a given entry will leave it with its original name.
---------------
Accepted module parameters:
* `module` (string, required)
* `params` (dict[string -> any], optional)
* `rename_takes` (dict[string -> string], optional)
* `rename_produces` (dict[string -> string], optional)
* `rename_values` (dict[string -> string], optional)
"""
from f4pga.common import *
from f4pga.module import Module, ModuleContext
from f4pga.module_runner import get_module
def _switch_keys(d: 'dict[str, ]', renames: 'dict[str, str]') -> 'dict[str, ]':
newd = {}
for k, v in d.items():
r = renames.get(k)
if r is not None:
newd[r] = v
else:
newd[k] = v
return newd
def _switchback_attrs(d: Namespace, renames: 'dict[str, str]') -> SimpleNamespace:
newn = SimpleNamespace()
for k, v in vars(d).items():
setattr(newn, k, v)
for k, r in renames.items():
if hasattr(newn, r):
v = getattr(newn, r)
delattr(newn, r)
setattr(newn, k, v)
return newn
def _switch_entries(l: 'list[str]', renames: 'dict[str, str]') -> 'list[str]':
newl = []
for e in l:
r = renames.get(e)
if r is not None:
_, q = decompose_depname(e)
newl.append(with_qualifier(r, q))
else:
newl.append(e)
return newl
def _or_empty_dict(d: 'dict | None'):
return d if d is not None else {}
class IORenameModule(Module):
module: Module
rename_takes: 'dict[str, str]'
rename_produces: 'dict[str, str]'
rename_values: 'dict[str, str]'
def map_io(self, ctx: ModuleContext):
newctx = ctx.shallow_copy()
newctx.takes = _switchback_attrs(ctx.takes, self.rename_takes)
newctx.values = _switchback_attrs(ctx.values, self.rename_values)
r = self.module.map_io(newctx)
return _switch_keys(r, self.rename_produces)
def execute(self, ctx: ModuleContext):
newctx = ctx.shallow_copy()
newctx.takes = _switchback_attrs(ctx.takes, self.rename_takes)
newctx.values = _switchback_attrs(ctx.values, self.rename_values)
newctx.outputs = _switchback_attrs(ctx.produces, self.rename_produces)
return self.module.execute(newctx)
def __init__(self, params):
mod_path = resolve_modstr(params["module"])
module_class = get_module(mod_path)
module: Module = module_class(params.get("params"))
self.rename_takes = _or_empty_dict(params.get("rename_takes"))
self.rename_produces = _or_empty_dict(params.get("rename_produces"))
self.rename_values = _or_empty_dict(params.get("rename_values"))
self.module = module
self.name = f'{module.name}-io_renamed'
self.no_of_phases = module.no_of_phases
self.takes = _switch_entries(module.takes, self.rename_takes)
self.produces = _switch_entries(module.produces, self.rename_produces)
self.values = _switch_entries(module.values, self.rename_values)
if hasattr(module, 'prod_meta'):
self.prod_meta = _switch_keys(module.prod_meta, self.rename_produces)
ModuleClass = IORenameModule
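# An illustrative `params` structure (the module string and names are
# hypothetical):
#
#   {
#       'module': 'common:pack',
#       'rename_takes': {'eblif': 'optimized_eblif'},
#       'rename_produces': {'net': 'packed_net'}
#   }
#
# The wrapped module keeps using `eblif`/`net` internally while the rest of the
# flow sees `optimized_eblif`/`packed_net`.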

View file

@ -0,0 +1,33 @@
"""
This module is used as a helper in a build chain to automate creating build directories.
It's currently the only parametric module, meaning it can take user-provided input at an early stage in order to
determine its takes/produces I/O.
This allows representing configurable directories, such as a build directory, as dependencies, which in turn allows
the dependency algorithm to lazily create the directories if they become necessary.
"""
from pathlib import Path
from f4pga.module import Module, ModuleContext
class MkDirsModule(Module):
deps_to_produce: 'dict[str, str]'
def map_io(self, ctx: ModuleContext):
return ctx.r_env.resolve(self.deps_to_produce)
def execute(self, ctx: ModuleContext):
outputs = vars(ctx.outputs)
for _, path in outputs.items():
yield f'Creating directory {path}...'
Path(path).mkdir(parents=True, exist_ok=True)
def __init__(self, params):
self.name = 'mkdirs'
self.no_of_phases = len(params) if params else 0
self.takes = []
self.produces = list(params.keys()) if params else []
self.values = []
self.deps_to_produce = params
ModuleClass = MkDirsModule

View file

@ -0,0 +1,70 @@
from pathlib import Path
from os import remove as os_remove
from shutil import move as sh_mv
from f4pga.common import *
from f4pga.module import Module, ModuleContext
DEFAULT_TIMING_RPT = 'pre_pack.report_timing.setup.rpt'
DEFAULT_UTIL_RPT = 'packing_pin_util.rpt'
class PackModule(Module):
def map_io(self, ctx: ModuleContext):
epath = Path(ctx.takes.eblif)
build_dir = epath.parent
return {
'net': str(epath.with_suffix('.net')),
'util_rpt': str(build_dir / DEFAULT_UTIL_RPT),
'timing_rpt': str(build_dir / DEFAULT_TIMING_RPT)
}
def execute(self, ctx: ModuleContext):
noisy_warnings(ctx.values.device)
build_dir = Path(ctx.outputs.net).parent
yield 'Packing with VPR...'
vpr(
'pack',
VprArgs(
ctx.share,
ctx.takes.eblif,
ctx.values,
sdc_file=ctx.takes.sdc
),
cwd=str(build_dir)
)
og_log = str(build_dir / 'vpr_stdout.log')
yield 'Moving/deleting files...'
if ctx.outputs.pack_log:
sh_mv(og_log, ctx.outputs.pack_log)
else:
os_remove(og_log)
if ctx.outputs.timing_rpt:
sh_mv(str(build_dir / DEFAULT_TIMING_RPT), ctx.outputs.timing_rpt)
if ctx.outputs.util_rpt:
sh_mv(str(build_dir / DEFAULT_UTIL_RPT), ctx.outputs.util_rpt)
def __init__(self, _):
self.name = 'pack'
self.no_of_phases = 2
self.takes = [
'eblif',
'sdc?'
]
self.produces = [
'net',
'util_rpt',
'timing_rpt',
'pack_log!'
]
self.values = [
'device',
] + vpr_specific_values()
ModuleClass = PackModule

View file

@ -0,0 +1,80 @@
from pathlib import Path
import os
from shutil import move as sh_mv
from re import match as re_match
from f4pga.common import *
from f4pga.module import Module, ModuleContext
def default_output_name(place_constraints):
p = place_constraints
m = re_match('(.*)\\.[^.]*$', place_constraints)
if m:
return m.groups()[0] + '.place'
return f'{p}.place'
def place_constraints_file(ctx: ModuleContext):
p = ctx.takes.place_constraints
if p:
return p, False
p = ctx.takes.io_place
if p:
return p, False
return f'{Path(ctx.takes.eblif).stem}.place', True
class PlaceModule(Module):
def map_io(self, ctx: ModuleContext):
mapping = {}
p, _ = place_constraints_file(ctx)
mapping['place'] = default_output_name(p)
return mapping
def execute(self, ctx: ModuleContext):
place_constraints, dummy = place_constraints_file(ctx)
place_constraints = os.path.realpath(place_constraints)
if dummy:
with open(place_constraints, 'wb') as f:
f.write(b'')
build_dir = str(Path(ctx.takes.eblif).parent)
vpr_options = ['--fix_clusters', place_constraints]
yield 'Running VPR...'
vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values,
sdc_file=ctx.takes.sdc, vpr_extra_opts=vpr_options)
vpr('place', vprargs, cwd=build_dir)
# VPR names output on its own. If user requested another name, the
# output file should be moved.
# TODO: This extends the set of names that would cause collisions.
# As for now (22-07-2021), no collision detection is being done, but
# when the problem gets tackled, we should keep in mind that VPR-based
# modules may produce some temporary files with names that differ from
# the ones in flow configuration.
if ctx.is_output_explicit('place'):
output_file = default_output_name(place_constraints)
sh_mv(output_file, ctx.outputs.place)
yield 'Saving log...'
save_vpr_log('place.log', build_dir=build_dir)
def __init__(self, _):
self.name = 'place'
self.no_of_phases = 2
self.takes = [
'eblif',
'sdc?',
'place_constraints?',
'io_place?'
]
self.produces = [ 'place' ]
self.values = [
'device',
'vpr_options?'
] + vpr_specific_values()
ModuleClass = PlaceModule

View file

@ -0,0 +1,55 @@
from pathlib import Path
from f4pga.common import *
from f4pga.module import Module, ModuleContext
class PlaceConstraintsModule(Module):
def map_io(self, ctx: ModuleContext):
return {
'place_constraints': f'{Path(ctx.takes.net).stem!s}.preplace'
}
def execute(self, ctx: ModuleContext):
arch_dir = str(Path(ctx.share) / 'arch')
arch_def = str(Path(arch_dir) / ctx.values.device / 'arch.timing.xml')
database = sub('prjxray-config').decode().replace('\n', '')
yield 'Generating .place...'
extra_opts: 'list[str]'
if ctx.values.extra_opts:
extra_opts = options_dict_to_list(ctx.values.extra_opts)
else:
extra_opts = []
data = sub(*(['python3', ctx.values.script,
'--net', ctx.takes.net,
'--arch', arch_def,
'--blif', ctx.takes.eblif,
'--input', ctx.takes.io_place,
'--db_root', database,
'--part', ctx.values.part_name]
+ extra_opts))
yield 'Saving place constraint data...'
with open(ctx.outputs.place_constraints, 'wb') as f:
f.write(data)
def __init__(self, _):
self.name = 'place_constraints'
self.no_of_phases = 2
self.takes = [
'eblif',
'net',
'io_place'
]
self.produces = [ 'place_constraints' ]
self.values = [
'device',
'part_name',
'script',
'extra_opts?'
]
ModuleClass = PlaceConstraintsModule

View file

@ -0,0 +1,57 @@
from pathlib import Path
from shutil import move as sh_mv
from f4pga.common import *
from f4pga.module import Module, ModuleContext
def route_place_file(ctx: ModuleContext):
return str(Path(ctx.takes.eblif).with_suffix('.route'))
class RouteModule(Module):
def map_io(self, ctx: ModuleContext):
return {
'route': route_place_file(ctx)
}
def execute(self, ctx: ModuleContext):
build_dir = str(Path(ctx.takes.eblif).parent)
vpr_options = []
if ctx.values.vpr_options:
vpr_options = options_dict_to_list(ctx.values.vpr_options)
yield 'Routing with VPR...'
vpr(
'route',
VprArgs(
ctx.share,
ctx.takes.eblif,
ctx.values,
sdc_file=ctx.takes.sdc
),
cwd=build_dir
)
if ctx.is_output_explicit('route'):
sh_mv(route_place_file(ctx), ctx.outputs.route)
yield 'Saving log...'
save_vpr_log('route.log', build_dir=build_dir)
def __init__(self, _):
self.name = 'route'
self.no_of_phases = 2
self.takes = [
'eblif',
'place',
'sdc?'
]
self.produces = [ 'route' ]
self.values = [
'device',
'vpr_options?'
] + vpr_specific_values()
ModuleClass = RouteModule

154
f4pga/common_modules/synth.py Executable file
View file

@ -0,0 +1,154 @@
import os
from f4pga.common import *
from f4pga.module import Module, ModuleContext, ModuleRuntimeException
def yosys_setup_tcl_env(tcl_env_def):
"""
Setup environmental variables for YOSYS TCL scripts.
"""
env = {}
for key, value in tcl_env_def.items():
if value is None:
continue
v = value
if type(value) is list:
v = ' '.join(value)
env[key] = v
return env
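# For example (illustrative), a `yosys_tcl_env` value of
#   {'TOP': 'top', 'SOURCES': ['a.v', 'b.v'], 'UNUSED': None}
# becomes the environment mapping
#   {'TOP': 'top', 'SOURCES': 'a.v b.v'}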
def yosys_synth(tcl, tcl_env, verilog_files=[], read_verilog_args=None, log=None):
# Set up environment for TCL weirdness
optional = []
if log:
optional += ['-l', log]
env = os.environ.copy()
env.update(tcl_env)
tcl = f'tcl {tcl}'
# Append `read_verilog` commands to the script for more sophisticated input
# handling if arguments are specified, instead of passing the files directly
# on the `yosys` command line.
if read_verilog_args:
args_str = ' '.join(read_verilog_args)
for verilog in verilog_files:
tcl = f'read_verilog {args_str} {verilog}; {tcl}'
verilog_files = []
# Execute YOSYS command
return sub(*(['yosys', '-p', tcl] + optional + verilog_files), env=env)
def yosys_conv(tcl, tcl_env, synth_json):
# Set up environment for TCL weirdness
env = os.environ.copy()
env.update(tcl_env)
return sub('yosys', '-p', f'read_json {synth_json}; tcl {tcl}', env=env)
class SynthModule(Module):
extra_products: 'list[str]'
def map_io(self, ctx: ModuleContext):
mapping = {}
top = ctx.values.top
if ctx.takes.build_dir:
top = os.path.join(ctx.takes.build_dir, top)
mapping['eblif'] = top + '.eblif'
mapping['fasm_extra'] = top + '_fasm_extra.fasm'
mapping['json'] = top + '.json'
mapping['synth_json'] = top + '_io.json'
b_path = os.path.dirname(top)
for extra in self.extra_products:
name, spec = decompose_depname(extra)
if spec == 'maybe':
raise ModuleRuntimeException(
f'Yosys synth extra products can\'t use the \'maybe\' '
f'(?) specifier. Product causing this error: `{extra}`.'
)
elif spec == 'req':
mapping[name] = \
os.path.join(b_path,
ctx.values.device + '_' + name + '.' + name)
return mapping
def execute(self, ctx: ModuleContext):
split_inouts = os.path.join(ctx.share, 'scripts/split_inouts.py')
synth_tcl = os.path.join(ctx.values.tcl_scripts, 'synth.tcl')
conv_tcl = os.path.join(ctx.values.tcl_scripts, 'conv.tcl')
tcl_env = yosys_setup_tcl_env(ctx.values.yosys_tcl_env) \
if ctx.values.yosys_tcl_env else {}
if get_verbosity_level() >= 2:
yield f'Synthesizing sources: {ctx.takes.sources}...'
else:
yield f'Synthesizing sources...'
yosys_synth(synth_tcl, tcl_env, ctx.takes.sources,
ctx.values.read_verilog_args, ctx.outputs.synth_log)
yield f'Splitting in/outs...'
sub('python3', split_inouts, '-i', ctx.outputs.json, '-o',
ctx.outputs.synth_json)
if not os.path.isfile(ctx.outputs.fasm_extra):
with open(ctx.outputs.fasm_extra, 'w') as f:
f.write('')
yield f'Converting...'
yosys_conv(conv_tcl, tcl_env, ctx.outputs.synth_json)
def __init__(self, params):
self.name = 'synthesize'
self.no_of_phases = 3
self.takes = [
'sources',
'build_dir?'
]
# Extra takes for use with TCL scripts
extra_takes = params.get('takes')
if extra_takes:
self.takes += extra_takes
self.produces = [
'eblif',
'fasm_extra',
'json',
'synth_json',
'synth_log!'
]
# Extra products for use with TCL scripts
extra_products = params.get('produces')
if extra_products:
self.produces += extra_products
self.extra_products = extra_products
else:
self.extra_products = []
self.values = [
'top',
'device',
'tcl_scripts',
'yosys_tcl_env?',
'read_verilog_args?'
]
self.prod_meta = {
'eblif': 'Extended BLIF hierarchical sequential designs file\n'
'generated by YOSYS',
'json': 'JSON file containing a design generated by YOSYS',
'synth_log': 'YOSYS synthesis log',
'fasm_extra': 'Extra FASM generated during the synthesis stage. Needed in '
'some designs.\nIn case it\'s not necessary, the file '
'will be empty.'
}
extra_meta = params.get('prod_meta')
if extra_meta:
self.prod_meta.update(extra_meta)
ModuleClass = SynthModule

202
f4pga/flow_config.py Normal file
View file

@ -0,0 +1,202 @@
from pathlib import Path
from copy import copy
from os import listdir as os_listdir
from json import dump as json_dump, load as json_load
from f4pga.common import ResolutionEnv, deep
from f4pga.stage import Stage
def open_flow_cfg(path: str) -> dict:
with Path(path).open('r') as rfptr:
return json_load(rfptr)
def _get_ovs_raw(
dict_name: str,
flow_cfg,
platform: 'str | None',
stage: 'str | None'
):
# Copy to avoid mutating the underlying config when applying overrides.
vals = flow_cfg.get(dict_name)
vals = {} if vals is None else dict(vals)
if platform is not None:
platform_vals = flow_cfg[platform].get(dict_name)
if platform_vals is not None:
vals.update(platform_vals)
if stage is not None:
stage_deps = flow_cfg[platform][stage].get(dict_name)
if stage_deps is not None:
vals.update(stage_deps)
return vals
def verify_platform_name(platform: str, mypath: str):
for plat_def_filename in os_listdir(str(Path(mypath) / 'platforms')):
platform_name = str(Path(plat_def_filename).stem)
if platform == platform_name:
return True
return False
def verify_stage(platform: str, stage: str, mypath: str):
# TODO: Verify stage
return True
def _is_kword(w: str):
kwords = {
'dependencies',
'values',
'default_platform',
'default_target'
}
return w in kwords
class FlowDefinition:
stages: 'dict[str, Stage]' # stage name -> Stage mapping
r_env: ResolutionEnv
def __init__(self, flow_def: dict, r_env: ResolutionEnv):
self.flow_def = flow_def
self.r_env = r_env
self.stages = {}
global_vals = flow_def.get('values')
if global_vals is not None:
self.r_env.add_values(global_vals)
stages_d = flow_def['stages']
modopts_d = flow_def.get('stage_options')
if modopts_d is None:
modopts_d = {}
for stage_name, modstr in stages_d.items():
opts = modopts_d.get(stage_name)
self.stages[stage_name] = Stage(stage_name, modstr, opts)
def stage_names(self):
return self.stages.keys()
def get_stage_r_env(self, stage_name: 'str') -> ResolutionEnv:
stage = self.stages[stage_name]
r_env = copy(self.r_env)
r_env.add_values(stage.value_overrides)
return r_env
class ProjectFlowConfig:
flow_cfg: dict
path: str
def __init__(self, path: str):
self.flow_cfg = {}
self.path = copy(path)
def platforms(self):
for platform, _ in self.flow_cfg.items():
if not _is_kword(platform):
yield platform
def get_default_platform(self) -> 'str | None':
return self.flow_cfg.get('default_platform')
def get_default_target(self, platform: str) -> 'str | None':
return self.flow_cfg[platform].get('default_target')
def get_stage_r_env(self, platform: str, stage: str) -> ResolutionEnv:
r_env = self._cache_platform_r_env(platform)
stage_cfg = self.flow_cfg[platform][stage]
stage_values = stage_cfg.get('values')
if stage_values:
r_env.add_values(stage_values)
return r_env
def get_dependencies_raw(self, platform: 'str | None' = None):
"""
Get dependencies without value resolution applied.
"""
return _get_ovs_raw('dependencies', self.flow_cfg, platform, None)
def get_values_raw(
self,
platform: 'str | None' = None,
stage: 'str | None' = None
):
"""
Get values without value resolution applied.
"""
return _get_ovs_raw('values', self.flow_cfg, platform, stage)
def get_stage_value_overrides(self, platform: str, stage: str):
stage_cfg = self.flow_cfg[platform].get(stage)
if stage_cfg is None:
return {}
stage_vals_ovds = stage_cfg.get('values')
if stage_vals_ovds is None:
return {}
return stage_vals_ovds
def get_dependency_platform_overrides(self, platform: str):
platform_ovds = self.flow_cfg[platform].get('dependencies')
if platform_ovds is None:
return {}
return platform_ovds
class FlowConfig:
platform: str
r_env: ResolutionEnv
dependencies_explicit: 'dict[str, ]'
stages: 'dict[str, Stage]'
def __init__(self, project_config: ProjectFlowConfig,
platform_def: FlowDefinition, platform: str):
self.r_env = platform_def.r_env
platform_vals = project_config.get_values_raw(platform)
self.r_env.add_values(platform_vals)
self.stages = platform_def.stages
self.platform = platform
raw_project_deps = project_config.get_dependencies_raw(platform)
self.dependencies_explicit = deep(lambda p: str(Path(p).resolve()))(self.r_env.resolve(raw_project_deps))
for stage_name, stage in platform_def.stages.items():
project_val_ovds = \
project_config.get_stage_value_overrides(platform, stage_name)
stage.value_overrides.update(project_val_ovds)
def get_dependency_overrides(self):
return self.dependencies_explicit
def get_r_env(self, stage_name: str) -> ResolutionEnv:
stage = self.stages[stage_name]
r_env = copy(self.r_env)
r_env.add_values(stage.value_overrides)
return r_env
def get_stage(self, stage_name: str) -> Stage:
return self.stages[stage_name]
class FlowConfigException(Exception):
path: str
message: str
def __init__(self, path: str, message: str):
self.path = path
self.message = message
def __str__(self) -> str:
return f'Error in config `{self.path}`: {self.message}'
def open_project_flow_cfg(path: str) -> ProjectFlowConfig:
cfg = ProjectFlowConfig(path)
with Path(path).open('r') as rfptr:
cfg.flow_cfg = json_load(rfptr)
return cfg

156
f4pga/module.py Normal file
View file

@ -0,0 +1,156 @@
"""
Here are the things necessary to write an F4PGA Module.
"""
from types import SimpleNamespace
from abc import abstractmethod
from f4pga.common import (
decompose_depname,
ResolutionEnv,
fatal
)
class Module:
"""
A `Module` is a wrapper for whatever tool is used in a flow.
Modules can request dependencies and values, and are guaranteed to have all the required ones present when entering
`exec` mode.
They also have to specify what dependencies they produce and create the files for these dependencies.
"""
no_of_phases: int
name: str
takes: 'list[str]'
produces: 'list[str]'
values: 'list[str]'
prod_meta: 'dict[str, str]'
@abstractmethod
def execute(self, ctx):
"""
Executes module.
Use yield to print a message informing about current execution phase.
`ctx` is `ModuleContext`.
"""
pass
@abstractmethod
def map_io(self, ctx) -> 'dict[str, ]':
"""
Returns paths for outputs derived from given inputs.
`ctx` is `ModuleContext`.
"""
pass
def __init__(self, params: 'dict[str, ]'):
self.no_of_phases = 0
self.current_phase = 0
self.name = '<BASE STAGE>'
self.prod_meta = {}
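# A minimal module sketch (hypothetical, for illustration only):
#
#   class TouchModule(Module):
#       def map_io(self, ctx):
#           return {'marker': 'build/marker.txt'}
#
#       def execute(self, ctx):
#           yield 'Touching marker...'
#           with open(ctx.outputs.marker, 'w') as f:
#               f.write('')
#
#       def __init__(self, _):
#           self.name = 'touch'
#           self.no_of_phases = 1
#           self.takes = []
#           self.produces = ['marker']
#           self.values = []
#           self.prod_meta = {'marker': 'An empty marker file'}
#
#   ModuleClass = TouchModule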
class ModuleContext:
"""
A class for object holding mappings for dependencies and values as well as other information needed during modules
execution.
"""
share: str # Absolute path to F4PGA's share directory
bin: str # Absolute path to F4PGA's bin directory
takes: SimpleNamespace # Maps symbolic dependency names to relative paths.
produces: SimpleNamespace # Contains mappings for explicitly specified dependencies.
# Useful mostly for checking for on-demand optional outputs (such as logs) with
# `is_output_explicit` method.
outputs: SimpleNamespace # Contains mappings for all available outputs.
values: SimpleNamespace # Contains all available requested values.
r_env: ResolutionEnv # `ResolutionEnv` object holding mappings for the current scope.
module_name: str # Name of the module.
def is_output_explicit(self, name: str):
"""
True if the user has explicitly specified the output's path.
"""
return getattr(self.produces, name) is not None
def _getreqmaybe(self, obj, deps: 'list[str]', deps_cfg: 'dict[str, ]'):
"""
Add an attribute for each dependency, or panic if a required dependency has not been given to the module on its input.
"""
for name in deps:
name, spec = decompose_depname(name)
value = deps_cfg.get(name)
if value is None and spec == 'req':
fatal(-1, f'Dependency `{name}` is required by module `{self.module_name}` but wasn\'t provided')
setattr(obj, name, self.r_env.resolve(value))
# `config` should be a dictionary given as the module's input.
def __init__(
self,
module: Module,
config: 'dict[str, ]',
r_env: ResolutionEnv,
share: str,
bin: str
):
self.module_name = module.name
self.takes = SimpleNamespace()
self.produces = SimpleNamespace()
self.values = SimpleNamespace()
self.outputs = SimpleNamespace()
self.r_env = r_env
self.share = share
self.bin = bin
self._getreqmaybe(self.takes, module.takes, config['takes'])
self._getreqmaybe(self.values, module.values, config['values'])
produces_resolved = self.r_env.resolve(config['produces'])
for name, value in produces_resolved.items():
setattr(self.produces, name, value)
outputs = module.map_io(self)
outputs.update(produces_resolved)
self._getreqmaybe(self.outputs, module.produces, outputs)
def shallow_copy(self):
cls = type(self)
mycopy = cls.__new__(cls)
mycopy.module_name = self.module_name
mycopy.takes = self.takes
mycopy.produces = self.produces
mycopy.values = self.values
mycopy.outputs = self.outputs
mycopy.r_env = self.r_env
mycopy.share = self.share
mycopy.bin = self.bin
return mycopy
class ModuleRuntimeException(Exception):
info: str
def __init__(self, info: str):
self.info = info
def __str__(self):
return self.info
def get_mod_metadata(module: Module):
"""
Get descriptions for produced dependencies.
"""
meta = {}
has_meta = hasattr(module, 'prod_meta')
for prod in module.produces:
prod = prod.replace('?', '').replace('!', '')
if not has_meta:
meta[prod] = '<no description>'
continue
prod_meta = module.prod_meta.get(prod)
meta[prod] = prod_meta if prod_meta else '<no description>'
return meta
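To make the `Module` contract above concrete, here is a hedged sketch of a trivial single-phase module; the name and the file-copy behavior are purely illustrative, and the `ModuleClass` alias follows the convention expected by `module_runner.py` below:

from shutil import copyfile
from f4pga.module import Module

class EchoModule(Module):
    """Illustrative one-phase module: copies `takes.src` to `outputs.dst`."""

    def map_io(self, ctx):
        # Derive the output path from the input path.
        return {'dst': ctx.takes.src + '.copy'}

    def execute(self, ctx):
        yield 'Copying source file...'  # reported as phase [1/1]
        copyfile(ctx.takes.src, ctx.outputs.dst)

    def __init__(self, params):
        super().__init__(params)
        self.name = 'echo'
        self.no_of_phases = 1
        self.takes = ['src']
        self.produces = ['dst']
        self.values = []

ModuleClass = EchoModule  # modules are looked up through this alias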

38
f4pga/module_inspector.py Normal file
View file

@ -0,0 +1,38 @@
from f4pga.module import Module
from f4pga.common import decompose_depname
from colorama import Style
def _get_if_qualifier(deplist: 'list[str]', qualifier: str):
for dep_name in deplist:
name, q = decompose_depname(dep_name)
if q == qualifier:
yield f'{Style.BRIGHT}{name}{Style.RESET_ALL}'
def _list_if_qualifier(deplist: 'list[str]', qualifier: str, indent: int = 4):
indent_str = ' ' * indent
r = ''
for line in _get_if_qualifier(deplist, qualifier):
r += indent_str + line + '\n'
return r
def get_module_info(module: Module) -> str:
r = ''
r += f'Module `{Style.BRIGHT}{module.name}{Style.RESET_ALL}`:\n'
r += 'Inputs:\n Required:\n Dependencies:\n'
r += _list_if_qualifier(module.takes, 'req', indent=6)
r += ' Values:\n'
r += _list_if_qualifier(module.values, 'req', indent=6)
r += ' Optional:\n Dependencies:\n'
r += _list_if_qualifier(module.takes, 'maybe', indent=6)
r += ' Values:\n'
r += _list_if_qualifier(module.values, 'maybe', indent=6)
r += 'Outputs:\n Guaranteed:\n'
r += _list_if_qualifier(module.produces, 'req', indent=4)
r += ' On-demand:\n'
r += _list_if_qualifier(module.produces, 'demand', indent=4)
r += ' Not guaranteed:\n'
r += _list_if_qualifier(module.produces, 'maybe', indent=4)
return r
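A usage sketch with a stand-in module (all names illustrative), showing how the '?' and '!' qualifiers surface as 'Optional' and 'On-demand' entries in the report:

from f4pga.module import Module
from f4pga.module_inspector import get_module_info

class DummyModule(Module):
    def map_io(self, ctx):
        return {}
    def execute(self, ctx):
        yield 'no-op'
    def __init__(self, params):
        super().__init__(params)
        self.name = 'dummy'
        self.takes = ['eblif', 'pcf?']                   # 'pcf' is optional
        self.produces = ['bitstream', 'bitstream_log!']  # the log is produced on demand
        self.values = ['part_name']

print(get_module_info(DummyModule({})))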

129
f4pga/module_runner.py Normal file
View file

@ -0,0 +1,129 @@
"""
Dynamically import and run F4PGA modules.
"""
from contextlib import contextmanager
import importlib.util as importlib_util
from pathlib import Path
from colorama import Style
from f4pga.module import Module, ModuleContext, get_mod_metadata
from f4pga.common import ResolutionEnv, deep, sfprint
@contextmanager
def _add_to_sys_path(path: str):
import sys
old_syspath = sys.path
sys.path = [path] + sys.path
try:
yield
finally:
sys.path = old_syspath
def import_module_from_path(path: str):
absolute_path = str(Path(path).resolve())
with _add_to_sys_path(path):
spec = importlib_util.spec_from_file_location(absolute_path, absolute_path)
module = importlib_util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
# Once imported, a module is added to this dict to avoid re-importing it
preloaded_modules = {}
def get_module(path: str):
global preloaded_modules
cached = preloaded_modules.get(path)
if cached:
return cached.ModuleClass
mod = import_module_from_path(path)
preloaded_modules[path] = mod
# All F4PGA modules should expose a `ModuleClass` type/alias, which is a class implementing the Module interface
return mod.ModuleClass
class ModRunCtx:
share: str
bin: str
config: 'dict[str, ]'
def __init__(self, share: str, bin: str, config: 'dict[str, ]'):
self.share = share
self.bin = bin
self.config = config
def make_r_env(self):
return ResolutionEnv(self.config['values'])
class ModuleFailException(Exception):
module: str
mode: str
e: Exception
def __init__(self, module: str, mode: str, e: Exception):
self.module = module
self.mode = mode
self.e = e
def __str__(self) -> str:
return f"""ModuleFailException:
Module `{self.module}` failed in mode '{self.mode}'
Exception `{type(self.e)}`: {self.e}
"""
def module_io(module: Module):
return {
'name': module.name,
'takes': module.takes,
'produces': module.produces,
'meta': get_mod_metadata(module)
}
def module_map(module: Module, ctx: ModRunCtx):
try:
mod_ctx = ModuleContext(
module,
ctx.config,
ctx.make_r_env(),
ctx.share,
ctx.bin
)
except Exception as e:
raise ModuleFailException(module.name, 'map', e)
return deep(lambda p: str(Path(p).resolve()))(vars(mod_ctx.outputs))
def module_exec(module: Module, ctx: ModRunCtx):
try:
mod_ctx = ModuleContext(
module,
ctx.config,
ctx.make_r_env(),
ctx.share,
ctx.bin
)
except Exception as e:
raise ModuleFailException(module.name, 'exec', e)
sfprint(1, f'Executing module `{Style.BRIGHT + module.name + Style.RESET_ALL}`:')
current_phase = 1
try:
for phase_msg in module.execute(mod_ctx):
sfprint(1, f' {Style.BRIGHT}[{current_phase}/{module.no_of_phases}] {Style.RESET_ALL}: {phase_msg}')
current_phase += 1
except Exception as e:
raise ModuleFailException(module.name, 'exec', e)
sfprint(1, f'Module `{Style.BRIGHT + module.name + Style.RESET_ALL}` has finished its work!')
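Putting the pieces together, a sketch of running a module in 'exec' mode; the module path, install paths and empty config are hypothetical:

from f4pga.module_runner import get_module, module_exec, ModRunCtx

ModuleClass = get_module('/path/to/some_module.py')  # the file must expose `ModuleClass`
module = ModuleClass({})
ctx = ModRunCtx(
    share='/opt/f4pga/xc7/install/share/f4pga',  # hypothetical share directory
    bin='/opt/f4pga/xc7/install/bin',            # hypothetical bin directory
    config={'takes': {}, 'produces': {}, 'values': {}}
)
module_exec(module, ctx)  # prints phase messages; raises ModuleFailException on failure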

193
f4pga/part_db.json Normal file
View file

@ -0,0 +1,193 @@
{
"XC7A50TCSG324-1": "xc7a50t",
"XC7A50TCSG324-2": "xc7a50t",
"XC7A50TCSG324-2L": "xc7a50t",
"XC7A50TCSG324-3": "xc7a50t",
"XC7A50TCPG236-1": "xc7a50t",
"XC7A50TCPG236-2": "xc7a50t",
"XC7A50TCPG236-2L": "xc7a50t",
"XC7A50TCPG236-3": "xc7a50t",
"XC7A50TCSG325-1": "xc7a50t",
"XC7A50TCSG325-2": "xc7a50t",
"XC7A50TCSG325-2L": "xc7a50t",
"XC7A50TCSG325-3": "xc7a50t",
"XC7A50TFGG484-1": "xc7a50t",
"XC7A50TFGG484-2": "xc7a50t",
"XC7A50TFGG484-2L": "xc7a50t",
"XC7A50TFGG484-3": "xc7a50t",
"XC7A50TFTG256-1": "xc7a50t",
"XC7A50TFTG256-2": "xc7a50t",
"XC7A50TFTG256-2L": "xc7a50t",
"XC7A50TFTG256-3": "xc7a50t",
"XC7A35TIFTG256-1L": "xc7a50t",
"XC7A35TIFGG484-1L": "xc7a50t",
"XC7A35TICSG325-1L": "xc7a50t",
"XC7A35TICSG324-1L": "xc7a50t",
"XC7A35TICPG236-1L": "xc7a50t",
"XC7A50TICPG236-1L": "xc7a50t",
"XC7A50TIFTG256-1L": "xc7a50t",
"XC7A50TIFGG484-1L": "xc7a50t",
"XC7A50TICSG325-1L": "xc7a50t",
"XC7A50TICSG324-1L": "xc7a50t",
"XC7A35TFTG256-1": "xc7a50t",
"XC7A35TFTG256-2": "xc7a50t",
"XC7A35TFTG256-2L": "xc7a50t",
"XC7A35TFTG256-3": "xc7a50t",
"XC7A35TFGG484-1": "xc7a50t",
"XC7A35TFGG484-2": "xc7a50t",
"XC7A35TFGG484-2L": "xc7a50t",
"XC7A35TFGG484-3": "xc7a50t",
"XC7A35TCSG325-1": "xc7a50t",
"XC7A35TCSG325-2": "xc7a50t",
"XC7A35TCSG325-2L": "xc7a50t",
"XC7A35TCSG325-3": "xc7a50t",
"XC7A35TCSG324-1": "xc7a50t",
"XC7A35TCSG324-2": "xc7a50t",
"XC7A35TCSG324-2L": "xc7a50t",
"XC7A35TCSG324-3": "xc7a50t",
"XC7A35TCPG236-1": "xc7a50t",
"XC7A35TCPG236-2": "xc7a50t",
"XC7A35TCPG236-2L": "xc7a50t",
"XC7A35TCPG236-3": "xc7a50t",
"XC7A100TIFTG256-1L": "xc7a100ti",
"XC7A100TIFGG676-1L": "xc7a100ti",
"XC7A100TIFGG484-1L": "xc7a100ti",
"XC7A100TICSG324-1L": "xc7a100ti",
"XC7A100TFTG256-1": "xc7a100t",
"XC7A100TFTG256-2": "xc7a100t",
"XC7A100TFTG256-2L": "xc7a100t",
"XC7A100TFTG256-3": "xc7a100t",
"XC7A100TFGG676-1": "xc7a100t",
"XC7A100TFGG676-2": "xc7a100t",
"XC7A100TFGG676-2L": "xc7a100t",
"XC7A100TFGG676-3": "xc7a100t",
"XC7A100TFGG484-1": "xc7a100t",
"XC7A100TFGG484-2": "xc7a100t",
"XC7A100TFGG484-2L": "xc7a100t",
"XC7A100TFGG484-3": "xc7a100t",
"XC7A100TCSG324-1": "xc7a100t",
"XC7A100TCSG324-2": "xc7a100t",
"XC7A100TCSG324-2L": "xc7a100t",
"XC7A100TCSG324-3": "xc7a100t",
"XC7A200TFBG484-1": "xc7a200t",
"XC7A200TFBG484-2": "xc7a200t",
"XC7A200TFBG484-2L": "xc7a200t",
"XC7A200TFBG484-3": "xc7a200t",
"XC7A200TFBG676-1": "xc7a200t",
"XC7A200TFBG676-2": "xc7a200t",
"XC7A200TFBG676-2L": "xc7a200t",
"XC7A200TFBG676-3": "xc7a200t",
"XC7A200TFBV484-1": "xc7a200t",
"XC7A200TFBV484-2": "xc7a200t",
"XC7A200TFBV484-2L": "xc7a200t",
"XC7A200TFBV484-3": "xc7a200t",
"XC7A200TFBV676-1": "xc7a200t",
"XC7A200TFBV676-2": "xc7a200t",
"XC7A200TFBV676-2L": "xc7a200t",
"XC7A200TFBV676-3": "xc7a200t",
"XC7A200TFFG1156-1": "xc7a200t",
"XC7A200TFFG1156-2": "xc7a200t",
"XC7A200TFFG1156-2L": "xc7a200t",
"XC7A200TFFG1156-3": "xc7a200t",
"XC7A200TSBV484-1": "xc7a200t",
"XC7A200TSBV484-2": "xc7a200t",
"XC7A200TSBV484-2L": "xc7a200t",
"XC7A200TSBV484-3": "xc7a200t",
"XC7A200TFFV1156-1": "xc7a200t",
"XC7A200TFFV1156-2": "xc7a200t",
"XC7A200TFFV1156-2L": "xc7a200t",
"XC7A200TFFV1156-3": "xc7a200t",
"XC7A200TSBG484-1": "xc7a200t",
"XC7A200TSBG484-2": "xc7a200t",
"XC7A200TSBG484-2L": "xc7a200t",
"XC7A200TSBG484-3": "xc7a200t",
"XC7A200TISBV484-1L": "xc7a200t",
"XC7A200TISBG484-1L": "xc7a200t",
"XC7A200TIFFV1156-1L": "xc7a200t",
"XC7A200TIFFG1156-1L": "xc7a200t",
"XC7A200TIFBV676-1L": "xc7a200t",
"XC7A200TIFBV484-1L": "xc7a200t",
"XC7A200TIFBG676-1L": "xc7a200t",
"XC7A200TIFBG484-1L": "xc7a200t",
"XC7A12TLCSG325-2L": "xc7a12tl",
"XC7A12TLCPG238-2L": "xc7a12tl",
"XC7A25TLCSG325-2L": "xc7a25tl",
"XC7A25TLCPG238-2L": "xc7a25tl",
"XC7A35TLCPG236-2L": "xc7a35tl",
"XC7A35TLCSG324-2L": "xc7a35tl",
"XC7A35TLCSG325-2L": "xc7a35tl",
"XC7A35TLFGG484-2L": "xc7a35tl",
"XC7A35TLFTG256-2L": "xc7a35tl",
"XC7A15TLCPG236-2L": "xc7a15tl",
"XC7A15TLCSG324-2L": "xc7a15tl",
"XC7A15TLCSG325-2L": "xc7a15tl",
"XC7A15TLFGG484-2L": "xc7a15tl",
"XC7A15TLFTG256-2L": "xc7a15tl",
"XC7A50TLCPG236-2L": "xc7a50tl",
"XC7A50TLCSG324-2L": "xc7a50tl",
"XC7A50TLCSG325-2L": "xc7a50tl",
"XC7A50TLFGG484-2L": "xc7a50tl",
"XC7A50TLFTG256-2L": "xc7a50tl",
"XC7A75TLFTG256-2L": "xc7a75tl",
"XC7A75TLFGG676-2L": "xc7a75tl",
"XC7A75TLFGG484-2L": "xc7a75tl",
"XC7A75TLCSG324-2L": "xc7a75tl",
"XC7A100TLCSG324-2L": "xc7a100tl",
"XC7A100TLFGG484-2L": "xc7a100tl",
"XC7A100TLFGG676-2L": "xc7a100tl",
"XC7A100TLFTG256-2L": "xc7a100tl",
"XC7A200TLFBG484-2L": "xc7a200tl",
"XC7A200TLFBG676-2L": "xc7a200tl",
"XC7A200TLFBV484-2L": "xc7a200tl",
"XC7A200TLFBV676-2L": "xc7a200tl",
"XC7A200TLFFG1156-2L": "xc7a200tl",
"XC7A200TLFFV1156-2L": "xc7a200tl",
"XC7A200TLSBG484-2L": "xc7a200tl",
"XC7A200TLSBV484-2L": "xc7a200tl",
"XA7A35TCSG325-1I": "xa7a50t",
"XA7A35TCSG325-1Q": "xa7a50t",
"XA7A35TCSG325-2I": "xa7a50t",
"XA7A35TCSG324-1I": "xa7a50t",
"XA7A35TCSG324-1Q": "xa7a50t",
"XA7A35TCSG324-2I": "xa7a50t",
"XA7A35TCPG236-1I": "xa7a50t",
"XA7A35TCPG236-1Q": "xa7a50t",
"XA7A35TCPG236-2I": "xa7a50t",
"XA7A15TCPG236-1I": "xa7a15t",
"XA7A15TCPG236-1Q": "xa7a15t",
"XA7A15TCPG236-2I": "xa7a15t",
"XA7A15TCSG324-1I": "xa7a15t",
"XA7A15TCSG324-1Q": "xa7a15t",
"XA7A15TCSG324-2I": "xa7a15t",
"XA7A15TCSG325-1I": "xa7a15t",
"XA7A15TCSG325-1Q": "xa7a15t",
"XA7A15TCSG325-2I": "xa7a15t",
"XA7A50TCPG236-1I": "xa7a50t",
"XA7A50TCPG236-1Q": "xa7a50t",
"XA7A50TCPG236-2I": "xa7a50t",
"XA7A50TCSG324-1I": "xa7a50t",
"XA7A50TCSG324-1Q": "xa7a50t",
"XA7A50TCSG324-2I": "xa7a50t",
"XA7A50TCSG325-1I": "xa7a50t",
"XA7A50TCSG325-1Q": "xa7a50t",
"XA7A50TCSG325-2I": "xa7a50t",
"XA7A100TFGG484-1I": "xa7a100t",
"XA7A100TFGG484-1Q": "xa7a100t",
"XA7A100TFGG484-2I": "xa7a100t",
"XA7A100TCSG324-1I": "xa7a100t",
"XA7A100TCSG324-1Q": "xa7a100t",
"XA7A100TCSG324-2I": "xa7a100t",
"EOS3FF512-PDN64": "ql-eos-s3",
"EOS3FF512-WRN42": "ql-eos-s3",
"EOS3FLF512-PDN64": "ql-eos-s3",
"EOS3FLF512-WRN42": "ql-eos-s3",
"EOS3CF512-PDN64": "ql-eos-s3",
"EOS3CF512-WRN42": "ql-eos-s3",
"EOS3CLF512-PDN64": "ql-eos-s3",
"EOS3CLF512-WRN42": "ql-eos-s3",
"K4N8": "ql-k4n8_slow",
"K4N8_SLOW": "ql-k4n8_slow",
"K4N8_FAST": "ql-k4n8_fast"
}
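This database maps vendor part numbers to F4PGA platform names; a lookup sketch (the upper-casing of user-provided part names is an assumption, since project configs may spell them in lowercase):

from json import load
from pathlib import Path

with Path('f4pga/part_db.json').open() as f:
    part_db = load(f)

part = 'xc7a35tcpg236-1'      # as it might appear in a project config
print(part_db[part.upper()])  # -> 'xc7a50t'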

View file

@ -0,0 +1,121 @@
{
"stages": {
"mk_build_dir": "common:mkdirs",
"synth": "common:synth",
"pack": "common:pack",
"ioplace": "common:generic_script_wrapper",
"place": "common:place",
"route": "common:route",
"fasm": "common:fasm",
"bitstream": "common:generic_script_wrapper"
},
"values": {
"part_name": "pd64",
"device": "ql-eos-s3",
"device_alt": "ql-eos-s3_wlcsp",
"pinmap": "${shareDir}/arch/ql-eos-s3_wlcsp/pinmap_PD64.csv",
"arch_def": "${shareDir}/arch/ql-eos-s3_wlcsp/arch.timing.xml",
"rr_graph_lookahead_bin": "${shareDir}/arch/ql-eos-s3_wlcsp/rr_graph_ql-eos-s3_wlcsp.lookahead.bin",
"rr_graph_real_bin": "${shareDir}/arch/ql-eos-s3_wlcsp/rr_graph_ql-eos-s3_wlcsp.rr_graph.real.bin",
"vpr_place_delay": "${shareDir}/arch/ql-eos-s3_wlcsp/rr_graph_ql-eos-s3_wlcsp.place_delay.bin",
"vpr_grid_layout_name": "ql-eos-s3",
"vpr_options": {
"max_router_iterations": 500,
"routing_failure_predictor": "off",
"router_high_fanout_threshold": -1,
"constant_net_method": "route",
"route_chan_width": 100,
"clock_modeling": "route",
"place_delay_model": "delta_override",
"router_lookahead": "extended_map",
"check_route": "quick",
"strict_checks": "off",
"allow_dangling_combinational_nodes": "on",
"disable_errors": "check_unbuffered_edges:check_route",
"congested_routing_iteration_threshold": "0.8",
"incremental_reroute_delay_ripup": "off",
"base_cost_type": "delay_normalized_length_bounded",
"bb_factor": "10",
"initial_pres_fac": "4.0",
"check_rr_graph": "off",
"pack_high_fanout_threshold": "PB-lOGIC:18",
"suppress_warnings": "${noisyWarnings},sum_pin_class:check_unbuffered_edges:load_rr_indexed_data_T_values:check_rr_node:trans_per_R:check_route:set_rr_graph_tool_comment "
}
},
"stage_options": {
"mk_build_dir": {
"params": {
"build_dir": "build/${device}"
}
},
"synth": {
"params": {
"takes": [ "pcf?" ],
"produces": [ "synth_v" ]
},
"values": {
"tcl_scripts": "${shareDir}/scripts/pp3",
"read_verilog_args": [],
"yosys_tcl_env": {
"OUT_JSON": "${:json}",
"OUT_SYNTH_V": "${:synth_v}",
"OUT_EBLIF": "${:eblif}",
"OUT_FASM_EXTRA": "${:fasm_extra}",
"TECHMAP_PATH": "${shareDir}/techmaps/pp3",
"DEVICE_CELLS_SIM": "${shareDir}/arch/ql-eos-s3_wlcsp/cells/ram_sim.v",
"DEVICE_CELLS_MAP": "${shareDir}/arch/ql-eos-s3_wlcsp/cells/ram_map.v",
"PINMAP_FILE": "${shareDir}/arch/ql-eos-s3_wlcsp/pinmap_PD64.csv",
"PCF_FILE": "${:pcf}"
}
}
},
"ioplace": {
"params": {
"stage_name": "ioplace",
"interpreter": "${python3}",
"script": "${binDir}/python/ql_pp3_create_ioplace.py",
"outputs": {
"io_place": {
"mode": "stdout",
"target": "${:eblif[noext]}.ioplace"
}
},
"inputs": {
"blif": "${:eblif}",
"net": "${:net}",
"pcf": "${:pcf}",
"map": "${shareDir}/arch/ql-eos-s3_wlcsp/pinmap_PD64.csv",
"$PYTHONPATH": "${binDir}/python/"
}
}
},
"bitstream": {
"params": {
"stage_name": "bitstream",
"script": "qlfasm",
"outputs": {
"bitstream": {
"mode": "file",
"file": "bitstream-${device}.bit",
"target": "${build_dir?}/bitstream-${device}.bit"
},
"bitstream_log": {
"mode": "stdout",
"target": "${build_dir?}/bitstream-${device}.log"
}
},
"inputs": {
"#1": "${:fasm}",
"#2": "bitstream-${device}.bit",
"dev-type": "ql-eos-s3",
"db-root": "${shareDir}/fasm_database/pp3"
}
},
"values": {
"build_dir?": "."
}
}
}
}
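The `${...}` references used throughout these platform files are resolved against a value environment; a minimal sketch (the `${:dependency}` and `[noext]` forms resolve later, against stage inputs and outputs, and are not shown here):

from f4pga.common import ResolutionEnv

r_env = ResolutionEnv({
    'shareDir': '/opt/f4pga/xc7/install/share/f4pga',  # hypothetical install path
    'device': 'xc7a50t_test',
})
print(r_env.resolve('${shareDir}/arch/${device}/arch.timing.xml'))
# -> /opt/f4pga/xc7/install/share/f4pga/arch/xc7a50t_test/arch.timing.xml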

View file

@ -0,0 +1,167 @@
{
"stages": {
"mk_build_dir": "common:mkdirs",
"synth": "common:synth",
"pack": "common:pack",
"ioplace": "common:generic_script_wrapper",
"place": "common:place",
"repack": "common:generic_script_wrapper",
"route": "common:io_rename",
"fasm": "common:io_rename",
"bitstream": "common:generic_script_wrapper"
},
"values": {
"part_name": "k4n8",
"device": "qlf_k4n8_umc22",
"rr_graph_lookahead_bin": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast/rr_graph_qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast.lookahead.bin",
"rr_graph_real_bin": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast/qlf_k4n8-qlf_k4n8_umc22_fast.rr_graph.bin",
"vpr_place_delay": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast/rr_graph_qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast.place_delay.bin",
"vpr_grid_layout_name": "qlf_k4n8-qlf_k4n8_umc22_fast",
"arch_def": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast/arch_qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast.xml",
"vpr_options": {
"max_router_iterations": 500,
"routing_failure_predictor": "off",
"router_high_fanout_threshold": -1,
"constant_net_method": "route",
"route_chan_width": 100,
"clock_modeling": "ideal",
"place_delta_delay_matrix_calculation_method": "dijkstra",
"place_delay_model": "delta_override",
"router_lookahead": "extended_map",
"allow_dangling_combinational_nodes": "on",
"absorb_buffer_luts": "off"
}
},
"stage_options": {
"mk_build_dir": {
"params": {
"build_dir": "build/${device}"
}
},
"synth": {
"params": {
"produces": [ "synth_v" ]
},
"values": {
"tcl_scripts": "${shareDir}/scripts/qlf_k4n8",
"read_verilog_args": [],
"yosys_tcl_env": {
"TOP": "${top}",
"OUT_JSON": "${:json}",
"TECHMAP_PATH": "${shareDir}/techmaps/qlf_k4n8",
"OUT_SYNTH_V": "${:synth_v}",
"OUT_EBLIF": "${:eblif}",
"PYTHON3": "${python3}"
}
}
},
"ioplace": {
"params": {
"stage_name": "ioplace",
"interpreter": "${python3}",
"script": "${binDir}/python/ql_qlf_create_ioplace.py",
"outputs": {
"io_place": {
"mode": "stdout",
"target": "${:eblif[noext]}.ioplace"
}
},
"inputs": {
"blif": "${:eblif}",
"net": "${:net}",
"pcf": "${:pcf}",
"pinmap_xml": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast/pinmap_qlf_k4n8_umc22.xml",
"csv_file": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast/pinmap_qlf_k4n8_umc22.csv",
"$PYTHONPATH": "${binDir}/python/"
}
}
},
"repack": {
"values": {
"repacking_rules": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast/qlf_k4n8-qlf_k4n8_umc22_fast.repacking_rules.json"
},
"params": {
"stage_name": "repack",
"interpreter": "${python3}",
"script": "${binDir}/python/repacker/repack.py",
"outputs": {
"eblif_repacked": {
"mode": "file",
"file": "${:eblif[noext]}_repacked.eblif",
"target": "${:eblif[noext]}_repacked.eblif"
},
"place_repacked": {
"mode": "file",
"file": "${:place[noext]}_repacked.place",
"target": "${:place[noext]}_repacked.place"
},
"net_repacked": {
"mode": "file",
"file": "${:net[noext]}_repacked.net",
"target": "${:net[noext]}_repacked.net"
},
"repack_log": {
"mode": "stdout",
"target": "${top}.repack.log"
}
},
"inputs": {
"eblif-in": "${:eblif}",
"net-in": "${:net}",
"place-in": "${:place}",
"eblif-out": "${:eblif[noext]}_repacked.eblif",
"place-out": "${:place[noext]}_repacked.place",
"net-out": "${:net[noext]}_repacked.net",
"absorb_buffer_luts": "on",
"vpr-arch": "${arch_def}",
"repacking-rules": "${repacking_rules}",
"json-constraints": "${json_constraints?}",
"pcf-constraints": "${pcf?}",
"$PYTHONPATH": "${binDir}/python/"
}
}
},
"route": {
"params": {
"module": "common:route",
"rename_takes": {
"eblif": "eblif_repacked",
"place": "place_repacked",
"net": "net_repacked"
}
}
},
"fasm": {
"params": {
"module": "common:fasm",
"rename_takes": {
"eblif": "eblif_repacked",
"place": "place_repacked",
"net": "net_repacked"
}
}
},
"bitstream": {
"params": {
"stage_name": "bitstream",
"script": "qlf_fasm",
"outputs": {
"bitstream": {
"mode": "file",
"file": "${:fasm[noext]}.bit",
"target": "${:fasm[noext]}.bit"
}
},
"inputs": {
"#1": "${:fasm}",
"#2": "${:fasm[noext]}.bit",
"db-root": "${shareDir}/fasm_database/qlf_k4n8",
"format": "4byte",
"assemble": true
}
}
}
}
}

View file

@ -0,0 +1,167 @@
{
"stages": {
"mk_build_dir": "common:mkdirs",
"synth": "common:synth",
"pack": "common:pack",
"ioplace": "common:generic_script_wrapper",
"place": "common:place",
"repack": "common:generic_script_wrapper",
"route": "common:io_rename",
"fasm": "common:io_rename",
"bitstream": "common:generic_script_wrapper"
},
"values": {
"part_name": "k4n8",
"device": "qlf_k4n8_umc22",
"rr_graph_lookahead_bin": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow/rr_graph_qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow.lookahead.bin",
"rr_graph_real_bin": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow/qlf_k4n8-qlf_k4n8_umc22_slow.rr_graph.bin",
"vpr_place_delay": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow/rr_graph_qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow.place_delay.bin",
"vpr_grid_layout_name": "qlf_k4n8-qlf_k4n8_umc22_slow",
"arch_def": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow/arch_qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow.xml",
"vpr_options": {
"max_router_iterations": 500,
"routing_failure_predictor": "off",
"router_high_fanout_threshold": -1,
"constant_net_method": "route",
"route_chan_width": 100,
"clock_modeling": "ideal",
"place_delta_delay_matrix_calculation_method": "dijkstra",
"place_delay_model": "delta_override",
"router_lookahead": "extended_map",
"allow_dangling_combinational_nodes": "on",
"absorb_buffer_luts": "off"
}
},
"stage_options": {
"mk_build_dir": {
"params": {
"build_dir": "build/${device}"
}
},
"synth": {
"params": {
"produces": [ "synth_v" ]
},
"values": {
"tcl_scripts": "${shareDir}/scripts/qlf_k4n8",
"read_verilog_args": [],
"yosys_tcl_env": {
"TOP": "${top}",
"OUT_JSON": "${:json}",
"TECHMAP_PATH": "${shareDir}/techmaps/qlf_k4n8",
"OUT_SYNTH_V": "${:synth_v}",
"OUT_EBLIF": "${:eblif}",
"PYTHON3": "${python3}"
}
}
},
"ioplace": {
"params": {
"stage_name": "ioplace",
"interpreter": "${python3}",
"script": "${binDir}/python/ql_qlf_create_ioplace.py",
"outputs": {
"io_place": {
"mode": "stdout",
"target": "${:eblif[noext]}.ioplace"
}
},
"inputs": {
"blif": "${:eblif}",
"net": "${:net}",
"pcf": "${:pcf}",
"pinmap_xml": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow/pinmap_qlf_k4n8_umc22.xml",
"csv_file": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow/pinmap_qlf_k4n8_umc22.csv",
"$PYTHONPATH": "${binDir}/python/"
}
}
},
"repack": {
"values": {
"repacking_rules": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow/qlf_k4n8-qlf_k4n8_umc22_slow.repacking_rules.json"
},
"params": {
"stage_name": "repack",
"interpreter": "${python3}",
"script": "${binDir}/python/repacker/repack.py",
"outputs": {
"eblif_repacked": {
"mode": "file",
"file": "${:eblif[noext]}_repacked.eblif",
"target": "${:eblif[noext]}_repacked.eblif"
},
"place_repacked": {
"mode": "file",
"file": "${:place[noext]}_repacked.place",
"target": "${:place[noext]}_repacked.place"
},
"net_repacked": {
"mode": "file",
"file": "${:net[noext]}_repacked.net",
"target": "${:net[noext]}_repacked.net"
},
"repack_log": {
"mode": "stdout",
"target": "${top}.repack.log"
}
},
"inputs": {
"eblif-in": "${:eblif}",
"net-in": "${:net}",
"place-in": "${:place}",
"eblif-out": "${:eblif[noext]}_repacked.eblif",
"place-out": "${:place[noext]}_repacked.place",
"net-out": "${:net[noext]}_repacked.net",
"absorb_buffer_luts": "on",
"vpr-arch": "${arch_def}",
"repacking-rules": "${repacking_rules}",
"json-constraints": "${json_constraints?}",
"pcf-constraints": "${pcf?}",
"$PYTHONPATH": "${binDir}/python/"
}
}
},
"route": {
"params": {
"module": "common:route",
"rename_takes": {
"eblif": "eblif_repacked",
"place": "place_repacked",
"net": "net_repacked"
}
}
},
"fasm": {
"params": {
"module": "common:fasm",
"rename_takes": {
"eblif": "eblif_repacked",
"place": "place_repacked",
"net": "net_repacked"
}
}
},
"bitstream": {
"params": {
"stage_name": "bitstream",
"script": "qlf_fasm",
"outputs": {
"bitstream": {
"mode": "file",
"file": "${:fasm[noext]}.bit",
"target": "${:fasm[noext]}.bit"
}
},
"inputs": {
"#1": "${:fasm}",
"#2": "${:fasm[noext]}.bit",
"db-root": "${shareDir}/fasm_database/qlf_k4n8",
"format": "4byte",
"assemble": true
}
}
}
}
}

View file

@ -0,0 +1,155 @@
{
"values": {
"part_name": "xc7a100tcsg324-1",
"device": "xc7a100t_test",
"bitstream_device": "artix7",
"pinmap": "${shareDir}/arch/xc7a100t_test/vpr_grid_map.csv",
"arch_def": "${shareDir}/arch/xc7a100t_test/arch.timing.xml",
"rr_graph_lookahead_bin": "${shareDir}/arch/xc7a100t_test/rr_graph_xc7a100t_test.lookahead.bin",
"rr_graph_real_bin": "${shareDir}/arch/xc7a100t_test/rr_graph_xc7a100t_test.rr_graph.real.bin",
"vpr_place_delay": "${shareDir}/arch/xc7a100t_test/rr_graph_xc7a100t_test.place_delay.bin",
"vpr_grid_layout_name": "xc7a100t-test",
"vpr_options": {
"max_router_iterations": 500,
"routing_failure_predictor": "off",
"router_high_fanout_threshold": -1,
"constant_net_method": "route",
"route_chan_width": 500,
"router_heap": "bucket",
"clock_modeling": "route",
"place_delta_delay_matrix_calculation_method": "dijkstra",
"place_delay_model": "delta",
"router_lookahead": "extended_map",
"check_route": "quick",
"strict_checks": "off",
"allow_dangling_combinational_nodes": "on",
"disable_errors": "check_unbuffered_edges:check_route",
"congested_routing_iteration_threshold": "0.8",
"incremental_reroute_delay_ripup": "off",
"base_cost_type": "delay_normalized_length_bounded",
"bb_factor": 10,
"acc_fac": "0.7",
"astar_fac": "1.8",
"initial_pres_fac": "2.828",
"pres_fac_mult": "1.2",
"check_rr_graph": "off",
"suppress_warnings": "${noisyWarnings},sum_pin_class:check_unbuffered_edges:load_rr_indexed_data_T_values:check_rr_node:trans_per_R:check_route:set_rr_graph_tool_comment:calculate_average_switch"
}
},
"stages": {
"mk_build_dir": "common:mkdirs",
"synth": "common:synth",
"pack": "common:pack",
"ioplace": "common:generic_script_wrapper",
"place_constraints": "common:generic_script_wrapper",
"place": "common:place",
"route": "common:route",
"fasm": "common:fasm",
"bitstream": "common:generic_script_wrapper"
},
"stage_options": {
"mk_build_dir": {
"params": {
"build_dir": "build/${device}"
}
},
"synth": {
"params": {
"takes": [ "xdc?" ],
"produces": [
"sdc",
"synth_v"
],
"prod_meta": {
"sdc": "Standard Design Constraints file for X7 series."
}
},
"values": {
"tcl_scripts": "${shareDir}/scripts/xc7",
"yosys_tcl_env": {
"USE_ROI": "FALSE",
"TOP": "${top}",
"OUT_JSON": "${:json}",
"OUT_SDC": "${:sdc}",
"PART_JSON": "${prjxray_db}/${bitstream_device}/${part_name}/part.json",
"OUT_FASM_EXTRA": "${:fasm_extra}",
"TECHMAP_PATH": "${shareDir}/techmaps/xc7_vpr/techmap",
"OUT_SYNTH_V": "${:synth_v}",
"SYNTH_JSON": "${:synth_json}",
"OUT_EBLIF": "${:eblif}",
"PYTHON3": "${python3}",
"UTILS_PATH": "${shareDir}/scripts",
"INPUT_XDC_FILES": "${:xdc}"
}
}
},
"ioplace": {
"params": {
"stage_name": "ioplace",
"interpreter": "${python3}",
"script": "${shareDir}/scripts/prjxray_create_ioplace.py",
"outputs": {
"io_place": {
"mode": "stdout",
"target": "${:net[noext]}.ioplace"
}
},
"inputs": {
"blif": "${:eblif}",
"map": "${shareDir}/arch/${device}/${part_name}/pinmap.csv",
"net": "${:net}",
"pcf": "${:pcf?}",
"$PYTHONPATH": "${binDir}/python/"
}
}
},
"place_constraints": {
"params": {
"stage_name": "place_constraints",
"interpreter": "${python3}",
"script": "${shareDir}/scripts/prjxray_create_place_constraints.py",
"outputs": {
"place_constraints": {
"mode": "stdout",
"target": "${:net[noext]}.preplace"
}
},
"inputs": {
"net": "${:net}",
"arch": "${shareDir}/arch/${device}/arch.timing.xml",
"blif": "${:eblif}",
"input": "${:io_place}",
"db_root": "${prjxray_db}",
"part": "${part_name}",
"vpr_grid_map": "${shareDir}/arch/${device}/vpr_grid_map.csv",
"$PYTHONPATH": "${binDir}/python/"
}
}
},
"bitstream": {
"params": {
"stage_name": "bitstream",
"script": "xcfasm",
"outputs": {
"bitstream": {
"mode": "file",
"file": "${:fasm[noext]}.bit",
"target": "${:fasm[noext]}.bit"
}
},
"inputs": {
"db-root": "${prjxray_db}/${bitstream_device}",
"part": "${part_name}",
"part_file": "${prjxray_db}/${bitstream_device}/${part_name}/part.yaml",
"sparse": true,
"emit_pudc_b_pullup": true,
"fn_in": "${:fasm}",
"frm2bit": "xc7frames2bit",
"bit_out": "${:fasm[noext]}.bit"
}
}
}
}
}

View file

@ -0,0 +1,155 @@
{
"values": {
"part_name": "xc7a200tsbg484-1",
"device": "xc7a200t_test",
"bitstream_device": "artix7",
"pinmap": "${shareDir}/arch/xc7a200t_test/vpr_grid_map.csv",
"arch_def": "${shareDir}/arch/xc7a200t_test/arch.timing.xml",
"rr_graph_lookahead_bin": "${shareDir}/arch/xc7a200t_test/rr_graph_xc7a200t_test.lookahead.bin",
"rr_graph_real_bin": "${shareDir}/arch/xc7a200t_test/rr_graph_xc7a200t_test.rr_graph.real.bin",
"vpr_place_delay": "${shareDir}/arch/xc7a200t_test/rr_graph_xc7a200t_test.place_delay.bin",
"vpr_grid_layout_name": "xc7a200t-test",
"vpr_options": {
"max_router_iterations": 500,
"routing_failure_predictor": "off",
"router_high_fanout_threshold": -1,
"constant_net_method": "route",
"route_chan_width": 500,
"router_heap": "bucket",
"clock_modeling": "route",
"place_delta_delay_matrix_calculation_method": "dijkstra",
"place_delay_model": "delta",
"router_lookahead": "extended_map",
"check_route": "quick",
"strict_checks": "off",
"allow_dangling_combinational_nodes": "on",
"disable_errors": "check_unbuffered_edges:check_route",
"congested_routing_iteration_threshold": "0.8",
"incremental_reroute_delay_ripup": "off",
"base_cost_type": "delay_normalized_length_bounded",
"bb_factor": 10,
"acc_fac": "0.7",
"astar_fac": "1.8",
"initial_pres_fac": "2.828",
"pres_fac_mult": "1.2",
"check_rr_graph": "off",
"suppress_warnings": "${noisyWarnings},sum_pin_class:check_unbuffered_edges:load_rr_indexed_data_T_values:check_rr_node:trans_per_R:check_route:set_rr_graph_tool_comment:calculate_average_switch"
}
},
"stages": {
"mk_build_dir": "common:mkdirs",
"synth": "common:synth",
"pack": "common:pack",
"ioplace": "common:generic_script_wrapper",
"place_constraints": "common:generic_script_wrapper",
"place": "common:place",
"route": "common:route",
"fasm": "common:fasm",
"bitstream": "common:generic_script_wrapper"
},
"stage_options": {
"mk_build_dir": {
"params": {
"build_dir": "build/${device}"
}
},
"synth": {
"params": {
"takes": [ "xdc?" ],
"produces": [
"sdc",
"synth_v"
],
"prod_meta": {
"sdc": "Standard Design Constraints file for X7 series."
}
},
"values": {
"tcl_scripts": "${shareDir}/scripts/xc7",
"yosys_tcl_env": {
"USE_ROI": "FALSE",
"TOP": "${top}",
"OUT_JSON": "${:json}",
"OUT_SDC": "${:sdc}",
"PART_JSON": "${prjxray_db}/${bitstream_device}/${part_name}/part.json",
"OUT_FASM_EXTRA": "${:fasm_extra}",
"TECHMAP_PATH": "${shareDir}/techmaps/xc7_vpr/techmap",
"OUT_SYNTH_V": "${:synth_v}",
"SYNTH_JSON": "${:synth_json}",
"OUT_EBLIF": "${:eblif}",
"PYTHON3": "${python3}",
"UTILS_PATH": "${shareDir}/scripts",
"INPUT_XDC_FILES": "${:xdc}"
}
}
},
"ioplace": {
"params": {
"stage_name": "ioplace",
"interpreter": "${python3}",
"script": "${shareDir}/scripts/prjxray_create_ioplace.py",
"outputs": {
"io_place": {
"mode": "stdout",
"target": "${:net[noext]}.ioplace"
}
},
"inputs": {
"blif": "${:eblif}",
"map": "${shareDir}/arch/${device}/${part_name}/pinmap.csv",
"net": "${:net}",
"pcf": "${:pcf?}",
"$PYTHONPATH": "${binDir}/python/"
}
}
},
"place_constraints": {
"params": {
"stage_name": "place_constraints",
"interpreter": "${python3}",
"script": "${shareDir}/scripts/prjxray_create_place_constraints.py",
"outputs": {
"place_constraints": {
"mode": "stdout",
"target": "${:net[noext]}.preplace"
}
},
"inputs": {
"net": "${:net}",
"arch": "${shareDir}/arch/${device}/arch.timing.xml",
"blif": "${:eblif}",
"input": "${:io_place}",
"db_root": "${prjxray_db}",
"part": "${part_name}",
"vpr_grid_map": "${shareDir}/arch/${device}/vpr_grid_map.csv",
"$PYTHONPATH": "${binDir}/python/"
}
}
},
"bitstream": {
"params": {
"stage_name": "bitstream",
"script": "xcfasm",
"outputs": {
"bitstream": {
"mode": "file",
"file": "${:fasm[noext]}.bit",
"target": "${:fasm[noext]}.bit"
}
},
"inputs": {
"db-root": "${prjxray_db}/${bitstream_device}",
"part": "${part_name}",
"part_file": "${prjxray_db}/${bitstream_device}/${part_name}/part.yaml",
"sparse": true,
"emit_pudc_b_pullup": true,
"fn_in": "${:fasm}",
"frm2bit": "xc7frames2bit",
"bit_out": "${:fasm[noext]}.bit"
}
}
}
}
}

View file

@ -0,0 +1,155 @@
{
"stages": {
"mk_build_dir": "common:mkdirs",
"synth": "common:synth",
"pack": "common:pack",
"ioplace": "common:generic_script_wrapper",
"place_constraints": "common:generic_script_wrapper",
"place": "common:place",
"route": "common:route",
"fasm": "common:fasm",
"bitstream": "common:generic_script_wrapper"
},
"values": {
"part_name": "xc7a35tcsg324-1",
"device": "xc7a50t_test",
"bitstream_device": "artix7",
"pinmap": "${shareDir}/arch/xc7a50t_test/vpr_grid_map.csv",
"arch_def": "${shareDir}/arch/xc7a50t_test/arch.timing.xml",
"rr_graph_lookahead_bin": "${shareDir}/arch/xc7a50t_test/rr_graph_xc7a50t_test.lookahead.bin",
"rr_graph_real_bin": "${shareDir}/arch/xc7a50t_test/rr_graph_xc7a50t_test.rr_graph.real.bin",
"vpr_place_delay": "${shareDir}/arch/xc7a50t_test/rr_graph_xc7a50t_test.place_delay.bin",
"vpr_grid_layout_name": "xc7a50t-test",
"vpr_options": {
"max_router_iterations": 500,
"routing_failure_predictor": "off",
"router_high_fanout_threshold": -1,
"constant_net_method": "route",
"route_chan_width": 500,
"router_heap": "bucket",
"clock_modeling": "route",
"place_delta_delay_matrix_calculation_method": "dijkstra",
"place_delay_model": "delta",
"router_lookahead": "extended_map",
"check_route": "quick",
"strict_checks": "off",
"allow_dangling_combinational_nodes": "on",
"disable_errors": "check_unbuffered_edges:check_route",
"congested_routing_iteration_threshold": "0.8",
"incremental_reroute_delay_ripup": "off",
"base_cost_type": "delay_normalized_length_bounded",
"bb_factor": 10,
"acc_fac": "0.7",
"astar_fac": "1.8",
"initial_pres_fac": "2.828",
"pres_fac_mult": "1.2",
"check_rr_graph": "off",
"suppress_warnings": "${noisyWarnings},sum_pin_class:check_unbuffered_edges:load_rr_indexed_data_T_values:check_rr_node:trans_per_R:check_route:set_rr_graph_tool_comment:calculate_average_switch"
}
},
"stage_options": {
"mk_build_dir": {
"params": {
"build_dir": "build/${device}"
}
},
"synth": {
"params": {
"takes": [ "xdc?" ],
"produces": [
"sdc",
"synth_v"
],
"prod_meta": {
"sdc": "Standard Design Constraints file for X7 series."
}
},
"values": {
"tcl_scripts": "${shareDir}/scripts/xc7",
"yosys_tcl_env": {
"USE_ROI": "FALSE",
"TOP": "${top}",
"OUT_JSON": "${:json}",
"OUT_SDC": "${:sdc}",
"PART_JSON": "${prjxray_db}/${bitstream_device}/${part_name}/part.json",
"OUT_FASM_EXTRA": "${:fasm_extra}",
"TECHMAP_PATH": "${shareDir}/techmaps/xc7_vpr/techmap",
"OUT_SYNTH_V": "${:synth_v}",
"SYNTH_JSON": "${:synth_json}",
"OUT_EBLIF": "${:eblif}",
"PYTHON3": "${python3}",
"UTILS_PATH": "${shareDir}/scripts",
"INPUT_XDC_FILES": "xc7/counter_test/arty.xdc"
}
}
},
"ioplace": {
"params": {
"stage_name": "ioplace",
"interpreter": "${python3}",
"script": "${shareDir}/scripts/prjxray_create_ioplace.py",
"outputs": {
"io_place": {
"mode": "stdout",
"target": "${:net[noext]}.ioplace"
}
},
"inputs": {
"blif": "${:eblif}",
"map": "${shareDir}/arch/${device}/${part_name}/pinmap.csv",
"net": "${:net}",
"pcf": "${:pcf?}",
"$PYTHONPATH": "${binDir}/python/"
}
}
},
"place_constraints": {
"params": {
"stage_name": "place_constraints",
"interpreter": "${python3}",
"script": "${shareDir}/scripts/prjxray_create_place_constraints.py",
"outputs": {
"place_constraints": {
"mode": "stdout",
"target": "${:net[noext]}.preplace"
}
},
"inputs": {
"net": "${:net}",
"arch": "${shareDir}/arch/${device}/arch.timing.xml",
"blif": "${:eblif}",
"input": "${:io_place}",
"db_root": "${prjxray_db}",
"part": "${part_name}",
"vpr_grid_map": "${shareDir}/arch/${device}/vpr_grid_map.csv",
"$PYTHONPATH": "${binDir}/python/"
}
}
},
"bitstream": {
"params": {
"stage_name": "bitstream",
"script": "xcfasm",
"outputs": {
"bitstream": {
"mode": "file",
"file": "${:fasm[noext]}.bit",
"target": "${:fasm[noext]}.bit"
}
},
"inputs": {
"db-root": "${prjxray_db}/${bitstream_device}",
"part": "${part_name}",
"part_file": "${prjxray_db}/${bitstream_device}/${part_name}/part.yaml",
"sparse": true,
"emit_pudc_b_pullup": true,
"fn_in": "${:fasm}",
"frm2bit": "xc7frames2bit",
"bit_out": "${:fasm[noext]}.bit"
}
}
}
}
}

1
f4pga/requirements.txt Normal file
View file

@ -0,0 +1 @@
colorama

View file

@ -19,6 +19,7 @@
# SPDX-License-Identifier: Apache-2.0
from pathlib import Path
from typing import List
from setuptools import setup as setuptools_setup
@ -27,6 +28,28 @@ F4PGA_FAM = environ.get('F4PGA_FAM', 'xc7')
packagePath = Path(__file__).resolve().parent
requirementsFile = packagePath / "requirements.txt"
# Read requirements file and add them to package dependency list
def get_requirements(file: Path) -> List[str]:
requirements = []
with file.open("r") as fh:
for line in fh.read().splitlines():
if line.startswith("#") or line == "":
continue
elif line.startswith("-r"):
# Remove the first word/argument (-r)
filename = " ".join(line.split(" ")[1:])
requirements += get_requirements(file.parent / filename)
elif line.startswith("https"):
# Convert 'URL#NAME' to 'NAME @ URL'
splitItems = line.split("#")
requirements.append("{} @ {}".format(splitItems[1], splitItems[0]))
else:
requirements.append(line)
return requirements
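# For illustration, the three line forms this helper understands (file names hypothetical):
#
#   colorama                        -> 'colorama'
#   -r dev-requirements.txt         -> entries of that file, included recursively
#   https://example.com/pkg.git#pkg -> 'pkg @ https://example.com/pkg.git'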
sf = "symbiflow"
shwrappers = "f4pga.wrappers.sh.__init__"
@ -54,15 +77,21 @@ setuptools_setup(
description="F4PGA.",
url="https://github.com/chipsalliance/f4pga",
packages=[
"f4pga.wrappers.sh",
"f4pga",
"f4pga.common_modules",
"f4pga.wrappers.sh"
],
package_dir={"f4pga": "."},
package_data={
'f4pga': ['*.json', 'platforms/*.json'],
'f4pga.wrappers.sh': ['xc7/*.f4pga.sh', 'quicklogic/*.f4pga.sh']
},
classifiers=[],
python_requires='>=3.6',
install_requires=list(set(get_requirements(requirementsFile))),
entry_points={
"console_scripts": wrapper_entrypoints
"console_scripts": [
"f4pga = f4pga.__init__:main"
] + wrapper_entrypoints
},
)

78
f4pga/stage.py Normal file
View file

@ -0,0 +1,78 @@
from f4pga.common import decompose_depname, resolve_modstr
from f4pga.module import Module
from f4pga.module_runner import get_module, module_io
class StageIO:
"""
Stage dependency input/output.
TODO: Resolve the inconsistency between the usage of this class and the usage of
`decompose_depname` with an unprocessed string.
"""
name: str # A symbolic name given to the dependency
spec: str
def __init__(self, encoded_name: str):
"""
An encoded name features special characters that imply certain qualifiers.
Any name that ends with '?' is treated as having the 'maybe' qualifier.
The '?' symbol is then dropped from the dependency name.
"""
self.name, self.spec = decompose_depname(encoded_name)
def __repr__(self) -> str:
return 'StageIO { name: \'' + self.name + '\', spec: ' + \
self.spec + '}'
class Stage:
"""
Represents a single stage in a flow, i.e. an instance of a module with a
local set of values.
"""
name: str # Name of the stage (module's name)
takes: 'list[StageIO]' # List of symbolic names of dependencies used by
# the stage
produces: 'list[StageIO]' # List of symbolic names of dependencies
# produced by the stage
value_overrides: 'dict[str, ]' # Stage-specific values
module: Module
meta: 'dict[str, str]' # Stage's metadata extracted from module's
# output.
def __init__(self, name: str, modstr: str, mod_opts: 'dict[str, ] | None'):
if mod_opts is None:
mod_opts = {}
module_path = resolve_modstr(modstr)
ModuleClass = get_module(module_path)
self.module = ModuleClass(mod_opts.get('params'))
values = mod_opts.get('values')
if values is not None:
self.value_overrides = values
else:
self.value_overrides = {}
mod_io = module_io(self.module)
self.name = name
self.takes = [StageIO(take) for take in mod_io['takes']]
self.produces = [StageIO(product) for product in mod_io['produces']]
self.meta = mod_io['meta']
def __repr__(self) -> str:
return 'Stage \'' + self.name + '\' {' \
f' value_overrides: {self.value_overrides},' \
f' takes: {self.takes},' \
f' produces: {self.produces} ' + '}'
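A construction sketch mirroring how the platform files wire stages (a "stages" entry supplies the module string, a "stage_options" entry the options); it assumes the f4pga package is installed so that 'common:synth' resolves to a module path:

from f4pga.stage import Stage

stage = Stage('synth', 'common:synth', {'params': {}, 'values': {'tcl_scripts': '/path/to/scripts'}})
print([io.name for io in stage.takes])  # symbolic input names with qualifiers stripped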

1
test/requirements.txt Normal file
View file

@ -0,0 +1 @@
pytest

46
test/wrappers.py Normal file
View file

@ -0,0 +1,46 @@
from pytest import mark
from sys import stdout, stderr
from subprocess import check_call
@mark.xfail
@mark.parametrize("wrapper", ['place', 'route', 'synth', 'write-fasm'])
def test_wrapper(wrapper):
print(f"\n::group::Test {wrapper}")
stdout.flush()
stderr.flush()
try:
check_call(f"f4pga-{wrapper}")
finally:
print("\n::endgroup::")
@mark.xfail
@mark.parametrize(
"wrapper",
[
'symbiflow_generate_constraints',
'symbiflow_pack',
'symbiflow_place',
'symbiflow_route',
'symbiflow_synth',
'symbiflow_write_bitstream',
'symbiflow_write_fasm',
'symbiflow_write_xml_rr_graph',
'vpr_common',
'symbiflow_analysis',
'symbiflow_repack',
'symbiflow_generate_bitstream',
'symbiflow_generate_libfile',
'ql_symbiflow'
]
)
def test_shell_wrapper(wrapper):
print(f"\n::group::Test {wrapper}")
stdout.flush()
stderr.flush()
try:
check_call(f"{wrapper}")
finally:
print("\n::endgroup::")