mirror of
https://github.com/chipsalliance/f4pga.git
synced 2025-01-03 03:43:37 -05:00
f4pga/utils: all python scripts moved from quicklogic arch-defs tarballs
Co-Authored-By: Unai Martinez-Corral <umartinezcorral@antmicro.com> Signed-off-by: Pawel Czarnecki <pczarnecki@antmicro.com>
This commit is contained in:
parent
cf35028b0a
commit
236b07be5d
56 changed files with 18606 additions and 26 deletions
f4pga
flows
utils/quicklogic
convert_compile_opts.pycreate_lib.pypinmap_parse.py
pp3
arch_import.pyconnections.pycreate_default_fasm.pycreate_ioplace.pycreate_place_constraints.pydata_import.pydata_structs.py
process_sdc_constraints.pyeos-s3
fasm2bels.pyprepare_vpr_database.pyrouting_import.pyrr_utils.pyswitchbox_model.pytile_import.pytiming.pyutils.pyverilogmodule.pyvis_switchboxes.pyqlf_k4n8
repacker
README.mdarch_xml_utils.pyblock_path.pyeblif_netlist.pynetlist_cleaning.pypacked_netlist.pypb_rr_graph.pypb_rr_graph_netlist.pypb_rr_graph_router.pypb_type.pyrepack.py
yosys_fixup_cell_names.pytests
eblif_roundtrip
identity.xsllut_padding
lut1.ebliflut1_0.golden.ebliflut1_0.netlut1_1.golden.ebliflut1_1.netlut1_2.golden.ebliflut1_2.netlut1_3.golden.ebliflut1_3.nettest_lut_padding.py
packed_netlist_roundtrip
sort_netlist.xslwrappers
|
@ -246,7 +246,7 @@ ql-eos-s3:
|
|||
params:
|
||||
stage_name: prepare_sdc
|
||||
interpreter: '${python3}'
|
||||
script: '${shareDir}/scripts/process_sdc_constraints.py'
|
||||
script: ['-m', 'f4pga.utils.quicklogic.process_sdc_constraints']
|
||||
outputs:
|
||||
sdc:
|
||||
mode: file
|
||||
|
@ -300,7 +300,7 @@ ql-eos-s3:
|
|||
params:
|
||||
stage_name: ioplace
|
||||
interpreter: '${python3}'
|
||||
script: '${shareDir}/scripts/pp3_create_ioplace.py'
|
||||
script: ['-m', 'f4pga.utils.quicklogic.pp3.create_ioplace']
|
||||
outputs:
|
||||
io_place:
|
||||
mode: stdout
|
||||
|
@ -316,7 +316,7 @@ ql-eos-s3:
|
|||
params:
|
||||
stage_name: place_constraints
|
||||
interpreter: '${python3}'
|
||||
script: '${shareDir}/scripts/pp3_create_place_constraints.py'
|
||||
script: ['-m', 'f4pga.utils.quicklogic.pp3.create_place_constraints']
|
||||
outputs:
|
||||
place_constraints:
|
||||
mode: stdout
|
||||
|
@ -333,7 +333,7 @@ ql-eos-s3:
|
|||
params:
|
||||
stage_name: iomux_jlink
|
||||
interpreter: '${python3}'
|
||||
script: '${shareDir}/scripts/pp3_eos_s3_iomux_config.py'
|
||||
script: ['-m', 'f4pga.utils.quicklogic.pp3.eos-s3.iomux_config']
|
||||
outputs:
|
||||
iomux_jlink:
|
||||
mode: stdout
|
||||
|
@ -349,7 +349,7 @@ ql-eos-s3:
|
|||
params:
|
||||
stage_name: iomux_openocd
|
||||
interpreter: '${python3}'
|
||||
script: '${shareDir}/scripts/pp3_eos_s3_iomux_config.py'
|
||||
script: ['-m', 'f4pga.utils.quicklogic.pp3.eos-s3.iomux_config']
|
||||
outputs:
|
||||
iomux_openocd:
|
||||
mode: stdout
|
||||
|
@ -365,7 +365,7 @@ ql-eos-s3:
|
|||
params:
|
||||
stage_name: iomux_binary
|
||||
interpreter: '${python3}'
|
||||
script: '${shareDir}/scripts/pp3_eos_s3_iomux_config.py'
|
||||
script: ['-m', 'f4pga.utils.quicklogic.pp3.eos-s3.iomux_config']
|
||||
outputs:
|
||||
iomux_binary:
|
||||
mode: stdout
|
||||
|
@ -570,10 +570,9 @@ ql-eos-s3:
|
|||
out-verilog: '${:bitstream}.v'
|
||||
out-pcf: '${:bitstream}.pcf'
|
||||
out-qcf: '${:bitstream}.qcf'
|
||||
$F4PGA_INSTALL_DIR: '${shareDir}/../../../../'
|
||||
$F4PGA_INSTALL_DIR: '${shareDir}/../../../'
|
||||
$FPGA_FAM: eos-s3
|
||||
$PATH: >-
|
||||
${shareDir}/../../../conda/envs/eos-s3/bin/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
|
||||
$PATH: '${shareDir}/../../conda/envs/eos-s3/bin/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'
|
||||
$BIN_DIR_PATH: '${binDir}'
|
||||
|
||||
|
||||
|
@ -630,7 +629,7 @@ ql-k4n8_fast: &ql-k4n8
|
|||
params:
|
||||
stage_name: ioplace
|
||||
interpreter: '${python3}'
|
||||
script: '${binDir}/python/ql_qlf_create_ioplace.py'
|
||||
script: ['-m', 'f4pga.utils.quicklogic.qlf_k4n8.create_ioplace']
|
||||
outputs:
|
||||
io_place:
|
||||
mode: stdout
|
||||
|
@ -649,7 +648,7 @@ ql-k4n8_fast: &ql-k4n8
|
|||
params:
|
||||
stage_name: repack
|
||||
interpreter: '${python3}'
|
||||
script: '${binDir}/python/repacker/repack.py'
|
||||
script: ['-m', 'f4pga.utils.quicklogic.repacker.repack']
|
||||
outputs:
|
||||
eblif_repacked:
|
||||
mode: file
|
||||
|
@ -742,7 +741,7 @@ ql-k4n8_slow:
|
|||
params:
|
||||
stage_name: ioplace
|
||||
interpreter: '${python3}'
|
||||
script: '${binDir}/python/ql_qlf_create_ioplace.py'
|
||||
script: ['-m', 'f4pga.utils.quicklogic.qlf_k4n8.create_ioplace']
|
||||
outputs:
|
||||
io_place:
|
||||
mode: stdout
|
||||
|
|
221
f4pga/utils/quicklogic/convert_compile_opts.py
Normal file
221
f4pga/utils/quicklogic/convert_compile_opts.py
Normal file
|
@ -0,0 +1,221 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
"""
|
||||
This script reads Verilog compile options string from stdin and outputs a
|
||||
series of Yosys commands that implement them to stdout.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import shlex
|
||||
|
||||
|
||||
def eprint(*args, **kwargs):
    """
    Print the given arguments to the standard error stream.

    Accepts exactly the same arguments as the builtin print().
    """
    print(*args, file=sys.stderr, **kwargs)
|
||||
|
||||
|
||||
def parse_options(lines, opts=None):
    """
    Parses Verilog compile options from the given lines of text.

    Strips "#" and C/C++ style "//" line comments and "/* ... */" block
    comments, then scans the remaining tokens for the supported directives:

      -f <file>            read more options from a file (recursive)
      -y <dir>             add a Verilog library directory
      +libext+<ext>+...    add library file extensions
      +incdir+<dir>+...    add include directories
      +define+<k>[=<v>]+.. add macro defines (value is optional)

    Returns a dict with keys "incdir", "libdir", "libext" (sets of strings)
    and "defines" (dict name -> value or None). Errors are reported on
    stderr and terminate the process.
    """

    # Remove "#" comments and C/C++ style "//" comments, remove blank lines,
    # join all remaining ones into a single string
    opt_string = ""
    for line in lines:

        # Remove comments
        pos = line.find("#")
        if pos != -1:
            line = line[:pos]

        pos = line.find("//")
        if pos != -1:
            line = line[:pos]

        # Append
        line = line.strip()
        if line:
            opt_string += line + " "

    # Remove all C/C++ style "/* ... */" comments
    while True:

        # Find beginning of a block comment. Finish if none is found
        p0 = opt_string.find("/*")
        if p0 == -1:
            break

        # Find ending of a block comments. Throw an error if none is found
        p1 = opt_string.find("*/", p0 + 2)
        if p1 == -1:
            eprint("ERROR: Unbalanced block comment!")
            exit(-1)

        # Remove the comment
        opt_string = opt_string[:p0] + opt_string[p1 + 2 :]

    # Initialize options if not given
    if opts is None:
        opts = {"incdir": set(), "libdir": set(), "libext": set(), "defines": {}}

    # Scan and process options
    parts = iter(shlex.split(opt_string))
    while True:

        # Get the option
        try:
            opt = next(parts)
        except StopIteration:
            break

        # A file containing options
        if opt == "-f":
            try:
                arg = next(parts)
            except StopIteration:
                eprint("ERROR: Missing file name for '-f'")
                exit(-1)

            # Open and read the file, recurse
            if not os.path.isfile(arg):
                eprint("ERROR: File '{}' does not exist".format(arg))
                exit(-1)

            with open(arg, "r") as fp:
                lines = fp.readlines()

            parse_options(lines, opts)

        # Verilog library directory
        elif opt == "-y":
            try:
                arg = next(parts)
            except StopIteration:
                eprint("ERROR: Missing directory name for '-y'")
                exit(-1)

            if not os.path.isdir(arg):
                eprint("ERROR: Directory '{}' does not exist".format(arg))
                exit(-1)

            opts["libdir"].add(arg)

        # Library file extensions
        elif opt.startswith("+libext+"):
            args = opt.strip().split("+")
            if len(args) < 2:
                eprint("ERROR: Missing file extensions(s) for '+libext+'")
                exit(-1)

            opts["libext"] |= set(args[2:])

        # Verilog include directory
        elif opt.startswith("+incdir+"):
            args = opt.strip().split("+")
            if len(args) < 2:
                eprint("ERROR: Missing file name(s) for '+incdir+'")
                exit(-1)

            opts["incdir"] |= set(args[2:])

        # Verilog defines
        elif opt.startswith("+define+"):
            args = opt.strip().split("+")
            if len(args) < 2:
                eprint("ERROR: Malformed '+define+' directive")
                exit(-1)

            # Parse defines. They may or may not have values. Split on the
            # first "=" only so that values are allowed to contain "="
            # themselves (e.g. +define+COND=a==b).
            for arg in args[2:]:
                if "=" in arg:
                    key, value = arg.split("=", 1)
                else:
                    key, value = arg, None

                if key in opts["defines"]:
                    eprint("ERROR: Macro '{}' defined twice!".format(key))
                # Last definition wins (after reporting the duplicate)
                opts["defines"][key] = value

    return opts
|
||||
|
||||
|
||||
def quote(s):
    """
    Quotes a string if it needs it
    """
    return '"' + s + '"' if " " in s else s
|
||||
|
||||
|
||||
def translate_options(opts):
    """
    Translates the given options into a list of Yosys command strings.

    opts is the dict produced by parse_options(). Include directories and
    defines become "verilog_defaults -add ..." commands; library files
    become "read_verilog ..." commands.
    """

    commands = []

    # Include directories
    for incdir in opts["incdir"]:
        commands.append("verilog_defaults -add -I{}".format(quote(incdir)))

    # Macro defines (value-less defines emit just the name)
    for key, val in opts["defines"].items():
        if val is not None:
            commands.append("verilog_defaults -add -D{}={}".format(key, val))
        else:
            commands.append("verilog_defaults -add -D{}".format(key))

    # Since Yosys does not automatically search for an unknown module in
    # verilog files make it read all library files upfront. Do this by
    # searching for files with extensions provided with "+libext+" in
    # paths provided by "-y". The ".v" extension is always accepted.
    libext = opts["libext"] | {"v"}

    for libdir in opts["libdir"]:
        # Sort directory entries so the generated command list is
        # deterministic across runs and platforms (os.listdir order
        # is arbitrary).
        for f in sorted(os.listdir(libdir)):
            _, ext = os.path.splitext(f)
            if ext.replace(".", "") in libext:
                fname = os.path.join(libdir, f)
                commands.append("read_verilog {}".format(quote(fname)))

    return commands
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
if __name__ == "__main__":
    # Read compile options from stdin, parse them, translate the result
    # into Yosys commands and print those on stdout.
    for command in translate_options(parse_options(sys.stdin.readlines())):
        print(command)
|
442
f4pga/utils/quicklogic/create_lib.py
Normal file
442
f4pga/utils/quicklogic/create_lib.py
Normal file
|
@ -0,0 +1,442 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
"""
|
||||
Creates a device library file
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
import csv
|
||||
import os
|
||||
import re
|
||||
from collections import namedtuple
|
||||
from collections import defaultdict
|
||||
from datetime import date
|
||||
import simplejson as json
|
||||
import lxml.etree as ET
|
||||
|
||||
"""
|
||||
Pin properties
|
||||
name - pin_name
|
||||
dir - direction (input/output)
|
||||
used - specify 'yes' if user is using the pin else specify 'no'
|
||||
clk - specify associate clock
|
||||
"""
|
||||
PinData = namedtuple("PinData", "name dir used clk")
|
||||
|
||||
|
||||
def main():
    """
    Creates a device library file by getting data from given csv and xml file
    """
    # Command-line interface; each option also has short single-letter forms
    parser = argparse.ArgumentParser(description="Creates a device library file.")
    parser.add_argument("--lib", "-l", "-L", type=str, default="qlf_k4n8.lib", help="The output device lib file")
    parser.add_argument("--lib_name", "-n", "-N", type=str, required=True, help="Specify library name")
    parser.add_argument(
        "--template_data_path",
        "-t",
        "-T",
        type=str,
        required=True,
        help="Specify path from where to pick template data for library creation",
    )
    parser.add_argument("--cell_name", "-m", "-M", type=str, required=True, help="Specify cell name")
    parser.add_argument("--csv", "-c", "-C", type=str, required=True, help="Input pin-map csv file")
    parser.add_argument("--xml", "-x", "-X", type=str, required=True, help="Input interface-mapping xml file")

    args = parser.parse_args()

    if not os.path.exists(args.template_data_path):
        print('Invalid template data path "{}" specified'.format(args.template_data_path), file=sys.stderr)
        sys.exit(1)

    # Collect user-mapped pins from the CSV, bucketed by direction marker
    # ("F2A" / "A2F" substring in the scalarized port name), and remember
    # each pin's associated clock when the CSV provides one.
    csv_pin_data = defaultdict(set)
    assoc_clk = dict()
    with open(args.csv, newline="") as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            # define properties for scalar pins
            scalar_port_names = vec_to_scalar(row["port_name"])
            for port in scalar_port_names:
                if port.find("F2A") != -1:
                    csv_pin_data["F2A"].add(port)
                elif port.find("A2F") != -1:
                    csv_pin_data["A2F"].add(port)
                # "Associated Clock" is None only when the column is missing
                # from the CSV entirely (DictReader behavior)
                if row["Associated Clock"] is not None:
                    assoc_clk[port] = row["Associated Clock"].strip()

    # Port universe comes from the interface-mapping XML; the CSV only
    # decides which of those ports count as "used"
    port_names = parse_xml(args.xml)
    create_lib(port_names, args.template_data_path, csv_pin_data, args.lib_name, args.lib, args.cell_name, assoc_clk)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
def create_lib(port_names, template_data_path, csv_pin_data, lib_name, lib_file_name, cell_name, assoc_clk):
    """
    Create lib file

    port_names         - dict of sets: all "A2F"/"F2A" port names from the XML
    template_data_path - directory holding template/JSON data files
    csv_pin_data       - dict of sets: ports the user mapped in the CSV
    lib_name           - library name substituted into the header template
    lib_file_name      - path of the output .lib file
    cell_name          - cell name substituted into the header template
    assoc_clk          - dict: port name -> space-separated associated clocks
    """
    # Read header template file and populate lib file with this data
    curr_dir = os.path.dirname(os.path.abspath(__file__))

    # Shared numeric template values (capacitance, transition times, ...)
    common_lib_data = dict()
    common_lib_data_file = os.path.join(template_data_path, "common_lib_data.json")
    with open(common_lib_data_file) as fp:
        common_lib_data = json.load(fp)

    # Generation date is embedded into the header
    today = date.today()
    curr_date = today.strftime("%B %d, %Y")

    # NOTE(review): joining with curr_dir means a relative template path is
    # resolved against this script's directory, not the CWD -- confirm intended
    lib_header_tmpl = os.path.join(template_data_path, "lib_header_template.txt")
    header_templ_file = os.path.join(curr_dir, lib_header_tmpl)
    in_str = open(header_templ_file, "r").read()

    # Fill in the header template placeholders
    curr_str = in_str.replace("{lib_name}", lib_name)
    str_rep_date = curr_str.replace("{curr_date}", curr_date)
    str1 = str_rep_date.replace("{cell_name}", cell_name)

    # Pin lists are index-addressed: each pin lands at the slot given by the
    # numeric index in its name (see port_index())
    a2f_pin_list = [None] * len(port_names["A2F"])
    f2a_pin_list = [None] * len(port_names["F2A"])
    for port in port_names["A2F"]:
        pin_dir = "input"
        # A pin is "used" when it appears in the user's CSV mapping
        pin_used = False
        if port in csv_pin_data["A2F"]:
            pin_used = True

        clk = ""
        if port in assoc_clk:
            if assoc_clk[port].strip() != "":
                clk = assoc_clk[port].strip()

        index = port_index(port)
        if index != -1:
            pin_data = PinData(name=port, dir=pin_dir, used=pin_used, clk=clk)
            a2f_pin_list[index] = pin_data
        else:
            print("ERROR: No index present in A2F port: '{}'".format(port))

    for port in port_names["F2A"]:
        pin_dir = "output"
        pin_used = False
        if port in csv_pin_data["F2A"]:
            pin_used = True

        clk = ""
        if port in assoc_clk:
            if assoc_clk[port].strip() != "":
                clk = assoc_clk[port].strip()

        index = port_index(port)
        if index != -1:
            pin_data = PinData(name=port, dir=pin_dir, used=pin_used, clk=clk)
            f2a_pin_list[index] = pin_data
        else:
            print("ERROR: No index present in F2A port: '{}'\n".format(port))

    # ---- Emit the A2F (input) bus group ----
    lib_data = ""
    a2f_bus_name = ""
    if len(a2f_pin_list) > 0:
        # Bus name is the common prefix before the first "["
        pos = a2f_pin_list[0].name.find("[")
        if pos != -1:
            a2f_bus_name = a2f_pin_list[0].name[0:pos]
        lib_data += "\n{}bus ( {} ) {{\n".format(add_tab(2), a2f_bus_name)
        lib_data += "\n{}bus_type : BUS1536_type1 ;".format(add_tab(3))
        lib_data += "\n{}direction : input ;\n".format(add_tab(3))

    for pin in a2f_pin_list:
        if pin.used:
            curr_str = "\n{}pin ({}) {{".format(add_tab(3), pin.name)
            cap = common_lib_data["input_used"]["cap"]
            max_tran = common_lib_data["input_used"]["max_tran"]
            curr_str += form_pin_header(pin.dir, cap, max_tran)

            # Used inputs get setup/hold arcs against every associated clock
            if pin.clk != "":
                clks = pin.clk.split(" ")
                for clk in clks:
                    clk_name = clk
                    timing_type = ["setup_rising", "hold_rising"]
                    for val in timing_type:
                        curr_str += form_in_timing_group(clk_name, val, common_lib_data)
            curr_str += "\n{}}} /* end of pin {} */\n".format(add_tab(3), pin.name)
            lib_data += curr_str
        else:
            # Unused pins only get the header with "unused" template values
            curr_str = "\n{}pin ({}) {{".format(add_tab(3), pin.name)
            cap = common_lib_data["input_unused"]["cap"]
            max_tran = common_lib_data["input_unused"]["max_tran"]
            curr_str += form_pin_header(pin.dir, cap, max_tran)
            curr_str += "\n{}}} /* end of pin {} */\n".format(add_tab(3), pin.name)
            lib_data += curr_str

    if len(a2f_pin_list) > 0:
        lib_data += "\n{}}} /* end of bus {} */\n".format(add_tab(2), a2f_bus_name)

    # ---- Emit the F2A (output) bus group ----
    f2a_bus_name = ""
    if len(f2a_pin_list) > 0:
        pos = f2a_pin_list[0].name.find("[")
        if pos != -1:
            f2a_bus_name = f2a_pin_list[0].name[0:pos]
        lib_data += "\n{}bus ( {} ) {{\n".format(add_tab(2), f2a_bus_name)
        lib_data += "\n{}bus_type : BUS1536_type1 ;".format(add_tab(3))
        lib_data += "\n{}direction : output ;\n".format(add_tab(3))

    for pin in f2a_pin_list:
        if pin.used:
            curr_str = "\n{}pin ({}) {{".format(add_tab(3), pin.name)
            cap = common_lib_data["output_used"]["cap"]
            max_tran = common_lib_data["output_used"]["max_tran"]
            curr_str += form_pin_header(pin.dir, cap, max_tran)

            # Used outputs get a rising-edge arc per clock plus one reset arc
            if pin.clk != "":
                clks = pin.clk.split(" ")
                for clk in clks:
                    clk_name = clk
                    timing_type = "rising_edge"
                    curr_str += form_out_timing_group(clk_name, timing_type, common_lib_data)
                curr_str += form_out_reset_timing_group("RESET_N", "positive_unate", "clear", common_lib_data)
            curr_str += "\n{}}} /* end of pin {} */\n".format(add_tab(3), pin.name)
            lib_data += curr_str
        else:
            curr_str = "\n{}pin ({}) {{".format(add_tab(3), pin.name)
            cap = common_lib_data["output_unused"]["cap"]
            max_tran = common_lib_data["output_unused"]["max_tran"]
            curr_str += form_pin_header(pin.dir, cap, max_tran)
            curr_str += "\n{}}} /* end of pin {} */\n".format(add_tab(3), pin.name)
            lib_data += curr_str
    if len(f2a_pin_list) > 0:
        lib_data += "\n{}}} /* end of bus {} */\n".format(add_tab(2), f2a_bus_name)

    # Dedicated (non user-mapped) pin data is taken verbatim from a template
    dedicated_pin_tmpl = os.path.join(template_data_path, "dedicated_pin_lib_data.txt")
    dedicated_pin_lib_file = os.path.join(curr_dir, dedicated_pin_tmpl)
    dedicated_pin_data = open(dedicated_pin_lib_file, "r").read()

    inter_str = str1.replace("@dedicated_pin_data@", dedicated_pin_data)

    # Splice generated pin data into the header template and write the file
    final_str = inter_str.replace("@user_pin_data@", lib_data)
    with open(lib_file_name, "w") as out_fp:
        out_fp.write(final_str)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def port_index(port):
    """
    Returns the numeric index from a port name like gfpga_pad_IO_A2F[1248],
    or -1 when the name carries no [index] suffix.
    """
    match = re.fullmatch(r"[a-zA-Z0-9_]*\[(?P<index>[0-9]+)\]$", port)
    return -1 if match is None else int(match.group("index"))
|
||||
|
||||
|
||||
def form_pin_header(direction, cap, max_tran):
    """
    Render the direction / capacitance / max_transition header lines of a
    Liberty pin group.
    """
    indent = add_tab(4)
    header = "\n{}direction : {};".format(indent, direction)
    header += "\n{}capacitance : {};".format(indent, cap)
    header += "\n{}max_transition : {};".format(indent, max_tran)
    return header
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def form_out_reset_timing_group(reset_name, timing_sense, timing_type, common_lib_data):
    """
    Form timing group for output pin when related pin is reset

    reset_name      - name of the related reset pin (e.g. "RESET_N")
    timing_sense    - Liberty timing_sense value (e.g. "positive_unate")
    timing_type     - Liberty timing_type value (e.g. "clear")
    common_lib_data - parsed common_lib_data.json template values
    """
    # Arc values come from the shared JSON template data
    cell_fall_val = common_lib_data["reset_timing"]["cell_fall_val"]
    fall_tran_val = common_lib_data["reset_timing"]["fall_tran_val"]
    curr_str = '\n{}timing () {{\n{}related_pin : "{}";'.format(add_tab(4), add_tab(5), reset_name)
    curr_str += "\n{}timing_sense : {};".format(add_tab(5), timing_sense)
    curr_str += "\n{}timing_type : {};".format(add_tab(5), timing_type)
    # Only falling arcs are emitted for a reset relation
    curr_str += "\n{}cell_fall (scalar) {{\n{}values({});\n{}}}".format(
        add_tab(5), add_tab(6), cell_fall_val, add_tab(5)
    )
    curr_str += "\n{}fall_transition (scalar) {{\n{}values({});\n{}}}".format(
        add_tab(5), add_tab(6), fall_tran_val, add_tab(5)
    )
    curr_str += "\n{}}}".format(add_tab(4))
    return curr_str
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def form_out_timing_group(clk_name, timing_type, common_lib_data):
    """
    Form timing group for output pin in a Pin group in a library file

    clk_name        - name of the related clock pin
    timing_type     - Liberty timing_type value (e.g. "rising_edge")
    common_lib_data - parsed common_lib_data.json template values
    """
    # All four arc values come from the shared JSON template data
    cell_rise_val = common_lib_data["output_timing"]["rising_edge_cell_rise_val"]
    cell_fall_val = common_lib_data["output_timing"]["rising_edge_cell_fall_val"]
    rise_tran_val = common_lib_data["output_timing"]["rising_edge_rise_tran_val"]
    fall_tran_val = common_lib_data["output_timing"]["rising_edge_fall_tran_val"]
    curr_str = '\n{}timing () {{\n{}related_pin : "{}";'.format(add_tab(4), add_tab(5), clk_name)
    curr_str += "\n{}timing_type : {};".format(add_tab(5), timing_type)
    curr_str += "\n{}cell_rise (scalar) {{\n{}values({});\n{}}}".format(
        add_tab(5), add_tab(6), cell_rise_val, add_tab(5)
    )
    curr_str += "\n{}rise_transition (scalar) {{\n{}values({});\n{}}}".format(
        add_tab(5), add_tab(6), rise_tran_val, add_tab(5)
    )
    curr_str += "\n{}cell_fall (scalar) {{\n{}values({});\n{}}}".format(
        add_tab(5), add_tab(6), cell_fall_val, add_tab(5)
    )
    curr_str += "\n{}fall_transition (scalar) {{\n{}values({});\n{}}}".format(
        add_tab(5), add_tab(6), fall_tran_val, add_tab(5)
    )
    curr_str += "\n{}}}".format(add_tab(4))
    return curr_str
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def add_tab(num_tabs):
    """
    Return a string consisting of the given number of tab characters.
    """
    return "\t" * num_tabs
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def form_in_timing_group(clk_name, timing_type, common_lib_data):
    """
    Form timing group for input pin in a Pin group in a library file

    clk_name        - name of the related clock pin
    timing_type     - "setup_rising" or "hold_rising"
    common_lib_data - parsed common_lib_data.json template values
    """
    rise_constraint_val = "0.0"
    fall_constraint_val = "0.0"
    # Pick template constraint values matching the requested arc kind;
    # anything other than "setup_rising" is treated as "hold_rising"
    if timing_type == "setup_rising":
        rise_constraint_val = common_lib_data["input_timing"]["setup_rising_rise_constraint_val"]
        fall_constraint_val = common_lib_data["input_timing"]["setup_rising_fall_constraint_val"]
    else:
        rise_constraint_val = common_lib_data["input_timing"]["hold_rising_rise_constraint_val"]
        fall_constraint_val = common_lib_data["input_timing"]["hold_rising_fall_constraint_val"]

    curr_str = '\n{}timing () {{\n{}related_pin : "{}";'.format(add_tab(4), add_tab(5), clk_name)
    curr_str += "\n{}timing_type : {};".format(add_tab(5), timing_type)
    curr_str += "\n{}rise_constraint (scalar) {{\n{}values({});\n{}}}".format(
        add_tab(5), add_tab(6), rise_constraint_val, add_tab(5)
    )
    curr_str += "\n{}fall_constraint (scalar) {{\n{}values({});\n{}}}".format(
        add_tab(5), add_tab(6), fall_constraint_val, add_tab(5)
    )
    curr_str += "\n{}}}".format(add_tab(4))
    return curr_str
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def parse_xml(xml_file):
    """
    Parses the given interface-mapping XML file and collects the desired data.

    Returns a defaultdict(set) keyed by "A2F" / "F2A" containing all scalar
    port names found on any side of the device. Exits the process when the
    mandatory 'IO' section is missing.
    """
    parser = ET.XMLParser(resolve_entities=False, strip_cdata=False)
    xml_tree = ET.parse(xml_file, parser)
    xml_root = xml_tree.getroot()

    port_names = defaultdict(set)

    # Get the "IO" section
    xml_io = xml_root.find("IO")
    if xml_io is None:
        print("ERROR: No mandatory 'IO' section defined in 'DEVICE' section")
        sys.exit(1)

    # Each side of the device may contribute ports; absent sides are skipped.
    for side in ("TOP_IO", "BOTTOM_IO", "LEFT_IO", "RIGHT_IO"):
        xml_side = xml_io.find(side)
        if xml_side is not None:
            port_names = parse_xml_io(xml_side, port_names)

    return port_names
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def parse_xml_io(xml_io, port_names):
    """
    Collect mapped pin names from one IO side section into the "A2F"/"F2A"
    buckets of port_names and return the updated dict.
    """
    assert xml_io is not None
    for cell in xml_io.findall("CELL"):
        mapped = cell.get("mapped_name")
        # Expand bus notation into individual scalar pins
        scalar_pins = vec_to_scalar(mapped)
        if "F2A" in mapped:
            port_names["F2A"].update(scalar_pins)
        elif "A2F" in mapped:
            port_names["A2F"].update(scalar_pins)
    return port_names
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def vec_to_scalar(port_name):
    """
    Converts given bus port into a list of its scalar port equivalents.

    A port like "bus[3:1]" expands to ["bus[3]", "bus[2]", "bus[1]"]; the
    range is walked from lsb to msb in whichever direction they are given.
    Non-bus names (or None) are returned unchanged in a one-element list.
    """
    if port_name is None or ":" not in port_name:
        return [port_name]

    lbrace = port_name.find("[")
    rbrace = port_name.find("]")
    if lbrace == -1 or rbrace == -1:
        print(
            'Invalid portname "{}" specified. Bus ports should contain [ ] to specify range'.format(port_name),
            file=sys.stderr,
        )
        sys.exit(1)

    base = port_name[:lbrace]
    lsb_str, _, msb_str = port_name[lbrace + 1 : rbrace].partition(":")
    lsb = int(lsb_str)
    msb = int(msb_str)

    step = -1 if lsb > msb else 1
    return ["{}[{}]".format(base, i) for i in range(lsb, msb + step, step)]
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
# Script entry point: build the device library file from CLI arguments
if __name__ == "__main__":
    main()
|
347
f4pga/utils/quicklogic/pinmap_parse.py
Normal file
347
f4pga/utils/quicklogic/pinmap_parse.py
Normal file
|
@ -0,0 +1,347 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
"""
|
||||
This script parses given interface pin-mapping xml file, stores the pin-mapping
|
||||
information w.r.t. its location in the device. It also generates a template
|
||||
csv file for the end-user of the eFPGA device. User can modify the template
|
||||
csv file and specify the user-defined pin names to the ports defined in the
|
||||
template csv file.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import csv
|
||||
import sys
|
||||
|
||||
from collections import namedtuple
|
||||
|
||||
import lxml.etree as ET
|
||||
|
||||
|
||||
class PinMappingData(object):
    """
    Pin mapping data for IO ports in an eFPGA device.

    port_name - IO port name
    mapped_pin - User-defined pin name mapped to the given port_name
    x - x coordinate corresponding to column number
    y - y coordinate corresponding to row number
    z - z coordinate corresponding to pin index at current x,y location
    """

    def __init__(self, port_name, mapped_pin, x, y, z):
        self.port_name = port_name
        self.mapped_pin = mapped_pin
        self.x = x
        self.y = y
        self.z = z

    def __str__(self):
        return "{Port_name: '%s' mapped_pin: '%s' x: '%s' y: '%s' z: '%s'}" % (
            self.port_name,
            self.mapped_pin,
            self.x,
            self.y,
            self.z,
        )

    # repr() uses the same rendering as str()
    __repr__ = __str__
|
||||
|
||||
|
||||
"""
|
||||
Device properties present in the pin-mapping xml
|
||||
|
||||
name - Device name
|
||||
family - Device family name
|
||||
width - Device width aka number of cells in a row
|
||||
height - Device height aka number of cells in a column
|
||||
z - Number of cells per row/col
|
||||
"""
|
||||
DeviceData = namedtuple("DeviceData", "name family width height z")
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def parse_io(xml_io, port_map, orientation, width, height, z):
    """
    Parse one IO section (TOP_IO/BOTTOM_IO/LEFT_IO/RIGHT_IO) of the pinmap
    XML file.

    Returns a tuple (pins, port_map) where *pins* maps (x, y, z) locations
    to PinMappingData and *port_map* (mutated in place) maps scalar mapped
    pin names to the same objects.

    NOTE(review): io_row / io_col are kept as strings (as read from XML or
    formatted via str()), so the corresponding coordinate in the keys and
    in PinMappingData is a string — confirm downstream consumers expect that.
    """
    assert xml_io is not None

    pins = {}

    # Determine the fixed row (for TOP/BOTTOM) or column (for LEFT/RIGHT).
    # When not given explicitly in the XML, derive it from the device edge.
    io_row = ""
    io_col = ""
    if orientation in ("TOP", "BOTTOM"):
        io_row = xml_io.get("y")
        if io_row is None:
            io_row = str(int(height) - 1) if orientation == "TOP" else "0"
    elif orientation in ("LEFT", "RIGHT"):
        io_col = xml_io.get("x")
        if io_col is None:
            io_col = "0" if orientation == "LEFT" else str(int(width) - 1)

    def span(start, end):
        # Inclusive integer range that walks in either direction.
        # For start == end this yields exactly [start], matching the
        # original descending-range behavior.
        step = 1 if start < end else -1
        return range(start, end + step, step)

    num_z = int(z)

    for xml_cell in xml_io.findall("CELL"):
        port_name = xml_cell.get("port_name")
        mapped_name = xml_cell.get("mapped_name")
        startx = xml_cell.get("startx")
        starty = xml_cell.get("starty")
        endx = xml_cell.get("endx")
        endy = xml_cell.get("endy")

        # Expand a possible bus name into scalar pin names
        scalar_mapped_pins = vec_to_scalar(mapped_name)

        # Build the ordered list of (x, y) locations this cell covers.
        # The original code had four copy-pasted loops (ascending/descending
        # times horizontal/vertical); this collapses them into one.
        if startx is not None and endx is not None:
            locs = [(x, io_row) for x in span(int(startx), int(endx))]
        elif starty is not None and endy is not None:
            locs = [(io_col, y) for y in span(int(starty), int(endy))]
        else:
            locs = []

        # Assign consecutive scalar pins to consecutive (x, y, z) slots
        i = 0
        for x, y in locs:
            for j in range(num_z):
                pins[x, y, j] = PinMappingData(
                    port_name=port_name, mapped_pin=scalar_mapped_pins[i], x=x, y=y, z=j
                )
                port_map[scalar_mapped_pins[i]] = pins[x, y, j]
                i += 1

    return pins, port_map
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def vec_to_scalar(port_name):
    """
    Expand a bus port name such as "A[3:0]" into a list of its scalar bit
    names. Names without a range (including None) are returned unchanged
    inside a single-element list.
    """
    # Non-bus names pass through untouched
    if port_name is None or ":" not in port_name:
        return [port_name]

    open_brace = port_name.find("[")
    close_brace = port_name.find("]")
    if open_brace == -1 or close_brace == -1:
        print(
            'Invalid portname "{}" specified. Bus ports should contain [ ] to specify range'.format(port_name),
            file=sys.stderr,
        )
        sys.exit(1)

    prefix = port_name[:open_brace]
    bus = port_name[open_brace + 1 : close_brace]
    sep = bus.find(":")
    lsb = int(bus[:sep])
    msb = int(bus[sep + 1 :])

    # Walk from lsb towards msb in whichever direction the range runs
    step = -1 if lsb > msb else 1
    return [prefix + "[" + str(idx) + "]" for idx in range(lsb, msb + step, step)]
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def parse_io_cells(xml_root):
    """
    Parse the "IO" section of the pinmapfile.

    Returns a tuple (cells, port_map) where *cells* maps an edge name
    ("TOP"/"BOTTOM"/"LEFT"/"RIGHT") to the pins parsed from that edge and
    *port_map* maps scalar mapped pin names to their PinMappingData.
    """

    cells = {}
    port_map = {}

    # BUGFIX: the original code had trailing commas —
    # `width = (xml_root.get("width"),)` — which made these one-element
    # tuples instead of strings. `int(width)` / `int(height)` inside
    # parse_io() would then raise TypeError whenever a section had no
    # explicit x/y attribute.
    width = xml_root.get("width")
    height = xml_root.get("height")
    io_per_cell = xml_root.get("z")

    # Get the mandatory "IO" section
    xml_io = xml_root.find("IO")
    if xml_io is None:
        print("ERROR: No mandatory 'IO' section defined in 'DEVICE' section")
        sys.exit(1)

    # Parse each edge section that is present
    for tag, orientation in (
        ("TOP_IO", "TOP"),
        ("BOTTOM_IO", "BOTTOM"),
        ("LEFT_IO", "LEFT"),
        ("RIGHT_IO", "RIGHT"),
    ):
        xml_edge = xml_io.find(tag)
        if xml_edge is not None:
            currcells, port_map = parse_io(xml_edge, port_map, orientation, width, height, io_per_cell)
            cells[orientation] = currcells

    return cells, port_map
|
||||
|
||||
|
||||
# ============================================================================
|
||||
|
||||
|
||||
def read_pinmapfile_data(pinmapfile):
    """
    Load and parse a pinmap XML file, returning (io_cells, port_map).
    Exits with an error if any mandatory DEVICE attribute is missing.
    """

    # Read and parse the XML archfile
    parser = ET.XMLParser(resolve_entities=False, strip_cdata=False)
    xml_root = ET.parse(pinmapfile, parser).getroot()

    # All of these attributes are mandatory on the root "DEVICE" element
    for attr in ("name", "family", "width", "height", "z"):
        if xml_root.get(attr) is None:
            print("ERROR: No mandatory attribute '{}' specified in 'DEVICE' section".format(attr))
            sys.exit(1)

    # Parse IO cells
    return parse_io_cells(xml_root)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def generate_pinmap_csv(pinmap_csv_file, io_cells):
    """
    Write the template pinmap CSV file.

    One row is emitted per pin of every IO cell; the user-editable columns
    (GPIO_type, clock association) are left blank for the end user to fill.

    NOTE(review): as in the original, the value written into the
    "port_name" column is pin_obj.mapped_pin and the "mapped_pin" column is
    left empty — preserved as-is; confirm this is intentional.
    """
    fieldnames = [
        "orientation",
        "row",
        "col",
        "pin_num_in_cell",
        "port_name",
        "mapped_pin",
        "GPIO_type",
        "Associated Clock",
        "Clock Edge",
    ]

    with open(pinmap_csv_file, "w", newline="") as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()

        for orientation, pin_map in io_cells.items():
            for pin_obj in pin_map.values():
                writer.writerow(
                    {
                        "orientation": orientation,
                        "row": str(pin_obj.y),
                        "col": str(pin_obj.x),
                        "pin_num_in_cell": str(pin_obj.z),
                        "port_name": pin_obj.mapped_pin,
                    }
                )
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def main():
    """
    Process an interface pin-mapping XML file and generate the template
    CSV file for the end user.
    """
    # Command line
    parser = argparse.ArgumentParser(description="Process interface mapping xml file to generate csv file.")
    parser.add_argument("--pinmapfile", "-p", "-P", type=str, required=True, help="Input pin-mapping XML file")
    parser.add_argument(
        "--csv_file", "-c", "-C", type=str, default="template_pinmap.csv", help="Output template pinmap CSV file"
    )
    args = parser.parse_args()

    # Load the pinmap data, then emit the template CSV
    io_cells, _port_map = read_pinmapfile_data(args.pinmapfile)
    generate_pinmap_csv(args.csv_file, io_cells)
|
||||
|
||||
|
||||
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()
|
495
f4pga/utils/quicklogic/pp3/arch_import.py
Executable file
495
f4pga/utils/quicklogic/pp3/arch_import.py
Executable file
|
@ -0,0 +1,495 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
import os
|
||||
import argparse
|
||||
import pickle
|
||||
from collections import OrderedDict
|
||||
|
||||
import lxml.etree as ET
|
||||
|
||||
from f4pga.utils.quicklogic.pp3.data_structs import ConnectionType, Loc
|
||||
|
||||
from f4pga.utils.quicklogic.pp3.tile_import import make_top_level_pb_type
|
||||
from f4pga.utils.quicklogic.pp3.tile_import import make_top_level_tile
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def is_direct(connection):
    """
    Check whether *connection* joins two tiles directly (not necessarily
    at the same location).
    """
    return (
        connection.src.type == ConnectionType.TILE
        and connection.dst.type == ConnectionType.TILE
        and connection.is_direct is True
    )
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def add_segment(xml_parent, segment):
    """
    Append a <segment> element describing *segment* under *xml_parent*.
    All segments are currently emitted as bidirectional.
    """

    segment_type = "bidir"

    xml_seg = ET.SubElement(
        xml_parent,
        "segment",
        {
            "name": segment.name,
            "length": str(segment.length),
            "freq": "1.0",
            "type": segment_type,
            "Rmetal": str(segment.r_metal),
            "Cmetal": str(segment.c_metal),
        },
    )

    # Switch references depend on the segment directionality
    if segment_type == "unidir":
        ET.SubElement(xml_seg, "mux", {"name": "generic"})
    elif segment_type == "bidir":
        ET.SubElement(xml_seg, "wire_switch", {"name": "generic"})
        ET.SubElement(xml_seg, "opin_switch", {"name": "generic"})
    else:
        assert False, segment_type

    # Connectivity patterns: switchbox points at length+1 positions and
    # connection-block points at length positions, all enabled.
    sb = ET.SubElement(xml_seg, "sb", {"type": "pattern"})
    sb.text = " ".join(["1"] * (segment.length + 1))
    cb = ET.SubElement(xml_seg, "cb", {"type": "pattern"})
    cb.text = " ".join(["1"] * segment.length)
|
||||
|
||||
|
||||
def add_switch(xml_parent, switch):
    """
    Append a <switch> element describing *switch* under *xml_parent*.
    """
    attrs = {
        "type": switch.type,
        "name": switch.name,
        "R": str(switch.r),
        "Cin": str(switch.c_in),
        "Cout": str(switch.c_out),
        "Tdel": str(switch.t_del),
    }
    xml_switch = ET.SubElement(xml_parent, "switch", attrs)

    # Only muxes and tristates carry an internal capacitance
    if switch.type in ["mux", "tristate"]:
        xml_switch.attrib["Cinternal"] = str(switch.c_int)
|
||||
|
||||
|
||||
def initialize_arch(xml_arch, switches, segments):
    """
    Build the static portion of the architecture definition from scratch:
    the "device", "switchlist" and "segmentlist" sections.
    """

    # .................................
    # Device description
    xml_device = ET.SubElement(xml_arch, "device")

    ET.SubElement(
        xml_device,
        "sizing",
        {
            "R_minW_nmos": "6000.0",
            "R_minW_pmos": "18000.0",
        },
    )
    ET.SubElement(xml_device, "area", {"grid_logic_tile_area": "15000.0"})

    xml_chan = ET.SubElement(xml_device, "chan_width_distr")
    ET.SubElement(xml_chan, "x", {"distr": "uniform", "peak": "1.0"})
    ET.SubElement(xml_chan, "y", {"distr": "uniform", "peak": "1.0"})

    ET.SubElement(xml_device, "connection_block", {"input_switch_name": "generic"})
    ET.SubElement(
        xml_device,
        "switch_block",
        {
            "type": "wilton",
            "fs": "3",
        },
    )
    ET.SubElement(
        xml_device,
        "default_fc",
        {
            "in_type": "frac",
            "in_val": "1.0",
            "out_type": "frac",
            "out_val": "1.0",
        },
    )

    # .................................
    # Switchlist — the "generic" switch must be among the switches
    xml_switchlist = ET.SubElement(xml_arch, "switchlist")
    for switch in switches:
        add_switch(xml_switchlist, switch)

    assert any(switch.name == "generic" for switch in switches)

    # .................................
    # Segmentlist
    xml_seglist = ET.SubElement(xml_arch, "segmentlist")
    for segment in segments:
        add_segment(xml_seglist, segment)
|
||||
|
||||
|
||||
def write_tiles(xml_arch, arch_tile_types, tile_types, equivalent_sites):
    """
    Populate the "tiles" section of the architecture file with one
    top-level tile per arch tile type, creating the section if absent.
    """
    xml_tiles = xml_arch.find("tiles")
    if xml_tiles is None:
        xml_tiles = ET.SubElement(xml_arch, "tiles")

    for tile_type, sub_tiles in arch_tile_types.items():
        xml_tiles.append(make_top_level_tile(tile_type, sub_tiles, tile_types, equivalent_sites))
|
||||
|
||||
|
||||
def write_pb_types(xml_arch, arch_pb_types, tile_types, nsmap):
    """
    Populate the "complexblocklist" section with one top-level pb_type
    per arch tile type, creating the section if absent.
    """
    xml_cplx = xml_arch.find("complexblocklist")
    if xml_cplx is None:
        xml_cplx = ET.SubElement(xml_arch, "complexblocklist")

    for pb_type in arch_pb_types:
        xml_cplx.append(make_top_level_pb_type(tile_types[pb_type], nsmap))
|
||||
|
||||
|
||||
def write_models(xml_arch, arch_models, nsmap):
    """
    Populate the "models" section with XInclude references to per-cell
    model XML files, creating the section if absent.
    """
    xml_models = xml_arch.find("models")
    if xml_models is None:
        xml_models = ET.SubElement(xml_arch, "models")

    # Namespaced tag name for <xi:include>
    xi_include = "{{{}}}include".format(nsmap["xi"])

    for model in arch_models:
        name = model.lower()

        # Prefer a model file in the current directory; fall back to the
        # shared "primitives" tree otherwise.
        model_file = "./{}.model.xml".format(name)
        if not os.path.isfile(model_file):
            model_file = "../../primitives/{}/{}.model.xml".format(name, name)

        ET.SubElement(
            xml_models,
            xi_include,
            {
                "href": model_file,
                "xpointer": "xpointer(models/child::node())",
            },
        )
||||
|
||||
|
||||
def write_tilegrid(xml_arch, arch_tile_grid, loc_map, layout_name):
    """
    Emit the "layout" section of the arch XML (replacing any existing
    one) from the flattened arch tile grid. Each occupied location gets a
    <single> entry; FASM prefixes are attached as metadata where the
    location maps back to a physical grid location.
    """

    # Drop a pre-existing layout, if present
    old_layout = xml_arch.find("layout")
    if old_layout is not None:
        xml_arch.remove(old_layout)

    # Grid dimensions (max coordinate + 1)
    w = 1 + max(flat_loc[0] for flat_loc in arch_tile_grid)
    h = 1 + max(flat_loc[1] for flat_loc in arch_tile_grid)

    # Fixed layout container
    xml_layout = ET.SubElement(xml_arch, "layout")
    xml_fixed = ET.SubElement(
        xml_layout,
        "fixed_layout",
        {
            "name": layout_name,
            "width": str(w),
            "height": str(h),
        },
    )

    # Emit one <single> per occupied grid location
    for flat_loc, tile in arch_tile_grid.items():
        if tile is None:
            continue

        tile_type, capacity = tile

        xml_sing = ET.SubElement(
            xml_fixed,
            "single",
            {
                "type": "TL-{}".format(tile_type.upper()),
                "x": str(flat_loc[0]),
                "y": str(flat_loc[1]),
                "priority": str(10),  # Not sure if we need this
            },
        )

        # FASM prefixes, one per sub-tile (z), taken from the physical grid
        metadata = []
        for sub_tile_z in range(capacity):
            vpr_loc = Loc(x=flat_loc[0], y=flat_loc[1], z=sub_tile_z)
            if vpr_loc in loc_map.bwd:
                phy_loc = loc_map.bwd[vpr_loc]
                metadata.append("X{}Y{}".format(phy_loc.x, phy_loc.y))

        if metadata:
            xml_metadata = ET.SubElement(xml_sing, "metadata")
            xml_meta = ET.SubElement(
                xml_metadata,
                "meta",
                {
                    "name": "fasm_prefix",
                },
            )
            xml_meta.text = " ".join(metadata)
|
||||
|
||||
|
||||
def write_direct_connections(xml_arch, tile_grid, connections):
    """
    Emit the "directlist" section describing all direct tile-to-tile
    connections, replacing any existing section.
    """

    def get_tile(ep):
        """Return the tile under the endpoint's location, or None."""
        tile = tile_grid.get(ep.loc)
        if tile is None:
            print("ERROR: No tile found for the connection endpoint", ep)
        return tile

    # Replace a pre-existing "directlist" section
    old = xml_arch.find("directlist")
    if old is not None:
        xml_arch.remove(old)
    xml_directlist = ET.SubElement(xml_arch, "directlist")

    # Emit one <direct> per direct tile-to-tile connection
    for connection in [c for c in connections if is_direct(c)]:
        src_tile = get_tile(connection.src)
        dst_tile = get_tile(connection.dst)
        if not src_tile or not dst_tile:
            continue

        src_name = "TL-{}.{}".format(src_tile.type, connection.src.pin)
        dst_name = "TL-{}.{}".format(dst_tile.type, connection.dst.pin)

        src_loc = connection.src.loc
        dst_loc = connection.dst.loc

        name = "{}_at_X{}Y{}Z{}_to_{}_at_X{}Y{}Z{}".format(
            src_name,
            src_loc.x,
            src_loc.y,
            src_loc.z,
            dst_name,
            dst_loc.x,
            dst_loc.y,
            dst_loc.z,
        )

        # Offsets are destination-relative-to-source
        ET.SubElement(
            xml_directlist,
            "direct",
            {
                "name": name,
                "from_pin": src_name,
                "to_pin": dst_name,
                "x_offset": str(dst_loc.x - src_loc.x),
                "y_offset": str(dst_loc.y - src_loc.y),
                "z_offset": str(dst_loc.z - src_loc.z),
            },
        )
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def main():
    """
    Generate the VPR architecture XML for a QuickLogic device from a
    pickled VPR database.
    """

    # Command line
    parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("--vpr-db", type=str, required=True, help="VPR database file")
    parser.add_argument("--arch-out", type=str, default="arch.xml", help="Output arch XML file (def. arch.xml)")
    parser.add_argument("--device", type=str, default="quicklogic", help="Device name for the architecture")
    args = parser.parse_args()

    # XInclude namespace setup
    xi_url = "http://www.w3.org/2001/XInclude"
    ET.register_namespace("xi", xi_url)
    nsmap = {"xi": xi_url}

    # Load data from the database
    with open(args.vpr_db, "rb") as fp:
        db = pickle.load(fp)

    loc_map = db["loc_map"]
    vpr_tile_types = db["vpr_tile_types"]
    vpr_tile_grid = db["vpr_tile_grid"]
    vpr_equivalent_sites = db["vpr_equivalent_sites"]
    segments = db["segments"]
    switches = db["switches"]
    connections = db["connections"]

    # Flatten the VPR tile grid: (x, y) -> {z: tile_type}
    flat_tile_grid = dict()
    for vpr_loc, tile in vpr_tile_grid.items():
        slot = flat_tile_grid.setdefault((vpr_loc.x, vpr_loc.y), {})
        if tile is not None:
            slot[vpr_loc.z] = tile.type

    # Build the arch tile grid, arch tile types, pb_type and model sets
    arch_tile_grid = dict()
    arch_tile_types = dict()
    arch_pb_types = set()
    arch_models = set()

    for flat_loc, tiles in flat_tile_grid.items():

        if not tiles:
            # An empty location
            arch_tile_grid[flat_loc] = None
            continue

        # Group identical sub-tiles together, maintain their order
        sub_tiles = OrderedDict()
        for tile in tiles.values():
            sub_tiles[tile] = sub_tiles.get(tile, 0) + 1

        # TODO: Make arch tile type name
        tile_type = tiles[0]

        # Create the tile type with sub tile types for the arch
        arch_tile_types[tile_type] = sub_tiles

        # Each sub-tile becomes a top-level pb_type and contributes its
        # cell types to the model list
        for tile in sub_tiles:
            arch_pb_types.add(tile)
            for cell_type in vpr_tile_types[tile].cells.keys():
                arch_models.add(cell_type)

        # Add the arch tile type to the arch tile grid
        arch_tile_grid[flat_loc] = (
            tile_type,
            len(tiles),
        )

    # Initialize the arch XML from scratch
    xml_arch = ET.Element("architecture", nsmap=nsmap)
    initialize_arch(xml_arch, switches, segments)

    # Tiles, pb_types and models
    write_tiles(xml_arch, arch_tile_types, vpr_tile_types, vpr_equivalent_sites)
    write_pb_types(xml_arch, arch_pb_types, vpr_tile_types, nsmap)
    write_models(xml_arch, arch_models, nsmap)

    # Tile grid and direct connections
    write_tilegrid(xml_arch, arch_tile_grid, loc_map, args.device)
    write_direct_connections(xml_arch, vpr_tile_grid, connections)

    # Save the arch
    ET.ElementTree(xml_arch).write(args.arch_out, pretty_print=True, xml_declaration=True, encoding="utf-8")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()
|
489
f4pga/utils/quicklogic/pp3/connections.py
Normal file
489
f4pga/utils/quicklogic/pp3/connections.py
Normal file
|
@ -0,0 +1,489 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
"""
|
||||
Utility functions for making hop connections between switchboxes and locat
|
||||
switchbox - tile connections.
|
||||
"""
|
||||
import re
|
||||
|
||||
import sys
|
||||
|
||||
# NOTE: removed a leftover debug statement — print("PYTHONPATH: ...") —
# that dumped sys.path to stdout on every import of this module.
|
||||
from f4pga.utils.quicklogic.pp3.data_structs import (
|
||||
SwitchboxPinType,
|
||||
Loc,
|
||||
OPPOSITE_DIRECTION,
|
||||
Connection,
|
||||
ConnectionLoc,
|
||||
ConnectionType,
|
||||
PinDirection,
|
||||
)
|
||||
from f4pga.utils.quicklogic.pp3.utils import find_cell_in_tile
|
||||
|
||||
# =============================================================================
|
||||
|
||||
# A regex for regular HOP wires, e.g. "H2R1": orientation (H/V), length
# digit, direction (T/B/L/R) and index digit. Note: both numeric fields
# match a single digit only.
RE_HOP_WIRE = re.compile(r"^([HV])([0-9])([TBLR])([0-9])")

# A regex for wire names carrying a HOP offset suffix, e.g. "RAM_A[5]_T2":
# base name, hop direction (T/B/L/R) and hop length.
RE_HOP = re.compile(r"^([\[\]A-Za-z0-9_]+)_([TBLR])([0-9]+)$")
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def hop_to_str(offset):
    """
    Format a two-character string that uniquely identifies the hop offset.

    >>> hop_to_str([-3, 0])
    'L3'
    >>> hop_to_str([1, 0])
    'R1'
    >>> hop_to_str([0, -2])
    'T2'
    >>> hop_to_str([0, +7])
    'B7'
    """
    dx = offset[0]
    dy = offset[1]

    # Zero offsets are not allowed
    assert dx != 0 or dy != 0, offset

    # Diagonal offsets are not allowed
    if dx != 0:
        assert dy == 0, offset
    if dy != 0:
        assert dx == 0, offset

    # Horizontal hop
    if dx != 0:
        return ("R" if dx > 0 else "L") + str(abs(dx))

    # Vertical hop
    if dy != 0:
        return ("B" if dy > 0 else "T") + str(abs(dy))

    # Should not happen
    assert False, offset
|
||||
|
||||
|
||||
def get_name_and_hop(name):
    """
    Split a hop wire name into (base_name, (hop_x, hop_y)).

    For names without a hop suffix returns (name, None). The hop offset is
    defined from the input (destination) perspective.

    >>> get_name_and_hop("WIRE")
    ('WIRE', None)
    >>> get_name_and_hop("V4T0_B3")
    ('V4T0', (0, 3))
    >>> get_name_and_hop("H2R1_L1")
    ('H2R1', (-1, 0))
    >>> get_name_and_hop("RAM_A[5]_T2")
    ('RAM_A[5]', (0, -2))
    """

    # Names without a hop suffix pass through
    match = RE_HOP.match(name)
    if match is None:
        return name, None

    base, direction, length_str = match.groups()

    # Only these hop lengths are legal
    length = int(length_str)
    assert length in [1, 2, 3, 4], (name, length)

    # Map the direction letter onto an (x, y) offset
    vectors = {
        "T": (0, -length),
        "B": (0, +length),
        "L": (-length, 0),
        "R": (+length, 0),
    }
    assert direction in vectors, (name, direction)

    return base, vectors[direction]
|
||||
|
||||
|
||||
def is_regular_hop_wire(name):
    """
    Check whether *name* denotes a regular HOP wire, with sanity checks on
    the wire length and orientation/direction consistency.

    >>> is_regular_hop_wire("H1R5")
    True
    >>> is_regular_hop_wire("V4B7")
    True
    >>> is_regular_hop_wire("WIRE")
    False
    >>> is_regular_hop_wire("MULT_Addr[17]_R3")
    False
    """

    match = RE_HOP_WIRE.match(name)
    if not match:
        return False

    orientation = match.group(1)
    direction = match.group(3)

    # Only these wire lengths exist
    assert int(match.group(2)) in [1, 2, 4], name

    # Horizontal wires run left/right, vertical ones top/bottom
    if orientation == "H":
        assert direction in ["L", "R"], name
    if orientation == "V":
        assert direction in ["T", "B"], name

    return True
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def build_tile_connections(tile_types, tile_grid, switchbox_types, switchbox_grid):
    """
    Build local and foreign connections between all switchboxes and tiles.

    For each switchbox pin of type LOCAL the target tile is at the same
    location; for FOREIGN pins the target location is derived from the hop
    suffix encoded in the pin name. Unresolvable pins are reported with a
    warning and skipped. Returns a list of Connection objects.
    """
    connections = []

    # Process switchboxes
    for loc, switchbox_type in switchbox_grid.items():
        switchbox = switchbox_types[switchbox_type]

        # Get pins that connect to tiles (HOP pins are handled elsewhere)
        sbox_pins = [pin for pin in switchbox.pins if pin.type in [SwitchboxPinType.LOCAL, SwitchboxPinType.FOREIGN]]

        for sbox_pin in sbox_pins:
            tile = None

            # A local connection — tile is at the switchbox location
            if sbox_pin.type == SwitchboxPinType.LOCAL:
                pin_name = sbox_pin.name
                tile_loc = loc

            # A foreign connection — tile location comes from the hop suffix
            elif sbox_pin.type == SwitchboxPinType.FOREIGN:

                # Get the hop offset
                pin_name, hop = get_name_and_hop(sbox_pin.name)
                assert hop is not None, sbox_pin

                tile_loc = Loc(x=loc.x + hop[0], y=loc.y + hop[1], z=loc.z)

            # Get the tile; skip (with a warning) if the location is empty
            if tile_loc not in tile_grid:
                print("WARNING: No tile at loc '{}' for pin '{}'".format(tile_loc, sbox_pin.name))
                continue

            tile = tile_types[tile_grid[tile_loc].type]

            # Find the opposite-direction pin in the tile; for/else leaves
            # tile_pin = None when no pin matched.
            for pin in tile.pins:
                if pin.direction == OPPOSITE_DIRECTION[sbox_pin.direction]:

                    # Check if the pin name refers to the full tile pin name
                    # ie. with the cell name.
                    if pin.name == pin_name:
                        tile_pin = pin
                        break

                    # Split the pin name into cell name + pin name, check only
                    # if the latter matches.
                    # NOTE(review): raises ValueError if pin.name contains no
                    # "_" — confirm all tile pin names are CELL_PIN prefixed.
                    cell, name = pin.name.split("_", maxsplit=1)
                    if name == pin_name:
                        tile_pin = pin
                        break
            else:
                tile_pin = None

            # Pin not found — report and skip
            if tile_pin is None:
                print(
                    "WARNING: No pin in tile at '{}' found for switchbox pin '{}' of '{}' at '{}'".format(
                        tile_loc, sbox_pin.name, switchbox.type, loc
                    )
                )
                continue

            # Add the connection, oriented by the switchbox pin direction
            src = ConnectionLoc(
                loc=loc,
                pin=sbox_pin.name,
                type=ConnectionType.SWITCHBOX,
            )
            dst = ConnectionLoc(
                loc=tile_loc,
                pin=tile_pin.name,
                type=ConnectionType.TILE,
            )

            # NOTE(review): assumes sbox_pin.direction is always INPUT or
            # OUTPUT; otherwise `connection` would be unbound here.
            if sbox_pin.direction == PinDirection.OUTPUT:
                connection = Connection(src=src, dst=dst, is_direct=False)
            if sbox_pin.direction == PinDirection.INPUT:
                connection = Connection(src=dst, dst=src, is_direct=False)

            connections.append(connection)

    return connections
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def build_hop_connections(switchbox_types, switchbox_grid):
    """
    Builds HOP connections between switchboxes.

    For each HOP input of each switchbox, the switchbox at the hop offset is
    located and its matching output is connected to that input. Returns a
    list of Connection objects.
    """
    connections = []

    # Bounding box of the switchbox grid
    all_x = {loc.x for loc in switchbox_grid}
    all_y = {loc.y for loc in switchbox_grid}
    grid_min = Loc(min(all_x), min(all_y), 0)
    grid_max = Loc(max(all_x), max(all_y), 0)

    # Identify all connections that go out of switchboxes. Only HOP inputs
    # need to be scanned - every output should land on some HOP input.
    for dst_loc, dst_switchbox_type in switchbox_grid.items():
        dst_switchbox = switchbox_types[dst_switchbox_type]

        for dst_pin in dst_switchbox.inputs.values():
            if dst_pin.type != SwitchboxPinType.HOP:
                continue

            # Parse the name and determine the hop offset. Skip non-hop wires.
            hop_name, hop_ofs = get_name_and_hop(dst_pin.name)
            if hop_ofs is None:
                continue

            # Reject hops that would leave the FPGA grid.
            src_loc = Loc(dst_loc.x + hop_ofs[0], dst_loc.y + hop_ofs[1], 0)
            if not (grid_min.x <= src_loc.x <= grid_max.x):
                continue
            if not (grid_min.y <= src_loc.y <= grid_max.y):
                continue

            # There has to be a switchbox at the source location
            if src_loc not in switchbox_grid:
                print(
                    "WARNING: No switchbox at '{}' for input '{}' of switchbox '{}' at '{}'".format(
                        src_loc, dst_pin.name, dst_switchbox_type, dst_loc
                    )
                )
                continue

            src_switchbox_type = switchbox_grid[src_loc]
            src_switchbox = switchbox_types[src_switchbox_type]

            # There must be exactly one matching output pin in that switchbox
            src_pins = [pin for pin in src_switchbox.outputs.values() if pin.name == hop_name]
            if len(src_pins) != 1:
                print(
                    "WARNING: No output pin '{}' in switchbox '{}'"
                    " at '{}' for input '{}' of switchbox '{}' at '{}'".format(
                        hop_name, src_switchbox_type, src_loc, dst_pin.name, dst_switchbox_type, dst_loc
                    )
                )
                continue

            # Add the switchbox-to-switchbox connection
            connections.append(
                Connection(
                    src=ConnectionLoc(
                        loc=src_loc,
                        pin=src_pins[0].name,
                        type=ConnectionType.SWITCHBOX,
                    ),
                    dst=ConnectionLoc(
                        loc=dst_loc,
                        pin=dst_pin.name,
                        type=ConnectionType.SWITCHBOX,
                    ),
                    is_direct=False,
                )
            )

    return connections
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def find_clock_cell(alias, tile_grid):
    """
    Scans the tile grid for a cell of type "CLOCK" whose alias matches the
    given one.

    Returns a (loc, tile, cell) triple on success, (None, None, None)
    otherwise.
    """
    for location, tile in tile_grid.items():

        # Empty grid entry
        if tile is None:
            continue

        # Look through the tile's "CLOCK" cells for a matching alias
        for clock_cell in (c for c in tile.cells if c.type == "CLOCK"):
            if clock_cell.alias == alias:
                return location, tile, clock_cell

    return None, None, None
|
||||
|
||||
|
||||
def build_gmux_qmux_connections(tile_types, tile_grid, switchbox_types, switchbox_grid, clock_cells):
    """
    Builds connections between clock cells (GMUX / QMUX / CAND) and their
    drivers, following the pin maps stored in the clock cells.

    Returns a list of Connection objects.

    NOTE(review): the parameters switchbox_types and switchbox_grid are not
    referenced in this body - presumably kept for signature symmetry with the
    other build_* functions; confirm before removing.
    """

    # Define names of all global clock wires.
    # Each global clock mux has an implicitly defined output equal to its name.
    clock_wires = list(clock_cells.keys())

    # GMUX "IP" inputs are global clock wires too
    for clock_mux in clock_cells.values():
        if clock_mux.type == "GMUX":
            clock_wires.append(clock_mux.pin_map["IP"])

    # Connections between clock cells (muxes)
    connections = []
    for clock_cell in clock_cells.values():

        for pin_name, pin_conn in clock_cell.pin_map.items():

            # Destination pin name. Treat CAND and QMUX destinations
            # differently as there are going to be no tiles for them.
            if clock_cell.type in ["CAND", "QMUX"]:
                dst_type = ConnectionType.CLOCK
                dst_pin_name = "{}.{}".format(clock_cell.name, pin_name)

            else:
                # A regular cell - resolve it in the tile grid and build the
                # "<TYPE><index>_<pin>" tile pin name.
                dst_tile = tile_grid[clock_cell.loc]
                dst_cell = find_cell_in_tile(clock_cell.name, dst_tile)
                dst_type = ConnectionType.TILE

                dst_pin_name = "{}{}_{}".format(dst_cell.type, dst_cell.index, pin_name)

            # This pin connects to a global clock wire
            if pin_conn in clock_wires:

                # Get the other cell
                other_cell = clock_cells.get(pin_conn, None)

                # Not found in the clock cells. Probably it is the CLOCK cell,
                # try finding it by its name / alias.
                if other_cell is None:

                    src_loc, src_tile, src_cell = find_clock_cell(pin_conn, tile_grid)

                    # Didn't find the cell
                    if src_cell is None:
                        print("WARNING: No source cell for global clock wire '{}'".format(pin_conn))
                        continue

                    # Connect to the CLOCK cell's "IC" output
                    src_type = ConnectionType.TILE
                    src_pin_name = "{}{}_{}".format(src_cell.type, src_cell.index, "IC")

                    is_direct = True

                # Connect to the other cell
                else:
                    src_loc = other_cell.loc

                    # Source pin name. Treat CAND and QMUX differently as there
                    # are going to be no tiles for them.
                    if other_cell.type in ["CAND", "QMUX"]:
                        src_type = ConnectionType.CLOCK
                        src_pin_name = "{}.{}".format(other_cell.name, "IZ")

                    else:
                        # Regular cell - take its "IZ" output in the tile
                        src_tile = tile_grid[other_cell.loc]
                        src_cell = find_cell_in_tile(other_cell.name, src_tile)
                        src_type = ConnectionType.TILE

                        src_pin_name = "{}{}_{}".format(src_cell.type, src_cell.index, "IZ")

                    is_direct = False

                # Make the connection
                connections.append(
                    Connection(
                        src=ConnectionLoc(loc=src_loc, pin=src_pin_name, type=src_type),
                        dst=ConnectionLoc(loc=clock_cell.loc, pin=dst_pin_name, type=dst_type),
                        is_direct=is_direct,
                    )
                )

    return connections
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def build_connections(tile_types, tile_grid, switchbox_types, switchbox_grid, clock_cells):
    """
    Builds a connection map between switchboxes in the grid and between
    switchboxes and underlying tiles.

    The result is the concatenation of three partial maps: local/foreign
    tile connections, HOP connections and GMUX/QMUX clock connections.
    """
    return (
        # Local and foreign tile connections
        build_tile_connections(tile_types, tile_grid, switchbox_types, switchbox_grid)
        # HOP connections
        + build_hop_connections(switchbox_types, switchbox_grid)
        # GMUX and QMUX connections
        + build_gmux_qmux_connections(tile_types, tile_grid, switchbox_types, switchbox_grid, clock_cells)
    )
|
||||
|
||||
|
||||
def check_connections(connections):
    """
    Check if all connections are sane.

    - All connections should be point-to-point. No fanin/fanouts.

    Raises an AssertionError after reporting every duplicated destination.
    """
    seen_destinations = set()
    has_duplicates = False

    # Verify that no two connections share the same destination endpoint
    for conn in connections:
        if conn.dst in seen_destinations:
            has_duplicates = True
            print("ERROR: Duplicate destination '{}'".format(conn.dst))
        seen_destinations.add(conn.dst)

    # Fail hard when any duplicate was found
    assert has_duplicates is False
|
675
f4pga/utils/quicklogic/pp3/create_default_fasm.py
Executable file
675
f4pga/utils/quicklogic/pp3/create_default_fasm.py
Executable file
|
@ -0,0 +1,675 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
"""
|
||||
This utility generates a FASM file with a default bitstream configuration for
|
||||
the given device.
|
||||
"""
|
||||
import argparse
|
||||
import colorsys
|
||||
from enum import Enum
|
||||
|
||||
import lxml.etree as ET
|
||||
|
||||
from f4pga.utils.quicklogic.pp3.data_structs import PinDirection, SwitchboxPinType
|
||||
from f4pga.utils.quicklogic.pp3.data_import import import_data
|
||||
from f4pga.utils.quicklogic.pp3.utils import yield_muxes
|
||||
from f4pga.utils.quicklogic.pp3.switchbox_model import SwitchboxModel
|
||||
|
||||
# =============================================================================
|
||||
# Map of (switchbox_type, stage_id, switch_id, mux_id) -> list of pin ids
# that appear more than once for the same mux location. Populated by the
# "Scan for duplicates" pass in main() and consulted in
# SwitchboxConfigBuilder._build_nodes() to skip duplicated mux inputs.
duplicate = {}
|
||||
|
||||
|
||||
class SwitchboxConfigBuilder:
    """
    This class is responsible for routing a switchbox according to the
    requested parameters and writing FASM features that configure it.

    The switchbox is modeled as a graph of Node objects grouped by stage
    type (self.nodes is a dict: stage type -> dict of key -> Node). Input
    pins become SOURCE nodes, output pins become SINK nodes and every mux
    becomes a MUX node keyed by (stage_id, switch_id, mux_id).
    """

    class NodeType(Enum):
        # A switchbox mux output
        MUX = 0
        # A virtual node for a switchbox input pin
        SOURCE = 1
        # A virtual node for a switchbox output pin
        SINK = 2

    class Node:
        """
        Represents a graph node that corresponds either to a switchbox mux
        output or to a virtual source / sink node.
        """

        def __init__(self, type, key):
            # NodeType of this node
            self.type = type
            # Pin name (SOURCE/SINK) or (stage_id, switch_id, mux_id) (MUX)
            self.key = key

            # Current "net" (name of the input pin propagated to this node)
            self.net = None

            # Mux input ids indexed by keys and mux selection
            self.inp = {}
            self.sel = None

            # Mux inputs driven by this node as keys
            self.out = set()

    def __init__(self, switchbox):
        # The switchbox definition to be routed
        self.switchbox = switchbox
        # stage type -> {node key: Node}
        self.nodes = {}

        # Build nodes representing the switchbox connectivity graph
        self._build_nodes()

    def _build_nodes(self):
        """
        Creates all nodes for routing.

        Builds one MUX node per mux, one SOURCE/SINK node per input/output
        pin (per stage type it appears in) and wires up their inp/out
        references according to pin locations and mux-to-mux connections.
        """

        # Create all mux nodes
        for stage, switch, mux in yield_muxes(self.switchbox):

            # Create the node
            key = (stage.id, switch.id, mux.id)
            node = self.Node(self.NodeType.MUX, key)

            # Store the node under its stage type
            if stage.type not in self.nodes:
                self.nodes[stage.type] = {}

            assert node.key not in self.nodes[stage.type], (stage.type, node.key)
            self.nodes[stage.type][node.key] = node

        # Create all source and sink nodes, populate their connections with mux
        # nodes.
        for pin in self.switchbox.pins:

            # Node type
            if pin.direction == PinDirection.INPUT:
                node_type = self.NodeType.SOURCE
            elif pin.direction == PinDirection.OUTPUT:
                node_type = self.NodeType.SINK
            else:
                # NOTE(review): node_type is unbound on this path, so the
                # assert message itself would raise NameError - confirm
                # whether pin.direction was intended here.
                assert False, node_type

            # Create one for each stage type the pin appears in
            stage_ids = set([loc.stage_id for loc in pin.locs])
            for stage_id in stage_ids:

                # Create the node
                key = pin.name
                node = self.Node(node_type, key)

                # Initially annotate source nodes with net names
                if node.type == self.NodeType.SOURCE:
                    node.net = pin.name

                # Get the correct node list
                stage_type = self.switchbox.stages[stage_id].type
                assert stage_type in self.nodes, stage_type
                nodes = self.nodes[stage_type]

                # Add the node
                # NOTE(review): this checks self.nodes (keyed by stage type),
                # not the per-stage "nodes" dict - likely intended
                # "node.key not in nodes"; kept as-is.
                assert node.key not in self.nodes, node.key
                nodes[node.key] = node

            # Populate connections
            for pin_loc in pin.locs:

                # Get the correct node list
                stage_type = self.switchbox.stages[pin_loc.stage_id].type
                assert stage_type in self.nodes, stage_type
                nodes = self.nodes[stage_type]

                if pin.direction == PinDirection.INPUT:

                    # Get the mux node
                    key = (pin_loc.stage_id, pin_loc.switch_id, pin_loc.mux_id)
                    assert key in nodes, key
                    node = nodes[key]

                    # Skip inputs flagged as duplicates by the global scan
                    # (see the module-level "duplicate" map), except for TOP
                    # interface switchboxes.
                    key = (self.switchbox.type, pin_loc.stage_id, pin_loc.switch_id, pin_loc.mux_id)
                    if (
                        key in duplicate  # Mux has multiple inputs selected
                        and (pin_loc.pin_id in duplicate[key])  # Current selection is duplicate
                        and not (key[0].startswith("SB_TOP_IFC"))
                    ):  # Ignore TOP switchboxes
                        print("Warning: duplicate: {} - {}".format(key, pin_loc.pin_id))
                        continue

                    # Append reference to the input pin to the node
                    key = pin.name
                    assert key == "GND" or key not in node.inp, key
                    node.inp[key] = pin_loc.pin_id

                    # Get the SOURCE node
                    key = pin.name
                    assert key in nodes, key
                    node = nodes[key]

                    # Append the mux node as a sink
                    key = (pin_loc.stage_id, pin_loc.switch_id, pin_loc.mux_id)
                    node.out.add(key)

                elif pin.direction == PinDirection.OUTPUT:

                    # Get the sink node
                    key = pin.name
                    assert key in nodes, key
                    node = nodes[key]
                    assert node.type == self.NodeType.SINK

                    # Append reference to the mux
                    key = (pin_loc.stage_id, pin_loc.switch_id, pin_loc.mux_id)
                    node.inp[key] = 0

                    # Get the mux node
                    key = (pin_loc.stage_id, pin_loc.switch_id, pin_loc.mux_id)
                    assert key in nodes, key
                    node = nodes[key]

                    # Append the sink as the mux sink
                    key = pin.name
                    node.out.add(key)

                else:
                    assert False, pin.direction

        # Populate mux to mux connections
        for conn in self.switchbox.connections:

            # Get the correct node list
            stage_type = self.switchbox.stages[conn.dst.stage_id].type
            assert stage_type in self.nodes, stage_type
            nodes = self.nodes[stage_type]

            # Get the destination node
            key = (conn.dst.stage_id, conn.dst.switch_id, conn.dst.mux_id)
            assert key in nodes, key
            node = nodes[key]

            # Add its input and pin index
            key = (conn.src.stage_id, conn.src.switch_id, conn.src.mux_id)
            node.inp[key] = conn.dst.pin_id

            # Get the source node
            key = (conn.src.stage_id, conn.src.switch_id, conn.src.mux_id)
            assert key in nodes, key
            node = nodes[key]

            # Add the destination node to its outputs
            key = (conn.dst.stage_id, conn.dst.switch_id, conn.dst.mux_id)
            node.out.add(key)

    def stage_inputs(self, stage_type):
        """
        Yields input pin names (SOURCE node keys) of the given stage type.
        """
        assert stage_type in self.nodes, stage_type
        for node in self.nodes[stage_type].values():
            if node.type == self.NodeType.SOURCE:
                yield node.key

    def stage_outputs(self, stage_type):
        """
        Yields output pin names (SINK node keys) of the given stage type.
        """
        assert stage_type in self.nodes, stage_type
        for node in self.nodes[stage_type].values():
            if node.type == self.NodeType.SINK:
                yield node.key

    def propagate_input(self, stage_type, input_name):
        """
        Recursively propagates a net from an input pin to all reachable
        mux / sink nodes.

        Only nodes whose net is still None are claimed; muxes get their
        selection (sel) set to the input that delivered the net.
        """

        # Get the correct node list
        assert stage_type in self.nodes, stage_type
        nodes = self.nodes[stage_type]

        def walk(node):
            # Depth-first expansion from the given node

            # Examine all driven nodes
            for sink_key in node.out:
                assert sink_key in nodes, sink_key
                sink_node = nodes[sink_key]

                # The sink is free
                if sink_node.net is None:

                    # Assign it to the net
                    sink_node.net = node.net
                    if sink_node.type == self.NodeType.MUX:
                        sink_node.sel = sink_node.inp[node.key]

                    # Expand
                    walk(sink_node)

        # Find the source node
        assert input_name in nodes, input_name
        node = nodes[input_name]

        # Walk downstream
        node.net = input_name
        walk(node)

    def ripup(self, stage_type):
        """
        Rips up all routes within the given stage.

        SOURCE nodes keep their nets; everything else is cleared.
        """
        assert stage_type in self.nodes, stage_type
        for node in self.nodes[stage_type].values():
            if node.type != self.NodeType.SOURCE:
                node.net = None
                node.sel = None

    def check_nodes(self):
        """
        Check if all mux nodes have their selections set.

        Returns True when fully routed, False otherwise (with a warning
        printed per unconfigured mux).
        """
        result = True

        for stage_type, nodes in self.nodes.items():
            for key, node in nodes.items():

                if node.type == self.NodeType.MUX and node.sel is None:
                    result = False
                    print("WARNING: mux unconfigured", key)

        return result

    def fasm_features(self, loc):
        """
        Returns a list of FASM lines that correspond to the routed switchbox
        configuration at the given grid location.
        """
        lines = []

        for stage_type, nodes in self.nodes.items():
            for key, node in nodes.items():

                # For muxes with active selection
                if node.type == self.NodeType.MUX and node.sel is not None:
                    stage_id, switch_id, mux_id = key

                    # Get FASM features using the switchbox model.
                    features = SwitchboxModel.get_metadata_for_mux(
                        loc, self.switchbox.stages[stage_id], switch_id, mux_id, node.sel
                    )
                    lines.extend(features)

        return lines

    def dump_dot(self):
        """
        Dumps a routed switchbox visualization into Graphviz format for
        debugging purposes.

        Nodes and edges are colored per net; unrouted elements get gray /
        black defaults.
        """
        dot = []

        def key2str(key):
            # Render a node key as a dot-safe identifier
            if isinstance(key, str):
                return key
            else:
                return "st{}_sw{}_mx{}".format(*key)

        def fixup_label(lbl):
            # NOTE(review): missing "return" (always yields None) and never
            # called anywhere in this method - confirm whether it should be
            # applied to labels or removed.
            lbl = lbl.replace("[", "(").replace("]", ")")

        # Collect all nets present in the graph
        nets = set()
        for nodes in self.nodes.values():
            for node in nodes.values():
                if node.net is not None:
                    nets.add(node.net)

        # Net colors (None means "no net assigned")
        node_colors = {None: "#C0C0C0"}
        edge_colors = {None: "#000000"}

        # Assign each net a distinct hue around the HLS color wheel
        nets = sorted(list(nets))
        for i, net in enumerate(nets):

            hue = i / len(nets)
            light = 0.33
            saturation = 1.0

            r, g, b = colorsys.hls_to_rgb(hue, light, saturation)
            color = "#{:02X}{:02X}{:02X}".format(
                int(r * 255.0),
                int(g * 255.0),
                int(b * 255.0),
            )

            node_colors[net] = color
            edge_colors[net] = color

        # Add header
        dot.append("digraph {} {{".format(self.switchbox.type))
        dot.append(' graph [nodesep="1.0", ranksep="20"];')
        dot.append(' splines = "false";')
        dot.append(" rankdir = LR;")
        dot.append(" margin = 20;")
        dot.append(" node [style=filled];")

        # Stage types - one dot cluster per stage
        for stage_type, nodes in self.nodes.items():

            # Stage header
            dot.append(' subgraph "cluster_{}" {{'.format(stage_type))
            dot.append(" label=\"Stage '{}'\";".format(stage_type))

            # Nodes and internal mux edges
            for key, node in nodes.items():

                # Source node
                if node.type == self.NodeType.SOURCE:
                    name = "{}_inp_{}".format(stage_type, key2str(key))
                    label = key
                    color = node_colors[node.net]

                    dot.append(
                        ' "{}" [shape=octagon label="{}" fillcolor="{}"];'.format(
                            name,
                            label,
                            color,
                        )
                    )

                # Sink node
                elif node.type == self.NodeType.SINK:
                    name = "{}_out_{}".format(stage_type, key2str(key))
                    label = key
                    color = node_colors[node.net]

                    dot.append(
                        ' "{}" [shape=octagon label="{}" fillcolor="{}"];'.format(
                            name,
                            label,
                            color,
                        )
                    )

                # Mux node - rendered as a sub-cluster with one ellipse per
                # input plus an output ellipse
                elif node.type == self.NodeType.MUX:
                    name = "{}_{}".format(stage_type, key2str(key))
                    dot.append(' subgraph "cluster_{}" {{'.format(name))
                    dot.append(' label="{}, sel={}";'.format(str(key), node.sel))

                    # Inputs
                    for drv_key, pin in node.inp.items():
                        # Color only the selected input with its driver's net
                        if node.sel == pin:
                            assert drv_key in nodes, drv_key
                            net = nodes[drv_key].net
                        else:
                            net = None

                        name = "{}_{}_{}".format(stage_type, key2str(key), pin)
                        label = pin
                        color = node_colors[net]

                        dot.append(
                            ' "{}" [shape=ellipse label="{}" fillcolor="{}"];'.format(
                                name,
                                label,
                                color,
                            )
                        )

                    # Output
                    name = "{}_{}".format(stage_type, key2str(key))
                    label = "out"
                    color = node_colors[node.net]

                    dot.append(
                        ' "{}" [shape=ellipse label="{}" fillcolor="{}"];'.format(
                            name,
                            label,
                            color,
                        )
                    )

                    # Internal mux edges (each input ellipse -> output)
                    for drv_key, pin in node.inp.items():
                        if node.sel == pin:
                            assert drv_key in nodes, drv_key
                            net = nodes[drv_key].net
                        else:
                            net = None

                        src_name = "{}_{}_{}".format(stage_type, key2str(key), pin)
                        dst_name = "{}_{}".format(stage_type, key2str(key))
                        color = edge_colors[net]

                        dot.append(
                            ' "{}" -> "{}" [color="{}"];'.format(
                                src_name,
                                dst_name,
                                color,
                            )
                        )

                    dot.append(" }")

                else:
                    assert False, node.type

            # Mux to mux connections
            for key, node in nodes.items():

                # Source node - edges are drawn from the consumer side
                if node.type == self.NodeType.SOURCE:
                    pass

                # Sink node - single incoming edge from its only driver
                elif node.type == self.NodeType.SINK:
                    assert len(node.inp) == 1, node.inp
                    src_key = next(iter(node.inp.keys()))

                    dst_name = "{}_out_{}".format(stage_type, key2str(key))
                    if isinstance(src_key, str):
                        src_name = "{}_inp_{}".format(stage_type, key2str(src_key))
                    else:
                        src_name = "{}_{}".format(stage_type, key2str(src_key))

                    color = node_colors[node.net]

                    dot.append(
                        ' "{}" -> "{}" [color="{}"];'.format(
                            src_name,
                            dst_name,
                            color,
                        )
                    )

                # Mux node - edges from each driver to the mux input ellipse
                elif node.type == self.NodeType.MUX:
                    for drv_key, pin in node.inp.items():
                        if node.sel == pin:
                            assert drv_key in nodes, drv_key
                            net = nodes[drv_key].net
                        else:
                            net = None

                        dst_name = "{}_{}_{}".format(stage_type, key2str(key), pin)
                        if isinstance(drv_key, str):
                            src_name = "{}_inp_{}".format(stage_type, key2str(drv_key))
                        else:
                            src_name = "{}_{}".format(stage_type, key2str(drv_key))

                        color = edge_colors[net]

                        dot.append(
                            ' "{}" -> "{}" [color="{}"];'.format(
                                src_name,
                                dst_name,
                                color,
                            )
                        )

                else:
                    assert False, node.type

            # Stage footer
            dot.append(" }")

        # Add footer
        dot.append("}")
        return "\n".join(dot)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def main():
    """
    Generates a default FASM configuration for the given device.

    Loads the routing description from the techfile, routes every switchbox
    type with default nets (GND/VCC first, then regular, HOP and GCLK
    inputs), powers on all LOGIC cells and writes the resulting FASM
    features to the output file.
    """

    # Parse arguments
    parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument("--techfile", type=str, required=True, help="Quicklogic 'TechFile' XML file")
    parser.add_argument("--fasm", type=str, default="default.fasm", help="Output FASM file name")
    # NOTE(review): --device is parsed but not referenced in this body -
    # confirm whether it is consumed elsewhere or reserved for future use.
    parser.add_argument(
        "--device", type=str, choices=["eos-s3"], default="eos-s3", help="Device name to generate the FASM file for"
    )
    parser.add_argument(
        "--dump-dot", action="store_true", help="Dump Graphviz .dot files for each routed switchbox type"
    )
    parser.add_argument("--allow-routing-failures", action="store_true", help="Skip switchboxes that fail routing")

    args = parser.parse_args()

    # Read and parse the XML file
    xml_tree = ET.parse(args.techfile)
    xml_root = xml_tree.getroot()

    # Load data
    print("Loading data from the techfile...")
    data = import_data(xml_root)
    switchbox_types = data["switchbox_types"]
    switchbox_grid = data["switchbox_grid"]
    tile_types = data["tile_types"]
    tile_grid = data["tile_grid"]

    # Route switchboxes
    print("Making switchbox routes...")

    fasm = []
    fully_routed = 0
    partially_routed = 0

    def input_rank(pin):
        """
        Returns a rank of a switchbox input. Pins with the lowest rank should
        be expanded first.
        """
        if pin.name == "GND":
            return 0
        elif pin.name == "VCC":
            return 1
        elif pin.type not in [SwitchboxPinType.HOP, SwitchboxPinType.GCLK]:
            return 2
        elif pin.type == SwitchboxPinType.HOP:
            return 3
        elif pin.type == SwitchboxPinType.GCLK:
            return 4

        # Unreachable given the branches above; kept as a safe fallback
        return 99

    # Scan for duplicates: record every pin id that maps onto a mux location
    # already taken by another pin id (fills the module-level "duplicate"
    # map consumed by SwitchboxConfigBuilder).
    for switchbox in switchbox_types.values():
        for pin in switchbox.pins:
            pinmap = {}
            for pin_loc in pin.locs:
                key = (switchbox.type, pin_loc.stage_id, pin_loc.switch_id, pin_loc.mux_id)
                if key not in pinmap:
                    pinmap[key] = pin_loc.pin_id
                else:
                    if key in duplicate:
                        duplicate[key].append(pin_loc.pin_id)
                    else:
                        duplicate[key] = [pin_loc.pin_id]

    # Process each switchbox type
    for switchbox in switchbox_types.values():
        print("", switchbox.type)

        # Identify all locations of the switchbox
        locs = [loc for loc, type in switchbox_grid.items() if type == switchbox.type]

        # Initialize the builder
        builder = SwitchboxConfigBuilder(switchbox)

        # Sort the inputs according to their ranks.
        inputs = sorted(switchbox.inputs.values(), key=input_rank)

        # Propagate them, stage by stage
        for stage in ["STREET", "HIGHWAY"]:
            for pin in inputs:
                if pin.name in builder.stage_inputs(stage):
                    builder.propagate_input(stage, pin.name)

        # Check if all nodes are configured
        routing_failed = not builder.check_nodes()

        # Dump dot (before bailing out so failed routes can be inspected)
        if args.dump_dot:
            dot = builder.dump_dot()
            fname = "defconfig_{}.dot".format(switchbox.type)
            with open(fname, "w") as fp:
                fp.write(dot)

        # Routing failed - abort unless failures are explicitly allowed
        if routing_failed:
            if not args.allow_routing_failures:
                exit(-1)

        # Stats
        if routing_failed:
            partially_routed += len(locs)
        else:
            fully_routed += len(locs)

        # Emit FASM features for each location of this switchbox type
        for loc in locs:
            fasm.extend(builder.fasm_features(loc))

    print(" Total switchboxes: {}".format(len(switchbox_grid)))
    print(" Fully routed : {}".format(fully_routed))
    print(" Partially routed : {}".format(partially_routed))

    # Power on all LOGIC cells
    for loc, tile in tile_grid.items():

        # Get the tile type object
        tile_type = tile_types[tile.type]

        # If this tile has a LOGIC cell then emit the FASM feature that
        # enables its power
        if "LOGIC" in tile_type.cells:
            feature = "X{}Y{}.LOGIC.LOGIC.Ipwr_gates.J_pwr_st".format(loc.x, loc.y)
            fasm.append(feature)

    # Write FASM
    print("Writing FASM file...")
    with open(args.fasm, "w") as fp:
        fp.write("\n".join(fasm))
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
169
f4pga/utils/quicklogic/pp3/create_ioplace.py
Executable file
169
f4pga/utils/quicklogic/pp3/create_ioplace.py
Executable file
|
@ -0,0 +1,169 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
""" Convert a PCF file into a VPR io.place file. """
|
||||
import argparse
|
||||
import csv
|
||||
import sys
|
||||
import re
|
||||
from collections import defaultdict
|
||||
|
||||
import f4pga.utils.vpr_io_place as vpr_io_place
|
||||
from f4pga.utils.pcf import parse_simple_pcf
|
||||
|
||||
# =============================================================================
|
||||
|
||||
# Known IOB types and VPR cells that can be placed at their sites.
# Maps the pinmap CSV "type" column to the VPR pb_type names accepted there.
IOB_TYPES = {
    "CLOCK": [
        "PB-CLOCK",
    ],
    "BIDIR": [
        "PB-BIDIR",
    ],
    "SDIOMUX": [
        "PB-SDIOMUX",
    ],
}

# Matches a VPR block instance name of the form "name[index]" and captures
# both parts as named groups.
BLOCK_INSTANCE_RE = re.compile(r"^(?P<name>\S+)\[(?P<index>[0-9]+)\]$")
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def main():
    """
    Convert PCF constraints for a design into a VPR io.place file.

    Reads the pad-to-location map from a pin map CSV file, resolves each PCF
    IO constraint (including pad aliases) against the design's IO nets and
    emits the corresponding io.place entries. Exits with status 1 on any
    invalid or conflicting constraint.
    """
    parser = argparse.ArgumentParser(description="Convert a PCF file into a VPR io.place file.")
    parser.add_argument("--pcf", "-p", "-P", type=argparse.FileType("r"), required=True, help="PCF input file")
    parser.add_argument("--blif", "-b", type=argparse.FileType("r"), required=True, help="BLIF / eBLIF file")
    parser.add_argument("--map", "-m", "-M", type=argparse.FileType("r"), required=True, help="Pin map CSV file")
    parser.add_argument(
        "--output", "-o", "-O", type=argparse.FileType("w"), default=sys.stdout, help="The output io.place file"
    )
    parser.add_argument("--net", "-n", type=argparse.FileType("r"), required=True, help="top.net file")

    args = parser.parse_args()

    # Collect the design's IO nets and their top-level VPR block names
    io_place = vpr_io_place.IoPlace()
    io_place.read_io_list_from_eblif(args.blif)
    io_place.load_block_names_from_net_file(args.net)

    # Map of pad names to VPR locations, keyed by the VPR block (pb) type.
    pad_map = defaultdict(dict)
    # Map of pad aliases to canonical pad names. Plain str -> str mapping.
    pad_alias_map = {}

    for pin_map_entry in csv.DictReader(args.map):

        # Only IOB sites can host IO constraints
        if pin_map_entry["type"] not in IOB_TYPES:
            continue

        name = pin_map_entry["name"]
        alias = pin_map_entry.get("alias", "")

        for t in IOB_TYPES[pin_map_entry["type"]]:
            pad_map[name][t] = (
                int(pin_map_entry["x"]),
                int(pin_map_entry["y"]),
                int(pin_map_entry["z"]),
            )

        # Register the alias only when it is non-empty. Rows with an empty
        # alias field would otherwise all map "" to a pad name.
        if alias:
            pad_alias_map[alias] = name

    used_pads = set()

    for pcf_constraint in parse_simple_pcf(args.pcf):

        # Skip non-io constraints
        if type(pcf_constraint).__name__ != "PcfIoConstraint":
            continue

        pad_name = pcf_constraint.pad

        # The constrained net must exist in the design
        if not io_place.is_net(pcf_constraint.net):
            print(
                'PCF constraint "{}" from line {} constraints net {} which is not in available netlist:\n{}'.format(
                    pcf_constraint.line_str, pcf_constraint.line_num, pcf_constraint.net, "\n".join(io_place.get_nets())
                ),
                file=sys.stderr,
            )
            sys.exit(1)

        # The pad must be known, either directly or through an alias
        if pad_name not in pad_map and pad_name not in pad_alias_map:
            print(
                'PCF constraint "{}" from line {} constraints pad {} which is not in available pad map:\n{}'.format(
                    pcf_constraint.line_str, pcf_constraint.line_num, pad_name, "\n".join(sorted(pad_map.keys()))
                ),
                file=sys.stderr,
            )
            sys.exit(1)

        # Alias is specified in pcf file so assign it to corresponding pad name
        if pad_name in pad_alias_map:
            pad_name = pad_alias_map[pad_name]

        # Catch the same pad used multiple times
        if pad_name in used_pads:
            print(
                'PCF constraint "{}" from line {} constraints pad {} which has already been constrained'.format(
                    pcf_constraint.line_str, pcf_constraint.line_num, pad_name
                ),
                file=sys.stderr,
            )
            sys.exit(1)
        used_pads.add(pad_name)

        # Get the top-level block instance, strip its index
        inst = io_place.get_top_level_block_instance_for_net(pcf_constraint.net)
        if inst is None:
            continue

        match = BLOCK_INSTANCE_RE.match(inst)
        assert match is not None, inst

        inst = match.group("name")

        # Pick correct loc for that pb_type
        locs = pad_map[pad_name]
        if inst not in locs:
            print(
                'PCF constraint "{}" from line {} constraints net {} of a block type {} '
                "to a location for block types:\n{}".format(
                    pcf_constraint.line_str,
                    pcf_constraint.line_num,
                    pcf_constraint.net,
                    inst,
                    "\n".join(sorted(list(locs.keys()))),
                ),
                file=sys.stderr,
            )
            sys.exit(1)

        # Constrain the net (block)
        loc = locs[inst]
        io_place.constrain_net(net_name=pcf_constraint.net, loc=loc, comment=pcf_constraint.line_str)

    io_place.output_io_place(args.output)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
180
f4pga/utils/quicklogic/pp3/create_place_constraints.py
Executable file
180
f4pga/utils/quicklogic/pp3/create_place_constraints.py
Executable file
|
@ -0,0 +1,180 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
import argparse
|
||||
import sys
|
||||
import csv
|
||||
|
||||
import f4pga.utils.eblif as eblif
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def eprint(*values, **print_kwargs):
    """Print *values* to stderr; accepts the same keyword arguments as print()."""
    print(*values, file=sys.stderr, **print_kwargs)
|
||||
|
||||
|
||||
def get_cell_connection(cell, pin):
    """
    Return the name of the net connected to the given cell pin, or None when
    the pin is unconnected / not present.
    """

    # Only valid for "subckt" cells
    assert cell["type"] == "subckt"

    # Connections are encoded as "PIN=NET" strings following the cell type
    # name in the argument list.
    for assignment in cell["args"][1:]:
        pin_name, net_name = assignment.split("=")
        if pin_name == pin:
            return net_name

    # No such pin
    return None
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def main():
    """
    Generate non-IO placement constraints.

    Copies the input IO constraints through to the output unchanged and, for
    every constrained clock input pad that drives a GMUX through a CLOCK IOB,
    emits an additional placement constraint pinning that GMUX block to the
    location given by the clock pinmap CSV.
    """
    parser = argparse.ArgumentParser(description="Creates placement constraints other than IOs")

    parser.add_argument(
        "--input", "-i", "-I", type=argparse.FileType("r"), default=sys.stdin, help="The input constraints place file."
    )
    parser.add_argument(
        "--output",
        "-o",
        "-O",
        type=argparse.FileType("w"),
        default=sys.stdout,
        help="The output constraints place file.",
    )
    parser.add_argument("--map", type=argparse.FileType("r"), required=True, help="Clock pinmap CSV file")
    parser.add_argument("--blif", "-b", type=argparse.FileType("r"), required=True, help="BLIF / eBLIF file.")

    args = parser.parse_args()

    # Load clock map: CLOCK pad location -> (GMUX location, pin name)
    clock_to_gmux = {}
    for row in csv.DictReader(args.map):
        name = row["name"]
        src_loc = (
            int(row["src.x"]),
            int(row["src.y"]),
            int(row["src.z"]),
        )
        dst_loc = (
            int(row["dst.x"]),
            int(row["dst.y"]),
            int(row["dst.z"]),
        )

        clock_to_gmux[src_loc] = (dst_loc, name)

    # Load EBLIF
    eblif_data = eblif.parse_blif(args.blif)

    # Process the IO constraints file. Pass the constraints unchanged, store
    # them.
    io_constraints = {}

    for line in args.input:

        # Strip, skip comments
        line = line.strip()
        if line.startswith("#"):
            continue

        args.output.write(line + "\n")

        # Skip blank lines after passing them through. Previously a blank
        # line crashed the 4-way unpack below with a ValueError.
        if not line:
            continue

        # Get block and its location (first four whitespace-separated fields)
        block, x, y, z = line.split()[0:4]
        io_constraints[block] = (
            int(x),
            int(y),
            int(z),
        )

    # Analyze the BLIF netlist. Find clock inputs that go through CLOCK IOB to
    # GMUXes.
    clock_connections = []

    IOB_CELL = {"type": "CLOCK_CELL", "ipin": "I_PAD", "opin": "O_CLK"}
    BUF_CELL = {"type": "GMUX_IP", "ipin": "IP", "opin": "IZ"}

    for inp_net in eblif_data["inputs"]["args"]:

        # This one is not constrained, skip it
        if inp_net not in io_constraints:
            continue

        # Search for a CLOCK cell connected to that net
        for cell in eblif_data["subckt"]:
            if cell["type"] == "subckt" and cell["args"][0] == IOB_CELL["type"]:
                net = get_cell_connection(cell, IOB_CELL["ipin"])
                if net == inp_net:
                    iob_cell = cell
                    break
        else:
            continue

        # Get the output net of the CLOCK cell
        con_net = get_cell_connection(iob_cell, IOB_CELL["opin"])
        if not con_net:
            continue

        # Search for a GMUX connected to the CLOCK cell
        for cell in eblif_data["subckt"]:
            if cell["type"] == "subckt" and cell["args"][0] == BUF_CELL["type"]:
                net = get_cell_connection(cell, BUF_CELL["ipin"])
                if net == con_net:
                    buf_cell = cell
                    break
        else:
            continue

        # Get the output net of the GMUX
        clk_net = get_cell_connection(buf_cell, BUF_CELL["opin"])
        if not clk_net:
            continue

        # Store data
        clock_connections.append((inp_net, iob_cell, con_net, buf_cell, clk_net))

    # Emit constraints for GCLK cells
    for inp_net, iob_cell, con_net, buf_cell, clk_net in clock_connections:

        src_loc = io_constraints[inp_net]
        if src_loc not in clock_to_gmux:
            eprint("ERROR: No GMUX location for input CLOCK pad for net '{}' at {}".format(inp_net, src_loc))
            continue

        dst_loc, name = clock_to_gmux[src_loc]

        # FIXME: Silently assuming here that VPR will name the GMUX block as
        # the GMUX cell in EBLIF. In order to fix that there will be a need
        # to read & parse the packed netlist file.
        line = "{} {} {} {} # {}\n".format(buf_cell["cname"][0], *dst_loc, name)
        args.output.write(line)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
1397
f4pga/utils/quicklogic/pp3/data_import.py
Executable file
1397
f4pga/utils/quicklogic/pp3/data_import.py
Executable file
File diff suppressed because it is too large
Load diff
356
f4pga/utils/quicklogic/pp3/data_structs.py
Normal file
356
f4pga/utils/quicklogic/pp3/data_structs.py
Normal file
|
@ -0,0 +1,356 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
"""
|
||||
This file contains definitions of various data structutes used to hold tilegrid
|
||||
and routing information of a Quicklogic FPGA.
|
||||
"""
|
||||
from collections import namedtuple
|
||||
from enum import Enum
|
||||
|
||||
# =============================================================================
|
||||
"""
|
||||
Pin direction in terms of its function.
|
||||
"""
|
||||
|
||||
|
||||
class PinDirection(Enum):
    """
    Pin direction in terms of its function (data flow).
    """

    UNSPEC = 0  # Unspecified / unknown
    INPUT = 1  # The pin is an input
    OUTPUT = 2  # The pin is an output
|
||||
|
||||
|
||||
"""
|
||||
An opposite direction map
|
||||
"""
|
||||
OPPOSITE_DIRECTION = {
|
||||
PinDirection.UNSPEC: PinDirection.UNSPEC,
|
||||
PinDirection.INPUT: PinDirection.OUTPUT,
|
||||
PinDirection.OUTPUT: PinDirection.INPUT,
|
||||
}
|
||||
"""
|
||||
A generic pin
|
||||
"""
|
||||
Pin = namedtuple("Pin", "name direction attrib")
|
||||
"""
|
||||
Pin direction in therms where is it "standing out" of a tile.
|
||||
"""
|
||||
|
||||
|
||||
class PinSide(Enum):
    """
    Side of a tile on which a pin is located (where it "stands out").
    """

    UNSPEC = 0  # Unspecified / unknown
    NORTH = 1
    SOUTH = 2
    EAST = 3
    WEST = 4
|
||||
|
||||
|
||||
"""
|
||||
This is a generic location in the tilegrid. Note that it also encounters for
|
||||
the sub-tile index z.
|
||||
"""
|
||||
Loc = namedtuple("Loc", "x y z")
|
||||
"""
|
||||
FPGA grid quadrant.
|
||||
"""
|
||||
Quadrant = namedtuple("Quadrant", "name x0 y0 x1 y1")
|
||||
"""
|
||||
Forwads and backward location mapping
|
||||
"""
|
||||
LocMap = namedtuple("LocMap", "fwd bwd")
|
||||
|
||||
# =============================================================================
|
||||
"""
|
||||
A cell type within a tile type representation (should be named "site" ?).
|
||||
Holds the cell type name and the list of its pins.
|
||||
"""
|
||||
CellType = namedtuple("CellType", "type pins")
|
||||
"""
|
||||
A cell instance within a tile. Binds a cell name with its type.
|
||||
|
||||
type - Cell type
|
||||
index - Index within the tile
|
||||
name - Cell name (not necessarly unique)
|
||||
alias - Cell alias (if present)
|
||||
"""
|
||||
Cell = namedtuple("Cell", "type index name alias")
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class TileType(object):
    """
    Represents a tile type. The QuickLogic FPGA fabric does not define tiles
    as such; instead it groups cells bound to a common geographical location,
    which this class models as a single tile type.
    """

    def __init__(self, type, cells, fake_const_pin=False):
        # type           - tile type name
        # cells          - dict mapping a cell type name to the cell count
        # fake_const_pin - when True a fake constant input pin gets added
        self.type = type
        self.cells = cells
        self.pins = []
        self.fake_const_pin = fake_const_pin

    def make_pins(self, cells_library):
        """
        Generates the tile pins from the tile's cell list and the pins of
        each cell type as found in cells_library.
        """
        # One tile pin per cell pin, the name prefixed with "<cell_type><i>_"
        self.pins = [
            Pin(
                name="{}{}_{}".format(cell_type, index, cell_pin.name),
                direction=cell_pin.direction,
                attrib=cell_pin.attrib,
            )
            for cell_type, cell_count in self.cells.items()
            for index in range(cell_count)
            for cell_pin in cells_library[cell_type].pins
        ]

        # Append the fake constant connection pin when requested
        if self.fake_const_pin:
            self.pins.append(Pin(name="FAKE_CONST", direction=PinDirection.INPUT, attrib={}))
|
||||
|
||||
|
||||
"""
|
||||
A tile instance within a tilegrid
|
||||
|
||||
type - Tile type
|
||||
name - Tile instance name
|
||||
cells - A list of Cell objects
|
||||
"""
|
||||
Tile = namedtuple("Tile", "type name cells")
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class SwitchboxPinType(Enum):
    """
    Switchbox pin types, classifying what a top-level switchbox pin connects
    to in the fabric.
    """

    UNSPEC = 0  # Unknown.
    LOCAL = 1  # Connects to the tile at the same location as the switchbox.
    HOP = 2  # Generic hop, connects to another switchbox.
    GCLK = 3  # Connects to the global clock network.
    CONST = 4  # Connects to the global const network.
    FOREIGN = 5  # Connects to a tile at a different location.
|
||||
|
||||
|
||||
"""
|
||||
A location that identifies a pin inside a switchbox.
|
||||
|
||||
stage_id - Stage id
|
||||
switch_id - Switch id within the stage
|
||||
mux_id - Mux id within the switch
|
||||
pin_id - Pin id of the mux
|
||||
pin_direction - Logical direction of the pin
|
||||
"""
|
||||
SwitchboxPinLoc = namedtuple("SwitchboxPinLoc", "stage_id switch_id mux_id pin_id pin_direction")
|
||||
"""
|
||||
A top-level switchbox pin.
|
||||
|
||||
in - Pin id.
|
||||
name - Pin name
|
||||
direction - Pin direction.
|
||||
locs - A list of SwitchboxPinLoc objects representing connections to
|
||||
switches within the switchbox.
|
||||
type - The pin type as according to SwitchboxPinType
|
||||
"""
|
||||
SwitchboxPin = namedtuple("SwitchboxPin", "id name direction locs type")
|
||||
"""
|
||||
A switch pin within a switchbox
|
||||
|
||||
id - Pin id.
|
||||
name - Pin name. Only for top-level pins. For others is None.
|
||||
direction - Pin direction.
|
||||
"""
|
||||
SwitchPin = namedtuple("SwitchPin", "id name direction")
|
||||
"""
|
||||
A connection within a switchbox
|
||||
|
||||
src - Source location (always an output pin)
|
||||
dst - Destination location (always an input pin)
|
||||
"""
|
||||
SwitchConnection = namedtuple("SwitchConnection", "src dst")
|
||||
"""
|
||||
Sink timing
|
||||
|
||||
tdel - Constant propagation delay.
|
||||
c - Load capacitance.
|
||||
vpr_switch - VPR switch name
|
||||
"""
|
||||
SinkTiming = namedtuple("SinkTiming", "tdel c vpr_switch")
|
||||
"""
|
||||
Driver timing
|
||||
|
||||
tdel - Constant propagation delay.
|
||||
r - Driver resitance.
|
||||
vpr_switch - VPR switch name
|
||||
"""
|
||||
DriverTiming = namedtuple("DriverTiming", "tdel r vpr_switch")
|
||||
"""
|
||||
Mux edge timing data
|
||||
|
||||
driver - Driver parameters
|
||||
sink - Sink parameters
|
||||
"""
|
||||
MuxEdgeTiming = namedtuple("MuxEdgeTiming", "driver sink")
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class Switchbox(object):
    """
    Holds information about a routing switchbox of a particular type.

    A switchbox follows a CLOS-like architecture: it consists of multiple
    "stages" whose outputs feed the next stage. Each stage contains a number
    of switches, and every switch is a small M-to-N routing box made of
    multiplexers.
    """

    class Mux(object):
        """
        A single multiplexer within a switch.
        """

        def __init__(self, id, switch):
            self.id = id  # The mux ID
            self.switch = switch  # Parent switch id
            self.inputs = {}  # Input pins keyed by their IDs
            self.output = None  # The output pin
            self.timing = {}  # Per-input timing data

        @property
        def pins(self):
            """
            Yields all pins of the mux (inputs first, then the output).
            """
            yield from self.inputs.values()
            yield self.output

    class Switch(object):
        """
        A sub-switchbox of a switchbox stage, grouping multiplexers.
        """

        def __init__(self, id, stage):
            self.id = id  # The switch ID
            self.stage = stage  # Parent stage id
            self.muxes = {}  # Muxes keyed by their IDs

        @property
        def pins(self):
            """
            Yields all pins of all muxes of the switch.
            """
            for mux in self.muxes.values():
                yield from mux.pins

    class Stage(object):
        """
        A routing stage: a column of Switch objects plus stage attributes.
        """

        def __init__(self, id, type=None):
            self.id = id  # The stage ID
            self.type = type  # The stage type ("HIGHWAY" or "STREET")
            self.switches = {}  # Switches keyed by their IDs

        @property
        def pins(self):
            """
            Yields all pins of all switches of the stage.
            """
            for switch in self.switches.values():
                yield from switch.pins

    # ...............................................................

    def __init__(self, type):
        self.type = type  # Switchbox type
        self.inputs = {}  # Top-level input pins keyed by name
        self.outputs = {}  # Top-level output pins keyed by name
        self.stages = {}  # Stages keyed by their IDs

        self.connections = set()  # Connections between stages

    @property
    def pins(self):
        """
        Yields all top-level pins of the switchbox (inputs, then outputs).
        """
        yield from self.inputs.values()
        yield from self.outputs.values()
|
||||
|
||||
|
||||
# =============================================================================
|
||||
"""
|
||||
A global clock network cell
|
||||
|
||||
type - Cell type.
|
||||
name - Cell name.
|
||||
loc - Location in the grid.
|
||||
quadrant - Clock quadrant
|
||||
pin_map - A dict with mapping of cell pins to switchbox pins.
|
||||
"""
|
||||
ClockCell = namedtuple("ClockCell", "type name loc quadrant pin_map")
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class ConnectionType(Enum):
    """
    Connection endpoint type.
    """

    UNSPEC = 0  # Unspecified
    SWITCHBOX = 1  # Connection to a pin of a switchbox
    TILE = 2  # Connection to a pin of a tile
    CLOCK = 3  # Connection to a global clock network cell modelled using
    # routing resources only.
|
||||
|
||||
|
||||
# A connection endpoint location. Specifies location, pin name and connection
|
||||
# type.
|
||||
ConnectionLoc = namedtuple("ConnectionLoc", "loc pin type")
|
||||
|
||||
# A connection within the tilegrid
|
||||
Connection = namedtuple("Connection", "src dst is_direct")
|
||||
|
||||
# =============================================================================
|
||||
"""
|
||||
A package pin. Holds information about what cells the pin cooresponds to and
|
||||
where it is in the tilegrid.
|
||||
|
||||
name - The pin name
|
||||
alias - Alias
|
||||
loc - Location in the physical FPGA gric
|
||||
cell - Cell object that the package pin correspond to
|
||||
"""
|
||||
PackagePin = namedtuple("PackagePin", "name alias loc cell")
|
||||
|
||||
# =============================================================================
|
||||
|
||||
# VPR segment
|
||||
VprSegment = namedtuple("VprSegment", "name length r_metal c_metal")
|
||||
|
||||
# VPR switch
|
||||
VprSwitch = namedtuple("VprSwitch", "name type t_del r c_in c_out c_int")
|
325
f4pga/utils/quicklogic/pp3/eos-s3/iomux_config.py
Executable file
325
f4pga/utils/quicklogic/pp3/eos-s3/iomux_config.py
Executable file
|
@ -0,0 +1,325 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
"""
|
||||
This is an utility script that allows to generate EOS S3 IOMUX configuration
|
||||
either from data in JSON format or from the given EBLIF netlist plus PCF
|
||||
constraints of the FPGA design.
|
||||
"""
|
||||
import argparse
|
||||
import csv
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
|
||||
from f4pga.utils.pcf import parse_simple_pcf
|
||||
from f4pga.utils.eblif import parse_blif
|
||||
|
||||
# =============================================================================
|
||||
|
||||
# Known IOB types and VPR cells that can be placed at their sites
|
||||
IOB_TYPES = {
|
||||
"CLOCK": [
|
||||
"PB-CLOCK",
|
||||
],
|
||||
"BIDIR": [
|
||||
"PB-BIDIR",
|
||||
],
|
||||
"SDIOMUX": [
|
||||
"PB-SDIOMUX",
|
||||
],
|
||||
}
|
||||
|
||||
# Default configuration of the IOMUX pad. Any field not given in the input
# config falls back to the value below.
# NOTE: "ctrl_sel" defaults to "A0" (which encodes to 0) so that a pad config
# omitting it is still valid; previously the default was the integer 0 which
# failed the "ctrl_sel" validity check in generate_iomux_register_content().
PAD_DEFAULT = {
    "func_sel": 0,
    "ctrl_sel": "A0",
    "mode": "none",
    "pull": "none",
    "drive": 2,
    "slew": "slow",
    "schmitt": 0,
}

# Base address of the FBIO_SEL registers
FBIOSEL_BASE = 0x40004D80

# Base address of the IOMUX registers
IOMUX_BASE = 0x40004C00

# Bit-field encodings of the IOMUX pad control register fields
_CTRL_SEL_ENCODING = {"A0": 0, "others": 1, "fabric": 2}  # bits [4:3]
_MODE_ENCODING = {  # mode -> (oen, ren); oen goes to bit 5, ren to bit 11
    "none": (0, 0),
    "input": (0, 1),
    "output": (1, 0),
    "inout": (1, 1),
}
_PULL_ENCODING = {"none": 0, "up": 1, "down": 2, "keeper": 3}  # bits [7:6]
_DRIVE_ENCODING = {2: 0, 4: 1, 8: 2, 12: 3}  # bits [9:8], drive in mA
_SLEW_ENCODING = {"slow": 0, "fast": 1}  # bit 10

# =============================================================================


def generate_iomux_register_content(config):
    """
    Generates the content of the IOMUX registers according to the given
    config.

    config is a dict with a "pads" dict that maps pad numbers (as strings)
    to per-pad settings; missing settings fall back to PAD_DEFAULT.
    Returns a dict mapping register addresses to register values.
    """
    iomux_regs = {}

    # Generate content of the IOMUX pad control register for each pad
    for pad, pad_cfg in config["pads"].items():
        pad = int(pad)
        reg = 0

        # Patch default settings with settings read from the config file
        pad_cfg = dict(PAD_DEFAULT, **pad_cfg)

        func_sel = pad_cfg["func_sel"]
        assert func_sel in [0, 1], func_sel
        reg |= func_sel

        ctrl_sel = pad_cfg["ctrl_sel"]
        assert ctrl_sel in _CTRL_SEL_ENCODING, ctrl_sel
        reg |= _CTRL_SEL_ENCODING[ctrl_sel] << 3

        mode = pad_cfg["mode"]
        assert mode in _MODE_ENCODING, mode
        oen, ren = _MODE_ENCODING[mode]
        reg |= (oen << 5) | (ren << 11)

        pull = pad_cfg["pull"]
        assert pull in _PULL_ENCODING, pull
        reg |= _PULL_ENCODING[pull] << 6

        drive = pad_cfg["drive"]
        assert drive in _DRIVE_ENCODING, drive
        reg |= _DRIVE_ENCODING[drive] << 8

        slew = pad_cfg["slew"]
        assert slew in _SLEW_ENCODING, slew
        reg |= _SLEW_ENCODING[slew] << 10

        schmitt = pad_cfg["schmitt"]
        assert schmitt in [0, 1], schmitt
        reg |= schmitt << 12

        # Register address
        adr = IOMUX_BASE + pad * 4

        # Store the value
        iomux_regs[adr] = reg

    # Generate content of FBIO_SEL_1 and FBIO_SEL_2. Each configured pad sets
    # its corresponding bit.
    fbio_sel = {0: 0, 1: 0}
    for pad in config["pads"].keys():
        r = int(pad) // 32
        b = int(pad) % 32
        fbio_sel[r] |= 1 << b

    iomux_regs[FBIOSEL_BASE + 0x0] = fbio_sel[0]
    iomux_regs[FBIOSEL_BASE + 0x4] = fbio_sel[1]

    return iomux_regs
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def main():
    """
    Generate the EOS S3 IOMUX configuration.

    The configuration is taken either from a JSON file (--json) or derived
    from an EBLIF netlist plus PCF constraints (--eblif + --pcf). The
    resulting register writes are emitted in the requested output format
    (openocd / jlink / binary).
    """

    # Parse arguments
    parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument("--json", default=None, type=str, help="Read IOMUX configuration from the given JSON file")

    parser.add_argument("--eblif", default=None, type=str, help="EBLIF netlist file of a design")

    parser.add_argument("--pcf", default=None, type=str, help="PCF constraints file for a design")
    parser.add_argument("--map", "-m", "-M", type=argparse.FileType("r"), required=True, help="Pin map CSV file")

    parser.add_argument(
        "--output-format", default=None, type=str, help="Output format of IOMUX commands (openocd/jlink/binary)"
    )

    args = parser.parse_args()

    # Read the requested configuration from a JSON file
    if args.json is not None:

        if args.pcf is not None or args.eblif is not None:
            print("Use either '--json' or '--pcf' + '--eblif' options!")
            exit(-1)

        with open(args.json, "r") as fp:
            config = json.load(fp)

    # Generate the config according to the EBLIF netlist and PCF constraints.
    else:

        # args.json is known to be None in this branch, so only the presence
        # of the EBLIF + PCF pair needs to be verified.
        if args.eblif is None or args.pcf is None:
            print("Use either '--json' or '--pcf' + '--eblif' options!")
            exit(-1)

        # Build pad name <-> alias maps from the pin map CSV
        pad_map = {}
        pad_alias_map = {}

        for pin_map_entry in csv.DictReader(args.map):

            if pin_map_entry["type"] not in IOB_TYPES:
                continue

            name = pin_map_entry["name"]
            alias = ""
            if "alias" in pin_map_entry:
                alias = pin_map_entry["alias"]
                pad_alias_map[alias] = name
                pad_map[name] = alias
            else:
                pad_map[name] = name

        # Read and parse PCF
        with open(args.pcf, "r") as fp:
            pcf = list(parse_simple_pcf(fp))

        # Read and parse BLIF/EBLIF
        with open(args.eblif, "r") as fp:
            eblif = parse_blif(fp)

        # Build the config
        config = {"pads": {}}

        eblif_inputs = eblif["inputs"]["args"]
        eblif_outputs = eblif["outputs"]["args"]

        def inout_name(name, suffix):
            # An inout port "n[i]" appears in the netlist as "n<suffix>[i]";
            # insert the suffix before the index when one is present.
            expr = r"(?P<name>.*)(?P<index>\[[0-9]+\])$"
            match = re.fullmatch(expr, name)
            if match is not None:
                return match.group("name") + suffix + match.group("index")
            return name + suffix

        for constraint in pcf:

            pad_name = constraint.pad

            if pad_name not in pad_map and pad_name not in pad_alias_map:
                print(
                    "PCF constraint '{}' from line {} constraints pad {} "
                    "which is not in available pad map:\n{}".format(
                        constraint.line_str, constraint.line_num, pad_name, "\n".join(sorted(pad_map.keys()))
                    ),
                    file=sys.stderr,
                )
                sys.exit(1)

            # get pad alias to get IO pad count
            pad_alias = ""
            if pad_name in pad_map:
                pad_alias = pad_map[pad_name]

            # Alias is specified in pcf file so assign it to corresponding pad name
            if pad_name in pad_alias_map:
                pad_alias = pad_name

            pad = None

            match = re.match(r"^IO_([0-9]+)$", pad_alias)
            if match is not None:
                pad = int(match.group(1))

            # Pad not found or out of range
            if pad is None or pad < 0 or pad >= 46:
                continue

            # Detect inouts: an inout port is split into an "_$inp" input and
            # an "_$out" output in the EBLIF netlist.
            is_inout_in = inout_name(constraint.net, "_$inp") in eblif_inputs
            is_inout_out = inout_name(constraint.net, "_$out") in eblif_outputs

            if is_inout_in and is_inout_out:
                pad_config = {
                    "ctrl_sel": "fabric",
                    "mode": "inout",
                }

            # Configure as input
            elif constraint.net in eblif_inputs:
                pad_config = {
                    "ctrl_sel": "fabric",
                    "mode": "input",
                }

            # Configure as output
            elif constraint.net in eblif_outputs:
                pad_config = {
                    "ctrl_sel": "fabric",
                    "mode": "output",
                }

            else:
                assert False, (constraint.net, constraint.pad)

            config["pads"][str(pad)] = pad_config

    # Convert the config to IOMUX register content
    iomux_regs = generate_iomux_register_content(config)

    if args.output_format == "openocd":
        # Output openOCD commands
        for adr in sorted(iomux_regs.keys()):
            print(" mww 0x{:08x} 0x{:08x}".format(adr, iomux_regs[adr]))
    elif args.output_format == "jlink":
        # Output JLink commands
        for adr in sorted(iomux_regs.keys()):
            print("w4 0x{:08x} 0x{:08x}".format(adr, iomux_regs[adr]))
    elif args.output_format == "binary":
        # Output binary file: <REGADDR 4B><REGVAL 4B>... little-endian
        for adr in sorted(iomux_regs.keys()):
            # first the address, as raw bytes (bypass the print()), LE, 4B
            sys.stdout.buffer.write(int(adr).to_bytes(4, byteorder="little"))
            # second the value, as raw bytes (bypass the print()), LE, 4B
            sys.stdout.buffer.write(int(iomux_regs[adr]).to_bytes(4, byteorder="little"))
    else:
        print("Use either 'openocd' or 'jlink' or 'binary' output format!")
        exit(-1)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
1054
f4pga/utils/quicklogic/pp3/fasm2bels.py
Normal file
1054
f4pga/utils/quicklogic/pp3/fasm2bels.py
Normal file
File diff suppressed because it is too large
Load diff
1281
f4pga/utils/quicklogic/pp3/prepare_vpr_database.py
Executable file
1281
f4pga/utils/quicklogic/pp3/prepare_vpr_database.py
Executable file
File diff suppressed because it is too large
Load diff
1414
f4pga/utils/quicklogic/pp3/routing_import.py
Executable file
1414
f4pga/utils/quicklogic/pp3/routing_import.py
Executable file
File diff suppressed because it is too large
Load diff
239
f4pga/utils/quicklogic/pp3/rr_utils.py
Normal file
239
f4pga/utils/quicklogic/pp3/rr_utils.py
Normal file
|
@ -0,0 +1,239 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
|
||||
from f4pga.utils.quicklogic.pp3.data_structs import Loc
|
||||
|
||||
from lib.rr_graph import tracks
|
||||
import lib.rr_graph.graph2 as rr
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def add_track(graph, track, segment_id, node_timing=None):
    """
    Add a single track node to the routing graph and return the node object.

    When no timing is supplied, an ideal (zero R/C) rr.NodeTiming is used.
    """

    # Default to a zero-delay timing model
    timing = rr.NodeTiming(r=0.0, c=0.0) if node_timing is None else node_timing

    new_id = graph.add_track(track, segment_id, timing=timing)

    # The graph appends new nodes at the end of its node list; sanity-check
    # that the id reported by add_track matches the last node.
    new_node = graph.nodes[-1]
    assert new_node.id == new_id

    return new_node
||||
|
||||
|
||||
def add_node(graph, loc, direction, segment_id):
    """
    Add a unit-length track node at the given location and return it.
    """

    # A length-1 track spans a single grid location in both axes
    unit_track = tracks.Track(
        direction=direction,
        x_low=loc.x,
        x_high=loc.x,
        y_low=loc.y,
        y_high=loc.y,
    )

    return add_track(graph, unit_track, segment_id)
||||
|
||||
|
||||
def add_edge(graph, src_node_id, dst_node_id, switch_id, meta_name=None, meta_value=""):
    """
    Add an edge to the routing graph.

    If the given switch is a "pass" type switch (SHORT or PASS_GATE) the
    connection is electrically bidirectional, so a second edge going the
    opposite way is added as well.
    """

    # Self-loops are never legal in the routing graph
    assert src_node_id != dst_node_id, (src_node_id, dst_node_id, switch_id, meta_name, meta_value)

    # Forward edge src -> dst
    graph.add_edge(src_node_id, dst_node_id, switch_id, meta_name, meta_value)

    # Pass-type switches conduct both ways; mirror the edge if needed.
    is_pass = graph.switch_map[switch_id].type in (rr.SwitchType.SHORT, rr.SwitchType.PASS_GATE)
    if is_pass:
        graph.add_edge(dst_node_id, src_node_id, switch_id, meta_name, meta_value)
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def node_joint_location(node_a, node_b):
    """
    Return the location where two VPR nodes touch each other.

    Each node is treated as a segment with two endpoints (low and high);
    the first endpoint of node_a that matches an endpoint of node_b is
    returned. Fails with an assertion if the nodes do not touch.
    """

    endpoints_a = (
        Loc(node_a.loc.x_low, node_a.loc.y_low, 0),
        Loc(node_a.loc.x_high, node_a.loc.y_high, 0),
    )
    endpoints_b = (
        Loc(node_b.loc.x_low, node_b.loc.y_low, 0),
        Loc(node_b.loc.x_high, node_b.loc.y_high, 0),
    )

    # Check low endpoint of node_a first, then high — same precedence as a
    # pairwise comparison of all four combinations.
    for endpoint in endpoints_a:
        if endpoint in endpoints_b:
            return endpoint

    assert False, (node_a, node_b)
||||
|
||||
|
||||
def connect(graph, src_node, dst_node, switch_id=None, segment_id=None, meta_name=None, meta_value=""):
    """
    Connect two VPR nodes in a way that certain rules are obeyed.

    The rules are:
    - a CHANX cannot connect directly to a CHANY and vice versa,
    - a CHANX cannot connect to an IPIN facing left or right,
    - a CHANY cannot connect to an IPIN facing top or bottom,
    - an OPIN facing left or right cannot connect to a CHANX,
    - an OPIN facing top or bottom cannot connect to a CHANY

    Whenever a rule is not met then the connection is made through a padding
    node:

    src -> [delayless] -> pad -> [desired switch] -> dst

    Otherwise the connection is made directly

    src -> [desired switch] -> dst

    The influence of whether the rules are obeyed or not on the actual VPR
    behavior is unclear.

    Parameters
    ----------
    graph : the VPR routing resource graph being built
    src_node, dst_node : VPR rr nodes to connect
    switch_id : switch to use for the edge; defaults to the delayless switch
    segment_id : segment for any padding node; derived from src/dst if None
    meta_name, meta_value : optional fasm metadata attached to the edge
    """

    # Use the default delayless switch if none is given
    if switch_id is None:
        switch_id = graph.get_delayless_switch_id()

    # Determine which segment to use if none given
    if segment_id is None:
        # If the source is IPIN/OPIN then use the same segment as used by
        # the destination.
        # If the destination is IPIN/OPIN then do the opposite.
        # Finally if both are CHANX/CHANY then use the source's segment.

        if src_node.type in [rr.NodeType.IPIN, rr.NodeType.OPIN]:
            segment_id = dst_node.segment.segment_id
        elif dst_node.type in [rr.NodeType.IPIN, rr.NodeType.OPIN]:
            segment_id = src_node.segment.segment_id
        else:
            segment_id = src_node.segment.segment_id

    # Classify the channel-to-channel cases up front
    chanx_to_chany = src_node.type == rr.NodeType.CHANX and dst_node.type == rr.NodeType.CHANY
    chany_to_chanx = src_node.type == rr.NodeType.CHANY and dst_node.type == rr.NodeType.CHANX
    chany_to_chany = src_node.type == rr.NodeType.CHANY and dst_node.type == rr.NodeType.CHANY
    chanx_to_chanx = src_node.type == rr.NodeType.CHANX and dst_node.type == rr.NodeType.CHANX

    # CHANX to CHANY or vice-versa
    if chany_to_chanx or chanx_to_chany:

        # Check loc - asserts if the nodes do not touch
        node_joint_location(src_node, dst_node)

        # Connect directly
        add_edge(graph, src_node.id, dst_node.id, switch_id, meta_name, meta_value)

    # CHANX to CHANX or CHANY to CHANY - must go through a perpendicular
    # padding node
    elif chany_to_chany or chanx_to_chanx:

        loc = node_joint_location(src_node, dst_node)
        direction = "X" if src_node.type == rr.NodeType.CHANY else "Y"

        # Padding node
        pad_node = add_node(graph, loc, direction, segment_id)

        # Connect through the padding node
        add_edge(graph, src_node.id, pad_node.id, graph.get_delayless_switch_id())

        add_edge(graph, pad_node.id, dst_node.id, switch_id, meta_name, meta_value)

    # OPIN to CHANX/CHANY
    elif src_node.type == rr.NodeType.OPIN and dst_node.type in [rr.NodeType.CHANX, rr.NodeType.CHANY]:

        # All OPINs go right (towards +X)
        assert src_node.loc.side == tracks.Direction.RIGHT, src_node

        # Connected to CHANX - needs a CHANY padding node
        if dst_node.type == rr.NodeType.CHANX:

            loc = node_joint_location(src_node, dst_node)

            # Padding node
            pad_node = add_node(graph, loc, "Y", segment_id)

            # Connect through the padding node
            add_edge(graph, src_node.id, pad_node.id, graph.get_delayless_switch_id())

            add_edge(graph, pad_node.id, dst_node.id, switch_id, meta_name, meta_value)

        # Connected to CHANY
        elif dst_node.type == rr.NodeType.CHANY:

            # Directly
            add_edge(graph, src_node.id, dst_node.id, switch_id, meta_name, meta_value)

        # Should not happen (guarded by the enclosing elif)
        else:
            assert False, dst_node

    # CHANX/CHANY to IPIN
    elif dst_node.type == rr.NodeType.IPIN and src_node.type in [rr.NodeType.CHANX, rr.NodeType.CHANY]:

        # All IPINs go top (toward +Y)
        assert dst_node.loc.side == tracks.Direction.TOP, dst_node

        # Connected to CHANY - needs a CHANX padding node
        if src_node.type == rr.NodeType.CHANY:

            loc = node_joint_location(src_node, dst_node)

            # Padding node
            pad_node = add_node(graph, loc, "X", segment_id)

            # Connect through the padding node
            add_edge(graph, src_node.id, pad_node.id, graph.get_delayless_switch_id())

            add_edge(graph, pad_node.id, dst_node.id, switch_id, meta_name, meta_value)

        # Connected to CHANX
        elif src_node.type == rr.NodeType.CHANX:

            # Directly
            add_edge(graph, src_node.id, dst_node.id, switch_id, meta_name, meta_value)

        # Should not happen (guarded by the enclosing elif).
        # Bug fix: this inner dispatch is on src_node.type, so report
        # src_node here (previously reported dst_node, which is always
        # an IPIN in this branch and would have been misleading).
        else:
            assert False, src_node

    # An unhandled case
    else:
        assert False, (src_node, dst_node)
|
533
f4pga/utils/quicklogic/pp3/switchbox_model.py
Normal file
533
f4pga/utils/quicklogic/pp3/switchbox_model.py
Normal file
|
@ -0,0 +1,533 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
from f4pga.utils.quicklogic.pp3.data_structs import PinDirection, ConnectionType
|
||||
from f4pga.utils.quicklogic.pp3.utils import yield_muxes
|
||||
from f4pga.utils.quicklogic.pp3.rr_utils import add_node, connect
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class SwitchboxModel(object):
    """
    Represents a model of connectivity of a concrete instance of a switchbox.

    The model translates a switchbox description (stages of switches, each
    holding muxes) into VPR rr-graph nodes and edges at a given grid location.
    """

    def __init__(self, graph, loc, phy_loc, switchbox):
        # The VPR routing resource graph being built
        self.graph = graph
        # VPR grid location of the switchbox
        self.loc = loc
        # Physical grid location (used for fasm feature prefixes)
        self.phy_loc = phy_loc
        # The switchbox description (stages / switches / muxes / connections)
        self.switchbox = switchbox

        # Mux selections that are fixed (forced) for this instance
        self.fixed_muxsels = set()
        # Set of (stage_id, switch_id, mux_id) of fixed muxes, built in build()
        self.fixed_muxes = None

        # Maps (stage_id, switch_id, mux_id, pin_id) -> VPR node of a mux input
        self.mux_input_to_node = {}
        # Maps (stage_id, switch_id, mux_id) -> VPR node of a mux output
        self.mux_output_to_node = {}

        # Maps switchbox input pin name -> VPR node
        self.input_to_node = {}

    @staticmethod
    def get_metadata_for_mux(loc, stage, switch_id, mux_id, pin_id):
        """
        Formats fasm features for the given edge representing a switchbox mux.
        Returns a list of fasm features.
        """
        metadata = []

        # Format prefix from the physical location
        prefix = "X{}Y{}.ROUTING".format(loc.x, loc.y)

        # A mux in the HIGHWAY stage
        if stage.type == "HIGHWAY":
            feature = "I_highway.IM{}.I_pg{}".format(switch_id, pin_id)

        # A mux in the STREET stage. NOTE: stage and switch ids are 1-based
        # in the fasm feature name.
        elif stage.type == "STREET":
            feature = "I_street.Isb{}{}.I_M{}.I_pg{}".format(stage.id + 1, switch_id + 1, mux_id, pin_id)

        else:
            assert False, stage

        metadata.append(".".join([prefix, feature]))
        return metadata

    @staticmethod
    def get_chan_dirs_for_stage(stage):
        """
        Returns channel directions (dir_inp, dir_out) for inputs and outputs
        of a stage. STREET stages alternate directions with their stage id.
        """

        if stage.type == "HIGHWAY":
            return "Y", "X"

        elif stage.type == "STREET":
            dir_inp = "Y" if (stage.id % 2) else "X"
            dir_out = "X" if (stage.id % 2) else "Y"
            return dir_inp, dir_out

        else:
            assert False, stage.type

    @staticmethod
    def get_connection(switchbox, src, dst):
        """
        Returns the SwitchboxConnection object that spans two muxes given their
        locations. Parameters src and dst should be tuples containing:
        (stage_id, switch_id, mux_id)

        Returns None when no such connection exists.
        """

        for connection in switchbox.connections:
            c_src = (connection.src.stage_id, connection.src.switch_id, connection.src.mux_id)
            c_dst = (connection.dst.stage_id, connection.dst.switch_id, connection.dst.mux_id)

            if c_src == src and c_dst == dst:
                return connection

        return None

    @staticmethod
    def get_switchbox_routes(switchbox, out_name, inp_name):
        """
        Returns a list of routes inside the switchbox that connect the given
        output pin with the given input pin.

        Returns a list of lists. Each inner list contain tuples with
        (stage_id, switch_id, mux_id, pin_id)
        """

        # Route list (closed over by walk(), which appends complete routes)
        routes = []

        def walk(ctx, target_name, route=None):
            """
            An inner recursive walk function. Walks from a location within
            the switchbox until the target input pin is reached.

            ctx is a (stage_id, switch_id, mux_id) tuple identifying the
            mux currently being expanded.
            """

            # Copy/create the route list so each recursion branch gets its own
            if route is None:
                route = []
            else:
                route = list(route)

            # Add this mux
            route.append(ctx)

            # Get the mux object
            stage_id, switch_id, mux_id = ctx

            stage = switchbox.stages[stage_id]
            switch = stage.switches[switch_id]
            mux = switch.muxes[mux_id]

            # Get its input connections, indexed by destination pin id
            connections = {}
            for connection in switchbox.connections:
                is_stage_id = connection.dst.stage_id == stage_id
                is_switch_id = connection.dst.switch_id == switch_id
                is_mux_id = connection.dst.mux_id == mux_id
                if is_stage_id and is_switch_id and is_mux_id:
                    connections[connection.dst.pin_id] = connection

            # Expand all its inputs
            for pin_id, pin in mux.inputs.items():

                # An input goes to another mux, expand it
                if pin.name is None and pin_id in connections:
                    connection = connections[pin_id]

                    next_ctx = (
                        connection.src.stage_id,
                        connection.src.switch_id,
                        connection.src.mux_id,
                    )
                    walk(next_ctx, target_name, route)

                # This is a switchbox input
                elif pin.name is not None:

                    # We've hit the target
                    if pin.name == target_name:

                        # Append the current mux and its selection (pin_id)
                        final_route = list(route)
                        final_route[-1] = tuple(list(final_route[-1]) + [pin_id])

                        # Trace the route back, append mux selections so each
                        # entry becomes (stage_id, switch_id, mux_id, pin_id)
                        for i in range(len(final_route) - 1):
                            dst = final_route[i][:3]
                            src = final_route[i + 1][:3]

                            connection = SwitchboxModel.get_connection(switchbox, src, dst)

                            sel = connection.dst.pin_id
                            final_route[i] = tuple(list(final_route[i]) + [sel])

                        routes.append(final_route)

                # Should not happen
                else:
                    assert False, pin

        # Get the output pin; an output is expected to have exactly one
        # mux location driving it.
        pin = switchbox.outputs[out_name]
        assert len(pin.locs) == 1
        loc = pin.locs[0]

        # Walk from the output, collect routes
        ctx = (
            loc.stage_id,
            loc.switch_id,
            loc.mux_id,
        )
        walk(ctx, inp_name)

        return routes

    def _create_muxes(self):
        """
        Creates nodes for muxes and internal edges within them. Annotates the
        internal edges with fasm data.

        Builds maps of muxes' inputs and outputs to VPR nodes.
        """

        # Build mux driver timing map. Assign each mux output its timing data.
        # All input pins of a mux are expected to share the same driver timing.
        driver_timing = {}
        for connection in self.switchbox.connections:
            src = connection.src

            stage = self.switchbox.stages[src.stage_id]
            switch = stage.switches[src.switch_id]
            mux = switch.muxes[src.mux_id]
            pin = mux.inputs[src.pin_id]

            if pin.id not in mux.timing:
                continue

            timing = mux.timing[pin.id].driver

            key = (src.stage_id, src.switch_id, src.mux_id)
            if key in driver_timing:
                assert driver_timing[key] == timing, (self.loc, key, driver_timing[key], timing)
            else:
                driver_timing[key] = timing

        # Create muxes
        segment_id = self.graph.get_segment_id_from_name("sbox")

        for stage, switch, mux in yield_muxes(self.switchbox):
            dir_inp, dir_out = self.get_chan_dirs_for_stage(stage)

            # Output node
            key = (stage.id, switch.id, mux.id)
            assert key not in self.mux_output_to_node

            out_node = add_node(self.graph, self.loc, dir_out, segment_id)
            self.mux_output_to_node[key] = out_node

            # Intermediate output node (separates mux edges from the driver
            # edge so the driver timing can be modeled on its own edge)
            int_node = add_node(self.graph, self.loc, dir_out, segment_id)

            # Get switch id for the switch assigned to the driver. If
            # there is none then use the delayless switch. Probably the
            # driver is connected to a const.
            if key in driver_timing:
                switch_id = self.graph.get_switch_id(driver_timing[key].vpr_switch)
            else:
                switch_id = self.graph.get_delayless_switch_id()

            # Output driver edge
            connect(
                self.graph,
                int_node,
                out_node,
                switch_id=switch_id,
                segment_id=segment_id,
            )

            # Input nodes + mux edges
            for pin in mux.inputs.values():

                key = (stage.id, switch.id, mux.id, pin.id)
                assert key not in self.mux_input_to_node

                # Input node
                inp_node = add_node(self.graph, self.loc, dir_inp, segment_id)
                self.mux_input_to_node[key] = inp_node

                # Get mux metadata (fasm features for this mux selection)
                metadata = self.get_metadata_for_mux(self.phy_loc, stage, switch.id, mux.id, pin.id)

                if len(metadata):
                    meta_name = "fasm_features"
                    meta_value = "\n".join(metadata)
                else:
                    meta_name = None
                    meta_value = ""

                # Get switch id for the switch assigned to the mux edge. If
                # there is none then use the delayless switch. Probably the
                # edge is connected to a const.
                if pin.id in mux.timing:
                    switch_id = self.graph.get_switch_id(mux.timing[pin.id].sink.vpr_switch)
                else:
                    switch_id = self.graph.get_delayless_switch_id()

                # Mux switch with appropriate timing and fasm metadata
                connect(
                    self.graph,
                    inp_node,
                    int_node,
                    switch_id=switch_id,
                    segment_id=segment_id,
                    meta_name=meta_name,
                    meta_value=meta_value,
                )

    def _connect_muxes(self):
        """
        Creates VPR edges that connects muxes within the switchbox.
        """

        segment_id = self.graph.get_segment_id_from_name("sbox")
        switch_id = self.graph.get_switch_id("short")

        # Add internal connections between muxes.
        for connection in self.switchbox.connections:
            src = connection.src
            dst = connection.dst

            # Check: mux outputs are always pin 0 of direction OUTPUT
            assert src.pin_id == 0, src
            assert src.pin_direction == PinDirection.OUTPUT, src

            # Get the input node
            key = (dst.stage_id, dst.switch_id, dst.mux_id, dst.pin_id)
            dst_node = self.mux_input_to_node[key]

            # Get the output node
            key = (src.stage_id, src.switch_id, src.mux_id)
            src_node = self.mux_output_to_node[key]

            # Connect
            connect(self.graph, src_node, dst_node, switch_id=switch_id, segment_id=segment_id)

    def _create_input_drivers(self):
        """
        Creates VPR nodes and edges that model input connectivity of the
        switchbox.
        """

        # Create a driver map containing all mux pin locations that are
        # connected to a driver. The map is indexed by (pin_name, vpr_switch)
        # and groups together inputs that should be driven by a specific
        # switch due to the timing model.
        driver_map = defaultdict(lambda: [])

        for pin in self.switchbox.inputs.values():
            for loc in pin.locs:

                stage = self.switchbox.stages[loc.stage_id]
                switch = stage.switches[loc.switch_id]
                mux = switch.muxes[loc.mux_id]
                # NOTE: this rebinds the outer loop variable 'pin' to the
                # mux input pin; the remaining statements rely on that.
                pin = mux.inputs[loc.pin_id]

                if pin.id not in mux.timing:
                    vpr_switch = None
                else:
                    vpr_switch = mux.timing[pin.id].driver.vpr_switch

                key = (pin.name, vpr_switch)
                driver_map[key].append(loc)

        # Create input nodes for each input pin
        segment_id = self.graph.get_segment_id_from_name("sbox")

        for pin in self.switchbox.inputs.values():

            node = add_node(self.graph, self.loc, "Y", segment_id)

            assert pin.name not in self.input_to_node, pin.name
            self.input_to_node[pin.name] = node

        # Create driver nodes, connect everything
        for (pin_name, vpr_switch), locs in driver_map.items():

            # Create the driver node
            drv_node = add_node(self.graph, self.loc, "X", segment_id)

            # Connect input node to the driver node. Use the switch with timing.
            inp_node = self.input_to_node[pin_name]

            # Get switch id for the switch assigned to the driver. If
            # there is none then use the delayless switch. Probably the
            # driver is connected to a const.
            if vpr_switch is not None:
                switch_id = self.graph.get_switch_id(vpr_switch)
            else:
                switch_id = self.graph.get_delayless_switch_id()

            # Connect
            connect(self.graph, inp_node, drv_node, switch_id=switch_id, segment_id=segment_id)

            # Now connect the driver node with its loads
            switch_id = self.graph.get_switch_id("short")
            for loc in locs:

                key = (loc.stage_id, loc.switch_id, loc.mux_id, loc.pin_id)
                dst_node = self.mux_input_to_node[key]

                connect(self.graph, drv_node, dst_node, switch_id=switch_id, segment_id=segment_id)

    def build(self):
        """
        Build the switchbox model by creating and adding its nodes and edges
        to the RR graph.
        """

        # TODO: FIXME: When a switchbox model contains fixed muxes only they
        # should be removed and the rest of the switchbox should be added
        # to the rr graph. For now if there is any fixed mux, remove the
        # whole switchbox.
        if len(self.fixed_muxsels):

            # A list of muxes to avoid
            self.fixed_muxes = set([f[:3] for f in self.fixed_muxsels])

            print(
                "Switchbox model '{}' at '{}' contains '{}' fixed muxes.".format(
                    self.switchbox.type, self.loc, len(self.fixed_muxes)
                )
            )
            return

        # Create and connect muxes
        self._create_muxes()
        self._connect_muxes()

        # Create and connect input drivers models
        self._create_input_drivers()

    def get_input_node(self, pin_name):
        """
        Returns a VPR node associated with the given input of the switchbox
        """
        return self.input_to_node[pin_name]

    def get_output_node(self, pin_name):
        """
        Returns a VPR node associated with the given output of the switchbox
        """

        # Get the output pin
        pin = self.switchbox.outputs[pin_name]

        assert len(pin.locs) == 1
        loc = pin.locs[0]

        # Return its node
        key = (loc.stage_id, loc.switch_id, loc.mux_id)
        return self.mux_output_to_node[key]
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class QmuxSwitchboxModel(SwitchboxModel):
    """
    Represents a model of connectivity of a concrete instance of a switchbox
    located at a QMUX tile.
    """

    def __init__(self, graph, loc, phy_loc, switchbox, qmux_cells, connections):
        super().__init__(graph, loc, phy_loc, switchbox)

        # QMUX cells present at this tile, keyed by cell identity
        self.qmux_cells = qmux_cells
        # Tile-level connections used to locate the IS0/IS1 control endpoints
        self.connections = connections

        # Maps cell name -> {pin: {"VCC": routes, "GND": routes}};
        # filled in by _find_control_routes() / filtered by build()
        self.ctrl_routes = {}

    def _find_control_routes(self):
        """
        For each QMUX cell, find all switchbox routes that can drive its IS0
        and IS1 control pins from the constant VCC and GND inputs. Results
        are stored in self.ctrl_routes.
        """
        PINS = (
            "IS0",
            "IS1",
        )

        for cell in self.qmux_cells.values():

            # Get IS0 and IS1 connection endpoints
            eps = {}
            for connection in self.connections:
                if connection.dst.type == ConnectionType.CLOCK:
                    dst_cell, dst_pin = connection.dst.pin.split(".")

                    if dst_cell == cell.name and dst_pin in PINS:
                        eps[dst_pin] = connection.src

            # Find all routes for IS0 and IS1 pins that go to GND and VCC
            # NOTE(review): eps[pin] raises KeyError if a control endpoint
            # was not found above - presumably both always exist; confirm.
            routes = {}
            for pin in PINS:

                # Find the routes
                vcc_routes = self.get_switchbox_routes(self.switchbox, eps[pin].pin, "VCC")
                gnd_routes = self.get_switchbox_routes(self.switchbox, eps[pin].pin, "GND")

                routes[pin] = {"VCC": vcc_routes, "GND": gnd_routes}

            # Store
            self.ctrl_routes[cell.name] = routes

    def build(self):
        """
        Builds the QMUX switchbox model
        """

        # Find routes inside the switchbox for GMUX control pins
        self._find_control_routes()

        # Filter routes so GND routes go through stage 2, switch 0 and VCC
        # routes go through stage 2, switch 1.
        for cell_name, cell_routes in self.ctrl_routes.items():
            for pin, pin_routes in cell_routes.items():
                for net, net_routes in pin_routes.items():

                    routes = []
                    for route in net_routes:

                        # Assume 3-stage switchbox
                        assert len(route) == 3, "FIXME: Assuming 3-stage switchbox!"

                        # route[1][1] is the switch id of the middle stage
                        if route[1][1] == 0 and net == "GND":
                            routes.append(route)
                        if route[1][1] == 1 and net == "VCC":
                            routes.append(route)

                    # Replace the route list in place with the filtered one
                    pin_routes[net] = routes

    def get_input_node(self, pin_name):
        # QMUX switchboxes expose no regular input nodes
        return None

    def get_output_node(self, pin_name):
        # QMUX switchboxes expose no regular output nodes
        return None
|
252
f4pga/utils/quicklogic/pp3/tile_import.py
Normal file
252
f4pga/utils/quicklogic/pp3/tile_import.py
Normal file
|
@ -0,0 +1,252 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
|
||||
import os
|
||||
import re
|
||||
import itertools
|
||||
from collections import defaultdict
|
||||
|
||||
import lxml.etree as ET
|
||||
|
||||
from f4pga.utils.quicklogic.pp3.data_structs import PinDirection
|
||||
from f4pga.utils.quicklogic.pp3.utils import fixup_pin_name, get_pin_name
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def add_ports(xml_parent, pins, buses=True):
    """
    Add port tags (input/output/clock) to a tile/pb_type XML element.

    Pins are grouped by direction and, when 'buses' is True, collapsed into
    buses keyed by their base name; otherwise every pin becomes an individual
    single-bit port. Returns the per-direction pin-count maps.
    """

    # Pin-count maps, one defaultdict per port direction
    pinlists = {
        "input": defaultdict(lambda: 0),
        "output": defaultdict(lambda: 0),
        "clock": defaultdict(lambda: 0),
    }

    for pin in pins:

        # Pick the target map from the pin direction. Clock inputs are
        # reported separately from ordinary inputs.
        if pin.direction == PinDirection.INPUT:
            kind = "clock" if pin.attrib.get("clock", None) == "true" else "input"
        elif pin.direction == PinDirection.OUTPUT:
            kind = "output"
        else:
            assert False, pin

        pinlist = pinlists[kind]

        if buses:
            # Track the widest index seen for each bus name
            name, idx = get_pin_name(pin.name)
            pinlist[name] = 1 if idx is None else max(pinlist[name], idx + 1)
        else:
            # Flatten buses: each pin is its own 1-bit port
            pinlist[fixup_pin_name(pin.name)] = 1

    # Emit one port tag per (direction, bus)
    for tag, pinlist in pinlists.items():
        for pin, count in pinlist.items():
            ET.SubElement(xml_parent, tag, {"name": pin, "num_pins": str(count)})

    return pinlists
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def make_top_level_pb_type(tile_type, nsmap):
    """
    Generates top-level pb_type wrapper for cells of a given tile type.

    Creates a "PB-<TYPE>" pb_type element with flattened ports, one child
    pb_type per cell (pulled in via XInclude) and the interconnect that maps
    tile pins onto cell pins. Returns the built lxml element.
    """
    pb_name = "PB-{}".format(tile_type.type.upper())
    xml_pb = ET.Element("pb_type", {"name": pb_name}, nsmap=nsmap)

    # Ports (buses flattened into individual pins)
    add_ports(xml_pb, tile_type.pins, buses=False)

    # Include cells via XInclude, one child pb_type per cell type
    xi_include = "{{{}}}include".format(nsmap["xi"])
    for cell_type, cell_count in tile_type.cells.items():
        xml_sub = ET.SubElement(xml_pb, "pb_type", {"name": cell_type.upper(), "num_pb": str(cell_count)})

        name = cell_type.lower()

        # Be smart. Check if there is a file for that cell in the current
        # directory. If not then use the one from "primitives" path
        pb_type_file = "./{}.pb_type.xml".format(name)
        if not os.path.isfile(pb_type_file):
            pb_type_file = "../../primitives/{}/{}.pb_type.xml".format(name, name)

        ET.SubElement(
            xml_sub,
            xi_include,
            {
                "href": pb_type_file,
                "xpointer": "xpointer(pb_type/child::node())",
            },
        )

    def tile_pin_to_cell_pin(name):
        # Convert a flattened tile pin name like "LOGIC0_Q" into the indexed
        # cell pin form "LOGIC[0].Q"
        match = re.match(r"^([A-Za-z_]+)([0-9]+)_(.*)$", name)
        assert match is not None, name

        return "{}[{}].{}".format(match.group(1), match.group(2), match.group(3))

    # Generate the interconnect
    xml_ic = ET.SubElement(xml_pb, "interconnect")

    for pin in tile_type.pins:
        name, idx = get_pin_name(pin.name)

        # The fake const pin is handled separately below
        if tile_type.fake_const_pin and name == "FAKE_CONST":
            continue

        cell_pin = name if idx is None else "{}[{}]".format(name, idx)
        tile_pin = fixup_pin_name(pin.name)

        cell_pin = tile_pin_to_cell_pin(cell_pin)
        tile_pin = "{}.{}".format(pb_name, tile_pin)

        # Direct connection, direction depends on whether the pin is an
        # input or an output of the tile
        if pin.direction == PinDirection.INPUT:
            ET.SubElement(
                xml_ic, "direct", {"name": "{}_to_{}".format(tile_pin, cell_pin), "input": tile_pin, "output": cell_pin}
            )

        elif pin.direction == PinDirection.OUTPUT:
            ET.SubElement(
                xml_ic, "direct", {"name": "{}_to_{}".format(cell_pin, tile_pin), "input": cell_pin, "output": tile_pin}
            )

        else:
            assert False, pin

    # If the tile has a fake const input then connect it to each cell wrapper
    if tile_type.fake_const_pin:
        tile_pin = "{}.FAKE_CONST".format(pb_name)

        for cell_type, cell_count in tile_type.cells.items():
            for i in range(cell_count):
                cell_pin = "{}[{}].{}".format(cell_type, i, "FAKE_CONST")

                ET.SubElement(
                    xml_ic,
                    "direct",
                    {"name": "{}_to_{}".format(tile_pin, cell_pin), "input": tile_pin, "output": cell_pin},
                )

    return xml_pb
||||
|
||||
|
||||
def make_top_level_tile(tile_type, sub_tiles, tile_types, equivalent_tiles=None):
    """
    Makes a tile definition for the given tile

    Parameters
    ----------
    tile_type: str
        Tile type name; the element is named "TL-<TILE_TYPE>"
    sub_tiles: dict
        Map of sub-tile type name to its capacity (int)
    tile_types: dict
        Map of tile type name to tile type objects (provide .pins)
    equivalent_tiles: dict or None
        Optional map: sub-tile name -> {site type: pinmap}. A pinmap is
        either None (one-to-one pin mapping) or a list of
        (tile pin, pb pin) tuples.

    Returns
    -------
    xml.etree.ElementTree.Element
        The VPR architecture <tile> element.
    """

    # The tile tag
    tl_name = "TL-{}".format(tile_type.upper())
    xml_tile = ET.Element(
        "tile",
        {
            "name": tl_name,
        },
    )

    # Make sub-tiles
    for sub_tile, capacity in sub_tiles.items():
        st_name = "ST-{}".format(sub_tile)

        # The sub-tile tag
        xml_sub_tile = ET.SubElement(xml_tile, "sub_tile", {"name": st_name, "capacity": str(capacity)})

        # Make the tile equivalent to itself when no equivalence is given
        if equivalent_tiles is None or sub_tile not in equivalent_tiles:
            equivalent_sub_tiles = {sub_tile: None}
        else:
            equivalent_sub_tiles = equivalent_tiles[sub_tile]

        # Top-level ports
        tile_pinlists = add_ports(xml_sub_tile, tile_types[sub_tile].pins, False)

        # Equivalent sites
        xml_equiv = ET.SubElement(xml_sub_tile, "equivalent_sites")
        for site_type, site_pinmap in equivalent_sub_tiles.items():

            # Site tag
            pb_name = "PB-{}".format(site_type.upper())
            xml_site = ET.SubElement(xml_equiv, "site", {"pb_type": pb_name, "pin_mapping": "custom"})

            # Same type, map one-to-one
            if tile_type.upper() == site_type.upper() or site_pinmap is None:

                all_pins = {**tile_pinlists["clock"], **tile_pinlists["input"], **tile_pinlists["output"]}

                for pin, count in all_pins.items():
                    # Pins are expected to be 1-bit wide here
                    assert count == 1, (pin, count)
                    ET.SubElement(
                        xml_site, "direct", {"from": "{}.{}".format(st_name, pin), "to": "{}.{}".format(pb_name, pin)}
                    )

            # Explicit pinmap as a list of tuples (from, to)
            elif isinstance(site_pinmap, list):

                for tl_pin, pb_pin in site_pinmap:
                    ET.SubElement(
                        xml_site,
                        "direct",
                        {"from": "{}.{}".format(st_name, tl_pin), "to": "{}.{}".format(pb_name, pb_pin)},
                    )

            # Should not happen
            else:
                assert False, (tl_name, st_name, pb_name)

        # TODO: Add "fc" override for direct tile-to-tile connections if any.

        # Pin locations
        pins_by_loc = {"left": [], "right": [], "bottom": [], "top": []}

        # Make input pins go towards top and output pins go towards right.
        for pin, count in itertools.chain(tile_pinlists["clock"].items(), tile_pinlists["input"].items()):
            assert count == 1, (pin, count)
            pins_by_loc["top"].append(pin)

        for pin, count in tile_pinlists["output"].items():
            assert count == 1, (pin, count)
            pins_by_loc["right"].append(pin)

        # Dump pin locations
        xml_pinloc = ET.SubElement(xml_sub_tile, "pinlocations", {"pattern": "custom"})
        for loc, pins in pins_by_loc.items():
            if len(pins):
                xml_loc = ET.SubElement(xml_pinloc, "loc", {"side": loc})
                xml_loc.text = " ".join(["{}.{}".format(st_name, pin) for pin in pins])

        # Switchblocks locations
        # This is actually not needed in the end but has to be present to make
        # VPR happy
        ET.SubElement(xml_sub_tile, "switchblock_locations", {"pattern": "all"})

    return xml_tile
|
349
f4pga/utils/quicklogic/pp3/timing.py
Normal file
349
f4pga/utils/quicklogic/pp3/timing.py
Normal file
|
@ -0,0 +1,349 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
import statistics
|
||||
|
||||
from copy import deepcopy
|
||||
from collections import defaultdict, namedtuple
|
||||
|
||||
from f4pga.utils.quicklogic.pp3.data_structs import VprSwitch, MuxEdgeTiming, DriverTiming, SinkTiming
|
||||
from f4pga.utils.quicklogic.pp3.utils import yield_muxes, add_named_item
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def linear_regression(xs, ys):
    """
    Fits f(x) = a * x + b to the given samples using simple linear
    regression (https://en.wikipedia.org/wiki/Simple_linear_regression).

    Returns the (a, b) coefficient pair.
    """
    mean_x = statistics.mean(xs)
    mean_y = statistics.mean(ys)

    # Covariance and variance sums accumulated around the means
    covar = sum((x - mean_x) * (y - mean_y) for x, y in zip(xs, ys))
    var = sum((x - mean_x) * (x - mean_x) for x in xs)

    slope = covar / var
    intercept = mean_y - slope * mean_x

    return slope, intercept
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def create_vpr_switch(type, tdel, r, c):
    """
    Builds a VprSwitch with the given parameters and a name that encodes
    them.

    The VPR switch parameters are:
    - type: Switch type. See VPR docs for the available types
    - tdel: Constant propagation delay [s]
    - r: Internal resistance [ohm]
    - c: Internal capacitance (active only when the switch is "on") [F]
    """

    # Encode the parameters in the name (delay in ns, capacitance in pF)
    sw_name = "_".join(
        [
            "sw",
            "T{:>08.6f}".format(tdel * 1e9),
            "R{:>08.6f}".format(r),
            "C{:>010.6f}".format(c * 1e12),
        ]
    )

    return VprSwitch(
        name=sw_name,
        type=type,
        t_del=tdel,
        r=r,
        c_in=0.0,
        c_out=0.0,
        c_int=c,
    )
|
||||
|
||||
|
||||
def compute_switchbox_timing_model(switchbox, timing_data):
    """
    Processes switchbox timing data.

    The timing data is provided in a form of delays for each mux edge (path
    from its input pin to the output pin). The delay varies with number of
    active loads of the source.

    This data is used to compute driver resistances and load capacitances
    as well as constant propagation delays.

    The timing model assumes that each output of a mux has a certain resistance
    and constant propagation time. Then, every load has a capacitance which is
    connected when it is active. All capacitances are identical. The input
    timing data does not allow to distinguish between them. Additionally, each
    load can have a constant propagation delay.

    For multiplexers that are driver by switchbox inputs, fake drivers are
    assumed solely for the purpose of the timing model.

    Parameters
    ----------
    switchbox:
        Switchbox object providing .connections, .inputs, .stages and .type
    timing_data:
        Nested dict [stage_id][switch_id][mux_id][pin_id] -> {load count:
        list of delays in ns}

    Returns
    -------
    (driver_timing, sink_map) where driver_timing maps a driver key to a
    Timing namedtuple and sink_map maps a driver key to its sink keys.
    """

    # A helper struct
    Timing = namedtuple("Timing", "driver_r driver_tdel sink_c sink_tdel")

    # Delay scaling factor
    FACTOR = 1.0

    # Error threshold (for reporting) in ns
    ERROR_THRESHOLD = 0.4 * 1e-9

    # Build a map of sinks for each driver
    # For internal drivers key = (stage_id, switch_id, mux_id)
    # For external drivers key = (stage_id, input_name)
    sink_map = defaultdict(lambda: [])

    for connection in switchbox.connections:
        src = connection.src
        dst = connection.dst

        dst_key = (dst.stage_id, dst.switch_id, dst.mux_id, dst.pin_id)
        src_key = (src.stage_id, src.switch_id, src.mux_id)

        sink_map[src_key].append(dst_key)

    for pin in switchbox.inputs.values():
        for loc in pin.locs:

            dst_key = (loc.stage_id, loc.switch_id, loc.mux_id, loc.pin_id)
            src_key = (loc.stage_id, pin.name)

            sink_map[src_key].append(dst_key)

    # Compute timing model for each driver
    driver_timing = {}
    for driver, sinks in sink_map.items():

        # Collect timing data for each sink edge
        edge_timings = {}
        for stage_id, switch_id, mux_id, pin_id in sinks:

            # Try getting timing data. If not found then probably we are
            # computing timing for VCC or GND input.
            try:
                data = timing_data[stage_id][switch_id][mux_id][pin_id]
            except KeyError:
                continue

            # Sanity check. The number of load counts must be equal to the
            # number of sinks for the driver.
            assert len(data) == len(sinks)

            # Take the worst case (max), convert ns to seconds.
            data = {n: max(d) * 1e-9 for n, d in data.items()}

            # Store
            key = (stage_id, switch_id, mux_id, pin_id)
            edge_timings[key] = data

        # No timing data, probably it is a VCC or GND input
        if not len(edge_timings):
            continue

        # Compute linear regression for each sink data
        # (x = active load count, y = delay in seconds)
        coeffs = {}
        for sink in sinks:
            xs = sorted(edge_timings[sink].keys())
            ys = [edge_timings[sink][x] for x in xs]

            a, b = linear_regression(xs, ys)

            # Cannot have a < 0 (decreasing relation). If such thing happens
            # force the regression line to be flat.
            if a < 0.0:
                print(
                    "WARNING: For '{} {}' the delay model slope is negative! (a={:.2e})".format(switchbox.type, sink, a)
                )
                a = 0.0

            # Cannot have any delay higher than the model. Check if all delays
            # lie below the regression line and if not then shift the line up
            # accordingly.
            for x, y in zip(xs, ys):
                t = a * x + b
                if y > t:
                    b += y - t

            coeffs[sink] = (a, b)

        # Assumed driver resistance [ohm]
        driver_r = 1.0

        # Compute driver's Tdel (the smallest intercept among all sinks)
        driver_tdel = min([cfs[1] for cfs in coeffs.values()])

        # Compute per-sink Tdel (remainder of each intercept)
        sink_tdel = {s: cfs[1] - driver_tdel for s, cfs in coeffs.items()}

        # Compute sink capacitance. Since we have multiple edge timings that
        # should yield the same capacitance, compute one for each timing and
        # then choose the worst case (max).
        # NOTE(review): this subtracts a time (sink_tdel, seconds) from a
        # slope/resistance ratio (farads) — dimensionally inconsistent on
        # its face; confirm against the intended derivation before changing.
        sink_cs = {s: (cfs[0] / (FACTOR * driver_r) - sink_tdel[s]) for s, cfs in coeffs.items()}
        sink_c = max(sink_cs.values())

        # Sanity check
        # NOTE(review): `sink` here is the leaked loop variable from the
        # regression loop above, so the assert message may name the wrong
        # sink. Verify intent.
        assert sink_c >= 0.0, (switchbox.type, sink, sink_c)

        # Compute error of the delay model
        for sink in sinks:

            # Compute for this sink
            error = {}
            for n, true_delay in edge_timings[sink].items():
                model_delay = driver_tdel + FACTOR * driver_r * sink_c * n + sink_tdel[sink]
                error[n] = true_delay - model_delay

            max_error = max([abs(e) for e in error.values()])

            # Report the error
            if max_error > ERROR_THRESHOLD:
                print("WARNING: Error of the timing model of '{} {}' is too high:".format(switchbox.type, sink))
                print("--------------------------------------------")
                print("| # loads | actual | model | error |")
                print("|---------+----------+----------+----------|")

                for n in edge_timings[sink].keys():
                    print(
                        "| {:<8}| {:<9.3f}| {:<9.3f}| {:<9.3f}|".format(
                            n, 1e9 * edge_timings[sink][n], 1e9 * (edge_timings[sink][n] - error[n]), 1e9 * error[n]
                        )
                    )

                print("--------------------------------------------")
                print("")

        # Store the data
        driver_timing[driver] = Timing(
            driver_r=driver_r, driver_tdel=driver_tdel, sink_tdel={s: d for s, d in sink_tdel.items()}, sink_c=sink_c
        )

    return driver_timing, sink_map
|
||||
|
||||
|
||||
def populate_switchbox_timing(switchbox, driver_timing, sink_map, vpr_switches):
    """
    Populates the switchbox timing model by annotating its muxes with the timing
    data. Creates new VPR switches with required parameters or uses existing
    ones if already created.

    Parameters
    ----------
    switchbox:
        Switchbox object whose muxes get .timing[pin_id] annotations
    driver_timing: dict
        Driver key -> Timing namedtuple (from compute_switchbox_timing_model)
    sink_map: dict
        Driver key -> list of (stage_id, switch_id, mux_id, pin_id) sinks
    vpr_switches: dict
        Shared name -> VprSwitch registry; new switches are added here
    """

    # Populate timing data to the switchbox
    for driver, timing in driver_timing.items():

        # Driver VPR switch (carries the driver delay and resistance)
        driver_vpr_switch = create_vpr_switch(
            type="mux",
            tdel=timing.driver_tdel,
            r=timing.driver_r,
            c=0.0,
        )

        # Deduplicate via the shared registry
        driver_vpr_switch = add_named_item(vpr_switches, driver_vpr_switch, driver_vpr_switch.name)

        # Annotate all driver's edges
        for sink in sink_map[driver]:
            stage_id, switch_id, mux_id, pin_id = sink

            # Sink VPR switch (carries the per-sink delay and capacitance)
            sink_vpr_switch = create_vpr_switch(
                type="mux",
                tdel=timing.sink_tdel[sink],
                r=0.0,
                c=timing.sink_c,
            )

            sink_vpr_switch = add_named_item(vpr_switches, sink_vpr_switch, sink_vpr_switch.name)

            # Get the mux
            stage = switchbox.stages[stage_id]
            switch = stage.switches[switch_id]
            mux = switch.muxes[mux_id]

            # Each mux input pin may be annotated only once
            assert pin_id not in mux.timing

            # NOTE(review): the sink VPR switch above uses the per-sink
            # delay timing.sink_tdel[sink], but SinkTiming below stores the
            # whole timing.sink_tdel dict — confirm whether the per-sink
            # value was intended here.
            mux.timing[pin_id] = MuxEdgeTiming(
                driver=DriverTiming(tdel=timing.driver_tdel, r=timing.driver_r, vpr_switch=driver_vpr_switch.name),
                sink=SinkTiming(tdel=timing.sink_tdel, c=timing.sink_c, vpr_switch=sink_vpr_switch.name),
            )
|
||||
|
||||
|
||||
def copy_switchbox_timing(src_switchbox, dst_switchbox):
    """
    Copies all mux timing annotations from the source switchbox onto the
    structurally-matching muxes of the destination switchbox.
    """

    # Walk the destination muxes and pull timing from the same
    # stage/switch/mux coordinates in the source.
    for stage, switch, mux in yield_muxes(dst_switchbox):
        source_mux = src_switchbox.stages[stage.id].switches[switch.id].muxes[mux.id]
        mux.timing = deepcopy(source_mux.timing)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def add_vpr_switches_for_cell(cell_type, cell_timings):
    """
    Creates VPR switches for IOPATH delays read from SDF file(s) for the
    given cell type.

    Returns a dict mapping switch names to VprSwitch objects.
    """
    vpr_switches = {}

    # Only timing entries whose key begins with the cell type are relevant
    for celltype, cell_data in cell_timings.items():
        if not celltype.startswith(cell_type):
            continue

        for instance, inst_data in cell_data.items():
            for timing, timing_data in inst_data.items():

                # Only IOPATH entries become switches
                if timing_data["type"].lower() != "iopath":
                    continue

                # One switch per (cell, instance, from pin, to pin), with
                # the "slow" corner average delay.
                sw = VprSwitch(
                    name="{}.{}.{}.{}".format(cell_type, instance, timing_data["from_pin"], timing_data["to_pin"]),
                    type="mux",
                    t_del=timing_data["delay_paths"]["slow"]["avg"],
                    r=0.0,
                    c_in=0.0,
                    c_out=0.0,
                    c_int=0.0,
                )
                vpr_switches[sw.name] = sw

    return vpr_switches
|
162
f4pga/utils/quicklogic/pp3/utils.py
Normal file
162
f4pga/utils/quicklogic/pp3/utils.py
Normal file
|
@ -0,0 +1,162 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
import re
|
||||
|
||||
# =============================================================================
|
||||
|
||||
# A regex used for fixing pin names
|
||||
RE_PIN_NAME = re.compile(r"^([A-Za-z0-9_]+)(?:\[([0-9]+)\])?$")
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def get_pin_name(name):
    """
    Splits a pin name into its base name and bus index. For a pin that is
    not part of a bus the index is None.

    >>> get_pin_name("WIRE")
    ('WIRE', None)
    >>> get_pin_name("DATA[12]")
    ('DATA', 12)
    """
    m = re.match(r"(?P<name>.*)\[(?P<idx>[0-9]+)\]$", name)
    if m is None:
        return name, None
    return m.group("name"), int(m.group("idx"))
|
||||
|
||||
|
||||
def fixup_pin_name(name):
    """
    Renames a pin to make its name suitable for VPR: "NAME[IDX]" becomes
    "NAME_IDX", plain names pass through unchanged.

    >>> fixup_pin_name("A_WIRE")
    'A_WIRE'
    >>> fixup_pin_name("ADDRESS[17]")
    'ADDRESS_17'
    >>> fixup_pin_name("DATA[11]_X")
    Traceback (most recent call last):
    ...
    AssertionError: DATA[11]_X
    """
    match = re.match(r"^([A-Za-z0-9_]+)(?:\[([0-9]+)\])?$", name)
    assert match is not None, name

    base, index = match.groups()
    return base if index is None else "{}_{}".format(base, index)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def yield_muxes(switchbox):
    """
    Generator over every mux of the switchbox. Yields
    (stage, switch, mux) tuples.
    """
    for stage in switchbox.stages.values():
        for sw in stage.switches.values():
            yield from ((stage, sw, m) for m in sw.muxes.values())
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def get_quadrant_for_loc(loc, quadrants):
    """
    Returns the quadrant that contains the given location, or None when
    no quadrant matches.
    """
    for quadrant in quadrants.values():
        # Inclusive bounds on both axes
        if quadrant.x0 <= loc.x <= quadrant.x1 and quadrant.y0 <= loc.y <= quadrant.y1:
            return quadrant

    return None
|
||||
|
||||
|
||||
def get_loc_of_cell(cell_name, tile_grid):
    """
    Returns the location of the tile that holds the cell with the given
    name, or None when the cell is not present in the grid.
    """
    for loc, tile in tile_grid.items():
        # Empty grid entries hold None instead of a tile
        if tile is not None and any(c.name == cell_name for c in tile.cells):
            return loc

    # Not found
    return None
|
||||
|
||||
|
||||
def find_cell_in_tile(cell_name, tile):
    """
    Finds a cell instance with the given name inside the given tile.
    Returns the Cell object if found and None otherwise.
    """
    return next((c for c in tile.cells if c.name == cell_name), None)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def add_named_item(item_dict, item, item_name):
    """
    Inserts the item under item_name unless that name is already taken;
    returns whichever item ends up stored under the name.
    """
    # dict.setdefault does the insert-if-missing and lookup in one step
    return item_dict.setdefault(item_name, item)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def natural_keys(text):
    """
    Key function for natural ("human") sort order:
    alist.sort(key=natural_keys)

    Digit runs compare numerically, everything else lexicographically.
    http://nedbatchelder.com/blog/200712/human_sorting.html
    https://stackoverflow.com/questions/5967500/how-to-correctly-sort-a-string-with-a-number-inside
    """
    return [int(part) if part.isdigit() else part for part in re.split(r"(\d+)", text)]
|
717
f4pga/utils/quicklogic/pp3/verilogmodule.py
Normal file
717
f4pga/utils/quicklogic/pp3/verilogmodule.py
Normal file
|
@ -0,0 +1,717 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
import re
|
||||
from collections import namedtuple, defaultdict
|
||||
|
||||
from f4pga.utils.quicklogic.pp3.data_structs import PinDirection
|
||||
|
||||
Element = namedtuple("Element", "loc type name ios")
|
||||
Wire = namedtuple("Wire", "srcloc name inverted")
|
||||
VerilogIO = namedtuple("VerilogIO", "name direction ioloc")
|
||||
|
||||
|
||||
def loc2str(loc):
|
||||
return "X" + str(loc.x) + "Y" + str(loc.y)
|
||||
|
||||
|
||||
class VModule(object):
|
||||
"""Represents a Verilog module for QLAL4S3B FASM"""
|
||||
|
||||
def __init__(
    self,
    vpr_tile_grid,
    vpr_tile_types,
    cells_library,
    pcf_data,
    belinversions,
    interfaces,
    designconnections,
    org_loc_map,
    cand_map,
    inversionpins,
    io_to_fbio,
    useinversionpins=True,
):
    """Prepares initial structures.

    Refer to fasm2bels.py for input description.
    """

    # Design / architecture inputs, kept as-is (formats documented in
    # fasm2bels.py)
    self.vpr_tile_grid = vpr_tile_grid
    self.vpr_tile_types = vpr_tile_types
    self.cells_library = cells_library
    self.pcf_data = pcf_data
    self.belinversions = belinversions
    self.interfaces = interfaces
    self.designconnections = designconnections
    self.org_loc_map = org_loc_map
    self.cand_map = cand_map
    self.inversionpins = inversionpins
    self.useinversionpins = useinversionpins
    self.io_to_fbio = io_to_fbio

    # dictionary holding inputs, outputs
    self.ios = {}
    # dictionary holding declared wires (wire value;)
    self.wires = {}
    # dictionary holding Verilog elements
    self.elements = defaultdict(dict)
    # dictionary holding assigns (assign key = value;)
    self.assigns = {}

    # helper representing last input id
    self.last_input_id = 0
    # helper representing last output id
    self.last_output_id = 0

    # Map from VPR cell type to the QuickLogic macro name emitted in the
    # generated Verilog
    self.qlal4s3bmapping = {
        "LOGIC": "logic_cell_macro",
        "ASSP": "qlal4s3b_cell_macro",
        "BIDIR": "gpio_cell_macro",
        "RAM": "ram8k_2x1_cell_macro",
        "MULT": "qlal4s3_mult_cell_macro",
        "GMUX": "gclkbuff",
        "QMUX": "qhsckbuff",
        "CLOCK": "ckpad",
        "inv": "inv",
    }

    # Per-macro pin renaming: VPR pin name -> macro port name
    self.qlal4s3_pinmap = {
        "ckpad": {
            "IP": "P",
            "IC": "Q",
        },
        "gclkbuff": {
            "IC": "A",
            "IZ": "Z",
        },
        "qhsckbuff": {"HSCKIN": "A", "IZ": "Z"},
    }
|
||||
|
||||
def group_vector_signals(self, signals, io=False):
    """
    Collapses individual "name[idx]" entries into single vector
    declarations of the form "[max:min] name".

    Parameters
    ----------
    signals:
        When io is False: dict mapping Wire ids to signal name strings.
        When io is True: iterable of VerilogIO namedtuples.
    io: bool
        Selects IO mode (input is converted to/from the dict form).

    Returns
    -------
    dict of Wire id -> declaration (io=False), or
    list of (direction, declaration) tuples (io=True).
    """

    # IOs beside name, have also direction, convert them to format
    # we can process
    if io:
        orig_ios = signals
        ios = dict()
        for s in signals:
            # NOTE(review): Wire fields are (srcloc, name, inverted), so
            # the IO name is stored in the "srcloc" slot here. The tuple
            # is only used as a unique key — confirm before relying on
            # field names.
            id = Wire(s.name, "io", False)
            ios[id] = s.name
        signals = ios

    vectors = dict()
    new_signals = dict()

    array = re.compile(r"(?P<varname>[a-zA-Z_][a-zA-Z_0-9$]+)\[(?P<arrindex>[0-9]+)\]")

    # first find the vectors
    for signalid in signals:
        match = array.match(signals[signalid])
        if match:
            varname = match.group("varname")
            arrayindex = int(match.group("arrindex"))

            # Track the widest [max:min] range seen for this base name
            if varname not in vectors:
                vectors[varname] = dict()
                vectors[varname]["max"] = 0
                vectors[varname]["min"] = 0

            if arrayindex > vectors[varname]["max"]:
                vectors[varname]["max"] = arrayindex

            if arrayindex < vectors[varname]["min"]:
                vectors[varname]["min"] = arrayindex

        # if signal is not a part of a vector leave it
        else:
            new_signals[signalid] = signals[signalid]

    # add vectors to signals dict
    for vec in vectors:
        name = "[{max}:{min}] {name}".format(max=vectors[vec]["max"], min=vectors[vec]["min"], name=vec)
        id = Wire(name, "vector", False)
        new_signals[id] = name

    if io:
        # we need to restore the direction info
        new_ios = list()
        for s in new_signals:
            # The declaration may be "[max:min] name" — the actual name
            # is the last whitespace-separated token.
            signalname = new_signals[s].split()
            signalname = signalname[-1]
            # NOTE(review): startswith() can match a different IO that
            # shares a prefix (e.g. "out_1" vs "out_10"); the first
            # match's direction wins. Verify this is intended.
            io = [x.direction for x in orig_ios if x.name.startswith(signalname)]
            direction = io[0]
            new_ios.append((direction, new_signals[s]))
        return new_ios
    else:
        return new_signals
|
||||
|
||||
def group_array_values(self, parameters: dict):
|
||||
"""Groups pin names that represent array indices.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
parameters: dict
|
||||
A dictionary holding original parameters
|
||||
|
||||
Returns
|
||||
-------
|
||||
dict: parameters with grouped array indices
|
||||
"""
|
||||
newparameters = dict()
|
||||
arraydst = re.compile(r"(?P<varname>[a-zA-Z_][a-zA-Z_0-9$]+)\[(?P<arrindex>[0-9]+)\]")
|
||||
for dst, src in parameters.items():
|
||||
match = arraydst.match(dst)
|
||||
if match:
|
||||
varname = match.group("varname")
|
||||
arrindex = int(match.group("arrindex"))
|
||||
if varname not in newparameters:
|
||||
newparameters[varname] = {arrindex: src}
|
||||
else:
|
||||
newparameters[varname][arrindex] = src
|
||||
else:
|
||||
newparameters[dst] = src
|
||||
return newparameters
|
||||
|
||||
def form_simple_assign(self, loc, parameters):
    """
    Forms a plain Verilog `assign` for a simple (non-inout) IO cell at
    the given location.

    Returns the assign statement, or an empty string when the IO is not
    configured.
    """
    ioname = self.get_io_name(loc)
    direction = self.get_io_config(parameters)

    if direction == "input":
        return " assign {} = {};".format(parameters["IZ"], ioname)
    if direction == "output":
        return " assign {} = {};".format(ioname, parameters["OQI"])

    # Anything other than input/output/None is a configuration error
    assert direction is None, "Unknown IO configuration"
    return ""
|
||||
|
||||
def form_verilog_element(self, loc, typ: str, name: str, parameters: dict):
    """Creates an entry representing single Verilog submodule.

    Parameters
    ----------
    loc: Loc
        Cell coordinates
    typ: str
        Cell type
    name: str
        Name of the submodule
    parameters: dict
        Map from input pin to source wire

    Returns
    -------
    str: Verilog entry
    """
    if typ == "BIDIR":

        # We do not emit the BIDIR cell for non inout IOs
        direction = self.get_io_config(parameters)
        if direction is None:
            return ""
        elif direction != "inout":
            # Simple input/output becomes a plain assign instead of a cell
            return self.form_simple_assign(loc, parameters)

    params = []
    moduletype = self.qlal4s3bmapping[typ]
    pin_map = self.qlal4s3_pinmap.get(moduletype, dict())
    result = f" {moduletype} {name} ("
    fixedparameters = self.group_array_values(parameters)
    # get inputs, strip vector's pin indexes
    input_pins = [
        pin.name.split("[")[0] for pin in self.cells_library[typ].pins if pin.direction == PinDirection.INPUT
    ]
    dummy_wires = []

    for inpname, inp in fixedparameters.items():
        mapped_inpname = pin_map.get(inpname, inpname)
        # A dict value means the pin is a vector: build a concatenation
        # covering every index from maxindex down to 0.
        if isinstance(inp, dict):
            arr = []
            dummy_wire = f"{moduletype}_{name}_{inpname}"
            max_dummy_index = 0
            need_dummy = False
            maxindex = max([val for val in inp.keys()])
            for i in reversed(range(maxindex + 1)):
                if i not in inp:
                    # do not assign constants to outputs
                    if inpname in input_pins:
                        arr.append("1'b0")
                    else:
                        # Unconnected output bit: sink it into a dummy wire
                        if i > max_dummy_index:
                            max_dummy_index = i
                        arr.append("{}[{}]".format(dummy_wire, i))
                        need_dummy = True
                else:
                    arr.append(inp[i])
            arrlist = ", ".join(arr)
            params.append(f".{mapped_inpname}({{{arrlist}}})")
            if need_dummy:
                dummy_wires.append(f" wire [{max_dummy_index}:0] {dummy_wire};")
        else:
            params.append(f".{mapped_inpname}({inp})")
    # Tie the macro's inversion-control pins according to the decoded BEL
    # inversions for this cell.
    if self.useinversionpins:
        if typ in self.inversionpins:
            for toinvert, inversionpin in self.inversionpins[typ].items():
                if toinvert in self.belinversions[loc][typ]:
                    params.append(f".{inversionpin}(1'b1)")
                else:
                    params.append(f".{inversionpin}(1'b0)")

    # handle BIDIRs and CLOCKs
    if typ in ["CLOCK", "BIDIR"]:
        ioname = self.get_io_name(loc)

        moduletype = self.qlal4s3bmapping[typ]
        pin_map = self.qlal4s3_pinmap.get(moduletype, dict())

        # Connect the pad pin to the top-level IO
        params.append(".{}({})".format(pin_map.get("IP", "IP"), ioname))

    # Emit port connections sorted by name, aligned under the open paren
    result += f',\n{" " * len(result)}'.join(sorted(params)) + ");\n"
    wires = ""
    for wire in dummy_wires:
        wires += f"\n{wire}"
    # Dummy wire declarations go before the instantiation
    result = wires + "\n\n" + result
    return result
|
||||
|
||||
@staticmethod
|
||||
def get_element_name(type, loc):
|
||||
"""Forms element name from its type and FASM feature name."""
|
||||
return f"{type}_X{loc.x}_Y{loc.y}"
|
||||
|
||||
@staticmethod
|
||||
def get_element_type(type):
|
||||
match = re.match(r"(?P<type>[A-Za-z]+)(?P<index>[0-9]+)?", type)
|
||||
assert match is not None, type
|
||||
return match.group("type")
|
||||
|
||||
def get_bel_type_and_connections(self, loc, connections, direction):
    """For a given connection list returns a dictionary
    with bel types and connections to them

    Parameters
    ----------
    loc: Loc
        Location of the tile whose cells are inspected
    connections:
        Either a dict mapping pin names to source wires, or a single
        pin name string
    direction: PinDirection
        Only cell pins with this direction are considered

    Returns
    -------
    dict: "<cell_type><cell_index>" -> {pin name: connection}
    """

    cells = self.vpr_tile_grid[loc].cells

    # Accept a single pin name by wrapping it into a one-entry dict
    if type(connections) == str:
        inputnames = [connections]
        # convert connections to dict
        connections = dict()
        connections[inputnames[0]] = inputnames[0]
    else:
        inputnames = [name for name in connections.keys()]

    # Some switchbox inputs are named like "<cell><cell_index>_<pin>"
    # Make a list of them for comparison in the following step.
    cell_input_names = defaultdict(lambda: [])
    for name in inputnames:
        fields = name.split("_", maxsplit=1)
        if len(fields) == 2:
            cell, pin = fields
            cell_input_names[cell].append(pin)

    # Determine which of the tile's cells the connections refer to
    used_cells = []
    for cell in cells:
        cell_name = "{}{}".format(cell.type, cell.index)

        cellpins = [pin.name for pin in self.cells_library[cell.type].pins if pin.direction == direction]

        # check every connection pin if it has
        for pin in cellpins:

            # Cell name and pin name match
            if cell_name in cell_input_names:
                if pin in cell_input_names[cell_name]:
                    used_cells.append(cell)
                    break

            # The pin name matches exactly the specified input name
            if pin in inputnames:
                used_cells.append(cell)
                break

    # assign connection to a cell
    cell_connections = {}
    for cell in used_cells:
        cell_name = "{}{}".format(cell.type, cell.index)

        cell_connections[cell_name] = dict()
        cellpins = [pin.name for pin in self.cells_library[cell.type].pins if pin.direction == direction]

        for key in connections.keys():
            if key in cellpins:
                cell_connections[cell_name][key] = connections[key]

            # Some switchbox inputs are named like
            # "<cell><cell_index>_<pin>". Break down the name and check.
            fields = key.split("_", maxsplit=1)
            if len(fields) == 2:
                key_cell, key_pin = fields
                if key_cell == cell_name and key_pin in cellpins:
                    cell_connections[cell_name][key_pin] = connections[key]

    return cell_connections
|
||||
|
||||
def new_io_name(self, direction):
    """Create a new unique IO name for a given direction.

    Parameters
    ----------
    direction: str
        Direction of the IO, can be 'input' or 'output'. 'inout' is
        accepted by the assertion but not implemented yet.

    Returns
    -------
    str: the newly generated IO name ("in_<n>" or "out_<n>").
    """
    # TODO add support for inout
    assert direction in ["input", "output", "inout"]
    if direction == "output":
        name = f"out_{self.last_output_id}"
        self.last_output_id += 1
    elif direction == "input":
        name = f"in_{self.last_input_id}"
        self.last_input_id += 1
    else:
        # Previously this branch was a silent `pass`, which made the
        # `return name` below raise UnboundLocalError. Fail explicitly
        # instead so the missing feature is obvious.
        raise NotImplementedError("'inout' IOs are not supported yet")
    return name
|
||||
|
||||
def get_wire(self, loc, wire, inputname):
    """Creates or gets an existing wire for a given source.

    Side effects: may create a top-level input IO (self.ios), a source
    Element (self.elements), an output assign (self.assigns) and an
    inverter Element when the destination pin is inverted.

    Parameters
    ----------
    loc: Loc
        Location of the destination cell
    wire: tuple
        A tuple of location of the source cell and source pin name
    inputname: str
        A name of the destination pin

    Returns
    -------
    str: wire name
    """
    # Destination is a synthetic IO pad -> this wire drives an output.
    isoutput = self.vpr_tile_grid[loc].type == "SYN_IO"
    if isoutput:
        # outputs are never inverted
        inverted = False
    else:
        # determine if inverted (pin listed in the BEL inversion data)
        inverted = inputname in self.belinversions[loc][self.vpr_tile_grid[loc].type]
    wireid = Wire(wire[0], wire[1], inverted)
    if wireid in self.wires:
        # if wire already exists, use it
        return self.wires[wireid]

    # first create uninverted wire
    uninvertedwireid = Wire(wire[0], wire[1], False)
    if uninvertedwireid in self.wires:
        # if wire already exists, use it
        wirename = self.wires[uninvertedwireid]
    else:
        srcname = self.vpr_tile_grid[wire[0]].name
        type_connections = self.get_bel_type_and_connections(wire[0], wire[1], PinDirection.OUTPUT)
        # there should be only one type here
        srctype = [type for type in type_connections.keys()][0]
        srconame = wire[1]
        if srctype == "SYN_IO":
            # if source is input, use its name; create the top-level
            # input IO on first use
            if wire[0] not in self.ios:
                self.ios[wire[0]] = VerilogIO(name=self.new_io_name("input"), direction="input", ioloc=wire[0])
            assert self.ios[wire[0]].direction == "input"
            wirename = self.ios[wire[0]].name
        else:
            # form a new wire name from source cell and pin names
            wirename = f"{srcname}_{srconame}"
        if srctype not in self.elements[wire[0]]:
            # if the source element does not exist, create it
            self.elements[wire[0]][srctype] = Element(
                wire[0],
                self.get_element_type(srctype),
                self.get_element_name(srctype, wire[0]),
                {srconame: wirename},
            )
        else:
            # add wirename to the existing element
            self.elements[wire[0]][srctype].ios[srconame] = wirename
        if not isoutput and srctype != "SYN_IO":
            # add wire (cell-to-cell connection needs a declared wire)
            self.wires[uninvertedwireid] = wirename
        elif isoutput:
            # add assign to output
            self.assigns[self.ios[loc].name] = wirename

    if not inverted or (self.useinversionpins and inputname in self.inversionpins[self.vpr_tile_grid[loc].type]):
        # if not inverted, or the inversion is handled by the cell's own
        # inversion pin, just finish
        return wirename

    # else create an inverter element and a wire for its output
    invertername = f"{wirename}_inverter"

    invwirename = f"{wirename}_inv"

    inverterios = {"Q": invwirename, "A": wirename}

    inverterelement = Element(wire[0], "inv", invertername, inverterios)
    self.elements[wire[0]]["inv"] = inverterelement
    invertedwireid = Wire(wire[0], wire[1], True)
    self.wires[invertedwireid] = invwirename
    return invwirename
|
||||
|
||||
def parse_bels(self):
    """Converts BELs to Verilog-like structures.

    Three passes over self.designconnections:
    1. outputs  - create Elements for pins driven by global wires,
    2. inputs   - create/extend Elements and wires for all BEL inputs,
    3. pruning  - drop Elements (and whole locations) that drive nothing.
    """
    # TODO add support for direct input-to-output
    # first parse outputs to create wires for them

    # parse outputs first to properly handle namings
    for currloc, connections in self.designconnections.items():
        type_connections = self.get_bel_type_and_connections(currloc, connections, PinDirection.OUTPUT)

        for currtype, connections in type_connections.items():
            currname = self.get_element_name(currtype, currloc)
            outputs = {}

            # Check each output
            for output_name, (
                loc,
                wire,
            ) in connections.items():

                # That wire is connected to something. Skip processing
                # of the cell here
                if loc is not None:
                    continue

                # Connect the global wire
                outputs[output_name] = wire

            # No outputs connected, don't add.
            if not len(outputs):
                continue

            # If Element does not exist, create it
            if currtype not in self.elements[currloc]:
                self.elements[currloc][currtype] = Element(
                    currloc, self.get_element_type(currtype), currname, outputs
                )
            # Else update IOs
            else:
                self.elements[currloc][currtype].ios.update(outputs)

    # process inputs of BELs
    for currloc, connections in self.designconnections.items():
        # Extract type and form name for the BEL
        # Current location may be a multi cell location.
        # Split the connection list into a set of connections
        # for each used cell type
        type_connections = self.get_bel_type_and_connections(currloc, connections, PinDirection.INPUT)

        for currtype in type_connections:
            currname = self.get_element_name(currtype, currloc)
            connections = type_connections[currtype]
            inputs = {}
            # form all inputs for the BEL
            for inputname, wire in connections.items():
                # Constant drivers map directly to Verilog literals
                if wire[1] == "VCC":
                    inputs[inputname] = "1'b1"
                    continue
                elif wire[1] == "GND":
                    inputs[inputname] = "1'b0"
                    continue
                elif wire[1].startswith("CAND"):
                    # Column clock driver: remap via the original
                    # location map and the CAND wire map
                    dst = (currloc, inputname)
                    dst = self.org_loc_map.get(dst, dst)
                    if dst[0] in self.cand_map:
                        inputs[inputname] = self.cand_map[dst[0]][wire[1]]
                        continue
                srctype = self.vpr_tile_grid[wire[0]].type
                srctype_cells = self.vpr_tile_types[srctype].cells
                if len(set(srctype_cells).intersection(set(["BIDIR", "LOGIC", "ASSP", "RAM", "MULT"]))) > 0:
                    # FIXME handle already inverted pins
                    # TODO handle inouts
                    wirename = self.get_wire(currloc, wire, inputname)
                    inputs[inputname] = wirename
                else:
                    raise Exception("Not supported cell type {}".format(srctype))
            if currtype not in self.elements[currloc]:
                # If Element does not exist, create it
                self.elements[currloc][currtype] = Element(
                    currloc, self.get_element_type(currtype), currname, inputs
                )
            else:
                # else update IOs
                self.elements[currloc][currtype].ios.update(inputs)

    # Prune BELs that do not drive anything (have all outputs disconnected)
    # Iterate over copies since we delete entries while walking.
    for loc, elements in list(self.elements.items()):
        for type, element in list(elements.items()):

            # Handle IO cells
            if element.type in ["CLOCK", "BIDIR", "SDIOMUX"]:

                if element.type == "CLOCK":
                    direction = "input"
                else:
                    direction = self.get_io_config(element.ios)

                # Remove the cell if none is connected
                if direction is None:
                    del elements[type]

            # Handle non-io cells
            else:

                # Get connected pin names and output pin names
                # (strip "[idx]" from library pin names before comparing)
                connected_pins = set(element.ios.keys())
                output_pins = set(
                    [
                        pin.name.split("[")[0]
                        for pin in self.cells_library[element.type].pins
                        if pin.direction == PinDirection.OUTPUT
                    ]
                )

                # Remove the cell if none is connected
                if not len(connected_pins & output_pins):
                    del elements[type]

        # Prune the whole location
        if not len(elements):
            del self.elements[loc]
|
||||
|
||||
def get_io_name(self, loc):
    """Return the user-facing name for the IO at the given location.

    Falls back to a generated "<loc>_inout" name unless PCF data
    provides the original name of the pin mapped to this location.
    """
    # Start from the generated default pin name.
    name = loc2str(loc) + "_inout"

    # Prefer the original net name from the PCF file, if known.
    if self.pcf_data is not None:
        pin = self.io_to_fbio.get(loc, None)
        if pin is not None and pin in self.pcf_data:
            # Convert "(n)"-style indices to Verilog-style "[n]".
            name = self.pcf_data[pin].replace("(", "[").replace(")", "]")

    return name
|
||||
|
||||
def get_io_config(self, ios):
    """Decode the direction of an IO cell from its routed control pins.

    The direction is configured by routing a constant 1 or 0 to the
    IE (output enable) and INEN (input enable) pins.

    Returns "input", "output", "inout", or None when the cell is unused.
    """
    # Output enable: on by default, switched off only by a routed 0.
    output_en = ios.get("IE", "1'b1") != "1'b0"
    # Input enable: off by default, switched on by anything but 0.
    input_en = "INEN" in ios and ios["INEN"] != "1'b0"

    if input_en and output_en:
        direction = "inout"
    elif input_en:
        direction = "input"
    elif output_en:
        direction = "output"
    else:
        direction = None

    # An input whose data output (IZ) is unrouted drives nothing. Discard.
    if direction == "input" and "IZ" not in ios:
        return None

    return direction
|
||||
|
||||
def generate_ios(self):
    """Generates IOs and their wires.

    Scans all elements; CLOCK cells are always inputs while
    BIDIR/SDIOMUX directions are decoded from their control pins.
    Unused IO cells (direction None) are skipped.

    Returns
    -------
    None
    """
    io_types = ("CLOCK", "BIDIR", "SDIOMUX")
    for eloc, locelements in self.elements.items():
        for element in locelements.values():
            if element.type not in io_types:
                continue

            # CLOCK pads are always inputs; other IO cells encode their
            # direction in the routed control inputs.
            direction = "input" if element.type == "CLOCK" else self.get_io_config(element.ios)

            # Add the IO only when the cell is actually used
            if direction is None:
                continue
            self.ios[eloc] = VerilogIO(name=self.get_io_name(eloc), direction=direction, ioloc=eloc)
|
||||
|
||||
def generate_verilog(self):
    """Creates Verilog module

    Assembles the port list, wire declarations, assigns and element
    instantiations into a single "module top" definition.

    Returns
    -------
    str: A Verilog module for given BELs
    """
    ios = ""
    wires = ""
    assigns = ""
    elements = ""

    # Populate self.ios before emitting the port list
    self.generate_ios()

    if len(self.ios) > 0:
        # Sort for deterministic output, then collapse scalar bits
        # into vector declarations
        sortedios = sorted(self.ios.values(), key=lambda x: (x.direction, x.name))
        grouped_ios = self.group_vector_signals(sortedios, True)
        ios = "\n "
        ios += ",\n ".join([f"{x[0]} {x[1]}" for x in grouped_ios])

    grouped_wires = self.group_vector_signals(self.wires)
    if len(grouped_wires) > 0:
        wires += "\n"
        for wire in grouped_wires.values():
            wires += f" wire {wire};\n"

    if len(self.assigns) > 0:
        assigns += "\n"
        for dst, src in self.assigns.items():
            assigns += f" assign {dst} = {src};\n"

    if len(self.elements) > 0:
        for eloc, locelements in self.elements.items():
            for element in locelements.values():
                # SYN_IO pads are represented by ports, not instances
                if element.type != "SYN_IO":
                    elements += "\n"
                    elements += self.form_verilog_element(eloc, element.type, element.name, element.ios)

    verilog = f"module top ({ios});\n" f"{wires}" f"{assigns}" f"{elements}" f"\n" f"endmodule"
    return verilog
|
||||
|
||||
def generate_pcf(self):
    """Generate PCF constraints ("set_io <net> <pad>") for every IO
    that maps to a known FBIO pad."""
    lines = [
        f"set_io {io.name} {self.io_to_fbio[io.ioloc]}\n"
        for io in self.ios.values()
        if io.ioloc in self.io_to_fbio
    ]
    return "".join(lines)
|
||||
|
||||
def generate_qcf(self):
    """Generate QCF placement constraints for every IO that maps to a
    known FBIO pad."""
    lines = ["#[Fixed Pin Placement]\n"]
    for io in self.ios.values():
        pad = self.io_to_fbio.get(io.ioloc)
        if pad is not None:
            lines.append(f"place {io.name} {pad}\n")
    return "".join(lines)
|
203
f4pga/utils/quicklogic/pp3/vis_switchboxes.py
Executable file
203
f4pga/utils/quicklogic/pp3/vis_switchboxes.py
Executable file
|
@ -0,0 +1,203 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
import argparse
|
||||
|
||||
import lxml.etree as ET
|
||||
|
||||
from f4pga.utils.quicklogic.pp3.data_structs import SwitchboxPinType
|
||||
from f4pga.utils.quicklogic.pp3.data_import import import_data
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def fixup_pin_name(name):
    """Strip square brackets from a pin name so it forms a valid DOT node id."""
    return name.translate(str.maketrans("", "", "[]"))
|
||||
|
||||
|
||||
def switchbox_to_dot(switchbox, stage_types=("STREET", "HIGHWAY")):
    """Render a switchbox model as a Graphviz DOT digraph string.

    Only stages whose type is listed in stage_types are drawn; pins and
    connections touching other stages are filtered out accordingly.
    Muxes are drawn as record-shaped nodes with one port per input/output.
    """
    dot = []

    # Fill color per top-level pin type
    TYPE_TO_COLOR = {
        SwitchboxPinType.UNSPEC: "#C0C0C0",
        SwitchboxPinType.LOCAL: "#C0C0FF",
        SwitchboxPinType.HOP: "#FFFFC0",
        SwitchboxPinType.CONST: "#FFC0C0",
        SwitchboxPinType.GCLK: "#C0FFC0",
    }

    # Add header
    dot.append("digraph {} {{".format(switchbox.type))
    dot.append(' graph [nodesep="1.0", ranksep="20"];')
    dot.append(' splines = "false";')
    dot.append(" rankdir = LR;")
    dot.append(" margin = 20;")
    dot.append(" node [shape=record, style=filled, fillcolor=white];")

    # Stages that will actually be drawn
    stage_ids_to_show = set([s.id for s in switchbox.stages.values() if s.type in stage_types])

    # Top-level inputs
    dot.append(" subgraph cluster_inputs {")
    dot.append(" node [shape=ellipse, style=filled];")
    dot.append(' label="Inputs";')

    for pin in switchbox.inputs.values():
        # Skip pins that only touch hidden stages
        stage_ids = set([loc.stage_id for loc in pin.locs])
        if not len(stage_ids & stage_ids_to_show):
            continue

        color = TYPE_TO_COLOR.get(pin.type, "#C0C0C0")
        name = "input_{}".format(fixup_pin_name(pin.name))
        dot.append(' {} [rank=0, label="{}", fillcolor="{}"];'.format(name, pin.name, color))

    dot.append(" }")

    # Top-level outputs
    dot.append(" subgraph cluster_outputs {")
    dot.append(" node [shape=ellipse, style=filled];")
    dot.append(' label="Outputs";')

    # Outputs go one rank past the last stage
    rank = max(switchbox.stages.keys()) + 1

    for pin in switchbox.outputs.values():
        stage_ids = set([loc.stage_id for loc in pin.locs])
        if not len(stage_ids & stage_ids_to_show):
            continue

        color = TYPE_TO_COLOR[pin.type]
        name = "output_{}".format(fixup_pin_name(pin.name))
        dot.append(' {} [rank={}, label="{}", fillcolor="{}"];'.format(name, rank, pin.name, color))

    dot.append(" }")

    # Stages
    for stage in switchbox.stages.values():

        if stage.type not in stage_types:
            continue

        rank = stage.id + 1

        dot.append(" subgraph cluster_st{} {{".format(stage.id))
        dot.append(" label=\"Stage #{} '{}'\";".format(stage.id, stage.type))
        dot.append(' bgcolor="#D0D0D0"')

        # Switch
        for switch in stage.switches.values():
            dot.append(" subgraph cluster_st{}_sw{} {{".format(stage.id, switch.id))
            dot.append(' label="Switch #{}";'.format(switch.id))
            dot.append(' bgcolor="#F0F0F0"')

            # Mux
            for mux in switch.muxes.values():
                inputs = sorted(mux.inputs.values(), key=lambda p: p.id)

                # Record label: "<title>|{{inputs}|{output}}" with one
                # <iN>/<oN> port per pin so edges attach to exact pins
                mux_l = "Mux #{}".format(mux.id)
                inp_l = "|".join(["<i{}> {}. {}".format(p.id, p.id, p.name) for p in inputs])
                out_l = "<o{}> {}. {}".format(mux.output.id, mux.output.id, mux.output.name)
                label = "{}|{{{{{}}}|{{{}}}}}".format(mux_l, inp_l, out_l)
                name = "st{}_sw{}_mx{}".format(stage.id, switch.id, mux.id)

                dot.append(' {} [rank="{}", label="{}"];'.format(name, rank, label))

            dot.append(" }")

        dot.append(" }")

    # Internal connections
    for conn in switchbox.connections:

        # Only draw edges whose both endpoints are visible
        if switchbox.stages[conn.src.stage_id].type not in stage_types:
            continue
        if switchbox.stages[conn.dst.stage_id].type not in stage_types:
            continue

        src_node = "st{}_sw{}_mx{}".format(conn.src.stage_id, conn.src.switch_id, conn.src.mux_id)
        src_port = "o{}".format(conn.src.pin_id)
        dst_node = "st{}_sw{}_mx{}".format(conn.dst.stage_id, conn.dst.switch_id, conn.dst.mux_id)
        dst_port = "i{}".format(conn.dst.pin_id)

        dot.append(" {}:{} -> {}:{};".format(src_node, src_port, dst_node, dst_port))

    # Input pin connections
    for pin in switchbox.inputs.values():
        src_node = "input_{}".format(fixup_pin_name(pin.name))

        for loc in pin.locs:

            if switchbox.stages[loc.stage_id].type not in stage_types:
                continue

            dst_node = "st{}_sw{}_mx{}".format(loc.stage_id, loc.switch_id, loc.mux_id)
            dst_port = "i{}".format(loc.pin_id)

            dot.append(" {} -> {}:{};".format(src_node, dst_node, dst_port))

    # Output pin connections
    for pin in switchbox.outputs.values():
        dst_node = "output_{}".format(fixup_pin_name(pin.name))

        for loc in pin.locs:

            if switchbox.stages[loc.stage_id].type not in stage_types:
                continue

            src_node = "st{}_sw{}_mx{}".format(loc.stage_id, loc.switch_id, loc.mux_id)
            src_port = "o{}".format(loc.pin_id)

            dot.append(" {}:{} -> {};".format(src_node, src_port, dst_node))

    # Footer
    dot.append("}")
    return "\n".join(dot)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def main():
    """Entry point: read a QuickLogic 'TechFile', then write one
    "sbox_<type>.dot" visualization file per switchbox type found."""

    # Parse arguments
    parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument("i", type=str, help="Quicklogic 'TechFile' file")
    parser.add_argument(
        "--stages", type=str, default="STREET", help="Comma-separated list of stage types to view (def. STREET)"
    )

    args = parser.parse_args()

    # Read and parse the XML file
    xml_tree = ET.parse(args.i)
    xml_root = xml_tree.getroot()

    # Load data
    data = import_data(xml_root)
    switchbox_types = data["switchbox_types"]

    # Generate DOT files with switchbox visualizations, one per type,
    # in the current working directory
    for switchbox in switchbox_types.values():
        fname = "sbox_{}.dot".format(switchbox.type)
        with open(fname, "w") as fp:
            fp.write(switchbox_to_dot(switchbox, args.stages.split(",")))
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
239
f4pga/utils/quicklogic/process_sdc_constraints.py
Normal file
239
f4pga/utils/quicklogic/process_sdc_constraints.py
Normal file
|
@ -0,0 +1,239 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
"""
|
||||
This script processes an SDC constraint file and replaces all references to
|
||||
pin names in place of net names with actual net names that are mapped to those
|
||||
pins. Pin-to-net mapping is read from a PCF constraint file.
|
||||
|
||||
Note that this script does not check whether the net names in the input PCF
|
||||
file are actually present in a design and whether the pin names are valid.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import re
|
||||
import csv
|
||||
|
||||
from f4pga.utils.pcf import parse_simple_pcf, PcfIoConstraint
|
||||
from f4pga.utils.eblif import parse_blif
|
||||
|
||||
|
||||
# Matches names of the form "<name>[<i0>:<i1>]" describing bit ranges,
# e.g. "pin[7:0]". Group "name" is the base name, "i0"/"i1" the bounds.
RE_INDICES = re.compile(r"(?P<name>\S+)\[(?P<i0>[0-9]+):(?P<i1>[0-9]+)\]")


def collect_eblif_nets(eblif):
    """
    Collects all net names that are present in the given parsed BLIF/EBLIF
    netlist. Returns a set of net names.

    Parameters
    ----------
    eblif: dict
        Parsed BLIF/EBLIF data (as produced by parse_blif).
    """
    nets = set()

    # First add all input and output nets
    for key in ["inputs", "outputs", "clocks"]:
        nets |= set(eblif.get(key, []))

    # Look for cell-to-cell connections ("<port>=<net>" pairs)
    for cell in eblif.get("subckt", []):
        for conn in cell["args"][1:]:
            # Split on the first '=' only so net names that themselves
            # contain '=' do not raise ValueError on unpacking.
            _, net = conn.split("=", maxsplit=1)
            nets.add(net)

    # All nets referenced by LUTs (.names)
    for cell in eblif.get("names", []):
        nets |= set(cell["args"])

    # Latch data input, data output and (optional) clock
    for cell in eblif.get("latch", []):
        args = cell["args"]
        assert len(args) >= 2
        nets.add(args[0])
        nets.add(args[1])

        if len(args) >= 4:
            nets.add(args[3])

    return nets


def expand_indices(items):
    """
    Expands index ranges for each item in the given iterable. For example
    converts a single item like: "pin[1:0]" into two "pin[1]" and "pin[0]".

    Indices are always emitted in ascending order; items without a range
    are passed through unchanged.
    """
    new_items = []

    # Process each item
    for item in items:

        # Match using regex. If there is no match then pass the item through
        match = RE_INDICES.fullmatch(item)
        if not match:
            new_items.append(item)
            continue

        name = match.group("name")
        i0 = int(match.group("i0"))
        i1 = int(match.group("i1"))

        # Generate individual indices (ascending; handles i0 == i1,
        # i0 < i1 and i0 > i1 uniformly)
        lo, hi = min(i0, i1), max(i0, i1)
        indices = range(lo, hi + 1)

        # Generate names
        for i in indices:
            new_items.append("{}[{}]".format(name, i))

    return new_items
|
||||
|
||||
|
||||
def process_get_ports(match, pad_to_net, valid_pins=None, valid_nets=None):
    """
    Used as a callback in re.sub(). Responsible for substitution of net names
    for pin names.

    When the valid_pins list is provided the function checks if a name
    specified in SDC refers to any of them. If it is so and the pin name is
    not present in the PCF mapping an error is thrown - it is not possible
    to map the pin to a net.

    When no valid_pins list is known then there is no possibility to check if
    a given name refers to a pin or net. Hence if it is present in the PCF
    mapping it is considered a pin name and gets remapped to a net accordingly.
    Otherwise it is just passed through.

    Lastly, if the valid_nets list is provided the function checks if the
    final net name is valid and throws an error if it is not.
    """

    # Strip any spurious whitespace chars
    arg = match.group("arg").strip()

    # A helper mapping func. Closes over pad_to_net / valid_pins /
    # valid_nets from the enclosing call.
    def map_pad_to_net(pad):

        # Unescape square brackets (SDC escapes them)
        pad = pad.replace("\\[", "[")
        pad = pad.replace("\\]", "]")

        # If we have a valid pins list and the pad is in the map then re-map it
        if valid_pins and pad in valid_pins:
            assert pad in pad_to_net, "The pin '{}' is not associated with any net in PCF".format(pad)
            net = pad_to_net[pad].net

        # If we don't have a valid pins list then just look up in the PCF
        # mapping.
        elif not valid_pins and pad in pad_to_net:
            net = pad_to_net[pad].net

        # Otherwise it looks like its a direct reference to a net so pass it
        # through.
        else:
            net = pad

        # If we have a valid net list then validate the net name
        if valid_nets:
            assert net in valid_nets, "The net '{}' is not present in the netlist".format(net)

        # Escape square brackets back for the output SDC
        net = net.replace("[", "\\[")
        net = net.replace("]", "\\]")
        return net

    # We have a list of ports ("{a b c}"), map each of them individually
    if arg[0] == "{" and arg[-1] == "}":
        arg = arg[1:-1].split()
        nets = ", ".join([map_pad_to_net(p) for p in arg])
        new_arg = "{{{}}}".format(nets)

    # We have a single port, map it directly
    else:
        new_arg = map_pad_to_net(arg)

    # Format the new statement
    return "[get_ports {}]".format(new_arg)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def main():
    """Entry point: rewrite pin references in an SDC file to the net
    names mapped to those pins by a PCF file (see module docstring)."""

    # Parse arguments
    parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument("--sdc-in", type=str, required=True, help="Input SDC file")
    parser.add_argument("--pcf", type=str, required=True, help="Input PCF file")
    parser.add_argument("--sdc-out", type=str, required=True, help="Output SDC file")
    parser.add_argument("--eblif", type=str, default=None, help="Input EBLIF netlist file")
    parser.add_argument("--pin-map", type=str, default=None, help="Input CSV pin map file")

    args = parser.parse_args()

    # Read the input PCF file
    with open(args.pcf, "r") as fp:
        pcf_constraints = list(parse_simple_pcf(fp))

    # Build a pad-to-net map; a pad may be constrained only once
    pad_to_net = {}
    for constr in pcf_constraints:
        if isinstance(constr, PcfIoConstraint):
            assert constr.pad not in pad_to_net, "Multiple nets constrained to pin '{}'".format(constr.pad)
            pad_to_net[constr.pad] = constr

    # Read the input SDC file
    with open(args.sdc_in, "r") as fp:
        sdc = fp.read()

    # Read the input EBLIF file, extract all valid net names from it
    valid_nets = None
    if args.eblif is not None:
        with open(args.eblif, "r") as fp:
            eblif = parse_blif(fp)
            valid_nets = collect_eblif_nets(eblif)

    # Read the input pinmap CSV file, extract valid pin names from it
    valid_pins = None
    if args.pin_map is not None:
        with open(args.pin_map, "r") as fp:
            reader = csv.DictReader(fp)
            csv_data = list(reader)
            valid_pins = [line["mapped_pin"] for line in csv_data]
            valid_pins = set(expand_indices(valid_pins))

    # Process the SDC: substitute nets inside every [get_ports ...]
    def sub_cb(match):
        return process_get_ports(match, pad_to_net, valid_pins, valid_nets)

    sdc_lines = sdc.splitlines()
    for i in range(len(sdc_lines)):
        # Leave full-line comments untouched
        if not sdc_lines[i].strip().startswith("#"):
            sdc_lines[i] = re.sub(r"\[\s*get_ports\s+(?P<arg>.*)\]", sub_cb, sdc_lines[i])

    # Write the output SDC file
    sdc = "\n".join(sdc_lines) + "\n"
    with open(args.sdc_out, "w") as fp:
        fp.write(sdc)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
160
f4pga/utils/quicklogic/qlf_k4n8/create_ioplace.py
Executable file
160
f4pga/utils/quicklogic/qlf_k4n8/create_ioplace.py
Executable file
|
@ -0,0 +1,160 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
"""
|
||||
Convert a PCF file into a VPR io.place file.
|
||||
"""
|
||||
import argparse
|
||||
import csv
|
||||
import sys
|
||||
import re
|
||||
from collections import defaultdict
|
||||
|
||||
import f4pga.utils.vpr_io_place as vpr_io_place
|
||||
from f4pga.utils.quicklogic.pinmap_parse import read_pinmapfile_data
|
||||
from f4pga.utils.quicklogic.pinmap_parse import vec_to_scalar
|
||||
|
||||
from f4pga.utils.pcf import parse_simple_pcf
|
||||
|
||||
# =============================================================================
|
||||
|
||||
BLOCK_INSTANCE_RE = re.compile(r"^(?P<name>\S+)\[(?P<index>[0-9]+)\]$")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
def gen_io_def(args):
    """
    Generate io.place file from pcf file.

    Builds a pad-name -> (x, y, z) location map from the pinmap XML and
    the user CSV, then emits one placement constraint per PCF IO
    constraint. Exits with status 1 (message on stderr) on any
    inconsistency between the CSV, the pinmap and the PCF.
    """
    io_place = vpr_io_place.IoPlace()
    io_place.read_io_list_from_eblif(args.blif)
    io_place.load_block_names_from_net_file(args.net)

    # Load all the necessary data from the pinmap_xml
    # (io_cells is unused here; only the port map is needed)
    io_cells, port_map = read_pinmapfile_data(args.pinmap_xml)

    # Map of pad names to VPR locations.
    pad_map = defaultdict(lambda: dict())

    with open(args.csv_file, mode="r") as csv_fp:
        reader = csv.DictReader(csv_fp)
        for line in reader:
            # Expand vector ports/pins into scalar bits
            port_name_list = vec_to_scalar(line["port_name"])
            pin_name = vec_to_scalar(line["mapped_pin"])
            gpio_type = line["GPIO_type"]

            if len(port_name_list) != len(pin_name):
                print(
                    'CSV port name "{}" length does not match with mapped pin name "{}" length'.format(
                        line["port_name"], line["mapped_pin"]
                    ),
                    file=sys.stderr,
                )
                sys.exit(1)

            for port, pin in zip(port_name_list, pin_name):
                if port in port_map:
                    curr_map = port_map[port]
                    if gpio_type is None or gpio_type == "":
                        pad_map[pin] = (int(curr_map.x), int(curr_map.y), int(curr_map.z))
                    else:
                        # GPIO-typed pads are keyed as "<pin>:<type>"
                        gpio_pin = pin + ":" + gpio_type.strip()
                        pad_map[gpio_pin] = (int(curr_map.x), int(curr_map.y), int(curr_map.z))
                else:
                    print(
                        'Port name "{}" specified in csv file "{}" is invalid. {} "{}"'.format(
                            line["port_name"], args.csv_file, "Specify from port names in xml file", args.pinmap_xml
                        ),
                        file=sys.stderr,
                    )
                    sys.exit(1)

    for pcf_constraint in parse_simple_pcf(args.pcf):
        # Only IO constraints carry pad placements
        if type(pcf_constraint).__name__ == "PcfIoConstraint":
            pad_name = pcf_constraint.pad
            if not io_place.is_net(pcf_constraint.net):
                print(
                    'PCF constraint "{}" from line {} constraints net {} {}:\n{}'.format(
                        pcf_constraint.line_str,
                        pcf_constraint.line_num,
                        pcf_constraint.net,
                        "\n".join(io_place.get_nets()),
                        "which is not in available netlist",
                    ),
                    file=sys.stderr,
                )
                sys.exit(1)

            if pad_name not in pad_map:
                print(
                    'PCF constraint "{}" from line {} constraints pad {} {}:\n{}'.format(
                        pcf_constraint.line_str,
                        pcf_constraint.line_num,
                        pad_name,
                        "\n".join(sorted(pad_map.keys())),
                        "which is not in available pad map",
                    ),
                    file=sys.stderr,
                )
                sys.exit(1)

            # Get the top-level block instance, strip its index
            inst = io_place.get_top_level_block_instance_for_net(pcf_constraint.net)
            if inst is None:
                continue

            match = BLOCK_INSTANCE_RE.match(inst)
            assert match is not None, inst

            # NOTE(review): the stripped instance name is not used below;
            # the constraint is keyed by net name only - confirm intended.
            inst = match.group("name")

            # Constraint the net (block)
            locs = pad_map[pad_name]
            io_place.constrain_net(net_name=pcf_constraint.net, loc=locs, comment=pcf_constraint.line_str)

    if io_place.constraints:
        io_place.output_io_place(args.output)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def main():
    """
    Convert a PCF file into a VPR io.place file
    """
    parser = argparse.ArgumentParser(description="Convert a PCF file into a VPR io.place file.")

    # Input and output files
    parser.add_argument("--pcf", "-p", "-P", type=argparse.FileType("r"), required=True, help="PCF input file")
    parser.add_argument("--blif", "-b", type=argparse.FileType("r"), required=True, help="BLIF / eBLIF file")
    parser.add_argument(
        "--output", "-o", "-O", type=argparse.FileType("w"), default=sys.stdout, help="The output io.place file"
    )
    parser.add_argument("--net", "-n", type=argparse.FileType("r"), required=True, help="top.net file")

    # Pin mapping data
    parser.add_argument("--pinmap_xml", type=str, required=True, help="Input pin-mapping xml file")
    parser.add_argument("--csv_file", type=str, required=True, help="Input user-defined pinmap CSV file")

    # Parse options and run the conversion
    gen_io_def(parser.parse_args())
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
312
f4pga/utils/quicklogic/repacker/README.md
Normal file
312
f4pga/utils/quicklogic/repacker/README.md
Normal file
|
@ -0,0 +1,312 @@
|
|||
# Netlist repacking utility
|
||||
|
||||
## 1. Goal
|
||||
|
||||
Provide the same VPR operating mode pb_type into physical mode pb_type
|
||||
repacking as implemented in the OpenFPGA project.
|
||||
|
||||
https://openfpga.readthedocs.io/en/master/manual/openfpga_shell/openfpga_commands/fpga_bitstream_commands/#repack
|
||||
|
||||
## 2. Files affected by the repacker
|
||||
|
||||
The utility expects a VPR packed netlist, an EBLIF netlist and a VPR placement
|
||||
file as input and outputs a similar set of files representing the design after
|
||||
repacking. The placement is not altered, just the SHA checksum is updated in
|
||||
the file.
|
||||
|
||||
### VPR packed netlist (.net)
|
||||
|
||||
The packed netlist contains information about the clusters of a packed design.
|
||||
It tells which specific pb_type instances (pbs or blocks) are occupied by which
|
||||
cells from the circuit netlist. Intra-cluster routing as well as LUT port
|
||||
rotation is also stored there.
|
||||
|
||||
The repacker discards any existing intra-cluster routing, remaps blocks and
|
||||
their port connections according to rules provided by OpenFPGA architecture
|
||||
file and re-routes the cluster. The procedure is repeated for each cluster
|
||||
independently.
|
||||
|
||||
### Circuit netlist (.blif / .eblif)
|
||||
|
||||
The circuit netlist holds all nets and cells of a particular design along with
|
||||
its attributes and parameters. Repacking a cell from one block instance into
|
||||
another usually requires changing its type. All changes introduced to the
|
||||
packed netlist must be reflected in the circuit netlist hence it needs to be
|
||||
modified by the packer as well.
|
||||
|
||||
### VPR placement file (.place)
|
||||
|
||||
This file stores cluster placement on the device grid. The file content itself
|
||||
is not affected by the packer but its header is. The header stores a checksum
|
||||
computed over the packed netlist file to ensure that the two stay in sync.
|
||||
Modified packed netlist has a different checksum hence the placement file
|
||||
needs to have its header updated.
|
||||
|
||||
## 3. Tool operation
|
||||
|
||||
### Preparation
|
||||
|
||||
The preparation step includes loading and pre-processing all the information
|
||||
required for repacking
|
||||
|
||||
#### loading of VPR architecture
|
||||
|
||||
The VPR architecture XML file is loaded to obtain information about:
|
||||
- pb_type hierarchy
|
||||
- binding of netlist cell types (BLIF models) with pb_types
|
||||
- cell models used in the architecture.
|
||||
|
||||
Soon after the architecture file is read an internal representation of the
|
||||
pb_type hierarchy for each root pb_type (a.k.a. complex block - CLB) is built.
|
||||
For every leaf pb_type of class "lut" another child pb_type is added to the
|
||||
hierarchy. This is done to keep the pb_type hierarchy consistent with the packed
|
||||
netlist hierarchy where native LUTs contain one extra level.
|
||||
|
||||
Next, all known leaf pb_types are scanned for cell type bindings and internal
|
||||
structures that represent known cell types are created. VPR architecture does
|
||||
not store port widths along with the model list so to get actual cell types
|
||||
both pb_type tree and the model list need to be examined.
|
||||
|
||||
#### Loading of repacking rules
|
||||
|
||||
Repacking rules may contain port maps. These port maps may or may not
|
||||
explicitly specify port pins (bits). For the purpose of the repacker
|
||||
implementation port map needs to be known down to each pin. To tell the
|
||||
pin-to-pin correspondence each rule is confronted with the pb_type it refers to
|
||||
(pb_types store port widths) and direct pin correspondences are created.
|
||||
This is called "port map expansion".
|
||||
|
||||
#### Loading and pre-processing of circuit netlist
|
||||
|
||||
The circuit BLIF/EBLIF netlist is loaded and stored internally.
|
||||
|
||||
A netlist contains top-level ports. These ports do not correspond to any cells.
|
||||
However, they will have to be represented by explicit netlist cells after
|
||||
repacking. To allow for that each top-level port is converted to a virtual cell
|
||||
and added to the netlist. This allows the repacking algorithm to operate
|
||||
regularly without the need of any special IO handling. Top-level netlist ports
|
||||
are removed.
|
||||
|
||||
#### Circuit netlist cleaning
|
||||
|
||||
Circuit netlist cleaning (for now) includes removal of buffer LUTs. Buffer LUTs
|
||||
are 1-bit LUTs configured as pass-through. They are used to stitch different
|
||||
nets together while preserving their original names. The implemented buffer LUT
|
||||
absorption matches the identical process performed by VPR and should be enabled
|
||||
for the repacker if enabled for the VPR flow.
|
||||
|
||||
#### Loading of the packed netlist
|
||||
|
||||
The packed netlist is loaded, parsed and stored using internal data structures
|
||||
|
||||
### Repacking
|
||||
|
||||
The repacking is done independently for each cluster. Clusters are processed
|
||||
one-by-one, repacked ones replace original ones in the packed netlist. The
|
||||
following steps are identical and are done in the same sequence for each
|
||||
cluster.
|
||||
|
||||
#### Net renaming
|
||||
Net renaming is a result of LUT buffer absorption. This is necessary to keep
|
||||
the packed netlist and the circuit netlist in sync.
|
||||
|
||||
#### Explicit instantiation of route-through LUTs
|
||||
In VPR a native LUT (.names) can be configured as an implicit buffer
|
||||
(a route-through) by the VPR packer whenever necessary. Such a LUT is not
|
||||
present in the circuit netlist. However it still needs to be repacked into a
|
||||
physical LUT cell.
|
||||
|
||||
To achieve this goal all route-through LUTs in the packed netlist of the current
|
||||
cluster are identified and explicit LUT blocks are inserted for them. After
|
||||
that corresponding LUT buffers are inserted into the circuit netlist. This
|
||||
ensures regularity of the repacking algorithm - no special handling is required
|
||||
for such LUTs. They will be subjected to repacking as any other block/cell.
|
||||
|
||||
#### Identification of blocks to be repacked and their targets
|
||||
|
||||
In the first step all blocks that are to be repacked are identified. For this
|
||||
the packed netlist of the cluster is scanned for leaf blocks. Each leaf block
|
||||
is compared against all known repacking rules. Whenever its path in the
|
||||
hierarchy matches a rule then it is considered to be repacked. Identified
|
||||
blocks are stored along with their matching rules on a list.
|
||||
|
||||
The second step is identification of target (destination) block(s) for each
|
||||
source block for the repacking to take place. To do that a function scans the
|
||||
hierarchy of the VPR architecture and identifies pb_types that match the
|
||||
destination path as defined by the repacking rule for the given block. A list of
|
||||
destination pb_types is created. The list should contain exactly one element.
|
||||
Zero elements means that there is no suitable pb_type present in the
|
||||
architecture and more than one means that there is an ambiguity. Both cases are
|
||||
reported as an error. Finally the destination pb_type is stored along with the
|
||||
block to be repacked.
|
||||
|
||||
If there are no blocks to be repacked then there is nothing more that can be
|
||||
done for this cluster and the algorithm moves to the next one.
|
||||
|
||||
#### Circuit netlist cell repacking
|
||||
|
||||
For each block to be repacked its corresponding cell instance in the circuit
|
||||
netlist and its cell type (model) is found. Cell type (model) of the destination
|
||||
pb_type is identified as well. The original cell is removed from the circuit
|
||||
netlist and a new one is created based on the destination pb_type model. The new
|
||||
cell connectivity is determined by the port map as defined by the repacking rule
|
||||
being applied. For LUTs the LUT port rotation is also taken into account here.
|
||||
|
||||
In case of a LUT the newly created LUT cell receives a parameter named "LUT"
|
||||
which stores the truth table.
|
||||
|
||||
If the block being repacked represents a top-level IO port then the port name is
|
||||
added to the list of top-level ports to be removed. When a top-level IO is
|
||||
repacked into a physical cell, no top-level IO port is needed anymore.
|
||||
|
||||
#### Cluster repacking
|
||||
|
||||
Repacking of cluster cells (in the packed netlist) is done implicitly via the
|
||||
pb_type routing graph. Routes present in the graph imply which blocks are used
|
||||
(instanced) and which are not. The advantage of that approach is that the
|
||||
repacking and rerouting of the cluster are both done at the same single step.
|
||||
|
||||
At the beginning the routing graph for the complex block is created. The graph
|
||||
represents all routing resources for a root pb_type (complex block) as defined
|
||||
in the VPR architecture. In the graph nodes represent pb_type ports and edges
|
||||
connections between them. Nodes may be associated with nets. Edges are
|
||||
associated with nets implicitly - if an edge spans two nodes of the same net
|
||||
then it also belongs to that net.
|
||||
|
||||
Once the graph is built all nodes representing ports of the destination blocks
|
||||
of the remapping are associated with net names. Ports are remapped according to
|
||||
the repacking rules. This is the step where the actual repacking takes place.
|
||||
Together with the repacked blocks the ports of the cluster block are associated
|
||||
with nets as well.
|
||||
|
||||
Following port and net association there is the routing stage. For each node
|
||||
that is a sink a route to the source node of the same net is determined.
|
||||
In case when the source node is a cluster block port and there is no route
|
||||
available the first cluster block port node that can be reached is taken. This
|
||||
works similarly to the VPR packer.
|
||||
|
||||
Currently the router is implemented as a greedy depth-first search. No edge
|
||||
timing information is used. For the complexity of the current repacking problems
|
||||
this implementation is sufficient - most of the routing paths do not have more
|
||||
than one or two alternatives.
|
||||
|
||||
Finally once the graph is fully routed it is converted back to a packed netlist
|
||||
of the cluster. The new cluster replaces the old one in the packed netlist.
|
||||
|
||||
#### IO blocks handling
|
||||
|
||||
During conversion from packed netlist to a routing graph and back information
|
||||
about IO block names is lost. To preserve the names the original ones are stored
|
||||
prior to the cluster repacking and are used to rename repacked IO blocks
|
||||
afterwards.
|
||||
|
||||
### Finishing
|
||||
|
||||
At this point all clusters were repacked along with the corresponding cells in
|
||||
the circuit netlist. Apart from writing the data to files there are a few
|
||||
additional steps that need to be performed.
|
||||
|
||||
#### Top-level port removal
|
||||
During cluster processing names of blocks representing top-level IO ports were
|
||||
stored. Those blocks got repacked into physical IO cells and the top-level IO
|
||||
ports are no longer needed so they are removed.
|
||||
|
||||
#### Synchronization of cells attributes and parameters
|
||||
|
||||
VPR stores cell attributes and parameters read from the circuit (EBLIF) netlist
|
||||
with the packed netlist as well. It complains if both don't match. During
|
||||
conversion from packed netlist to a routing graph and back block attributes and
|
||||
parameters are lost - they are not stored within the graph. So in this step they
|
||||
are taken from the circuit netlist and associated with corresponding packed
|
||||
netlist blocks again.
|
||||
|
||||
#### Circuit netlist cleaning
|
||||
|
||||
This step is necessary only if buffer LUT absorption is enabled. Before
|
||||
repacking buffer LUTs driving top-level ports cannot be removed as their removal
|
||||
would cause top-level IO port renaming. After repacking IO ports are represented
|
||||
as cells so the removal is possible.
|
||||
|
||||
#### Circuit netlist write back (EBLIF)
|
||||
|
||||
The circuit netlist is written to an EBLIF file. The use of the EBLIF format is
|
||||
necessary to store LUT truth tables as cell parameters. After repacking LUTs are
|
||||
no longer represented by .names hence require a parameter to store the truth
|
||||
table.
|
||||
|
||||
#### Packed netlist write back
|
||||
|
||||
This is an obvious step. The final packed netlist is serialized and written to
|
||||
an XML (.net) file.
|
||||
|
||||
#### Patching of VPR placement file
|
||||
If provided, the VPR placement file receives a patched header containing a new
|
||||
SHA256 checksum. The checksum corresponds to the packed netlist file after
|
||||
repacking.
|
||||
|
||||
## Implementation
|
||||
|
||||
The repacker is implemented using a set of Python scripts located under f4pga/utils/quicklogic/repacker.
|
||||
|
||||
* repack.py
|
||||
|
||||
This is the main repacking script that implements the core repacking algorithm steps. This one is to be invoked to perform repacking.
|
||||
|
||||
* pb_rr_graph.py
|
||||
|
||||
Utilities for representing and building pb_type routing graph based on VTR architecture
|
||||
|
||||
* pb_rr_graph_router.py
|
||||
|
||||
Implementation of a router which operates on the pb_type routing graph.
|
||||
|
||||
* pb_rr_graph_netlist.py
|
||||
|
||||
A set of utility functions for loading a packed netlist into a pb_type graph as well as for creating a packed netlist from a routed pb_type graph.
|
||||
|
||||
* pb_type.py
|
||||
|
||||
Data structures and utilities for representing the pb_type hierarchy
|
||||
|
||||
* netlist_cleaning.py
|
||||
|
||||
Utilities for cleaning circuit netlist
|
||||
|
||||
* packed_netlist.py
|
||||
|
||||
Utilities for reading and writing VPR packed netlist
|
||||
|
||||
* eblif.py
|
||||
|
||||
Utilities for reading and writing BLIF/EBLIF circuit netlist
|
||||
|
||||
* block_path.py
|
||||
|
||||
Utilities for representing hierarchical path of blocks and pb_types
|
||||
|
||||
## Important notes
|
||||
|
||||
### Circuit netlist cleaning
|
||||
|
||||
VPR can "clean" a circuit netlist and it does that by default.
|
||||
The cleaning includes:
|
||||
|
||||
* Buffer LUT absorption
|
||||
* Dangling block removal
|
||||
* Dangling top-level IO removal
|
||||
|
||||
The important issue is that normally the cleaned netlist is not written back
|
||||
to any file hence it cannot be accessed outside VPR. That cleaned circuit
|
||||
netlist must correspond to the packed netlist used. This means equal number of
|
||||
blocks and cells and matching names.
|
||||
|
||||
The issue is that the circuit netlist input to the repacker is not the actual
|
||||
netlist used by VPR that has to match with the packed netlist.
|
||||
|
||||
An obvious solution would be to disable all netlist cleaning operations in VPR.
|
||||
But that unfortunately leads to huge LUT usage as each buffer LUT is then
|
||||
treated by VPR as a regular cell.
|
||||
|
||||
An alternative solution, which has been implemented, is to recreate some of
|
||||
the netlist cleaning procedures in the repacker so that they both operate on
|
||||
the same effective circuit netlist. This is what has been done.
|
285
f4pga/utils/quicklogic/repacker/arch_xml_utils.py
Normal file
285
f4pga/utils/quicklogic/repacker/arch_xml_utils.py
Normal file
|
@ -0,0 +1,285 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
"""
|
||||
A number of utility functions useful for traversing pb_type hierarchy as
|
||||
defined in VPR architecture XML file.
|
||||
"""
|
||||
import re
|
||||
|
||||
import lxml.etree as ET
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def is_leaf_pbtype(xml_pbtype):
    """
    Tells whether the given pb_type element is a leaf, i.e. it directly
    references a BLIF model instead of containing child pb_types.
    """
    assert xml_pbtype.tag == "pb_type", xml_pbtype.tag

    # Leaf pb_types carry a "blif_model" attribute in the architecture XML
    has_blif_model = "blif_model" in xml_pbtype.attrib
    return has_blif_model
|
||||
|
||||
|
||||
def get_parent_pb(xml_pbtype):
    """
    Returns the pb_type element that encloses the given one, or None when
    the element is a top-level complex block.
    """
    assert xml_pbtype.tag in ["pb_type", "mode", "interconnect"], xml_pbtype.tag

    parent = xml_pbtype.getparent()

    # Reached the top of the hierarchy
    if parent is None or parent.tag == "complexblocklist":
        return None

    # Skip over an intermediate mode element to get to its pb_type
    return parent.getparent() if parent.tag == "mode" else parent
|
||||
|
||||
|
||||
def get_parent_pb_and_mode(xml_pbtype):
    """
    Returns the (parent pb_type, mode) pair for the given pb_type. When the
    parent has no explicit modes (implicit default mode) the mode element
    returned is the parent pb_type element itself.
    """
    assert xml_pbtype.tag in ["pb_type", "mode"], xml_pbtype.tag

    parent = xml_pbtype.getparent()

    # Reached the top of the hierarchy
    if parent is None or parent.tag == "complexblocklist":
        return None, None

    assert parent.tag in ["pb_type", "mode"], parent.tag

    # Called on a mode element: the enclosing pb_type's parent is returned
    # along with the given mode
    if xml_pbtype.tag == "mode":
        return parent.getparent(), xml_pbtype

    # Called on a pb_type: the immediate parent is either a mode or a
    # pb_type with an implicit default mode
    if parent.tag == "mode":
        return parent.getparent(), parent
    return parent, parent
|
||||
|
||||
|
||||
def get_pb_by_name(xml_parent, name):
    """
    Looks up a pb_type by name within a parent pb_type (or mode). The match
    may be the parent itself (the pb_type owning the mode, when a mode is
    given) or one of its direct pb_type children. Always returns a pb_type
    element, or None when nothing matches.
    """
    assert xml_parent.tag in ["pb_type", "mode"], xml_parent.tag

    # Resolve the pb_type that actually carries the parent's name. For a
    # mode this is the pb_type that owns it.
    candidate = get_parent_pb(xml_parent) if xml_parent.tag == "mode" else xml_parent
    if candidate.attrib["name"] == name:
        return candidate

    # Scan the direct pb_type children
    for xml_child in xml_parent.findall("pb_type"):
        if xml_child.attrib["name"] == name:
            return xml_child

    # Nothing matched
    return None
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def yield_pb_children(xml_parent):
    """
    Yields (child pb_type, index) pairs, one pair per instance implied by
    each child's num_pb attribute.
    """
    for xml_child in xml_parent.findall("pb_type"):
        # num_pb defaults to 1 when the attribute is absent
        count = int(xml_child.get("num_pb", 1))
        yield from ((xml_child, i) for i in range(count))
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
INTERCONNECT_PORT_SPEC_RE = re.compile(
|
||||
r"((?P<pbtype>[A-Za-z0-9_]+)(\[(?P<indices>[0-9:]+)\])?\.)" r"(?P<port>[A-Za-z0-9_]+)(\[(?P<bits>[0-9:]+)\])?"
|
||||
)
|
||||
|
||||
|
||||
def get_pb_and_port(xml_ic, port_spec):
    """
    Resolves a port specification string of an interconnect (e.g.
    "pb[0].port[3:0]") to a (pb_type element, port element) pair.
    """
    assert xml_ic.tag == "interconnect"

    # Parse the specification
    match = INTERCONNECT_PORT_SPEC_RE.fullmatch(port_spec)
    assert match is not None, port_spec

    # Locate the pb_type the specification refers to
    xml_pbtype = get_pb_by_name(xml_ic.getparent(), match.group("pbtype"))
    assert xml_pbtype is not None, port_spec

    # Scan the pb_type's port elements for the requested one
    port_name = match.group("port")
    for xml_port in xml_pbtype:
        if xml_port.tag in ["input", "output", "clock"] and xml_port.attrib["name"] == port_name:
            return xml_pbtype, xml_port

    # The pb_type has no such port
    assert False, (port_spec, xml_pbtype.attrib["name"], port_name)
|
||||
|
||||
|
||||
def yield_indices(index_spec):
    """
    Expands an index specification string ("<i1>:<i0>", "<i>" or None) into
    individual integer indices. Ranges are inclusive at both ends and may
    be given in either order; indices are always yielded in ascending order.
    """
    # Nothing specified
    if index_spec is None:
        return

    # A single index
    if ":" not in index_spec:
        yield int(index_spec)
        return

    # An inclusive range, possibly given high-to-low
    a, b = (int(v) for v in index_spec.split(":"))
    lo, hi = min(a, b), max(a, b)
    yield from range(lo, hi + 1)
|
||||
|
||||
|
||||
def yield_pins(xml_ic, port_spec, skip_index=True):
    """
    Yields individual pin names of the form
    "<pb_type>[<pb_index>].<port>[<bit>]" for the given port specification.

    When skip_index is True the "[0]" suffix is omitted for implicit
    single-instance pb_types and single-bit ports.
    """
    assert xml_ic.tag == "interconnect"

    # Parse the specification
    match = INTERCONNECT_PORT_SPEC_RE.fullmatch(port_spec)
    assert match is not None, port_spec

    # Locate the referenced pb_type
    xml_pbtype = get_pb_by_name(xml_ic.getparent(), match.group("pbtype"))
    assert xml_pbtype is not None, port_spec

    # Determine the width of the referenced port
    port = match.group("port")
    for xml_port in xml_pbtype:
        if xml_port.tag in ["input", "output", "clock"]:
            if xml_port.attrib["name"] == port:
                width = int(xml_port.attrib["num_pins"])
                break
    else:
        assert False, (port_spec, xml_pbtype.attrib["name"], port)

    # Ports of the interconnect's own parent never carry pb_type indices
    is_parent_port = get_parent_pb(xml_ic) == xml_pbtype

    # Expand pb_type instance indices. None means "no index".
    if is_parent_port:
        indices = [None]
    else:
        indices = list(yield_indices(match.group("indices")))
        if not indices:
            num_pb = int(xml_pbtype.get("num_pb", 1))
            indices = list(range(0, num_pb)) if num_pb > 1 else [None]

    # Expand port bit indices. None means "no index".
    bits = list(yield_indices(match.group("bits")))
    if not bits:
        bits = list(range(0, width)) if width > 1 else [None]

    def with_index(base, idx):
        # Append an explicit index, or "[0]" when indices must not be skipped
        if idx is not None:
            return base + "[{}]".format(idx)
        if not skip_index:
            return base + "[0]"
        return base

    # Emit the individual pin names
    for i in indices:
        for j in bits:
            pin = with_index(match.group("pbtype"), i) + "." + match.group("port")
            yield with_index(pin, j)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def append_metadata(xml_item, meta_name, meta_data):
    """
    Attaches a <meta> entry with the given name and text to an architecture
    element, creating the <metadata> section when it does not exist yet.
    Asserts on a duplicate meta name.
    """
    assert xml_item.tag in ["pb_type", "mode", "direct", "mux"]

    # Build the new meta entry
    xml_meta = ET.Element("meta", {"name": meta_name})
    xml_meta.text = meta_data

    # Reuse an existing metadata section when present, otherwise make one
    xml_metadata = xml_item.find("metadata")
    is_new_section = xml_metadata is None
    if is_new_section:
        xml_metadata = ET.Element("metadata")

    # Refuse a duplicated meta name, then append the entry
    for existing in xml_metadata.findall("meta"):
        assert existing.attrib["name"] != xml_meta.attrib["name"]
    xml_metadata.append(xml_meta)

    # A freshly created section must be attached to the element
    if is_new_section:
        xml_item.append(xml_metadata)
|
98
f4pga/utils/quicklogic/repacker/block_path.py
Normal file
98
f4pga/utils/quicklogic/repacker/block_path.py
Normal file
|
@ -0,0 +1,98 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
"""
|
||||
Block hierarchy path utilities.
|
||||
"""
|
||||
import re
|
||||
|
||||
# =============================================================================
|
||||
|
||||
# A regular expression for parsing path nodes
|
||||
PATH_NODE_RE = re.compile(r"^(?P<name>[^\s\[\]\.]+)(\[(?P<index>[0-9]+)\])?" r"(\[(?P<mode>[^\s\[\]\.]+)\])?$")
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class PathNode:
    """
    Represents a single node of a hierarchical block path.

    Accepted string forms:
    - "<name>[<index>][<mode>]"
    - "<name>[<index>]"
    - "<name>[<mode>]"
    - "<name>"

    An index consists solely of digits while a mode may contain other
    characters as well, which is how the two are told apart.

    FIXME: This may lead to ambiguity if a mode is named using digits only.
    """

    def __init__(self, name, index=None, mode=None):
        """
        Generic constructor
        """
        assert isinstance(name, str) or name is None, name
        assert isinstance(index, int) or index is None, index
        assert isinstance(mode, str) or mode is None, mode

        self.name = name
        self.index = index
        self.mode = mode

    @staticmethod
    def from_string(string):
        """
        Parses a string representation into a PathNode object
        """
        # Same pattern as the module-level PATH_NODE_RE
        pattern = re.compile(r"^(?P<name>[^\s\[\]\.]+)(\[(?P<index>[0-9]+)\])?" r"(\[(?P<mode>[^\s\[\]\.]+)\])?$")

        match = pattern.fullmatch(string)
        assert match is not None, string

        index = match.group("index")
        return PathNode(
            match.group("name"),
            int(index) if index is not None else None,
            match.group("mode"),
        )

    def to_string(self):
        """
        Builds the string representation of the node
        """
        string = self.name
        if self.index is not None:
            string += "[{}]".format(self.index)
        if self.mode is not None:
            string += "[{}]".format(self.mode)
        return string

    def __str__(self):
        return self.to_string()

    def __repr__(self):
        return self.to_string()
|
507
f4pga/utils/quicklogic/repacker/eblif_netlist.py
Normal file
507
f4pga/utils/quicklogic/repacker/eblif_netlist.py
Normal file
|
@ -0,0 +1,507 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
"""
|
||||
BLIF/EBLIF parsing, writing and manipulation utilities.
|
||||
|
||||
BLIF format specification:
|
||||
https://course.ece.cmu.edu/~ee760/760docs/blif.pdf
|
||||
|
||||
EBLIF format specification:
|
||||
https://docs.verilogtorouting.org/en/latest/vpr/file_formats/#extended-blif-eblif
|
||||
"""
|
||||
|
||||
import re
|
||||
from collections import OrderedDict
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class Cell:
    """
    A single cell (subcircuit instance) of a BLIF/EBLIF netlist
    """

    def __init__(self, type):
        # Reference-only cell name. It is not written back as the .cname
        # attribute - use the cname field for that.
        self.name = None

        # Cell type (BLIF model)
        self.type = type

        # Connections, keyed by port specification ("<port>[<bit>]"),
        # holding net names.
        self.ports = OrderedDict()

        # Meaning depends on the cell type:
        # - $const cells: the constant value driven
        # - $lut cells: a list holding the truth table
        # - latches: the initial latch value or None
        self.init = None

        # Extended EBLIF data
        self.cname = None
        self.attributes = OrderedDict()  # name -> value, strings
        self.parameters = OrderedDict()  # name -> value, strings

    def __str__(self):
        return "{} ({})".format(self.type, self.name)

    def __repr__(self):
        return str(self)
|
||||
|
||||
|
||||
class Eblif:
|
||||
"""
|
||||
This class represents a top-level module of a BLIF/EBLIF netlist
|
||||
|
||||
The class contains a BLIF/EBLIF parser and serializer. The parser supports
|
||||
all EBLIF constructs except for .conn statements. It is possible to do a
|
||||
parser -> serializer round trip which should generate identical file as
|
||||
the one provided to the parser.
|
||||
|
||||
Netlist cells are stored using the Cell class (see above).
|
||||
|
||||
Cells defined as ".subckt <type>" are stored along with their type. Native
|
||||
BLIF cells (.names, .latch etc.) are represented via the following
|
||||
"built-in" cell models.
|
||||
|
||||
* $const - A constant generator (0-LUT). The output port is named "out"
|
||||
and the init parameter is set to the constant value generated.
|
||||
|
||||
* $lut - N-input LUT. The input port is named "lut_in" and the output one
|
||||
"lut_out". The init parameter contains the truth table. Log2 of size of
|
||||
the table defines the LUT width.
|
||||
|
||||
* $latch - A generic ff/latch with unknown type. Common ports are named:
|
||||
"D", "Q", and "clock". The init value specifies initial ff/latch state
|
||||
a per BLIF specification.
|
||||
|
||||
* $fe, $re, $ah, $al, $as - A ff/latch of type corresponding to BLIF
|
||||
definitions. Apart from different types these are identical to $latch
|
||||
|
||||
* $input, $output - These are used to represent top-level IO ports. The
|
||||
cells have single port named "inpad" and "outpad" respectively. To
|
||||
create / remove such cells use convert_ports_to_cells() and
|
||||
convert_cells_to_ports() methods.
|
||||
"""
|
||||
|
||||
def __init__(self, model):
|
||||
|
||||
# Top-level module name
|
||||
self.model = model
|
||||
|
||||
# Top-level input and output nets
|
||||
self.inputs = []
|
||||
self.outputs = []
|
||||
|
||||
# Cells (by names)
|
||||
self.cells = OrderedDict()
|
||||
|
||||
def add_cell(self, cell):
|
||||
"""
|
||||
Adds a cell. Generates its name if the cell doesn't have. Returns the
|
||||
name under which the cell is stored.
|
||||
"""
|
||||
|
||||
# No cell
|
||||
if cell is None:
|
||||
return None
|
||||
|
||||
# Use the cell name
|
||||
if cell.name:
|
||||
name = cell.name
|
||||
|
||||
# Generate a name
|
||||
else:
|
||||
index = 0
|
||||
while True:
|
||||
name = "{}{}".format(cell.type, index)
|
||||
if name not in self.cells:
|
||||
cell.name = name
|
||||
break
|
||||
|
||||
# Add it
|
||||
assert cell.name not in self.cells, cell.name
|
||||
self.cells[cell.name] = cell
|
||||
|
||||
return name
|
||||
|
||||
def find_cell(self, name, use_cname=True):
|
||||
"""
|
||||
Finds a cell in the netlist given its name, When use_cname is True
|
||||
then looks also by the cell's cname
|
||||
"""
|
||||
|
||||
# Try by name
|
||||
cell = self.cells.get(name, None)
|
||||
|
||||
# Try by cname if allowed
|
||||
if cell is None and use_cname:
|
||||
for c in self.cells.values():
|
||||
if c.cname == name:
|
||||
cell = c
|
||||
break
|
||||
|
||||
return cell
|
||||
|
||||
def convert_ports_to_cells(self):
|
||||
"""
|
||||
Converts top-level input and output ports to $input and $output cells
|
||||
"""
|
||||
|
||||
# Convert inputs
|
||||
for port in self.inputs:
|
||||
|
||||
cell = Cell("$input")
|
||||
cell.name = port
|
||||
cell.ports["inpad"] = port
|
||||
|
||||
self.add_cell(cell)
|
||||
|
||||
# Convert outputs
|
||||
for port in self.outputs:
|
||||
|
||||
cell = Cell("$output")
|
||||
cell.name = "out:" + port
|
||||
cell.cname = cell.name
|
||||
cell.ports["outpad"] = port
|
||||
|
||||
self.add_cell(cell)
|
||||
|
||||
# Clear top-level ports
|
||||
self.inputs = []
|
||||
self.outputs = []
|
||||
|
||||
def convert_cells_to_ports(self):
|
||||
"""
|
||||
Converts $input and $output cells into top-level ports
|
||||
"""
|
||||
for key in list(self.cells.keys()):
|
||||
cell = self.cells[key]
|
||||
|
||||
# Input
|
||||
if cell.type == "$input":
|
||||
assert "inpad" in cell.ports
|
||||
name = cell.ports["inpad"]
|
||||
|
||||
self.inputs.append(name)
|
||||
del self.cells[key]
|
||||
|
||||
# Output
|
||||
if cell.type == "$output":
|
||||
assert "outpad" in cell.ports
|
||||
name = cell.name.replace("out:", "")
|
||||
|
||||
self.outputs.append(name)
|
||||
del self.cells[key]
|
||||
|
||||
# Insert a buffer if the port name does not match the net name
|
||||
net = cell.ports["outpad"]
|
||||
if name != net:
|
||||
|
||||
cell = Cell("$lut")
|
||||
cell.name = name
|
||||
cell.ports["lut_in[0]"] = net
|
||||
cell.ports["lut_out"] = name
|
||||
cell.init = [0, 1]
|
||||
|
||||
self.add_cell(cell)
|
||||
|
||||
    @staticmethod
    def from_string(string):
        """
        Parses a BLIF/EBLIF netlist given as a multi-line string. Returns
        an Eblif class instance.

        Native BLIF constructs (.names, .latch) are converted to the
        built-in cell models ($lut / $const / $latch / $fe / $re / $ah /
        $al / $as) described in the class docstring. EBLIF extensions
        (.cname, .attr, .param) are attached to the most recently parsed
        cell.
        """

        def parse_single_output_cover(parts):
            """
            Parses a single output cover (one truth table row) of a BLIF
            truth table. Yields an (address, value) pair.
            """

            # FIXME: Add support for don't cares
            if "-" in parts[0]:
                assert False, "Don't cares ('-') not supported yet!"

            # Assume only single address. The input pattern is reversed so
            # that the first LUT input maps to the LSB of the table address.
            addr = int(parts[0][::-1], 2)
            data = int(parts[1])

            yield addr, data

        # Split lines, strip whitespace, remove blank ones
        lines = string.split("\n")
        lines = [line.strip() for line in lines]
        lines = [line for line in lines if line]

        eblif = None
        cell = None  # the cell currently being parsed

        # Parse lines
        for line_no, line in enumerate(lines):
            fields = line.split()

            # Reject comments (everything from the first "#" word on)
            for i in range(len(fields)):
                if fields[i].startswith("#"):
                    fields = fields[:i]
                    break

            # Empty line
            if not fields:
                continue

            # Look for .model
            if fields[0] == ".model":
                eblif = Eblif(fields[1])
                continue

            # No EBLIF yet - ignore everything preceding .model
            if not eblif:
                continue

            # Input list
            if fields[0] == ".inputs":
                eblif.inputs = fields[1:]

            # Output list
            elif fields[0] == ".outputs":
                eblif.outputs = fields[1:]

            # Got a generic cell
            elif fields[0] == ".subckt":
                assert len(fields) >= 2
                # Store the previously collected cell (if any) first
                eblif.add_cell(cell)

                # Add the new cell
                cell = Cell(fields[1])

                # Add ports and net connections ("<port>=<net>" words)
                for conn in fields[2:]:
                    port, net = conn.split("=", maxsplit=1)
                    cell.ports[port] = net

            # Got a native flip-flop / latch
            elif fields[0] == ".latch":
                assert len(fields) >= 3
                eblif.add_cell(cell)

                # Add the new cell
                cell = Cell("$latch")

                # Input and output
                cell.ports["D"] = fields[1]
                cell.ports["Q"] = fields[2]

                # Got type and control
                if len(fields) >= 5:
                    cell.type = "$" + fields[3]
                    cell.ports["clock"] = fields[4]

                # Use the output net as cell name
                cell.name = cell.ports["Q"]

                # Got initial value
                if len(fields) >= 6:
                    cell.init = int(fields[5])
                else:
                    cell.init = 3  # Unknown

            # Got a native LUT
            elif fields[0] == ".names":
                assert len(fields) >= 2
                eblif.add_cell(cell)

                # Determine LUT width (all fields but the keyword and output)
                width = len(fields[1:-1])

                # Add the new cell. A zero-input .names is a constant source
                type = "$lut" if width > 0 else "$const"
                cell = Cell(type)

                # Initialize the truth table (all zeros until covers follow)
                if type == "$lut":
                    cell.init = [0 for i in range(2**width)]
                elif type == "$const":
                    cell.init = 0

                # Input connections
                for i, net in enumerate(fields[1:-1]):
                    port = "lut_in[{}]".format(i)
                    cell.ports[port] = net

                # Output connection
                cell.ports["lut_out"] = fields[-1]

                # Use the output net as cell name
                cell.name = cell.ports["lut_out"]

            # LUT truth table chunk (a row of 0/1/- characters)
            elif all([c in ("0", "1", "-") for c in fields[0]]):
                assert cell is not None
                assert cell.type in ["$lut", "$const"]

                # The cell is a LUT
                if cell.type == "$lut":
                    assert len(fields) == 2
                    for addr, data in parse_single_output_cover(fields):
                        cell.init[addr] = data

                # The cell is a const source
                elif cell.type == "$const":
                    assert len(fields) == 1
                    cell.init = int(fields[0])

            # Cell name
            elif fields[0] == ".cname":
                cell.name = fields[1]
                cell.cname = cell.name

            # Cell attribute
            elif fields[0] == ".attr":
                cell.attributes[fields[1]] = fields[2]

            # Cell parameter
            elif fields[0] == ".param":
                cell.parameters[fields[1]] = fields[2]

            # End
            elif fields[0] == ".end":
                # FIXME: Mark that the end is reached and disregard following
                # keywords
                pass

            # Unknown directive
            else:
                assert False, line

        # Store the current (last) cell
        eblif.add_cell(cell)

        return eblif
|
||||
@staticmethod
|
||||
def from_file(file_name):
|
||||
"""
|
||||
Parses a BLIF/EBLIF file. Returns an Eblif class instance.
|
||||
"""
|
||||
with open(file_name, "r") as fp:
|
||||
string = fp.read()
|
||||
return Eblif.from_string(string)
|
||||
|
||||
    def to_string(self, cname=True, attr=True, param=True, consts=True):
        """
        Formats the netlist as a multi-line EBLIF string.

        The cname / attr / param flags control whether the corresponding
        extended (EBLIF) constructs are written. When consts is False the
        constant source cells ($const) are skipped entirely.
        """

        lines = []

        # Header
        lines.append(".model {}".format(self.model))
        lines.append(".inputs {}".format(" ".join(self.inputs)))
        lines.append(".outputs {}".format(" ".join(self.outputs)))

        # Cells
        for cell in self.cells.values():

            # A constant source - written as a zero-input .names
            if cell.type == "$const":

                # Skip consts
                if not consts:
                    continue

                lines.append(".names {}".format(str(cell.ports["lut_out"])))
                # A "1" cover line only for a non-zero constant
                if cell.init != 0:
                    lines.append(str(cell.init))

            # A LUT - written as a .names with a truth table
            elif cell.type == "$lut":

                # Identify LUT input pins and their bind indices
                nets = {}
                for port, net in cell.ports.items():
                    match = re.fullmatch(r"lut_in\[(?P<index>[0-9]+)\]", port)
                    if match is not None:
                        index = int(match.group("index"))
                        nets[index] = net

                # Write the cell header. Sort inputs by their indices
                keys = sorted(nets.keys())
                lines.append(".names {} {}".format(" ".join([nets[k] for k in keys]), str(cell.ports["lut_out"])))

                # Write the truth table. Each address pattern is reversed
                # (LSB first) so that character i corresponds to LUT input i,
                # mirroring the reversal done by the parser.
                fmt = "{:0" + str(len(nets)) + "b}"
                tab = []
                for addr, data in enumerate(cell.init):
                    if data != 0:
                        tab.append(fmt.format(addr)[::-1] + " 1")
                lines.extend(sorted(tab))

            # A latch of a known type
            elif cell.type in ["$fe", "$re", "$ah", "$al", "$as"]:

                line = ".latch {} {} {} {} {}".format(
                    str(cell.ports["D"]), str(cell.ports["Q"]), cell.type[1:], str(cell.ports["clock"]), str(cell.init)
                )
                lines.append(line)

            # A generic latch controlled by a single global clock
            elif cell.type == "$latch":

                line = ".latch {} {} {}".format(str(cell.ports["D"]), str(cell.ports["Q"]), str(cell.init))
                lines.append(line)

            # A generic subcircuit
            else:

                # The subcircuit along with its connections
                line = ".subckt {}".format(cell.type)
                for port, net in cell.ports.items():
                    line += " {}={}".format(port, net)
                lines.append(line)

            # Cell name
            if cname and cell.cname:
                lines.append(".cname {}".format(cell.cname))

            # Cell attributes
            if attr:
                for k, v in cell.attributes.items():
                    lines.append(".attr {} {}".format(k, v))

            # Cell parameters
            if param:
                for k, v in cell.parameters.items():
                    lines.append(".param {} {}".format(k, v))

        # Footer
        lines.append(".end")

        # Join all lines
        return "\n".join(lines)
|
||||
def to_file(self, file_name, **kw):
|
||||
"""
|
||||
Writes EBLIF data to a file
|
||||
"""
|
||||
with open(file_name, "w") as fp:
|
||||
fp.write(self.to_string(**kw))
|
125
f4pga/utils/quicklogic/repacker/netlist_cleaning.py
Normal file
125
f4pga/utils/quicklogic/repacker/netlist_cleaning.py
Normal file
|
@ -0,0 +1,125 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
"""
|
||||
Utilities for cleaning circuit netlists
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def absorb_buffer_luts(netlist, outputs=False):
    """
    Absorbs buffer (pass-through) LUTs into the nets they connect.

    By default buffers that touch top-level ports are left alone so that
    the externally visible net names do not change. With outputs=True
    buffers driving top-level outputs are absorbed as well (upstream,
    keeping the output net name).

    Returns a dict that maps each removed net name to the net that
    replaced it.
    """

    INP_PORT = "lut_in[0]"
    OUT_PORT = "lut_out"

    def get_buffer_nets(cell):
        """
        Returns the (input, output) nets of a buffer LUT
        """
        assert INP_PORT in cell.ports, cell
        assert OUT_PORT in cell.ports, cell
        return cell.ports[INP_PORT], cell.ports[OUT_PORT]

    def is_buffer_lut(cell):
        """
        Identifies a pass-through LUT eligible for absorption
        """
        # Must be a 1-input LUT with an identity truth table
        if cell.type != "$lut" or cell.init != [0, 1]:
            return False

        net_inp, net_out = get_buffer_nets(cell)

        # Must not touch top-level ports unless explicitly allowed
        if (net_inp in netlist.inputs or net_out in netlist.outputs) and not outputs:
            return False

        return True

    # Collect the candidate buffers first, then absorb them one by one
    buffers = {key: cell for key, cell in netlist.cells.items() if is_buffer_lut(cell)}

    net_map = {}
    for cell in buffers.values():
        net_inp, net_out = get_buffer_nets(cell)

        # A buffer connecting an input directly to an output cannot be
        # absorbed at all
        if net_out in netlist.outputs and net_inp in netlist.inputs:
            continue

        # Choose the surviving net. When the buffer drives a top-level
        # output the output net name is kept, otherwise the input net is.
        if net_out in netlist.outputs:
            old_net, new_net = net_inp, net_out
        else:
            old_net, new_net = net_out, net_inp

        # Reconnect every port that referenced the absorbed net
        for other in netlist.cells.values():
            for port, net in other.ports.items():
                if net == old_net:
                    other.ports[port] = new_net

        # Keep previously recorded mappings pointing at surviving nets
        for net in net_map:
            if net_map[net] == old_net:
                net_map[net] = new_net
        net_map[old_net] = new_net

        # Remove the absorbed buffer cell
        del netlist.cells[cell.name]

    logging.debug(" Absorbed {} buffer LUTs".format(len(buffers)))
    return net_map
|
||||
|
||||
def sweep_dangling_cells(netlist):
    """
    Intended to remove dangling (unconnected) cells from the netlist.

    Currently a stub - the netlist is left unmodified.
    """
    # TODO: Not implemented yet
    pass
719
f4pga/utils/quicklogic/repacker/packed_netlist.py
Normal file
719
f4pga/utils/quicklogic/repacker/packed_netlist.py
Normal file
|
@ -0,0 +1,719 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
"""
|
||||
Utilities for handling VTR packed netlist (.net) data
|
||||
|
||||
VPR packed netlist format specification:
|
||||
https://docs.verilogtorouting.org/en/latest/vpr/file_formats/#packed-netlist-format-net
|
||||
"""
|
||||
import re
|
||||
import lxml.etree as ET
|
||||
|
||||
from f4pga.utils.quicklogic.repacker.block_path import PathNode
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class Connection:
    """
    Describes where a single port pin is connected. For an output port the
    reference points at a child/sibling block, port and pin while for an
    input port it points at the parent/sibling block, port and pin.
    """

    # Parses "<driver>.<port>[<pin>]-><interconnect>" specifications
    REGEX = re.compile(r"(?P<driver>\S+)\.(?P<port>\S+)\[(?P<pin>[0-9]+)\]->(?P<interconnect>\S+)")

    def __init__(self, driver, port, pin, interconnect):
        """
        Stores the connection endpoint and the interconnect name
        """
        self.driver = driver
        self.port = port
        self.pin = pin
        self.interconnect = interconnect

    @staticmethod
    def from_string(spec):
        """
        Parses a specification string. Returns a Connection object

        >>> Connection.from_string("parent.A[3]->interconnect")
        parent.A[3]->interconnect
        >>> Connection.from_string("parent.A->interconnect")
        Traceback (most recent call last):
            ...
        AssertionError: parent.A->interconnect
        >>> Connection.from_string("Something")
        Traceback (most recent call last):
            ...
        AssertionError: Something
        """
        assert isinstance(spec, str)

        # The whole string must match the specification syntax
        match = Connection.REGEX.fullmatch(spec)
        assert match is not None, spec

        driver, port, pin, interconnect = match.group("driver", "port", "pin", "interconnect")
        return Connection(driver, port, int(pin), interconnect)

    def to_string(self):
        """
        Serializes the connection back to its packed netlist form
        """
        return "{}.{}[{}]->{}".format(self.driver, self.port, self.pin, self.interconnect)

    def __str__(self):
        return self.to_string()

    def __repr__(self):
        return self.to_string()
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class Port:
    """
    A block port: name, direction (type), bus width and per-pin
    connections. Connections map pin indices to either Connection objects
    or plain net name strings.
    """

    def __init__(self, name, type, width=1, connections=None):
        """
        Basic constructor. Validates the connections dict when given.
        """
        self.name = name
        self.type = type
        self.width = width

        if connections is None:
            self.connections = {}
        else:
            assert isinstance(connections, dict)

            # Sanity check every entry
            for pin, conn in connections.items():
                assert isinstance(pin, int), pin
                assert pin < self.width, (pin, width)
                assert isinstance(conn, str) or isinstance(conn, Connection), pin

            self.connections = connections

        # Optional pin rotation map (pin index -> rotated pin index)
        self.rotation_map = None

    @staticmethod
    def from_etree(elem, type):
        """
        Builds a Port from its packed netlist (XML) element.
        """
        assert elem.tag == "port", elem.tag

        # Pin connections are whitespace separated words
        words = elem.text.strip().split()
        width = len(words)

        # Keep only connected pins ("open" means unconnected). A word with
        # "->" refers to another port, anything else is a net name.
        conn = {}
        for pin, word in enumerate(words):
            if word == "open":
                continue
            conn[pin] = Connection.from_string(word) if "->" in word else word

        return Port(elem.attrib["name"], type, width, conn)

    def to_etree(self):
        """
        Serializes the port back to its packed netlist (XML) element.
        """
        words = [str(self.connections[pin]) if pin in self.connections else "open" for pin in range(self.width)]

        elem = ET.Element("port", attrib={"name": self.name})
        elem.text = " ".join(words)
        return elem

    def __str__(self):
        """
        Returns a user-readable description string
        """
        return "{}[{}:0] ({})".format(self.name, self.width - 1, self.type[0].upper())

    def __repr__(self):
        return str(self)
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class Block:
|
||||
"""
|
||||
A hierarchical block of a packed netlist.
|
||||
"""
|
||||
|
||||
def __init__(self, name, instance, mode=None, parent=None):
|
||||
|
||||
# Basic attributes
|
||||
self.name = name
|
||||
self.instance = instance
|
||||
self.mode = mode
|
||||
|
||||
# Identify the block type. FIXME: Use a regex here
|
||||
self.type = instance.split("[", maxsplit=1)[0]
|
||||
|
||||
# Ports indexed by names
|
||||
self.ports = {}
|
||||
|
||||
# Parent block
|
||||
self.parent = parent
|
||||
|
||||
# Child blocks indexed by instance
|
||||
self.blocks = {}
|
||||
|
||||
# Leaf block attributes and parameters
|
||||
self.attributes = {}
|
||||
self.parameters = {}
|
||||
|
||||
    @staticmethod
    def from_etree(elem):
        """
        Builds a block class from the packed netlist (XML) representation.

        Parses ports (with optional port rotation maps), recursively parses
        child blocks and reads attribute / parameter lists (allowed for
        leaf blocks only).
        """
        assert elem.tag == "block", elem.tag

        # Create the block with basic attributes
        block = Block(name=elem.attrib["name"], instance=elem.attrib["instance"], mode=elem.get("mode", "default"))

        # Parse ports. Rotation maps are collected first since the port
        # they refer to may not be parsed yet.
        rotation_maps = {}
        for tag in ["inputs", "outputs", "clocks"]:
            # The port direction is the section tag minus the trailing "s"
            port_type = tag[:-1]

            xml_ports = elem.find(tag)
            if xml_ports is not None:
                for xml_port in xml_ports:

                    # Got a port rotation map
                    if xml_port.tag == "port_rotation_map":
                        port_name = xml_port.attrib["name"]
                        rotation = xml_port.text

                        # Parse the map ("open" entries are skipped)
                        rotation_map = {}
                        for i, j in enumerate(rotation.strip().split()):
                            if j != "open":
                                rotation_map[i] = int(j)

                        # Store it to be later associated with a port
                        rotation_maps[port_name] = rotation_map

                    # Got a port
                    else:
                        port = Port.from_etree(xml_port, port_type)
                        block.ports[port.name] = port

        # Associate rotation maps with ports
        for port_name, rotation_map in rotation_maps.items():
            assert port_name in block.ports, port_name
            block.ports[port_name].rotation_map = rotation_map

        # Recursively parse sub-blocks
        for xml_block in elem.findall("block"):

            sub_block = Block.from_etree(xml_block)

            sub_block.parent = block
            block.blocks[sub_block.instance] = sub_block

        # Parse attributes and parameters
        for tag, data in zip(["attributes", "parameters"], [block.attributes, block.parameters]):

            # Find the list
            xml_list = elem.find(tag)
            if xml_list is not None:

                # Only a leaf block can have attributes / parameters
                assert block.is_leaf, "Non-leaf block '{}' with {}".format(block.instance, tag)

                # Parse
                sub_tag = tag[:-1]
                for xml_item in xml_list.findall(sub_tag):
                    data[xml_item.attrib["name"]] = xml_item.text

        return block
|
||||
    def to_etree(self):
        """
        Converts the block representation to the packed netlist (XML)
        representation, recursively including child blocks.
        """

        # Base block element. Only non-leaf blocks carry a mode attribute
        attrib = {
            "name": self.name,
            "instance": self.instance,
        }
        if not self.is_leaf:
            attrib["mode"] = self.mode if self.mode is not None else "default"

        elem = ET.Element("block", attrib)

        # If this is an "open" block then skip the remaining tags
        if self.name == "open":
            return elem

        # Attributes / parameters (leaf blocks only)
        if self.is_leaf:
            for tag, data in zip(["attributes", "parameters"], [self.attributes, self.parameters]):

                xml_list = ET.Element(tag)

                # One sub-element per attribute / parameter
                sub_tag = tag[:-1]
                for key, value in data.items():
                    xml_item = ET.Element(sub_tag, {"name": key})
                    xml_item.text = value
                    xml_list.append(xml_item)

                elem.append(xml_list)

        # Ports, grouped into sections by their direction
        for tag in ["inputs", "outputs", "clocks"]:
            xml_ports = ET.Element(tag)
            port_type = tag[:-1]

            keys = self.ports.keys()
            for key in keys:
                port = self.ports[key]
                if port.type == port_type:

                    # Encode port
                    xml_port = port.to_etree()
                    xml_ports.append(xml_port)

                    # Rotation map follows its port directly
                    if port.rotation_map:

                        # Encode. Unmapped pins are written as "open"
                        rotation = []
                        for i in range(port.width):
                            rotation.append(str(port.rotation_map.get(i, "open")))

                        # Make an element
                        xml_rotation_map = ET.Element("port_rotation_map", {"name": port.name})
                        xml_rotation_map.text = " ".join(rotation)
                        xml_ports.append(xml_rotation_map)

            elem.append(xml_ports)

        # Recurse into child blocks
        keys = self.blocks.keys()
        for key in keys:
            xml_block = self.blocks[key].to_etree()
            elem.append(xml_block)

        return elem
|
||||
@property
|
||||
def is_leaf(self):
|
||||
"""
|
||||
Returns True when the block is a leaf block
|
||||
"""
|
||||
return len(self.blocks) == 0
|
||||
|
||||
    @property
    def is_open(self):
        """
        Returns True when the block is open, i.e. named "open" which marks
        an unused block in the packed netlist
        """
        return self.name == "open"
|
||||
@property
|
||||
def is_route_throu(self):
|
||||
"""
|
||||
Returns True when the block is a route-throu native LUT
|
||||
"""
|
||||
|
||||
# VPR stores route-through LUTs as "open" blocks with mode set to
|
||||
# "wire".
|
||||
return self.is_leaf and self.name == "open" and self.mode == "wire"
|
||||
|
||||
def get_path(self, with_indices=True, with_modes=True, default_modes=True):
|
||||
"""
|
||||
Returns the full path to the block. When with_indices is True then
|
||||
index suffixes '[<index>]' are appended to block types. When
|
||||
with_modes is True then mode suffixes '[<mode>]' are appended. The
|
||||
default_modes flag controls appending suffixes for default modes.
|
||||
"""
|
||||
path = []
|
||||
block = self
|
||||
|
||||
# Walk towards the tree root
|
||||
while block is not None:
|
||||
|
||||
# Type or type with index (instance)
|
||||
if with_indices:
|
||||
node = block.instance
|
||||
else:
|
||||
node = block.type
|
||||
|
||||
# Mode suffix
|
||||
if not block.is_leaf and with_modes:
|
||||
if block.mode != "default" or default_modes:
|
||||
node += "[{}]".format(block.mode)
|
||||
|
||||
# Prepend
|
||||
path = [node] + path
|
||||
|
||||
# Go up
|
||||
block = block.parent
|
||||
|
||||
return ".".join(path)
|
||||
|
||||
def rename_cluster(self, name):
|
||||
"""
|
||||
Renames this block and all its children except leaf blocks
|
||||
"""
|
||||
|
||||
def walk(block):
|
||||
|
||||
if not block.is_leaf and not block.is_open:
|
||||
block.name = name
|
||||
|
||||
for child in block.blocks.values():
|
||||
walk(child)
|
||||
|
||||
walk(self)
|
||||
|
||||
    def rename_nets(self, net_map):
        """
        Renames all nets according to the given map (old name -> new name)
        and renames the leaf blocks that drive them accordingly.
        """

        def walk(block):

            # Rename nets in port connections. Check whether the block itself
            # should be renamed as well (output pads need to be).
            rename_block = block.name.startswith("out:")

            for port in block.ports.values():
                for pin, conn in port.connections.items():

                    # Only direct net references are renamed; Connection
                    # objects refer to ports, not nets.
                    if isinstance(conn, str):
                        port.connections[pin] = net_map.get(conn, conn)

                        if port.type == "output":
                            rename_block = True

            # Rename the leaf block if necessary. Output pad blocks keep
            # their "out:" prefix.
            if block.is_leaf and not block.is_open and rename_block:

                if block.name in net_map:
                    block.name = net_map[block.name]
                elif block.name.startswith("out:"):
                    key = block.name[4:]
                    if key in net_map:
                        block.name = "out:" + net_map[key]

            # Recurse
            for child in block.blocks.values():
                walk(child)

        walk(self)
|
||||
def get_neighboring_block(self, instance):
|
||||
"""
|
||||
Returns a neighboring block given in the vicinity of the current block
|
||||
given an instance name. The block can be:
|
||||
|
||||
- This block,
|
||||
- A child,
|
||||
- The parent,
|
||||
- A sibling (the parent's child).
|
||||
"""
|
||||
|
||||
# Strip index. FIXME: Use a regex here.
|
||||
block_type = instance.split("[", maxsplit=1)[0]
|
||||
|
||||
# Check self
|
||||
if self.instance == instance:
|
||||
return self
|
||||
|
||||
# Check children
|
||||
if instance in self.blocks:
|
||||
return self.blocks[instance]
|
||||
|
||||
# Check parent and siblings
|
||||
if self.parent is not None:
|
||||
|
||||
# Parent
|
||||
if self.parent.type == block_type:
|
||||
return self.parent
|
||||
|
||||
# Siblings
|
||||
if instance in self.parent.blocks:
|
||||
return self.parent.blocks[instance]
|
||||
|
||||
return None
|
||||
|
||||
def find_net_for_port(self, port, pin):
|
||||
"""
|
||||
Finds net for the given port name and pin index of the block.
|
||||
"""
|
||||
|
||||
# Get the port
|
||||
assert port in self.ports, (self.name, self.instance, (port, pin))
|
||||
port = self.ports[port]
|
||||
|
||||
# Unconnected
|
||||
if pin not in port.connections:
|
||||
return None
|
||||
|
||||
# Get the pin connection
|
||||
conn = port.connections[pin]
|
||||
|
||||
# The connection refers to a net directly
|
||||
if isinstance(conn, str):
|
||||
return conn
|
||||
|
||||
# Get driving block
|
||||
block = self.get_neighboring_block(conn.driver)
|
||||
assert block is not None, (self.instance, conn.driver)
|
||||
|
||||
# Recurse
|
||||
return block.find_net_for_port(conn.port, conn.pin)
|
||||
|
||||
def get_nets(self):
|
||||
"""
|
||||
Returns all nets that are present in this block and its children
|
||||
"""
|
||||
|
||||
nets = set()
|
||||
|
||||
# Recursive walk function
|
||||
def walk(block):
|
||||
|
||||
# Examine block ports
|
||||
for port in block.ports.values():
|
||||
for pin in range(port.width):
|
||||
|
||||
net = block.find_net_for_port(port.name, pin)
|
||||
if net:
|
||||
nets.add(net)
|
||||
|
||||
# Get the nets
|
||||
walk(self)
|
||||
return nets
|
||||
|
||||
    def get_block_by_path(self, path):
        """
        Returns a child block given its hierarchical path. The path must not
        include the current block.

        The path may or may not contain modes. When a mode is given it will
        be used for matching. The path must contain indices.
        """

        def walk(block, parts):

            # Check if instance matches
            instance = "{}[{}]".format(parts[0].name, parts[0].index)
            if block.instance != instance:
                return None

            # Check if operating mode matches (only when the path specifies
            # one for this level)
            if parts[0].mode is not None:
                if block.mode != parts[0].mode:
                    return None

            # Next
            parts = parts[1:]

            # No more path parts, this is the block
            if not parts:
                return block

            # Find child block by its instance and recurse
            instance = "{}[{}]".format(parts[0].name, parts[0].index)
            if instance in block.blocks:
                return walk(block.blocks[instance], parts)

            return None

        # Prepend self to the path so walk() can verify the root level too.
        # NOTE(review): self.mode is formatted into the bracket slot, yielding
        # e.g. "clb[0][None]" when mode is unset - presumably
        # PathNode.from_string handles that form; TODO confirm.
        path = "{}[{}]".format(self.instance, self.mode) + "." + path

        # Split and parse the path
        path = path.split(".")
        path = [PathNode.from_string(p) for p in path]

        # Find the child
        return walk(self, path)
|
||||
|
||||
def count_leafs(self):
|
||||
"""
|
||||
Counts all non-open leaf blocks
|
||||
"""
|
||||
|
||||
def walk(block, count=0):
|
||||
|
||||
# This is a non-ope leaf, count it
|
||||
if block.is_leaf and not block.is_open:
|
||||
count += 1
|
||||
|
||||
# This is a non-leaf block. Recurse
|
||||
if not block.is_leaf:
|
||||
for child in block.blocks.values():
|
||||
count += walk(child)
|
||||
|
||||
return count
|
||||
|
||||
# Recursive walk and count
|
||||
return walk(self)
|
||||
|
||||
def __str__(self):
|
||||
"""
|
||||
Returns a user-readable description string
|
||||
"""
|
||||
ref = self.instance
|
||||
if self.mode is not None:
|
||||
ref += "[{}]".format(self.mode)
|
||||
ref += " ({})".format(self.name)
|
||||
return ref
|
||||
|
||||
    def __repr__(self):
        # Reuse the human-readable form for debugging output
        return str(self)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class PackedNetlist:
    """
    A VPR Packed netlist representation.

    The netlist is organized as a single implicit top-level block that
    represents the whole FPGA. Its children are the individual placeable
    blocks (CLBs) and those are what this class stores explicitly, so every
    block held in a PackedNetlist refers to one placeable CLB.
    """

    def __init__(self):
        """
        Basic constructor
        """

        # Architecture and atom netlist ids (optional XML attributes)
        self.arch_id = None
        self.netlist_id = None

        # Top-level block name and instance
        self.name = None
        self.instance = None

        # Top-level ports, each a list of net names
        self.ports = {key: [] for key in ("inputs", "outputs", "clocks")}

        # CLBs indexed by instance name
        self.blocks = {}

    @staticmethod
    def from_etree(root):
        """
        Reads the packed netlist from the given element tree
        """
        assert root.tag == "block", root.tag

        netlist = PackedNetlist()
        netlist.name = root.attrib["name"]
        netlist.instance = root.attrib["instance"]

        netlist.arch_id = root.get("architecture_id", None)
        netlist.netlist_id = root.get("atom_netlist_id", None)

        # Top-level ports are whitespace-separated net names
        for tag in ("inputs", "outputs", "clocks"):
            xml_ports = root.find(tag)
            if xml_ports is not None and xml_ports.text:
                netlist.ports[tag] = xml_ports.text.strip().split()

        # Each child <block> element is a CLB
        for xml_block in root.findall("block"):
            clb = Block.from_etree(xml_block)
            netlist.blocks[clb.instance] = clb

        return netlist

    def to_etree(self):
        """
        Builds an element tree (XML) that represents the packed netlist
        """

        # Root element attributes; ids only when present
        attrib = {
            "name": self.name,
            "instance": self.instance,
        }
        if self.arch_id is not None:
            attrib["architecture_id"] = self.arch_id
        if self.netlist_id is not None:
            attrib["atom_netlist_id"] = self.netlist_id

        root = ET.Element("block", attrib)

        # Top-level ports
        for tag in ("inputs", "outputs", "clocks"):
            xml_ports = ET.SubElement(root, tag)
            xml_ports.text = " ".join(self.ports[tag])

        # CLB blocks
        for clb in self.blocks.values():
            root.append(clb.to_etree())

        return root
|
570
f4pga/utils/quicklogic/repacker/pb_rr_graph.py
Normal file
570
f4pga/utils/quicklogic/repacker/pb_rr_graph.py
Normal file
|
@ -0,0 +1,570 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
"""
|
||||
A set of utils for VTR pb_type rr graphs
|
||||
"""
|
||||
|
||||
import itertools
|
||||
import colorsys
|
||||
|
||||
from enum import Enum
|
||||
|
||||
from f4pga.utils.quicklogic.repacker.block_path import PathNode
|
||||
|
||||
from f4pga.utils.quicklogic.repacker.pb_type import PortType
|
||||
|
||||
from f4pga.utils.quicklogic.repacker.arch_xml_utils import is_leaf_pbtype
|
||||
from f4pga.utils.quicklogic.repacker.arch_xml_utils import get_parent_pb
|
||||
from f4pga.utils.quicklogic.repacker.arch_xml_utils import yield_pb_children
|
||||
from f4pga.utils.quicklogic.repacker.arch_xml_utils import yield_pins
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class NodeType(Enum):
    """
    Type of a pb graph node
    """

    # Intermediate port of a non-leaf pb_type
    PORT = 0
    # Driver of a net (top-level input/clock pin or leaf output pin)
    SOURCE = 1
    # Endpoint of a net (top-level output pin or leaf input/clock pin)
    SINK = 2
|
||||
|
||||
|
||||
class Node:
    """
    A single node of a pb graph. Each node corresponds to one pin of a
    pb_type port, identified by its hierarchical path.
    """

    def __init__(self, id, type, port_type, path, net=None):
        # Unique numeric identifier
        self.id = id
        # Node type (see NodeType)
        self.type = type
        # Direction of the underlying port
        self.port_type = port_type
        # Hierarchical path of the port pin this node represents
        self.path = path
        # Net annotation (None when the node is unused)
        self.net = net

    def __str__(self):
        return f"id:{self.id:3d}, type:{self.type}, port_type:{self.port_type}, path:{self.path}, net:{self.net}"
|
||||
|
||||
|
||||
class Edge:
    """
    A directed edge of a pb graph. Each edge corresponds to a single
    interconnect connection between two port pins (nodes).
    """

    def __init__(self, src_id, dst_id, ic):
        # Id of the source (driving) node
        self.src_id = src_id
        # Id of the destination (driven) node
        self.dst_id = dst_id
        # Name of the interconnect the edge belongs to
        self.ic = ic

    def __str__(self):
        return f"src_id:{self.src_id}, dst_id:{self.dst_id}, ic:{self.ic}"
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class Graph:
    """
    Representation of a complex block routing graph.

    This class stores only nodes and edges instead of a hierarchical tree of
    objects representing blocks (pb_types). Graph nodes are associated with
    pb_type ports. Each node has a path that uniquely identifies which port
    it represents.
    """

    def __init__(self):

        # Nodes by id
        self.nodes = {}
        # Edges (assorted, in insertion order)
        self.edges = []

        # Id of the next node to be added
        self.next_node_id = 0

    def add_node(self, type, port_type, path, net=None):
        """
        Adds a new node. Automatically assigns its id.
        """
        node = Node(id=self.next_node_id, type=type, port_type=port_type, path=path, net=net)

        self.nodes[node.id] = node
        self.next_node_id += 1

        return node

    def add_edge(self, src_id, dst_id, ic):
        """
        Adds a new edge. Checks if the given node ids are valid.
        """
        assert src_id in self.nodes, src_id
        assert dst_id in self.nodes, dst_id

        edge = Edge(src_id=src_id, dst_id=dst_id, ic=ic)

        self.edges.append(edge)

        return edge

    def clear_nets(self):
        """
        Removes all net annotations
        """
        for node in self.nodes.values():
            node.net = None

    def edge_net(self, edge):
        """
        Returns net associated with the given edge

        Edges do not have explicit net annotations. It is assumed that when
        an edge binds two nodes that belong to the same net, that edge belongs
        to that net as well.
        """
        src_node = self.nodes[edge.src_id]
        dst_node = self.nodes[edge.dst_id]

        if src_node.net is None:
            return None
        if dst_node.net is None:
            return None

        if src_node.net != dst_node.net:
            return None

        return src_node.net

    @staticmethod
    def from_etree(xml_clb, clb_instance=None):
        """
        Builds a routing graph for the given complex block from VPR
        architecture XML description.
        """
        assert xml_clb.tag == "pb_type"

        # Create the graph
        graph = Graph()

        # Handler for "lut" pb_types. It creates an additional level of
        # hierarchy to match the representation in packed netlist.
        def process_lut(xml_pbtype, up_node_map, path):

            # The parent is actually a leaf so it does not have any modes.
            # However, the mode name must equal the pb_type name.
            curr_path = path + "[{}].lut[0]".format(xml_pbtype.attrib["name"])

            # Copy nodes from the parent and connect them 1-to-1
            for parent_node in up_node_map.values():
                parent_port = parent_node.path.rsplit(".", maxsplit=1)[-1]

                # Add node
                node = graph.add_node(parent_node.type, parent_node.port_type, ".".join([curr_path, parent_port]))

                # Add edge, directed according to the port direction
                ic = "direct:{}".format(xml_pbtype.attrib["name"])
                if parent_node.port_type in [PortType.INPUT, PortType.CLOCK]:
                    graph.add_edge(parent_node.id, node.id, ic)
                elif parent_node.port_type == PortType.OUTPUT:
                    graph.add_edge(node.id, parent_node.id, ic)
                else:
                    assert False, parent_node

            # Change parent port nodes to PORT - the new "lut[0]" level now
            # carries the SOURCE/SINK roles.
            for parent_node in up_node_map.values():
                parent_node.type = NodeType.PORT

        # Recursive build function
        def process_pbtype(xml_pbtype, up_node_map, path=""):
            """
            This function adds nodes for all child pb_type ports and edges
            connecting them with the parent pb_type ports. The process is
            repeated for each mode.

            Before the first level of recursion nodes of the top-level pb_type
            need to be already built.
            """

            # Identify all modes. If there is none then add the default
            # implicit one.
            xml_modes = {x.attrib["name"]: x for x in xml_pbtype.findall("mode")}
            if not xml_modes:
                xml_modes = {"default": xml_pbtype}

            # Process each mode
            for mode, xml_mode in xml_modes.items():

                # Append mode name to the current path
                curr_path = path + "[{}]".format(mode)

                # Enumerate children, build their paths
                children = []
                for xml_child, index in yield_pb_children(xml_mode):

                    child_path = ".".join([curr_path, "{}[{}]".format(xml_child.attrib["name"], index)])

                    children.append(
                        (
                            xml_child,
                            child_path,
                        )
                    )

                # Build child pb_type nodes
                dn_node_map = {}
                for xml_child, child_path in children:
                    nodes = graph._build_nodes(xml_child, child_path)
                    dn_node_map.update(nodes)

                    # Special case, a "lut" class leaf pb_type
                    cls = xml_child.get("class", None)
                    if cls == "lut":
                        process_lut(xml_child, nodes, child_path)

                # Build interconnect edges
                xml_ic = xml_mode.find("interconnect")
                assert xml_ic is not None

                graph._build_edges(xml_ic, curr_path, up_node_map, dn_node_map)

                # Recurse into non-leaf children
                for xml_child, child_path in children:
                    if not is_leaf_pbtype(xml_child):
                        process_pbtype(xml_child, dn_node_map, child_path)

        # Set the top-level CLB path
        if clb_instance is None:
            path = xml_clb.attrib["name"] + "[0]"
        else:
            path = clb_instance

        # Build top-level sources and sinks
        up_node_map = graph._build_nodes(xml_clb, path)

        # Begin recursion
        process_pbtype(xml_clb, up_node_map, path)

        return graph

    def _build_nodes(self, xml_pbtype, prefix):
        """
        Adds nodes for each pin of the given pb_type. Returns a map of pin
        names (full paths) to node objects.
        """
        node_map = {}

        # Sink/source node type map for leaf pb_types
        leaf_node_types = {
            "input": NodeType.SINK,
            "clock": NodeType.SINK,
            "output": NodeType.SOURCE,
        }

        # ...and the inverse mapping for the top-level pb_type
        top_node_types = {
            "input": NodeType.SOURCE,
            "clock": NodeType.SOURCE,
            "output": NodeType.SINK,
        }

        # Determine where the pb_type is in the hierarchy
        is_leaf = is_leaf_pbtype(xml_pbtype)
        is_top = get_parent_pb(xml_pbtype) is None
        assert not (is_top and is_leaf), (xml_pbtype.tag, xml_pbtype.attrib)

        # Add nodes
        for xml_port in xml_pbtype:

            if xml_port.tag in ["input", "output", "clock"]:
                width = int(xml_port.attrib["num_pins"])

                # Determine node type
                if is_top:
                    node_type = top_node_types[xml_port.tag]
                elif is_leaf:
                    node_type = leaf_node_types[xml_port.tag]
                else:
                    node_type = NodeType.PORT

                # Determine node port direction
                port_type = PortType.from_string(xml_port.tag)

                # Add a node for each pin
                for i in range(width):
                    name = "{}[{}]".format(xml_port.attrib["name"], i)
                    path = ".".join([prefix, name])
                    node = self.add_node(node_type, port_type, path)

                    node_map[path] = node

        return node_map

    def _build_edges(self, xml_ic, path, up_node_map, dn_node_map):
        """
        Builds edges for the given interconnect. Uses node maps to bind
        port pin names with node ids.
        """

        # Join node maps
        node_map = {**up_node_map, **dn_node_map}

        # Get parent pb_type and its name
        xml_pbtype = get_parent_pb(xml_ic)
        parent_name = xml_pbtype.attrib["name"]

        # Split the path
        path_parts = path.split(".")

        # Helper converting an interconnect pin reference to a full node path
        def get_node_path(pin):

            # Split parts
            parts = pin.split(".")
            assert len(parts) == 2, pin

            # Parse the pb_type referred by the pin
            pin_pbtype = PathNode.from_string(parts[0])

            # This pin refers to the parent
            if pin_pbtype.name == parent_name:
                ref_pbtype = PathNode.from_string(path_parts[-1])

                # Fixup pb_type reference name with the parent's actual index
                part = "{}[{}]".format(
                    pin_pbtype.name,
                    ref_pbtype.index,
                )
                parts = path_parts[:-1] + [part] + parts[1:]

            else:
                parts = path_parts + parts

            # Assemble the full path
            node_path = ".".join(parts)
            return node_path

        # Process interconnects
        for xml_conn in xml_ic:

            # Direct: 1-to-1 pin connections
            if xml_conn.tag == "direct":
                inps = list(yield_pins(xml_ic, xml_conn.attrib["input"], False))
                outs = list(yield_pins(xml_ic, xml_conn.attrib["output"], False))

                assert len(inps) == len(outs), (xml_conn.tag, xml_conn.attrib, len(inps), len(outs))

                # Add edges
                for inp, out in zip(inps, outs):
                    inp = get_node_path(inp)
                    out = get_node_path(out)

                    self.add_edge(src_id=node_map[inp].id, dst_id=node_map[out].id, ic=xml_conn.attrib["name"])

            # Mux: many single-pin inputs to one output pin
            elif xml_conn.tag == "mux":
                inp_ports = xml_conn.attrib["input"].split()

                # Get output pins, should be only one
                out_pins = list(yield_pins(xml_ic, xml_conn.attrib["output"], False))
                assert len(out_pins) == 1, xml_ic.attrib

                # Build edges for each input port
                for inp_port in inp_ports:

                    # Get input pins, should be only one
                    inp_pins = list(yield_pins(xml_ic, inp_port, False))
                    assert len(inp_pins) == 1, xml_conn.attrib

                    # Add edge
                    inp = get_node_path(inp_pins[0])
                    out = get_node_path(out_pins[0])

                    self.add_edge(src_id=node_map[inp].id, dst_id=node_map[out].id, ic=xml_conn.attrib["name"])

            # Complete: full crossbar, every input to every output
            elif xml_conn.tag == "complete":
                inp_ports = xml_conn.attrib["input"].split()
                out_ports = xml_conn.attrib["output"].split()

                # Build lists of all input and all output pins
                inp_pins = []
                for inp_port in inp_ports:
                    inp_pins.extend(list(yield_pins(xml_ic, inp_port, False)))

                out_pins = []
                for out_port in out_ports:
                    out_pins.extend(list(yield_pins(xml_ic, out_port, False)))

                # Add edges for the full cartesian product
                for inp_pin, out_pin in itertools.product(inp_pins, out_pins):
                    inp = get_node_path(inp_pin)
                    out = get_node_path(out_pin)

                    self.add_edge(src_id=node_map[inp].id, dst_id=node_map[out].id, ic=xml_conn.attrib["name"])

    def dump_dot(self, color_by="type", nets_only=False, highlight_nodes=None):
        """
        Returns a string with the graph in DOT format suitable for the
        Graphviz.

        Nodes can be colored by "type" or "net". When nets_only is True then
        only nodes and edges that belong to a net are dumped.
        """
        dot = []

        # Build net color map - evenly spaced hues, one per net
        if color_by == "net":
            nets = set([node.net for node in self.nodes.values()]) - set([None])
            nets = sorted(list(nets))

            net_colors = {}
            for i, net in enumerate(nets):

                h = i / len(nets)
                l = 0.50  # noqa: E741
                s = 1.0

                r, g, b = colorsys.hls_to_rgb(h, l, s)
                color = "#{:02X}{:02X}{:02X}".format(
                    int(r * 255.0),
                    int(g * 255.0),
                    int(b * 255.0),
                )

                net_colors[net] = color

        # Node color
        def node_color(node, highlight=False):

            # Highlight mode overrides the color_by setting
            if highlight_nodes:
                if highlight:
                    return "#FF2020"
                elif node.net:
                    return "#808080"
                else:
                    return "#C0C0C0"

            # By type
            if color_by == "type":
                if node.type == NodeType.SOURCE:
                    return "#C08080"
                elif node.type == NodeType.SINK:
                    return "#8080C0"
                else:
                    return "#C0C0C0"

            # By net
            elif color_by == "net":
                if node.net is None:
                    return "#C0C0C0"
                else:
                    return net_colors[node.net]

            # Default
            else:
                return "#C0C0C0"

        # Edge color
        def edge_color(edge):

            if color_by == "net":
                net = self.edge_net(edge)
                if net:
                    return net_colors[net]
                else:
                    return "#C0C0C0"

            # Default
            else:
                return "#C0C0C0"

        # Add header
        dot.append("digraph g {")
        dot.append(" rankdir=LR;")
        dot.append(" ratio=0.5;")
        dot.append(" splines=false;")
        dot.append(" node [style=filled];")

        # Build nodes, grouped by rank
        nodes = {}
        for node in self.nodes.values():

            if nets_only and node.net is None:
                continue

            highlight = highlight_nodes is not None and node.id in highlight_nodes  # noqa: E127

            parts = node.path.split(".")
            label = "{}: {}".format(node.id, ".".join(parts[-2:]))
            color = node_color(node, highlight)
            rank = 0

            if node.net:
                xlabel = node.net
            else:
                xlabel = ""

            if node.type == NodeType.SOURCE:
                shape = "diamond"
            elif node.type == NodeType.SINK:
                shape = "octagon"
            else:
                shape = "ellipse"

            if rank not in nodes:
                nodes[rank] = []

            nodes[rank].append({"id": node.id, "label": label, "xlabel": xlabel, "color": color, "shape": shape})

        # Add nodes
        for rank, nodes in nodes.items():
            dot.append(" {")

            for node in nodes:
                dot.append(
                    ' node_{} [label="{}",xlabel="{}",fillcolor="{}",shape={}];'.format(
                        node["id"],
                        node["label"],
                        node["xlabel"],
                        node["color"],
                        node["shape"],
                    )
                )

            dot.append(" }")

        # Add edges
        for edge in self.edges:

            if nets_only:
                if not self.edge_net(edge):
                    continue

            label = edge.ic
            color = edge_color(edge)

            dot.append(' node_{} -> node_{} [label="{}",color="{}"];'.format(edge.src_id, edge.dst_id, label, color))

        # Footer
        dot.append("}")
        return "\n".join(dot)
|
324
f4pga/utils/quicklogic/repacker/pb_rr_graph_netlist.py
Normal file
324
f4pga/utils/quicklogic/repacker/pb_rr_graph_netlist.py
Normal file
|
@ -0,0 +1,324 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
"""
|
||||
A set of utility functions responsible for loading a packed netlist into a
|
||||
complex block routing graph and creating a packed netlist from a graph with
|
||||
routing information.
|
||||
"""
|
||||
|
||||
from f4pga.utils.quicklogic.repacker.block_path import PathNode
|
||||
|
||||
import f4pga.utils.quicklogic.repacker.packed_netlist as packed_netlist
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def get_block_by_path(block, path):
    """
    Walks down the block hierarchy following the given path and returns the
    block it leads to. The path must be a list of PathNode objects (or any
    objects exposing .name, .index and .mode). Returns None when any level
    does not match.
    """

    # Iterative descent, one path element per hierarchy level
    for part in path:

        # Find the child by its instance name
        instance = "{}[{}]".format(part.name, part.index)
        child = block.blocks.get(instance)
        if child is None:
            return None

        # When the path specifies an operating mode it must match
        if part.mode is not None and child.mode != part.mode:
            return None

        block = child

    return block
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def load_clb_nets_into_pb_graph(clb_block, clb_graph):
    """
    Annotates the nodes of the CLB routing graph with net names taken from
    the packed netlist of the given CLB block.
    """

    for node in clb_graph.nodes.values():

        # Node paths are dot-separated hierarchical references
        path = [PathNode.from_string(part) for part in node.path.split(".")]

        # The first path element must refer to this very CLB
        root_instance = "{}[{}]".format(path[0].name, path[0].index)
        assert root_instance == clb_block.instance, (root_instance, clb_block.instance)

        # Locate the netlist block the node's port belongs to. Nodes without
        # a corresponding block stay unannotated.
        target = get_block_by_path(clb_block, path[1:-1])
        if target is None:
            continue

        # The last path element names the port and its pin index
        pin_ref = path[-1]
        if pin_ref.name not in target.ports:
            continue
        port = target.ports[pin_ref.name]

        # Annotate the node with the net driving that pin (may be None)
        node.net = target.find_net_for_port(port.name, pin_ref.index)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def build_packed_netlist_from_pb_graph(clb_graph):
    """
    This function builds a packed netlist fragment given an annotated (with
    nets) CLB graph. The created netlist fragment will be missing information:

    - Atom block names
    - Atom block attributes and parameters

    The missing information has to be supplied externally as it is not stored
    within a graph.

    The first step is to create the block hierarchy (without any ports yet).
    This is done by scanning the graph and creating blocks for nodes that
    belong to nets. Only those nodes are used here.

    The second stage is open block insertion. Whenever a non-leaf is in use
    it should have all of its children present. Unused ones should be marked
    as "open".

    The third stage is adding ports to the blocks and connectivity information.
    To gather all necessary information all the nodes of the graph are needed.

    The final step is leaf block name assignment. Leaf blocks get their names
    based on nets they drive. A special case is a block representing an output
    that doesn't drive anything. Such blocks get names based on their inputs
    but prefixed with "out:".
    """

    # Build node connectivity. This map holds the set of upstream
    # (source id, interconnect) pairs for a given node. Active nodes only.
    nodes_up = {}

    for edge in clb_graph.edges:

        # Check if the edge is active (binds two nodes of the same net)
        if clb_graph.edge_net(edge) is None:
            continue

        # Up map
        if edge.dst_id not in nodes_up:
            nodes_up[edge.dst_id] = set()
        nodes_up[edge.dst_id].add((edge.src_id, edge.ic))

    # Create the block hierarchy for nodes that have nets assigned.
    clb_block = None
    for node in clb_graph.nodes.values():

        # No net - node not used by the netlist
        if node.net is None:
            continue

        # Disassemble node path parts
        parts = node.path.split(".")
        parts = [PathNode.from_string(p) for p in parts]

        # Create the root CLB on first use; all nodes must agree on it
        instance = "{}[{}]".format(parts[0].name, parts[0].index)
        if clb_block is None:
            clb_block = packed_netlist.Block(name="clb", instance=instance)  # FIXME:
        else:
            assert clb_block.instance == instance

        # Follow the path, create blocks along the way as needed
        parent = clb_block
        for prev_part, curr_part in zip(parts[0:-2], parts[1:-1]):
            instance = "{}[{}]".format(curr_part.name, curr_part.index)
            parent_mode = prev_part.mode

            # Get an existing block
            if instance in parent.blocks:
                block = parent.blocks[instance]

            # Create a new block with a placeholder (unique) name
            else:
                block = packed_netlist.Block(name="", instance=instance, parent=parent)
                block.name = "block@{:08X}".format(id(block))
                parent.blocks[instance] = block

            # Set / verify operating mode of the parent
            if parent_mode is not None:
                assert parent.mode in [None, parent_mode], (parent.mode, parent_mode)
                parent.mode = parent_mode

            # Next level
            parent = block

    # Check if the CLB got created
    assert clb_block is not None

    # Add open blocks for unused children of used parents.
    for node in clb_graph.nodes.values():

        # Consider only nodes without nets
        if node.net:
            continue

        # Disassemble node path parts
        parts = node.path.split(".")
        parts = [PathNode.from_string(p) for p in parts]

        # Too shallow to have a parent block within the CLB
        if len(parts) < 3:
            continue

        # Find parent block
        parent = get_block_by_path(clb_block, parts[1:-2])
        if not parent:
            continue

        # Operating mode of the parent must match
        if parent.mode != parts[-3].mode:
            continue

        # Check if the parent contains a block corresponding to this
        # node already.
        curr_part = parts[-2]
        instance = "{}[{}]".format(curr_part.name, curr_part.index)

        if instance in parent.blocks:
            continue

        # Create an open block
        block = packed_netlist.Block(name="open", instance=instance, parent=parent)
        parent.blocks[block.instance] = block

    # Add block ports and their connections
    for node in clb_graph.nodes.values():

        # Disassemble node path parts
        parts = node.path.split(".")
        parts = [PathNode.from_string(p) for p in parts]

        # Find the block. If not found it means that it is not active and
        # shouldn't be present in the netlist
        block = get_block_by_path(clb_block, parts[1:-1])
        if block is None:
            continue

        # The block is open, skip
        if block.is_open:
            continue

        # Get the port, add it if not present
        port_name = parts[-1].name
        port_type = node.port_type.name.lower()
        if port_name not in block.ports:

            # The relevant information (e.g. width) will be updated as more
            # nodes get discovered.
            port = packed_netlist.Port(name=port_name, type=port_type)
            block.ports[port_name] = port

        else:

            port = block.ports[port_name]
            assert port.type == port_type, (port.type, port_type)

        # Extend the port width if necessary
        bit_index = parts[-1].index
        port.width = max(port.width, bit_index + 1)

        # The port is not active, nothing more to be done here
        if node.net is None:
            continue

        # Identify driver of the port. At most one upstream edge is allowed.
        driver_node = None
        driver_conn = None
        if node.id in nodes_up:
            assert len(nodes_up[node.id]) <= 1, node.path
            driver_node, driver_conn = next(iter(nodes_up[node.id]))

        # Got a driver, this is an intermediate port
        if driver_node is not None:

            # Get the driver pb_type and port (last two path elements)
            driver_path = clb_graph.nodes[driver_node].path.split(".")
            driver_path = [PathNode.from_string(driver_path[i]) for i in [-2, -1]]

            # When a connection refers to the immediate parent do not include
            # block index suffix
            driver_instance = driver_path[0].name
            if block.parent is None or block.parent.type != driver_path[0].name:
                driver_instance += "[{}]".format(driver_path[0].index)

            # Add the connection
            port.connections[bit_index] = packed_netlist.Connection(
                driver_instance, driver_path[1].name, driver_path[1].index, driver_conn
            )

        # No driver, this is a source pin - store the net name directly
        else:
            port.connections[bit_index] = node.net

    # Assign names to leaf blocks
    def leaf_walk(block):

        # A used leaf
        if block.is_leaf and not block.is_open:

            # Identify all output pins that drive nets
            nets = []
            for port in block.ports.values():
                if port.type == "output":
                    for net in port.connections.values():
                        if isinstance(net, str):
                            nets.append(net)

            # No nets driven, this is an output pad
            if not nets:
                assert "outpad" in block.ports

                port = block.ports["outpad"]
                assert port.type == "input", port
                assert port.width == 1, port

                net = block.find_net_for_port("outpad", 0)
                nets = ["out:" + net]

            # Build block name and assign it
            if nets:
                block.name = "_".join(nets)

        # Recurse
        for child in block.blocks.values():
            leaf_walk(child)

    leaf_walk(clb_block)

    # Return the top-level CLB block
    return clb_block
|
237
f4pga/utils/quicklogic/repacker/pb_rr_graph_router.py
Normal file
237
f4pga/utils/quicklogic/repacker/pb_rr_graph_router.py
Normal file
|
@ -0,0 +1,237 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
"""
|
||||
This file implements a simple graph router used for complex block routing
|
||||
graphs.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from f4pga.utils.quicklogic.repacker.pb_rr_graph import NodeType
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class Net:
|
||||
"""
|
||||
This class represents a net for the router. It holds the driver (source)
|
||||
node as well as all sink node ids.
|
||||
"""
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
self.sources = set()
|
||||
self.sinks = set()
|
||||
|
||||
self.is_routed = False
|
||||
|
||||
def __str__(self):
|
||||
return "{}, {} sources, max_fanout={}, {}".format(
|
||||
self.name, len(self.sources), len(self.sinks), "routed" if self.is_routed else "unrouted"
|
||||
)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class Router:
|
||||
"""
|
||||
Simple graph router.
|
||||
|
||||
Currently the routed does a greedy depth-first routing. Each time a path
|
||||
from a sink to a source is found it gets immediately annotated with nets.
|
||||
"""
|
||||
|
||||
def __init__(self, graph):
|
||||
self.graph = graph
|
||||
self.nets = {}
|
||||
|
||||
# Discover nets from the graph
|
||||
self.discover_nets()
|
||||
|
||||
def discover_nets(self):
|
||||
"""
|
||||
Scans the graph looking for nets
|
||||
"""
|
||||
|
||||
# TODO: For now look for nets only assuming that all of them are
|
||||
# unrouted.
|
||||
sources = {}
|
||||
sinks = {}
|
||||
|
||||
for node in self.graph.nodes.values():
|
||||
|
||||
if node.type not in [NodeType.SOURCE, NodeType.SINK]:
|
||||
continue
|
||||
|
||||
# No net
|
||||
if node.net is None:
|
||||
continue
|
||||
|
||||
# Got a source
|
||||
if node.type == NodeType.SOURCE:
|
||||
if node.net not in sources:
|
||||
sources[node.net] = set()
|
||||
sources[node.net].add(node.id)
|
||||
|
||||
# Got a sink
|
||||
elif node.type == NodeType.SINK:
|
||||
if node.net not in sinks:
|
||||
sinks[node.net] = set()
|
||||
sinks[node.net].add(node.id)
|
||||
|
||||
# Make nets
|
||||
nets = set(sinks.keys()) | set(sources.keys())
|
||||
for net_name in nets:
|
||||
|
||||
net = Net(net_name)
|
||||
|
||||
# A net may or may not have a source node(s). If there are no
|
||||
# sources then one will be created during routing when a route
|
||||
# reaches a node of the top-level CLB.
|
||||
if net_name in sources:
|
||||
net.sources = sources[net_name]
|
||||
|
||||
# A net may or may not have at leas one sink node. If there are
|
||||
# no sinks then no routing will be done.
|
||||
if net_name in sinks:
|
||||
net.sinks = sinks[net_name]
|
||||
|
||||
self.nets[net_name] = net
|
||||
|
||||
# DEBUG
|
||||
logging.debug(" Nets:")
|
||||
keys = sorted(list(self.nets.keys()))
|
||||
for key in keys:
|
||||
logging.debug(" " + str(self.nets[key]))
|
||||
|
||||
def route_net(self, net, debug=False):
|
||||
"""
|
||||
Routes a single net.
|
||||
"""
|
||||
|
||||
top_level_sources = set()
|
||||
|
||||
def walk_depth_first(node, curr_route=None):
|
||||
|
||||
# FIXME: Two possible places for optimization:
|
||||
# - create an edge lookup list indexed by dst node ids
|
||||
# - do not copy the current route list for each recursion level
|
||||
|
||||
# Track the route
|
||||
if not curr_route:
|
||||
curr_route = []
|
||||
curr_route.append(node.id)
|
||||
|
||||
# We've hit a node of the same net. Finish.
|
||||
if node.type in [NodeType.SOURCE, NodeType.PORT]:
|
||||
if node.net == net.name:
|
||||
return curr_route
|
||||
|
||||
# The node is aleady occupied by a different net
|
||||
if node.net is not None:
|
||||
if node.net != net.name:
|
||||
return None
|
||||
|
||||
# This node is a free source node. If it is a top-level source then
|
||||
# store it.
|
||||
if node.type == NodeType.SOURCE and node.net is None:
|
||||
if node.path.count(".") == 1:
|
||||
top_level_sources.add(node.id)
|
||||
|
||||
# Check all incoming edges
|
||||
for edge in self.graph.edges:
|
||||
if edge.dst_id == node.id:
|
||||
|
||||
# Recurse
|
||||
next_node = self.graph.nodes[edge.src_id]
|
||||
route = walk_depth_first(next_node, list(curr_route))
|
||||
|
||||
# We have a route, terminate
|
||||
if route:
|
||||
return route
|
||||
|
||||
return None
|
||||
|
||||
# Search for a route
|
||||
logging.debug(" " + net.name)
|
||||
|
||||
# This net has no sinks. Remove annotation from the source nodes
|
||||
if not net.sinks:
|
||||
for node_id in net.sources:
|
||||
node = self.graph.nodes[node_id]
|
||||
node.net = None
|
||||
|
||||
# Route all sinks to any of the net sources
|
||||
for sink in net.sinks:
|
||||
|
||||
# Find the route
|
||||
node = self.graph.nodes[sink]
|
||||
|
||||
top_level_sources = set()
|
||||
route = walk_depth_first(node)
|
||||
|
||||
# No route found. Check if we have some free top-level ports that
|
||||
# we can use.
|
||||
if not route and top_level_sources:
|
||||
|
||||
# Use the frist one
|
||||
top_node_id = next(iter(top_level_sources))
|
||||
top_node = self.graph.nodes[top_node_id]
|
||||
top_node.net = net.name
|
||||
|
||||
# Retry
|
||||
route = walk_depth_first(node)
|
||||
|
||||
# No route found
|
||||
if not route:
|
||||
logging.critical(" No route found!")
|
||||
|
||||
# DEBUG
|
||||
if debug:
|
||||
with open("unrouted.dot", "w") as fp:
|
||||
fp.write(self.graph.dump_dot(color_by="net", highlight_nodes=net.sources | set([sink])))
|
||||
|
||||
# Raise an exception
|
||||
raise RuntimeError(
|
||||
"Unroutable net '{}' from {} to {}".format(
|
||||
net.name, [self.graph.nodes[node_id] for node_id in net.sources], self.graph.nodes[sink]
|
||||
)
|
||||
)
|
||||
|
||||
# Annotate all nodes of the route
|
||||
for node_id in route:
|
||||
self.graph.nodes[node_id].net = net.name
|
||||
|
||||
# Success
|
||||
net.is_routed = True
|
||||
|
||||
def route_nets(self, nets=None, debug=False):
|
||||
"""
|
||||
Routes net with specified names or all nets if no names are given.
|
||||
"""
|
||||
|
||||
# Use all if explicit list not provided
|
||||
if nets is None:
|
||||
nets = sorted(list(self.nets.keys()))
|
||||
|
||||
# Route nets
|
||||
for net_name in nets:
|
||||
self.route_net(self.nets[net_name], debug=debug)
|
438
f4pga/utils/quicklogic/repacker/pb_type.py
Normal file
438
f4pga/utils/quicklogic/repacker/pb_type.py
Normal file
|
@ -0,0 +1,438 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
"""
|
||||
Utilities for representing pb_type hierarchy as defined in VPR architecture
|
||||
"""
|
||||
import re
|
||||
from copy import deepcopy
|
||||
from enum import Enum
|
||||
|
||||
from f4pga.utils.quicklogic.repacker.arch_xml_utils import is_leaf_pbtype
|
||||
|
||||
from f4pga.utils.quicklogic.repacker.block_path import PathNode
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class PortType(Enum):
|
||||
"""
|
||||
Type and direction of a pb_type port.
|
||||
"""
|
||||
|
||||
UNSPEC = 0
|
||||
INPUT = 1
|
||||
OUTPUT = 2
|
||||
CLOCK = 3
|
||||
|
||||
@staticmethod
|
||||
def from_string(s):
|
||||
"""
|
||||
Convert a string to its corresponding enumerated value
|
||||
"""
|
||||
s = s.lower()
|
||||
|
||||
if s == "input":
|
||||
return PortType.INPUT
|
||||
elif s == "output":
|
||||
return PortType.OUTPUT
|
||||
elif s == "clock":
|
||||
return PortType.CLOCK
|
||||
else:
|
||||
assert False, s
|
||||
|
||||
|
||||
class Port:
|
||||
"""
|
||||
A port of pb_type
|
||||
"""
|
||||
|
||||
def __init__(self, type, name, width=1, cls=None):
|
||||
"""
|
||||
Basic constructor
|
||||
"""
|
||||
self.type = type
|
||||
self.name = name
|
||||
self.width = width
|
||||
self.cls = cls
|
||||
|
||||
@staticmethod
|
||||
def from_etree(elem):
|
||||
"""
|
||||
Create the object from its ElementTree representation
|
||||
"""
|
||||
assert elem.tag in ["input", "output", "clock"], elem.tag
|
||||
|
||||
# Create the port
|
||||
port = Port(
|
||||
type=PortType.from_string(elem.tag),
|
||||
name=elem.attrib["name"],
|
||||
width=int(elem.get("num_pins", "1")),
|
||||
cls=elem.get("port_class", None),
|
||||
)
|
||||
|
||||
return port
|
||||
|
||||
def yield_pins(self, range_spec=None):
|
||||
"""
|
||||
Yields pin names given index range specification. If the range is
|
||||
not given then yields all pins.
|
||||
"""
|
||||
|
||||
# Selected pins
|
||||
if range_spec is not None:
|
||||
|
||||
# TODO: Compile the regex upfront
|
||||
match = re.fullmatch(r"((?P<i1>[0-9]+):)?(?P<i0>[0-9]+)", range_spec)
|
||||
assert match is not None, range_spec
|
||||
|
||||
i0 = int(match.group("i0"))
|
||||
i1 = int(match.group("i1")) if match.group("i1") else None
|
||||
|
||||
assert i0 < self.width, range_spec
|
||||
|
||||
# Range
|
||||
if i1 is not None:
|
||||
assert i1 < self.width, range_spec
|
||||
|
||||
if i1 > i0:
|
||||
indices = range(i0, i1 + 1)
|
||||
elif i1 < i0:
|
||||
# Do not yield in reverse order when indices are reversed
|
||||
indices = range(i1, i0 + 1)
|
||||
else:
|
||||
indices = [i0]
|
||||
|
||||
# Single number
|
||||
else:
|
||||
indices = [i0]
|
||||
|
||||
# All pins
|
||||
else:
|
||||
indices = range(self.width)
|
||||
|
||||
# Yield names
|
||||
for i in indices:
|
||||
yield (self.name, i)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class Model:
|
||||
"""
|
||||
A leaf cell model.
|
||||
|
||||
VPR architecture XML does not store port widths along with models so
|
||||
here we build the list of models from leaf pb_types.
|
||||
"""
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.ports = {}
|
||||
|
||||
@property
|
||||
def blif_model(self):
|
||||
"""
|
||||
Returns a BLIF model statement for this model.
|
||||
"""
|
||||
if self.name[0] != ".":
|
||||
return ".subckt {}".format(self.name)
|
||||
return self.name
|
||||
|
||||
@staticmethod
|
||||
def collect_models(root_pbtype):
|
||||
"""
|
||||
Recursively walks over the pb_type hierarchy and collects models
|
||||
"""
|
||||
|
||||
models = {}
|
||||
|
||||
def walk(pb_type):
|
||||
|
||||
# This is a mode, recurse
|
||||
if isinstance(pb_type, Mode):
|
||||
for child in pb_type.pb_types.values():
|
||||
walk(child)
|
||||
|
||||
# This is a pb_type. Make a model if it is a leaf
|
||||
elif isinstance(pb_type, PbType):
|
||||
|
||||
# Not a leaf, recurse for modes
|
||||
if not pb_type.is_leaf:
|
||||
for mode in pb_type.modes.values():
|
||||
walk(mode)
|
||||
return
|
||||
|
||||
# Get BLIF model
|
||||
assert pb_type.blif_model is not None, pb_type.name
|
||||
blif_model = pb_type.blif_model.split(maxsplit=1)[-1]
|
||||
|
||||
# FIXME: Skip built-ins for now
|
||||
if blif_model[0] == ".":
|
||||
return
|
||||
|
||||
# Already have that one
|
||||
# TODO: Check if ports match!
|
||||
if blif_model in models:
|
||||
return
|
||||
|
||||
# Build the model
|
||||
model = Model(blif_model)
|
||||
model.ports = deepcopy(pb_type.ports)
|
||||
models[model.name] = model
|
||||
|
||||
else:
|
||||
assert False, type(pb_type)
|
||||
|
||||
# Walk from the given root pb_type and create models
|
||||
walk(root_pbtype)
|
||||
return models
|
||||
|
||||
def __str__(self):
|
||||
string = self.name
|
||||
for port in self.ports.values():
|
||||
string += " {}:{}[{}:0]".format(port.type.name[0].upper(), port.name, port.width - 1)
|
||||
return string
|
||||
|
||||
def __repr__(self):
|
||||
return str(self)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class PbType:
|
||||
"""
|
||||
A pb_type
|
||||
"""
|
||||
|
||||
def __init__(self, name, num_pb=1, cls=None):
|
||||
"""
|
||||
Basic constructor
|
||||
"""
|
||||
self.name = name
|
||||
self.num_pb = num_pb
|
||||
self.cls = cls
|
||||
self.blif_model = None
|
||||
|
||||
# Parent (a Mode or None)
|
||||
self.parent = None
|
||||
# Modes (indexed by name)
|
||||
self.modes = {}
|
||||
|
||||
# Ports (indexed by name)
|
||||
self.ports = {}
|
||||
|
||||
@property
|
||||
def is_leaf(self):
|
||||
"""
|
||||
Returns True if this pb_type is a leaf
|
||||
"""
|
||||
if "default" in self.modes and len(self.modes) == 1:
|
||||
return len(self.modes["default"].pb_types) == 0
|
||||
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def from_etree(elem):
|
||||
"""
|
||||
Create the object from its ElementTree representation
|
||||
"""
|
||||
assert elem.tag == "pb_type", elem.tag
|
||||
|
||||
# Create the pb_type
|
||||
name = elem.attrib["name"]
|
||||
num_pb = int(elem.get("num_pb", "1"))
|
||||
cls = elem.get("class", None)
|
||||
|
||||
pb_type = PbType(name, num_pb, cls)
|
||||
|
||||
# BLIF model
|
||||
pb_type.blif_model = elem.get("blif_model", None)
|
||||
|
||||
# Identify all modes. If there is none then add the default
|
||||
# implicit one.
|
||||
xml_modes = {x.attrib["name"]: x for x in elem.findall("mode")}
|
||||
if not xml_modes:
|
||||
xml_modes = {"default": elem}
|
||||
|
||||
# Build modes
|
||||
pb_type.modes = {}
|
||||
for name, xml_mode in xml_modes.items():
|
||||
mode = Mode.from_etree(xml_mode)
|
||||
mode.parent = pb_type
|
||||
pb_type.modes[mode.name] = mode
|
||||
|
||||
# Build ports
|
||||
for xml_port in elem:
|
||||
if xml_port.tag in ["input", "output", "clock"]:
|
||||
|
||||
port = Port.from_etree(xml_port)
|
||||
pb_type.ports[port.name] = port
|
||||
|
||||
# This is a native LUT leaf pb_type. Add one more level of hierarchy
|
||||
if is_leaf_pbtype(elem) and cls == "lut":
|
||||
|
||||
# Rename the default mode so that it matches the pb_type name
|
||||
mode = pb_type.modes["default"]
|
||||
mode.name = pb_type.name
|
||||
pb_type.modes = {mode.name: mode}
|
||||
|
||||
# Add a child pb_type named "lut" to the mode
|
||||
child = PbType(name="lut", cls="lut")
|
||||
child.blif_model = ".names"
|
||||
child.parent = mode
|
||||
mode.pb_types[child.name] = child
|
||||
|
||||
# Add a default mode to the child pb_type
|
||||
mode = Mode("default")
|
||||
mode.parent = child
|
||||
child.modes[mode.name] = mode
|
||||
|
||||
# Copy ports from the current pb_type
|
||||
child.ports = deepcopy(pb_type.ports)
|
||||
|
||||
return pb_type
|
||||
|
||||
def yield_port_pins(self, port_spec):
|
||||
"""
|
||||
Given a port specification string yields its pin names
|
||||
"""
|
||||
|
||||
# TODO: Compile the regex upfront
|
||||
match = re.fullmatch(r"(?P<port>[^\s\[\]\.]+)(\[(?P<bits>[^\s\[\]]+)\])?", port_spec)
|
||||
assert match is not None, port_spec
|
||||
|
||||
port = match.group("port")
|
||||
bits = match.group("bits")
|
||||
|
||||
# Find the port
|
||||
assert port in self.ports, (self.name, port)
|
||||
port = self.ports[port]
|
||||
|
||||
# Yield the bits
|
||||
yield from port.yield_pins(bits)
|
||||
|
||||
def find(self, path):
|
||||
"""
|
||||
Finds a pb_type or a mode given its hierarchical path.
|
||||
"""
|
||||
|
||||
# Split the path or assume this is an already splitted list.
|
||||
if isinstance(path, str):
|
||||
path = path.split(".")
|
||||
path = [PathNode.from_string(p) for p in path]
|
||||
else:
|
||||
assert isinstance(path, list), type(path)
|
||||
|
||||
# Walk the hierarchy along the path
|
||||
pbtype = self
|
||||
while True:
|
||||
|
||||
# Pop a node from the path
|
||||
part = path[0]
|
||||
path = path[1:]
|
||||
|
||||
# The path node must not have an index
|
||||
assert part.index is None, part
|
||||
|
||||
# Check name
|
||||
if part.name != pbtype.name:
|
||||
break
|
||||
|
||||
# Explicit mode
|
||||
if part.mode is not None:
|
||||
if part.mode not in pbtype.modes:
|
||||
break
|
||||
mode = pbtype.modes[part.mode]
|
||||
|
||||
# No more path, return the mode
|
||||
if not path:
|
||||
return mode
|
||||
|
||||
# Mode not given
|
||||
else:
|
||||
|
||||
# No more path, return the pb_type
|
||||
if not path:
|
||||
return pbtype
|
||||
|
||||
# Get the implicit mode
|
||||
if len(pbtype.modes) > 1:
|
||||
break
|
||||
mode = next(iter(pbtype.modes.values()))
|
||||
|
||||
# Find the child pb_type
|
||||
part = path[0]
|
||||
if part.name not in mode.pb_types:
|
||||
break
|
||||
|
||||
pbtype = mode.pb_types[part.name]
|
||||
|
||||
# Not found
|
||||
return None
|
||||
|
||||
|
||||
class Mode:
|
||||
"""
|
||||
A mode of a pb_type
|
||||
"""
|
||||
|
||||
def __init__(self, name):
|
||||
"""
|
||||
Basic constructor
|
||||
"""
|
||||
self.name = name
|
||||
|
||||
# Parent (a PbType or None)
|
||||
self.parent = None
|
||||
# pb_types indexed by names
|
||||
self.pb_types = {}
|
||||
|
||||
def yield_children(self):
|
||||
"""
|
||||
Yields all child pb_types and their indices taking into account num_pb.
|
||||
"""
|
||||
for child in self.pb_types.values():
|
||||
for i in range(child.num_pb):
|
||||
yield child, i
|
||||
|
||||
@staticmethod
|
||||
def from_etree(elem):
|
||||
"""
|
||||
Create the object from its ElementTree representation
|
||||
"""
|
||||
assert elem.tag in ["mode", "pb_type"], elem.tag
|
||||
|
||||
# This element refers to a pb_type so it is the default mode
|
||||
if elem.tag == "pb_type":
|
||||
name = "default"
|
||||
else:
|
||||
name = elem.attrib["name"]
|
||||
|
||||
# Create the mode
|
||||
mode = Mode(name)
|
||||
|
||||
# Build child pb_types
|
||||
for xml_pbtype in elem.findall("pb_type"):
|
||||
pb_type = PbType.from_etree(xml_pbtype)
|
||||
pb_type.parent = mode
|
||||
mode.pb_types[pb_type.name] = pb_type
|
||||
|
||||
return mode
|
1421
f4pga/utils/quicklogic/repacker/repack.py
Executable file
1421
f4pga/utils/quicklogic/repacker/repack.py
Executable file
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,85 @@
|
|||
.model top
|
||||
.inputs clk
|
||||
.outputs led[0] led[1] led[2] led[3]
|
||||
.names $false
|
||||
.names $true
|
||||
1
|
||||
.names $undef
|
||||
.names cnt[1] cnt[0] $auto$alumacc.cc:485:replace_alu$5.Y[1]
|
||||
01 1
|
||||
10 1
|
||||
.names cnt[2] cnt[1] cnt[0] $auto$alumacc.cc:485:replace_alu$5.Y[2]
|
||||
011 1
|
||||
100 1
|
||||
101 1
|
||||
110 1
|
||||
.names cnt[3] cnt[2] cnt[1] cnt[0] $auto$alumacc.cc:485:replace_alu$5.Y[3]
|
||||
0111 1
|
||||
1000 1
|
||||
1001 1
|
||||
1010 1
|
||||
1011 1
|
||||
1100 1
|
||||
1101 1
|
||||
1110 1
|
||||
.names led[0] $abc$215$new_n22_ $auto$alumacc.cc:485:replace_alu$5.Y[4]
|
||||
01 1
|
||||
10 1
|
||||
.names cnt[2] cnt[3] cnt[1] cnt[0] $abc$215$new_n22_
|
||||
1111 1
|
||||
.names led[1] led[0] $abc$215$new_n22_ $auto$alumacc.cc:485:replace_alu$5.Y[5]
|
||||
011 1
|
||||
100 1
|
||||
101 1
|
||||
110 1
|
||||
.names led[2] $abc$215$new_n25_ $abc$215$new_n22_ $auto$alumacc.cc:485:replace_alu$5.Y[6]
|
||||
011 1
|
||||
100 1
|
||||
101 1
|
||||
110 1
|
||||
.names led[0] led[1] $abc$215$new_n25_
|
||||
11 1
|
||||
.names led[3] led[2] $abc$215$new_n25_ $abc$215$new_n22_ $auto$alumacc.cc:485:replace_alu$5.Y[7]
|
||||
0111 1
|
||||
1000 1
|
||||
1001 1
|
||||
1010 1
|
||||
1011 1
|
||||
1100 1
|
||||
1101 1
|
||||
1110 1
|
||||
.names cnt[0] $auto$alumacc.cc:485:replace_alu$5.X[0]
|
||||
0 1
|
||||
.latch $auto$alumacc.cc:485:replace_alu$5.X[0] cnt[0] re clk 0
|
||||
.latch $auto$alumacc.cc:485:replace_alu$5.Y[1] cnt[1] re clk 0
|
||||
.latch $auto$alumacc.cc:485:replace_alu$5.Y[2] cnt[2] re clk 0
|
||||
.latch $auto$alumacc.cc:485:replace_alu$5.Y[3] cnt[3] re clk 0
|
||||
.latch $auto$alumacc.cc:485:replace_alu$5.Y[4] led[0] re clk 0
|
||||
.latch $auto$alumacc.cc:485:replace_alu$5.Y[5] led[1] re clk 0
|
||||
.latch $auto$alumacc.cc:485:replace_alu$5.Y[6] led[2] re clk 0
|
||||
.latch $auto$alumacc.cc:485:replace_alu$5.Y[7] led[3] re clk 0
|
||||
.names cnt[1] $auto$alumacc.cc:485:replace_alu$5.X[1]
|
||||
1 1
|
||||
.names cnt[2] $auto$alumacc.cc:485:replace_alu$5.X[2]
|
||||
1 1
|
||||
.names cnt[3] $auto$alumacc.cc:485:replace_alu$5.X[3]
|
||||
1 1
|
||||
.names led[0] $auto$alumacc.cc:485:replace_alu$5.X[4]
|
||||
1 1
|
||||
.names led[1] $auto$alumacc.cc:485:replace_alu$5.X[5]
|
||||
1 1
|
||||
.names led[2] $auto$alumacc.cc:485:replace_alu$5.X[6]
|
||||
1 1
|
||||
.names led[3] $auto$alumacc.cc:485:replace_alu$5.X[7]
|
||||
1 1
|
||||
.names $auto$alumacc.cc:485:replace_alu$5.X[0] $auto$alumacc.cc:485:replace_alu$5.Y[0]
|
||||
1 1
|
||||
.names led[0] cnt[4]
|
||||
1 1
|
||||
.names led[1] cnt[5]
|
||||
1 1
|
||||
.names led[2] cnt[6]
|
||||
1 1
|
||||
.names led[3] cnt[7]
|
||||
1 1
|
||||
.end
|
|
@ -0,0 +1,49 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
|
||||
from eblif_netlist import Eblif # noqa: E402
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def test_netlist_roundtrip():
|
||||
|
||||
basedir = os.path.dirname(__file__)
|
||||
golden_file = os.path.join(basedir, "netlist.golden.eblif")
|
||||
|
||||
# Load and parse the EBLIF file
|
||||
eblif = Eblif.from_file(golden_file)
|
||||
|
||||
with tempfile.TemporaryDirectory() as tempdir:
|
||||
|
||||
# Write the EBLIF back
|
||||
output_file = os.path.join(tempdir, "netlist.output.eblif")
|
||||
eblif.to_file(output_file)
|
||||
|
||||
# Compare the two files
|
||||
with open(golden_file, "r") as fp:
|
||||
golden_data = fp.read().rstrip()
|
||||
with open(output_file, "r") as fp:
|
||||
output_data = fp.read().rstrip()
|
||||
|
||||
assert golden_data == output_data
|
40
f4pga/utils/quicklogic/repacker/tests/identity.xsl
Normal file
40
f4pga/utils/quicklogic/repacker/tests/identity.xsl
Normal file
|
@ -0,0 +1,40 @@
|
|||
<?xml version="1.0"?>
|
||||
<!--
|
||||
Copyright (C) 2020 The SymbiFlow Authors.
|
||||
|
||||
Use of this source code is governed by a ISC-style
|
||||
license that can be found in the LICENSE file or at
|
||||
https://opensource.org/licenses/ISC
|
||||
|
||||
SPDX-License-Identifier: ISC
|
||||
-->
|
||||
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
|
||||
<xsl:output method="xml" indent="yes"/>
|
||||
<xsl:strip-space elements="*"/>
|
||||
|
||||
<xsl:template match="@*">
|
||||
<xsl:copy/>
|
||||
</xsl:template>
|
||||
|
||||
<xsl:template match="*">
|
||||
<xsl:copy>
|
||||
<xsl:apply-templates select="@*"/>
|
||||
<xsl:apply-templates/>
|
||||
</xsl:copy>
|
||||
</xsl:template>
|
||||
|
||||
<xsl:template match="text()|processing-instruction()">
|
||||
<xsl:copy>
|
||||
<xsl:apply-templates select="text()|processing-instruction()"/>
|
||||
</xsl:copy>
|
||||
</xsl:template>
|
||||
|
||||
<xsl:param name="strip_comments" select="''" />
|
||||
<xsl:template match="comment()">
|
||||
<xsl:choose>
|
||||
<xsl:when test="$strip_comments"></xsl:when>
|
||||
<xsl:otherwise><xsl:copy /></xsl:otherwise>
|
||||
</xsl:choose>
|
||||
</xsl:template>
|
||||
|
||||
</xsl:stylesheet>
|
|
@ -0,0 +1,6 @@
|
|||
.model top
|
||||
.inputs a
|
||||
.outputs o
|
||||
.names a o
|
||||
0 1
|
||||
.end
|
|
@ -0,0 +1,7 @@
|
|||
.model top
|
||||
.inputs a
|
||||
.outputs o
|
||||
.subckt frac_lut4_arith in[0]=a lut4_out=o
|
||||
.param LUT 1010101010101010
|
||||
.param MODE 0
|
||||
.end
|
156
f4pga/utils/quicklogic/repacker/tests/lut_padding/lut1_0.net
Normal file
156
f4pga/utils/quicklogic/repacker/tests/lut_padding/lut1_0.net
Normal file
|
@ -0,0 +1,156 @@
|
|||
<?xml version="1.0"?>
|
||||
<block name="top.net" instance="FPGA_packed_netlist[0]" architecture_id="SHA256:75d400c4d0f05a6b7a7cc26b5315f29d3cbc1d64579f5a3d3b40f77b1deed75c" atom_netlist_id="SHA256:bcfc8354052a41bff85e4c6f44f7a8c3b27a73ca9ed4db64341c3852c6d323c1">
|
||||
<inputs>a</inputs>
|
||||
<outputs>out:o</outputs>
|
||||
<clocks></clocks>
|
||||
<block name="o" instance="clb[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="I">open open open open open open open open open open open open open open a open open open open open open open open open</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="lreset">open</port>
|
||||
<port name="reset">open</port>
|
||||
<port name="preset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="O">open open open open open open open fle[7].out[0]->clbouts2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open open open open</port>
|
||||
</clocks>
|
||||
<block name="open" instance="fle[0]" />
|
||||
<block name="open" instance="fle[1]" />
|
||||
<block name="open" instance="fle[2]" />
|
||||
<block name="open" instance="fle[3]" />
|
||||
<block name="open" instance="fle[4]" />
|
||||
<block name="open" instance="fle[5]" />
|
||||
<block name="open" instance="fle[6]" />
|
||||
<block name="o" instance="fle[7]" mode="n1_lut4">
|
||||
<inputs>
|
||||
<port name="in">clb.I[14]->crossbar_fle[7].in[0] open open open</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
<port name="preset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ble4[0].out[0]->direct2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="o" instance="ble4[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="in">fle.in[0]->direct1 open open open</port>
|
||||
<port name="reset">open</port>
|
||||
<port name="preset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut4[0].out[0]->mux1</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="o" instance="lut4[0]" mode="lut4">
|
||||
<inputs>
|
||||
<port name="in">ble4.in[0]->direct1 open open open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut[0].out[0]->direct:lut4</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="o" instance="lut[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="in">lut4.in[0]->direct:lut4 open open open</port>
|
||||
<port_rotation_map name="in">0 open open open</port_rotation_map>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">o</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="open" instance="ff[0]" />
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="out:o" instance="io[1]" mode="io_output">
|
||||
<inputs>
|
||||
<port name="f2a_i">o</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="a2f_o">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open open open open</port>
|
||||
</clocks>
|
||||
<block name="out:o" instance="io_output[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="f2a_i">io.f2a_i[0]->io_output-f2a_i</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs />
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="open" instance="ff[0]" />
|
||||
<block name="out:o" instance="outpad[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="outpad">io_output.f2a_i[0]->mux1</port>
|
||||
</inputs>
|
||||
<outputs />
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="a" instance="io[2]" mode="io_input">
|
||||
<inputs>
|
||||
<port name="f2a_i">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="a2f_o">io_input[0].a2f_o[0]->io-a2f_o</port>
|
||||
<port name="sc_out">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open open open open</port>
|
||||
</clocks>
|
||||
<block name="a" instance="io_input[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="a2f_o">inpad[0].inpad[0]->mux2</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="a" instance="inpad[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs />
|
||||
<outputs>
|
||||
<port name="inpad">a</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
<block name="open" instance="ff[0]" />
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
|
@ -0,0 +1,7 @@
|
|||
.model top
|
||||
.inputs a
|
||||
.outputs o
|
||||
.subckt frac_lut4_arith in[1]=a lut4_out=o
|
||||
.param LUT 1100110011001100
|
||||
.param MODE 0
|
||||
.end
|
156
f4pga/utils/quicklogic/repacker/tests/lut_padding/lut1_1.net
Normal file
156
f4pga/utils/quicklogic/repacker/tests/lut_padding/lut1_1.net
Normal file
|
@ -0,0 +1,156 @@
|
|||
<?xml version="1.0"?>
|
||||
<block name="top.net" instance="FPGA_packed_netlist[0]" architecture_id="SHA256:75d400c4d0f05a6b7a7cc26b5315f29d3cbc1d64579f5a3d3b40f77b1deed75c" atom_netlist_id="SHA256:bcfc8354052a41bff85e4c6f44f7a8c3b27a73ca9ed4db64341c3852c6d323c1">
|
||||
<inputs>a</inputs>
|
||||
<outputs>out:o</outputs>
|
||||
<clocks></clocks>
|
||||
<block name="o" instance="clb[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="I">open open open open open open open open open open open open open open a open open open open open open open open open</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="lreset">open</port>
|
||||
<port name="reset">open</port>
|
||||
<port name="preset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="O">open open open open open open open fle[7].out[0]->clbouts2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open open open open</port>
|
||||
</clocks>
|
||||
<block name="open" instance="fle[0]" />
|
||||
<block name="open" instance="fle[1]" />
|
||||
<block name="open" instance="fle[2]" />
|
||||
<block name="open" instance="fle[3]" />
|
||||
<block name="open" instance="fle[4]" />
|
||||
<block name="open" instance="fle[5]" />
|
||||
<block name="open" instance="fle[6]" />
|
||||
<block name="o" instance="fle[7]" mode="n1_lut4">
|
||||
<inputs>
|
||||
<port name="in">open clb.I[14]->crossbar_fle[7].in[1] open open</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
<port name="preset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ble4[0].out[0]->direct2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="o" instance="ble4[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="in">open fle.in[1]->direct1 open open</port>
|
||||
<port name="reset">open</port>
|
||||
<port name="preset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut4[0].out[0]->mux1</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="o" instance="lut4[0]" mode="lut4">
|
||||
<inputs>
|
||||
<port name="in">open ble4.in[1]->direct1 open open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut[0].out[0]->direct:lut4</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="o" instance="lut[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="in">open lut4.in[1]->direct:lut4 open open</port>
|
||||
<port_rotation_map name="in">open 0 open open</port_rotation_map>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">o</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="open" instance="ff[0]" />
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="out:o" instance="io[1]" mode="io_output">
|
||||
<inputs>
|
||||
<port name="f2a_i">o</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="a2f_o">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open open open open</port>
|
||||
</clocks>
|
||||
<block name="out:o" instance="io_output[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="f2a_i">io.f2a_i[0]->io_output-f2a_i</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs />
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="open" instance="ff[0]" />
|
||||
<block name="out:o" instance="outpad[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="outpad">io_output.f2a_i[0]->mux1</port>
|
||||
</inputs>
|
||||
<outputs />
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="a" instance="io[2]" mode="io_input">
|
||||
<inputs>
|
||||
<port name="f2a_i">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="a2f_o">io_input[0].a2f_o[0]->io-a2f_o</port>
|
||||
<port name="sc_out">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open open open open</port>
|
||||
</clocks>
|
||||
<block name="a" instance="io_input[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="a2f_o">inpad[0].inpad[0]->mux2</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="a" instance="inpad[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs />
|
||||
<outputs>
|
||||
<port name="inpad">a</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
<block name="open" instance="ff[0]" />
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
|
@ -0,0 +1,7 @@
|
|||
.model top
|
||||
.inputs a
|
||||
.outputs o
|
||||
.subckt frac_lut4_arith in[2]=a lut4_out=o
|
||||
.param LUT 1111000011110000
|
||||
.param MODE 0
|
||||
.end
|
156
f4pga/utils/quicklogic/repacker/tests/lut_padding/lut1_2.net
Normal file
156
f4pga/utils/quicklogic/repacker/tests/lut_padding/lut1_2.net
Normal file
|
@ -0,0 +1,156 @@
|
|||
<?xml version="1.0"?>
|
||||
<block name="top.net" instance="FPGA_packed_netlist[0]" architecture_id="SHA256:75d400c4d0f05a6b7a7cc26b5315f29d3cbc1d64579f5a3d3b40f77b1deed75c" atom_netlist_id="SHA256:bcfc8354052a41bff85e4c6f44f7a8c3b27a73ca9ed4db64341c3852c6d323c1">
|
||||
<inputs>a</inputs>
|
||||
<outputs>out:o</outputs>
|
||||
<clocks></clocks>
|
||||
<block name="o" instance="clb[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="I">open open open open open open open open open open open open open open a open open open open open open open open open</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="lreset">open</port>
|
||||
<port name="reset">open</port>
|
||||
<port name="preset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="O">open open open open open open open fle[7].out[0]->clbouts2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open open open open</port>
|
||||
</clocks>
|
||||
<block name="open" instance="fle[0]" />
|
||||
<block name="open" instance="fle[1]" />
|
||||
<block name="open" instance="fle[2]" />
|
||||
<block name="open" instance="fle[3]" />
|
||||
<block name="open" instance="fle[4]" />
|
||||
<block name="open" instance="fle[5]" />
|
||||
<block name="open" instance="fle[6]" />
|
||||
<block name="o" instance="fle[7]" mode="n1_lut4">
|
||||
<inputs>
|
||||
<port name="in">open open clb.I[14]->crossbar_fle[7].in[2] open</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
<port name="preset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ble4[0].out[0]->direct2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="o" instance="ble4[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="in">open open fle.in[2]->direct1 open</port>
|
||||
<port name="reset">open</port>
|
||||
<port name="preset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut4[0].out[0]->mux1</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="o" instance="lut4[0]" mode="lut4">
|
||||
<inputs>
|
||||
<port name="in">open open ble4.in[2]->direct1 open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut[0].out[0]->direct:lut4</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="o" instance="lut[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="in">open open lut4.in[2]->direct:lut4 open</port>
|
||||
<port_rotation_map name="in">open open 0 open</port_rotation_map>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">o</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="open" instance="ff[0]" />
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="out:o" instance="io[1]" mode="io_output">
|
||||
<inputs>
|
||||
<port name="f2a_i">o</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="a2f_o">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open open open open</port>
|
||||
</clocks>
|
||||
<block name="out:o" instance="io_output[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="f2a_i">io.f2a_i[0]->io_output-f2a_i</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs />
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="open" instance="ff[0]" />
|
||||
<block name="out:o" instance="outpad[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="outpad">io_output.f2a_i[0]->mux1</port>
|
||||
</inputs>
|
||||
<outputs />
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="a" instance="io[2]" mode="io_input">
|
||||
<inputs>
|
||||
<port name="f2a_i">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="a2f_o">io_input[0].a2f_o[0]->io-a2f_o</port>
|
||||
<port name="sc_out">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open open open open</port>
|
||||
</clocks>
|
||||
<block name="a" instance="io_input[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="a2f_o">inpad[0].inpad[0]->mux2</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="a" instance="inpad[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs />
|
||||
<outputs>
|
||||
<port name="inpad">a</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
<block name="open" instance="ff[0]" />
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
|
@ -0,0 +1,7 @@
|
|||
.model top
|
||||
.inputs a
|
||||
.outputs o
|
||||
.subckt frac_lut4_arith in[3]=a lut4_out=o
|
||||
.param LUT 1111111100000000
|
||||
.param MODE 0
|
||||
.end
|
156
f4pga/utils/quicklogic/repacker/tests/lut_padding/lut1_3.net
Normal file
156
f4pga/utils/quicklogic/repacker/tests/lut_padding/lut1_3.net
Normal file
|
@ -0,0 +1,156 @@
|
|||
<?xml version="1.0"?>
|
||||
<block name="top.net" instance="FPGA_packed_netlist[0]" architecture_id="SHA256:75d400c4d0f05a6b7a7cc26b5315f29d3cbc1d64579f5a3d3b40f77b1deed75c" atom_netlist_id="SHA256:bcfc8354052a41bff85e4c6f44f7a8c3b27a73ca9ed4db64341c3852c6d323c1">
|
||||
<inputs>a</inputs>
|
||||
<outputs>out:o</outputs>
|
||||
<clocks></clocks>
|
||||
<block name="o" instance="clb[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="I">open open open open open open open open open open open open open open a open open open open open open open open open</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="lreset">open</port>
|
||||
<port name="reset">open</port>
|
||||
<port name="preset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="O">open open open open open open open fle[7].out[0]->clbouts2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open open open open</port>
|
||||
</clocks>
|
||||
<block name="open" instance="fle[0]" />
|
||||
<block name="open" instance="fle[1]" />
|
||||
<block name="open" instance="fle[2]" />
|
||||
<block name="open" instance="fle[3]" />
|
||||
<block name="open" instance="fle[4]" />
|
||||
<block name="open" instance="fle[5]" />
|
||||
<block name="open" instance="fle[6]" />
|
||||
<block name="o" instance="fle[7]" mode="n1_lut4">
|
||||
<inputs>
|
||||
<port name="in">open open open clb.I[14]->crossbar_fle[7].in[3]</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
<port name="preset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ble4[0].out[0]->direct2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="o" instance="ble4[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="in">open open open fle.in[3]->direct1</port>
|
||||
<port name="reset">open</port>
|
||||
<port name="preset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut4[0].out[0]->mux1</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="o" instance="lut4[0]" mode="lut4">
|
||||
<inputs>
|
||||
<port name="in">open open open ble4.in[3]->direct1</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut[0].out[0]->direct:lut4</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="o" instance="lut[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="in">open open open lut4.in[3]->direct:lut4</port>
|
||||
<port_rotation_map name="in">open open open 0</port_rotation_map>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">o</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="open" instance="ff[0]" />
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="out:o" instance="io[1]" mode="io_output">
|
||||
<inputs>
|
||||
<port name="f2a_i">o</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="a2f_o">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open open open open</port>
|
||||
</clocks>
|
||||
<block name="out:o" instance="io_output[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="f2a_i">io.f2a_i[0]->io_output-f2a_i</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs />
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="open" instance="ff[0]" />
|
||||
<block name="out:o" instance="outpad[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="outpad">io_output.f2a_i[0]->mux1</port>
|
||||
</inputs>
|
||||
<outputs />
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="a" instance="io[2]" mode="io_input">
|
||||
<inputs>
|
||||
<port name="f2a_i">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="a2f_o">io_input[0].a2f_o[0]->io-a2f_o</port>
|
||||
<port name="sc_out">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open open open open</port>
|
||||
</clocks>
|
||||
<block name="a" instance="io_input[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="a2f_o">inpad[0].inpad[0]->mux2</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="a" instance="inpad[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs />
|
||||
<outputs>
|
||||
<port name="inpad">a</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
<block name="open" instance="ff[0]" />
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
|
@ -0,0 +1,93 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
import sys
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
import pytest
|
||||
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
|
||||
from repack import main as repack_main # noqa: E402
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"lut_width, lut_inputs",
|
||||
[
|
||||
("1", "0"),
|
||||
("1", "1"),
|
||||
("1", "2"),
|
||||
("1", "3"),
|
||||
],
|
||||
)
|
||||
def test_lut_padding(monkeypatch, lut_width, lut_inputs):
|
||||
"""
|
||||
Tests repacking of a single lut with some inputs unconnected. Verifies
|
||||
results against golden references.
|
||||
"""
|
||||
|
||||
basedir = os.path.dirname(__file__)
|
||||
|
||||
qlfpga_plugins = os.path.join(
|
||||
basedir, "..", "..", "..", "..", "..", "..", "build", "quicklogic", "third_party", "qlfpga-symbiflow-plugins"
|
||||
)
|
||||
|
||||
vpr_arch = os.path.join(qlfpga_plugins, "qlf_k4n8/slow/vpr_arch/UMC22nm_vpr.xml")
|
||||
repacking_rules = os.path.join(qlfpga_plugins, "qlf_k4n8/repacking_rules.json")
|
||||
|
||||
eblif_ref = os.path.join(basedir, "lut{}_{}.golden.eblif".format(lut_width, lut_inputs))
|
||||
eblif_in = os.path.join(basedir, "lut{}.eblif".format(lut_width))
|
||||
net_in = os.path.join(basedir, "lut{}_{}.net".format(lut_width, lut_inputs))
|
||||
|
||||
with tempfile.TemporaryDirectory() as tempdir:
|
||||
|
||||
eblif_out = os.path.join(tempdir, "out.eblif")
|
||||
net_out = os.path.join(tempdir, "out.net")
|
||||
|
||||
# Substitute commandline arguments
|
||||
monkeypatch.setattr(
|
||||
"sys.argv",
|
||||
[
|
||||
"repack.py",
|
||||
"--vpr-arch",
|
||||
vpr_arch,
|
||||
"--repacking-rules",
|
||||
repacking_rules,
|
||||
"--eblif-in",
|
||||
eblif_in,
|
||||
"--net-in",
|
||||
net_in,
|
||||
"--eblif-out",
|
||||
eblif_out,
|
||||
"--net-out",
|
||||
net_out,
|
||||
],
|
||||
)
|
||||
|
||||
# Invoke the repacker
|
||||
repack_main()
|
||||
|
||||
# Compare output with the golden reference
|
||||
with open(eblif_ref, "r") as fp:
|
||||
golden_data = fp.read().rstrip()
|
||||
with open(eblif_out, "r") as fp:
|
||||
output_data = fp.read().rstrip()
|
||||
|
||||
assert golden_data == output_data
|
|
@ -0,0 +1,746 @@
|
|||
<?xml version="1.0"?>
|
||||
<block name="top.net" instance="FPGA_packed_netlist[0]" architecture_id="SHA256:e4eb21dd40067be5f659dcebb9ade6d70c1a29c93255ad92035a9dccd7d4b2a0" atom_netlist_id="SHA256:907bae6d91adf12dd9385e7cdf495d7ed3f8e56911cf5256c991a11fdd7b9f1f">
|
||||
<inputs>clk</inputs>
|
||||
<outputs>out:led[0] out:led[1] out:led[2] out:led[3]</outputs>
|
||||
<clocks>clk</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[7]" instance="clb[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="I">cnt[0] cnt[1] open open open open open open open open open open open open open open open open open open open open open open</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="O">open open open fle[3].out[0]->clbouts1 fle[4].out[0]->clbouts2 fle[5].out[0]->clbouts2 open fle[7].out[0]->clbouts2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
<port name="cout_copy">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">clk</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[2]" instance="fle[0]" mode="n1_lut4">
|
||||
<inputs>
|
||||
<port name="in">clb.I[0]->crossbar_fle[0].in[0] clb.I[1]->crossbar_fle[0].in[1] fle[0].out[0]->crossbar_fle[0].in[2] open</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ble4[0].out[0]->direct2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">clb.clk[0]->clb.clk_to_fle[0].clk</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[2]" instance="ble4[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="in">fle.in[0]->direct1 fle.in[1]->direct1 fle.in[2]->direct1 open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ff[0].Q[0]->mux1</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">fle.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[2]" instance="lut4[0]" mode="lut4">
|
||||
<inputs>
|
||||
<port name="in">ble4.in[0]->direct1 ble4.in[1]->direct1 ble4.in[2]->direct1 open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut[0].out[0]->direct:lut4</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[2]" instance="lut[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="in">lut4.in[0]->direct:lut4 lut4.in[1]->direct:lut4 lut4.in[2]->direct:lut4 open</port>
|
||||
<port_rotation_map name="in">2 1 0 open</port_rotation_map>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">$auto$alumacc.cc:485:replace_alu$5.Y[2]</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="cnt[2]" instance="ff[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="D">lut4[0].out[0]->direct2</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="Q">cnt[2]</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">ble4.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[3]" instance="fle[1]" mode="n1_lut4">
|
||||
<inputs>
|
||||
<port name="in">clb.I[0]->crossbar_fle[1].in[0] fle[0].out[0]->crossbar_fle[1].in[1] clb.I[1]->crossbar_fle[1].in[2] fle[1].out[0]->crossbar_fle[1].in[3]</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ble4[0].out[0]->direct2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">clb.clk[0]->clb.clk_to_fle[1].clk</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[3]" instance="ble4[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="in">fle.in[0]->direct1 fle.in[1]->direct1 fle.in[2]->direct1 fle.in[3]->direct1</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ff[0].Q[0]->mux1</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">fle.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[3]" instance="lut4[0]" mode="lut4">
|
||||
<inputs>
|
||||
<port name="in">ble4.in[0]->direct1 ble4.in[1]->direct1 ble4.in[2]->direct1 ble4.in[3]->direct1</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut[0].out[0]->direct:lut4</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[3]" instance="lut[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="in">lut4.in[0]->direct:lut4 lut4.in[1]->direct:lut4 lut4.in[2]->direct:lut4 lut4.in[3]->direct:lut4</port>
|
||||
<port_rotation_map name="in">3 1 2 0</port_rotation_map>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">$auto$alumacc.cc:485:replace_alu$5.Y[3]</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="cnt[3]" instance="ff[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="D">lut4[0].out[0]->direct2</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="Q">cnt[3]</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">ble4.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="$abc$215$new_n22_" instance="fle[2]" mode="n1_lut4">
|
||||
<inputs>
|
||||
<port name="in">fle[1].out[0]->crossbar_fle[2].in[0] fle[0].out[0]->crossbar_fle[2].in[1] clb.I[0]->crossbar_fle[2].in[2] clb.I[1]->crossbar_fle[2].in[3]</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ble4[0].out[0]->direct2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="$abc$215$new_n22_" instance="ble4[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="in">fle.in[0]->direct1 fle.in[1]->direct1 fle.in[2]->direct1 fle.in[3]->direct1</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut4[0].out[0]->mux1</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="$abc$215$new_n22_" instance="lut4[0]" mode="lut4">
|
||||
<inputs>
|
||||
<port name="in">ble4.in[0]->direct1 ble4.in[1]->direct1 ble4.in[2]->direct1 ble4.in[3]->direct1</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut[0].out[0]->direct:lut4</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="$abc$215$new_n22_" instance="lut[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="in">lut4.in[0]->direct:lut4 lut4.in[1]->direct:lut4 lut4.in[2]->direct:lut4 lut4.in[3]->direct:lut4</port>
|
||||
<port_rotation_map name="in">1 0 3 2</port_rotation_map>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">$abc$215$new_n22_</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="open" instance="ff[0]" />
|
||||
</block>
|
||||
</block>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[4]" instance="fle[3]" mode="n1_lut4">
|
||||
<inputs>
|
||||
<port name="in">fle[3].out[0]->crossbar_fle[3].in[0] open open fle[2].out[0]->crossbar_fle[3].in[3]</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ble4[0].out[0]->direct2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">clb.clk[0]->clb.clk_to_fle[3].clk</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[4]" instance="ble4[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="in">fle.in[0]->direct1 open open fle.in[3]->direct1</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ff[0].Q[0]->mux1</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">fle.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[4]" instance="lut4[0]" mode="lut4">
|
||||
<inputs>
|
||||
<port name="in">ble4.in[0]->direct1 open open ble4.in[3]->direct1</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut[0].out[0]->direct:lut4</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[4]" instance="lut[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="in">lut4.in[0]->direct:lut4 open open lut4.in[3]->direct:lut4</port>
|
||||
<port_rotation_map name="in">0 open open 1</port_rotation_map>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">$auto$alumacc.cc:485:replace_alu$5.Y[4]</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="led[0]" instance="ff[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="D">lut4[0].out[0]->direct2</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="Q">led[0]</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">ble4.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[5]" instance="fle[4]" mode="n1_lut4">
|
||||
<inputs>
|
||||
<port name="in">fle[4].out[0]->crossbar_fle[4].in[0] fle[2].out[0]->crossbar_fle[4].in[1] fle[3].out[0]->crossbar_fle[4].in[2] open</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ble4[0].out[0]->direct2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">clb.clk[0]->clb.clk_to_fle[4].clk</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[5]" instance="ble4[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="in">fle.in[0]->direct1 fle.in[1]->direct1 fle.in[2]->direct1 open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ff[0].Q[0]->mux1</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">fle.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[5]" instance="lut4[0]" mode="lut4">
|
||||
<inputs>
|
||||
<port name="in">ble4.in[0]->direct1 ble4.in[1]->direct1 ble4.in[2]->direct1 open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut[0].out[0]->direct:lut4</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[5]" instance="lut[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="in">lut4.in[0]->direct:lut4 lut4.in[1]->direct:lut4 lut4.in[2]->direct:lut4 open</port>
|
||||
<port_rotation_map name="in">0 2 1 open</port_rotation_map>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">$auto$alumacc.cc:485:replace_alu$5.Y[5]</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="led[1]" instance="ff[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="D">lut4[0].out[0]->direct2</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="Q">led[1]</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">ble4.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[6]" instance="fle[5]" mode="n1_lut4">
|
||||
<inputs>
|
||||
<port name="in">fle[6].out[0]->crossbar_fle[5].in[0] fle[5].out[0]->crossbar_fle[5].in[1] open fle[2].out[0]->crossbar_fle[5].in[3]</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ble4[0].out[0]->direct2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">clb.clk[0]->clb.clk_to_fle[5].clk</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[6]" instance="ble4[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="in">fle.in[0]->direct1 fle.in[1]->direct1 open fle.in[3]->direct1</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ff[0].Q[0]->mux1</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">fle.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[6]" instance="lut4[0]" mode="lut4">
|
||||
<inputs>
|
||||
<port name="in">ble4.in[0]->direct1 ble4.in[1]->direct1 open ble4.in[3]->direct1</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut[0].out[0]->direct:lut4</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[6]" instance="lut[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="in">lut4.in[0]->direct:lut4 lut4.in[1]->direct:lut4 open lut4.in[3]->direct:lut4</port>
|
||||
<port_rotation_map name="in">1 0 open 2</port_rotation_map>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">$auto$alumacc.cc:485:replace_alu$5.Y[6]</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="led[2]" instance="ff[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="D">lut4[0].out[0]->direct2</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="Q">led[2]</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">ble4.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="$abc$215$new_n25_" instance="fle[6]" mode="n1_lut4">
|
||||
<inputs>
|
||||
<port name="in">fle[3].out[0]->crossbar_fle[6].in[0] fle[4].out[0]->crossbar_fle[6].in[1] open open</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ble4[0].out[0]->direct2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="$abc$215$new_n25_" instance="ble4[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="in">fle.in[0]->direct1 fle.in[1]->direct1 open open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut4[0].out[0]->mux1</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">open</port>
|
||||
</clocks>
|
||||
<block name="$abc$215$new_n25_" instance="lut4[0]" mode="lut4">
|
||||
<inputs>
|
||||
<port name="in">ble4.in[0]->direct1 ble4.in[1]->direct1 open open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut[0].out[0]->direct:lut4</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="$abc$215$new_n25_" instance="lut[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="in">lut4.in[0]->direct:lut4 lut4.in[1]->direct:lut4 open open</port>
|
||||
<port_rotation_map name="in">0 1 open open</port_rotation_map>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">$abc$215$new_n25_</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="open" instance="ff[0]" />
|
||||
</block>
|
||||
</block>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[7]" instance="fle[7]" mode="n1_lut4">
|
||||
<inputs>
|
||||
<port name="in">fle[6].out[0]->crossbar_fle[7].in[0] fle[2].out[0]->crossbar_fle[7].in[1] fle[7].out[0]->crossbar_fle[7].in[2] fle[5].out[0]->crossbar_fle[7].in[3]</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ble4[0].out[0]->direct2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">clb.clk[0]->clb.clk_to_fle[7].clk</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[7]" instance="ble4[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="in">fle.in[0]->direct1 fle.in[1]->direct1 fle.in[2]->direct1 fle.in[3]->direct1</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ff[0].Q[0]->mux1</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">fle.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[7]" instance="lut4[0]" mode="lut4">
|
||||
<inputs>
|
||||
<port name="in">ble4.in[0]->direct1 ble4.in[1]->direct1 ble4.in[2]->direct1 ble4.in[3]->direct1</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut[0].out[0]->direct:lut4</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[7]" instance="lut[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="in">lut4.in[0]->direct:lut4 lut4.in[1]->direct:lut4 lut4.in[2]->direct:lut4 lut4.in[3]->direct:lut4</port>
|
||||
<port_rotation_map name="in">2 3 0 1</port_rotation_map>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">$auto$alumacc.cc:485:replace_alu$5.Y[7]</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="led[3]" instance="ff[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="D">lut4[0].out[0]->direct2</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="Q">led[3]</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">ble4.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[1]" instance="clb[1]" mode="default">
|
||||
<inputs>
|
||||
<port name="I">open open open open open open open open open open open open open open open open open open open open open open open open</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="O">open open open open open open fle[6].out[0]->clbouts2 fle[7].out[0]->clbouts2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
<port name="cout_copy">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">clk</port>
|
||||
</clocks>
|
||||
<block name="open" instance="fle[0]" />
|
||||
<block name="open" instance="fle[1]" />
|
||||
<block name="open" instance="fle[2]" />
|
||||
<block name="open" instance="fle[3]" />
|
||||
<block name="open" instance="fle[4]" />
|
||||
<block name="open" instance="fle[5]" />
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.X[0]" instance="fle[6]" mode="n1_lut4">
|
||||
<inputs>
|
||||
<port name="in">open open fle[6].out[0]->crossbar_fle[6].in[2] open</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ble4[0].out[0]->direct2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">clb.clk[0]->clb.clk_to_fle[6].clk</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.X[0]" instance="ble4[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="in">open open fle.in[2]->direct1 open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ff[0].Q[0]->mux1</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">fle.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.X[0]" instance="lut4[0]" mode="lut4">
|
||||
<inputs>
|
||||
<port name="in">open open ble4.in[2]->direct1 open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut[0].out[0]->direct:lut4</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.X[0]" instance="lut[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="in">open open lut4.in[2]->direct:lut4 open</port>
|
||||
<port_rotation_map name="in">open open 0 open</port_rotation_map>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">$auto$alumacc.cc:485:replace_alu$5.X[0]</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="cnt[0]" instance="ff[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="D">lut4[0].out[0]->direct2</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="Q">cnt[0]</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">ble4.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[1]" instance="fle[7]" mode="n1_lut4">
|
||||
<inputs>
|
||||
<port name="in">fle[7].out[0]->crossbar_fle[7].in[0] open open fle[6].out[0]->crossbar_fle[7].in[3]</port>
|
||||
<port name="reg_in">open</port>
|
||||
<port name="sc_in">open</port>
|
||||
<port name="cin">open</port>
|
||||
<port name="reset">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ble4[0].out[0]->direct2</port>
|
||||
<port name="reg_out">open</port>
|
||||
<port name="sc_out">open</port>
|
||||
<port name="cout">open</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">clb.clk[0]->clb.clk_to_fle[7].clk</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[1]" instance="ble4[0]" mode="default">
|
||||
<inputs>
|
||||
<port name="in">fle.in[0]->direct1 open open fle.in[3]->direct1</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">ff[0].Q[0]->mux1</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">fle.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[1]" instance="lut4[0]" mode="lut4">
|
||||
<inputs>
|
||||
<port name="in">ble4.in[0]->direct1 open open ble4.in[3]->direct1</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">lut[0].out[0]->direct:lut4</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="$auto$alumacc.cc:485:replace_alu$5.Y[1]" instance="lut[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="in">lut4.in[0]->direct:lut4 open open lut4.in[3]->direct:lut4</port>
|
||||
<port_rotation_map name="in">0 open open 1</port_rotation_map>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="out">$auto$alumacc.cc:485:replace_alu$5.Y[1]</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="cnt[1]" instance="ff[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="D">lut4[0].out[0]->direct2</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="Q">cnt[1]</port>
|
||||
</outputs>
|
||||
<clocks>
|
||||
<port name="clk">ble4.clk[0]->direct3</port>
|
||||
</clocks>
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
||||
<block name="out:led[0]" instance="io[2]" mode="outpad">
|
||||
<inputs>
|
||||
<port name="outpad">led[0]</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="inpad">open</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="out:led[0]" instance="outpad[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="outpad">io.outpad[0]->outpad</port>
|
||||
</inputs>
|
||||
<outputs />
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="out:led[1]" instance="io[3]" mode="outpad">
|
||||
<inputs>
|
||||
<port name="outpad">led[1]</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="inpad">open</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="out:led[1]" instance="outpad[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="outpad">io.outpad[0]->outpad</port>
|
||||
</inputs>
|
||||
<outputs />
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="out:led[2]" instance="io[4]" mode="outpad">
|
||||
<inputs>
|
||||
<port name="outpad">led[2]</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="inpad">open</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="out:led[2]" instance="outpad[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="outpad">io.outpad[0]->outpad</port>
|
||||
</inputs>
|
||||
<outputs />
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="out:led[3]" instance="io[5]" mode="outpad">
|
||||
<inputs>
|
||||
<port name="outpad">led[3]</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="inpad">open</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="out:led[3]" instance="outpad[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs>
|
||||
<port name="outpad">io.outpad[0]->outpad</port>
|
||||
</inputs>
|
||||
<outputs />
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
<block name="clk" instance="io[6]" mode="inpad">
|
||||
<inputs>
|
||||
<port name="outpad">open</port>
|
||||
</inputs>
|
||||
<outputs>
|
||||
<port name="inpad">inpad[0].inpad[0]->inpad</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
<block name="clk" instance="inpad[0]">
|
||||
<attributes />
|
||||
<parameters />
|
||||
<inputs />
|
||||
<outputs>
|
||||
<port name="inpad">clk</port>
|
||||
</outputs>
|
||||
<clocks />
|
||||
</block>
|
||||
</block>
|
||||
</block>
|
|
@ -0,0 +1,72 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
import lxml.etree as ET
|
||||
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
|
||||
from packed_netlist import PackedNetlist # noqa: E402
|
||||
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def test_netlist_roundtrip():
    """
    Round-trip test for the packed netlist parser/serializer.

    Parses the golden .net file into a PackedNetlist, serializes it back to
    XML and verifies that the result is textually identical to the input.
    Both trees are first canonicalized (sorted) with the sort_netlist.xsl
    stylesheet so that element/attribute order does not affect the comparison.
    """
    basedir = os.path.dirname(__file__)
    golden_file = os.path.join(basedir, "netlist.golden.net")

    # Load the XSLT for sorting
    xslt_file = os.path.join(basedir, "../sort_netlist.xsl")
    xslt = ET.parse(xslt_file)
    xform = ET.XSLT(xslt)

    def write_sorted(tree, file_name):
        """Apply the sorting transform to `tree` and write it to `file_name`."""
        et = xform(tree)
        # A well-formed XML declaration must be closed with "?>".
        st = '<?xml version="1.0"?>\n' + ET.tostring(et, pretty_print=True).decode("utf-8")
        with open(file_name, "w") as fp:
            fp.write(st)

    # Load the golden netlist XML
    xml_tree = ET.parse(golden_file, ET.XMLParser(remove_blank_text=True))

    with tempfile.TemporaryDirectory() as tempdir:

        # Transform and save the golden file
        sorted_golden_file = os.path.join(tempdir, "netlist.golden.sorted.net")
        write_sorted(xml_tree, sorted_golden_file)

        # Build packed netlist from the golden tree
        netlist = PackedNetlist.from_etree(xml_tree.getroot())

        # Convert the netlist back to an element tree
        xml_tree = ET.ElementTree(netlist.to_etree())

        # Transform and save the output file
        sorted_output_file = os.path.join(tempdir, "netlist.output.sorted.net")
        write_sorted(xml_tree, sorted_output_file)

        # Compare the two files
        with open(sorted_golden_file, "r") as fp:
            golden_data = fp.read()
        with open(sorted_output_file, "r") as fp:
            output_data = fp.read()

        assert golden_data == output_data
|
36
f4pga/utils/quicklogic/repacker/tests/sort_netlist.xsl
Normal file
36
f4pga/utils/quicklogic/repacker/tests/sort_netlist.xsl
Normal file
|
@ -0,0 +1,36 @@
|
|||
<?xml version="1.0"?>
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
  <!-- Canonicalizing transform for VPR packed netlists (.net files).
       Everything not matched below is copied verbatim by the identity
       transform, so only <block> elements get their children reordered. -->
  <xsl:include href="identity.xsl" />

  <!-- Sort everything -->
  <xsl:template match="block">
    <xsl:copy>

      <!-- Attributes first, in name order -->
      <xsl:apply-templates select="@*">
        <xsl:sort select="name()" order="ascending"/>
      </xsl:apply-templates>

      <!-- <attributes> and <parameters> keep their relative position -->
      <xsl:apply-templates select="attributes"/>
      <xsl:apply-templates select="parameters"/>

      <!-- Port groups, each sorted by port name -->
      <xsl:apply-templates select="inputs">
        <xsl:sort select="@name" order="ascending"/>
      </xsl:apply-templates>
      <xsl:apply-templates select="outputs">
        <xsl:sort select="@name" order="ascending"/>
      </xsl:apply-templates>
      <xsl:apply-templates select="clocks">
        <xsl:sort select="@name" order="ascending"/>
      </xsl:apply-templates>

      <!-- Child blocks, sorted by instance name (recurses via this template) -->
      <xsl:apply-templates select="block">
        <xsl:sort select="@instance" order="ascending"/>
      </xsl:apply-templates>

      <!-- Any remaining children, in document order -->
      <xsl:apply-templates select="*[not(self::attributes or self::parameters or self::inputs or self::outputs or self::clocks or self::block)]"/>

    </xsl:copy>
  </xsl:template>

</xsl:stylesheet>
|
||||
|
71
f4pga/utils/quicklogic/yosys_fixup_cell_names.py
Normal file
71
f4pga/utils/quicklogic/yosys_fixup_cell_names.py
Normal file
|
@ -0,0 +1,71 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2022 F4PGA Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
"""
|
||||
This is a utility script that performs cell instance renaming in a design in
|
||||
Yosys JSON format. Cell instance names containing dots are altered so that
|
||||
all dots are replaced with underscores.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
|
||||
|
||||
def fixup_cell_names(design):
    """
    Scans Yosys' JSON data structure and replaces dots in cell instance
    names with underscores.

    Parameters
    ----------
    design : dict
        Parsed Yosys JSON design description. Modified in place.

    Returns
    -------
    dict
        The same design object, with renamed cells.

    Raises
    ------
    RuntimeError
        When a fixed-up name collides with an already existing cell name.
    """
    # Process modules
    modules = design["modules"]
    for mod_name, mod_data in modules.items():
        print(mod_name)

        # Process cells. Iterate over a snapshot of the keys since the
        # dict is mutated during iteration.
        cells = mod_data["cells"]
        for cell_name in list(cells.keys()):

            # Fixup name
            if "." not in cell_name:
                continue

            new_name = cell_name.replace(".", "_")
            # Explicit exception instead of assert so that the collision
            # check is not stripped when running with "python -O".
            if new_name in cells:
                raise RuntimeError("Cell name collision: '{}'".format(new_name))

            cells[new_name] = cells.pop(cell_name)

    return design
|
||||
|
||||
|
||||
def main():
    """Command-line entry point: read a Yosys JSON file, fix up cell names
    and write the result to another file."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("i", type=str, help="Yosys JSON in")
    arg_parser.add_argument("o", type=str, help="Yosys JSON out")
    cli_args = arg_parser.parse_args()

    # Load the design and rename its cells
    with open(cli_args.i, "r") as json_in:
        fixed_design = fixup_cell_names(json.load(json_in))

    # Store the fixed design
    with open(cli_args.o, "w") as json_out:
        json.dump(fixed_design, json_out, indent=2)


if __name__ == "__main__":
    main()
|
|
@ -413,7 +413,7 @@ if [[ '{device}' =~ ^(qlf_.*)$ ]]; then
|
|||
exit -1
|
||||
fi
|
||||
|
||||
'{python3}' '{scripts_dir}/qlf_k4n8_create_ioplace.py' \
|
||||
'{python3}' -m f4pga.utils.quicklogic.qlf_k4n8.create_ioplace \
|
||||
--pcf '{pcf}' \
|
||||
--blif '{eblif}' \
|
||||
--pinmap_xml '{archs_dir}'/"${{DEVICE_PATH}}_${{DEVICE_PATH}}/${{PINMAPXML}}" \
|
||||
|
@ -437,14 +437,14 @@ elif [[ '{device}' =~ ^(ql-.*)$ ]]; then
|
|||
DEVICE_PATH='{device}_wlcsp'
|
||||
PINMAP='{archs_dir}'/"${{DEVICE_PATH}}/${{PINMAPCSV}}"
|
||||
|
||||
'{python3}' '{scripts_dir}/pp3_create_ioplace.py' \
|
||||
'{python3}' -m f4pga.utils.quicklogic.pp3.create_ioplace \
|
||||
--pcf '{pcf}' \
|
||||
--blif '{eblif}' \
|
||||
--map "$PINMAP" \
|
||||
--net '{net}' \
|
||||
> '{place_file_prefix}_io.place'
|
||||
|
||||
'{python3}' '{scripts_dir}/pp3_create_place_constraints.py' \
|
||||
'{python3}' -m f4pga.utils.quicklogic.pp3.create_place_constraints \
|
||||
--blif '{eblif}' \
|
||||
--map '{archs_dir}'/"${{DEVICE_PATH}}/${{CLKMAPCSV}}" \
|
||||
-i '{place_file_prefix}_io.place' \
|
||||
|
@ -456,7 +456,7 @@ elif [[ '{device}' =~ ^(ql-.*)$ ]]; then
|
|||
+ "\n".join(
|
||||
[
|
||||
f"""
|
||||
'{python3}' '{scripts_dir}/pp3_eos_s3_iomux_config.py' \
|
||||
'{python3}' -m f4pga.utils.quicklogic.pp3.eos-s3.iomux_config \
|
||||
--eblif '{eblif}' \
|
||||
--pcf '{pcf}' \
|
||||
--map "$PINMAP" \
|
||||
|
@ -687,7 +687,7 @@ DESIGN=${EBLIF/.eblif/}
|
|||
"""
|
||||
+ f"""
|
||||
PYTHONPATH='{F4PGA_SHARE_DIR}/scripts':$PYTHONPATH \
|
||||
'{python3}' '{F4PGA_SHARE_DIR}/scripts/repacker/repack.py' \
|
||||
'{python3}' -m f4pga.utils.quicklogic.repacker.repack \
|
||||
--vpr-arch ${{ARCH_DEF}} \
|
||||
--repacking-rules ${{ARCH_DIR}}/${{DEVICE_NAME}}.repacking_rules.json \
|
||||
$JSON_ARGS \
|
||||
|
@ -750,7 +750,7 @@ ARCH_DIR='{F4PGA_SHARE_DIR}/arch/'"${{DEVICE_1}}_${{DEVICE_1}}"
|
|||
PINMAP_XML=${ARCH_DIR}/${PINMAPXML}
|
||||
"""
|
||||
+ f"""
|
||||
'{python3}' '{F4PGA_SHARE_DIR}/scripts/create_lib.py' \
|
||||
'{python3}' -m f4pga.utils.quicklogic.create_lib \
|
||||
-n "${{DEV}}_0P72_SSM40" \
|
||||
-m fpga_top \
|
||||
-c '{part}' \
|
||||
|
@ -794,7 +794,7 @@ def fasm2bels():
|
|||
|
||||
p_run_bash_cmds(
|
||||
f"""
|
||||
'{python3}' '{F4PGA_SHARE_DIR}/scripts/fasm2bels.py' '{args.bit}' \
|
||||
'{python3}' -m f4pga.utils.quicklogic.pp3.fasm2bels '{args.bit}' \
|
||||
--phy-db '{F4PGA_SHARE_DIR}/arch/{args.device}_wlcsp/db_phy.pickle' \
|
||||
--device-name "${{DEVICE/ql-/}}" \
|
||||
--package-name '{args.part}' \
|
||||
|
|
|
@ -294,7 +294,6 @@ else
|
|||
PCF_MAKE="\${current_dir}/${BUILDDIR}/${TOP}_dummy.pcf"
|
||||
fi
|
||||
|
||||
PROCESS_SDC="$F4PGA_SHARE_DIR"/scripts/process_sdc_constraints.py
|
||||
if ! [ -z "$SDC" ]; then
|
||||
if ! [ -f "$SOURCE"/$SDC ];then
|
||||
echo "The sdc file: $SDC is missing at: $SOURCE"
|
||||
|
@ -340,7 +339,7 @@ all: \${BUILDDIR}/\${TOP}.${RUN_TILL}\n\
|
|||
cd \${BUILDDIR} && symbiflow_synth -t \${TOP} -v \${VERILOG} -F \${FAMILY} -d \${DEVICE} -p \${PCF} -P \${PART} ${COMPILE_EXTRA_ARGS[*]} > $LOG_FILE 2>&1\n\
|
||||
\n\
|
||||
\${BUILDDIR}/\${TOP}.sdc: \${BUILDDIR}/\${TOP}.eblif\n\
|
||||
python3 ${PROCESS_SDC} --sdc-in \${SDC_IN} --sdc-out \$@ --pcf \${PCF} --eblif \${BUILDDIR}/\${TOP}.eblif --pin-map \${PINMAP_CSV}\n\
|
||||
python3 -m f4pga.utils.quicklogic.process_sdc_constraints --sdc-in \${SDC_IN} --sdc-out \$@ --pcf \${PCF} --eblif \${BUILDDIR}/\${TOP}.eblif --pin-map \${PINMAP_CSV}\n\
|
||||
\n\
|
||||
\${BUILDDIR}/\${TOP}.net: \${BUILDDIR}/\${TOP}.eblif \${BUILDDIR}/\${TOP}.sdc\n\
|
||||
cd \${BUILDDIR} && symbiflow_pack -e \${TOP}.eblif -f \${FAMILY} -d \${DEVICE} -s \${SDC} -c \${PNR_CORNER} >> $LOG_FILE 2>&1\n\
|
||||
|
|
|
@ -116,9 +116,6 @@ if [ ${#VERILOG_FILES[@]} -eq 0 ]; then
|
|||
exit 1
|
||||
fi
|
||||
|
||||
SPLIT_INOUTS="${F4PGA_SHARE_DIR}"/scripts/split_inouts.py
|
||||
CONVERT_OPTS="${F4PGA_SHARE_DIR}"/scripts/convert_compile_opts.py
|
||||
|
||||
PINMAPCSV="pinmap_${PART}.csv"
|
||||
|
||||
SYNTH_TCL_PATH="$(python3 -m f4pga.wrappers.tcl synth "${FAMILY}")"
|
||||
|
@ -158,7 +155,7 @@ else
|
|||
fi
|
||||
fi
|
||||
|
||||
YOSYS_COMMANDS=`echo ${EXTRA_ARGS[*]} | python3 ${CONVERT_OPTS}`
|
||||
YOSYS_COMMANDS=`echo ${EXTRA_ARGS[*]} | python3 -m f4pga.utils.quicklogic.convert_compile_opts`
|
||||
YOSYS_COMMANDS="${YOSYS_COMMANDS//$'\n'/'; '}"
|
||||
|
||||
LOG=${TOP}_synth.log
|
||||
|
|
|
@ -175,7 +175,7 @@ stat
|
|||
|
||||
# Write output JSON, fixup cell names using an external Python script
|
||||
write_json $::env(OUT_JSON).org.json
|
||||
exec $::env(PYTHON3) $::env(UTILS_PATH)/yosys_fixup_cell_names.py $::env(OUT_JSON).org.json $::env(OUT_JSON)
|
||||
exec $::env(PYTHON3) -m f4pga.utils.quicklogic.yosys_fixup_cell_names $::env(OUT_JSON).org.json $::env(OUT_JSON)
|
||||
|
||||
# Read the fixed JSON back and write verilog
|
||||
design -reset
|
||||
|
|
Loading…
Reference in a new issue