build/xilinx/vivado: use GenericToolchain

Gwenhael Goavec-Merou 2022-06-26 09:06:20 +02:00
parent 8446c5b342
commit 564c062074
1 changed file with 163 additions and 183 deletions
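
In short: the Vivado backend no longer drives the whole build flow itself. It now derives from GenericToolchain and only implements the toolchain-specific steps (build_io_constraints, build_timing_constraints, build_project, build_script, run_script, plus finalize), while the shared _build() of the base class sequences them. The sketch below is a rough reconstruction of that contract from the calls visible in this diff; it is NOT the actual litex/build/generic_toolchain.py, and the class name, default arguments and exact ordering are assumptions.

# Rough sketch of the GenericToolchain contract this backend now fills in.
# This is NOT the real litex/build/generic_toolchain.py: hook names and the
# attributes (_build_name, platform, named_sc/named_pc, clocks/false_paths)
# are taken from the diff below, but the sequencing is an assumption.
import os

class GenericToolchainSketch:
    def __init__(self):
        # Clock/false-path bookkeeping now lives in the base class
        # (the subclass no longer creates it in its own __init__).
        self.clocks      = dict()
        self.false_paths = set()

    # Hooks a concrete backend such as XilinxVivadoToolchain overrides:
    def finalize(self):
        pass                            # emit extra platform commands (clocks, false paths)

    def build_io_constraints(self):
        raise NotImplementedError       # write the constraints file (.xdc for Vivado)

    def build_timing_constraints(self, vns):
        raise NotImplementedError       # record/write timing constraints

    def build_project(self):
        raise NotImplementedError       # write the project script (.tcl for Vivado)

    def build_script(self):
        raise NotImplementedError       # write build_<name>.sh/.bat, return its filename

    def run_script(self, script):
        raise NotImplementedError       # execute the generated script

    def _build(self, platform, fragment, build_dir="build", build_name="top", run=True, **kwargs):
        # Shared flow: generate the netlist once, then delegate every
        # toolchain-specific step to the hooks above.
        self.platform    = platform
        self._build_name = build_name
        os.makedirs(build_dir, exist_ok=True)
        cwd = os.getcwd()
        os.chdir(build_dir)
        try:
            # (Fragment finalization is omitted here for brevity.)
            v_output = platform.get_verilog(fragment, name=build_name, **kwargs)
            self.finalize()             # e.g. turn self.clocks/self.false_paths into commands
            self.named_sc, self.named_pc = platform.resolve_signals(v_output.ns)
            v_output.write(build_name + ".v")
            platform.add_source(build_name + ".v")
            self.build_io_constraints()
            self.build_timing_constraints(v_output.ns)
            self.build_project()
            script = self.build_script()
            if run:
                self.run_script(script)
        finally:
            os.chdir(cwd)
        return v_output.ns

With that split, the Vivado-specific file below only has to port _build_tcl to build_project(), move the script helpers into build_script()/run_script(), and route its clock/false-path constraints through finalize().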


@@ -15,6 +15,7 @@ from migen.fhdl.structure import _Fragment
 from litex.build.generic_platform import *
 from litex.build import tools
 from litex.build.xilinx import common
+from litex.build.generic_toolchain import GenericToolchain

 # Constraints (.xdc) -------------------------------------------------------------------------------
@@ -67,39 +68,6 @@ def _build_xdc(named_sc, named_pc):
         r += "\n" + "\n\n".join(named_pc)
     return r

-# Script -------------------------------------------------------------------------------------------
-
-def _build_script(build_name):
-    if sys.platform in ["win32", "cygwin"]:
-        script_contents = "REM Autogenerated by LiteX / git: " + tools.get_litex_git_revision() + "\n"
-        script_contents += "vivado -mode batch -source " + build_name + ".tcl\n"
-        script_file = "build_" + build_name + ".bat"
-        tools.write_to_file(script_file, script_contents)
-    else:
-        script_contents = "# Autogenerated by LiteX / git: " + tools.get_litex_git_revision() + "\nset -e\n"
-        if os.getenv("LITEX_ENV_VIVADO", False):
-            script_contents += "source " + os.path.join(os.getenv("LITEX_ENV_VIVADO"), "settings64.sh\n")
-        script_contents += "vivado -mode batch -source " + build_name + ".tcl\n"
-        script_file = "build_" + build_name + ".sh"
-        tools.write_to_file(script_file, script_contents)
-    return script_file
-
-def _run_script(script):
-    if sys.platform in ["win32", "cygwin"]:
-        shell = ["cmd", "/c"]
-    else:
-        shell = ["bash"]
-
-    if which("vivado") is None and os.getenv("LITEX_ENV_VIVADO", False) == False:
-        msg = "Unable to find or source Vivado toolchain, please either:\n"
-        msg += "- Source Vivado's settings manually.\n"
-        msg += "- Or set LITEX_ENV_VIVADO environment variant to Vivado's settings path.\n"
-        msg += "- Or add Vivado toolchain to your $PATH."
-        raise OSError(msg)
-
-    if tools.subprocess_call_filtered(shell + [script], common.colors) != 0:
-        raise OSError("Error occured during Vivado's script execution.")
-
 # XilinxVivadoToolchain ----------------------------------------------------------------------------

 class XilinxVivadoCommands(list):
@@ -118,7 +86,7 @@ class XilinxVivadoCommands(list):
         return named_commands

-class XilinxVivadoToolchain:
+class XilinxVivadoToolchain(GenericToolchain):
     attr_translate = {
         "keep": ("dont_touch", "true"),
         "no_retiming": ("dont_touch", "true"),
@@ -130,6 +98,7 @@ class XilinxVivadoToolchain:
     }

     def __init__(self):
+        super().__init__()
         self.bitstream_commands = []
        self.additional_commands = []
         self.pre_synthesis_commands = XilinxVivadoCommands()
@@ -142,28 +111,116 @@ class XilinxVivadoToolchain:
         self.vivado_post_place_phys_opt_directive = None
         self.vivado_route_directive = "default"
         self.vivado_post_route_phys_opt_directive = "default"
-        self.clocks = dict()
-        self.false_paths = set()
-
-    def _build_tcl(self, platform, build_name, synth_mode, enable_xpm, vns):
-        assert synth_mode in ["vivado", "yosys"]
+        self._synth_mode = "vivado"
+        self._enable_xpm = False
+
+    def finalize(self):
+        # Convert clocks and false path to platform commands
+        self._build_clock_constraints()
+        self._build_false_path_constraints()
+
+    def build(self, platform, fragment,
+        synth_mode = "vivado",
+        enable_xpm = False,
+        **kwargs):
+
+        self._synth_mode = synth_mode
+        self._enable_xpm = enable_xpm
+
+        return self._build(platform, fragment, **kwargs)
+
+    # Constraints (.xdc) ---------------------------------------------------------------------------
+
+    def _format_xdc_constraint(self, c):
+        if isinstance(c, Pins):
+            return "set_property LOC " + c.identifiers[0]
+        elif isinstance(c, IOStandard):
+            return "set_property IOSTANDARD " + c.name
+        elif isinstance(c, Drive):
+            return "set_property DRIVE " + str(c.strength)
+        elif isinstance(c, Misc):
+            return "set_property " + c.misc.replace("=", " ")
+        elif isinstance(c, Inverted):
+            return None
+        else:
+            raise ValueError("unknown constraint {}".format(c))
+
+    def build_io_constraints(self):
+        r = _build_xdc(self.named_sc, self.named_pc)
+        tools.write_to_file(self._build_name + ".xdc", r)
+        return (self._build_name + ".xdc", "XDC")
+
+    # Timing Constraints (in xdc file) -------------------------------------------------------------
+
+    def _build_clock_constraints(self):
+        self.platform.add_platform_command(_xdc_separator("Clock constraints"))
+        def get_clk_type(clk):
+            return {
+                False : "nets",
+                True : "ports",
+            }[hasattr(clk, "port")]
+        for clk, period in sorted(self.clocks.items(), key=lambda x: x[0].duid):
+            self.platform.add_platform_command(
+                "create_clock -name {clk} -period " + str(period) +
+                " [get_" + get_clk_type(clk) + " {clk}]", clk=clk)
+        for _from, _to in sorted(self.false_paths, key=lambda x: (x[0].duid, x[1].duid)):
+            self.platform.add_platform_command(
+                "set_clock_groups "
+                "-group [get_clocks -include_generated_clocks -of [get_" + get_clk_type(_from) + " {_from}]] "
+                "-group [get_clocks -include_generated_clocks -of [get_" + get_clk_type(_to) + " {_to}]] "
+                "-asynchronous",
+                _from=_from, _to=_to)
+        # Make sure add_*_constraint cannot be used again
+        self.clocks.clear()
+        self.false_paths.clear()
+
+    def _build_false_path_constraints(self):
+        self.platform.add_platform_command(_xdc_separator("False path constraints"))
+        # The asynchronous input to a MultiReg is a false path
+        self.platform.add_platform_command(
+            "set_false_path -quiet "
+            "-through [get_nets -hierarchical -filter {{mr_ff == TRUE}}]"
+        )
+        # The asychronous reset input to the AsyncResetSynchronizer is a false path
+        self.platform.add_platform_command(
+            "set_false_path -quiet "
+            "-to [get_pins -filter {{REF_PIN_NAME == PRE}} "
+            "-of_objects [get_cells -hierarchical -filter {{ars_ff1 == TRUE || ars_ff2 == TRUE}}]]"
+        )
+        # clock_period-2ns to resolve metastability on the wire between the AsyncResetSynchronizer FFs
+        self.platform.add_platform_command(
+            "set_max_delay 2 -quiet "
+            "-from [get_pins -filter {{REF_PIN_NAME == C}} "
+            "-of_objects [get_cells -hierarchical -filter {{ars_ff1 == TRUE}}]] "
+            "-to [get_pins -filter {{REF_PIN_NAME == D}} "
+            "-of_objects [get_cells -hierarchical -filter {{ars_ff2 == TRUE}}]]"
+        )
+
+    def build_timing_constraints(self, vns):
+        # FIXME: -> self ?
+        self._vns = vns
+
+    # Project (.tcl) -------------------------------------------------------------------------------
+
+    def build_project(self):
+        assert self._synth_mode in ["vivado", "yosys"]
         tcl = []

-        # Create project
+        # Create Project
         tcl.append("\n# Create Project\n")
-        tcl.append("create_project -force -name {} -part {}".format(build_name, platform.device))
+        tcl.append("create_project -force -name {} -part {}".format(self._build_name, self.platform.device))
         tcl.append("set_msg_config -id {Common 17-55} -new_severity {Warning}")

         # Enable Xilinx Parameterized Macros
-        if enable_xpm:
+        if self._enable_xpm:
             tcl.append("\n# Enable Xilinx Parameterized Macros\n")
             tcl.append("set_property XPM_LIBRARIES {XPM_CDC XPM_MEMORY} [current_project]")

         # Add sources (when Vivado used for synthesis)
-        if synth_mode == "vivado":
+        if self._synth_mode == "vivado":
             tcl.append("\n# Add Sources\n")
             # "-include_dirs {}" crashes Vivado 2016.4
-            for filename, language, library, *copy in platform.sources:
+            for filename, language, library, *copy in self.platform.sources:
                 filename_tcl = "{" + filename + "}"
                 if (language == "systemverilog"):
                     tcl.append("read_verilog -v " + filename_tcl)
@@ -180,13 +237,13 @@ class XilinxVivadoToolchain:

         # Add EDIFs
         tcl.append("\n# Add EDIFs\n")
-        for filename in platform.edifs:
+        for filename in self.platform.edifs:
             filename_tcl = "{" + filename + "}"
             tcl.append("read_edif " + filename_tcl)

         # Add IPs
         tcl.append("\n# Add IPs\n")
-        for filename, disable_constraints in platform.ips.items():
+        for filename, disable_constraints in self.platform.ips.items():
             if filename.endswith("tcl"):
                 tcl += open(filename, "r").read().splitlines()
             else:
@@ -202,31 +259,31 @@ class XilinxVivadoToolchain:

         # Add constraints
         tcl.append("\n# Add constraints\n")
-        tcl.append("read_xdc {}.xdc".format(build_name))
-        tcl.append("set_property PROCESSING_ORDER EARLY [get_files {}.xdc]".format(build_name))
+        tcl.append("read_xdc {}.xdc".format(self._build_name))
+        tcl.append("set_property PROCESSING_ORDER EARLY [get_files {}.xdc]".format(self._build_name))

         # Add pre-synthesis commands
         tcl.append("\n# Add pre-synthesis commands\n")
-        tcl.extend(c.format(build_name=build_name) for c in self.pre_synthesis_commands.resolve(vns))
+        tcl.extend(c.format(build_name=self._build_name) for c in self.pre_synthesis_commands.resolve(self._vns))

         # Synthesis
-        if synth_mode == "vivado":
+        if self._synth_mode == "vivado":
             tcl.append("\n# Synthesis\n")
             synth_cmd = "synth_design -directive {} -top {} -part {}".format(self.vivado_synth_directive,
-                build_name, platform.device)
-            if platform.verilog_include_paths:
-                synth_cmd += " -include_dirs {{{}}}".format(" ".join(platform.verilog_include_paths))
+                self._build_name, self.platform.device)
+            if self.platform.verilog_include_paths:
+                synth_cmd += " -include_dirs {{{}}}".format(" ".join(self.platform.verilog_include_paths))
             tcl.append(synth_cmd)
-        elif synth_mode == "yosys":
+        elif self._synth_mode == "yosys":
             tcl.append("\n# Read Yosys EDIF\n")
-            tcl.append("read_edif {}.edif".format(build_name))
-            tcl.append("link_design -top {} -part {}".format(build_name, platform.device))
+            tcl.append("read_edif {}.edif".format(self._build_name))
+            tcl.append("link_design -top {} -part {}".format(self._build_name, self.platform.device))
         else:
-            raise OSError("Unknown synthesis mode! {}".format(synth_mode))
+            raise OSError("Unknown synthesis mode! {}".format(self._synth_mode))
         tcl.append("\n# Synthesis report\n")
-        tcl.append("report_timing_summary -file {}_timing_synth.rpt".format(build_name))
-        tcl.append("report_utilization -hierarchical -file {}_utilization_hierarchical_synth.rpt".format(build_name))
-        tcl.append("report_utilization -file {}_utilization_synth.rpt".format(build_name))
+        tcl.append("report_timing_summary -file {}_timing_synth.rpt".format(self._build_name))
+        tcl.append("report_utilization -hierarchical -file {}_utilization_hierarchical_synth.rpt".format(self._build_name))
+        tcl.append("report_utilization -file {}_utilization_synth.rpt".format(self._build_name))

-        # Optimize
+        # Optimize design
         tcl.append("\n# Optimize design\n")
@@ -235,11 +292,11 @@ class XilinxVivadoToolchain:

         # Incremental implementation
         if self.incremental_implementation:
             tcl.append("\n# Read design checkpoint\n")
-            tcl.append("read_checkpoint -incremental {}_route.dcp".format(build_name))
+            tcl.append("read_checkpoint -incremental {}_route.dcp".format(self._build_name))

         # Add pre-placement commands
         tcl.append("\n# Add pre-placement commands\n")
-        tcl.extend(c.format(build_name=build_name) for c in self.pre_placement_commands.resolve(vns))
+        tcl.extend(c.format(build_name=self._build_name) for c in self.pre_placement_commands.resolve(self._vns))

         # Placement
         tcl.append("\n# Placement\n")
@@ -247,154 +304,77 @@ class XilinxVivadoToolchain:
         if self.vivado_post_place_phys_opt_directive:
             tcl.append("phys_opt_design -directive {}".format(self.vivado_post_place_phys_opt_directive))
         tcl.append("\n# Placement report\n")
-        tcl.append("report_utilization -hierarchical -file {}_utilization_hierarchical_place.rpt".format(build_name))
-        tcl.append("report_utilization -file {}_utilization_place.rpt".format(build_name))
-        tcl.append("report_io -file {}_io.rpt".format(build_name))
-        tcl.append("report_control_sets -verbose -file {}_control_sets.rpt".format(build_name))
-        tcl.append("report_clock_utilization -file {}_clock_utilization.rpt".format(build_name))
+        tcl.append("report_utilization -hierarchical -file {}_utilization_hierarchical_place.rpt".format(self._build_name))
+        tcl.append("report_utilization -file {}_utilization_place.rpt".format(self._build_name))
+        tcl.append("report_io -file {}_io.rpt".format(self._build_name))
+        tcl.append("report_control_sets -verbose -file {}_control_sets.rpt".format(self._build_name))
+        tcl.append("report_clock_utilization -file {}_clock_utilization.rpt".format(self._build_name))

         # Add pre-routing commands
         tcl.append("\n# Add pre-routing commands\n")
-        tcl.extend(c.format(build_name=build_name) for c in self.pre_routing_commands.resolve(vns))
+        tcl.extend(c.format(build_name=self._build_name) for c in self.pre_routing_commands.resolve(self._vns))

         # Routing
         tcl.append("\n# Routing\n")
         tcl.append("route_design -directive {}".format(self.vivado_route_directive))
         tcl.append("phys_opt_design -directive {}".format(self.vivado_post_route_phys_opt_directive))
-        tcl.append("write_checkpoint -force {}_route.dcp".format(build_name))
+        tcl.append("write_checkpoint -force {}_route.dcp".format(self._build_name))
         tcl.append("\n# Routing report\n")
         tcl.append("report_timing_summary -no_header -no_detailed_paths")
-        tcl.append("report_route_status -file {}_route_status.rpt".format(build_name))
-        tcl.append("report_drc -file {}_drc.rpt".format(build_name))
-        tcl.append("report_timing_summary -datasheet -max_paths 10 -file {}_timing.rpt".format(build_name))
-        tcl.append("report_power -file {}_power.rpt".format(build_name))
+        tcl.append("report_route_status -file {}_route_status.rpt".format(self._build_name))
+        tcl.append("report_drc -file {}_drc.rpt".format(self._build_name))
+        tcl.append("report_timing_summary -datasheet -max_paths 10 -file {}_timing.rpt".format(self._build_name))
+        tcl.append("report_power -file {}_power.rpt".format(self._build_name))
         for bitstream_command in self.bitstream_commands:
-            tcl.append(bitstream_command.format(build_name=build_name))
+            tcl.append(bitstream_command.format(build_name=self._build_name))

         # Bitstream generation
         tcl.append("\n# Bitstream generation\n")
-        tcl.append("write_bitstream -force {}.bit ".format(build_name))
+        tcl.append("write_bitstream -force {}.bit ".format(self._build_name))
         for additional_command in self.additional_commands:
-            tcl.append(additional_command.format(build_name=build_name))
+            tcl.append(additional_command.format(build_name=self._build_name))

         # Quit
         tcl.append("\n# End\n")
         tcl.append("quit")
-        tools.write_to_file(build_name + ".tcl", "\n".join(tcl))
-
-    def _build_clock_constraints(self, platform):
-        platform.add_platform_command(_xdc_separator("Clock constraints"))
-        def get_clk_type(clk):
-            return {
-                False : "nets",
-                True : "ports",
-            }[hasattr(clk, "port")]
-        for clk, period in sorted(self.clocks.items(), key=lambda x: x[0].duid):
-            platform.add_platform_command(
-                "create_clock -name {clk} -period " + str(period) +
-                " [get_" + get_clk_type(clk) + " {clk}]", clk=clk)
-        for _from, _to in sorted(self.false_paths, key=lambda x: (x[0].duid, x[1].duid)):
-            platform.add_platform_command(
-                "set_clock_groups "
-                "-group [get_clocks -include_generated_clocks -of [get_" + get_clk_type(_from) + " {_from}]] "
-                "-group [get_clocks -include_generated_clocks -of [get_" + get_clk_type(_to) + " {_to}]] "
-                "-asynchronous",
-                _from=_from, _to=_to)
-        # Make sure add_*_constraint cannot be used again
-        del self.clocks
-        del self.false_paths
-
-    def _build_false_path_constraints(self, platform):
-        platform.add_platform_command(_xdc_separator("False path constraints"))
-        # The asynchronous input to a MultiReg is a false path
-        platform.add_platform_command(
-            "set_false_path -quiet "
-            "-through [get_nets -hierarchical -filter {{mr_ff == TRUE}}]"
-        )
-        # The asychronous reset input to the AsyncResetSynchronizer is a false path
-        platform.add_platform_command(
-            "set_false_path -quiet "
-            "-to [get_pins -filter {{REF_PIN_NAME == PRE}} "
-            "-of_objects [get_cells -hierarchical -filter {{ars_ff1 == TRUE || ars_ff2 == TRUE}}]]"
-        )
-        # clock_period-2ns to resolve metastability on the wire between the AsyncResetSynchronizer FFs
-        platform.add_platform_command(
-            "set_max_delay 2 -quiet "
-            "-from [get_pins -filter {{REF_PIN_NAME == C}} "
-            "-of_objects [get_cells -hierarchical -filter {{ars_ff1 == TRUE}}]] "
-            "-to [get_pins -filter {{REF_PIN_NAME == D}} "
-            "-of_objects [get_cells -hierarchical -filter {{ars_ff2 == TRUE}}]]"
-        )
-
-    def build(self, platform, fragment,
-        build_dir = "build",
-        build_name = "top",
-        run = True,
-        synth_mode = "vivado",
-        enable_xpm = False,
-        **kwargs):
-
-        # Create build directory
-        os.makedirs(build_dir, exist_ok=True)
-        cwd = os.getcwd()
-        os.chdir(build_dir)
-
-        # Finalize design
-        if not isinstance(fragment, _Fragment):
-            fragment = fragment.get_fragment()
-        platform.finalize(fragment)
-
-        # Generate verilog
-        v_output = platform.get_verilog(fragment, name=build_name, **kwargs)
-
-        # Generate timing constraints
-        self._build_clock_constraints(platform)
-        self._build_false_path_constraints(platform)
-
-        # Add verilog to project.
-        named_sc, named_pc = platform.resolve_signals(v_output.ns)
-        v_file = build_name + ".v"
-        v_output.write(v_file)
-        platform.add_source(v_file)
-
-        # Generate design project (.tcl)
-        self._build_tcl(
-            platform = platform,
-            build_name = build_name,
-            synth_mode = synth_mode,
-            enable_xpm = enable_xpm,
-            vns = v_output.ns,
-        )
-
-        # Generate design constraints (.xdc)
-        tools.write_to_file(build_name + ".xdc", _build_xdc(named_sc, named_pc))
-
-        # Run
-        if run:
-            if synth_mode == "yosys":
-                common._run_yosys(platform.device, platform.sources, platform.verilog_include_paths, build_name)
-            script = _build_script(build_name)
-            _run_script(script)
-
-        os.chdir(cwd)
-
-        return v_output.ns
-
-    def add_period_constraint(self, platform, clk, period):
-        clk.attr.add("keep")
-        period = math.floor(period*1e3)/1e3 # round to lowest picosecond
-        if clk in self.clocks:
-            if period != self.clocks[clk]:
-                raise ValueError("Clock already constrained to {:.2f}ns, new constraint to {:.2f}ns"
-                    .format(self.clocks[clk], period))
-        self.clocks[clk] = period
-
-    def add_false_path_constraint(self, platform, from_, to):
-        from_.attr.add("keep")
-        to.attr.add("keep")
-        if (to, from_) not in self.false_paths:
-            self.false_paths.add((from_, to))
+        tools.write_to_file(self._build_name + ".tcl", "\n".join(tcl))
+
+    # Script ---------------------------------------------------------------------------------------
+
+    def build_script(self):
+        if sys.platform in ["win32", "cygwin"]:
+            script_contents = "REM Autogenerated by LiteX / git: " + tools.get_litex_git_revision() + "\n"
+            script_contents += "vivado -mode batch -source " + self._build_name + ".tcl\n"
+            script_file = "build_" + self._build_name + ".bat"
+            tools.write_to_file(script_file, script_contents)
+        else:
+            script_contents = "# Autogenerated by LiteX / git: " + tools.get_litex_git_revision() + "\nset -e\n"
+            if os.getenv("LITEX_ENV_VIVADO", False):
+                script_contents += "source " + os.path.join(os.getenv("LITEX_ENV_VIVADO"), "settings64.sh\n")
+            script_contents += "vivado -mode batch -source " + self._build_name + ".tcl\n"
+            script_file = "build_" + self._build_name + ".sh"
+            tools.write_to_file(script_file, script_contents)
+        return script_file
+
+    def run_script(self, script):
+        if synth_mode == "yosys":
+            common._run_yosys(self.platform.device, self.platform.sources,
+                self.platform.verilog_include_paths, self._build_name)
+
+        if sys.platform in ["win32", "cygwin"]:
+            shell = ["cmd", "/c"]
+        else:
+            shell = ["bash"]
+
+        if which("vivado") is None and os.getenv("LITEX_ENV_VIVADO", False) == False:
+            msg = "Unable to find or source Vivado toolchain, please either:\n"
+            msg += "- Source Vivado's settings manually.\n"
+            msg += "- Or set LITEX_ENV_VIVADO environment variant to Vivado's settings path.\n"
+            msg += "- Or add Vivado toolchain to your $PATH."
+            raise OSError(msg)
+
+        if tools.subprocess_call_filtered(shell + [script], common.colors) != 0:
+            raise OSError("Error occured during Vivado's script execution.")

 def vivado_build_args(parser):
     toolchain_group = parser.add_argument_group(title="Toolchain options")