Merge pull request #1072 from AndrewD/master
efinix and general improvements
commit adf5665f21
@@ -80,6 +80,21 @@ class EfinixDbParser():

         return None

+    def get_block_instance_names(self, block):
+        dmap = self.get_device_map(self.device)
+        die = self.get_die_file_name(dmap)
+        tree = et.parse(self.efinity_db_path + 'die/' + die)
+        root = tree.getroot()
+
+        peri = root.findall('efxpt:periphery_instance', namespaces)
+        names = []
+        for p in peri:
+            if p.get('block') == block:
+                names.append(p.get('name'))
+
+        print(f"block {block}: names:{names}")
+        return names
+
     def get_pll_inst_from_gpio_inst(self, dmap, inst):
         die = self.get_die_file_name(dmap)
         tree = et.parse(self.efinity_db_path + 'die/' + die)
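The new get_block_instance_names() helper walks the Efinity die XML and collects the instance names of every periphery block of a given type. Below is a minimal stand-alone sketch of the same ElementTree query; the namespace URI and the die file path are assumptions for illustration, not taken from the commit.

import xml.etree.ElementTree as et

# Assumed namespace mapping; the real one lives in EfinixDbParser.
namespaces = {"efxpt": "http://www.efinixinc.com/peri_design_db"}

def block_instance_names(die_xml_path, block):
    # Collect the 'name' attribute of every periphery_instance whose
    # 'block' attribute matches the requested block type (e.g. 'pll').
    root = et.parse(die_xml_path).getroot()
    return [p.get("name")
            for p in root.findall("efxpt:periphery_instance", namespaces)
            if p.get("block") == block]

# Hypothetical usage: block_instance_names("die/some_die.xml", "pll")
# would return something like ['PLL_TL0', 'PLL_TR0', ...] for that die.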
@@ -156,7 +156,9 @@ def _build_peri(efinity_path, build_name, partnumber, named_sc, named_pc, fragme

     tools.write_to_file("iface.py", header + gen + gpio + add + footer)

-    subprocess.call([efinity_path + '/bin/python3', 'iface.py'])
+    if subprocess.call([efinity_path + '/bin/python3', 'iface.py']) != 0:
+        raise OSError("Error occurred during Efinity peri script execution.")
+

 # Project configuration ------------------------------------------------------------------------
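Both this hunk and the efx_run.py hunk below replace a fire-and-forget subprocess.call() with an explicit return-code check, so a failing Efinity script now aborts the build instead of being silently ignored. A small sketch of the pattern, with a placeholder command:

import subprocess

# subprocess.call() returns the child's exit code; non-zero means failure.
# "python3 --version" is only a placeholder command for this sketch.
ret = subprocess.call(["python3", "--version"])
if ret != 0:
    raise OSError("Error occurred during Efinity peri script execution.")

# subprocess.check_call() would raise CalledProcessError automatically;
# the commit keeps subprocess.call() and raises OSError explicitly instead.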
@@ -316,7 +318,8 @@ class EfinityToolchain():

         # Run
         if run:
-            subprocess.call([self.efinity_path + '/scripts/efx_run.py', build_name + '.xml', '-f', 'compile'])
+            if subprocess.call([self.efinity_path + '/scripts/efx_run.py', build_name + '.xml', '-f', 'compile']) != 0:
+                raise OSError("Error occurred during efx_run script execution.")

         os.chdir(cwd)
@@ -22,9 +22,6 @@ class EfinixPlatform(GenericPlatform):
         self.timing_model = self.device[-2:]
         self.device = self.device[:-2]

-        self.pll_available = ['PLL_TL0', 'PLL_TR0', 'PLL_TR1', 'PLL_TR2', 'PLL_TR3', 'PLL_BR0', 'PLL_BR1', 'PLL_BR2', 'PLL_BL0']
-        self.pll_used = []
-
         if os.getenv("LITEX_ENV_EFINITY", False) == False:
             msg = "Unable to find or source Efinity toolchain, please either:\n"
             msg += "- Set LITEX_ENV_EFINITY environment variant to Efinity path.\n"
@@ -40,6 +37,8 @@ class EfinixPlatform(GenericPlatform):
             raise ValueError("Unknown toolchain")

         self.parser = EfinixDbParser(self.efinity_path, self.device)
+        self.pll_available = self.parser.get_block_instance_names('pll')
+        self.pll_used = []

     def get_verilog(self, *args, special_overrides=dict(), **kwargs):
         so = dict(common.efinix_special_overrides)
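Together with the removal in the previous hunk, the hard-coded PLL list is replaced by one queried from the device database, so each device exposes exactly the PLL instances its die actually provides. A self-contained sketch of the allocation pattern built on pll_available/pll_used follows; the allocate_pll helper is hypothetical, only the two attributes appear in the commit.

# Placeholder list; on the platform it comes from
# parser.get_block_instance_names('pll') for the selected device.
pll_available = ["PLL_TL0", "PLL_TR0", "PLL_BR0"]
pll_used = []

def allocate_pll():
    # Hand out the first PLL instance that has not been claimed yet.
    for name in pll_available:
        if name not in pll_used:
            pll_used.append(name)
            return name
    raise ValueError("No free PLL left on this device.")

print(allocate_pll())  # -> 'PLL_TL0'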
@@ -155,7 +155,10 @@ class ConnectorManager:
         r = []
         for identifier in identifiers:
             if ":" in identifier:
-                conn, pn = identifier.split(":")
+                try:
+                    conn, pn = identifier.split(":")
+                except ValueError as err:
+                    raise ValueError(f"\"{identifier}\" {err}") from err
                 if pn.isdigit():
                     pn = int(pn)
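The try/except turns a bare "too many values to unpack" into an error that names the offending constraint identifier. Behaviour of the added lines in isolation, with a made-up malformed identifier:

identifier = "gpio:1:extra"  # malformed example: splits into three parts
try:
    try:
        conn, pn = identifier.split(":")
    except ValueError as err:
        raise ValueError(f"\"{identifier}\" {err}") from err
except ValueError as exc:
    print(exc)  # "gpio:1:extra" too many values to unpack (expected 2)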
@@ -731,7 +731,7 @@ class SoC(Module):
         self.logger.info(colorer("Creating SoC... ({})".format(build_time())))
         self.logger.info(colorer("-"*80, color="bright"))
         self.logger.info("FPGA device : {}.".format(platform.device))
-        self.logger.info("System clock: {:3.2f}MHz.".format(sys_clk_freq/1e6))
+        self.logger.info("System clock: {:3.3f}MHz.".format(sys_clk_freq/1e6))

         # SoC attributes ---------------------------------------------------------------------------
         self.platform = platform
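The only change here is the format spec, {:3.2f} to {:3.3f}, so clock frequencies that are not a round number of MHz are logged with three decimals instead of two. For example:

sys_clk_freq = 66.666e6  # example frequency, not from the commit
print("System clock: {:3.2f}MHz.".format(sys_clk_freq/1e6))  # System clock: 66.67MHz.
print("System clock: {:3.3f}MHz.".format(sys_clk_freq/1e6))  # System clock: 66.666MHz.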
@@ -419,11 +419,18 @@ class CSRStorage(_CompoundCSR):
             self.sync += self.re.eq(sc.re)

     def read(self):
-        """Read method for simulation."""
+        """Read method for simulation.
+
+        Side effects: none (asynchronous)."""
         return (yield self.storage)

     def write(self, value):
-        """Write method for simulation."""
+        """Write method for simulation.
+
+        Side effects: synchronous advances simulation clk by one tick."""
+        if bits_for(value) > self.size:
+            raise ValueError(f"value {value} exceeds range of {self.size} bit CSR {self.name}.")
+
         yield self.storage.eq(value)
         yield self.re.eq(1)
         if hasattr(self, "fields"):
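CSRStorage.write() now rejects simulation writes that do not fit in the CSR instead of silently truncating them. A stand-alone sketch of the check (bits_for() comes from Migen; the simplified version below matches it for positive values only):

def bits_for(value):
    # Simplified stand-in for Migen's bits_for(), unsigned positive values only.
    return max(1, value.bit_length())

size  = 8      # hypothetical CSR width
value = 0x1FF  # needs 9 bits, so it must be rejected
if bits_for(value) > size:
    print(f"value {value} exceeds range of {size} bit CSR.")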