core: another cleanup/simplify pass

commit 2233bc290e
parent a269e67a10
@@ -126,7 +126,7 @@ if hasattr(soc, "io"):
 LiteScopeIO
 -----------
 Width: {}
-""".format(soc.io.dw)
+""".format(soc.io.data_width)
     )

     if hasattr(soc, "analyzer"):
@@ -136,7 +136,7 @@ LiteScopeAnalyzer
 Width: {}
 Depth: {}
 ===============================""".format(
-    soc.analyzer.dw,
+    soc.analyzer.data_width,
     soc.analyzer.depth
     )
     )
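Any external script that reads these attributes off the SoC needs the same rename. A minimal sketch, assuming a soc object that carries the io and analyzer submodules as in the hunks above:

    # Hedged sketch: external scripts must read data_width instead of dw.
    def print_litescope_info(soc):
        if hasattr(soc, "io"):
            print("LiteScopeIO width: {}".format(soc.io.data_width))
        if hasattr(soc, "analyzer"):
            print("LiteScopeAnalyzer width: {} depth: {}".format(
                soc.analyzer.data_width, soc.analyzer.depth))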
@@ -10,10 +10,10 @@ from litex.soc.interconnect import stream


 class LiteScopeIO(Module, AutoCSR):
-    def __init__(self, dw):
-        self.dw = dw
-        self.input = Signal(dw)
-        self.output = Signal(dw)
+    def __init__(self, data_width):
+        self.data_width = data_width
+        self.input = Signal(data_width)
+        self.output = Signal(data_width)

         # # #

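For reference, a minimal sketch of instantiating LiteScopeIO with the new parameter name; the import path, helper name and 8-bit width below are assumptions, not part of this commit:

    # Hedged sketch: LiteScopeIO now takes data_width instead of dw.
    from litescope import LiteScopeIO   # assumed import path

    def add_litescope_io(soc, width=8):
        # inside a LiteX SoC; CSR hookup depends on the LiteX version in use
        soc.submodules.io = LiteScopeIO(data_width=width)
        return soc.io   # io.input / io.output are Signal(data_width)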
@@ -23,21 +23,21 @@ class LiteScopeIO(Module, AutoCSR):
         return self.gpio.get_csrs()


-def core_layout(dw):
-    return [("data", dw), ("hit", 1)]
+def core_layout(data_width):
+    return [("data", data_width), ("hit", 1)]


-class FrontendTrigger(Module, AutoCSR):
-    def __init__(self, dw, depth=16):
-        self.sink = sink = stream.Endpoint(core_layout(dw))
-        self.source = source = stream.Endpoint(core_layout(dw))
+class _Trigger(Module, AutoCSR):
+    def __init__(self, data_width, depth=16):
+        self.sink = sink = stream.Endpoint(core_layout(data_width))
+        self.source = source = stream.Endpoint(core_layout(data_width))

         self.enable = CSRStorage()
         self.done = CSRStatus()

         self.mem_write = CSR()
-        self.mem_mask = CSRStorage(dw)
-        self.mem_value = CSRStorage(dw)
+        self.mem_mask = CSRStorage(data_width)
+        self.mem_value = CSRStorage(data_width)
         self.mem_full = CSRStatus()

         # # #
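Beyond the renames, the trigger is unchanged: terms are still pushed through the mem_write/mem_mask/mem_value CSRs into a small FIFO. Roughly, and as an assumption drawn from the surrounding code rather than from this diff, a sample hits when the masked data matches the masked value:

    # Hedged host-side model of a single trigger term (not the gateware itself):
    def term_hit(data, mask, value):
        return (data & mask) == (value & mask)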
@@ -53,7 +53,7 @@ class FrontendTrigger(Module, AutoCSR):
         self.specials += MultiReg(done, self.done.status)

         # memory and configuration
-        mem = stream.AsyncFIFO([("mask", dw), ("value", dw)], depth)
+        mem = stream.AsyncFIFO([("mask", data_width), ("value", data_width)], depth)
         mem = ClockDomainsRenamer({"write": "sys", "read": "scope"})(mem)
         self.submodules += mem
         self.comb += [
@@ -82,10 +82,10 @@ class FrontendTrigger(Module, AutoCSR):
         ]


-class FrontendSubSampler(Module, AutoCSR):
-    def __init__(self, dw):
-        self.sink = sink = stream.Endpoint(core_layout(dw))
-        self.source = source = stream.Endpoint(core_layout(dw))
+class _SubSampler(Module, AutoCSR):
+    def __init__(self, data_width):
+        self.sink = sink = stream.Endpoint(core_layout(data_width))
+        self.source = source = stream.Endpoint(core_layout(data_width))

         self.value = CSRStorage(16)

@@ -113,10 +113,10 @@ class FrontendSubSampler(Module, AutoCSR):
         ]


-class AnalyzerMux(Module, AutoCSR):
-    def __init__(self, dw, n):
-        self.sinks = sinks = [stream.Endpoint(core_layout(dw)) for i in range(n)]
-        self.source = source = stream.Endpoint(core_layout(dw))
+class _Mux(Module, AutoCSR):
+    def __init__(self, data_width, n):
+        self.sinks = sinks = [stream.Endpoint(core_layout(data_width)) for i in range(n)]
+        self.source = source = stream.Endpoint(core_layout(data_width))

         self.value = CSRStorage(bits_for(n))

@@ -131,25 +131,9 @@ class AnalyzerMux(Module, AutoCSR):
         self.comb += Case(value, cases)


-class AnalyzerFrontend(Module, AutoCSR):
-    def __init__(self, dw):
-        self.sink = stream.Endpoint(core_layout(dw))
-        self.source = stream.Endpoint(core_layout(dw))
-
-        # # #
-
-        self.submodules.trigger = FrontendTrigger(dw)
-        self.submodules.subsampler = FrontendSubSampler(dw)
-        self.submodules.pipeline = stream.Pipeline(
-            self.sink,
-            self.trigger,
-            self.subsampler,
-            self.source)
-
-
-class AnalyzerStorage(Module, AutoCSR):
-    def __init__(self, dw, depth):
-        self.sink = sink = stream.Endpoint(core_layout(dw))
+class _Storage(Module, AutoCSR):
+    def __init__(self, data_width, depth):
+        self.sink = sink = stream.Endpoint(core_layout(data_width))

         self.enable = CSRStorage()
         self.done = CSRStatus()
@@ -159,7 +143,7 @@ class AnalyzerStorage(Module, AutoCSR):

         self.mem_valid = CSRStatus()
         self.mem_ready = CSR()
-        self.mem_data = CSRStatus(dw)
+        self.mem_data = CSRStatus(data_width)

         # # #

@@ -185,9 +169,9 @@ class AnalyzerStorage(Module, AutoCSR):
         self.specials += MultiReg(done, self.done.status)

         # memory
-        mem = stream.SyncFIFO([("data", dw)], depth, buffered=True)
+        mem = stream.SyncFIFO([("data", data_width)], depth, buffered=True)
         mem = ClockDomainsRenamer("scope")(mem)
-        cdc = stream.AsyncFIFO([("data", dw)], 4)
+        cdc = stream.AsyncFIFO([("data", data_width)], 4)
         cdc = ClockDomainsRenamer(
             {"write": "scope", "read": "sys"})(cdc)
         self.submodules += mem, cdc
@@ -239,56 +223,66 @@ class AnalyzerStorage(Module, AutoCSR):
         ]


-def _format_groups(groups):
-    if not isinstance(groups, dict):
-        groups = {0 : groups}
-    new_groups = {}
-    for n, signals in groups.items():
-        if not isinstance(signals, list):
-            signals = [signals]
-
-        split_signals = []
-        for s in signals:
-            if isinstance(s, Record):
-                split_signals.extend(s.flatten())
-            else:
-                split_signals.append(s)
-        new_groups[n] = split_signals
-    return new_groups
-
-
 class LiteScopeAnalyzer(Module, AutoCSR):
     def __init__(self, groups, depth, cd="sys"):
-        self.groups = _format_groups(groups)
-        self.dw = max([sum([len(s) for s in g]) for g in self.groups.values()])
+        self.groups = groups = self.format_groups(groups)

         self.depth = depth

+        self.data_width = data_width = max([sum([len(s)
+            for s in g]) for g in groups.values()])
+
         # # #

+        # create scope clock domain
         self.clock_domains.cd_scope = ClockDomain()
         self.comb += [
             self.cd_scope.clk.eq(ClockSignal(cd)),
             self.cd_scope.rst.eq(ResetSignal(cd))
         ]

-        self.submodules.mux = AnalyzerMux(self.dw, len(self.groups))
-        for i, signals in self.groups.items():
+        # mux
+        self.submodules.mux = _Mux(data_width, len(groups))
+        for i, signals in groups.items():
             self.comb += [
                 self.mux.sinks[i].valid.eq(1),
                 self.mux.sinks[i].data.eq(Cat(signals))
             ]
-        self.submodules.frontend = AnalyzerFrontend(self.dw)
-        self.submodules.storage = AnalyzerStorage(self.dw, depth)
-        self.comb += [
-            self.mux.source.connect(self.frontend.sink),
-            self.frontend.source.connect(self.storage.sink)
-        ]
+
+        # frontend
+        self.submodules.trigger = _Trigger(data_width)
+        self.submodules.subsampler = _SubSampler(data_width)
+
+        # storage
+        self.submodules.storage = _Storage(data_width, depth)
+
+        # pipeline
+        self.submodules.pipeline = stream.Pipeline(
+            self.mux.source,
+            self.trigger,
+            self.subsampler,
+            self.storage.sink)
+
+    def format_groups(self, groups):
+        if not isinstance(groups, dict):
+            groups = {0 : groups}
+        new_groups = {}
+        for n, signals in groups.items():
+            if not isinstance(signals, list):
+                signals = [signals]
+
+            split_signals = []
+            for s in signals:
+                if isinstance(s, Record):
+                    split_signals.extend(s.flatten())
+                else:
+                    split_signals.append(s)
+            new_groups[n] = split_signals
+        return new_groups

     def export_csv(self, vns, filename):
         def format_line(*args):
             return ",".join(args) + "\n"
-        r = format_line("config", "None", "dw", str(self.dw))
+        r = format_line("config", "None", "data_width", str(self.data_width))
         r += format_line("config", "None", "depth", str(self.depth))
         for i, signals in self.groups.items():
             for s in signals:
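With the old AnalyzerFrontend folded into LiteScopeAnalyzer, a target now only instantiates the analyzer and exports its layout. A minimal sketch of gateware-side usage with the renamed attribute; the import path, helper name, probed signals, depth and "analyzer.csv" filename are assumptions, not part of this commit:

    # Hedged sketch of gateware-side usage after this commit.
    from litescope import LiteScopeAnalyzer   # assumed import path

    def add_analyzer(soc, depth=512):
        analyzer_signals = [
            soc.bus_signal_a,   # hypothetical signals to probe
            soc.bus_signal_b,
        ]
        soc.submodules.analyzer = LiteScopeAnalyzer(analyzer_signals, depth)

    # later, typically from the target's do_exit(vns) hook:
    #     soc.analyzer.export_csv(vns, "analyzer.csv")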
@@ -21,10 +21,10 @@ class LiteScopeAnalyzerDriver:
         self.get_layouts()
         self.build()
         self.group = 0
-        self.data = DumpData(self.dw)
+        self.data = DumpData(self.data_width)

-        # disable frontend and storage
-        self.frontend_trigger_enable.write(0)
+        # disable trigger and storage
+        self.trigger_enable.write(0)
         self.storage_enable.write(0)

     def get_config(self):
@@ -66,15 +66,15 @@ class LiteScopeAnalyzerDriver:
         self.mux_value.write(value)

     def add_trigger(self, value=0, mask=0, cond=None):
-        if self.frontend_trigger_mem_full.read():
+        if self.trigger_mem_full.read():
             raise ValueError("Trigger memory full, too much conditions")
         if cond is not None:
             for k, v in cond.items():
                 value |= getattr(self, k + "_o")*v
                 mask |= getattr(self, k + "_m")
-        self.frontend_trigger_mem_mask.write(mask)
-        self.frontend_trigger_mem_value.write(value)
-        self.frontend_trigger_mem_write.write(1)
+        self.trigger_mem_mask.write(mask)
+        self.trigger_mem_value.write(value)
+        self.trigger_mem_write.write(1)

     def add_rising_edge_trigger(self, name):
         self.add_trigger(getattr(self, name + "_o")*0, getattr(self, name + "_m"))
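The cond-dict path is unchanged: each key is resolved through the <name>_o / <name>_m attributes of the exported layout and OR-ed into value/mask before the renamed trigger_mem_* CSRs are written. A hedged host-side sketch, where analyzer is a LiteScopeAnalyzerDriver instance and "foo_valid" is an assumed signal name:

    # Trigger when the probed signal "foo_valid" is 1 (name is an assumption);
    # this writes trigger_mem_mask/value and pulses trigger_mem_write.
    analyzer.add_trigger(cond={"foo_valid": 1})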
@@ -88,7 +88,7 @@ class LiteScopeAnalyzerDriver:
         self.add_trigger(value, mask, cond)

     def configure_subsampler(self, value):
-        self.frontend_subsampler_value.write(value-1)
+        self.subsampler_value.write(value-1)

     def run(self, offset, length):
         if self.debug:
@@ -96,7 +96,7 @@ class LiteScopeAnalyzerDriver:
         self.storage_offset.write(offset)
         self.storage_length.write(length)
         self.storage_enable.write(1)
-        self.frontend_trigger_enable.write(1)
+        self.trigger_enable.write(1)

     def done(self):
         return self.storage_done.read()
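Putting the renamed driver calls together, a typical host-side capture looks roughly like this; the constructor arguments, the wb RemoteClient object, the polling loop and the upload()/save() steps are assumptions based on common LiteScope usage, not part of this diff:

    # Hedged sketch of a complete host-side capture with the renamed CSRs.
    analyzer = LiteScopeAnalyzerDriver(wb.regs, "analyzer", debug=True)  # args assumed
    analyzer.configure_subsampler(1)                 # keep every sample
    analyzer.add_rising_edge_trigger("foo_valid")    # hypothetical signal name
    analyzer.run(offset=16, length=256)

    while not analyzer.done():                       # polls storage_done
        pass

    analyzer.upload()             # assumed helper from the stock driver
    analyzer.save("dump.vcd")     # assumed helper / filename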