mirror of
https://github.com/enjoy-digital/litex.git
synced 2025-01-04 09:52:26 -05:00
Token: support idle_wait
This commit is contained in:
parent
6f99241585
commit
a67f483f0f
4 changed files with 18 additions and 6 deletions
|
@ -29,7 +29,7 @@ def adrgen_gen():
|
|||
|
||||
def dumper_gen():
    """Simulation generator that endlessly pulls tokens from the "data"
    endpoint and prints each received payload.

    Each yielded Token is filled in by the simulation framework; the
    ``d`` field of its value dict is the datum that was transferred.
    ``idle_wait=True`` marks the pull as an idle wait (NOTE(review): per
    this commit, an actor whose pending tokens are all idle_wait reports
    itself as not busy — confirm against TokenExchanger).
    """
    while True:
        t = Token("data", idle_wait=True)
        yield t
        print("Received: " + str(t.value["d"]))
|
||||
|
||||
|
|
|
@ -14,6 +14,7 @@ class TokenExchanger(PureSimulable):
|
|||
self.generator = generator
|
||||
self.actor = actor
|
||||
self.active = set()
|
||||
self.busy = True
|
||||
self.done = False
|
||||
|
||||
def _process_transactions(self, s):
|
||||
|
@ -39,12 +40,15 @@ class TokenExchanger(PureSimulable):
|
|||
else:
|
||||
raise TypeError
|
||||
self.active -= completed
|
||||
if not self.active:
|
||||
self.busy = True
|
||||
|
||||
def _next_transactions(self):
|
||||
try:
|
||||
transactions = next(self.generator)
|
||||
except StopIteration:
|
||||
self.done = True
|
||||
self.busy = False
|
||||
transactions = None
|
||||
if isinstance(transactions, Token):
|
||||
self.active = {transactions}
|
||||
|
@ -56,6 +60,8 @@ class TokenExchanger(PureSimulable):
|
|||
self.active = set()
|
||||
else:
|
||||
raise TypeError
|
||||
if all(transaction.idle_wait for transaction in self.active):
|
||||
self.busy = False
|
||||
|
||||
def do_simulation(self, s):
|
||||
if not self.done:
|
||||
|
@ -70,7 +76,7 @@ class SimActor(Actor):
|
|||
self.token_exchanger = TokenExchanger(generator, self)
|
||||
|
||||
def update_busy(self, s):
|
||||
s.wr(self.busy, not self.token_exchanger.done)
|
||||
s.wr(self.busy, self.token_exchanger.busy)
|
||||
|
||||
def get_fragment(self):
|
||||
return self.token_exchanger.get_fragment() + Fragment(sim=[self.update_busy])
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
class Token:
    """A dataflow transaction exchanged between a simulation generator
    and an actor endpoint.

    Parameters:
        endpoint: name of the actor endpoint this token targets.
        value: payload dict for a push, or None for a pull (the
            framework fills it in when the transaction completes).
        idle_wait: when True, waiting on this token does not count as
            activity (the actor may report itself as not busy while the
            token is pending). Defaults to False, preserving the
            pre-existing behavior for callers that do not pass it.
    """
    def __init__(self, endpoint, value=None, idle_wait=False):
        self.endpoint = endpoint
        self.value = value
        self.idle_wait = idle_wait
|
||||
|
|
|
@ -13,6 +13,8 @@ from migen.pytholite.expr import ExprCompiler
|
|||
class Pytholite(UnifiedIOObject):
|
||||
def __init__(self, dataflow=None, buses={}):
|
||||
super().__init__(dataflow, buses)
|
||||
if dataflow is not None:
|
||||
self.busy.reset = 1
|
||||
self.memory_ports = dict((mem, mem.get_port(write_capable=True, we_granularity=8))
|
||||
for mem in self._memories)
|
||||
|
||||
|
@ -43,14 +45,18 @@ class _TokenPullExprCompiler(ExprCompiler):
|
|||
def _gen_df_io(compiler, modelname, to_model, from_model):
|
||||
epname = ast.literal_eval(to_model["endpoint"])
|
||||
values = to_model["value"]
|
||||
idle_wait = ast.literal_eval(to_model["idle_wait"])
|
||||
ep = compiler.ioo.endpoints[epname]
|
||||
if idle_wait:
|
||||
state = [compiler.ioo.busy.eq(0)]
|
||||
else:
|
||||
state = []
|
||||
|
||||
if isinstance(values, ast.Name) and values.id == "None":
|
||||
# token pull from sink
|
||||
if not isinstance(ep, Sink):
|
||||
raise TypeError("Attempted to pull from source")
|
||||
ec = _TokenPullExprCompiler(compiler.symdict, modelname, ep)
|
||||
state = []
|
||||
for target_regs, expr in from_model:
|
||||
cexpr = ec.visit_expr(expr)
|
||||
state += [reg.load(cexpr) for reg in target_regs]
|
||||
|
@ -67,7 +73,6 @@ def _gen_df_io(compiler, modelname, to_model, from_model):
|
|||
raise TypeError("Attempted to read from pushed token")
|
||||
if not isinstance(values, ast.Dict):
|
||||
raise NotImplementedError
|
||||
state = []
|
||||
for akey, value in zip(values.keys, values.values):
|
||||
key = ast.literal_eval(akey)
|
||||
signal = getattr(ep.token, key)
|
||||
|
@ -190,6 +195,7 @@ def gen_io(compiler, modelname, model, to_model, to_model_kw, from_model):
|
|||
desc = [
|
||||
"endpoint",
|
||||
("value", ast.Name("None", ast.Load())),
|
||||
("idle_wait", ast.Name("False", ast.Load()))
|
||||
]
|
||||
args = _decode_args(desc, to_model, to_model_kw)
|
||||
return _gen_df_io(compiler, modelname, args, from_model)
|
||||
|
|
Loading…
Reference in a new issue