flow: saner endpoint management
commit 3c7161cc34
parent 20425703fa
@@ -154,8 +154,10 @@ def _try_module_name(frame):
 	else:
 		return None
 
-def _make_signal_name(name=None):
-	frame = inspect.currentframe().f_back.f_back
+def _make_signal_name(name=None, back=2):
+	frame = inspect.currentframe()
+	for i in range(back):
+		frame = frame.f_back
 
 	if name is None:
 		line = inspect.getframeinfo(frame).code_context[0]
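Note: the new back argument tells _make_signal_name how many stack frames to walk up before reading the caller's source line, so it can be invoked from different call depths (Actor.__init__ below passes back=1). A standalone sketch of the same frame-walking idea using only the standard inspect module; the helper names are illustrative, not part of migen:

    import inspect

    def caller_source_line(back=2):
        # Walk `back` frames up from here, as the patched _make_signal_name
        # does, then return the source text of the frame we land on.
        frame = inspect.currentframe()
        for _ in range(back):
            frame = frame.f_back
        return inspect.getframeinfo(frame).code_context[0].strip()

    def helper():
        # back=2 skips helper() and caller_source_line() itself.
        return caller_source_line(back=2)

    print(helper())  # prints the source text of this call site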
@@ -1,5 +1,7 @@
 from migen.fhdl.structure import *
+from migen.fhdl.structure import _make_signal_name
 from migen.corelogic.misc import optree
+from migen.corelogic.record import *
 
 class SchedulingModel:
 	COMBINATORIAL, SEQUENTIAL, PIPELINE, DYNAMIC = range(4)
@@ -97,17 +99,18 @@ def _control_fragment_pipe(latency, stb_i, ack_o, stb_o, ack_i, busy, pipe_ce):
 	return Fragment(comb, sync)
 
 class Actor:
-	def __init__(self, scheduling_model, sinks=None, sources=None, endpoints=None):
+	def __init__(self, scheduling_model, *endpoint_descriptions, endpoints=None):
 		self.scheduling_model = scheduling_model
 		if endpoints is None:
-			if isinstance(sinks, list):
-				self.endpoints = [Sink(sink) for sink in sinks]
-			else:
-				self.endpoints = [Sink(sinks)]
-			if isinstance(sources, list):
-				self.endpoints += [Source(source) for source in sources]
-			else:
-				self.endpoints.append(Source(sources))
+			self.endpoints = {}
+			for desc in endpoint_descriptions:
+				# desc: (name, Sink/Source, token layout or existing record)
+				if isinstance(desc[2], Record):
+					token = desc[2]
+				else:
+					token = Record(desc[2], name=_make_signal_name(desc[0], 1))
+				ep = desc[1](token)
+				self.endpoints[desc[0]] = ep
 		else:
 			self.endpoints = endpoints
 		self.busy = Signal()
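Note: endpoints are now declared as (name, Sink-or-Source class, token layout or existing Record) tuples, and self.endpoints becomes a dict keyed by name instead of a positional list. A hypothetical actor written against this revision could look like the sketch below (the actor, its field names and widths are illustrative only; Actor, Sink, Source, SchedulingModel, BV and Fragment are the identifiers used elsewhere in this commit):

    class PassThrough(Actor):
        def __init__(self, width):
            Actor.__init__(self,
                SchedulingModel(SchedulingModel.COMBINATORIAL),
                ("operands", Sink, [("a", BV(width))]),
                ("result", Source, [("r", BV(width))]))

        def get_process_fragment(self):
            # Token records are reached by endpoint name now.
            return Fragment([
                self.token("result").r.eq(self.token("operands").a)
            ])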
@@ -116,25 +119,25 @@ class Actor:
 		elif self.scheduling_model.model == SchedulingModel.PIPELINE:
 			self.pipe_ce = Signal()
 
+	def token(self, ep):
+		return self.endpoints[ep].token
+
+	def filter_endpoints(self, cl):
+		return [k for k, v in self.endpoints.items() if isinstance(v, cl)]
+
 	def sinks(self):
-		return [x for x in self.endpoints if isinstance(x, Sink)]
+		return self.filter_endpoints(Sink)
 
 	def sources(self):
-		return [x for x in self.endpoints if isinstance(x, Source)]
-
+		return self.filter_endpoints(Source)
+
 	def get_control_fragment(self):
-		if len(self.endpoints) != 2:
-			raise ValueError("Actors with automatic control fragment must have exactly two endpoints.")
-		if isinstance(self.endpoints[0], Sink):
-			assert(isinstance(self.endpoints[1], Source))
-			sink = self.endpoints[0]
-			source = self.endpoints[1]
-		elif isinstance(self.endpoints[0], Source):
-			assert(isinstance(self.endpoints[1], Sink))
-			sink = self.endpoints[1]
-			source = self.endpoints[0]
-		else:
-			raise ValueError("Actors with automatic control fragment must have one sink and one source. Consider using plumbing actors.")
+		def get_single_ep(l):
+			if len(l) != 1:
+				raise ValueError("Actors with automatic control fragment must have exactly one sink and one source. Consider using plumbing actors.")
+			return self.endpoints[l[0]]
+		sink = get_single_ep(self.sinks())
+		source = get_single_ep(self.sources())
 		stb_i = sink.stb
 		ack_o = sink.ack
 		stb_o = source.stb
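Note: sinks() and sources() now return endpoint names (dict keys) rather than endpoint objects; the objects themselves live in self.endpoints and their token records are reached through token(). Assuming the hypothetical PassThrough actor sketched above:

    a = PassThrough(16)
    a.sinks()                # ["operands"] -- names, no longer Sink objects
    a.sources()              # ["result"]
    a.endpoints["result"]    # the Source endpoint object itself
    a.token("operands").a    # the 'a' signal of the sink's token record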
@@ -7,15 +7,15 @@ from migen.corelogic import divider
 class _SimpleBinary(Actor):
 	def __init__(self, op, bv_op, bv_r):
 		self.op = op
-		self.operands = Record([('a', bv_op), ('b', bv_op)])
-		self.result = Record([('r', bv_r)])
 		Actor.__init__(self,
 			SchedulingModel(SchedulingModel.COMBINATORIAL),
-			self.operands, self.result)
+			("operands", Sink, [('a', bv_op), ('b', bv_op)]),
+			("result", Source, [('r', bv_r)]))
 	
 	def get_process_fragment(self):
 		return Fragment([
-			self.result.r.eq(_Operator(self.op, [self.operands.a, self.operands.b]))
+			self.token("result").r.eq(_Operator(self.op,
+				[self.token("operands").a, self.token("operands").b]))
 		])
 
 class Add(_SimpleBinary):
@@ -61,11 +61,10 @@ class NE(_SimpleBinary):
 class DivMod(Actor):
 	def __init__(self, width):
 		self.div = divider.Inst(width)
-		self.operands = Record([('dividend', self.div.dividend_i), ('divisor', self.div.divisor_i)])
-		self.result = Record([('quotient', self.div.quotient_o), ('remainder', self.div.remainder_o)])
 		Actor.__init__(self,
 			SchedulingModel(SchedulingModel.SEQUENTIAL, width),
-			self.operands, self.result)
+			("operands", Sink, [("dividend", self.div.dividend_i), ("divisor", self.div.divisor_i)]),
+			("result", Source, [("quotient", self.div.quotient_o), ("remainder", self.div.remainder_o)]))
 	
 	def get_process_fragment(self):
 		return self.div.get_fragment() + Fragment([self.div.start_i.eq(self.trigger)])
@@ -7,17 +7,18 @@ from migen.flow.network import *
 
 def _get_bin_sigs(a, b):
 	assert id(a.dfg) == id(b.dfg)
-	return (a.endp.token_signal(), b.endp.token_signal())
+	return (a.actor.endpoints[a.endp].token_signal(),
+		b.actor.endpoints[b.endp].token_signal())
 
 def _simple_binary(a, b, actor_class):
 	(signal_self, signal_other) = _get_bin_sigs(a, b)
 	width = max(signal_self.bv.width, signal_other.bv.width)
 	signed = signal_self.bv.signed and signal_other.bv.signed
 	actor = actor_class(BV(width, signed))
-	combinator = Combinator(actor.operands.layout(), ["a"], ["b"])
+	combinator = Combinator(actor.token("operands").layout(), ["a"], ["b"])
 	add_connection(a.dfg, combinator, actor)
-	add_connection(a.dfg, a.actor, combinator, a.endp, combinator.sinks()[0])
-	add_connection(a.dfg, b.actor, combinator, b.endp, combinator.sinks()[1])
+	add_connection(a.dfg, a.actor, combinator, a.endp, "sink0")
+	add_connection(a.dfg, b.actor, combinator, b.endp, "sink1")
 	return make_composable(a.dfg, actor)
 
 class ComposableSource():
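Note: connections are likewise made by endpoint name; "sink0" and "sink1" are the names the reworked Combinator (plumbing hunk below) generates for its sinks. A sketch of the call shape, with hypothetical nodes a_actor, b_actor, combinator and adder in a graph dfg:

    # add_connection(dfg, source_node, sink_node, source_ep=None, sink_ep=None)
    add_connection(dfg, a_actor, combinator, "result", "sink0")
    add_connection(dfg, b_actor, combinator, "result", "sink1")
    add_connection(dfg, combinator, adder)  # endpoints inferred when unambiguous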
@@ -65,11 +66,10 @@ class ComposableSource():
 		return _simple_binary(other, self, LE)
 
 def make_composable(dfg, actor):
-	sources = actor.sources()
-	l = [ComposableSource(dfg, actor, source) for source in sources]
-	if len(l) > 1:
-		return tuple(l)
-	elif len(l) > 0:
-		return l[0]
+	r = [ComposableSource(dfg, actor, k) for k in sorted(actor.sources())]
+	if len(r) > 1:
+		return tuple(r)
+	elif len(r) > 0:
+		return r[0]
 	else:
 		return None
@@ -5,24 +5,15 @@ from migen.flow.actor import *
 from migen.corelogic.misc import optree
 
 class CompositeActor(Actor):
-	def __init__(self, dfg):
+	def __init__(self, dfg): # TODO: endpoints
 		self.dfg = dfg
-		# Internal unconnected endpoints become our endpoints. Determine them.
-		our_endpoints = []
-		for node in self.dfg:
-			endpoints = set(node.endpoints)
-			for u, v, d in self.dfg.in_edges([node], data=True):
-				endpoints.remove(d['sink'])
-			for u, v, d in self.dfg.out_edges([node], data=True):
-				endpoints.remove(d['source'])
-			our_endpoints += list(endpoints)
 		Actor.__init__(self,
-			SchedulingModel(SchedulingModel.DYNAMIC),
-			endpoints=our_endpoints)
+			SchedulingModel(SchedulingModel.DYNAMIC))
 	
 	def get_fragment(self):
-		this = sum([get_conn_fragment(x[2]['source'], x[2]['sink'])
-			for x in self.dfg.edges(data=True)], Fragment())
+		this_fragments = [get_conn_fragment(x[0].endpoints[x[2]["source"]], x[1].endpoints[x[2]["sink"]])
+			for x in self.dfg.edges(data=True)]
+		this = sum(this_fragments, Fragment())
 		others = sum([node.get_fragment() for node in self.dfg], Fragment())
 		busy = Fragment([self.busy.eq(optree('|', [node.busy for node in self.dfg]))])
 		return this + others + busy
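Note: an edge of the dfg stores endpoint names, and get_fragment() resolves them back through each node's endpoints dict. The in_edges/out_edges/add_edge usage suggests dfg is a networkx graph; a runnable toy illustration of that lookup pattern with stand-in objects (not migen code):

    import networkx as nx

    class FakeActor:
        # Stand-in for an Actor: only the endpoints dict matters here.
        def __init__(self, **endpoints):
            self.endpoints = endpoints

    producer = FakeActor(result="<Source ep>")
    consumer = FakeActor(operands="<Sink ep>")

    dfg = nx.DiGraph()
    # add_connection() (next hunk) stores endpoint *names* on the edge:
    dfg.add_edge(producer, consumer, source="result", sink="operands")

    # get_fragment() resolves the names to endpoint objects per edge:
    for u, v, data in dfg.edges(data=True):
        print(u.endpoints[data["source"]], v.endpoints[data["sink"]])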
@@ -36,8 +27,4 @@ def add_connection(dfg, source_node, sink_node, source_ep=None, sink_ep=None):
 		sink_eps = sink_node.sinks()
 		assert(len(sink_eps) == 1)
 		sink_ep = sink_eps[0]
-	assert(isinstance(source_ep, Source))
-	assert(isinstance(sink_ep, Sink))
-	assert(source_ep in source_node.endpoints)
-	assert(sink_ep in sink_node.endpoints)
 	dfg.add_edge(source_node, sink_node, source=source_ep, sink=sink_ep)
@@ -5,39 +5,46 @@ from migen.corelogic.misc import optree
 
 class Buffer(Actor):
 	def __init__(self, layout):
-		self.d = Record(layout)
-		self.q = Record(layout)
 		Actor.__init__(self,
 			SchedulingModel(SchedulingModel.PIPELINE, 1),
-			self.d, self.q)
+			("d", Sink, layout), ("q", Source, layout))
 	
 	def get_process_fragment(self):
-		sigs_d = self.d.flatten()
-		sigs_q = self.q.flatten()
+		sigs_d = self.token("d").flatten()
+		sigs_q = self.token("q").flatten()
 		sync = [If(self.pipe_ce, Cat(*sigs_q).eq(Cat(*sigs_d)))]
 		return Fragment(sync=sync)
 
 class Combinator(Actor):
 	def __init__(self, layout, *subrecords):
-		self.destination = Record(layout)
-		self.ins = [self.destination.subrecord(*subr) for subr in subrecords]
+		source = Record(layout)
+		subrecords = [source.subrecord(*subr) for subr in subrecords]
+		eps = [("sink{0}".format(x[0]), Sink, x[1])
+			for x in zip(range(len(subrecords)), subrecords)]
+		ep_source = ("source", Source, source)
+		eps.append(ep_source)
 		Actor.__init__(self,
 			SchedulingModel(SchedulingModel.COMBINATORIAL),
-			self.ins, self.destination)
+			*eps)
 	
 	def get_fragment(self):
-		source = self.sources()[0]
-		sinks = self.sinks()
+		source = self.endpoints["source"]
+		sinks = [self.endpoints["sink{0}".format(n)]
+			for n in range(len(self.endpoints)-1)]
 		comb = [source.stb.eq(optree('&', [sink.stb for sink in sinks]))]
 		comb += [sink.ack.eq(source.ack & source.stb) for sink in sinks]
 		return Fragment(comb)
 
 class Splitter(Actor):
 	def __init__(self, layout, *subrecords):
-		self.source = Record(layout)
-		self.outs = [self.source.subrecord(*subr) for subr in subrecords]
+		sink = Record(layout)
+		subrecords = [sink.subrecord(*subr) for subr in subrecords]
+		eps = [("source{0}".format(x[0]), Source, x[1])
+			for x in zip(range(len(subrecords)), subrecords)]
+		ep_sink = ("sink", Sink, sink)
+		eps.append(ep_sink)
 		Actor.__init__(self,
 			SchedulingModel(SchedulingModel.COMBINATORIAL),
-			self.source, self.outs)
+			*eps)
 	
 	# TODO def get_fragment(self):
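Note: Combinator now names its endpoints "sink0", "sink1", ... plus a single "source" (Splitter mirrors this with "source0", ... and "sink"), which is what the composer relies on above. A usage sketch against this revision, with an illustrative two-field layout:

    comb = Combinator([("a", BV(16)), ("b", BV(16))], ["a"], ["b"])
    sorted(comb.sinks())      # ["sink0", "sink1"] -- sorted, since dict order is arbitrary
    comb.endpoints["source"]  # the single Source endpoint
    comb.token("sink0")       # the "a" subrecord that feeds the combined source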