flow: network
parent 3c1dada9cf
commit 0b195a244d
@@ -1,22 +1,23 @@
import networkx as nx

from migen.fhdl import verilog
from migen.flow.ala import *
from migen.flow.plumbing import *
from migen.flow.network import *

def get_actor_fragments(*actors):
	return sum([a.get_control_fragment() + a.get_process_fragment() for a in actors], Fragment())

act = Adder(32)
comb = Combinator(act.operands, ["a"], ["b"])
comb = Combinator(act.operands.template(), ["a"], ["b"])
outbuf = Buffer(act.result.template())
frag = get_actor_fragments(act, comb, outbuf)

stb_a = comb.sinks[0].stb
ack_a = comb.sinks[0].ack
stb_b = comb.sinks[1].stb
ack_b = comb.sinks[1].ack
stb_a.name = "stb_a_i"
ack_a.name = "ack_a_o"
stb_b.name = "stb_b_i"
ack_b.name = "stb_b_o"
a = comb.ins[0].a
b = comb.ins[1].b
a.name = "a"
b.name = "b"
print(verilog.convert(frag, ios={stb_a, ack_a, stb_b, ack_b, a, b}))
g = nx.MultiDiGraph()
g.add_nodes_from([act, comb, outbuf])
add_connection(g, comb, act)
add_connection(g, act, outbuf)
c = CompositeActor(g)

frag = c.get_control_fragment() + c.get_process_fragment()

print(verilog.convert(frag))
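In this example, add_connection(g, comb, act) and add_connection(g, act, outbuf) can pick the endpoints automatically because each node involved has exactly one matching source or sink; when a node exposes several, the keyword arguments defined in the new network module select them explicitly. A minimal sketch, reusing names from the example:

# Sketch, not part of the commit: explicit endpoint selection, equivalent to
# add_connection(g, comb, act) here since comb has one source and act one sink.
add_connection(g, comb, act,
	source_ep=comb.sources()[0],
	sink_ep=act.sinks()[0])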
@@ -73,3 +73,6 @@ class Record:
			elif isinstance(e, Record):
				l += e.flatten()
		return l

	def __repr__(self):
		return repr(self.template())
@@ -20,11 +20,25 @@ class SchedulingModel:
			raise AttributeError

class Endpoint:
	def __init__(self, actor, token):
		self.actor = actor
	def __init__(self, token):
		self.token = token
		self.stb = Signal()
		self.ack = Signal()

	def __hash__(self):
		return id(self)

	def __repr__(self):
		return "<Endpoint " + str(self.token) + ">"

class Sink(Endpoint):
	def __repr__(self):
		return "<Sink " + str(self.token) + ">"

class Source(Endpoint):
	def __repr__(self):
		return "<Source " + str(self.token) + ">"

def _control_fragment_comb(stb_i, ack_o, stb_o, ack_i):
	return Fragment([stb_o.eq(stb_i), ack_o.eq(ack_i)])
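The stb/ack pair on every Endpoint forms the usual valid/acknowledge handshake: a token is transferred on a cycle where both are asserted. A behavioural sketch in plain Python (no migen required) of what _control_fragment_comb describes for a zero-latency actor:

# Sketch: the combinatorial control fragment makes the actor transparent,
# forwarding stb downstream and ack upstream.
def comb_handshake(stb_i, ack_i):
	stb_o = stb_i                   # data is offered downstream as soon as it arrives
	ack_o = ack_i                   # upstream is acknowledged when downstream accepts
	transferred = stb_i and ack_i   # a token moves only when both ends agree
	return stb_o, ack_o, transferred

assert comb_handshake(True, False) == (True, False, False)
assert comb_handshake(True, True) == (True, True, True)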
@@ -71,28 +85,47 @@ def _control_fragment_pipe(latency, stb_i, ack_o, stb_o, ack_i, pipe_ce):
	return Fragment(comb, sync)

class Actor:
	def __init__(self, scheduling_model, sinks, sources):
	def __init__(self, scheduling_model, sinks=None, sources=None, endpoints=None):
		self.scheduling_model = scheduling_model
		if isinstance(sinks, list):
			self.sinks = [Endpoint(self, sink) for sink in sinks]
		if endpoints is None:
			if isinstance(sinks, list):
				self.endpoints = [Sink(sink) for sink in sinks]
			else:
				self.endpoints = [Sink(sinks)]
			if isinstance(sources, list):
				self.endpoints += [Source(source) for source in sources]
			else:
				self.endpoints.append(Source(sources))
		else:
			self.sinks = [Endpoint(self, sinks)]
		if isinstance(sources, list):
			self.sources = [Endpoint(self, source) for source in sources]
		else:
			self.sources = [Endpoint(self, sources)]
			self.endpoints = endpoints
		if self.scheduling_model.model == SchedulingModel.SEQUENTIAL:
			self.trigger = Signal()
		elif self.scheduling_model.model == SchedulingModel.PIPELINE:
			self.pipe_ce = Signal()

	def sinks(self):
		return [x for x in self.endpoints if isinstance(x, Sink)]

	def sources(self):
		return [x for x in self.endpoints if isinstance(x, Source)]

	def get_control_fragment(self):
		if len(self.sinks) != 1 or len(self.sources) != 1:
		if len(self.endpoints) != 2:
			raise ValueError("Actors with automatic control fragment must have exactly two endpoints.")
		if isinstance(self.endpoints[0], Sink):
			assert(isinstance(self.endpoints[1], Source))
			sink = self.endpoints[0]
			source = self.endpoints[1]
		elif isinstance(self.endpoints[0], Source):
			assert(isinstance(self.endpoints[1], Sink))
			sink = self.endpoints[1]
			source = self.endpoints[0]
		else:
			raise ValueError("Actors with automatic control fragment must have one sink and one source. Consider using plumbing actors.")
		stb_i = self.sinks[0].stb
		ack_o = self.sinks[0].ack
		stb_o = self.sources[0].stb
		ack_i = self.sources[0].ack
		stb_i = sink.stb
		ack_o = sink.ack
		stb_o = source.stb
		ack_i = source.ack
		if self.scheduling_model.model == SchedulingModel.COMBINATORIAL:
			return _control_fragment_comb(stb_i, ack_o, stb_o, ack_i)
		elif self.scheduling_model.model == SchedulingModel.SEQUENTIAL:
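With the refactored constructor, a concrete actor can either hand in token records through sinks/sources and let Actor.__init__ wrap them into Sink and Source endpoints, or pass a prebuilt endpoints list (as CompositeActor does below). A sketch of a hypothetical pass-through actor written against this API, using the Record, Cat and Fragment helpers that appear elsewhere in this commit:

# Sketch, not part of the commit: PassThrough is a hypothetical single-sink,
# single-source actor, so the automatic control fragment applies to it.
class PassThrough(Actor):
	def __init__(self, template):
		self.d = Record(template)   # input token
		self.q = Record(template)   # output token
		Actor.__init__(self,
			SchedulingModel(SchedulingModel.COMBINATORIAL),
			sinks=self.d, sources=self.q)

	def get_process_fragment(self):
		# forward the token combinatorially, same Cat-over-flatten pattern
		# as get_conn_process_fragment below
		return Fragment([Cat(*self.q.flatten()).eq(Cat(*self.d.flatten()))])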
@@ -104,6 +137,24 @@ class Actor:

	def get_process_fragment(self):
		raise NotImplementedError("Actor classes must overload get_process_fragment")

	def __repr__(self):
		return "<Actor " + repr(self.scheduling_model) + " " + repr(self.sinks()) + " " + repr(self.sources()) + ">"

def get_actor_fragments(*actors):
	return sum([a.get_control_fragment() + a.get_process_fragment() for a in actors], Fragment())
def get_conn_control_fragment(source, sink):
	assert(isinstance(source, Source))
	assert(isinstance(sink, Sink))
	comb = [
		source.ack.eq(sink.ack),
		sink.stb.eq(source.stb)
	]
	return Fragment(comb)

def get_conn_process_fragment(source, sink):
	assert(isinstance(source, Source))
	assert(isinstance(sink, Sink))
	assert(sink.token.compatible(source.token))
	sigs_source = source.token.flatten()
	sigs_sink = sink.token.flatten()
	comb = [Cat(*sigs_sink).eq(Cat(*sigs_source))]
	return Fragment(comb)
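Written out for a single edge, the two connection helpers produce the handshake crossing and the payload copy. A sketch, where producer and consumer stand for any two actors with one source and one sink respectively:

# Sketch: wiring one connection by hand instead of going through CompositeActor.
src = producer.sources()[0]
snk = consumer.sinks()[0]
frag = get_conn_control_fragment(src, snk) + get_conn_process_fragment(src, snk)
# frag now contains: snk.stb driven from src.stb, src.ack driven from snk.ack,
# and a combinatorial copy of every flattened token signal from src to snk.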
@@ -0,0 +1,46 @@
import networkx as nx

from migen.flow.actor import *

class CompositeActor(Actor):
	def __init__(self, dfg):
		self.dfg = dfg
		# Internal unconnected endpoints become our endpoints. Determine them.
		our_endpoints = []
		for node in self.dfg:
			endpoints = set(node.endpoints)
			for u, v, d in self.dfg.in_edges([node], data=True):
				endpoints.remove(d['sink'])
			for u, v, d in self.dfg.out_edges([node], data=True):
				endpoints.remove(d['source'])
			our_endpoints += list(endpoints)
		Actor.__init__(self,
			SchedulingModel(SchedulingModel.DYNAMIC),
			endpoints=our_endpoints)

	def get_control_fragment(self):
		this = sum([get_conn_control_fragment(x[2]['source'], x[2]['sink'])
			for x in self.dfg.edges(data=True)], Fragment())
		others = sum([node.get_control_fragment() for node in self.dfg], Fragment())
		return this + others

	def get_process_fragment(self):
		this = sum([get_conn_process_fragment(x[2]['source'], x[2]['sink'])
			for x in self.dfg.edges(data=True)], Fragment())
		others = sum([node.get_process_fragment() for node in self.dfg], Fragment())
		return this + others

def add_connection(dfg, source_node, sink_node, source_ep=None, sink_ep=None):
	if source_ep is None:
		source_eps = source_node.sources()
		assert(len(source_eps) == 1)
		source_ep = source_eps[0]
	if sink_ep is None:
		sink_eps = sink_node.sinks()
		assert(len(sink_eps) == 1)
		sink_ep = sink_eps[0]
	assert(isinstance(source_ep, Source))
	assert(isinstance(sink_ep, Sink))
	assert(source_ep in source_node.endpoints)
	assert(sink_ep in sink_node.endpoints)
	dfg.add_edge(source_node, sink_node, source=source_ep, sink=sink_ep)
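The endpoint promotion in CompositeActor.__init__ is pure graph bookkeeping: every endpoint that no edge consumes bubbles up to become an endpoint of the composite. A standalone sketch of that rule with plain objects and networkx only (no migen), mirroring the comb -> act -> outbuf graph from the example:

# Sketch: endpoints not consumed by any edge remain external.
import networkx as nx

class Node:
	def __init__(self, name, endpoints):
		self.name, self.endpoints = name, endpoints

comb = Node("comb", ["sink_a", "sink_b", "src"])
act = Node("act", ["sink", "src"])
outbuf = Node("outbuf", ["sink", "src"])

g = nx.MultiDiGraph()
g.add_nodes_from([comb, act, outbuf])
g.add_edge(comb, act, source="src", sink="sink")
g.add_edge(act, outbuf, source="src", sink="sink")

ours = []
for node in g:
	eps = set(node.endpoints)
	for u, v, d in g.in_edges([node], data=True):
		eps.remove(d['sink'])
	for u, v, d in g.out_edges([node], data=True):
		eps.remove(d['source'])
	ours += list(eps)
print(ours)   # comb's two sinks and outbuf's source remain external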
@@ -14,30 +14,34 @@ class Buffer(Actor):
	def get_process_fragment(self):
		sigs_d = self.d.flatten()
		sigs_q = self.q.flatten()
		sync = [Cat(*sigs_q).eq(Cat(*sigs_d))]
		sync = [If(self.pipe_ce, Cat(*sigs_q).eq(Cat(*sigs_d)))]
		return Fragment(sync=sync)

class Combinator(Actor):
	def __init__(self, destination, *subrecords):
		self.ins = [destination.subrecord(*subr) for subr in subrecords]
	def __init__(self, template, *subrecords):
		self.destination = Record(template)
		self.ins = [self.destination.subrecord(*subr) for subr in subrecords]
		Actor.__init__(self,
			SchedulingModel(SchedulingModel.COMBINATORIAL),
			self.ins, destination)
			self.ins, self.destination)

	def get_process_fragment(self):
		return Fragment() # nothing to do

	def get_control_fragment(self):
		comb = [self.sources[0].stb.eq(optree('&', [sink.stb for sink in self.sinks]))]
		comb += [sink.ack.eq(self.sources[0].ack & self.sources[0].stb) for sink in self.sinks]
		source = self.sources()[0]
		sinks = self.sinks()
		comb = [source.stb.eq(optree('&', [sink.stb for sink in sinks]))]
		comb += [sink.ack.eq(source.ack & source.stb) for sink in sinks]
		return Fragment(comb)

class Splitter(Actor):
	def __init__(self, source, *subrecords):
		self.outs = [source.subrecord(*subr) for subr in subrecords]
	def __init__(self, template, *subrecords):
		self.source = Record(template)
		self.outs = [self.source.subrecord(*subr) for subr in subrecords]
		Actor.__init__(self,
			SchedulingModel(SchedulingModel.COMBINATORIAL),
			source, self.outs)
			self.source, self.outs)

	def get_process_fragment(self):
		return Fragment() # nothing to do
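With the template-based constructor, Combinator owns its destination record and exposes one Sink per requested subrecord plus a single Source carrying the assembled token. A sketch of what the example's instance looks like through the new endpoint accessors (names taken from the example file):

# Sketch: two independent stb/ack input streams, one merged output stream.
comb = Combinator(act.operands.template(), ["a"], ["b"])
sink_a, sink_b = comb.sinks()   # one handshake per input subrecord
(source,) = comb.sources()      # the combined operand token
# get_control_fragment() then generates:
#   source.stb = sink_a.stb & sink_b.stb                (wait for all inputs)
#   sink_a.ack = sink_b.ack = source.ack & source.stb   (consume them together)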