New and Improved MapFusion #1629
Base: main (changes from 80 commits)
Changes to `_read_and_write_sets`:

```diff
@@ -747,6 +747,12 @@ def _read_and_write_sets(self) -> Tuple[Dict[AnyStr, List[Subset]], Dict[AnyStr,
         Determines what data is read and written in this subgraph, returning
         dictionaries from data containers to all subsets that are read/written.
         """
+
+        # Ensures that the `{src,dst}_subset` are properly set.
+        # TODO: find where the problems are
+        for edge in self.edges():
+            edge.data.try_initialize(self.sdfg, self, edge)
+
         read_set = collections.defaultdict(list)
         write_set = collections.defaultdict(list)
         from dace.sdfg import utils  # Avoid cyclic import
@@ -756,37 +762,59 @@ def _read_and_write_sets(self) -> Tuple[Dict[AnyStr, List[Subset]], Dict[AnyStr,
             ws = collections.defaultdict(list)
             # Traverse in topological order, so data that is written before it
             # is read is not counted in the read set
+            # TODO: This only works if every data descriptor is only once in a path.
             for n in utils.dfs_topological_sort(sg, sources=sg.source_nodes()):
-                if isinstance(n, nd.AccessNode):
-                    in_edges = sg.in_edges(n)
-                    out_edges = sg.out_edges(n)
-                    # Filter out memlets which go out but the same data is written to the AccessNode by another memlet
-                    for out_edge in list(out_edges):
-                        for in_edge in list(in_edges):
-                            if (in_edge.data.data == out_edge.data.data
-                                    and in_edge.data.dst_subset.covers(out_edge.data.src_subset)):
-                                out_edges.remove(out_edge)
-                                break
-
-                    for e in in_edges:
-                        # skip empty memlets
-                        if e.data.is_empty():
-                            continue
-                        # Store all subsets that have been written
-                        ws[n.data].append(e.data.subset)
-                    for e in out_edges:
-                        # skip empty memlets
-                        if e.data.is_empty():
-                            continue
-                        rs[n.data].append(e.data.subset)
+                if not isinstance(n, nd.AccessNode):
+                    continue
+                ac_desc = n.desc(self.sdfg)
+                ac_size = ac_desc.total_size
+                in_edges = [in_edge for in_edge in sg.in_edges(n) if not in_edge.data.is_empty()]
+                out_edges = [out_edge for out_edge in sg.out_edges(n) if not out_edge.data.is_empty()]
+
+                # In some conditions subsets can be `None`, we will now clean them.
+                in_subsets = dict()
+                for in_edge in in_edges:
+                    assert in_edge.data.dst_subset is not None or (in_edge.data.num_elements() == ac_size)
+                    in_subsets[in_edge] = (
+                        sbs.Range.from_array(ac_desc)
+                        if in_edge.data.dst_subset is None
+                        else in_edge.data.dst_subset
+                    )
+                out_subsets = dict()
+                for out_edge in out_edges:
+                    assert (out_edge.data.src_subset is not None) or (out_edge.data.num_elements() == ac_size)
+                    out_subsets[out_edge] = (
+                        sbs.Range.from_array(ac_desc)
+                        if out_edge.data.src_subset is None
+                        else out_edge.data.src_subset
+                    )
+
+                # Filter out memlets which go out but the same data is written to the AccessNode by another memlet
+                for out_edge in list(out_edges):
+                    for in_edge in in_edges:
+                        if out_edge.data.data != in_edge.data.data:
+                            # NOTE: This check does not make any sense, and is in my view wrong.
+                            #  As it will filter out some accesses but not all, which one solely
+                            #  depends on how the memlets were created.
+                            #  See also [issue #1643](https://github.com/spcl/dace/issues/1643).
+                            continue
+                        if in_subsets[in_edge].covers(out_subsets[out_edge]):
+                            out_edges.remove(out_edge)
+                            break
+
+                if in_edges:
+                    ws[n.data].extend(in_subsets.values())
+                if out_edges:
+                    rs[n.data].extend(out_subsets[out_edge] for out_edge in out_edges)

             # Union all subgraphs, so an array that was excluded from the read
             # set because it was written first is still included if it is read
             # in another subgraph
             for data, accesses in rs.items():
                 read_set[data] += accesses
             for data, accesses in ws.items():
                 write_set[data] += accesses
-        return read_set, write_set
+        return copy.deepcopy((read_set, write_set))

     def read_and_write_sets(self) -> Tuple[Set[AnyStr], Set[AnyStr]]:
         """
```
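For orientation, here is a small, self-contained sketch of what the surrounding public `read_and_write_sets` API reports for a toy SDFG. The example SDFG, its names, and the expected output are illustrative assumptions, not part of the PR:

```python
import dace

# Hypothetical toy SDFG: copy half of A into B through two access nodes.
sdfg = dace.SDFG("rw_sets_demo")
sdfg.add_array("A", [10], dace.float64)
sdfg.add_array("B", [10], dace.float64)
state = sdfg.add_state()

a = state.add_read("A")
b = state.add_write("B")
# One memlet reading A[0:5] and writing it into B[0:5].
state.add_nedge(a, b, dace.Memlet(data="A", subset="0:5", other_subset="0:5"))

# The public API reduces the per-subset dictionaries computed by
# `_read_and_write_sets` to plain sets of container names.
reads, writes = state.read_and_write_sets()
print(reads)   # expected: {'A'}
print(writes)  # expected: {'B'}
```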
Review comment on `return copy.deepcopy((read_set, write_set))`: Why is it necessary to make a copy here?

Author reply: Because the subsets are still linked to the memlets, so if you modify them you also change them on the memlets.
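A minimal sketch of the aliasing issue behind this reply (my own illustration, not code from the PR): without the deepcopy, the subsets handed back to the caller are the very objects stored on the memlets, so an in-place modification would also rewrite the memlet.

```python
import copy
from dace import subsets as sbs

# A subset as it might be stored on a memlet (e.g. its `dst_subset`).
memlet_subset = sbs.Range.from_string("0:10")

# Returning the subset directly hands out the same object:
aliased = memlet_subset
print(aliased is memlet_subset)    # True: in-place changes would hit the memlet too

# A deepcopy decouples the returned subsets from the memlets:
detached = copy.deepcopy(memlet_subset)
print(detached is memlet_subset)   # False: safe to modify independently
```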
Review comment: I am not 100% sure about the naming solution. Having 3/5 different kinds of MapFusion seems like a poor solution that will lead to confusion: between Serial, Parallel, OTF, and the original, the MapFusion versions are proliferating. Would it not be preferable to fix or choose one?

Author reply: I agree, but the original MapFusion was unable to do parallel fusing. The available solutions are: […] The best solution is probably 2., because then everything will work as before. But I do not mind either way; what do you think, @tbennun @acalotoiu?
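For reference, this is how the MapFusion transformation that currently ships with DaCe is applied. The toy program is my own illustration, and the class names of the serial/parallel variants discussed above are deliberately not shown, since their naming was still being decided:

```python
import dace
from dace.transformation.dataflow import MapFusion

@dace.program
def two_maps(A: dace.float64[100], B: dace.float64[100], C: dace.float64[100]):
    tmp = A + B        # produces the first map
    C[:] = tmp * 2.0   # produces the second map

sdfg = two_maps.to_sdfg()
sdfg.simplify()

# Repeatedly fuse adjacent maps with the existing (serial) MapFusion.
applied = sdfg.apply_transformations_repeated(MapFusion)
print(f"MapFusion applied {applied} time(s)")
```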
Review comment: Can you please add more comments to make it easier to follow what is being done?

Author reply: I added more comments.