-
Notifications
You must be signed in to change notification settings - Fork 0
/
ranker_viz
276 lines (276 loc) · 9.3 KB
/
ranker_viz
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
// Graphviz DOT dump of a reverse-mode (autograd) computation graph.
// NOTE(review): the node labels (AddmmBackward0, AccumulateGrad, CatBackward0,
// TBackward0, MinBackward1, MaxBackward1) match PyTorch grad_fn names and the
// overall layout matches torchviz's make_dot() output — presumably generated
// from a model with parameter groups `full_subnets.0..9`, `partial_subnets.0..4`
// and `output_layer`; confirm against the producing script ("ranker_viz").
//
// Legend, from the attributes actually used below:
//   - lightblue boxes       : named parameter tensors (label = name + shape)
//   - darkolivegreen1 boxes : output tensors (label = shape only)
//   - plain boxes           : backward (grad_fn) operation nodes
//   - edges A -> B          : A feeds B in the backward graph
// The long integer node ids are runtime object addresses, so they differ on
// every regeneration of this file — do not rely on them being stable.
digraph {
graph [size="35.699999999999996,35.699999999999996"]
// Default node styling: small filled monospace boxes.
node [align=left fontname=monospace fontsize=10 height=0.2 ranksep=0.1 shape=box style=filled]
// First output tensor, shape (1, 5); fed by the DivBackward0 chain ending
// at the edge near the bottom of this section.
1701653345152 [label="
(1, 5)" fillcolor=darkolivegreen1]
1701651673920 [label=DivBackward0]
1701651676704 -> 1701651673920
1701651676704 [label=SubBackward0]
1701651671472 -> 1701651676704
1701651671472 [label=AddmmBackward0]
1701651674880 -> 1701651671472
// output_layer bias parameter leaf.
1701653343472 [label="output_layer.bias
(5)" fillcolor=lightblue]
1701653343472 -> 1701651674880
1701651674880 [label=AccumulateGrad]
1701651674496 -> 1701651671472
1701651674496 [label=CatBackward0]
1701651674784 -> 1701651674496
1701651674784 [label=AddmmBackward0]
1701651673728 -> 1701651674784
// partial_subnets.0 parameters; its input is a concat over full_subnets 0-3.
1701653342352 [label="partial_subnets.0.bias
(1)" fillcolor=lightblue]
1701653342352 -> 1701651673728
1701651673728 [label=AccumulateGrad]
1701651673296 -> 1701651674784
1701651673296 [label=CatBackward0]
1701651675936 -> 1701651673296
// full_subnets.k blocks follow a repeating pattern:
// bias -> AccumulateGrad and weight -> AccumulateGrad -> TBackward0,
// both feeding one AddmmBackward0 node.
1701651675936 [label=AddmmBackward0]
1701651671568 -> 1701651675936
1701653340192 [label="full_subnets.0.bias
(1)" fillcolor=lightblue]
1701653340192 -> 1701651671568
1701651671568 [label=AccumulateGrad]
1701651676176 -> 1701651675936
1701651676176 [label=TBackward0]
1701651673824 -> 1701651676176
1701652716720 [label="full_subnets.0.weight
(1, 5)" fillcolor=lightblue]
1701652716720 -> 1701651673824
1701651673824 [label=AccumulateGrad]
1701651676272 -> 1701651673296
1701651676272 [label=AddmmBackward0]
1701651673776 -> 1701651676272
1701653341472 [label="full_subnets.1.bias
(1)" fillcolor=lightblue]
1701653341472 -> 1701651673776
1701651673776 [label=AccumulateGrad]
1701651671760 -> 1701651676272
1701651671760 [label=TBackward0]
1701651673632 -> 1701651671760
1701653340832 [label="full_subnets.1.weight
(1, 5)" fillcolor=lightblue]
1701653340832 -> 1701651673632
1701651673632 [label=AccumulateGrad]
1701651674160 -> 1701651673296
1701651674160 [label=AddmmBackward0]
1701651671184 -> 1701651674160
1701653341552 [label="full_subnets.2.bias
(1)" fillcolor=lightblue]
1701653341552 -> 1701651671184
1701651671184 [label=AccumulateGrad]
1701651671616 -> 1701651674160
1701651671616 [label=TBackward0]
1701651671232 -> 1701651671616
1701653342272 [label="full_subnets.2.weight
(1, 5)" fillcolor=lightblue]
1701653342272 -> 1701651671232
1701651671232 [label=AccumulateGrad]
1701651674112 -> 1701651673296
1701651674112 [label=AddmmBackward0]
1701651672240 -> 1701651674112
1701653342192 [label="full_subnets.3.bias
(1)" fillcolor=lightblue]
1701653342192 -> 1701651672240
1701651672240 [label=AccumulateGrad]
1701651671952 -> 1701651674112
1701651671952 [label=TBackward0]
1701651672192 -> 1701651671952
1701653342032 [label="full_subnets.3.weight
(1, 5)" fillcolor=lightblue]
1701653342032 -> 1701651672192
1701651672192 [label=AccumulateGrad]
1701651673152 -> 1701651674784
1701651673152 [label=TBackward0]
1701651672000 -> 1701651673152
1701653343232 [label="partial_subnets.0.weight
(1, 4)" fillcolor=lightblue]
1701653343232 -> 1701651672000
1701651672000 [label=AccumulateGrad]
1701651676800 -> 1701651674496
1701651676800 [label=AddmmBackward0]
1701651672288 -> 1701651676800
// partial_subnets.1; its CatBackward0 input reuses the already-defined
// full_subnets.0 Addmm node (1701651675936) — the subgraph is shared,
// not duplicated.
1701653342672 [label="partial_subnets.1.bias
(1)" fillcolor=lightblue]
1701653342672 -> 1701651672288
1701651672288 [label=AccumulateGrad]
1701651676032 -> 1701651676800
1701651676032 [label=CatBackward0]
1701651675936 -> 1701651676032
1701651676320 -> 1701651676032
1701651676320 [label=AddmmBackward0]
1701651671376 -> 1701651676320
1701653340672 [label="full_subnets.4.bias
(1)" fillcolor=lightblue]
1701653340672 -> 1701651671376
1701651671376 [label=AccumulateGrad]
1701651674832 -> 1701651676320
1701651674832 [label=TBackward0]
1701651671280 -> 1701651674832
1701653340752 [label="full_subnets.4.weight
(1, 5)" fillcolor=lightblue]
1701653340752 -> 1701651671280
1701651671280 [label=AccumulateGrad]
1701651672144 -> 1701651676032
1701651672144 [label=AddmmBackward0]
1701651674400 -> 1701651672144
1701653341232 [label="full_subnets.5.bias
(1)" fillcolor=lightblue]
1701653341232 -> 1701651674400
1701651674400 [label=AccumulateGrad]
1701651673056 -> 1701651672144
1701651673056 [label=TBackward0]
1701651674208 -> 1701651673056
1701653341632 [label="full_subnets.5.weight
(1, 5)" fillcolor=lightblue]
1701653341632 -> 1701651674208
1701651674208 [label=AccumulateGrad]
1701651671520 -> 1701651676032
1701651671520 [label=AddmmBackward0]
1701651671328 -> 1701651671520
1701653341392 [label="full_subnets.6.bias
(1)" fillcolor=lightblue]
1701653341392 -> 1701651671328
1701651671328 [label=AccumulateGrad]
1701651672576 -> 1701651671520
1701651672576 [label=TBackward0]
1701651674064 -> 1701651672576
1701653342752 [label="full_subnets.6.weight
(1, 5)" fillcolor=lightblue]
1701653342752 -> 1701651674064
1701651674064 [label=AccumulateGrad]
1701651675888 -> 1701651676800
1701651675888 [label=TBackward0]
1701651676848 -> 1701651675888
1701651708272 [label="partial_subnets.1.weight
(1, 4)" fillcolor=lightblue]
1701651708272 -> 1701651676848
1701651676848 [label=AccumulateGrad]
1701651675840 -> 1701651674496
1701651675840 [label=AddmmBackward0]
1701651673248 -> 1701651675840
// partial_subnets.2; concat reuses full_subnets.1 and full_subnets.4 nodes.
1701653342912 [label="partial_subnets.2.bias
(1)" fillcolor=lightblue]
1701653342912 -> 1701651673248
1701651673248 [label=AccumulateGrad]
1701651671712 -> 1701651675840
1701651671712 [label=CatBackward0]
1701651676272 -> 1701651671712
1701651676320 -> 1701651671712
1701651675360 -> 1701651671712
1701651675360 [label=AddmmBackward0]
1701651672096 -> 1701651675360
1701653342592 [label="full_subnets.7.bias
(1)" fillcolor=lightblue]
1701653342592 -> 1701651672096
1701651672096 [label=AccumulateGrad]
1701651672480 -> 1701651675360
1701651672480 [label=TBackward0]
1701589962608 -> 1701651672480
1701653342512 [label="full_subnets.7.weight
(1, 5)" fillcolor=lightblue]
1701653342512 -> 1701589962608
1701589962608 [label=AccumulateGrad]
1701651675744 -> 1701651671712
1701651675744 [label=AddmmBackward0]
1702483047664 -> 1701651675744
1701653342112 [label="full_subnets.8.bias
(1)" fillcolor=lightblue]
1701653342112 -> 1702483047664
1702483047664 [label=AccumulateGrad]
1701651673968 -> 1701651675744
1701651673968 [label=TBackward0]
1701650382080 -> 1701651673968
1701653341952 [label="full_subnets.8.weight
(1, 5)" fillcolor=lightblue]
1701653341952 -> 1701650382080
1701650382080 [label=AccumulateGrad]
1701651672336 -> 1701651675840
1701651672336 [label=TBackward0]
1701651676896 -> 1701651672336
1701653342832 [label="partial_subnets.2.weight
(1, 4)" fillcolor=lightblue]
1701653342832 -> 1701651676896
1701651676896 [label=AccumulateGrad]
1701651676128 -> 1701651674496
1701651676128 [label=AddmmBackward0]
1701651676656 -> 1701651676128
// partial_subnets.3; concat reuses full_subnets 2, 5 and 7 nodes.
1701653344192 [label="partial_subnets.3.bias
(1)" fillcolor=lightblue]
1701653344192 -> 1701651676656
1701651676656 [label=AccumulateGrad]
1701650374352 -> 1701651676128
1701650374352 [label=CatBackward0]
1701651674160 -> 1701650374352
1701651672144 -> 1701650374352
1701651675360 -> 1701650374352
1701650374256 -> 1701650374352
1701650374256 [label=AddmmBackward0]
1701650374304 -> 1701650374256
1701653342432 [label="full_subnets.9.bias
(1)" fillcolor=lightblue]
1701653342432 -> 1701650374304
1701650374304 [label=AccumulateGrad]
1701650374160 -> 1701650374256
1701650374160 [label=TBackward0]
1701650367872 -> 1701650374160
1701653343152 [label="full_subnets.9.weight
(1, 5)" fillcolor=lightblue]
1701653343152 -> 1701650367872
1701650367872 [label=AccumulateGrad]
1701650381696 -> 1701651676128
1701650381696 [label=TBackward0]
1701650374736 -> 1701650381696
1701653343712 [label="partial_subnets.3.weight
(1, 4)" fillcolor=lightblue]
1701653343712 -> 1701650374736
1701650374736 [label=AccumulateGrad]
1701651673536 -> 1701651674496
1701651673536 [label=AddmmBackward0]
1701650374496 -> 1701651673536
// partial_subnets.4; concat reuses full_subnets 3, 6, 8 and 9 nodes.
1701653344112 [label="partial_subnets.4.bias
(1)" fillcolor=lightblue]
1701653344112 -> 1701650374496
1701650374496 [label=AccumulateGrad]
1701650368064 -> 1701651673536
1701650368064 [label=CatBackward0]
1701651674112 -> 1701650368064
1701651671520 -> 1701650368064
1701651675744 -> 1701650368064
1701650374256 -> 1701650368064
1701650367968 -> 1701651673536
1701650367968 [label=TBackward0]
1701650381984 -> 1701650367968
1701653344032 [label="partial_subnets.4.weight
(1, 4)" fillcolor=lightblue]
1701653344032 -> 1701650381984
1701650381984 [label=AccumulateGrad]
1701651674688 -> 1701651671472
1701651674688 [label=TBackward0]
1701651673584 -> 1701651674688
1701653343792 [label="output_layer.weight
(5, 5)" fillcolor=lightblue]
1701653343792 -> 1701651673584
1701651673584 [label=AccumulateGrad]
// Tail of the first output's chain: Sub(x, Min(x)) then Div by Max(...) —
// a min/max normalization pattern over the output_layer result.
1701651672672 -> 1701651676704
1701651672672 [label=MinBackward1]
1701651671472 -> 1701651672672
1701651674544 -> 1701651673920
1701651674544 [label=MaxBackward1]
1701651676704 -> 1701651674544
1701651673920 -> 1701653345152
// Second output tensor, shape (1, 10): a concat of all ten full_subnets
// Addmm nodes already defined above (nodes are shared, not redefined).
1701653448320 [label="
(1, 10)" fillcolor=darkolivegreen1]
1701651673200 [label=CatBackward0]
1701651675936 -> 1701651673200
1701651676272 -> 1701651673200
1701651674160 -> 1701651673200
1701651674112 -> 1701651673200
1701651676320 -> 1701651673200
1701651672144 -> 1701651673200
1701651671520 -> 1701651673200
1701651675360 -> 1701651673200
1701651675744 -> 1701651673200
1701650374256 -> 1701651673200
1701651673200 -> 1701653448320
}