MyCaffe  1.12.2.41
Deep learning software for Windows C# programmers.
ConvolutionOctaveLayer.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using MyCaffe.basecode;
using MyCaffe.common;
using MyCaffe.param;
using MyCaffe.param.beta;

namespace MyCaffe.layers.beta
{
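    /// <summary>
    /// The ConvolutionOctaveLayer processes high and low frequency portions of images using
    /// convolution, keeping a full-resolution high-frequency path and a spatially
    /// downsampled low-frequency path (octave convolution).
    /// </summary>
    /// <typeparam name="T">Specifies the base type of 'float' or 'double'.</typeparam>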
    public class ConvolutionOctaveLayer<T> : Layer<T>
    {
        double m_dfAlphaIn = 0.5;
        double m_dfAlphaOut = 0.5;
        int m_nStride;
        Layer<T> m_downsampleLayer1;
        Layer<T> m_downsampleLayer2;
        Layer<T> m_downsampleLayer3;
        Layer<T> m_upsampleLayer;
        Layer<T> m_conv_l2l = null;
        Layer<T> m_conv_l2h = null;
        Layer<T> m_conv_h2l = null;
        Layer<T> m_conv_h2h = null;
        Layer<T> m_add = null;
        Blob<T> m_blob_x_h = null;
        Blob<T> m_blob_x_l = null;
        Blob<T> m_blob_x_h_ds = null;
        Blob<T> m_blob_x_l_ds = null;
        Blob<T> m_blob_x_h2h = null;
        Blob<T> m_blob_x_h2l = null;
        Blob<T> m_blob_x_l2l = null;
        Blob<T> m_blob_x_l2h = null;
        Blob<T> m_blob_x_l2h_us = null;
        BlobCollection<T> m_rgBtm = new BlobCollection<T>();
        BlobCollection<T> m_rgTop = new BlobCollection<T>();
        long m_hWorkspaceData = 0;
        ulong m_lWorkspaceSizeInBytes = 0;
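        // m_hWorkspaceData is a single GPU workspace shared by all internal convolution
        // layers via the layer_OnGetWorkspace/layer_OnSetWorkspace handlers below.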

        /// <summary>
        /// The ConvolutionOctaveLayer constructor.
        /// </summary>
        /// <param name="cuda">Specifies the CudaDnn connection to Cuda.</param>
        /// <param name="log">Specifies the Log for output.</param>
        /// <param name="p">Specifies the LayerParameter describing the Layer.</param>
        public ConvolutionOctaveLayer(CudaDnn<T> cuda, Log log, LayerParameter p)
            : base(cuda, log, p)
        {
            m_type = LayerParameter.LayerType.CONVOLUTION_OCTAVE;
        }

        /// <summary>
        /// Release all resources used.
        /// </summary>
        protected override void dispose()
        {
            if (m_downsampleLayer1 != null)
            {
                m_downsampleLayer1.Dispose();
                m_downsampleLayer1 = null;
            }

            if (m_downsampleLayer2 != null)
            {
                m_downsampleLayer2.Dispose();
                m_downsampleLayer2 = null;
            }

            if (m_downsampleLayer3 != null)
            {
                m_downsampleLayer3.Dispose();
                m_downsampleLayer3 = null;
            }

            if (m_upsampleLayer != null)
            {
                m_upsampleLayer.Dispose();
                m_upsampleLayer = null;
            }

            if (m_conv_l2l != null)
            {
                m_conv_l2l.Dispose();
                m_conv_l2l = null;
            }

            if (m_conv_l2h != null)
            {
                m_conv_l2h.Dispose();
                m_conv_l2h = null;
            }

            if (m_conv_h2l != null)
            {
                m_conv_h2l.Dispose();
                m_conv_h2l = null;
            }

            if (m_conv_h2h != null)
            {
                m_conv_h2h.Dispose();
                m_conv_h2h = null;
            }

            if (m_add != null)
            {
                m_add.Dispose();
                m_add = null;
            }

            if (m_blob_x_h != null)
            {
                m_blob_x_h.Dispose();
                m_blob_x_h = null;
            }

            if (m_blob_x_l != null)
            {
                m_blob_x_l.Dispose();
                m_blob_x_l = null;
            }

            if (m_blob_x_h_ds != null)
            {
                m_blob_x_h_ds.Dispose();
                m_blob_x_h_ds = null;
            }

            if (m_blob_x_l_ds != null)
            {
                m_blob_x_l_ds.Dispose();
                m_blob_x_l_ds = null;
            }

            if (m_blob_x_h2h != null)
            {
                m_blob_x_h2h.Dispose();
                m_blob_x_h2h = null;
            }

            if (m_blob_x_h2l != null)
            {
                m_blob_x_h2l.Dispose();
                m_blob_x_h2l = null;
            }

            if (m_blob_x_l2l != null)
            {
                m_blob_x_l2l.Dispose();
                m_blob_x_l2l = null;
            }

            if (m_blob_x_l2h != null)
            {
                m_blob_x_l2h.Dispose();
                m_blob_x_l2h = null;
            }

            if (m_blob_x_l2h_us != null)
            {
                m_blob_x_l2h_us.Dispose();
                m_blob_x_l2h_us = null;
            }

            if (m_hWorkspaceData != 0)
            {
                m_cuda.FreeMemory(m_hWorkspaceData);
                m_hWorkspaceData = 0;
            }

            base.dispose();
        }

        /// <summary>
        /// Returns the minimum number of required bottom (input) Blobs: input.
        /// </summary>
        public override int MinBottomBlobs
        {
            get { return 1; }
        }

        /// <summary>
        /// Returns the maximum number of bottom (input) Blobs: in_h, in_l.
        /// </summary>
        public override int MaxBottomBlobs
        {
            get { return 2; }
        }

        /// <summary>
        /// Returns the minimum number of required top (output) Blobs: output.
        /// </summary>
        public override int MinTopBlobs
        {
            get { return 1; }
        }

        /// <summary>
        /// Returns the maximum number of top (output) Blobs: out_h, out_l.
        /// </summary>
        public override int MaxTopBlobs
        {
            get { return 2; }
        }

        /// <summary>
        /// Setup the layer.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection<T> colBottom, BlobCollection<T> colTop)
        {
            m_dfAlphaIn = m_param.convolution_octave_param.alpha_in;
            m_dfAlphaOut = m_param.convolution_octave_param.alpha_out;

            m_log.CHECK_GE(m_dfAlphaIn, 0, "The alpha in must be >= 0.");
            m_log.CHECK_LE(m_dfAlphaIn, 1, "The alpha in must be <= 1.");
            m_log.CHECK_GE(m_dfAlphaOut, 0, "The alpha out must be >= 0.");
            m_log.CHECK_LT(m_dfAlphaOut, 1, "The alpha out must be < 1.");

            m_nStride = (int)m_param.convolution_param.stride[0];
            m_log.CHECK_GE(m_nStride, 1, "The stride should be >= 1.");
            m_log.CHECK_LE(m_nStride, 2, "The stride should be <= 2.");

            //--------------------------------------------
            // Create the blobs.
            //--------------------------------------------

            // process high frequency.
            m_blob_x_h = new Blob<T>(m_cuda, m_log);
            m_blob_x_h.Name = "x_h";
            m_blob_x_h2h = new Blob<T>(m_cuda, m_log);
            m_blob_x_h2h.Name = "x_h2h";

            if (m_dfAlphaOut > 0)
            {
                m_blob_x_h_ds = new Blob<T>(m_cuda, m_log);
                m_blob_x_h_ds.Name = "x_h_ds";
                m_blob_x_h2l = new Blob<T>(m_cuda, m_log);
                m_blob_x_h2l.Name = "x_h2l";
            }

            // process low frequency.
            if (colBottom.Count > 1)
            {
                m_blob_x_l = new Blob<T>(m_cuda, m_log);
                m_blob_x_l.Name = "x_l";
                m_blob_x_l_ds = new Blob<T>(m_cuda, m_log);
                m_blob_x_l_ds.Name = "x_l_ds";
                m_blob_x_l2h = new Blob<T>(m_cuda, m_log);
                m_blob_x_l2h.Name = "x_l2h";
                m_blob_x_l2h_us = new Blob<T>(m_cuda, m_log);
                m_blob_x_l2h_us.Name = "x_l2h_us";
                m_blob_x_l2l = new Blob<T>(m_cuda, m_log);
                m_blob_x_l2l.Name = "x_l2l";
            }

            //--------------------------------------------
            // Create the internal layers.
            //--------------------------------------------

            LayerParameter poolParam = new LayerParameter(LayerParameter.LayerType.POOLING, "downsample");
            poolParam.pooling_param.kernel_size.Add(2);
            poolParam.pooling_param.stride.Add(2);
            poolParam.pooling_param.pool = PoolingParameter.PoolingMethod.AVE;

            if (m_nStride == 2)
            {
                m_downsampleLayer1 = Layer<T>.Create(m_cuda, m_log, poolParam, null);
                setupBtmTop(colBottom[0], m_blob_x_h);
                m_downsampleLayer1.LayerSetUp(m_rgBtm, m_rgTop);
                m_downsampleLayer1.Reshape(m_rgBtm, m_rgTop);
            }
            else
            {
                m_blob_x_h.ReshapeLike(colBottom[0]);
            }

            LayerParameter convParamBase = new LayerParameter(LayerParameter.LayerType.CONVOLUTION);
            convParamBase.convolution_param.kernel_size = m_param.convolution_param.kernel_size;
            convParamBase.convolution_param.pad = m_param.convolution_param.pad;
            convParamBase.convolution_param.stride.Add(1);
            convParamBase.convolution_param.dilation = m_param.convolution_param.dilation;
            convParamBase.convolution_param.bias_term = m_param.convolution_param.bias_term;
            convParamBase.convolution_param.weight_filler = m_param.convolution_param.weight_filler;
            convParamBase.convolution_param.bias_filler = m_param.convolution_param.bias_filler;

            int nInChannels = colBottom[0].channels;
            uint nOutChannels = m_param.convolution_param.num_output;
            uint nGroup = m_param.convolution_param.group;
            uint nGroupTmp;

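            // Each path derives a tentative group count from alpha; when the corresponding
            // channel count is not evenly divisible by it, the path falls back to group = 1.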
            // h2h Layer
            {
                LayerParameter convParam = convParamBase.Clone(false);
                convParam.name = "h2h conv";
                convParam.convolution_param.num_output = nOutChannels - (uint)(m_dfAlphaOut * nOutChannels);
                nGroupTmp = (uint)Math.Ceiling(nGroup - m_dfAlphaIn * nGroup);

                convParam.convolution_param.group = (nInChannels % nGroupTmp == 0) ? nGroupTmp : 1;
                m_conv_h2h = Layer<T>.Create(m_cuda, m_log, convParam, null);
                m_conv_h2h.OnGetWorkspace += layer_OnGetWorkspace;
                m_conv_h2h.OnSetWorkspace += layer_OnSetWorkspace;

                setupBtmTop(m_blob_x_h, m_blob_x_h2h);
                m_conv_h2h.LayerSetUp(m_rgBtm, m_rgTop);
                m_conv_h2h.Reshape(m_rgBtm, m_rgTop);
                m_colBlobs.Add(m_conv_h2h.blobs);
            }

            // h2l Layer
            if (m_dfAlphaOut > 0)
            {
                m_downsampleLayer2 = Layer<T>.Create(m_cuda, m_log, poolParam, null);
                setupBtmTop(m_blob_x_h, m_blob_x_h_ds);
                m_downsampleLayer2.LayerSetUp(m_rgBtm, m_rgTop);
                m_downsampleLayer2.Reshape(m_rgBtm, m_rgTop);

                LayerParameter convParam = convParamBase.Clone(false);
                convParam.name = "h2l conv";
                convParam.convolution_param.num_output = (uint)(m_dfAlphaOut * nOutChannels);
                convParam.convolution_param.group = (convParam.convolution_param.num_output % nGroup == 0) ? nGroup : 1;

                m_conv_h2l = Layer<T>.Create(m_cuda, m_log, convParam, null);
                m_conv_h2l.OnGetWorkspace += layer_OnGetWorkspace;
                m_conv_h2l.OnSetWorkspace += layer_OnSetWorkspace;

                setupBtmTop(m_blob_x_h_ds, m_blob_x_h2l);
                m_conv_h2l.LayerSetUp(m_rgBtm, m_rgTop);
                m_conv_h2l.Reshape(m_rgBtm, m_rgTop);
                m_colBlobs.Add(m_conv_h2l.blobs);
            }

            if (colBottom.Count > 1)
            {
                m_blob_x_l.ReshapeLike(colBottom[1]);

                // downsample3 Layer
                if (m_nStride == 2)
                {
                    m_downsampleLayer3 = Layer<T>.Create(m_cuda, m_log, poolParam, null);
                    setupBtmTop(colBottom[1], m_blob_x_l_ds);
                    m_downsampleLayer3.LayerSetUp(m_rgBtm, m_rgTop);
                    m_downsampleLayer3.Reshape(m_rgBtm, m_rgTop);
                }
                else
                {
                    m_blob_x_l_ds.ReshapeLike(m_blob_x_l);
                }

                // l2l layer
                if (m_dfAlphaOut > 0)
                {
                    LayerParameter convParam = convParamBase.Clone(false);
                    convParam.name = "l2l conv";
                    convParam.convolution_param.num_output = (uint)(m_dfAlphaOut * nOutChannels);
                    nGroupTmp = (uint)Math.Ceiling(m_dfAlphaIn * nGroup);
                    convParam.convolution_param.group = (convParam.convolution_param.num_output % nGroupTmp == 0) ? nGroupTmp : 1;
                    m_conv_l2l = Layer<T>.Create(m_cuda, m_log, convParam, null);
                    m_conv_l2l.OnGetWorkspace += layer_OnGetWorkspace;
                    m_conv_l2l.OnSetWorkspace += layer_OnSetWorkspace;

                    setupBtmTop(m_blob_x_l_ds, m_blob_x_l2l);
                    m_conv_l2l.LayerSetUp(m_rgBtm, m_rgTop);
                    m_conv_l2l.Reshape(m_rgBtm, m_rgTop);
                    m_colBlobs.Add(m_conv_l2l.blobs);
                }

                // l2h Layer
                {
                    LayerParameter convParam = convParamBase.Clone(false);
                    convParam.name = "l2h conv";
                    convParam.convolution_param.num_output = nOutChannels - (uint)(m_dfAlphaOut * nOutChannels);
                    convParam.convolution_param.group = (convParam.convolution_param.num_output % nGroup == 0) ? nGroup : 1;
                    m_conv_l2h = Layer<T>.Create(m_cuda, m_log, convParam, null);
                    m_conv_l2h.OnGetWorkspace += layer_OnGetWorkspace;
                    m_conv_l2h.OnSetWorkspace += layer_OnSetWorkspace;

                    setupBtmTop(m_blob_x_l, m_blob_x_l2h);
                    m_conv_l2h.LayerSetUp(m_rgBtm, m_rgTop);
                    m_conv_l2h.Reshape(m_rgBtm, m_rgTop);
                    m_colBlobs.Add(m_conv_l2h.blobs);
                }

                // upsample Layer
                if (m_nStride == 1)
                {
                    LayerParameter interpParam = new LayerParameter(LayerParameter.LayerType.INTERP, "upsample");
                    interpParam.interp_param.zoom_factor = 2;
                    m_upsampleLayer = Layer<T>.Create(m_cuda, m_log, interpParam, null);

                    setupBtmTop(m_blob_x_l2h, m_blob_x_l2h_us);
                    m_upsampleLayer.LayerSetUp(m_rgBtm, m_rgTop);
                    m_upsampleLayer.Reshape(m_rgBtm, m_rgTop);
                }
                else
                {
                    m_blob_x_l2h_us.ReshapeLike(m_blob_x_l2h);
                }

                // add Layer
                LayerParameter eltAdd = new LayerParameter(LayerParameter.LayerType.ELTWISE);
                eltAdd.name = "eltadd";
                eltAdd.eltwise_param.operation = EltwiseParameter.EltwiseOp.SUM;
                m_add = Layer<T>.Create(m_cuda, m_log, eltAdd, null);

                setupBtmTop(m_blob_x_l2h_us, m_blob_x_h2h, colTop[0]);
                m_add.LayerSetUp(m_rgBtm, m_rgTop);
                m_add.Reshape(m_rgBtm, m_rgTop);

                if (m_dfAlphaOut > 0)
                {
                    setupBtmTop(m_blob_x_h2l, m_blob_x_l2l, colTop[1]);
                    m_add.Reshape(m_rgBtm, m_rgTop);
                }
            }
        }

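        // The two handlers below let every internal layer share a single workspace buffer:
        // OnSetWorkspace grows the allocation when a layer requests more memory than is
        // currently held, and OnGetWorkspace hands the current buffer back to the caller.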
        private void layer_OnSetWorkspace(object sender, WorkspaceArgs e)
        {
            if (e.WorkspaceSizeInBytes < m_lWorkspaceSizeInBytes || e.WorkspaceSizeInBytes == 0)
                return;

            m_lWorkspaceSizeInBytes = e.WorkspaceSizeInBytes;
            m_cuda.DisableGhostMemory();

            if (m_hWorkspaceData != 0)
                m_cuda.FreeMemory(m_hWorkspaceData);

            m_hWorkspaceData = m_cuda.AllocMemory((long)m_lWorkspaceSizeInBytes);
            m_cuda.ResetGhostMemory();
        }

        private void layer_OnGetWorkspace(object sender, WorkspaceArgs e)
        {
            e.WorkspaceData = m_hWorkspaceData;
            e.WorkspaceSizeInBytes = m_lWorkspaceSizeInBytes;
        }

        private void setupBtmTop(Blob<T> btm, Blob<T> top)
        {
            m_rgBtm.Clear();
            m_rgBtm.Add(btm);
            m_rgTop.Clear();
            m_rgTop.Add(top);
        }

        private void setupBtmTop(Blob<T> btm1, Blob<T> btm2, Blob<T> top)
        {
            m_rgBtm.Clear();
            m_rgBtm.Add(btm1);
            m_rgBtm.Add(btm2);
            m_rgTop.Clear();
            m_rgTop.Add(top);
        }

        /// <summary>
        /// Reshape the bottom (input) and top (output) blobs.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void Reshape(BlobCollection<T> colBottom, BlobCollection<T> colTop)
        {
            if (m_nStride == 2)
            {
                setupBtmTop(colBottom[0], m_blob_x_h);
                m_downsampleLayer1.Reshape(m_rgBtm, m_rgTop);
            }
            else
            {
                m_blob_x_h.ReshapeLike(colBottom[0]);
            }

            setupBtmTop(m_blob_x_h, m_blob_x_h2h);
            m_conv_h2h.Reshape(m_rgBtm, m_rgTop);

            if (m_dfAlphaOut > 0)
            {
                setupBtmTop(m_blob_x_h, m_blob_x_h_ds);
                m_downsampleLayer2.Reshape(m_rgBtm, m_rgTop);
                setupBtmTop(m_blob_x_h_ds, m_blob_x_h2l);
                m_conv_h2l.Reshape(m_rgBtm, m_rgTop);
            }

            if (colBottom.Count > 1)
            {
                m_blob_x_l.ReshapeLike(colBottom[1]);

                if (m_nStride == 2)
                {
                    setupBtmTop(colBottom[1], m_blob_x_l_ds);
                    m_downsampleLayer3.Reshape(m_rgBtm, m_rgTop);
                }
                else
                {
                    m_blob_x_l_ds.ReshapeLike(m_blob_x_l);
                }

                if (m_dfAlphaOut > 0)
                {
                    setupBtmTop(m_blob_x_l_ds, m_blob_x_l2l);
                    m_conv_l2l.Reshape(m_rgBtm, m_rgTop);
                }

                setupBtmTop(m_blob_x_l, m_blob_x_l2h);
                m_conv_l2h.Reshape(m_rgBtm, m_rgTop);

                if (m_nStride == 1)
                {
                    setupBtmTop(m_blob_x_l2h, m_blob_x_l2h_us);
                    m_upsampleLayer.Reshape(m_rgBtm, m_rgTop);
                }
                else
                {
                    m_blob_x_l2h_us.ReshapeLike(m_blob_x_l2h);
                }

                setupBtmTop(m_blob_x_l2h_us, m_blob_x_h2h, colTop[0]);
                m_add.Reshape(m_rgBtm, m_rgTop);

                if (m_dfAlphaOut > 0)
                {
                    setupBtmTop(m_blob_x_h2l, m_blob_x_l2l, colTop[1]);
                    m_add.Reshape(m_rgBtm, m_rgTop);
                }
            }
            else
            {
                colTop[0].ReshapeLike(m_blob_x_h2h);

                if (m_dfAlphaOut > 0)
                    colTop[1].ReshapeLike(m_blob_x_h2l);
            }
        }

        /// <summary>
        /// Forward computation.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        protected override void forward(BlobCollection<T> colBottom, BlobCollection<T> colTop)
        {
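            // Octave forward pass, computed as four paths:
            //   high -> high: x_h2h = conv_h2h(x_h)
            //   high -> low : x_h2l = conv_h2l(downsample(x_h))
            //   low  -> high: x_l2h = conv_l2h(x_l), upsampled when stride = 1
            //   low  -> low : x_l2l = conv_l2l(x_l_ds)
            // then top[0] = x_h2h + x_l2h_us and, when alpha out > 0, top[1] = x_h2l + x_l2l.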
            if (m_nStride == 2)
            {
                setupBtmTop(colBottom[0], m_blob_x_h); // x_h = self.downsample(bottom)
                m_downsampleLayer1.Forward(m_rgBtm, m_rgTop);
            }
            else
            {
                m_blob_x_h.CopyFrom(colBottom[0]);
            }

            setupBtmTop(m_blob_x_h, m_blob_x_h2h); // x_h2h = self.conv_h2h(x_h)
            m_conv_h2h.Forward(m_rgBtm, m_rgTop);

            if (m_dfAlphaOut > 0)
            {
                setupBtmTop(m_blob_x_h, m_blob_x_h_ds); // tmp = self.downsample(x_h)
                m_downsampleLayer2.Forward(m_rgBtm, m_rgTop);
                setupBtmTop(m_blob_x_h_ds, m_blob_x_h2l); // x_h2l = self.conv_h2l(tmp)
                m_conv_h2l.Forward(m_rgBtm, m_rgTop);
            }

            if (colBottom.Count > 1)
            {
                m_blob_x_l.CopyFrom(colBottom[1]);

                if (m_nStride == 2)
                {
                    setupBtmTop(m_blob_x_l, m_blob_x_l_ds);
                    m_downsampleLayer3.Forward(m_rgBtm, m_rgTop);
                }
                else
                {
                    m_blob_x_l_ds.CopyFrom(m_blob_x_l);
                }

                if (m_dfAlphaOut > 0)
                {
                    setupBtmTop(m_blob_x_l_ds, m_blob_x_l2l);
                    m_conv_l2l.Forward(m_rgBtm, m_rgTop);
                }

                setupBtmTop(m_blob_x_l, m_blob_x_l2h);
                m_conv_l2h.Forward(m_rgBtm, m_rgTop);

                if (m_nStride == 1)
                {
                    setupBtmTop(m_blob_x_l2h, m_blob_x_l2h_us);
                    m_upsampleLayer.Forward(m_rgBtm, m_rgTop);
                }
                else
                {
                    m_blob_x_l2h_us.CopyFrom(m_blob_x_l2h);
                }

                setupBtmTop(m_blob_x_l2h_us, m_blob_x_h2h, colTop[0]);
                m_add.Forward(m_rgBtm, m_rgTop);

                if (m_dfAlphaOut > 0)
                {
                    setupBtmTop(m_blob_x_h2l, m_blob_x_l2l, colTop[1]);
                    m_add.Forward(m_rgBtm, m_rgTop);
                }
            }
            else
            {
                colTop[0].CopyFrom(m_blob_x_h2h);

                if (m_dfAlphaOut > 0)
                    colTop[1].CopyFrom(m_blob_x_h2l);
            }
        }

        /// <summary>
        /// Computes the error gradient w.r.t. the inputs.
        /// </summary>
        /// <param name="colTop">Specifies the collection of top (output) Blobs, whose diff contains the error gradients.</param>
        /// <param name="rgbPropagateDown">Specifies whether or not to propagate down to each bottom.</param>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        protected override void backward(BlobCollection<T> colTop, List<bool> rgbPropagateDown, BlobCollection<T> colBottom)
        {
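            // Walk the forward graph in reverse: gradients pass back through the eltwise
            // adds, then the upsample/convolution layers, then the downsample layers, and
            // finally into the bottom blobs.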
            if (!rgbPropagateDown[0])
                return;

            if (colBottom.Count > 1)
            {
                if (m_dfAlphaOut > 0)
                {
                    setupBtmTop(m_blob_x_h2l, m_blob_x_l2l, colTop[1]);
                    m_add.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);
                }

                setupBtmTop(m_blob_x_l2h_us, m_blob_x_h2h, colTop[0]);
                m_add.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);

                if (m_nStride == 1)
                {
                    setupBtmTop(m_blob_x_l2h, m_blob_x_l2h_us);
                    m_upsampleLayer.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);
                }
                else
                {
                    m_blob_x_l2h.CopyFrom(m_blob_x_l2h_us, true);
                }

                setupBtmTop(m_blob_x_l_ds, m_blob_x_l2h);
                m_conv_l2h.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);

                if (m_dfAlphaOut > 0)
                {
                    setupBtmTop(m_blob_x_l_ds, m_blob_x_l2l);
                    m_conv_l2l.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);
                }

                if (m_nStride == 2)
                {
                    setupBtmTop(m_blob_x_l, m_blob_x_l_ds);
                    m_downsampleLayer3.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);
                }
                else
                {
                    m_blob_x_l.CopyFrom(m_blob_x_l_ds, true);
                }

                colBottom[1].CopyFrom(m_blob_x_l, true);
            }
            else
            {
                m_blob_x_h2h.CopyFrom(colTop[0], true);

                if (m_dfAlphaOut > 0)
                    m_blob_x_h2l.CopyFrom(colTop[1], true);
            }

            if (m_dfAlphaOut > 0)
            {
                setupBtmTop(m_blob_x_h_ds, m_blob_x_h2l);
                m_conv_h2l.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);

                setupBtmTop(m_blob_x_h, m_blob_x_h_ds);
                m_downsampleLayer2.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);
            }

            setupBtmTop(m_blob_x_h, m_blob_x_h2h);
            m_conv_h2h.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);

            if (m_nStride == 2)
            {
                setupBtmTop(colBottom[0], m_blob_x_h);
                m_downsampleLayer1.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);
            }
            else
            {
                colBottom[0].CopyFrom(m_blob_x_h, true);
            }
        }
    }
}
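
A minimal usage sketch follows (hypothetical, not part of the source file above). It assumes an existing CudaDnn<float> connection 'cuda' and Log 'log', assumes the shape-taking Blob constructor (num, channels, height, width), uses example shapes, and otherwise relies only on members referenced in the listing:

// Configure a 3x3 octave convolution with 64 outputs, carrying half of the
// input and output channels in the low-frequency path.
LayerParameter p = new LayerParameter(LayerParameter.LayerType.CONVOLUTION_OCTAVE, "octconv1");
p.convolution_param.num_output = 64;
p.convolution_param.kernel_size.Add(3);
p.convolution_param.pad.Add(1);
p.convolution_param.stride.Add(1);
p.convolution_octave_param.alpha_in = 0.5;
p.convolution_octave_param.alpha_out = 0.5;

Layer<float> layer = Layer<float>.Create(cuda, log, p, null);

// in_h is the high-frequency input; in_l is the low-frequency input at
// half the spatial resolution (example shapes only).
Blob<float> in_h = new Blob<float>(cuda, log, 1, 32, 56, 56);
Blob<float> in_l = new Blob<float>(cuda, log, 1, 32, 28, 28);
Blob<float> out_h = new Blob<float>(cuda, log);
Blob<float> out_l = new Blob<float>(cuda, log);

BlobCollection<float> colBottom = new BlobCollection<float>();
colBottom.Add(in_h);
colBottom.Add(in_l);
BlobCollection<float> colTop = new BlobCollection<float>();
colTop.Add(out_h);
colTop.Add(out_l);

layer.LayerSetUp(colBottom, colTop);
layer.Reshape(colBottom, colTop);
layer.Forward(colBottom, colTop);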