MyCaffe  1.12.2.41
Deep learning software for Windows C# programmers.
ConcatLayer.cs
1using System;
2using System.Collections.Generic;
3using System.Linq;
4using System.Text;
5using MyCaffe.basecode;
6using MyCaffe.common;
7using MyCaffe.param;
8
9namespace MyCaffe.layers
10{
23 public class ConcatLayer<T> : Layer<T>
24 {
        int m_nNumConcats;       // Number of independent concatenations: product of the dims before the concat axis (set in Reshape from count(0, m_nConcatAxis)).
        int m_nConcatInputSize;  // Element count of one slice after the concat axis (set in Reshape from count(m_nConcatAxis + 1)).
        int m_nConcatAxis;       // Resolved (canonical, non-negative) axis along which the bottoms are concatenated.
28
37 : base(cuda, log, p)
38 {
40 }
41
45 public override int MinBottomBlobs
46 {
47 get { return 1; }
48 }
49
53 public override int ExactNumTopBlobs
54 {
55 get { return 1; }
56 }
57
63 public override void LayerSetUp(BlobCollection<T> colBottom, BlobCollection<T> colTop)
64 {
65 }
66
72 public override void Reshape(BlobCollection<T> colBottom, BlobCollection<T> colTop)
73 {
74 int nNumAxes = colBottom[0].num_axes;
75
76 if (m_param.concat_param.concat_dim.HasValue)
77 {
78 m_nConcatAxis = (int)m_param.concat_param.concat_dim.Value;
79 // Don't allow negative indexing for concat_dim, a uint -- almost certainly
80 // unintended.
81 m_log.CHECK_GE(m_nConcatAxis, 0, "Casting concat_dim from uint to int produced a negative result; concat_dim must be > 0.");
82 m_log.CHECK_LT(m_nConcatAxis, nNumAxes, "concat_dim out of range.");
83 }
84 else
85 {
86 m_nConcatAxis = colBottom[0].CanonicalAxisIndex(m_param.concat_param.axis);
87 }
88
89 // Initialize with the first blob.
90 List<int> rgTopShape = Utility.Clone<int>(colBottom[0].shape());
91 m_nNumConcats = colBottom[0].count(0, m_nConcatAxis);
92 m_nConcatInputSize = colBottom[0].count(m_nConcatAxis + 1);
93
94 int nBottomCountSum = colBottom[0].count();
95
96 for (int i = 1; i < colBottom.Count; i++)
97 {
98 m_log.CHECK_EQ(nNumAxes, colBottom[i].num_axes, "All inputs must have the same # axes.");
99
100 for (int j = 0; j < nNumAxes; j++)
101 {
102 if (j == m_nConcatAxis)
103 continue;
104
105 m_log.CHECK_EQ(rgTopShape[j], colBottom[i].shape(j), "All inputs must have the same shape, except at concat_axis. You might try switching between the ONNX(p) and CAFFE(t) type pooling sizing methods.");
106 }
107
108 nBottomCountSum += colBottom[i].count();
109 rgTopShape[m_nConcatAxis] += colBottom[i].shape(m_nConcatAxis);
110 }
111
112 colTop[0].Reshape(rgTopShape);
113 m_log.CHECK_EQ(nBottomCountSum, colTop[0].count(), "The bottomCountSums should equal the top[0].count.");
114
115 if (colBottom.Count == 1)
116 {
117 colTop[0].ShareData(colBottom[0]);
118 colTop[0].ShareDiff(colBottom[0]);
119 }
120 }
121
140 protected override void forward(BlobCollection<T> colBottom, BlobCollection<T> colTop)
141 {
142 if (colBottom.Count == 1)
143 return;
144
145 long hTopData = colTop[0].mutable_gpu_data;
146 int nOffsetConcatAxis = 0;
147 int nTopConcatAxis = colTop[0].shape(m_nConcatAxis);
148
149 for (int i = 0; i < colBottom.Count; i++)
150 {
151 long hBottomData = colBottom[i].gpu_data;
152 int nBottomConcatAxis = colBottom[i].shape(m_nConcatAxis);
153 int nBottomConcatSize = nBottomConcatAxis * m_nConcatInputSize;
154 int nCount = nBottomConcatSize * m_nNumConcats;
155
156 m_cuda.concat_fwd(nCount, hBottomData, m_nNumConcats, m_nConcatInputSize, nTopConcatAxis, nBottomConcatAxis, nOffsetConcatAxis, hTopData);
157 nOffsetConcatAxis += nBottomConcatAxis;
158 }
159 }
160
183 protected override void backward(BlobCollection<T> colTop, List<bool> rgbPropagateDown, BlobCollection<T> colBottom)
184 {
185 if (colBottom.Count == 1)
186 return;
187
188 long hTopDiff = colTop[0].gpu_diff;
189 int nOffsetConcatAxis = 0;
190 int nTopConcatAxis = colTop[0].shape(m_nConcatAxis);
191
192 for (int i = 0; i < colBottom.Count; i++)
193 {
194 int nBottomConcatAxis = colBottom[i].shape(m_nConcatAxis);
195
196 if (rgbPropagateDown[i])
197 {
198 long hBottomDiff = colBottom[i].mutable_gpu_diff;
199 int nBottomConcatSize = nBottomConcatAxis * m_nConcatInputSize;
200 int nCount = nBottomConcatSize * m_nNumConcats;
201
202 m_cuda.concat_bwd(nCount, hTopDiff, m_nNumConcats, m_nConcatInputSize, nTopConcatAxis, nBottomConcatAxis, nOffsetConcatAxis, hBottomDiff);
203 }
204
205 nOffsetConcatAxis += nBottomConcatAxis;
206 }
207 }
208 }
209}
The Log class provides general output in text form.
Definition: Log.cs:13
void CHECK_EQ(double df1, double df2, string str)
Test whether one number is equal to another.
Definition: Log.cs:239
void CHECK_GE(double df1, double df2, string str)
Test whether one number is greater than or equal to another.
Definition: Log.cs:287
void CHECK_LT(double df1, double df2, string str)
Test whether one number is less than another.
Definition: Log.cs:275
The Utility class provides general utility functions.
Definition: Utility.cs:35
The BlobCollection contains a list of Blobs.
int Count
Returns the number of items in the collection.
void Reshape(int[] rgShape)
Reshapes all blobs in the collection to the given shape.
The CudaDnn object is the main interface to the Low-Level Cuda C++ DLL.
Definition: CudaDnn.cs:969
The ConcatLayer takes at least two Blobs and concatenates them along either the num or channel dimen...
Definition: ConcatLayer.cs:24
override int ExactNumTopBlobs
Returns the exact number of required top (output) Blobs: concat
Definition: ConcatLayer.cs:54
ConcatLayer(CudaDnn< T > cuda, Log log, LayerParameter p)
The ConcatLayer constructor.
Definition: ConcatLayer.cs:36
override void forward(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Forward computation
Definition: ConcatLayer.cs:140
override void LayerSetUp(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Setup the layer.
Definition: ConcatLayer.cs:63
override void backward(BlobCollection< T > colTop, List< bool > rgbPropagateDown, BlobCollection< T > colBottom)
Computes the error gradient w.r.t. the concatenation inputs.
Definition: ConcatLayer.cs:183
override void Reshape(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Reshape the bottom (input) and top (output) blobs.
Definition: ConcatLayer.cs:72
override int MinBottomBlobs
Returns the minimum number of required bottom (input) Blobs: input
Definition: ConcatLayer.cs:46
An interface for the units of computation which can be composed into a Net.
Definition: Layer.cs:31
Log m_log
Specifies the Log for output.
Definition: Layer.cs:43
LayerParameter m_param
Specifies the LayerParameter describing the Layer.
Definition: Layer.cs:47
CudaDnn< T > m_cuda
Specifies the CudaDnn connection to Cuda.
Definition: Layer.cs:39
LayerParameter.LayerType m_type
Specifies the Layer type.
Definition: Layer.cs:35
uint? concat_dim
DEPRECATED: alias for 'axis' – does not support negative indexing.
int axis
The axis along which to concatenate – may be negative to index from the end (e.g.,...
Specifies the base parameter for all layers.
ConcatParameter concat_param
Returns the parameter set when initialized with LayerType.CONCAT
LayerType
Specifies the layer type.
The MyCaffe.basecode contains all generic types used throughout MyCaffe.
Definition: Annotation.cs:12
The MyCaffe.common namespace contains common MyCaffe classes.
Definition: BatchInput.cs:8
The MyCaffe.layers namespace contains all layers that have a solidified code base,...
Definition: LayerFactory.cs:15
The MyCaffe.param namespace contains parameters used to create models.
The MyCaffe namespace contains the main body of MyCaffe code that closely tracks the C++ Caffe open-...
Definition: Annotation.cs:12