MyCaffe  1.12.2.41
Deep learning software for Windows C# programmers.
Normalization1Layer.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using MyCaffe.basecode;
using MyCaffe.common;
using MyCaffe.param;

namespace MyCaffe.layers.beta
{
    /// <summary>
    /// The Normalization1Layer performs an L2 normalization over the input data.
    /// </summary>
    public class Normalization1Layer<T> : Layer<T>
    {
        Blob<T> m_blobSquared;

        /// <summary>
        /// The Normalization1Layer constructor.
        /// </summary>
        /// <param name="cuda">Specifies the CudaDnn connection to Cuda.</param>
        /// <param name="log">Specifies the Log used for output.</param>
        /// <param name="p">Specifies the LayerParameter.</param>
        public Normalization1Layer(CudaDnn<T> cuda, Log log, LayerParameter p)
            : base(cuda, log, p)
        {
            m_type = LayerParameter.LayerType.NORMALIZATION1;
            m_blobSquared = new Blob<T>(cuda, log);
            m_blobSquared.Name = "squared";
        }
        /// <summary>
        /// Releases all GPU and host resources used by the Layer.
        /// </summary>
        protected override void dispose()
        {
            m_blobSquared.Dispose();
            base.dispose();
        }

        /// <summary>
        /// Derivative layers should add all internal blobs to the 'col' provided.
        /// </summary>
        protected override void setup_internal_blobs(BlobCollection<T> col)
        {
            if (col.Count > 0)
                return;

            col.Add(m_blobSquared);
        }
        /// <summary>
        /// Returns the exact number of required bottom (input) Blobs: data
        /// </summary>
        public override int ExactNumBottomBlobs
        {
            get { return 1; }
        }

        /// <summary>
        /// Returns the exact number of required top (output) Blobs: norm
        /// </summary>
        public override int ExactNumTopBlobs
        {
            get { return 1; }
        }
        /// <summary>
        /// Setup the layer.
        /// </summary>
        public override void LayerSetUp(BlobCollection<T> colBottom, BlobCollection<T> colTop)
        {
        }

        /// <summary>
        /// Reshape the bottom (input) and top (output) blobs.
        /// </summary>
        public override void Reshape(BlobCollection<T> colBottom, BlobCollection<T> colTop)
        {
            colTop[0].ReshapeLike(colBottom[0]);
            m_blobSquared.ReshapeLike(colBottom[0]);
        }
        /// <summary>
        /// Computes the forward calculation.
        /// </summary>
        protected override void forward(BlobCollection<T> colBottom, BlobCollection<T> colTop)
        {
            long hBottomData = colBottom[0].gpu_data;
            long hTopData = colTop[0].mutable_gpu_data;
            long hSquaredData = m_blobSquared.mutable_gpu_data;
            double dfNormSqr;
            int n = colBottom[0].num;
            int d = colBottom[0].count() / n;

            m_cuda.powx(n * d, hBottomData, 2.0, hSquaredData);

            for (int i = 0; i < n; i++)
            {
                dfNormSqr = m_cuda.asum_double(d, hSquaredData, i * d);
                dfNormSqr = Math.Pow(dfNormSqr, -0.5);
                m_cuda.scale(d, convert(dfNormSqr), hBottomData, hTopData, i * d, i * d);
            }
        }

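        // In effect, the forward pass above treats the bottom Blob as n items of
        // length d = count / num and scales each item x_i by the inverse of its
        // L2 norm:
        //
        //     y_i = x_i / ||x_i||_2,   where ||x_i||_2 = sqrt(sum_j x_ij^2)
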
        /// <summary>
        /// Computes the error gradient w.r.t the inputs.
        /// </summary>
        protected override void backward(BlobCollection<T> colTop, List<bool> rgbPropagateDown, BlobCollection<T> colBottom)
        {
            long hTopDiff = colTop[0].gpu_diff;
            long hTopData = colTop[0].gpu_data;
            long hBottomData = colBottom[0].gpu_data;
            long hBottomDiff = colBottom[0].mutable_gpu_diff;
            int n = colTop[0].num;
            int d = colTop[0].count() / n;
            T a;

            for (int i = 0; i < n; i++)
            {
                a = m_cuda.dot(d, hTopData, hTopDiff, i * d, i * d);
                m_cuda.scale(d, a, hTopData, hBottomDiff, i * d, i * d);
                m_cuda.sub(d, hTopDiff, hBottomDiff, hBottomDiff, i * d, i * d, i * d);
                a = m_cuda.dot(d, hBottomData, hBottomData, i * d, i * d);
                double dfA = Math.Pow(convertD(a), -0.5);
                m_cuda.scale(d, convert(dfA), hBottomDiff, hBottomDiff, i * d, i * d);
            }
        }
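
        // The backward pass above applies the standard gradient of an L2
        // normalization, per item i:
        //
        //     dE/dx_i = ( dE/dy_i - y_i * (y_i . dE/dy_i) ) / ||x_i||_2
        //
        // which corresponds to the dot, scale, sub and final scale calls in the loop.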
    }
}
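A rough usage sketch, assuming an already-open CudaDnn<double> connection cuda and a Log log, and assuming the LayerParameter(LayerType) constructor, the shaped Blob<double> constructor, and the public Layer<T> Setup/Forward/Dispose entry points (none of which are shown in the listing above):

// Hedged sketch -- 'cuda' (CudaDnn<double>) and 'log' (Log) are assumed to already exist.
LayerParameter p = new LayerParameter(LayerParameter.LayerType.NORMALIZATION1);
Normalization1Layer<double> layer = new Normalization1Layer<double>(cuda, log, p);

Blob<double> blobBottom = new Blob<double>(cuda, log, 16, 128, 1, 1);  // 16 items of 128 values each (assumed shape constructor)
Blob<double> blobTop = new Blob<double>(cuda, log);

BlobCollection<double> colBottom = new BlobCollection<double>();
BlobCollection<double> colTop = new BlobCollection<double>();
colBottom.Add(blobBottom);
colTop.Add(blobTop);

layer.Setup(colBottom, colTop);    // runs LayerSetUp and Reshape
layer.Forward(colBottom, colTop);  // each of the 16 items in blobTop now has unit L2 norm

layer.Dispose();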
The Log class provides general output in text form.
Definition: Log.cs:13
The BlobCollection contains a list of Blobs.
void Add(Blob< T > b)
Add a new Blob to the collection.
int Count
Returns the number of items in the collection.
void ReshapeLike(BlobCollection< T > src)
Reshapes all blobs in the collection to the sizes of the source.
The Blob is the main holder of data that moves through the Layers of the Net.
Definition: Blob.cs:25
The CudaDnn object is the main interface to the Low-Level Cuda C++ DLL.
Definition: CudaDnn.cs:969
An interface for the units of computation which can be composed into a Net.
Definition: Layer.cs:31
void convert(BlobCollection< T > col)
Convert a collection of blobs from / to half size.
Definition: Layer.cs:535
double convertD(T df)
Converts a generic to a double value.
Definition: Layer.cs:1349
CudaDnn< T > m_cuda
Specifies the CudaDnn connection to Cuda.
Definition: Layer.cs:39
LayerParameter.LayerType m_type
Specifies the Layer type.
Definition: Layer.cs:35
The Normalization1Layer performs an L2 normalization over the input data. This layer is initialized w...
override void dispose()
Releases all GPU and host resources used by the Layer.
override int ExactNumTopBlobs
Returns the exact number of required top (output) Blobs: norm
override int ExactNumBottomBlobs
Returns the exact number of required bottom (input) Blobs: data
Normalization1Layer(CudaDnn< T > cuda, Log log, LayerParameter p)
The Normalization1Layer constructor.
override void LayerSetUp(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Setup the layer.
override void setup_internal_blobs(BlobCollection< T > col)
Derivative layers should add all internal blobs to the 'col' provided.
override void Reshape(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Reshape the bottom (input) and top (output) blobs.
override void forward(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Computes the forward calculation.
override void backward(BlobCollection< T > colTop, List< bool > rgbPropagateDown, BlobCollection< T > colBottom)
Computes the error gradient w.r.t the inputs.
Specifies the base parameter for all layers.
LayerType
Specifies the layer type.
The MyCaffe.basecode contains all generic types used throughout MyCaffe.
Definition: Annotation.cs:12
The MyCaffe.common namespace contains common MyCaffe classes.
Definition: BatchInput.cs:8
The MyCaffe.layers.beta namespace contains all beta stage layers.
Definition: LayerFactory.cs:9
The MyCaffe.param namespace contains parameters used to create models.
The MyCaffe namespace contains the main body of MyCaffe code that closely tracks the C++ Caffe open-...
Definition: Annotation.cs:12