Deep learning software for Windows C# programmers.
1using System;
2using System.Collections.Generic;
3using System.Linq;
4using System.Text;
5using MyCaffe.basecode;
6using MyCaffe.common;
7using MyCaffe.param;
9namespace MyCaffe.layers
22 public abstract class LossLayer<T> : Layer<T>
23 {
/// <summary>
/// Specifies the minimum threshold for loss values.
/// </summary>
public const double kLOG_THRESHOLD = 1e-20;

/// <summary>
/// Set to true when labels are to be ignored.
/// </summary>
protected bool m_bIgnoreLabels = false;

/// <summary>
/// Specifies the normalization mode used to normalize the loss.
/// </summary>
/// <remarks>
/// NOTE(review): this declaration was lost in extraction (its documentation places it at
/// LossLayer.cs:35); restored here with the documented default of VALID - confirm.
/// </remarks>
protected LossParameter.NormalizationMode m_normalization = LossParameter.NormalizationMode.VALID;

/// <summary>
/// Specifies the outer num, such as the batch count (e.g. count(0, axis)).
/// Each derivative class must set this value.
/// </summary>
protected int m_nOuterNum = 0;

/// <summary>
/// Specifies the inner num, such as the channel + height + width (e.g. count(axis + 1)).
/// Each derivative class must set this value.
/// </summary>
protected int m_nInnerNum = 0;

/// <summary>
/// Specifies the loss event called on each learning cycle.
/// </summary>
public event EventHandler<LossArgs> OnLoss;

// Cached event args, allocated lazily on the first OnLoss firing and reused thereafter.
LossArgs m_lossArgs = null;
/// <summary>
/// The LossLayer constructor.
/// </summary>
/// <param name="cuda">Specifies the CudaDnn connection to Cuda.</param>
/// <param name="log">Specifies the Log for output.</param>
/// <param name="p">Specifies the LayerParameter describing this layer.</param>
/// <remarks>
/// NOTE(review): the signature line was lost in extraction; reconstructed from the
/// documented declaration 'LossLayer(CudaDnn&lt;T&gt; cuda, Log log, LayerParameter p)'.
/// </remarks>
public LossLayer(CudaDnn<T> cuda, Log log, LayerParameter p)
    : base(cuda, log, p)
{
}
/// <summary>
/// Pass the blob data to the OnLoss event (when a subscriber is attached).
/// </summary>
/// <param name="blob">Specifies the blob whose data contains the loss values.</param>
protected void callLossEvent(Blob<T> blob)
{
    // Take a local copy of the delegate before testing/invoking it.
    EventHandler<LossArgs> evtLoss = OnLoss;
    if (evtLoss == null)
        return;

    // Lazily allocate the event args once and reuse them on each firing.
    if (m_lossArgs == null)
        m_lossArgs = new LossArgs(blob.count(), blob.shape());

    float[] rgLoss = convertF(blob.mutable_cpu_data);
    Array.Copy(rgLoss, m_lossArgs.Data, rgLoss.Length);

    evtLoss(this, m_lossArgs);
}
/// <summary>
/// Returns the normalizer used to normalize the loss, using the member
/// outer/inner nums set by the derived class.
/// </summary>
/// <param name="normalization_mode">Specifies the normalization mode to use.</param>
/// <param name="nValidCount">Specifies the number of valid items, or -1 when not known.</param>
/// <returns>The normalizer value is returned.</returns>
protected virtual double get_normalizer(LossParameter.NormalizationMode normalization_mode, int nValidCount)
    => GetNormalizer(normalization_mode, m_nOuterNum, m_nInnerNum, nValidCount);
/// <summary>
/// Returns the normalizer used to normalize the loss.
/// </summary>
/// <param name="normalization_mode">Specifies the normalization mode to apply.</param>
/// <param name="nOuterNum">Specifies the outer num (e.g. the batch count).</param>
/// <param name="nInnerNum">Specifies the inner num (e.g. the per-item count).</param>
/// <param name="nValidCount">Specifies the number of valid items, or -1 when not known.</param>
/// <returns>The normalizer, clamped to a minimum of 1.0, is returned.</returns>
public double GetNormalizer(LossParameter.NormalizationMode normalization_mode, int nOuterNum, int nInnerNum, int nValidCount)
{
    double dfNormalizer = 0.0;

    switch (normalization_mode)
    {
        // NOTE(review): the FULL/VALID/NONE case labels were lost in extraction and are
        // reconstructed here from the standard Caffe loss normalization semantics - confirm.
        case LossParameter.NormalizationMode.FULL:
            m_log.CHECK_GT(nInnerNum, 0, "The inner number must be set.");
            m_log.CHECK_GT(nOuterNum, 0, "The outer number must be set.");
            // Multiply in double space to avoid int overflow on large counts.
            dfNormalizer = (double)nOuterNum * nInnerNum;
            break;

        case LossParameter.NormalizationMode.VALID:
            if (nValidCount == -1)
            {
                // No valid count supplied - fall back to the full count.
                m_log.CHECK_GT(nInnerNum, 0, "The inner number must be set.");
                m_log.CHECK_GT(nOuterNum, 0, "The outer number must be set.");
                dfNormalizer = (double)nOuterNum * nInnerNum;
            }
            else
            {
                dfNormalizer = nValidCount;
            }
            break;

        case LossParameter.NormalizationMode.BATCH_SIZE:
            m_log.CHECK_GT(nOuterNum, 0, "The outer number must be set.");
            dfNormalizer = nOuterNum;
            break;

        case LossParameter.NormalizationMode.NONE:
            dfNormalizer = 1.0;
            break;

        default:
            m_log.FAIL("Unknown normalization mode " + normalization_mode.ToString());
            break;
    }

    // Some users will have no labels for some examples in order to 'turn off' a
    // particular loss in a multi-task setup.  The max prevents NaNs in that case.
    return Math.Max(dfNormalizer, 1.0);
}
/// <summary>
/// Returns the exact number of required bottom (input) Blobs: prediction, label.
/// </summary>
public override int ExactNumBottomBlobs => 2;
/// <summary>
/// Returns the exact number of required top (output) Blobs: loss.
/// </summary>
public override int ExactNumTopBlobs => 1;
/// <summary>
/// For convenience and backwards compatibility, instruct the Net to automatically
/// allocate a single top Blob for loss layers.
/// </summary>
public override bool AutoTopBlobs => true;
/// <summary>
/// We usually cannot backpropagate to the labels; ignore force_backward for the
/// label input (bottom index 1) and allow it for all other inputs.
/// </summary>
/// <param name="nBottomIdx">Specifies the index of the bottom blob queried.</param>
/// <returns>Returns <i>false</i> for the label blob (index 1), otherwise <i>true</i>.</returns>
public override bool AllowForceBackward(int nBottomIdx)
{
    return nBottomIdx != 1;
}
/// <summary>
/// Setup the layer: default the loss weight to 1 and resolve the normalization mode.
/// </summary>
/// <param name="colBottom">Specifies the bottom (input) blobs: prediction, label.</param>
/// <param name="colTop">Specifies the top (output) blobs: loss.</param>
public override void LayerSetUp(BlobCollection<T> colBottom, BlobCollection<T> colTop)
{
    // LossLayers have non-zero (1) loss by default.
    if (m_param.loss_weight.Count == 0)
        m_param.loss_weight.Add(1.0);

    m_log.CHECK(!m_param.loss_param.normalize, "normalize is deprecated, use 'normalization'.");

    // NOTE(review): the original branch bodies were lost in extraction; reconstructed
    // from the LossParameter documentation ("normalization (default = VALID)") - confirm.
    if (!m_param.loss_param.normalization.HasValue)
        m_normalization = LossParameter.NormalizationMode.VALID;
    else
        m_normalization = m_param.loss_param.normalization.Value;
}
/// <summary>
/// Reshape the bottom (input) and top (output) blobs.
/// </summary>
/// <param name="colBottom">Specifies the bottom (input) blobs: prediction, label.</param>
/// <param name="colTop">Specifies the top (output) blobs: loss.</param>
public override void Reshape(BlobCollection<T> colBottom, BlobCollection<T> colTop)
{
    Blob<T> blobData = colBottom[0];
    Blob<T> blobLabel = colBottom[1];

    m_log.CHECK_EQ(blobData.shape(0), blobLabel.shape(0), "The data and label should have the same first dimension. Data has shape '" + blobData.shape_string + "' and label has shape '" + blobLabel.shape_string + "'.");

    // Loss layers output a scalar, 0 axes.
    List<int> rgScalarShape = new List<int>();
    colTop[0].Reshape(rgScalarShape);
    colTop[0].type = BLOB_TYPE.LOSS;
}
219 }
The Log class provides general output in text form.
Definition: Log.cs:13
void CHECK(bool b, string str)
Test a flag for true.
Definition: Log.cs:227
void FAIL(string str)
Causes a failure which throws an exception with the descriptive text.
Definition: Log.cs:394
void CHECK_EQ(double df1, double df2, string str)
Test whether one number is equal to another.
Definition: Log.cs:239
void CHECK_GT(double df1, double df2, string str)
Test whether one number is greater than another.
Definition: Log.cs:299
The LossArgs contains the loss values for a given batch.
Definition: EventArgs.cs:207
float[] Data
Specifies the loss values for a given batch.
Definition: EventArgs.cs:234
The BlobCollection contains a list of Blobs.
void Reshape(int[] rgShape)
Reshapes all blobs in the collection to the given shape.
The Blob is the main holder of data that moves through the Layers of the Net.
Definition: Blob.cs:25
T[] mutable_cpu_data
Get data from the GPU and bring it over to the host, or Set data from the Host and send it over to th...
Definition: Blob.cs:1461
List< int > shape()
Returns an array where each element contains the shape of an axis of the Blob.
Definition: Blob.cs:684
int count()
Returns the total number of items in the Blob.
Definition: Blob.cs:739
The CudaDnn object is the main interface to the Low-Level Cuda C++ DLL.
Definition: CudaDnn.cs:969
An interface for the units of computation which can be composed into a Net.
Definition: Layer.cs:31
Log m_log
Specifies the Log for output.
Definition: Layer.cs:43
LayerParameter m_param
Specifies the LayerParameter describing the Layer.
Definition: Layer.cs:47
float convertF(T df)
Converts a generic to a float value.
Definition: Layer.cs:1359
LayerParameter.? LayerType m_parentLayerType
Specifies the layer type of the parent.
Definition: Layer.cs:108
LayerParameter.LayerType m_type
Specifies the Layer type.
Definition: Layer.cs:35
The LossLayer provides an interface for Layer's that take two blobs as input – usually (1) prediction...
Definition: LossLayer.cs:23
const double kLOG_THRESHOLD
Specifies the minimum threshold for loss values.
Definition: LossLayer.cs:27
double GetNormalizer(LossParameter.NormalizationMode normalization_mode, int nOuterNum, int nInnerNum, int nValidCount)
Returns the normalizer used to normalize the loss.
Definition: LossLayer.cs:106
override bool AutoTopBlobs
For convenience and backwards compatibility, instruct the Net to automatically allocate a single top ...
Definition: LossLayer.cs:171
EventHandler< LossArgs > OnLoss
Specifies the loss event called on each learning cycle.
Definition: LossLayer.cs:47
bool m_bIgnoreLabels
Set to true when labels are to be ignored.
Definition: LossLayer.cs:31
override int ExactNumBottomBlobs
Returns the exact number of required bottom (input) Blobs: prediction, label
Definition: LossLayer.cs:152
override void Reshape(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Reshape the bottom (input) and top (output) blobs.
Definition: LossLayer.cs:212
int m_nOuterNum
Specifies the outer num, such as the batch count (e.g. count(0, axis)). Each derivative class must se...
Definition: LossLayer.cs:39
int m_nInnerNum
Specifies the inner num, such as the channel + height + width (e.g. count(axis + 1))....
Definition: LossLayer.cs:43
override int ExactNumTopBlobs
Returns the exact number of required top (output) Blobs: loss
Definition: LossLayer.cs:160
virtual double get_normalizer(LossParameter.NormalizationMode normalization_mode, int nValidCount)
Returns the normalizer used to normalize the loss.
Definition: LossLayer.cs:92
override void LayerSetUp(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Setup the layer.
Definition: LossLayer.cs:194
LossLayer(CudaDnn< T > cuda, Log log, LayerParameter p)
The LossLayer constructor.
Definition: LossLayer.cs:61
override bool AllowForceBackward(int nBottomIdx)
We usually cannot backpropagate to the labels; ignore force_backward for these inputs.
Definition: LossLayer.cs:181
LossParameter.NormalizationMode m_normalization
Specifies the normalization mode used to normalize the loss.
Definition: LossLayer.cs:35
void callLossEvent(Blob< T > blob)
This method is called by the loss layer to pass the blob data to the OnLoss event (if implemented)
Definition: LossLayer.cs:72
Specifies the base parameter for all layers.
List< double > loss_weight
Specifies the loss weight.
Specifies the layer type.
LossParameter loss_param
Returns the parameter set when initialized with LayerType.LOSS
Stores the parameters used by loss layers.
How to normalize the loss for loss layers that aggregate across batches, spatial dimensions,...
bool normalize
DEPRECATED. Ignore if normalization is specified. If normalization is not specified,...
NormalizationMode? normalization
Specifies the normalization mode (default = VALID).
The MyCaffe.basecode contains all generic types used throughout MyCaffe.
Definition: Annotation.cs:12
The MyCaffe.common namespace contains common MyCaffe classes.
Definition: BatchInput.cs:8
Defines the type of data held by a given Blob.
Definition: Interfaces.cs:62
The MyCaffe.layers namespace contains all layers that have a solidified code base,...
Definition: LayerFactory.cs:15
The MyCaffe.param namespace contains parameters used to create models.
The MyCaffe namespace contains the main body of MyCaffe code that closely tracks the C++ Caffe open-...
Definition: Annotation.cs:12