MyCaffe  1.12.2.41
Deep learning software for Windows C# programmers.
BatchNormParameter.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Linq;
using System.Text;
using MyCaffe.basecode;
8namespace MyCaffe.param
9{
18 [Serializable]
19 [TypeConverter(typeof(ExpandableObjectConverter))]
21 {
22 bool? m_bUseGlobalStats = null;
23 double m_dfMovingAverageFraction = 0.999;
24 double m_dfEps = 1e-5;
25 bool m_bScaleBias = false;
26 FillerParameter m_scaleFiller = null;
27 FillerParameter m_biasFiller = null;
28
31 {
32 }
33
38 public string useCaffeReason()
39 {
40 if (engine == Engine.CAFFE)
41 return "The engine setting is set on CAFFE.";
42
43 return "";
44 }
45
50 public bool useCudnn()
51 {
52 if (engine != EngineParameter.Engine.CAFFE)
53 return true;
54
55 return false;
56 }
57
65 [Description("Specifies to use the scale and bias terms, otherwise the scale = 1 and bias = 0 which performs an identity operation.")]
66 public bool scale_bias
67 {
68 get { return m_bScaleBias; }
69 set { m_bScaleBias = value; }
70 }
71
78 [Description("Specifies the scale filler used, when null 'constant(1)' is used.")]
80 {
81 get { return m_scaleFiller; }
82 set { m_scaleFiller = value; }
83 }
84
91 [Description("Specifies the bias filler used, when null 'constant(0)' is used.")]
93 {
94 get { return m_biasFiller; }
95 set { m_biasFiller = value; }
96 }
97
107 [Description("If false, accumulate global mean/variance values via a moving average. If true, use those accumulated values instead of computing mean/variance accross the batch.")]
108 public bool? use_global_stats
109 {
110 get { return m_bUseGlobalStats; }
111 set { m_bUseGlobalStats = value; }
112 }
113
123 [Description("Specifies how much the moving average decays each iteration.")]
125 {
126 get { return m_dfMovingAverageFraction; }
127 set { m_dfMovingAverageFraction = value; }
128 }
129
134 [Description("Specifies a small value to add to the variance estimate so that we don't divide by zero.")]
135 public double eps
136 {
137 get { return m_dfEps; }
138 set { m_dfEps = value; }
139 }
140
142 public override object Load(System.IO.BinaryReader br, bool bNewInstance = true)
143 {
144 RawProto proto = RawProto.Parse(br.ReadString());
145 BatchNormParameter p = FromProto(proto);
146
147 if (!bNewInstance)
148 Copy(p);
149
150 return p;
151 }
152
154 public override void Copy(LayerParameterBase src)
155 {
156 base.Copy(src);
157
158 if (src is BatchNormParameter)
159 {
161 m_bUseGlobalStats = p.m_bUseGlobalStats;
162 m_dfEps = p.m_dfEps;
163 m_dfMovingAverageFraction = p.m_dfMovingAverageFraction;
164 m_bScaleBias = p.m_bScaleBias;
165 m_biasFiller = p.m_biasFiller;
166
167 if (p.m_scaleFiller != null)
168 m_scaleFiller = p.m_scaleFiller.Clone();
169 else
170 m_scaleFiller = null;
171
172 if (p.m_biasFiller != null)
173 m_biasFiller = p.m_biasFiller.Clone();
174 else
175 m_biasFiller = null;
176 }
177 }
178
180 public override LayerParameterBase Clone()
181 {
183 p.Copy(this);
184 return p;
185 }
186
192 public override RawProto ToProto(string strName)
193 {
194 RawProto rpBase = base.ToProto("engine");
195 RawProtoCollection rgChildren = new RawProtoCollection();
196
197 rgChildren.Add(rpBase.Children);
198
199 if (use_global_stats.HasValue)
200 rgChildren.Add("use_global_stats", use_global_stats.Value.ToString());
201
202 if (moving_average_fraction != 0.999)
203 rgChildren.Add("moving_average_fraction", moving_average_fraction.ToString());
204
205 if (eps != 1e-5)
206 rgChildren.Add("eps", eps.ToString());
207
208 if (scale_bias)
209 {
210 rgChildren.Add("scale_bias", scale_bias.ToString());
211
212 if (scale_filler != null)
213 rgChildren.Add(scale_filler.ToProto("scale_filler"));
214
215 if (bias_filler != null)
216 rgChildren.Add(bias_filler.ToProto("bias_filler"));
217 }
218
219 return new RawProto(strName, "", rgChildren);
220 }
221
228 {
229 string strVal;
231
233
234 if ((strVal = rp.FindValue("use_global_stats")) != null)
235 p.use_global_stats = bool.Parse(strVal);
236
237 if ((strVal = rp.FindValue("moving_average_fraction")) != null)
239
240 if ((strVal = rp.FindValue("eps")) != null)
241 p.eps = ParseDouble(strVal);
242
243 if ((strVal = rp.FindValue("scale_bias")) != null)
244 {
245 p.scale_bias = bool.Parse(strVal);
246
247 RawProto rp1;
248
249 if ((rp1 = rp.FindChild("scale_filler")) != null)
251
252 if ((rp1 = rp.FindChild("bias_filler")) != null)
254 }
255
256 return p;
257 }
258 }
259}
static double ParseDouble(string strVal)
Parse double values using the US culture if the decimal separator = '.', then using the native cultur...
The RawProtoCollection class is a list of RawProto objects.
void Add(RawProto p)
Adds a RawProto to the collection.
The RawProto class is used to parse and output Google prototxt file data.
Definition: RawProto.cs:17
RawProtoCollection Children
Returns a collection of this nodes child nodes.
Definition: RawProto.cs:96
RawProto FindChild(string strName)
Searches for a given node.
Definition: RawProto.cs:231
static RawProto Parse(string str)
Parses a prototxt and places it in a new RawProto.
Definition: RawProto.cs:306
string FindValue(string strName)
Searches for a value of a node within this node's children.
Definition: RawProto.cs:105
Specifies the parameters for the BatchNormLayer.
FillerParameter bias_filler
Specifies the bias filler used to file the bias value. If null, a constant(0) filler is used.
override void Copy(LayerParameterBase src)
Copy on parameter to another.
bool scale_bias
Specifies to use the scale and bias terms, otherwise the scale = 1 and bias = 0 are used to form an i...
double eps
Specifies a small value to add to the variance estimate so that we don't divide by zero.
BatchNormParameter()
Constructor for the parameter.
double moving_average_fraction
Specifies how much the moving average decays each iteration. Smaller values make the moving average d...
string useCaffeReason()
Returns the reason that Caffe version was used instead of NVIDIA's cuDnn.
FillerParameter scale_filler
Specifies the scale filler used to fill the scale value. If null, a constant(1) filler is used.
override object Load(System.IO.BinaryReader br, bool bNewInstance=true)
Load the parameter from a binary reader.
override LayerParameterBase Clone()
Creates a new copy of this instance of the parameter.
override RawProto ToProto(string strName)
Convert the parameter into a RawProto.
bool useCudnn()
Queries whether or not to use NVIDIA's cuDnn.
static new BatchNormParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
bool? use_global_stats
If false, normalization is performed over the current mini-batch and global statistics are accumulate...
Specifies whether to use the NVIDIA cuDnn version or Caffe version of a given forward/backward operat...
Engine engine
Specifies the Engine in use.
EngineParameter()
Constructor for the parameter.
static EngineParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Engine
Defines the type of engine to use.
Specifies the filler parameters used to create each Filler.
static FillerParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
override RawProto ToProto(string strName)
Convert the parameter into a RawProto.
FillerParameter Clone()
Creates a new copy of this instance of the parameter.
The LayerParameterBase is the base class for all other layer specific parameters.
The MyCaffe.basecode contains all generic types used throughout MyCaffe.
Definition: Annotation.cs:12
The MyCaffe.param namespace contains parameters used to create models.
The MyCaffe namespace contains the main body of MyCaffe code that closely tracks the C++ Caffe open-...
Definition: Annotation.cs:12