MyCaffe  1.12.2.41
Deep learning software for Windows C# programmers.
ModelBuilder.cs
1using MyCaffe.basecode;
2using MyCaffe.param;
3using MyCaffe.param.ssd;
4using System;
5using System.Collections.Generic;
6using System.Linq;
7using System.Text;
8using System.Threading.Tasks;
9
14{
18 public abstract class ModelBuilder
19 {
        /// <summary>
        /// Specifies the base directory that contains the data and models.
        /// </summary>
        protected string m_strBaseDir;
        /// <summary>
        /// Specifies the base net to be altered.
        /// </summary>
        protected NetParameter m_net = new NetParameter();
32
        /// <summary>
        /// Defines the scale bias type to use after a batch-norm layer (see addConvBNLayer).
        /// </summary>
        public enum SCALE_BIAS
        {
            /// <summary>Do not add a scale or bias layer after the batch-norm layer.</summary>
            NONE,
            /// <summary>Add a SCALE layer (with bias term) after the batch-norm layer.</summary>
            SCALE,
            /// <summary>Add a BIAS layer after the batch-norm layer.</summary>
            BIAS
        }
51
58 public ModelBuilder(string strBaseDir, NetParameter net = null, SolverParameter solver = null)
59 {
60 m_strBaseDir = strBaseDir.TrimEnd('\\', '/');
61 m_net = net;
62 m_solver = solver;
63 }
64
        /// <summary>
        /// Create the solver parameter used with the model (implemented by the derived class).
        /// </summary>
        /// <returns>The SolverParameter created is returned.</returns>
        public abstract SolverParameter CreateSolver();

        /// <summary>
        /// Create the model network parameter (implemented by the derived class).
        /// </summary>
        /// <param name="bDeploy">Optionally, specifies to create the deploy version of the model (default = false).</param>
        public abstract NetParameter CreateModel(bool bDeploy = false);
77
82
89 protected string getFileName(string strFile, string strSubDir)
90 {
91 string strOut = m_strBaseDir + "\\";
92
93 if (!string.IsNullOrEmpty(strSubDir))
94 strOut += strSubDir + "\\";
95
96 strOut += strFile;
97
98 return strOut;
99 }
100
        /// <summary>
        /// Add extra layers on top of a 'base' network (implemented by the derived class).
        /// </summary>
        /// <param name="bUseBatchNorm">Optionally, specifies to use batch normalization within the extra layers (default = true).</param>
        /// <param name="dfLrMult">Optionally, specifies the learning-rate multiplier (default = 1.0).</param>
        protected abstract LayerParameter addExtraLayers(bool bUseBatchNorm = true, double dfLrMult = 1.0);
107
113 protected LayerParameter findLayer(string strName)
114 {
115 foreach (LayerParameter p in m_net.layer)
116 {
117 if (p.name == strName)
118 return p;
119 }
120
121 return null;
122 }
123
129 protected NetParameter createNet(string strName)
130 {
131 NetParameter net = new NetParameter();
132
133 net.name = strName;
134
135 return net;
136 }
137
        /// <summary>
        /// Add a data layer to the base net for the given phase.
        /// </summary>
        /// <param name="strSource">Specifies the data source assigned to data_param.source.</param>
        /// <param name="phase">Specifies the phase the layer is included in.</param>
        /// <param name="nBatchSize">Optionally, specifies the batch size (default = 32).</param>
        /// <param name="bOutputLabel">Optionally, specifies to add a 'label' top (default = true).</param>
        /// <param name="transform">Optionally, specifies the transformation parameter to assign (default = null, none assigned).</param>
        /// <param name="strName">Optionally, specifies the layer/top name (default = "data").</param>
        /// <param name="bSiamese">Optionally, specifies Siamese-style output (all labels output and matches balanced) (default = false).</param>
        /// <returns>The data layer added is returned.</returns>
        protected LayerParameter addDataLayer(string strSource, Phase phase, int nBatchSize = 32, bool bOutputLabel = true, TransformationParameter transform = null, string strName = "data", bool bSiamese = false)
        {
            // NOTE(review): the declaration of 'data' was lost in extraction here — presumably
            // 'LayerParameter data = new LayerParameter(LayerParameter.LayerType.DATA);' — confirm
            // against the original file.

            data.include.Add(new NetStateRule(phase));

            if (transform != null)
                data.transform_param = transform;

            data.name = strName;
            data.data_param.batch_size = (uint)nBatchSize;
            data.data_param.source = strSource;

            if (bSiamese)
            {
                // Siamese nets need every label output and matched pairs balanced.
                data.data_param.output_all_labels = true;
                data.data_param.balance_matches = true;
            }

            // The first top carries the data blob under the layer's own name.
            data.top.Clear();
            data.top.Add(strName);

            if (bOutputLabel)
                data.top.Add("label");

            m_net.layer.Add(data);

            return data;
        }
179
192 protected LayerParameter addAnnotatedDataLayer(string strSource, Phase phase, int nBatchSize = 32, bool bOutputLabel = true, string strLabelMapFile = "", SimpleDatum.ANNOTATION_TYPE anno_type = SimpleDatum.ANNOTATION_TYPE.NONE, TransformationParameter transform = null, List<BatchSampler> rgSampler = null)
193 {
194 LayerParameter data = new LayerParameter(LayerParameter.LayerType.ANNOTATED_DATA);
195
196 data.include.Add(new NetStateRule(phase));
197
198 if (transform != null)
199 data.transform_param = transform;
200
201 data.annotated_data_param.label_map_file = strLabelMapFile;
202
203 if (rgSampler != null)
204 data.annotated_data_param.batch_sampler = rgSampler;
205
206 data.annotated_data_param.anno_type = anno_type;
207 data.name = "data";
208 data.data_param.batch_size = (uint)nBatchSize;
209 data.data_param.source = strSource;
210
211 data.top.Clear();
212 data.top.Add("data");
213
214 if (bOutputLabel)
215 data.top.Add("label");
216
217 m_net.layer.Add(data);
218
219 return data;
220 }
221
        /// <summary>
        /// Create the multi-box head layers: for each source layer described in 'rgInfo' a location
        /// prediction, a confidence prediction and a prior-box layer are built (plus an optional
        /// objectness layer), and each group is then concatenated across all source layers.
        /// </summary>
        /// <param name="data">Specifies the data layer; its first top is added as a second bottom to every prior-box layer.</param>
        /// <param name="nNumClasses">Specifies the number of classes; must be &gt; 0.</param>
        /// <param name="rgInfo">Specifies the per source-layer head settings; all items must be mutually consistent.</param>
        /// <param name="rgPriorVariance">Specifies the variance values assigned to each prior-box layer.</param>
        /// <param name="bUseObjectness">Optionally, specifies to also build objectness prediction layers (default = false).</param>
        /// <param name="bUseBatchNorm">Optionally, specifies to use batch-norm within the conv layers created (default = true).</param>
        /// <param name="dfLrMult">Optionally, specifies the learning-rate multiplier (default = 1.0).</param>
        /// <param name="useScale">NOTE(review): this parameter is not referenced anywhere in the visible body — confirm intent.</param>
        /// <param name="nImageHt">Optionally, specifies the image height assigned to the prior-box layers (default = 0, not assigned).</param>
        /// <param name="nImageWd">Optionally, specifies the image width assigned to the prior-box layers (default = 0, not assigned).</param>
        /// <param name="bShareLocation">Optionally, specifies that locations are shared across classes (default = true).</param>
        /// <param name="bFlip">Optionally, specifies that aspect ratios are flipped, doubling the aspect-ratio priors (default = true).</param>
        /// <param name="bClip">Optionally, specifies the prior-box clip setting (default = true).</param>
        /// <param name="dfOffset">Optionally, specifies the prior-box offset (default = 0.5).</param>
        /// <param name="nKernelSize">Optionally, specifies the kernel size of the loc/conf/objectness conv layers (default = 1).</param>
        /// <param name="nPad">Optionally, specifies the pad of the loc/conf/objectness conv layers (default = 0).</param>
        /// <param name="strConfPostfix">Optionally, specifies a postfix added to the confidence layer names (default = "").</param>
        /// <param name="strLocPostfix">Optionally, specifies a postfix added to the location layer names (default = "").</param>
        /// <returns>The list of concatenated mbox layers (loc, conf, priorbox, and optionally objectness) is returned.</returns>
        protected List<LayerParameter> createMultiBoxHead(LayerParameter data, int nNumClasses, List<MultiBoxHeadInfo> rgInfo, List<float> rgPriorVariance, bool bUseObjectness = false, bool bUseBatchNorm = true, double dfLrMult = 1.0, bool useScale = true, int nImageHt = 0, int nImageWd = 0, bool bShareLocation = true, bool bFlip = true, bool bClip = true, double dfOffset = 0.5, int nKernelSize = 1, int nPad = 0, string strConfPostfix = "", string strLocPostfix = "")
        {
            LayerParameter lastLayer;
            string strName;

            // NOTE(review): the loop index 'i' is not used inside this loop — 'rgInfo[0].Verify(rgInfo[1])'
            // re-checks the same pair on every iteration; this looks like it should be
            // 'rgInfo[0].Verify(rgInfo[i])' — confirm against the original implementation.
            for (int i = 1; i < rgInfo.Count; i++)
            {
                if (!rgInfo[0].Verify(rgInfo[1]))
                    throw new Exception("The multi-bix header info must be consistent across all items.");
            }

            if (nNumClasses <= 0)
                throw new Exception("The number of classes must be > 0.");

            // Names collected per group, used by the final concat layers.
            List<string> rgstrLocLayers = new List<string>();
            List<string> rgstrConfLayers = new List<string>();
            List<string> rgstrPriorBoxLayers = new List<string>();
            List<string> rgstrObjLayers = new List<string>();

            for (int i = 0; i < rgInfo.Count; i++)
            {
                LayerParameter fromLayer = findLayer(rgInfo[i].SourceLayer);

                //---------------------------------------------------
                // Get the normalize value.
                //---------------------------------------------------
                if (rgInfo[i].Normalization.HasValue && rgInfo[i].Normalization.Value != -1)
                {
                    LayerParameter norm = new LayerParameter(LayerParameter.LayerType.NORMALIZATION2);
                    norm.name = fromLayer.name + "_norm";
                    norm.normalization2_param.scale_filler = new FillerParameter("constant", rgInfo[i].Normalization.Value);
                    // NOTE(review): additional normalization2_param assignments appear to have been
                    // lost in extraction here — verify against the original file.
                    norm.top.Add(norm.name);
                    fromLayer = connectAndAddLayer(fromLayer, norm);
                }

                //---------------------------------------------------
                // Intermediate layers.
                //---------------------------------------------------
                if (rgInfo[i].InterLayerDepth.HasValue && rgInfo[i].InterLayerDepth.Value > 0)
                    fromLayer = addConvBNLayer(fromLayer.name, fromLayer.name + "_inter", bUseBatchNorm, true, (int)rgInfo[i].InterLayerDepth.Value, 3, 1, 1, dfLrMult);

                //---------------------------------------------------
                // Estimate number of priors per location given provided parameters.
                //---------------------------------------------------
                double? dfMinSize = rgInfo[i].MinSize;
                double? dfMaxSize = rgInfo[i].MaxSize;
                double? dfAspectHt = rgInfo[i].AspectRatioHeight;
                double? dfAspectWd = rgInfo[i].AspectRatioWidth;
                double? dfStepWd = rgInfo[i].StepWidth;
                double? dfStepHt = rgInfo[i].StepHeight;
                // One aspect ratio when height == width, otherwise two.
                int nAspectLen = (dfAspectWd == dfAspectHt) ? 1 : 2;
                // A max size contributes one extra prior per location.
                int nNumPriorsPerLocation = (dfMaxSize.HasValue) ? (2 + nAspectLen) : (1 + nAspectLen);

                // Flipping doubles the aspect-ratio priors.
                if (bFlip)
                    nNumPriorsPerLocation += nAspectLen;

                //---------------------------------------------------
                // Create location prediction layer.
                //---------------------------------------------------
                int nNumLocOutput = nNumPriorsPerLocation * 4; // 4 box coordinates per prior.
                if (!bShareLocation)
                    nNumLocOutput *= nNumClasses;

                strName = fromLayer.name + "_mbox_loc" + strLocPostfix;
                lastLayer = addConvBNLayer(fromLayer.name, strName, bUseBatchNorm, false, nNumLocOutput, nKernelSize, nPad, 1, dfLrMult);

                // NOTE(review): the declaration of 'permute' was lost in extraction here — presumably
                // 'LayerParameter permute = new LayerParameter(LayerParameter.LayerType.PERMUTE);' — confirm.
                permute.name = strName + "_perm";
                permute.permute_param.order = new List<int>() { 0, 2, 3, 1 };
                permute.top.Add(permute.name);
                lastLayer = connectAndAddLayer(lastLayer, permute);

                // NOTE(review): the declaration of 'flatten' was lost in extraction here — presumably
                // 'LayerParameter flatten = new LayerParameter(LayerParameter.LayerType.FLATTEN);' — confirm.
                flatten.name = strName + "_flat";
                flatten.flatten_param.axis = 1;
                flatten.top.Add(flatten.name);
                lastLayer = connectAndAddLayer(lastLayer, flatten);
                rgstrLocLayers.Add(lastLayer.name);

                //---------------------------------------------------
                // Create confidence prediction layer.
                //---------------------------------------------------
                strName = fromLayer.name + "_mbox_conf" + strConfPostfix;
                int nNumConfOutput = nNumPriorsPerLocation * nNumClasses;
                lastLayer = addConvBNLayer(fromLayer.name, strName, bUseBatchNorm, false, nNumConfOutput, nKernelSize, nPad, 1, dfLrMult);

                permute = new LayerParameter(LayerParameter.LayerType.PERMUTE);
                permute.name = strName + "_perm";
                permute.permute_param.order = new List<int>() { 0, 2, 3, 1 };
                permute.top.Add(permute.name);
                lastLayer = connectAndAddLayer(lastLayer, permute);

                flatten = new LayerParameter(LayerParameter.LayerType.FLATTEN);
                flatten.name = strName + "_flat";
                flatten.flatten_param.axis = 1;
                flatten.top.Add(flatten.name);
                lastLayer = connectAndAddLayer(lastLayer, flatten);
                rgstrConfLayers.Add(lastLayer.name);

                //---------------------------------------------------
                // Create prior generation layer.
                //---------------------------------------------------
                strName = fromLayer.name + "_mbox_priorbox";
                LayerParameter priorbox = new LayerParameter(LayerParameter.LayerType.PRIORBOX);
                priorbox.name = strName;
                priorbox.top.Add(priorbox.name);
                priorbox.prior_box_param.min_size.Add((float)dfMinSize.Value);
                priorbox.prior_box_param.clip = bClip;
                priorbox.prior_box_param.variance = rgPriorVariance;
                priorbox.prior_box_param.offset = (float)dfOffset;

                if (dfMaxSize.HasValue)
                    priorbox.prior_box_param.max_size.Add((float)dfMaxSize.Value);

                if (dfAspectWd.HasValue)
                    priorbox.prior_box_param.aspect_ratio.Add((float)dfAspectWd.Value);

                if (dfAspectHt.HasValue)
                    priorbox.prior_box_param.aspect_ratio.Add((float)dfAspectHt.Value);

                if (dfStepWd.HasValue && dfStepHt.HasValue)
                {
                    // Use the single 'step' when square, otherwise separate h/w steps.
                    if (dfStepWd.Value == dfStepHt.Value)
                    {
                        priorbox.prior_box_param.step = (float)dfStepWd.Value;
                    }
                    else
                    {
                        priorbox.prior_box_param.step_h = (float)dfStepHt.Value;
                        priorbox.prior_box_param.step_w = (float)dfStepWd.Value;
                    }
                }

                if (nImageHt != 0 && nImageWd != 0)
                {
                    // Use the single 'img_size' when square, otherwise separate h/w sizes.
                    if (nImageHt == nImageWd)
                    {
                        priorbox.prior_box_param.img_size = (uint)nImageHt;
                    }
                    else
                    {
                        priorbox.prior_box_param.img_h = (uint)nImageHt;
                        priorbox.prior_box_param.img_w = (uint)nImageWd;
                    }
                }

                // The prior-box layer also takes the data layer's first top as a bottom.
                lastLayer = connectAndAddLayer(fromLayer, priorbox);
                lastLayer.bottom.Add(data.top[0]);
                rgstrPriorBoxLayers.Add(lastLayer.name);

                //---------------------------------------------------
                // Create objectness prediction layer
                //---------------------------------------------------
                if (bUseObjectness)
                {
                    strName = fromLayer.name + "_mbox_objectness";
                    int nNumObjOutput = nNumPriorsPerLocation * 2; // object vs. not-object.
                    lastLayer = addConvBNLayer(fromLayer.name, strName, bUseBatchNorm, false, nNumObjOutput, nKernelSize, nPad, 1, dfLrMult);

                    permute = new LayerParameter(LayerParameter.LayerType.PERMUTE);
                    permute.name = strName + "_perm";
                    permute.permute_param.order = new List<int>() { 0, 2, 3, 1 };
                    lastLayer = connectAndAddLayer(lastLayer, permute);

                    flatten = new LayerParameter(LayerParameter.LayerType.FLATTEN);
                    flatten.name = strName + "_flat";
                    flatten.flatten_param.axis = 1;
                    lastLayer = connectAndAddLayer(lastLayer, flatten);
                    rgstrObjLayers.Add(lastLayer.name);
                }
            }

            //---------------------------------------------------
            // Concatenate priorbox, loc, and conf layers.
            //---------------------------------------------------
            List<LayerParameter> rgMboxLayers = new List<LayerParameter>();
            strName = "mbox_loc";

            // NOTE(review): the declaration of 'concat' was lost in extraction here — presumably
            // 'LayerParameter concat = new LayerParameter(LayerParameter.LayerType.CONCAT);' — confirm.
            concat.name = strName;
            concat.concat_param.axis = 1;
            concat.bottom = rgstrLocLayers;
            concat.top.Add(concat.name);
            m_net.layer.Add(concat);
            rgMboxLayers.Add(concat);

            strName = "mbox_conf";
            concat = new LayerParameter(LayerParameter.LayerType.CONCAT);
            concat.name = strName;
            concat.concat_param.axis = 1;
            concat.bottom = rgstrConfLayers;
            concat.top.Add(concat.name);
            m_net.layer.Add(concat);
            rgMboxLayers.Add(concat);

            strName = "mbox_priorbox";
            concat = new LayerParameter(LayerParameter.LayerType.CONCAT);
            concat.name = strName;
            concat.concat_param.axis = 2; // priors concatenate along axis 2, unlike loc/conf.
            concat.bottom = rgstrPriorBoxLayers;
            concat.top.Add(concat.name);
            m_net.layer.Add(concat);
            rgMboxLayers.Add(concat);

            if (bUseObjectness)
            {
                strName = "mbox_objectness";
                concat = new LayerParameter(LayerParameter.LayerType.CONCAT);
                concat.name = strName;
                concat.concat_param.axis = 1;
                concat.bottom = rgstrObjLayers;
                concat.top.Add(concat.name);
                m_net.layer.Add(concat);
                rgMboxLayers.Add(concat);
            }

            return rgMboxLayers;
        }
464
        /// <summary>
        /// Add a convolution layer, optionally followed by batch-norm, scale (or bias) and ReLU layers.
        /// </summary>
        /// <param name="strInputLayer">Specifies the name of the input layer to connect from.</param>
        /// <param name="strOutputLayer">Specifies the base name used to build the new layer names.</param>
        /// <param name="bUseBatchNorm">Specifies to add a batch-norm layer (and gaussian-initialize the conv weights with no bias term).</param>
        /// <param name="bUseRelU">Specifies to add an in-place ReLU layer after the chain.</param>
        /// <param name="nNumOutput">Specifies the number of conv outputs.</param>
        /// <param name="nKernelSize">Specifies the conv kernel size.</param>
        /// <param name="nPad">Specifies the conv pad.</param>
        /// <param name="nStride">Specifies the conv stride.</param>
        /// <param name="dfLrMult">Optionally, specifies the learning-rate multiplier (default = 1.0).</param>
        /// <param name="nDilation">Optionally, specifies the conv dilation (default = 1).</param>
        /// <param name="useScale">Optionally, specifies whether a SCALE or BIAS layer follows the batch-norm (default = SCALE).</param>
        /// <param name="strConvPrefix">Optionally, specifies the conv name prefix (default = "").</param>
        /// <param name="strConvPostfix">Optionally, specifies the conv name postfix (default = "").</param>
        /// <param name="strBnPrefix">Optionally, specifies the batch-norm name prefix (default = "").</param>
        /// <param name="strBnPostfix">Optionally, specifies the batch-norm name postfix (default = "_bn").</param>
        /// <param name="strScalePrefix">Optionally, specifies the scale name prefix (default = "").</param>
        /// <param name="strScalePostFix">Optionally, specifies the scale name postfix (default = "_scale").</param>
        /// <param name="strBiasPrefix">Optionally, specifies the bias name prefix (default = "").</param>
        /// <param name="strBiasPostfix">Optionally, specifies the bias name postfix (default = "_bias").</param>
        /// <param name="bNamedParams">Optionally, specifies to give the ParamSpec entries explicit names (default = false).</param>
        /// <param name="strLayerPostfix">Optionally, specifies a postfix appended to every layer name (default = "").</param>
        /// <param name="phaseExclude">Optionally, specifies a phase to exclude the new layers from (default = NONE).</param>
        /// <returns>The last layer added in the chain is returned.</returns>
        protected LayerParameter addConvBNLayer(string strInputLayer, string strOutputLayer, bool bUseBatchNorm, bool bUseRelU, int nNumOutput, int nKernelSize, int nPad, int nStride, double dfLrMult = 1.0, int nDilation = 1, SCALE_BIAS useScale = SCALE_BIAS.SCALE, string strConvPrefix = "", string strConvPostfix = "", string strBnPrefix = "", string strBnPostfix = "_bn", string strScalePrefix = "", string strScalePostFix = "_scale", string strBiasPrefix = "", string strBiasPostfix = "_bias", bool bNamedParams = false, string strLayerPostfix = "", Phase phaseExclude = Phase.NONE)
        {
            LayerParameter lastLayer = findLayer(strInputLayer);
            string strName = strConvPrefix + strOutputLayer + strConvPostfix;

            // NOTE(review): the declaration of 'type' was lost in extraction here — presumably
            // 'LayerParameter.LayerType type = LayerParameter.LayerType.CONVOLUTION;' — confirm.
            LayerParameter convLayer = new LayerParameter(type);
            convLayer.convolution_param.weight_filler = new FillerParameter("xavier");
            convLayer.convolution_param.bias_filler = new FillerParameter("constant", 0);
            convLayer.convolution_param.bias_term = true;
            convLayer.name = strName + strLayerPostfix;
            convLayer.convolution_param.kernel_size.Add((uint)nKernelSize);
            convLayer.convolution_param.pad.Add((uint)nPad);
            convLayer.convolution_param.stride.Add((uint)nStride);
            convLayer.convolution_param.dilation.Add((uint)nDilation);
            convLayer.convolution_param.num_output = (uint)nNumOutput;
            convLayer.top.Add(convLayer.name);

            addExclusion(convLayer, phaseExclude);

            LayerParameter bnLayer = null;
            // NOTE(review): 'bnLayerL' is never assigned or used in the visible code — confirm
            // whether it is dead or used by lines lost in extraction.
            LayerParameter bnLayerL = null;
            LayerParameter scaleLayer = null;
            LayerParameter biasLayer = null;
            LayerParameter reluLayer = null;

            // Setup the BachNorm Layer
            if (bUseBatchNorm)
            {
                convLayer.parameters.Add(new ParamSpec(dfLrMult, 1.0, (bNamedParams) ? strName + "_w" : null));
                // Batch-norm mode re-initializes the conv weights and drops the bias term.
                convLayer.convolution_param.weight_filler = new FillerParameter("gaussian", 0, 0, 0.01);
                convLayer.convolution_param.bias_term = false;

                bnLayer = new LayerParameter(LayerParameter.LayerType.BATCHNORM);
                strName = strBnPrefix + strOutputLayer + strBnPostfix;
                bnLayer.name = strName + strLayerPostfix;
                bnLayer.batch_norm_param.eps = 0.001;
                // NOTE(review): one batch_norm_param assignment appears to have been lost in
                // extraction just above this line — verify against the original file.
                bnLayer.batch_norm_param.use_global_stats = false;
                bnLayer.parameters.Add(new ParamSpec(0.0, 0.0, (bNamedParams) ? strName + "_w1" : null));
                bnLayer.parameters.Add(new ParamSpec(0.0, 0.0, (bNamedParams) ? strName + "_w2" : null));
                bnLayer.parameters.Add(new ParamSpec(0.0, 0.0, (bNamedParams) ? strName + "_w3" : null));
                bnLayer.top.Add(bnLayer.name);
                addExclusion(bnLayer, phaseExclude);

                double dfBnLrMult = dfLrMult;

                // When using global stats, not updating scale/bias parameters.
                // (use_global_stats is set to false above, so as written this branch never fires.)
                if (bnLayer.batch_norm_param.use_global_stats.GetValueOrDefault(false))
                    dfBnLrMult = 0;

                // Parameters for scale bias layer after batchnorm.
                if (useScale == SCALE_BIAS.SCALE)
                {
                    scaleLayer = new LayerParameter(LayerParameter.LayerType.SCALE);
                    strName = strScalePrefix + strOutputLayer + strScalePostFix;
                    scaleLayer.name = strName + strLayerPostfix;
                    scaleLayer.scale_param.bias_term = true;
                    scaleLayer.scale_param.filler = new FillerParameter("constant", 1.0);
                    scaleLayer.scale_param.bias_filler = new FillerParameter("constant", 0.0);
                    scaleLayer.parameters.Add(new ParamSpec(dfBnLrMult, 0.0, (bNamedParams) ? strName + "_w" : null));
                    scaleLayer.parameters.Add(new ParamSpec(dfBnLrMult, 0.0, (bNamedParams) ? strName + "_b" : null));
                    scaleLayer.top.Add(scaleLayer.name);
                    addExclusion(scaleLayer, phaseExclude);
                }
                else if (useScale == SCALE_BIAS.BIAS)
                {
                    biasLayer = new LayerParameter(LayerParameter.LayerType.BIAS);
                    strName = strBiasPrefix + strOutputLayer + strBiasPostfix;
                    biasLayer.name = strName + strLayerPostfix;
                    biasLayer.bias_param.filler = new FillerParameter("constant", 0.0);
                    biasLayer.parameters.Add(new ParamSpec(dfBnLrMult, 0.0, (bNamedParams) ? strName + "_w" : null));
                    biasLayer.top.Add(biasLayer.name);
                    addExclusion(biasLayer, phaseExclude);
                }
            }
            else
            {
                // No batch-norm: standard weight + bias learning-rate multipliers.
                convLayer.parameters.Add(new ParamSpec(dfLrMult, 1.0, (bNamedParams) ? strName + "_w" : null));
                convLayer.parameters.Add(new ParamSpec(dfLrMult * 2, 0.0, (bNamedParams) ? strName + "_b" : null));
            }

            lastLayer = connectAndAddLayer(strInputLayer, convLayer);

            if (bnLayer != null)
                lastLayer = connectAndAddLayer(lastLayer, bnLayer, true, true);

            if (scaleLayer != null)
                lastLayer = connectAndAddLayer(lastLayer, scaleLayer, true, true);

            if (biasLayer != null)
                lastLayer = connectAndAddLayer(lastLayer, biasLayer);

            if (bUseRelU)
            {
                reluLayer = new LayerParameter(LayerParameter.LayerType.RELU);
                reluLayer.name = convLayer.name + "_relu";
                addExclusion(reluLayer, phaseExclude);
                lastLayer = connectAndAddLayer(lastLayer, reluLayer, true, true);
            }

            return lastLayer;
        }
594
602 protected LayerParameter connectAndAddLayer(string fromLayer, LayerParameter toLayer, string fromLayer2 = null)
603 {
604 toLayer.bottom.Clear();
605 toLayer.bottom.Add(fromLayer);
606
607 if (fromLayer2 != null)
608 toLayer.bottom.Add(fromLayer2);
609
610 m_net.layer.Add(toLayer);
611
612 return toLayer;
613 }
614
624 protected LayerParameter connectAndAddLayer(LayerParameter fromLayer, LayerParameter toLayer, bool bInPlace = false, bool bAdd = true, int nTopIdx = 0)
625 {
626 toLayer.bottom.Clear();
627 toLayer.bottom.Add(fromLayer.top[nTopIdx]);
628
629 if (bAdd)
630 m_net.layer.Add(toLayer);
631
632 if (bInPlace)
633 {
634 toLayer.top.Clear();
635 toLayer.top.Add(fromLayer.top[nTopIdx]);
636 }
637
638 return toLayer;
639 }
640
648 protected LayerParameter connectAndAddLayer(List<LayerParameter> rgFromLayer, LayerParameter toLayer, bool bAdd = true)
649 {
650 toLayer.bottom.Clear();
651
652 for (int i = 0; i < rgFromLayer.Count; i++)
653 {
654 toLayer.bottom.Add(rgFromLayer[i].top[0]);
655 }
656
657 if (bAdd)
658 m_net.layer.Add(toLayer);
659
660 return toLayer;
661 }
662
        /// <summary>
        /// Create a new convolution layer parameter (not yet added to the net).
        /// </summary>
        /// <param name="strName">Specifies the layer name (also used as its top name).</param>
        /// <param name="nNumOutput">Specifies the number of outputs.</param>
        /// <param name="nKernelSize">Specifies the kernel size.</param>
        /// <param name="nPad">Optionally, specifies the pad (default = 0).</param>
        /// <param name="nStride">Optionally, specifies the stride (default = 1).</param>
        /// <param name="nDilation">Optionally, specifies the dilation (default = 1).</param>
        /// <returns>The convolution layer parameter is returned.</returns>
        protected LayerParameter createConvolution(string strName, int nNumOutput, int nKernelSize, int nPad = 0, int nStride = 1, int nDilation = 1)
        {
            // NOTE(review): the declaration of 'conv' was lost in extraction here — presumably
            // 'LayerParameter conv = new LayerParameter(LayerParameter.LayerType.CONVOLUTION);' — confirm.
            conv.name = strName;
            conv.convolution_param.num_output = (uint)nNumOutput;
            conv.convolution_param.kernel_size.Add((uint)nKernelSize);
            conv.convolution_param.pad.Add((uint)nPad);
            conv.convolution_param.stride.Add((uint)nStride);
            conv.convolution_param.dilation.Add((uint)nDilation);
            // NOTE(review): a weight_filler assignment appears to have been lost in extraction
            // just above this line — verify against the original file.
            conv.convolution_param.bias_filler = new FillerParameter("constant", 0.0);
            conv.parameters.Add(new ParamSpec(1.0, 1.0));       // weight lr/decay multipliers.
            conv.parameters.Add(new ParamSpec(2.0, 0.0));       // bias lr/decay multipliers.
            conv.top.Add(strName);

            return conv;
        }
690
        /// <summary>
        /// Create a new pooling layer parameter (not yet added to the net).
        /// </summary>
        /// <param name="strName">Specifies the layer name (also used as its top name).</param>
        /// <param name="method">Specifies the pooling method (e.g. MAX, AVE).</param>
        /// <param name="nKernelSize">Specifies the kernel size.</param>
        /// <param name="nPad">Optionally, specifies the pad (default = 0).</param>
        /// <param name="nStride">Optionally, specifies the stride (default = 1).</param>
        /// <returns>The pooling layer parameter is returned.</returns>
        protected LayerParameter createPooling(string strName, PoolingParameter.PoolingMethod method, int nKernelSize, int nPad = 0, int nStride = 1)
        {
            // NOTE(review): the declaration of 'pool' was lost in extraction here — presumably
            // 'LayerParameter pool = new LayerParameter(LayerParameter.LayerType.POOLING);' — confirm.
            pool.name = strName;
            pool.pooling_param.kernel_size.Add((uint)nKernelSize);
            pool.pooling_param.stride.Add((uint)nStride);
            pool.pooling_param.pad.Add((uint)nPad);
            pool.pooling_param.pool = method;
            pool.top.Add(strName);

            return pool;
        }
712
        /// <summary>
        /// Add a VGG block: 'nConvCount' conv+ReLU pairs, optionally followed by a pooling (or strided conv) layer.
        /// </summary>
        /// <param name="lastLayer">Specifies the layer to connect the block to.</param>
        /// <param name="nBlockIdx">Specifies the block index used in the layer names (e.g. conv1_x, pool1).</param>
        /// <param name="nConvIdx">Specifies the starting conv index within the block.</param>
        /// <param name="nNumOutput">Specifies the number of conv outputs.</param>
        /// <param name="nConvCount">Specifies how many conv+ReLU pairs to add.</param>
        /// <param name="bNoPool">Specifies the block ending: null = no trailing layer at all; true = strided conv instead of pooling; false = max pooling.</param>
        /// <param name="bDilatePool">Optionally, specifies a dilate-friendly 3x3/stride-1 pooling instead of 2x2/stride-2 (default = false).</param>
        /// <param name="nKernelSize">Optionally, specifies the conv kernel size (default = 3).</param>
        /// <param name="nPad">Optionally, specifies the conv pad (default = 1).</param>
        /// <param name="nStride">Optionally, specifies the conv stride (default = 1).</param>
        /// <param name="nDilation">Optionally, specifies the conv dilation (default = 1).</param>
        /// <returns>The last layer added is returned.</returns>
        protected LayerParameter addVGGBlock(LayerParameter lastLayer, int nBlockIdx, int nConvIdx, int nNumOutput, int nConvCount, bool? bNoPool, bool bDilatePool = false, int nKernelSize = 3, int nPad = 1, int nStride = 1, int nDilation = 1)
        {
            for (int i = 0; i < nConvCount; i++)
            {
                string strConvName = "conv" + nBlockIdx.ToString() + "_" + nConvIdx.ToString();

                LayerParameter conv = createConvolution(strConvName, nNumOutput, nKernelSize, nPad, nStride, nDilation);
                lastLayer = connectAndAddLayer(lastLayer, conv);

                // NOTE(review): the declaration of 'relu' was lost in extraction here — presumably
                // 'LayerParameter relu = new LayerParameter(LayerParameter.LayerType.RELU);' — confirm.
                // Also note the ReLU name only uses the block index, so every conv in the block
                // produces the same ReLU name — confirm this is intended.
                relu.name = "relu" + nBlockIdx.ToString();
                lastLayer = connectAndAddLayer(lastLayer, relu, true);

                nConvIdx++;
            }

            // A null bNoPool means the block ends after the conv+ReLU pairs.
            if (!bNoPool.HasValue)
                return lastLayer;

            if (bNoPool.Value)
            {
                // Replace pooling with a stride-2 convolution.
                string strConvName = "conv" + nBlockIdx.ToString() + "_" + nConvIdx.ToString();
                LayerParameter conv = createConvolution(strConvName, nNumOutput, 3, 1, 2);
                lastLayer = connectAndAddLayer(lastLayer, conv);
            }
            else
            {
                string strPoolName = "pool" + nBlockIdx.ToString();
                LayerParameter pool = (bDilatePool) ? createPooling(strPoolName, PoolingParameter.PoolingMethod.MAX, 3, 1, 1) : createPooling(strPoolName, PoolingParameter.PoolingMethod.MAX, 2, 0, 2);
                lastLayer = connectAndAddLayer(lastLayer, pool);
            }

            return lastLayer;
        }
762
        /// <summary>
        /// Add the VGG 'fc' tail (fc6/fc7), either as fully-convolutional layers or as standard layers,
        /// preceded by a final pooling (or strided conv) stage.
        /// </summary>
        /// <param name="lastLayer">Specifies the layer to connect the tail to.</param>
        /// <param name="nBlockIdx">Specifies the block index used in the pool/ReLU names.</param>
        /// <param name="nConvIdx">Specifies the conv index used in the conv name.</param>
        /// <param name="nNumOutput">Specifies the conv output count; overridden to 1024/4096 in full-conv mode.</param>
        /// <param name="nDilation">Specifies the base dilation; scaled up in full-conv mode.</param>
        /// <param name="bDilated">Specifies dilated mode (3x3/stride-1 pooling and larger dilations).</param>
        /// <param name="bNoPool">Specifies to use a strided conv instead of pooling.</param>
        /// <param name="bFullConv">Specifies fully-convolutional fc6/fc7 layers.</param>
        /// <param name="bReduced">Specifies the reduced variant (1024 outputs, 3x3 kernels, larger dilation).</param>
        /// <param name="bDropout">Specifies to add dropout (ratio 0.5) after fc6 and fc7.</param>
        /// <returns>The last layer added is returned.</returns>
        protected LayerParameter addVGGfc(LayerParameter lastLayer, int nBlockIdx, int nConvIdx, int nNumOutput, int nDilation, bool bDilated, bool bNoPool, bool bFullConv, bool bReduced, bool bDropout)
        {
            string strConvName = "conv" + nBlockIdx.ToString() + "_" + nConvIdx.ToString();
            string strPoolName = "pool" + nBlockIdx.ToString();

            if (bDilated)
            {
                if (bNoPool)
                {
                    LayerParameter conv = createConvolution(strConvName, nNumOutput, 3, 1, 1);
                    lastLayer = connectAndAddLayer(lastLayer, conv);
                }
                else
                {
                    // Dilated mode keeps spatial resolution (3x3, pad 1, stride 1).
                    LayerParameter pool = createPooling(strPoolName, PoolingParameter.PoolingMethod.MAX, 3, 1, 1);
                    lastLayer = connectAndAddLayer(lastLayer, pool);
                }
            }
            else
            {
                if (bNoPool)
                {
                    LayerParameter conv = createConvolution(strConvName, nNumOutput, 3, 1, 2);
                    lastLayer = connectAndAddLayer(lastLayer, conv);
                }
                else
                {
                    LayerParameter pool = createPooling(strPoolName, PoolingParameter.PoolingMethod.MAX, 2, 0, 2);
                    lastLayer = connectAndAddLayer(lastLayer, pool);
                }
            }

            if (bFullConv)
            {
                int nKernelSize;

                // Select kernel/dilation/output count for the fully-convolutional fc6.
                if (bDilated)
                {
                    if (bReduced)
                    {
                        nDilation *= 6;
                        nKernelSize = 3;
                        nNumOutput = 1024;
                    }
                    else
                    {
                        nDilation *= 2;
                        nKernelSize = 7;
                        nNumOutput = 4096;
                    }
                }
                else
                {
                    if (bReduced)
                    {
                        nDilation *= 3;
                        nKernelSize = 3;
                        nNumOutput = 1024;
                    }
                    else
                    {
                        nKernelSize = 7;
                        nNumOutput = 4096;
                    }
                }

                // 'same'-style padding for the dilated kernel.
                int nPad = (int)((nKernelSize + (nDilation - 1) * (nKernelSize - 1)) - 1) / 2;
                LayerParameter fc6 = createConvolution("fc6", nNumOutput, nKernelSize, nPad, 1, nDilation);
                lastLayer = connectAndAddLayer(lastLayer, fc6);

                // NOTE(review): the declaration of 'relu' was lost in extraction here — presumably
                // 'LayerParameter relu = new LayerParameter(LayerParameter.LayerType.RELU);' — confirm.
                relu.name = "relu" + nBlockIdx.ToString();
                lastLayer = connectAndAddLayer(lastLayer, relu, true);

                if (bDropout)
                {
                    // NOTE(review): the declaration of 'dropout' was lost in extraction here —
                    // presumably a DROPOUT LayerParameter — confirm.
                    dropout.name = "dropout6";
                    dropout.dropout_param.dropout_ratio = 0.5;
                    lastLayer = connectAndAddLayer(lastLayer, dropout, true);
                }

                // fc7 is a 1x1 convolution in full-conv mode.
                LayerParameter fc7 = createConvolution("fc7", nNumOutput, 1);
                lastLayer = connectAndAddLayer(lastLayer, fc7);

                if (bDropout)
                {
                    // NOTE(review): the declaration of 'dropout' was lost in extraction here — confirm.
                    dropout.name = "dropout7";
                    dropout.dropout_param.dropout_ratio = 0.5;
                    lastLayer = connectAndAddLayer(lastLayer, dropout, true);
                }
            }
            else
            {
                // NOTE(review): the declaration of 'fc6' was lost in extraction here (presumably a
                // non-convolutional layer such as INNERPRODUCT) — confirm against the original file.
                fc6.name = "fc6";
                lastLayer = connectAndAddLayer(lastLayer, fc6, true);

                if (bDropout)
                {
                    // NOTE(review): the declaration of 'dropout' was lost in extraction here — confirm.
                    dropout.name = "dropout6";
                    dropout.dropout_param.dropout_ratio = 0.5;
                    lastLayer = connectAndAddLayer(lastLayer, dropout, true);
                }

                // NOTE(review): 'fc6.name = "fc7"' followed by connecting 'fc7' looks suspicious —
                // the declaration of 'fc7' was lost in extraction just above, and this line likely
                // should read 'fc7.name = "fc7"'; confirm against the original file.
                fc6.name = "fc7";
                lastLayer = connectAndAddLayer(lastLayer, fc7);

                if (bDropout)
                {
                    // NOTE(review): the declaration of 'dropout' was lost in extraction here — confirm.
                    dropout.name = "dropout7";
                    dropout.dropout_param.dropout_ratio = 0.5;
                    lastLayer = connectAndAddLayer(lastLayer, dropout, true);
                }
            }

            return lastLayer;
        }
901
915 protected LayerParameter addVGGNetBody(LayerParameter lastLayer, bool bNeedFc = true, bool bFullConv = true, bool bReduced = true, bool bDilated = true, bool bNoPool = false, bool bDropout = false, List<string> rgstrFreezeLayers = null, bool bDilatePool4 = false)
916 {
917 lastLayer = addVGGBlock(lastLayer, 1, 1, 64, 2, bNoPool, false, 3, 1, 1);
918 lastLayer = addVGGBlock(lastLayer, 2, 1, 128, 2, bNoPool, false, 3, 1, 1);
919 lastLayer = addVGGBlock(lastLayer, 3, 1, 256, 3, bNoPool, false, 3, 1, 1);
920 lastLayer = addVGGBlock(lastLayer, 4, 1, 512, 3, bNoPool, bDilatePool4, 3, 1, 1);
921
922 int nDilation = (bDilatePool4) ? 2 : 1;
923 int nKernelSize = 3;
924 int nPad = (int)((nKernelSize + (nDilation - 1) * (nKernelSize - 1)) - 1) / 2;
925 lastLayer = addVGGBlock(lastLayer, 5, 1, 512, 3, null, false, nKernelSize, nPad, 1, nDilation);
926
927 if (bNeedFc)
928 lastLayer = addVGGfc(lastLayer, 5, 4, 512, nDilation, bDilated, bNoPool, bFullConv, bReduced, bDropout);
929
930 if (rgstrFreezeLayers != null)
931 {
932 foreach (string strFreezeLayer in rgstrFreezeLayers)
933 {
934 LayerParameter p = findLayer(strFreezeLayer);
935 if (p != null)
936 p.freeze_learning = true;
937 }
938 }
939
940 return lastLayer;
941 }
942
943
        /// <summary>
        /// Add a ResNet bottleneck body (branch2a/2b/2c plus optional branch1 projection) joined by
        /// an eltwise sum followed by an in-place ReLU.
        /// </summary>
        /// <param name="lastLayer">Specifies the layer to connect the body to.</param>
        /// <param name="strBlockName">Specifies the block name embedded in the layer names (e.g. "2a").</param>
        /// <param name="nOut2A">Specifies the branch2a output count.</param>
        /// <param name="nOut2B">Specifies the branch2b output count.</param>
        /// <param name="nOut2C">Specifies the branch2c (and branch1) output count.</param>
        /// <param name="nStride">Specifies the stride applied to the first conv (branch1 when used, else branch2a).</param>
        /// <param name="bUseBranch1">Specifies to add the branch1 projection conv (used when dimensions change).</param>
        /// <param name="nDilation">Optionally, specifies the branch2b dilation (default = 1).</param>
        /// <param name="bNamedParams">Optionally, specifies to give the ParamSpec entries explicit names (default = false).</param>
        /// <param name="strLayerPostfix">Optionally, specifies a postfix appended to every layer name (default = "").</param>
        /// <param name="phaseExclude">Optionally, specifies a phase to exclude the new layers from (default = NONE).</param>
        /// <returns>The eltwise layer is returned (the trailing in-place ReLU re-uses its top).</returns>
        protected LayerParameter addResBody(LayerParameter lastLayer, string strBlockName, int nOut2A, int nOut2B, int nOut2C, int nStride, bool bUseBranch1, int nDilation = 1, bool bNamedParams = false, string strLayerPostfix = "", Phase phaseExclude = Phase.NONE)
        {
            string strConvPrefix = "res_" + strBlockName;
            string strConvPostfix = "";
            string strBnPrefix = "bn_" + strBlockName;
            string strBnPostfix = "";
            string strScalePrefix = "scale_" + strBlockName;
            string strScalePostfix = "";
            SCALE_BIAS useScale = SCALE_BIAS.SCALE;
            string strBranch1 = lastLayer.name;
            string strBranch2;
            string strBranchName;
            string strOutName = lastLayer.name;

            if (bUseBranch1)
            {
                // Projection shortcut: 1x1 conv carries the stride; the main path then uses stride 1.
                strBranchName = "_br1";
                lastLayer = addConvBNLayer(lastLayer.name, strBranchName, true, false, nOut2C, 1, 0, nStride, 1, nDilation, useScale, strConvPrefix, strConvPostfix, strBnPrefix, strBnPostfix, strScalePrefix, strScalePostfix, "", "_bias", bNamedParams, strLayerPostfix, phaseExclude);
                strBranch1 = lastLayer.top[0];
                strOutName = strBranch1;
                nStride = 1;
            }

            strBranchName = "_br2a";
            lastLayer = addConvBNLayer(strOutName, strBranchName, true, true, nOut2A, 1, 0, nStride, 1, nDilation, useScale, strConvPrefix, strConvPostfix, strBnPrefix, strBnPostfix, strScalePrefix, strScalePostfix, "", "_bias", bNamedParams, strLayerPostfix, phaseExclude);
            strOutName = strConvPrefix + strBranchName + strLayerPostfix;
            strBranchName = "_br2b";

            if (nDilation == 1)
            {
                lastLayer = addConvBNLayer(strOutName, strBranchName, true, true, nOut2B, 3, 1, 1, 1, nDilation, useScale, strConvPrefix, strConvPostfix, strBnPrefix, strBnPostfix, strScalePrefix, strScalePostfix, "", "_bias", bNamedParams, strLayerPostfix, phaseExclude);
            }
            else
            {
                // Pad grows with dilation to preserve the output size of the 3x3 conv.
                int nPad = (int)(((3 + (nDilation - 1) * 2) - 1) / 2);
                lastLayer = addConvBNLayer(strOutName, strBranchName, true, true, nOut2B, 3, nPad, 1, 1, nDilation, useScale, strConvPrefix, strConvPostfix, strBnPrefix, strBnPostfix, strScalePrefix, strScalePostfix, "", "_bias", bNamedParams, strLayerPostfix, phaseExclude);
            }

            strOutName = strConvPrefix + strBranchName + strLayerPostfix;
            strBranchName = "_br2c";

            lastLayer = addConvBNLayer(strOutName, strBranchName, true, false, nOut2C, 1, 0, 1, 1, 1, useScale, strConvPrefix, strConvPostfix, strBnPrefix, strBnPostfix, strScalePrefix, strScalePostfix, "", "_bias", bNamedParams, strLayerPostfix, phaseExclude);
            strBranch2 = lastLayer.top[0];

            // NOTE(review): the declaration of 'eltwise' was lost in extraction here — presumably
            // 'LayerParameter eltwise = new LayerParameter(LayerParameter.LayerType.ELTWISE);' — confirm.
            eltwise.name = "res" + strBlockName + strLayerPostfix;
            eltwise.bottom.Add(strBranch1);
            eltwise.bottom.Add(strBranch2);
            eltwise.top.Add(eltwise.name);
            addExclusion(eltwise, phaseExclude);
            m_net.layer.Add(eltwise);
            lastLayer = eltwise;

            // NOTE(review): the declaration of 'relu' was lost in extraction here — presumably
            // 'LayerParameter relu = new LayerParameter(LayerParameter.LayerType.RELU);' — confirm.
            relu.name = eltwise.name + "_relu";
            addExclusion(relu, phaseExclude);
            // The ReLU is added in place; the eltwise layer remains the returned 'lastLayer'.
            connectAndAddLayer(lastLayer, relu, true, true);

            return lastLayer;
        }
1019
1025 protected void addExclusion(LayerParameter p, Phase phase)
1026 {
1027 if (phase == Phase.NONE)
1028 return;
1029
1030 if (p.include.Count > 0)
1031 return;
1032
1033 p.exclude.Add(new NetStateRule(phase));
1034 }
1035
1048 protected LayerParameter addResNetBody(string strDataName, int nBlock3Count = 4, int nBlock4Count = 23, bool bUsePool5 = true, bool bUseDilationConv5 = false, bool bNamedParams = false, string strLayerPostfix = "", Phase phaseExclude = Phase.NONE)
1049 {
1050 string strConvPrefix = "";
1051 string strConvPostfix = "";
1052 string strBnPrefix = "bn_";
1053 string strBnPostfix = "";
1054 string strScalePrefix = "scale_";
1055 string strScalePostfix = "";
1056
1057 LayerParameter lastLayer = addConvBNLayer(strDataName, "conv1", true, true, 64, 7, 3, 2, 1, 1, SCALE_BIAS.SCALE, strConvPrefix, strConvPostfix, strBnPrefix, strBnPostfix, strScalePrefix, strScalePostfix, "", "_bias", bNamedParams, strLayerPostfix, phaseExclude);
1058
1059 LayerParameter pool = createPooling("pool1" + strLayerPostfix, PoolingParameter.PoolingMethod.MAX, 3, 0, 2);
1060 addExclusion(pool, phaseExclude);
1061 lastLayer = connectAndAddLayer(lastLayer, pool);
1062
1063 lastLayer = addResBody(lastLayer, "2a", 64, 64, 256, 1, true, 1, bNamedParams, strLayerPostfix, phaseExclude);
1064 lastLayer = addResBody(lastLayer, "2b", 64, 64, 256, 1, false, 1, bNamedParams, strLayerPostfix, phaseExclude);
1065 lastLayer = addResBody(lastLayer, "2c", 64, 64, 256, 1, false, 1, bNamedParams, strLayerPostfix, phaseExclude);
1066
1067 lastLayer = addResBody(lastLayer, "3a", 128, 128, 512, 2, true, 1, bNamedParams, strLayerPostfix, phaseExclude);
1068 for (int i = 1; i <= nBlock3Count; i++)
1069 {
1070 lastLayer = addResBody(lastLayer, "3b" + i.ToString(), 128, 128, 512, 1, false, 1, bNamedParams, strLayerPostfix, phaseExclude);
1071 }
1072
1073 lastLayer = addResBody(lastLayer, "4a", 256, 256, 1024, 2, true, 1, bNamedParams, strLayerPostfix, phaseExclude);
1074 for (int i = 1; i <= nBlock4Count; i++)
1075 {
1076 lastLayer = addResBody(lastLayer, "4b" + i.ToString(), 256, 256, 1024, 1, false, 1, bNamedParams, strLayerPostfix, phaseExclude);
1077 }
1078
1079 int nStride = 2;
1080 int nDilation = 1;
1081 if (bUseDilationConv5)
1082 {
1083 nStride = 1;
1084 nDilation = 2;
1085 }
1086
1087 lastLayer = addResBody(lastLayer, "5a", 512, 512, 2048, nStride, true, nDilation, bNamedParams, strLayerPostfix, phaseExclude);
1088 lastLayer = addResBody(lastLayer, "5b", 512, 512, 2048, 1, false, nDilation, bNamedParams, strLayerPostfix, phaseExclude);
1089 lastLayer = addResBody(lastLayer, "5c", 512, 512, 2048, 1, false, nDilation, bNamedParams, strLayerPostfix, phaseExclude);
1090
1091 if (bUsePool5)
1092 {
1093 LayerParameter pool5 = createPooling("pool5" + strLayerPostfix, PoolingParameter.PoolingMethod.AVE, 3, 0, 1);
1094 addExclusion(pool5, phaseExclude);
1095 pool5.pooling_param.global_pooling = true;
1096 lastLayer = connectAndAddLayer(lastLayer, pool5);
1097 }
1098
1099 return lastLayer;
1100 }
1101
1106 {
1107 get { return m_net; }
1108 }
1109
1114 {
1115 get { return m_solver; }
1116 }
1117 }
1118
1122 public class MultiBoxHeadInfo
1123 {
1124 string m_strSourceLayer;
1125 double? m_dfMinSize;
1126 double? m_dfMaxSize;
1127 double? m_dfStepWidth;
1128 double? m_dfStepHeight;
1129 double? m_dfAspectRatioHeight;
1130 double? m_dfAspectRatioWidth;
1131 double? m_dfNormalization;
1132 double? m_nInterLayerDepth;
1133
1146 public MultiBoxHeadInfo(string strSrcLayer, double? dfMinSize = null, double? dfMaxSize = null, double? dfStepWidth = null, double? dfStepHeight = null, double? dfAspectRatioWidth = null, double? dfAspectRatioHeight = null, double? dfNormalization = null, int? nInterLayerDepth = null)
1147 {
1148 m_strSourceLayer = strSrcLayer;
1149 m_dfMinSize = dfMinSize;
1150 m_dfMaxSize = dfMaxSize;
1151 m_dfStepWidth = dfStepWidth;
1152 m_dfStepHeight = dfStepHeight;
1153 m_dfAspectRatioHeight = dfAspectRatioHeight;
1154 m_dfAspectRatioWidth = dfAspectRatioWidth;
1155 m_dfNormalization = dfNormalization;
1156 m_nInterLayerDepth = nInterLayerDepth;
1157 }
1158
1159 private bool verify(double? df1, double? df2)
1160 {
1161 if ((df1.HasValue && !df2.HasValue) || (!df1.HasValue && df2.HasValue))
1162 return false;
1163
1164 return true;
1165 }
1166
1172 public bool Verify(MultiBoxHeadInfo info)
1173 {
1174 if (!verify(m_dfAspectRatioHeight, info.m_dfAspectRatioHeight))
1175 return false;
1176
1177 if (!verify(m_dfAspectRatioWidth, info.m_dfAspectRatioWidth))
1178 return false;
1179
1180 if (!verify(m_dfMaxSize, info.m_dfMaxSize))
1181 return false;
1182
1183 if (!verify(m_dfMinSize, info.m_dfMinSize))
1184 return false;
1185
1186 if (!verify(m_dfNormalization, info.m_dfNormalization))
1187 return false;
1188
1189 if (!verify(m_dfStepWidth, info.m_dfStepWidth))
1190 return false;
1191
1192 if (!verify(m_dfStepHeight, info.m_dfStepHeight))
1193 return false;
1194
1195 if (!verify(m_nInterLayerDepth, info.m_nInterLayerDepth))
1196 return false;
1197
1198 return true;
1199 }
1200
1204 public string SourceLayer
1205 {
1206 get { return m_strSourceLayer; }
1207 }
1208
1212 public double? MinSize
1213 {
1214 get { return m_dfMinSize; }
1215 }
1216
1220 public double? MaxSize
1221 {
1222 get { return m_dfMaxSize; }
1223 }
1224
1228 public double? StepHeight
1229 {
1230 get { return m_dfStepHeight; }
1231 }
1232
1236 public double? StepWidth
1237 {
1238 get { return m_dfStepWidth; }
1239 }
1240
1244 public double? AspectRatioHeight
1245 {
1246 get { return m_dfAspectRatioHeight; }
1247 }
1248
1252 public double? AspectRatioWidth
1253 {
1254 get { return m_dfAspectRatioWidth; }
1255 }
1256
1260 public double? Normalization
1261 {
1262 get { return m_dfNormalization; }
1263 }
1264
1268 public double? InterLayerDepth
1269 {
1270 get { return m_nInterLayerDepth; }
1271 }
1272 }
1273}
The SimpleDatum class holds a data input within host memory.
Definition: SimpleDatum.cs:161
ANNOTATION_TYPE
Specifies the annotation type when using annotations.
Definition: SimpleDatum.cs:204
The ModelBuilder is an abstract class that is overridden by a derived class used to programmatically build ...
Definition: ModelBuilder.cs:19
SolverParameter Solver
Returns the base solver.
LayerParameter addVGGNetBody(LayerParameter lastLayer, bool bNeedFc=true, bool bFullConv=true, bool bReduced=true, bool bDilated=true, bool bNoPool=false, bool bDropout=false, List< string > rgstrFreezeLayers=null, bool bDilatePool4=false)
Adds the full VGG body to the network, connecting it to the 'lastLayer'.
SolverParameter m_solver
Specifies the base solver to use.
Definition: ModelBuilder.cs:31
void addExclusion(LayerParameter p, Phase phase)
Add a phase exclusion.
abstract LayerParameter addExtraLayers(bool bUseBatchNorm=true, double dfLrMult=1.0)
Add extra layers on top of a 'base' network (e.g. VGGNet or Inception)
SCALE_BIAS
Defines the scale bias type to use.
Definition: ModelBuilder.cs:37
LayerParameter addResNetBody(string strDataName, int nBlock3Count=4, int nBlock4Count=23, bool bUsePool5=true, bool bUseDilationConv5=false, bool bNamedParams=false, string strLayerPostfix="", Phase phaseExclude=Phase.NONE)
Create a ResNet101 Body.
LayerParameter addAnnotatedDataLayer(string strSource, Phase phase, int nBatchSize=32, bool bOutputLabel=true, string strLabelMapFile="", SimpleDatum.ANNOTATION_TYPE anno_type=SimpleDatum.ANNOTATION_TYPE.NONE, TransformationParameter transform=null, List< BatchSampler > rgSampler=null)
Add the Annotated Data layer.
NetParameter m_net
Specifies the base net to be altered.
Definition: ModelBuilder.cs:27
LayerParameter createPooling(string strName, PoolingParameter.PoolingMethod method, int nKernelSize, int nPad=0, int nStride=1)
Create a new pooling layer parameter.
NetParameter Net
Returns the base net altered by the model builder.
LayerParameter addVGGBlock(LayerParameter lastLayer, int nBlockIdx, int nConvIdx, int nNumOutput, int nConvCount, bool? bNoPool, bool bDilatePool=false, int nKernelSize=3, int nPad=1, int nStride=1, int nDilation=1)
Add a new VGG block.
ModelBuilder(string strBaseDir, NetParameter net=null, SolverParameter solver=null)
The constructor.
Definition: ModelBuilder.cs:58
List< LayerParameter > createMultiBoxHead(LayerParameter data, int nNumClasses, List< MultiBoxHeadInfo > rgInfo, List< float > rgPriorVariance, bool bUseObjectness=false, bool bUseBatchNorm=true, double dfLrMult=1.0, bool useScale=true, int nImageHt=0, int nImageWd=0, bool bShareLocation=true, bool bFlip=true, bool bClip=true, double dfOffset=0.5, int nKernelSize=1, int nPad=0, string strConfPostfix="", string strLocPostfix="")
Create the multi-box head layers.
abstract NetParameter CreateDeployModel()
Create the deploy model to use.
LayerParameter createConvolution(string strName, int nNumOutput, int nKernelSize, int nPad=0, int nStride=1, int nDilation=1)
Create a new convolution layer parameter.
LayerParameter addConvBNLayer(string strInputLayer, string strOutputLayer, bool bUseBatchNorm, bool bUseRelU, int nNumOutput, int nKernelSize, int nPad, int nStride, double dfLrMult=1.0, int nDilation=1, SCALE_BIAS useScale=SCALE_BIAS.SCALE, string strConvPrefix="", string strConvPostfix="", string strBnPrefix="", string strBnPostfix="_bn", string strScalePrefix="", string strScalePostFix="_scale", string strBiasPrefix="", string strBiasPostfix="_bias", bool bNamedParams=false, string strLayerPostfix="", Phase phaseExclude=Phase.NONE)
Add convolution, batch-norm layers.
string m_strBaseDir
Specifies the base directory that contains the data and models.
Definition: ModelBuilder.cs:23
LayerParameter connectAndAddLayer(List< LayerParameter > rgFromLayer, LayerParameter toLayer, bool bAdd=true)
Connect the from layer to the 'to' layer.
NetParameter createNet(string strName)
Create the base network parameter for the model and set its name to the 'm_strModel' name.
LayerParameter addDataLayer(string strSource, Phase phase, int nBatchSize=32, bool bOutputLabel=true, TransformationParameter transform=null, string strName="data", bool bSiamese=false)
Add the Data layer.
LayerParameter findLayer(string strName)
Find a layer with a given name.
LayerParameter addVGGfc(LayerParameter lastLayer, int nBlockIdx, int nConvIdx, int nNumOutput, int nDilation, bool bDilated, bool bNoPool, bool bFullConv, bool bReduced, bool bDropout)
Adds the final layers to the network.
abstract NetParameter CreateModel(bool bDeploy=false)
Create the training/testing/deploy model to use.
LayerParameter addResBody(LayerParameter lastLayer, string strBlockName, int nOut2A, int nOut2B, int nOut2C, int nStride, bool bUseBranch1, int nDilation=1, bool bNamedParams=false, string strLayerPostfix="", Phase phaseExclude=Phase.NONE)
Adds a ResNet body to the network, connecting it to the 'lastLayer'.
LayerParameter connectAndAddLayer(string fromLayer, LayerParameter toLayer, string fromLayer2=null)
Connect the from layer to the 'to' layer.
abstract SolverParameter CreateSolver()
Create the base solver to use.
string getFileName(string strFile, string strSubDir)
Returns the full path of the filename using the base directory original set when creating the ModelBu...
Definition: ModelBuilder.cs:89
LayerParameter connectAndAddLayer(LayerParameter fromLayer, LayerParameter toLayer, bool bInPlace=false, bool bAdd=true, int nTopIdx=0)
Connect the from layer to the 'to' layer.
The MultiBoxHeadInfo contains information used to build the multi-box head of layers.
MultiBoxHeadInfo(string strSrcLayer, double? dfMinSize=null, double? dfMaxSize=null, double? dfStepWidth=null, double? dfStepHeight=null, double? dfAspectRatioWidth=null, double? dfAspectRatioHeight=null, double? dfNormalization=null, int? nInterLayerDepth=null)
The constructor.
double? InterLayerDepth
Returns the inner layer depth, or -1 to ignore.
double? MaxSize
Returns the bbox maximum size.
double? StepWidth
Returns the step width.
string SourceLayer
Returns the source layer.
double? Normalization
Returns the normalization, or -1 to ignore.
double? AspectRatioWidth
Returns the aspect ratio width.
double? MinSize
Returns the bbox minimum size.
bool Verify(MultiBoxHeadInfo info)
Verify that all numerical values are consistently set (or not) between two info objects.
double? AspectRatioHeight
Returns the aspect ratio height.
double? StepHeight
Returns the step height.
double eps
Specifies a small value to add to the variance estimate so that we don't divide by zero.
double moving_average_fraction
Specifies how much the moving average decays each iteration. Smaller values make the moving average d...
bool? use_global_stats
If false, normalization is performed over the current mini-batch and global statistics are accumulate...
FillerParameter filler
(filler is ignored unless just one bottom is given and the bias is a learned parameter of the layer....
int axis
The axis along which to concatenate – may be negative to index from the end (e.g.,...
FillerParameter weight_filler
The filler for the weight. The default is set to use the 'xavier' filler.
FillerParameter bias_filler
The filler for the bias. The default is set to use the 'constant = 0.1' filler.
bool bias_term
Whether to have bias terms or not.
uint num_output
The number of outputs for the layer.
virtual uint batch_size
Specifies the batch size.
int images_per_blob
(optional, default = 1) Specifies the number of images to load into each blob channel....
string source
When used with the DATA parameter, specifies the data 'source' within the database....
bool output_all_labels
(optional, default = false) When using images_per_blob > 1, 'output_all_labels' specifies to output a...
bool balance_matches
(optional, default = true) When using images_per_blob > 1, 'balance_matches' specifies to query image...
double dropout_ratio
Specifies the dropout ratio. (e.g. the probability that values will be dropped out and set to zero....
Specifies the filler parameters used to create each Filler.
int axis
Specifies the first axis to flatten: all preceding axes are retained in the output....
uint num_output
The number of outputs for the layer.
List< uint > kernel_size
Kernel size is given as a single value for equal dimensions in all spatial dimensions,...
List< uint > dilation
Factor used to dilate the kernel, (implicitly) zero-filling the resulting holes. (Kernel dilation is ...
List< uint > stride
Stride is given as a single value for equal dimensions in all spatial dimensions, or once per spatial...
List< uint > pad
Pad is given as a single value for equal dimensions in all spatial dimensions, or once per spatial di...
Specifies the base parameter for all layers.
Normalization2Parameter normalization2_param
Returns the parameter set when initialized with LayerType.NORMALIZATION2
ConvolutionParameter convolution_param
Returns the parameter set when initialized with LayerType.CONVOLUTION
List< ParamSpec > parameters
Specifies the ParamSpec parameters of the LayerParameter.
string name
Specifies the name of this LayerParameter.
List< NetStateRule > include
Specifies the NetStateRule's for which this LayerParameter should be included.
ScaleParameter scale_param
Returns the parameter set when initialized with LayerType.SCALE
bool freeze_learning
Get/set whether or not to freeze the learning for this layer globally.
List< NetStateRule > exclude
Specifies the NetStateRule's for which this LayerParameter should be excluded.
PoolingParameter pooling_param
Returns the parameter set when initialized with LayerType.POOLING
PermuteParameter permute_param
Returns the parameter set when initialized with LayerType.PERMUTE
PriorBoxParameter prior_box_param
Returns the parameter set when initialized with LayerType.PRIORBOX
List< string > top
Specifies the active top connections (in the bottom, out the top)
AnnotatedDataParameter annotated_data_param
Returns the parameter set when initialized with LayerType.ANNOTATED_DATA
BiasParameter bias_param
Returns the parameter set when initialized with LayerType.BIAS
InnerProductParameter inner_product_param
Returns the parameter set when initialized with LayerType.INNERPRODUCT
ConcatParameter concat_param
Returns the parameter set when initialized with LayerType.CONCAT
TransformationParameter transform_param
Returns the parameter set when initialized with LayerType.TRANSFORM
DataParameter data_param
Returns the parameter set when initialized with LayerType.DATA
FlattenParameter flatten_param
Returns the parameter set when initialized with LayerType.FLATTEN
BatchNormParameter batch_norm_param
Returns the parameter set when initialized with LayerType.BATCHNORM
List< string > bottom
Specifies the active bottom connections (in the bottom, out the top).
LayerType
Specifies the layer type.
override string ToString()
Returns a string representation of the LayerParameter.
DropoutParameter dropout_param
Returns the parameter set when initialized with LayerType.DROPOUT
Specifies the parameters use to create a Net
Definition: NetParameter.cs:18
string name
The name of the network.
Definition: NetParameter.cs:90
List< LayerParameter > layer
The layers that make up the net. Each of their configurations, including connectivity and behavior,...
Specifies a NetStateRule used to determine whether a Net falls within a given include or exclude patt...
Definition: NetStateRule.cs:20
Specifies training parameters (multipliers on global learning constants, and the name of other settin...
Definition: ParamSpec.cs:19
Specifies the parameters for the PoolingLayer.
PoolingMethod
Defines the pooling method.
PoolingMethod pool
Specifies the pooling method.
bool global_pooling
Specifies whether or not to enable global pooling.
bool bias_term
Whether to also learn a bias (equivalent to a ScalarLayer + BiasLayer, but may be more efficient).
FillerParameter bias_filler
Filler used for bias filling.
The SolverParameter is a parameter for the solver, specifying the train and test networks.
Stores parameters used to apply transformation to the data layer's data.
SimpleDatum.ANNOTATION_TYPE anno_type
Get/set the annotation type.
List< BatchSampler > batch_sampler
Get/set the batch sampler.
string label_map_file
Get/set the label map file.
FillerParameter scale_filler
Specifies the filler for the initial value of scale, default is 1.0 for all.
bool channel_shared
Specifies whether or not the scale parameters are shared across channels.
bool across_spatial
Specifies to normalize across the spatial dimensions.
List< int > order
Specifies the new orders of the axes of data.
float offset
Specifies the offset to the top left corner of each cell.
uint? img_size
Specifies the image size. By default we calculate the img_height, img_width, step_x and step_y based ...
uint? img_h
Specifies the image height. By default we calculate the img_height, img_width, step_x and step_y base...
List< float > variance
Specifies the variance for adjusting the prior boxes.
List< float > max_size
Specifies the maximum box size (in pixels) and is required!
float? step_w
Specifies the explicit step size to use along width.
uint? img_w
Specifies the image width. By default we calculate the img_height, img_width, step_x and step_y based...
float? step
Specifies the explicit step size to use.
float? step_h
Specifies the explicit step size to use along height.
List< float > aspect_ratio
Specifies various aspect ratios. Duplicate ratios are ignored. If none are provided,...
bool clip
Specifies whether or not to clip the prior so that it is within [0,1].
List< float > min_size
Specifies the minimum box size (in pixels) and is required!
The MyCaffe.basecode contains all generic types used throughout MyCaffe.
Definition: Annotation.cs:12
Phase
Defines the Phase under which to run a Net.
Definition: Interfaces.cs:61
@ NONE
No training category specified.
The MyCaffe.model namespace contains all classes used to programmatically create new model scripts.
Definition: ModelBuilder.cs:14
The MyCaffe.param.ssd namespace contains all SSD related parameter objects that correspond to the nat...
The MyCaffe.param namespace contains parameters used to create models.
The MyCaffe namespace contains the main body of MyCaffe code that closely tracks the C++ Caffe open-...
Definition: Annotation.cs:12