MyCaffe  1.12.2.41
Deep learning software for Windows C# programmers.
LayerParameter.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using System.ComponentModel;
using MyCaffe.basecode;
using MyCaffe.common;
using MyCaffe.param.nt;
using MyCaffe.param.ssd;
using MyCaffe.param.beta;
using MyCaffe.param.gpt;
using MyCaffe.param.tft;
using MyCaffe.param.lnn;

namespace MyCaffe.param
{
    [Serializable]
    [TypeConverter(typeof(ExpandableObjectConverter))]
    public class LayerParameter : BaseParameter, ICloneable, IComparable, IBinaryPersist
    {
        // The layer name.
        string m_strName;
        // The layer type.
        LayerType m_type;
        // The name of each bottom blob.
        List<string> m_rgstrBottom = new List<string>();
        // The name of each top blob.
        List<string> m_rgstrTop = new List<string>();
        // Used for rendering models only.
        bool m_bGroupStart = false;
        // Use half sized memory.
        bool m_bUseHalfSize = false;

        // The train/test phase for computation.
        Phase m_phase;

        // The amount of weight to assign each top blob in the objective.
        // Each layer assigns a default value, usually of either 0 or 1,
        // to each top blob.
        List<double> m_rgLossWeight = new List<double>();

        // Specifies training parameters (multipliers on global learning constants,
        // and the name and other settings used for weight sharing).
        List<ParamSpec> m_rgParams = new List<ParamSpec>();

        List<BlobProto> m_rgBlobs = new List<BlobProto>();

        List<bool> m_rgbPropagateDown = new List<bool>();

        List<NetStateRule> m_rgInclude = new List<NetStateRule>();
        List<NetStateRule> m_rgExclude = new List<NetStateRule>();
        Dictionary<Phase, int> m_rgMaxBottomCount = new Dictionary<Phase, int>();

        int m_nSolverCount = 1;
        int m_nSolverRank = 0;
        List<string> m_rgstrExpectedTop = new List<string>();
        List<string> m_rgstrExpectedBottom = new List<string>();
        bool m_bFreezeLearning = false;
        bool m_bConnectLossEvent = false;
        // The level of ONNX conversion support for the layer.
        ONNX_CONVERSION_SUPPORT m_onnxConversionSupport = ONNX_CONVERSION_SUPPORT.NONE;

        public enum ONNX_CONVERSION_SUPPORT
        {
            NONE,
            INFERENCE,
            INFERENCE_AND_TRAINING
        }

        public enum LayerType
        {
            ABSVAL,
            ACCURACY,
            ACCURACY_DECODE,
            ACCURACY_ENCODING,
            ANNOTATED_DATA,
            ARGMAX,
            ATTENTION,
            BIAS,
            BATCHNORM,
            BATCHREINDEX,
            BNLL,
            CATEGORICAL_TRANS,
            CAUSAL_SELF_ATTENTION,
            CFC,
            CFC_UNIT,
            CHANNEL_EMBEDDING,
            CLIP,
            CONCAT,
            CONSTANT,
            CONTRASTIVE_LOSS,
            CONVOLUTION,
            CONVOLUTION_OCTAVE,
            COPY,
            CROP,
            DECODE,
            DECONVOLUTION,
            DETECTION_EVALUATE,
            DETECTION_OUTPUT,
            DATA,
            DATA_NORMALIZER,
            DATA_SEQUENCE,
            DATA_TEMPORAL,
            DROPOUT,
            DUMMYDATA,
            ELTWISE,
            ELU,
            EMBED,
            EUCLIDEAN_LOSS,
            EVENT,
            EXP,
            FILTER,
            FLATTEN,
            GATHER,
            GATEADDNORM,
            GELU,
            GLU,
            GRN,
            GRADIENTSCALER,
            GRAM,
            GLOBRES_NORM,
            HDF5_DATA,
            HINGE_LOSS,
            IMAGE_DATA,
            IM2COL,
            INFOGAIN_LOSS,
            INNERPRODUCT,
            INPUT,
            INTERP,
            LABELMAPPING,
            LAYERNORM,
            LECUN,
            LOG,
            LOSS,
            LRN,
            LTC_UNIT,
            MEAN_ERROR_LOSS,
            MATH,
            MEMORYDATA,
            MEMORY_LOSS,
            MERGE,
            MISH,
            MULTIBOX_LOSS,
            MULTIHEAD_ATTENTION,
            MULTIHEAD_ATTENTION_INTERP,
            MULTINOMIALLOGISTIC_LOSS,
            MVN,
            NLL_LOSS,
            NUMERIC_TRANS,
            ONEHOT,
            PARAMETER,
            PERMUTE,
            POOLING,
            POSITIONAL_ENCODER,
            POWER,
            PRELU,
            PRIORBOX,
            QUANTILE_ACCURACY,
            QUANTILE_LOSS,
            REDUCTION,
            RELU,
            RESHAPE,
            RESHAPE_TEMPORAL,
            SCALAR,
            SCALE,
            SERF,
            SIGMOID,
            SIGMOIDCROSSENTROPY_LOSS,
            SILU,
            SOFTPLUS,
            SOFTMAXCROSSENTROPY_LOSS,
            SOFTMAXCROSSENTROPY2_LOSS,
            SOFTMAX,
            SOFTMAXWITH_LOSS,
            SMOOTHL1_LOSS,
            SPP,
            SILENCE,
            SLICE,
            SPLIT,
            SQUEEZE,
            UNSQUEEZE,
            SWISH,
            MODEL_DATA,
            TEXT_DATA,
            TV_LOSS,
            TANH,
            THRESHOLD,
            TILE,
            TRANSFORM,
            TRANSFORMER_BLOCK,
            TOKENIZED_DATA,
            TOKENIZED_DATA_PAIRS,
            TOKENIZED_DATA_PAIRS_PY,
            TRANSPOSE,
            LSTM_SIMPLE,
            LSTM_ATTENTION,
            RECURRENT,
            RNN,
            LSTM,
            LSTM_UNIT,
            UNPOOLING1,
            UNPOOLING,
            NORMALIZATION1,
            NORMALIZATION2,
            TRIPLET_LOSS_SIMPLE,
            TRIPLET_LOSS,
            KNN,
            DEBUG,
            VIDEO_DATA,
            VARSELNET,
#pragma warning disable 1591
            _MAX
#pragma warning restore 1591
        }

        // Layer type-specific parameters
        //
        // Note: certain layers may have more than one computation engine
        // for their implementation. These layers include an Engine type and
        // engine parameter for selecting the implementation.
        // The default for the engine is set by the ENGINE switch at compile-time.
        Dictionary<LayerType, LayerParameterBase> m_rgLayerParameters = new Dictionary<LayerType, LayerParameterBase>();

        public LayerParameter() : base()
        {
            for (int i = 0; i < (int)LayerType._MAX; i++)
            {
                m_rgLayerParameters.Add((LayerType)i, null);
            }
        }

        public LayerParameter(LayerType lt, string strName = null)
            : base()
        {
            m_type = lt;
            m_strName = strName;

            if (m_strName == null)
                m_strName = lt.ToString();

            for (int i = 0; i < (int)LayerType._MAX; i++)
            {
                m_rgLayerParameters.Add((LayerType)i, null);
            }

            setupParams(lt);
        }
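
        // Example (illustrative sketch, not part of the original source): building a
        // convolution layer definition with this constructor. The 'num_output' member
        // is assumed from the standard Caffe-style ConvolutionParameter; check that
        // class for the actual fields.
        //
        //   LayerParameter conv = new LayerParameter(LayerParameter.LayerType.CONVOLUTION, "conv1");
        //   conv.bottom.Add("data");
        //   conv.top.Add("conv1");
        //   conv.convolution_param.num_output = 96;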

        public LayerParameter(LayerParameter p)
            : base()
        {
            m_type = p.m_type;
            m_strName = p.m_strName;
            m_rgstrBottom = p.m_rgstrBottom;
            m_rgstrTop = p.m_rgstrTop;
            m_phase = p.m_phase;
            m_rgLossWeight = p.m_rgLossWeight;
            m_rgParams = p.m_rgParams;
            m_rgBlobs = p.m_rgBlobs;
            m_rgbPropagateDown = p.m_rgbPropagateDown;
            m_rgInclude = p.m_rgInclude;
            m_rgExclude = p.m_rgExclude;
            m_rgLayerParameters = p.m_rgLayerParameters;
            m_nSolverCount = p.m_nSolverCount;
            m_nSolverRank = p.m_nSolverRank;
            m_bGroupStart = p.m_bGroupStart;
        }

        public string PrepareRunModelInputs()
        {
            if (m_rgLayerParameters[m_type] == null)
                return null;

            return m_rgLayerParameters[m_type].PrepareRunModelInputs();
        }

        public void PrepareRunModel()
        {
            if (m_rgLayerParameters[m_type] == null)
                return;

            m_rgLayerParameters[m_type].PrepareRunModel(this);
        }

        public int GetParameterCount()
        {
            int nOffset = 0;

            switch (m_type)
            {
                case LayerType.CONVOLUTION:
                case LayerType.DECONVOLUTION:
                    if (convolution_param != null && !convolution_param.bias_term && m_rgParams.Count > 1)
                        nOffset = -1;
                    break;

                case LayerType.INNERPRODUCT:
                    if (inner_product_param != null && !inner_product_param.bias_term && m_rgParams.Count > 1)
                        nOffset = -1;
                    break;
            }

            return m_rgParams.Count + nOffset;
        }
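
        // Example (illustrative): a CONVOLUTION layer defined without a bias term
        // carries one less learnable parameter, so the count is adjusted:
        //
        //   conv.convolution_param.bias_term = false;
        //   int nCount = conv.GetParameterCount();  // m_rgParams.Count - 1 when
        //                                           // two ParamSpecs are present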

        public void CopyDefaults(LayerParameter p)
        {
            if (p == null)
                return;

            if (p.type != m_type)
                throw new ArgumentOutOfRangeException();

            m_rgInclude = p.include;
            m_rgExclude = p.exclude;
            m_rgParams = p.parameters;

            switch (m_type)
            {
                case LayerType.ANNOTATED_DATA:
                    annotated_data_param = p.annotated_data_param;
                    transform_param = p.transform_param;
                    data_param = p.data_param;
                    break;

                case LayerType.DATA:
                    transform_param = p.transform_param;
                    data_param = p.data_param;
                    break;

                case LayerType.IMAGE_DATA:
                    transform_param = p.transform_param;
                    image_data_param = p.image_data_param;
                    data_param = p.data_param;
                    break;

                case LayerType.MEMORYDATA:
                    transform_param = p.transform_param;
                    memory_data_param = p.memory_data_param;
                    break;
            }
        }

        public bool MeetsPhase(Phase phase)
        {
            if (phase == Phase.NONE)
                return true;

            foreach (NetStateRule r in m_rgExclude)
            {
                if (r.phase == phase)
                    return false;
            }

            foreach (NetStateRule r in m_rgInclude)
            {
                if (r.phase == phase)
                    return true;
            }

            if (m_rgInclude.Count == 0)
                return true;

            if (m_rgExclude.Count > 0)
                return true;

            return false;
        }
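
        // Example (illustrative): phase rules in practice. The NetStateRule
        // constructor taking a Phase is assumed here; see NetStateRule for the
        // actual signature.
        //
        //   LayerParameter drop = new LayerParameter(LayerType.DROPOUT);
        //   drop.include.Add(new NetStateRule(Phase.TRAIN));
        //   bool bTrain = drop.MeetsPhase(Phase.TRAIN);  // true (include rule matches)
        //   bool bTest = drop.MeetsPhase(Phase.TEST);    // false (include list is non-empty)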

        public void Save(BinaryWriter bw)
        {
            bw.Write((int)m_type);
            bw.Write(m_strName);
            Utility.Save<string>(bw, m_rgstrBottom);
            Utility.Save<string>(bw, m_rgstrTop);
            Utility.Save<double>(bw, m_rgLossWeight);
            Utility.Save<ParamSpec>(bw, m_rgParams);
            Utility.Save<BlobProto>(bw, m_rgBlobs);
            Utility.Save<bool>(bw, m_rgbPropagateDown);
            Utility.Save<NetStateRule>(bw, m_rgInclude);
            Utility.Save<NetStateRule>(bw, m_rgExclude);

            int nCount = 0;

            foreach (LayerParameterBase p in m_rgLayerParameters.Values)
            {
                if (p != null)
                    nCount++;
            }

            bw.Write(nCount);

            foreach (KeyValuePair<LayerType, LayerParameterBase> kv in m_rgLayerParameters)
            {
                if (kv.Value != null)
                {
                    bw.Write((int)kv.Key);

                    IBinaryPersist bp = kv.Value as IBinaryPersist;
                    bp.Save(bw);
                }
            }
        }

        public object Load(BinaryReader br, bool bNewInstance)
        {
            LayerType lt = (LayerType)br.ReadInt32();
            string strName = br.ReadString();

            LayerParameter p = this;

            if (bNewInstance)
                p = new LayerParameter(lt, strName);

            p.m_rgstrBottom = Utility.Load<string>(br);
            p.m_rgstrTop = Utility.Load<string>(br);
            p.m_rgLossWeight = Utility.Load<double>(br);
            p.m_rgParams = Utility.Load<ParamSpec>(br);
            p.m_rgBlobs = Utility.Load<BlobProto>(br);
            p.m_rgbPropagateDown = Utility.Load<bool>(br);
            p.m_rgInclude = Utility.Load<NetStateRule>(br);
            p.m_rgExclude = Utility.Load<NetStateRule>(br);

            int nCount = br.ReadInt32();

            for (int i = 0; i < nCount; i++)
            {
                lt = (LayerType)br.ReadInt32();
                IBinaryPersist bp = m_rgLayerParameters[lt] as IBinaryPersist;
                bp.Load(br, false);
            }

            return p;
        }
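
        // Example (illustrative): a binary Save/Load round-trip through a MemoryStream.
        //
        //   LayerParameter copy;
        //   using (MemoryStream ms = new MemoryStream())
        //   {
        //       using (BinaryWriter bw = new BinaryWriter(ms, Encoding.UTF8, true))
        //       {
        //           layer.Save(bw);
        //       }
        //       ms.Position = 0;
        //       using (BinaryReader br = new BinaryReader(ms))
        //       {
        //           copy = (LayerParameter)layer.Load(br, true);
        //       }
        //   }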

        private void setupParams(LayerType lt, bool bNewParams = true)
        {
            switch (lt)
            {
                case LayerType.ABSVAL:
                    expected_bottom.Add("input");
                    expected_top.Add("abs");
                    break;

                case LayerType.ACCURACY:
                    expected_bottom.Add("input");
                    expected_bottom.Add("label");
                    expected_top.Add("accuracy");
                    m_rgLayerParameters[LayerType.ACCURACY] = new AccuracyParameter();
                    break;

                case LayerType.ACCURACY_DECODE:
                    expected_bottom.Add("decode");
                    expected_top.Add("accuracy");
                    m_rgLayerParameters[LayerType.ACCURACY] = new AccuracyParameter();
                    break;

                case LayerType.ACCURACY_ENCODING:
                    expected_bottom.Add("input");
                    expected_bottom.Add("label");
                    expected_top.Add("accuracy");
                    m_rgLayerParameters[LayerType.ACCURACY] = new AccuracyParameter();
                    m_rgLayerParameters[LayerType.DECODE] = new DecodeParameter();
                    break;

                case LayerType.ANNOTATED_DATA:
                    expected_top.Add("data");
                    expected_top.Add("label");
                    m_rgLayerParameters[LayerType.TRANSFORM] = new TransformationParameter();
                    m_rgLayerParameters[LayerType.ANNOTATED_DATA] = new AnnotatedDataParameter();
                    m_rgLayerParameters[LayerType.DATA] = new DataParameter();
                    break;

                case LayerType.ARGMAX:
                    expected_bottom.Add("input");
                    expected_top.Add("max");
                    m_rgLayerParameters[lt] = new ArgMaxParameter();
                    break;

                case LayerType.CAUSAL_SELF_ATTENTION:
                    expected_bottom.Add("input");
                    expected_top.Add("atten");
                    m_rgLayerParameters[lt] = new CausalSelfAttentionParameter();
                    break;

                case LayerType.ATTENTION:
                    expected_bottom.Add("input");
                    expected_top.Add("atten");
                    m_rgLayerParameters[lt] = new AttentionParameter();
                    break;

                case LayerType.BATCHNORM:
                    expected_bottom.Add("input");
                    expected_top.Add("norm");
                    m_rgLayerParameters[lt] = new BatchNormParameter();
                    m_onnxConversionSupport = ONNX_CONVERSION_SUPPORT.INFERENCE_AND_TRAINING;
                    break;

                case LayerType.BATCHREINDEX:
                    expected_bottom.Add("input");
                    expected_bottom.Add("idx");
                    expected_top.Add("data");
                    break;

                case LayerType.BIAS:
                    expected_bottom.Add("input");
                    expected_bottom.Add("bias");
                    expected_top.Add("bias");
                    m_rgLayerParameters[lt] = new BiasParameter();
                    break;

                case LayerType.BNLL:
                    expected_bottom.Add("input");
                    expected_top.Add("bnll");
                    break;

                case LayerType.CATEGORICAL_TRANS:
                    expected_bottom.Add("x");
                    expected_top.Add("proj");
                    m_rgLayerParameters[lt] = new CategoricalTransformationParameter();
                    break;

                case LayerType.CHANNEL_EMBEDDING:
                    expected_bottom.Add("x_num");
                    expected_bottom.Add("x_cat");
                    expected_top.Add("emb");
                    m_rgLayerParameters[LayerType.CATEGORICAL_TRANS] = new CategoricalTransformationParameter();
                    m_rgLayerParameters[LayerType.NUMERIC_TRANS] = new NumericTransformationParameter();
                    break;

                case LayerType.CFC:
                    expected_bottom.Add("x");
                    expected_bottom.Add("ts");
                    expected_bottom.Add("mask");
                    expected_top.Add("cfc");
                    m_rgLayerParameters[lt] = new CfcParameter();
                    m_rgLayerParameters[LayerType.CFC_UNIT] = new CfcUnitParameter();
                    m_rgLayerParameters[LayerType.LTC_UNIT] = new LtcUnitParameter();
                    break;

                case LayerType.CFC_UNIT:
                    expected_bottom.Add("x");
                    expected_bottom.Add("hx");
                    expected_bottom.Add("ts");
                    expected_top.Add("cfc");
                    m_rgLayerParameters[lt] = new CfcUnitParameter();
                    break;

                case LayerType.CLIP:
                    expected_bottom.Add("input");
                    expected_top.Add("clip");
                    m_rgLayerParameters[lt] = new ClipParameter();
                    break;

                case LayerType.CONCAT:
                    expected_bottom.Add("x_1");
                    expected_bottom.Add("x_2");
                    expected_top.Add("concat");
                    m_rgLayerParameters[lt] = new ConcatParameter();
                    break;

                case LayerType.CONSTANT:
                    expected_top.Add("const");
                    m_rgLayerParameters[lt] = new ConstantParameter();
                    break;

                case LayerType.CONTRASTIVE_LOSS:
                    expected_bottom.Add("f1");
                    expected_bottom.Add("f2");
                    expected_bottom.Add("lbl");
                    expected_top.Add("loss");
                    expected_top.Add("match");
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter();
                    m_rgLayerParameters[lt] = new ContrastiveLossParameter();
                    break;

                case LayerType.CONVOLUTION:
                case LayerType.IM2COL:
                    expected_bottom.Add("enc");
                    expected_top.Add("label");
                    m_rgLayerParameters[LayerType.CONVOLUTION] = new ConvolutionParameter();
                    m_onnxConversionSupport = ONNX_CONVERSION_SUPPORT.INFERENCE_AND_TRAINING;
                    break;

                case LayerType.CONVOLUTION_OCTAVE:
                    expected_bottom.Add("in_h");
                    expected_bottom.Add("in_l");
                    expected_top.Add("x_h");
                    expected_top.Add("x_l");
                    m_rgLayerParameters[LayerType.CONVOLUTION] = new ConvolutionParameter();
                    m_rgLayerParameters[LayerType.CONVOLUTION_OCTAVE] = new ConvolutionOctaveParameter();
                    break;

                case LayerType.CROP:
                    expected_bottom.Add("upscore");
                    expected_bottom.Add("data");
                    expected_top.Add("score");
                    m_rgLayerParameters[lt] = new CropParameter();
                    break;

                case LayerType.COPY:
                    expected_bottom.Add("src");
                    expected_bottom.Add("dst");
                    break;

                case LayerType.DECODE:
                    expected_bottom.Add("enc");
                    expected_top.Add("dist1");
                    m_rgLayerParameters[lt] = new DecodeParameter();
                    break;

                case LayerType.DECONVOLUTION:
                    expected_bottom.Add("score");
                    expected_top.Add("upscore");
                    if (bNewParams || m_rgLayerParameters[LayerType.CONVOLUTION] == null)
                        m_rgLayerParameters[LayerType.CONVOLUTION] = new ConvolutionParameter();
                    break;

                case LayerType.DETECTION_EVALUATE:
                    expected_bottom.Add("det");
                    expected_bottom.Add("gt");
                    expected_top.Add("output");
                    m_rgLayerParameters[lt] = new DetectionEvaluateParameter();
                    break;

                case LayerType.DETECTION_OUTPUT:
                    expected_bottom.Add("loc");
                    expected_bottom.Add("conf");
                    expected_bottom.Add("prior");
                    expected_top.Add("output");
                    m_rgLayerParameters[lt] = new DetectionOutputParameter();
                    break;

                case LayerType.DATA:
                    expected_top.Add("data");
                    expected_top.Add("label");
                    m_rgLayerParameters[LayerType.TRANSFORM] = new TransformationParameter();
                    m_rgLayerParameters[LayerType.DATA] = new DataParameter();
                    break;

                case LayerType.DATA_NORMALIZER:
                    expected_bottom.Add("data");
                    expected_bottom.Add("label");
                    expected_top.Add("ndata");
                    expected_top.Add("nlabel");
                    m_rgLayerParameters[lt] = new DataNormalizerParameter();
                    break;

                case LayerType.DATA_SEQUENCE:
                    expected_bottom.Add("data");
                    expected_bottom.Add("label");
                    expected_top.Add("anchor");
                    expected_top.Add("datax");
                    m_rgLayerParameters[lt] = new DataSequenceParameter();
                    break;

                case LayerType.DATA_TEMPORAL:
                    expected_top.Add("sn");
                    expected_top.Add("sc");
                    expected_top.Add("hn");
                    expected_top.Add("hc");
                    expected_top.Add("fn");
                    expected_top.Add("fc");
                    expected_top.Add("t");
                    m_rgLayerParameters[lt] = new DataTemporalParameter();
                    break;

                case LayerType.DEBUG:
                    expected_bottom.Add("input");
                    expected_bottom.Add("label");
                    expected_top.Add("output");
                    m_rgLayerParameters[lt] = new DebugParameter();
                    break;

                case LayerType.DROPOUT:
                    expected_bottom.Add("input");
                    expected_top.Add("dropout");
                    m_rgLayerParameters[lt] = new DropoutParameter();
                    m_onnxConversionSupport = ONNX_CONVERSION_SUPPORT.INFERENCE_AND_TRAINING;
                    break;

                case LayerType.DUMMYDATA:
                    expected_top.Add("data");
                    expected_top.Add("label");
                    m_rgLayerParameters[LayerType.TRANSFORM] = new TransformationParameter();
                    m_rgLayerParameters[lt] = new DummyDataParameter();
                    break;

                case LayerType.ELTWISE:
                    expected_bottom.Add("x_1");
                    expected_bottom.Add("x_2");
                    expected_top.Add("eltwise");
                    m_rgLayerParameters[lt] = new EltwiseParameter();
                    break;

                case LayerType.ELU:
                    expected_bottom.Add("input");
                    expected_top.Add("elu");
                    m_rgLayerParameters[lt] = new EluParameter();
                    break;

                case LayerType.EMBED:
                    expected_bottom.Add("input");
                    expected_top.Add("embed");
                    m_rgLayerParameters[lt] = new EmbedParameter();
                    break;

                case LayerType.EUCLIDEAN_LOSS:
                    expected_bottom.Add("pred");
                    expected_bottom.Add("trgt");
                    expected_top.Add("loss");
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter();
                    break;

                case LayerType.EVENT:
                    expected_bottom.Add("input");
                    expected_top.Add("output");
                    break;

                case LayerType.EXP:
                    expected_bottom.Add("input");
                    expected_top.Add("exp");
                    m_rgLayerParameters[lt] = new ExpParameter();
                    break;

                case LayerType.FILTER:
                    expected_bottom.Add("x_1");
                    expected_bottom.Add("x_2");
                    expected_top.Add("y_1");
                    expected_top.Add("y_2");
                    break;

                case LayerType.FLATTEN:
                    expected_bottom.Add("x_1");
                    expected_top.Add("flatten");
                    m_rgLayerParameters[lt] = new FlattenParameter();
                    break;

                case LayerType.GATHER:
                    expected_bottom.Add("input");
                    expected_bottom.Add("idx");
                    expected_top.Add("gthr");
                    m_rgLayerParameters[lt] = new GatherParameter();
                    m_onnxConversionSupport = ONNX_CONVERSION_SUPPORT.INFERENCE_AND_TRAINING;
                    break;

                case LayerType.GATEADDNORM:
                    expected_bottom.Add("input");
                    expected_top.Add("gan");
                    m_rgLayerParameters[lt] = new GateAddNormParameter();
                    m_rgLayerParameters[LayerType.GLU] = new GluParameter();
                    m_rgLayerParameters[LayerType.DROPOUT] = new DropoutParameter();
                    m_rgLayerParameters[LayerType.LAYERNORM] = new LayerNormParameter();
                    break;

                case LayerType.GELU:
                    expected_bottom.Add("input");
                    expected_top.Add("gelu");
                    m_rgLayerParameters[lt] = new GeluParameter();
                    break;

                case LayerType.GLU:
                    expected_bottom.Add("input");
                    expected_top.Add("glu");
                    m_rgLayerParameters[lt] = new GluParameter();
                    break;

                case LayerType.GRN:
                    expected_bottom.Add("input");
                    expected_top.Add("grn");
                    m_rgLayerParameters[lt] = new GrnParameter();
                    break;

                case LayerType.GRADIENTSCALER:
                    expected_bottom.Add("input");
                    expected_top.Add("identity");
                    m_rgLayerParameters[lt] = new GradientScaleParameter();
                    break;

                case LayerType.GLOBRES_NORM:
                    expected_bottom.Add("input");
                    expected_top.Add("gresnet");
                    m_rgLayerParameters[lt] = new FlattenParameter();
                    break;

                case LayerType.GRAM:
                    expected_bottom.Add("input");
                    expected_top.Add("gram");
                    m_rgLayerParameters[lt] = new GramParameter();
                    break;

                case LayerType.HDF5_DATA:
                    expected_top.Add("data");
                    m_rgLayerParameters[LayerType.HDF5_DATA] = new HDF5DataParameter();
                    break;

                case LayerType.HINGE_LOSS:
                    expected_bottom.Add("pred");
                    expected_bottom.Add("label");
                    expected_top.Add("loss");
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter();
                    m_rgLayerParameters[lt] = new HingeLossParameter();
                    break;

                case LayerType.IMAGE_DATA:
                    expected_top.Add("data");
                    expected_top.Add("label");
                    m_rgLayerParameters[LayerType.TRANSFORM] = new TransformationParameter();
                    m_rgLayerParameters[LayerType.IMAGE_DATA] = new ImageDataParameter();
                    DataParameter imgdp = new DataParameter();
                    imgdp.backend = DataParameter.DB.NONE;
                    imgdp.enable_random_selection = false;
                    m_rgLayerParameters[LayerType.DATA] = imgdp;
                    break;

                case LayerType.INFOGAIN_LOSS:
                    expected_bottom.Add("pred");
                    expected_bottom.Add("label");
                    expected_bottom.Add("H");
                    expected_top.Add("loss");
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter();
                    m_rgLayerParameters[lt] = new InfogainLossParameter();
                    break;

                case LayerType.INNERPRODUCT:
                    expected_bottom.Add("input");
                    expected_top.Add("ip");
                    m_rgLayerParameters[lt] = new InnerProductParameter();
                    m_onnxConversionSupport = ONNX_CONVERSION_SUPPORT.INFERENCE_AND_TRAINING;
                    break;

                case LayerType.INPUT:
                    expected_top.Add("data");
                    expected_top.Add("label");
                    m_rgLayerParameters[LayerType.INPUT] = new InputParameter();
                    break;

                case LayerType.INTERP:
                    expected_top.Add("input");
                    expected_top.Add("interp");
                    m_rgLayerParameters[lt] = new InterpParameter();
                    break;

                case LayerType.LABELMAPPING:
                    expected_bottom.Add("input");
                    expected_top.Add("output");
                    m_rgLayerParameters[lt] = new LabelMappingParameter();
                    break;

                case LayerType.KNN:
                    expected_bottom.Add("input");
                    expected_bottom.Add("label");
                    expected_top.Add("classes");
                    m_rgMaxBottomCount.Add(Phase.RUN, 1);
                    m_rgLayerParameters[lt] = new KnnParameter();
                    break;

                case LayerType.LAYERNORM:
                    expected_bottom.Add("input");
                    expected_top.Add("norm");
                    m_rgLayerParameters[lt] = new LayerNormParameter();
                    break;

                case LayerType.LECUN:
                    expected_bottom.Add("input");
                    expected_top.Add("lecun");
                    break;

                case LayerType.LOG:
                    expected_bottom.Add("input");
                    expected_top.Add("log");
                    m_rgLayerParameters[lt] = new LogParameter();
                    break;

                case LayerType.LRN:
                    expected_bottom.Add("input");
                    expected_top.Add("lrn");
                    m_rgLayerParameters[lt] = new LRNParameter();
                    break;

                case LayerType.LTC_UNIT:
                    expected_bottom.Add("x");
                    expected_bottom.Add("hx");
                    expected_bottom.Add("ts");
                    expected_top.Add("ltc");
                    m_rgLayerParameters[lt] = new LtcUnitParameter();
                    break;

                case LayerType.MEAN_ERROR_LOSS:
                    expected_bottom.Add("pred");
                    expected_bottom.Add("target");
                    expected_top.Add("loss");
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter();
                    m_rgLayerParameters[lt] = new MeanErrorLossParameter();
                    break;

                case LayerType.MATH:
                    expected_bottom.Add("input");
                    expected_top.Add("math");
                    m_rgLayerParameters[lt] = new MathParameter();
                    break;

                case LayerType.MERGE:
                    expected_bottom.Add("input1");
                    expected_bottom.Add("input2");
                    expected_top.Add("merge");
                    m_rgLayerParameters[lt] = new MergeParameter();
                    break;

                case LayerType.MEMORYDATA:
                    expected_top.Add("data");
                    m_rgLayerParameters[LayerType.TRANSFORM] = new TransformationParameter();
                    m_rgLayerParameters[LayerType.MEMORYDATA] = new MemoryDataParameter();
                    break;

                case LayerType.MEMORY_LOSS:
                    expected_bottom.Add("input");
                    expected_top.Add("loss");
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter();
                    break;

                case LayerType.MISH:
                    expected_bottom.Add("input");
                    expected_top.Add("mish");
                    m_rgLayerParameters[lt] = new MishParameter();
                    break;

                case LayerType.MULTIBOX_LOSS:
                    expected_bottom.Add("loc");
                    expected_bottom.Add("conf");
                    expected_bottom.Add("prior");
                    expected_bottom.Add("gt");
                    expected_top.Add("loss");
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter();
                    m_rgLayerParameters[lt] = new MultiBoxLossParameter();
                    break;

                case LayerType.MULTIHEAD_ATTENTION:
                    expected_bottom.Add("q");
                    expected_bottom.Add("k");
                    expected_bottom.Add("v");
                    expected_top.Add("attn");
                    m_rgLayerParameters[lt] = new MultiheadAttentionParameter();
                    break;

                case LayerType.MULTIHEAD_ATTENTION_INTERP:
                    expected_bottom.Add("q");
                    expected_bottom.Add("k");
                    expected_bottom.Add("v");
                    expected_top.Add("attn");
                    expected_top.Add("y");
                    expected_top.Add("out");
                    expected_top.Add("scr");
                    m_rgLayerParameters[lt] = new MultiHeadAttentionInterpParameter();
                    break;

                case LayerType.MULTINOMIALLOGISTIC_LOSS:
                    expected_bottom.Add("pred");
                    expected_bottom.Add("label");
                    expected_top.Add("loss");
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter();
                    break;

                case LayerType.MVN:
                    expected_bottom.Add("input");
                    expected_top.Add("mvn");
                    m_rgLayerParameters[lt] = new MVNParameter();
                    break;

                case LayerType.NLL_LOSS:
                    expected_bottom.Add("pred");
                    expected_bottom.Add("label");
                    expected_top.Add("loss");
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter();
                    m_rgLayerParameters[lt] = new NLLLossParameter();
                    break;

                case LayerType.NUMERIC_TRANS:
                    expected_bottom.Add("x");
                    expected_top.Add("proj");
                    m_rgLayerParameters[lt] = new NumericTransformationParameter();
                    break;

                case LayerType.ONEHOT:
                    expected_bottom.Add("input");
                    expected_top.Add("onehot");
                    m_rgLayerParameters[lt] = new OneHotParameter();
                    break;

                case LayerType.NORMALIZATION1:
                    expected_bottom.Add("input");
                    expected_top.Add("norm");
                    m_rgLayerParameters[lt] = new Normalization1Parameter();
                    break;

                case LayerType.NORMALIZATION2:
                    expected_bottom.Add("input");
                    expected_top.Add("norm");
                    m_rgLayerParameters[lt] = new Normalization2Parameter();
                    break;

                case LayerType.PARAMETER:
                    expected_bottom.Add("input");
                    expected_top.Add("param");
                    m_rgLayerParameters[lt] = new ParameterParameter();
                    break;

                case LayerType.PERMUTE:
                    expected_bottom.Add("input");
                    expected_top.Add("permute");
                    m_rgLayerParameters[lt] = new PermuteParameter();
                    break;

                case LayerType.POSITIONAL_ENCODER:
                    expected_bottom.Add("input");
                    expected_top.Add("pos");
                    m_rgLayerParameters[lt] = new PositionalEncoderParameter();
                    break;

                case LayerType.POOLING:
                    expected_bottom.Add("input");
                    expected_top.Add("pool");
                    expected_top.Add("mask");
                    m_rgLayerParameters[lt] = new PoolingParameter();
                    m_onnxConversionSupport = ONNX_CONVERSION_SUPPORT.INFERENCE_AND_TRAINING;
                    break;

                case LayerType.UNPOOLING1:
                    expected_bottom.Add("pool");
                    expected_top.Add("unpool");
                    m_rgLayerParameters[LayerType.UNPOOLING] = new UnPoolingParameter();
                    break;

                case LayerType.UNPOOLING:
                    expected_bottom.Add("pool");
                    expected_top.Add("unpool");
                    m_rgLayerParameters[lt] = new UnPoolingParameter();
                    break;

                case LayerType.POWER:
                    expected_bottom.Add("input");
                    expected_top.Add("power");
                    m_rgLayerParameters[lt] = new PowerParameter();
                    break;

                case LayerType.PRELU:
                    expected_bottom.Add("input");
                    expected_top.Add("prelu");
                    m_rgLayerParameters[lt] = new PReLUParameter();
                    break;

                case LayerType.PRIORBOX:
                    expected_bottom.Add("input");
                    expected_top.Add("priorbox");
                    m_rgLayerParameters[lt] = new PriorBoxParameter();
                    break;

                case LayerType.QUANTILE_ACCURACY:
                    expected_bottom.Add("x");
                    expected_bottom.Add("trgt");
                    expected_top.Add("accuracy");
                    m_rgLayerParameters[lt] = new QuantileAccuracyParameter();
                    break;

                case LayerType.QUANTILE_LOSS:
                    expected_bottom.Add("x");
                    expected_bottom.Add("trgt");
                    expected_top.Add("loss");
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter(LossParameter.NormalizationMode.BATCH_SIZE);
                    m_rgLayerParameters[lt] = new QuantileLossParameter();
                    break;

                case LayerType.REDUCTION:
                    expected_bottom.Add("input");
                    expected_top.Add("reduction");
                    m_rgLayerParameters[lt] = new ReductionParameter();
                    break;

                case LayerType.RELU:
                    expected_bottom.Add("input");
                    expected_top.Add("relu");
                    m_rgLayerParameters[lt] = new ReLUParameter();
                    m_onnxConversionSupport = ONNX_CONVERSION_SUPPORT.INFERENCE_AND_TRAINING;
                    break;

                case LayerType.RESHAPE:
                    expected_bottom.Add("input");
                    expected_top.Add("reshape");
                    m_rgLayerParameters[lt] = new ReshapeParameter();
                    m_onnxConversionSupport = ONNX_CONVERSION_SUPPORT.INFERENCE_AND_TRAINING;
                    break;

                case LayerType.RESHAPE_TEMPORAL:
                    expected_bottom.Add("input");
                    expected_top.Add("reshape_t");
                    m_rgLayerParameters[lt] = new ReshapeTemporalParameter();
                    break;

                case LayerType.SQUEEZE:
                    expected_bottom.Add("input");
                    expected_top.Add("squeeze");
                    m_rgLayerParameters[LayerType.SQUEEZE] = new SqueezeParameter();
                    break;

                case LayerType.UNSQUEEZE:
                    expected_bottom.Add("input");
                    expected_top.Add("unsqueeze");
                    m_rgLayerParameters[LayerType.SQUEEZE] = new SqueezeParameter();
                    break;

                case LayerType.SCALAR:
                    expected_bottom.Add("input");
                    expected_top.Add("sca");
                    m_rgLayerParameters[lt] = new ScalarParameter();
                    break;

                case LayerType.SCALE:
                    expected_bottom.Add("input");
                    expected_top.Add("scale");
                    m_rgLayerParameters[lt] = new ScaleParameter();
                    break;

                case LayerType.SERF:
                    expected_bottom.Add("input");
                    expected_top.Add("serf");
                    m_rgLayerParameters[lt] = new SerfParameter();
                    break;

                case LayerType.SIGMOID:
                    expected_bottom.Add("input");
                    expected_top.Add("sigmoid");
                    m_rgLayerParameters[lt] = new SigmoidParameter();
                    break;

                case LayerType.SIGMOIDCROSSENTROPY_LOSS:
                    expected_bottom.Add("scores");
                    expected_bottom.Add("trgt");
                    expected_top.Add("loss");
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter(LossParameter.NormalizationMode.BATCH_SIZE);
                    m_rgLayerParameters[LayerType.SIGMOID] = new SigmoidParameter();
                    break;

                case LayerType.SILU:
                    expected_bottom.Add("input");
                    expected_top.Add("silu");
                    break;

                case LayerType.SOFTPLUS:
                    expected_bottom.Add("input");
                    expected_top.Add("sftp");
                    break;

                case LayerType.SOFTMAXCROSSENTROPY_LOSS:
                    expected_bottom.Add("scores");
                    expected_bottom.Add("trgt");
                    expected_top.Add("loss");
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter(LossParameter.NormalizationMode.BATCH_SIZE);
                    m_rgLayerParameters[LayerType.SOFTMAX] = new SoftmaxParameter();
                    break;

                case LayerType.SOFTMAXCROSSENTROPY2_LOSS:
                    expected_bottom.Add("scores");
                    expected_bottom.Add("trgt");
                    expected_top.Add("loss");
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter(LossParameter.NormalizationMode.BATCH_SIZE);
                    m_rgLayerParameters[LayerType.SOFTMAX] = new SoftmaxParameter();
                    break;

                case LayerType.SILENCE:
                    expected_bottom.Add("input");
                    break;

                case LayerType.SLICE:
                    expected_bottom.Add("input");
                    expected_top.Add("sl1");
                    expected_top.Add("sl2");
                    m_rgLayerParameters[lt] = new SliceParameter();
                    m_onnxConversionSupport = ONNX_CONVERSION_SUPPORT.INFERENCE_AND_TRAINING;
                    break;

                case LayerType.SPLIT:
                    expected_bottom.Add("input");
                    expected_top.Add("sp1");
                    expected_top.Add("sp2");
                    m_onnxConversionSupport = ONNX_CONVERSION_SUPPORT.INFERENCE_AND_TRAINING;
                    break;

                case LayerType.SOFTMAX:
                    expected_bottom.Add("input");
                    expected_top.Add("softmax");
                    m_rgLayerParameters[lt] = new SoftmaxParameter();
                    break;

                case LayerType.SOFTMAXWITH_LOSS:
                    expected_bottom.Add("pred");
                    expected_bottom.Add("label");
                    expected_top.Add("loss");
                    m_rgLayerParameters[LayerType.SOFTMAX] = new SoftmaxParameter();
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter();
                    break;

                case LayerType.SMOOTHL1_LOSS:
                    expected_bottom.Add("pred");
                    expected_bottom.Add("label");
                    expected_top.Add("loss");
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter();
                    break;

                case LayerType.SPP:
                    expected_bottom.Add("input");
                    expected_top.Add("spp");
                    m_rgLayerParameters[lt] = new SPPParameter();
                    break;

                case LayerType.SWISH:
                    expected_bottom.Add("input");
                    expected_top.Add("swish");
                    m_rgLayerParameters[lt] = new SwishParameter();
                    break;

                case LayerType.TANH:
                    expected_bottom.Add("input");
                    expected_top.Add("tanh");
                    m_rgLayerParameters[lt] = new TanhParameter();
                    break;

                case LayerType.MODEL_DATA:
                    expected_top.Add("data");
                    expected_top.Add("decinput");
                    m_rgLayerParameters[LayerType.MODEL_DATA] = new ModelDataParameter();
                    break;

                case LayerType.TEXT_DATA:
                    expected_top.Add("data");
                    expected_top.Add("datar");
                    expected_top.Add("decinput");
                    m_rgLayerParameters[LayerType.TEXT_DATA] = new TextDataParameter();
                    break;

                case LayerType.THRESHOLD:
                    expected_bottom.Add("input");
                    expected_top.Add("thresh");
                    m_rgLayerParameters[lt] = new ThresholdParameter();
                    break;

                case LayerType.TILE:
                    expected_bottom.Add("input");
                    expected_top.Add("tile");
                    m_rgLayerParameters[lt] = new TileParameter();
                    break;

                case LayerType.TRANSFORMER_BLOCK:
                    expected_bottom.Add("input");
                    expected_top.Add("tfb");
                    m_rgLayerParameters[lt] = new TransformerBlockParameter();
                    break;

                case LayerType.TOKENIZED_DATA:
                    expected_top.Add("data");
                    expected_top.Add("pos");
                    expected_top.Add("tgt");
                    m_rgLayerParameters[lt] = new TokenizedDataParameter();
                    break;

                case LayerType.TOKENIZED_DATA_PAIRS:
                case LayerType.TOKENIZED_DATA_PAIRS_PY:
                    expected_top.Add("enc");
                    expected_top.Add("dec");
                    expected_top.Add("tgt");
                    expected_top.Add("emsk");
                    expected_top.Add("dmsk");
                    m_rgLayerParameters[LayerType.TOKENIZED_DATA_PAIRS] = new TokenizedDataPairsParameter();
                    break;

                case LayerType.TRANSPOSE:
                    expected_bottom.Add("input");
                    expected_top.Add("output");
                    m_rgLayerParameters[lt] = new TransposeParameter();
                    break;

                case LayerType.TRIPLET_LOSS:
                    expected_bottom.Add("anchor");
                    expected_bottom.Add("pos");
                    expected_bottom.Add("neg");
                    expected_bottom.Add("label");
                    expected_top.Add("loss");
                    m_rgLayerParameters[lt] = new TripletLossParameter();
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter();
                    break;

                case LayerType.TV_LOSS:
                    expected_bottom.Add("pred");
                    expected_bottom.Add("label");
                    expected_top.Add("loss");
                    m_rgLayerParameters[LayerType.LOSS] = new LossParameter();
                    m_rgLayerParameters[lt] = new TVLossParameter();
                    break;

                // DEPRECATED
                case LayerType.LSTM_SIMPLE:
                    expected_bottom.Add("time_seq");
                    expected_bottom.Add("clip");
                    expected_top.Add("lstm");
                    m_rgLayerParameters[LayerType.LSTM_SIMPLE] = new LSTMSimpleParameter();
                    break;

                case LayerType.LSTM_ATTENTION:
                    expected_bottom.Add("input");
                    expected_bottom.Add("clip");
                    expected_top.Add("lstm");
                    m_rgLayerParameters[lt] = new LSTMAttentionParameter();
                    break;

                case LayerType.RNN:
                    expected_bottom.Add("time_seq");
                    expected_bottom.Add("clip");
                    expected_top.Add("rnn");
                    m_rgLayerParameters[LayerType.RECURRENT] = new RecurrentParameter();
                    break;

                case LayerType.LSTM:
                    expected_bottom.Add("time_seq");
                    expected_bottom.Add("clip");
                    expected_top.Add("lstm");
                    m_rgLayerParameters[LayerType.RECURRENT] = new RecurrentParameter();
                    break;

                case LayerType.VIDEO_DATA:
                    expected_top.Add("data");
                    expected_top.Add("label");
                    m_rgLayerParameters[LayerType.VIDEO_DATA] = new VideoDataParameter();
                    m_rgLayerParameters[LayerType.DATA] = new DataParameter();
                    m_rgLayerParameters[LayerType.TRANSFORM] = new TransformationParameter();
                    break;

                case LayerType.VARSELNET:
                    expected_bottom.Add("flatemb");
                    expected_bottom.Add("ctx");
                    expected_top.Add("outsum");
                    expected_top.Add("sprcwts");
                    m_rgLayerParameters[lt] = new VarSelNetParameter();
                    break;
            }
        }

        public string name
        {
            get { return m_strName; }
            set { m_strName = value; }
        }

        public LayerType type
        {
            get { return m_type; }
        }

        public bool use_halfsize
        {
            get { return m_bUseHalfSize; }
            set { m_bUseHalfSize = value; }
        }

        public ONNX_CONVERSION_SUPPORT onnx_conversion_support
        {
            get { return m_onnxConversionSupport; }
        }

        public void SetType(LayerType type, bool bNewParam = true)
        {
            m_type = type;
            setupParams(type, bNewParam);
        }
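
        // Example (illustrative): converting a layer definition to a new type.
        // setupParams consults bNewParam where a layer shares sub-parameters
        // (e.g., the DECONVOLUTION case above keeps an existing ConvolutionParameter
        // when bNewParam is false).
        //
        //   layer.SetType(LayerType.DECONVOLUTION, false);  // reuse convolution_param
        //   layer.SetType(LayerType.RELU);                  // fresh ReLUParameter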

        public List<string> bottom
        {
            get { return m_rgstrBottom; }
            set { m_rgstrBottom = value; }
        }

        public List<string> top
        {
            get { return m_rgstrTop; }
            set { m_rgstrTop = value; }
        }

        public Phase phase
        {
            get { return m_phase; }
            set { m_phase = value; }
        }

        public bool freeze_learning
        {
            get { return m_bFreezeLearning; }
            set { m_bFreezeLearning = value; }
        }

        public bool connect_loss_event
        {
            get { return m_bConnectLossEvent; }
            set { m_bConnectLossEvent = value; }
        }

        public List<double> loss_weight
        {
            get { return m_rgLossWeight; }
            set { m_rgLossWeight = value; }
        }

        public List<ParamSpec> parameters
        {
            get { return m_rgParams; }
            set { m_rgParams = value; }
        }

        public List<BlobProto> blobs
        {
            get { return m_rgBlobs; }
            set { m_rgBlobs = value; }
        }

        public List<bool> propagate_down
        {
            get { return m_rgbPropagateDown; }
            set { m_rgbPropagateDown = value; }
        }

        public List<NetStateRule> include
        {
            get { return m_rgInclude; }
            set { m_rgInclude = value; }
        }

        public List<NetStateRule> exclude
        {
            get { return m_rgExclude; }
            set { m_rgExclude = value; }
        }

        public bool group_start
        {
            get { return m_bGroupStart; }
            set { m_bGroupStart = value; }
        }
2013
2018 {
2019 get { return (TransformationParameter)m_rgLayerParameters[LayerType.TRANSFORM]; }
2020 set { m_rgLayerParameters[LayerType.TRANSFORM] = value; }
2021 }
2022
2027 {
2028 get { return (LossParameter)m_rgLayerParameters[LayerType.LOSS]; }
2029 set { m_rgLayerParameters[LayerType.LOSS] = value; }
2030 }
2031
2036 {
2037 get { return (AccuracyParameter)m_rgLayerParameters[LayerType.ACCURACY]; }
2038 set { m_rgLayerParameters[LayerType.ACCURACY] = value; }
2039 }
2040
2045 {
2046 get { return (ArgMaxParameter)m_rgLayerParameters[LayerType.ARGMAX]; }
2047 set { m_rgLayerParameters[LayerType.ARGMAX] = value; }
2048 }
2049
2054 {
2055 get { return (BatchNormParameter)m_rgLayerParameters[LayerType.BATCHNORM]; }
2056 set { m_rgLayerParameters[LayerType.BATCHNORM] = value; }
2057 }
2058
2063 {
2064 get { return (BiasParameter)m_rgLayerParameters[LayerType.BIAS]; }
2065 set { m_rgLayerParameters[LayerType.BIAS] = value; }
2066 }
2067
2072 {
2073 get { return (CfcParameter)m_rgLayerParameters[LayerType.CFC]; }
2074 set { m_rgLayerParameters[LayerType.CFC] = value; }
2075 }
2076
2081 {
2082 get { return (CfcUnitParameter)m_rgLayerParameters[LayerType.CFC_UNIT]; }
2083 set { m_rgLayerParameters[LayerType.CFC_UNIT] = value; }
2084 }
2085
2090 {
2091 get { return (ClipParameter)m_rgLayerParameters[LayerType.CLIP]; }
2092 set { m_rgLayerParameters[LayerType.CLIP] = value; }
2093 }
2094
2099 {
2100 get { return (ConcatParameter)m_rgLayerParameters[LayerType.CONCAT]; }
2101 set { m_rgLayerParameters[LayerType.CONCAT] = value; }
2102 }
2103
2108 {
2109 get { return (ConstantParameter)m_rgLayerParameters[LayerType.CONSTANT]; }
2110 set { m_rgLayerParameters[LayerType.CONSTANT] = value; }
2111 }
2112
2117 {
2118 get { return (ContrastiveLossParameter)m_rgLayerParameters[LayerType.CONTRASTIVE_LOSS]; }
2119 set { m_rgLayerParameters[LayerType.CONTRASTIVE_LOSS] = value; }
2120 }
2121
2126 {
2127 get { return (ConvolutionParameter)m_rgLayerParameters[LayerType.CONVOLUTION]; }
2128 set { m_rgLayerParameters[LayerType.CONVOLUTION] = value; }
2129 }
2130
2135 {
2136 get { return (ConvolutionOctaveParameter)m_rgLayerParameters[LayerType.CONVOLUTION_OCTAVE]; }
2137 set { m_rgLayerParameters[LayerType.CONVOLUTION_OCTAVE] = value; }
2138 }
2139
2144 {
2145 get { return (CropParameter)m_rgLayerParameters[LayerType.CROP]; }
2146 set { m_rgLayerParameters[LayerType.CROP] = value; }
2147 }
2148
2153 {
2154 get { return (DecodeParameter)m_rgLayerParameters[LayerType.DECODE]; }
2155 set { m_rgLayerParameters[LayerType.DECODE] = value; }
2156 }
2157
2162 {
2163 get { return (AnnotatedDataParameter)m_rgLayerParameters[LayerType.ANNOTATED_DATA]; }
2164 set { m_rgLayerParameters[LayerType.ANNOTATED_DATA] = value; }
2165 }
2166
2167
2172 {
2173 get { return (AttentionParameter)m_rgLayerParameters[LayerType.ATTENTION]; }
2174 set { m_rgLayerParameters[LayerType.ATTENTION] = value; }
2175 }
2176
2177
2182 {
2183 get { return (CategoricalTransformationParameter)m_rgLayerParameters[LayerType.CATEGORICAL_TRANS]; }
2184 set { m_rgLayerParameters[LayerType.CATEGORICAL_TRANS] = value; }
2185 }
2186
2191 {
2192 get { return (CausalSelfAttentionParameter)m_rgLayerParameters[LayerType.CAUSAL_SELF_ATTENTION]; }
2193 set { m_rgLayerParameters[LayerType.CAUSAL_SELF_ATTENTION] = value; }
2194 }
2195
2200 {
2201 get { return (MultiheadAttentionParameter)m_rgLayerParameters[LayerType.MULTIHEAD_ATTENTION]; }
2202 set { m_rgLayerParameters[LayerType.MULTIHEAD_ATTENTION] = value; }
2203 }
2204
2209 {
2210 get { return (MultiHeadAttentionInterpParameter)m_rgLayerParameters[LayerType.MULTIHEAD_ATTENTION_INTERP]; }
2211 set { m_rgLayerParameters[LayerType.MULTIHEAD_ATTENTION_INTERP] = value; }
2212 }
2213
2218 {
2219 get { return (PositionalEncoderParameter)m_rgLayerParameters[LayerType.POSITIONAL_ENCODER]; }
2220 set { m_rgLayerParameters[LayerType.POSITIONAL_ENCODER] = value; }
2221 }
2222
2227 {
2228 get { return (DetectionEvaluateParameter)m_rgLayerParameters[LayerType.DETECTION_EVALUATE]; }
2229 set { m_rgLayerParameters[LayerType.DETECTION_EVALUATE] = value; }
2230 }
2231
2236 {
2237 get { return (DetectionOutputParameter)m_rgLayerParameters[LayerType.DETECTION_OUTPUT]; }
2238 set { m_rgLayerParameters[LayerType.DETECTION_OUTPUT] = value; }
2239 }
2240
2245 {
2246 get { return (DataParameter)m_rgLayerParameters[LayerType.DATA]; }
2247 set { m_rgLayerParameters[LayerType.DATA] = value; }
2248 }
2249
2254 {
2255 get { return (DataNormalizerParameter)m_rgLayerParameters[LayerType.DATA_NORMALIZER]; }
2256 set { m_rgLayerParameters[LayerType.DATA_NORMALIZER] = value; }
2257 }
2258
2263 {
2264 get { return (DataSequenceParameter)m_rgLayerParameters[LayerType.DATA_SEQUENCE]; }
2265 set { m_rgLayerParameters[LayerType.DATA_SEQUENCE] = value; }
2266 }
2267
2272 {
2273 get { return (DataTemporalParameter)m_rgLayerParameters[LayerType.DATA_TEMPORAL]; }
2274 set { m_rgLayerParameters[LayerType.DATA_TEMPORAL] = value; }
2275 }
2276
2281 {
2282 get { return (DebugParameter)m_rgLayerParameters[LayerType.DEBUG]; }
2283 set { m_rgLayerParameters[LayerType.DEBUG] = value; }
2284 }
2285
2290 {
2291 get { return (DropoutParameter)m_rgLayerParameters[LayerType.DROPOUT]; }
2292 set { m_rgLayerParameters[LayerType.DROPOUT] = value; }
2293 }
2294
2299 {
2300 get { return (DummyDataParameter)m_rgLayerParameters[LayerType.DUMMYDATA]; }
2301 set { m_rgLayerParameters[LayerType.DUMMYDATA] = value; }
2302 }
2303
2308 {
2309 get { return (EltwiseParameter)m_rgLayerParameters[LayerType.ELTWISE]; }
2310 set { m_rgLayerParameters[LayerType.ELTWISE] = value; }
2311 }
2312
2317 {
2318 get { return (EluParameter)m_rgLayerParameters[LayerType.ELU]; }
2319 set { m_rgLayerParameters[LayerType.ELU] = value; }
2320 }
2321
2326 {
2327 get { return (EmbedParameter)m_rgLayerParameters[LayerType.EMBED]; }
2328 set { m_rgLayerParameters[LayerType.EMBED] = value; }
2329 }
2330
2335 {
2336 get { return (ExpParameter)m_rgLayerParameters[LayerType.EXP]; }
2337 set { m_rgLayerParameters[LayerType.EXP] = value; }
2338 }
2339
2344 {
2345 get { return (FlattenParameter)m_rgLayerParameters[LayerType.FLATTEN]; }
2346 set { m_rgLayerParameters[LayerType.FLATTEN] = value; }
2347 }
2348
2353 {
2354 get { return (GatherParameter)m_rgLayerParameters[LayerType.GATHER]; }
2355 set { m_rgLayerParameters[LayerType.GATHER] = value; }
2356 }
2357
2362 {
2363 get { return (GateAddNormParameter)m_rgLayerParameters[LayerType.GATEADDNORM]; }
2364 set { m_rgLayerParameters[LayerType.GATEADDNORM] = value; }
2365 }
2366
2371 {
2372 get { return (GeluParameter)m_rgLayerParameters[LayerType.GELU]; }
2373 set { m_rgLayerParameters[LayerType.GELU] = value; }
2374 }
2375
2380 {
2381 get { return (GluParameter)m_rgLayerParameters[LayerType.GLU]; }
2382 set { m_rgLayerParameters[LayerType.GLU] = value; }
2383 }
2384
2389 {
2390 get { return (GrnParameter)m_rgLayerParameters[LayerType.GRN]; }
2391 set { m_rgLayerParameters[LayerType.GRN] = value; }
2392 }
2393
2398 {
2399 get { return (GradientScaleParameter)m_rgLayerParameters[LayerType.GRADIENTSCALER]; }
2400 set { m_rgLayerParameters[LayerType.GRADIENTSCALER] = value; }
2401 }
2402
2407 {
2408 get { return (GramParameter)m_rgLayerParameters[LayerType.GRAM]; }
2409 set { m_rgLayerParameters[LayerType.GRAM] = value; }
2410 }
2411
2416 {
2417 get { return (HDF5DataParameter)m_rgLayerParameters[LayerType.HDF5_DATA]; }
2418 set { m_rgLayerParameters[LayerType.HDF5_DATA] = value; }
2419 }
2420
2425 {
2426 get { return (HingeLossParameter)m_rgLayerParameters[LayerType.HINGE_LOSS]; }
2427 set { m_rgLayerParameters[LayerType.HINGE_LOSS] = value; }
2428 }
2429
2434 {
2435 get { return (ImageDataParameter)m_rgLayerParameters[LayerType.IMAGE_DATA]; }
2436 set { m_rgLayerParameters[LayerType.IMAGE_DATA] = value; }
2437 }
2438
2443 {
2444 get { return (InfogainLossParameter)m_rgLayerParameters[LayerType.INFOGAIN_LOSS]; }
2445 set { m_rgLayerParameters[LayerType.INFOGAIN_LOSS] = value; }
2446 }
2447
2452 {
2453 get { return (InnerProductParameter)m_rgLayerParameters[LayerType.INNERPRODUCT]; }
2454 set { m_rgLayerParameters[LayerType.INNERPRODUCT] = value; }
2455 }
2456
2461 {
2462 get { return (InterpParameter)m_rgLayerParameters[LayerType.INTERP]; }
2463 set { m_rgLayerParameters[LayerType.INTERP] = value; }
2464 }
2465
2470 {
2471 get { return (KnnParameter)m_rgLayerParameters[LayerType.KNN]; }
2472 set { m_rgLayerParameters[LayerType.KNN] = value; }
2473 }
2474
2479 {
2480 get { return (LabelMappingParameter)m_rgLayerParameters[LayerType.LABELMAPPING]; }
2481 set { m_rgLayerParameters[LayerType.LABELMAPPING] = value; }
2482 }
2483
2488 {
2489 get { return (LayerNormParameter)m_rgLayerParameters[LayerType.LAYERNORM]; }
2490 set { m_rgLayerParameters[LayerType.LAYERNORM] = value; }
2491 }
2492
2497 {
2498 get { return (LogParameter)m_rgLayerParameters[LayerType.LOG]; }
2499 set { m_rgLayerParameters[LayerType.LOG] = value; }
2500 }
2501
2506 {
2507 get { return (LRNParameter)m_rgLayerParameters[LayerType.LRN]; }
2508 set { m_rgLayerParameters[LayerType.LRN] = value; }
2509 }
2510
2515 {
2516 get { return (LtcUnitParameter)m_rgLayerParameters[LayerType.LTC_UNIT]; }
2517 set { m_rgLayerParameters[LayerType.LTC_UNIT] = value; }
2518 }
2519
2524 {
2525 get { return (MeanErrorLossParameter)m_rgLayerParameters[LayerType.MEAN_ERROR_LOSS]; }
2526 set { m_rgLayerParameters[LayerType.MEAN_ERROR_LOSS] = value; }
2527 }
2528
2533 {
2534 get { return (MathParameter)m_rgLayerParameters[LayerType.MATH]; }
2535 set { m_rgLayerParameters[LayerType.MATH] = value; }
2536 }
2537
2542 {
2543 get { return (MergeParameter)m_rgLayerParameters[LayerType.MERGE]; }
2544 set { m_rgLayerParameters[LayerType.MERGE] = value; }
2545 }
2546
2551 {
2552 get { return (MemoryDataParameter)m_rgLayerParameters[LayerType.MEMORYDATA]; }
2553 set { m_rgLayerParameters[LayerType.MEMORYDATA] = value; }
2554 }
2555
2560 {
2561 get { return (MishParameter)m_rgLayerParameters[LayerType.MISH]; }
2562 set { m_rgLayerParameters[LayerType.MISH] = value; }
2563 }
2564
2569 {
2570 get { return (MultiBoxLossParameter)m_rgLayerParameters[LayerType.MULTIBOX_LOSS]; }
2571 set { m_rgLayerParameters[LayerType.MULTIBOX_LOSS] = value; }
2572 }
2573
2578 {
2579 get { return (MVNParameter)m_rgLayerParameters[LayerType.MVN]; }
2580 set { m_rgLayerParameters[LayerType.MVN] = value; }
2581 }
2582
2587 {
2588 get { return (NLLLossParameter)m_rgLayerParameters[LayerType.NLL_LOSS]; }
2589 set { m_rgLayerParameters[LayerType.NLL_LOSS] = value; }
2590 }
2591
2596 {
2597 get { return (NumericTransformationParameter)m_rgLayerParameters[LayerType.NUMERIC_TRANS]; }
2598 set { m_rgLayerParameters[LayerType.NUMERIC_TRANS] = value; }
2599 }
2600
2605 {
2606 get { return (OneHotParameter)m_rgLayerParameters[LayerType.ONEHOT]; }
2607 set { m_rgLayerParameters[LayerType.ONEHOT] = value; }
2608 }
2609
2614 {
2615 get { return (Normalization1Parameter)m_rgLayerParameters[LayerType.NORMALIZATION1]; }
2616 set { m_rgLayerParameters[LayerType.NORMALIZATION1] = value; }
2617 }
2618
2623 {
2624 get { return (Normalization2Parameter)m_rgLayerParameters[LayerType.NORMALIZATION2]; }
2625 set { m_rgLayerParameters[LayerType.NORMALIZATION2] = value; }
2626 }
2627
2632 {
2633 get { return (PoolingParameter)m_rgLayerParameters[LayerType.POOLING]; }
2634 set { m_rgLayerParameters[LayerType.POOLING] = value; }
2635 }
2636
2641 {
2642 get { return (UnPoolingParameter)m_rgLayerParameters[LayerType.UNPOOLING]; }
2643 set { m_rgLayerParameters[LayerType.UNPOOLING] = value; }
2644 }
2645
2650 {
2651 get { return (ParameterParameter)m_rgLayerParameters[LayerType.PARAMETER]; }
2652 set { m_rgLayerParameters[LayerType.PARAMETER] = value; }
2653 }
2654
2659 {
2660 get { return (PermuteParameter)m_rgLayerParameters[LayerType.PERMUTE]; }
2661 set { m_rgLayerParameters[LayerType.PERMUTE] = value; }
2662 }
2663
2668 {
2669 get { return (PowerParameter)m_rgLayerParameters[LayerType.POWER]; }
2670 set { m_rgLayerParameters[LayerType.POWER] = value; }
2671 }
2672
2677 {
2678 get { return (PReLUParameter)m_rgLayerParameters[LayerType.PRELU]; }
2679 set { m_rgLayerParameters[LayerType.PRELU] = value; }
2680 }
2681
2686 {
2687 get { return (PriorBoxParameter)m_rgLayerParameters[LayerType.PRIORBOX]; }
2688 set { m_rgLayerParameters[LayerType.PRIORBOX] = value; }
2689 }
2690
2695 {
2696 get { return (QuantileAccuracyParameter)m_rgLayerParameters[LayerType.QUANTILE_ACCURACY]; }
2697 set { m_rgLayerParameters[LayerType.QUANTILE_ACCURACY] = value; }
2698 }
2699
2704 {
2705 get { return (QuantileLossParameter)m_rgLayerParameters[LayerType.QUANTILE_LOSS]; }
2706 set { m_rgLayerParameters[LayerType.QUANTILE_LOSS] = value; }
2707 }
2708
2713 {
2714 get { return (ReductionParameter)m_rgLayerParameters[LayerType.REDUCTION]; }
2715 set { m_rgLayerParameters[LayerType.REDUCTION] = value; }
2716 }
2717
2722 {
2723 get { return (ReLUParameter)m_rgLayerParameters[LayerType.RELU]; }
2724 set { m_rgLayerParameters[LayerType.RELU] = value; }
2725 }
2726
2731 {
2732 get { return (ReshapeParameter)m_rgLayerParameters[LayerType.RESHAPE]; }
2733 set { m_rgLayerParameters[LayerType.RESHAPE] = value; }
2734 }
2735
2740 {
2741 get { return (ReshapeTemporalParameter)m_rgLayerParameters[LayerType.RESHAPE_TEMPORAL]; }
2742 set { m_rgLayerParameters[LayerType.RESHAPE_TEMPORAL] = value; }
2743 }
2744
2749 {
2750 get { return (SqueezeParameter)m_rgLayerParameters[LayerType.SQUEEZE]; }
2751 set { m_rgLayerParameters[LayerType.SQUEEZE] = value; }
2752 }
2753
2758 {
2759 get { return (ScalarParameter)m_rgLayerParameters[LayerType.SCALAR]; }
2760 set { m_rgLayerParameters[LayerType.SCALAR] = value; }
2761 }
2762
2767 {
2768 get { return (ScaleParameter)m_rgLayerParameters[LayerType.SCALE]; }
2769 set { m_rgLayerParameters[LayerType.SCALE] = value; }
2770 }
2771
2776 {
2777 get { return (SerfParameter)m_rgLayerParameters[LayerType.SERF]; }
2778 set { m_rgLayerParameters[LayerType.SERF] = value; }
2779 }
2780
2785 {
2786 get { return (SigmoidParameter)m_rgLayerParameters[LayerType.SIGMOID]; }
2787 set { m_rgLayerParameters[LayerType.SIGMOID] = value; }
2788 }
2789
2794 {
2795 get { return (SoftmaxParameter)m_rgLayerParameters[LayerType.SOFTMAX]; }
2796 set { m_rgLayerParameters[LayerType.SOFTMAX] = value; }
2797 }
2798
2803 {
2804 get { return (SPPParameter)m_rgLayerParameters[LayerType.SPP]; }
2805 set { m_rgLayerParameters[LayerType.SPP] = value; }
2806 }
2807
2812 {
2813 get { return (SliceParameter)m_rgLayerParameters[LayerType.SLICE]; }
2814 set { m_rgLayerParameters[LayerType.SLICE] = value; }
2815 }
2816
2817
2822 {
2823 get { return (SwishParameter)m_rgLayerParameters[LayerType.SWISH]; }
2824 set { m_rgLayerParameters[LayerType.SWISH] = value; }
2825 }
2826
2831 {
2832 get { return (TanhParameter)m_rgLayerParameters[LayerType.TANH]; }
2833 set { m_rgLayerParameters[LayerType.TANH] = value; }
2834 }
2835
2840 {
2841 get { return (ModelDataParameter)m_rgLayerParameters[LayerType.MODEL_DATA]; }
2842 set { m_rgLayerParameters[LayerType.MODEL_DATA] = value; }
2843 }
2844
2849 {
2850 get { return (TextDataParameter)m_rgLayerParameters[LayerType.TEXT_DATA]; }
2851 set { m_rgLayerParameters[LayerType.TEXT_DATA] = value; }
2852 }
2853
2858 {
2859 get { return (ThresholdParameter)m_rgLayerParameters[LayerType.THRESHOLD]; }
2860 set { m_rgLayerParameters[LayerType.THRESHOLD] = value; }
2861 }
2862
2867 {
2868 get { return (TileParameter)m_rgLayerParameters[LayerType.TILE]; }
2869 set { m_rgLayerParameters[LayerType.TILE] = value; }
2870 }
2871
2876 {
2877 get { return (TransposeParameter)m_rgLayerParameters[LayerType.TRANSPOSE]; }
2878 set { m_rgLayerParameters[LayerType.TRANSPOSE] = value; }
2879 }
2880
2885 {
2886 get { return (TransformerBlockParameter)m_rgLayerParameters[LayerType.TRANSFORMER_BLOCK]; }
2887 set { m_rgLayerParameters[LayerType.TRANSFORMER_BLOCK] = value; }
2888 }
2889
2894 {
2895 get { return (TokenizedDataParameter)m_rgLayerParameters[LayerType.TOKENIZED_DATA]; }
2896 set { m_rgLayerParameters[LayerType.TOKENIZED_DATA] = value; }
2897 }
2898
2903 {
2904 get { return (TokenizedDataPairsParameter)m_rgLayerParameters[LayerType.TOKENIZED_DATA_PAIRS]; }
2905 set { m_rgLayerParameters[LayerType.TOKENIZED_DATA_PAIRS] = value; }
2906 }
2907
2912 {
2913 get { return (TripletLossParameter)m_rgLayerParameters[LayerType.TRIPLET_LOSS]; }
2914 set { m_rgLayerParameters[LayerType.TRIPLET_LOSS] = value; }
2915 }
2916
2921 {
2922 get { return (TVLossParameter)m_rgLayerParameters[LayerType.TV_LOSS]; }
2923 set { m_rgLayerParameters[LayerType.TV_LOSS] = value; }
2924 }
2925
2930 {
2931 get { return (LSTMSimpleParameter)m_rgLayerParameters[LayerType.LSTM_SIMPLE]; }
2932 set { m_rgLayerParameters[LayerType.LSTM_SIMPLE] = value; }
2933 }
2934
2939 {
2940 get { return (LSTMAttentionParameter)m_rgLayerParameters[LayerType.LSTM_ATTENTION]; }
2941 set { m_rgLayerParameters[LayerType.LSTM_ATTENTION] = value; }
2942 }
2943
2948 {
2949 get { return (RecurrentParameter)m_rgLayerParameters[LayerType.RECURRENT]; }
2950 set { m_rgLayerParameters[LayerType.RECURRENT] = value; }
2951 }
2952
2957 {
2958 get { return (InputParameter)m_rgLayerParameters[LayerType.INPUT]; }
2959 set { m_rgLayerParameters[LayerType.INPUT] = value; }
2960 }
2961
2966 {
2967 get { return (VideoDataParameter)m_rgLayerParameters[LayerType.VIDEO_DATA]; }
2968 set { m_rgLayerParameters[LayerType.VIDEO_DATA] = value; }
2969 }
2970
2974 public VarSelNetParameter varselnet_param
2975 {
2976 get { return (VarSelNetParameter)m_rgLayerParameters[LayerType.VARSELNET]; }
2977 set { m_rgLayerParameters[LayerType.VARSELNET] = value; }
2978 }
2979
2983 public void clear_blobs()
2984 {
2985 m_rgBlobs.Clear();
2986 }
2987
2991 public int solver_count
2992 {
2993 get { return m_nSolverCount; }
2994 set { m_nSolverCount = value; }
2995 }
2996
3000 public int solver_rank
3001 {
3002 get { return m_nSolverRank; }
3003 set { m_nSolverRank = value; }
3004 }
3005
3009 public List<string> expected_top
3010 {
3011 get { return m_rgstrExpectedTop; }
3012 }
3013
3017 public List<string> expected_bottom
3018 {
3019 get { return m_rgstrExpectedBottom; }
3020 }
3021
3026 public void CopyParameters(LayerParameter src)
3027 {
3028 m_rgLayerParameters = new Dictionary<LayerType, LayerParameterBase>();
3029
3030 foreach (KeyValuePair<LayerType, LayerParameterBase> kv in src.m_rgLayerParameters)
3031 {
3032 if (kv.Value != null)
3033 m_rgLayerParameters.Add(kv.Key, kv.Value.Clone());
3034 else
3035 m_rgLayerParameters.Add(kv.Key, null);
3036 }
3037 }
3038
3043 public virtual LayerParameter Clone(bool bCloneBlobs)
3044 {
3045 LayerParameter p = new LayerParameter(m_type, m_strName);
3046
3047 p.m_rgstrBottom = Utility.Clone<string>(m_rgstrBottom);
3048 p.m_rgstrTop = Utility.Clone<string>(m_rgstrTop);
3049 p.m_phase = m_phase;
3050 p.m_rgLossWeight = Utility.Clone<double>(m_rgLossWeight);
3051 p.m_rgParams = Utility.Clone<ParamSpec>(m_rgParams);
3052
3053 if (bCloneBlobs)
3054 p.m_rgBlobs = Utility.Clone<BlobProto>(m_rgBlobs);
3055
3056 p.m_rgbPropagateDown = Utility.Clone<bool>(m_rgbPropagateDown);
3057 p.m_rgInclude = Utility.Clone<NetStateRule>(m_rgInclude);
3058 p.m_rgExclude = Utility.Clone<NetStateRule>(m_rgExclude);
3059 p.m_bFreezeLearning = m_bFreezeLearning;
3060 p.m_bConnectLossEvent = m_bConnectLossEvent;
3061
3062 p.m_rgLayerParameters = new Dictionary<LayerType, LayerParameterBase>();
3063
3064 foreach (KeyValuePair<LayerType, LayerParameterBase> kv in m_rgLayerParameters)
3065 {
3066 if (kv.Value != null)
3067 p.m_rgLayerParameters.Add(kv.Key, kv.Value.Clone());
3068 else
3069 p.m_rgLayerParameters.Add(kv.Key, null);
3070 }
3071
3072 p.m_nSolverCount = m_nSolverCount;
3073 p.m_nSolverRank = m_nSolverRank;
3074 p.m_bGroupStart = m_bGroupStart;
3075 p.m_bUseHalfSize = m_bUseHalfSize;
3076
3077 return p;
3078 }
3079
3084 object ICloneable.Clone()
3085 {
3086 return Clone(true);
3087 }
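The two clone paths differ only in whether the learned blobs are copied. A minimal usage sketch (the "conv1" layer here is purely illustrative):

    // Clone without blobs when only the configuration is needed,
    // e.g. when duplicating a model definition for rendering.
    LayerParameter conv = new LayerParameter(LayerParameter.LayerType.CONVOLUTION, "conv1");
    LayerParameter configOnly = conv.Clone(false);                    // structure only, no blobs
    LayerParameter full = (LayerParameter)((ICloneable)conv).Clone(); // same as Clone(true)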
3088
3090 public int CompareTo(object obj)
3091 {
3092 LayerParameter p = obj as LayerParameter;
3093
3094 if (p == null)
3095 return 1;
3096
3097 if (!Compare(p))
3098 return 1;
3099
3100 return 0;
3101 }
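Note that CompareTo never returns -1; it reduces to an equality probe built on Compare, so it is not suitable for ordering. A short sketch:

    LayerParameter p1 = new LayerParameter(LayerParameter.LayerType.RELU, "relu1");
    LayerParameter p2 = p1.Clone(false);
    int nResult = p1.CompareTo(p2); // 0 when Compare(p2) returns true; otherwise 1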
3102
3103 private string getTypeString(LayerType type)
3104 {
3105 switch (type)
3106 {
3107 case LayerType.ABSVAL:
3108 return "AbsVal";
3109
3110 case LayerType.ACCURACY:
3111 return "Accuracy";
3112
3113 case LayerType.ACCURACY_DECODE:
3114 return "AccuracyDecode";
3115
3116 case LayerType.ACCURACY_ENCODING:
3117 return "AccuracyEncoding";
3118
3119 case LayerType.ARGMAX:
3120 return "ArgMax";
3121
3122 case LayerType.ANNOTATED_DATA:
3123 return "AnnotatedData";
3124
3125 case LayerType.ATTENTION:
3126 return "Attention";
3127
3128 case LayerType.BATCHNORM:
3129 return "BatchNorm";
3130
3131 case LayerType.BATCHREINDEX:
3132 return "BatchReIndex";
3133
3134 case LayerType.BIAS:
3135 return "Bias";
3136
3137 case LayerType.BNLL:
3138 return "BNLL";
3139
3140 case LayerType.CATEGORICAL_TRANS:
3141 return "CategoricalTrans";
3142
3143 case LayerType.CAUSAL_SELF_ATTENTION:
3144 return "CausalSelfAttention";
3145
3146 case LayerType.CFC:
3147 return "Cfc";
3148
3149 case LayerType.CFC_UNIT:
3150 return "Cfc_Unit";
3151
3152 case LayerType.CHANNEL_EMBEDDING:
3153 return "ChannelEmbedding";
3154
3155 case LayerType.CLIP:
3156 return "Clip";
3157
3158 case LayerType.CONCAT:
3159 return "Concat";
3160
3161 case LayerType.CONSTANT:
3162 return "Constant";
3163
3164 case LayerType.CONTRASTIVE_LOSS:
3165 return "ContrastiveLoss";
3166
3167 case LayerType.CONVOLUTION:
3168 return "Convolution";
3169
3170 case LayerType.CONVOLUTION_OCTAVE:
3171 return "ConvolutionOctave";
3172
3173 case LayerType.CROP:
3174 return "Crop";
3175
3176 case LayerType.COPY:
3177 return "Copy";
3178
3179 case LayerType.DECODE:
3180 return "Decode";
3181
3182 case LayerType.DATA:
3183 return "Data";
3184
3185 case LayerType.DATA_NORMALIZER:
3186 return "DataNormalizer";
3187
3188 case LayerType.DATA_SEQUENCE:
3189 return "DataSequence";
3190
3191 case LayerType.DATA_TEMPORAL:
3192 return "DataTemporal";
3193
3194 case LayerType.DEBUG:
3195 return "Debug";
3196
3197 case LayerType.DECONVOLUTION:
3198 return "Deconvolution";
3199
3200 case LayerType.DETECTION_EVALUATE:
3201 return "DetectionEvaluate";
3202
3203 case LayerType.DETECTION_OUTPUT:
3204 return "DetectionOutput";
3205
3206 case LayerType.DROPOUT:
3207 return "Dropout";
3208
3209 case LayerType.DUMMYDATA:
3210 return "DummyData";
3211
3212 case LayerType.ELTWISE:
3213 return "Eltwise";
3214
3215 case LayerType.ELU:
3216 return "ELU";
3217
3218 case LayerType.EMBED:
3219 return "Embed";
3220
3221 case LayerType.EUCLIDEAN_LOSS:
3222 return "EuclideanLoss";
3223
3224 case LayerType.EVENT:
3225 return "Event";
3226
3227 case LayerType.EXP:
3228 return "EXP";
3229
3230 case LayerType.FILTER:
3231 return "Filter";
3232
3233 case LayerType.FLATTEN:
3234 return "Flatten";
3235
3236 case LayerType.GATHER:
3237 return "Gather";
3238
3239 case LayerType.GATEADDNORM:
3240 return "GateAddNorm";
3241
3242 case LayerType.GELU:
3243 return "GELU";
3244
3245 case LayerType.GLU:
3246 return "GLU";
3247
3248 case LayerType.GRN:
3249 return "GRN";
3250
3251 case LayerType.GLOBRES_NORM:
3252 return "GlobResNorm";
3253
3254 case LayerType.GRADIENTSCALER:
3255 return "GSL";
3256
3257 case LayerType.GRAM:
3258 return "Gram";
3259
3260 case LayerType.HDF5_DATA:
3261 return "HDF5Data";
3262
3263 case LayerType.HINGE_LOSS:
3264 return "HingeLoss";
3265
3266 case LayerType.IMAGE_DATA:
3267 return "ImageData";
3268
3269 case LayerType.IM2COL:
3270 return "Im2Col";
3271
3272 case LayerType.INFOGAIN_LOSS:
3273 return "InfogainLoss";
3274
3275 case LayerType.INNERPRODUCT:
3276 return "InnerProduct";
3277
3278 case LayerType.INPUT:
3279 return "Input";
3280
3281 case LayerType.INTERP:
3282 return "Interp";
3283
3284 case LayerType.KNN:
3285 return "Knn";
3286
3287 case LayerType.LABELMAPPING:
3288 return "LabelMapping";
3289
3290 case LayerType.LAYERNORM:
3291 return "LayerNorm";
3292
3293 case LayerType.LECUN:
3294 return "LeCun";
3295
3296 case LayerType.LOG:
3297 return "Log";
3298
3299 case LayerType.LOSS:
3300 return "Loss";
3301
3302 case LayerType.LRN:
3303 return "LRN";
3304
3305 case LayerType.LTC_UNIT:
3306 return "Ltc_Unit";
3307
3308 case LayerType.MEAN_ERROR_LOSS:
3309 return "MeanErrorLoss";
3310
3311 case LayerType.MATH:
3312 return "MATH";
3313
3314 case LayerType.MERGE:
3315 return "Merge";
3316
3317 case LayerType.MEMORYDATA:
3318 return "MemoryData";
3319
3320 case LayerType.MULTIBOX_LOSS:
3321 return "MultiBoxLoss";
3322
3323 case LayerType.MULTIHEAD_ATTENTION:
3324 return "MultiheadAttention";
3325
3326 case LayerType.MULTIHEAD_ATTENTION_INTERP:
3327 return "MultiheadAttentionInterp";
3328
3329 case LayerType.MEMORY_LOSS:
3330 return "MemoryLoss";
3331
3332 case LayerType.MISH:
3333 return "Mish";
3334
3335 case LayerType.MULTINOMIALLOGISTIC_LOSS:
3336 return "MultinomialLogisticLoss";
3337
3338 case LayerType.MVN:
3339 return "MVN";
3340
3341 case LayerType.NLL_LOSS:
3342 return "NLLLoss";
3343
3344 case LayerType.NUMERIC_TRANS:
3345 return "NumericTrans";
3346
3347 case LayerType.ONEHOT:
3348 return "OneHot";
3349
3350 case LayerType.NORMALIZATION1:
3351 return "Normalization1";
3352
3353 case LayerType.NORMALIZATION2:
3354 return "Normalization2";
3355
3356 case LayerType.PARAMETER:
3357 return "Parameter";
3358
3359 case LayerType.PERMUTE:
3360 return "Permute";
3361
3362 case LayerType.POSITIONAL_ENCODER:
3363 return "PositionalEncoder";
3364
3365 case LayerType.POOLING:
3366 return "Pooling";
3367
3368 case LayerType.UNPOOLING1:
3369 return "UnPooling1";
3370
3371 case LayerType.UNPOOLING:
3372 return "UnPooling";
3373
3374 case LayerType.POWER:
3375 return "Power";
3376
3377 case LayerType.PRELU:
3378 return "PReLU";
3379
3380 case LayerType.PRIORBOX:
3381 return "PriorBox";
3382
3383 case LayerType.QUANTILE_ACCURACY:
3384 return "QuantileAccuracy";
3385
3386 case LayerType.QUANTILE_LOSS:
3387 return "QuantileLoss";
3388
3389 case LayerType.REDUCTION:
3390 return "Reduction";
3391
3392 case LayerType.RELU:
3393 return "ReLU";
3394
3395 case LayerType.RESHAPE:
3396 return "Reshape";
3397
3398 case LayerType.RESHAPE_TEMPORAL:
3399 return "ReshapeTemporal";
3400
3401 case LayerType.SQUEEZE:
3402 return "Squeeze";
3403
3404 case LayerType.UNSQUEEZE:
3405 return "Unsqueeze";
3406
3407 case LayerType.SCALAR:
3408 return "Scalar";
3409
3410 case LayerType.SCALE:
3411 return "Scale";
3412
3413 case LayerType.SERF:
3414 return "Serf";
3415
3416 case LayerType.SIGMOID:
3417 return "Sigmoid";
3418
3419 case LayerType.SIGMOIDCROSSENTROPY_LOSS:
3420 return "SigmoidCrossEntropyLoss";
3421
3422 case LayerType.SILU:
3423 return "SiLU";
3424
3425 case LayerType.SOFTPLUS:
3426 return "SoftPlus";
3427
3428 case LayerType.SOFTMAXCROSSENTROPY_LOSS:
3429 return "SoftmaxCrossEntropyLoss";
3430
3431 case LayerType.SOFTMAXCROSSENTROPY2_LOSS:
3432 return "SoftmaxCrossEntropy2Loss";
3433
3434 case LayerType.SILENCE:
3435 return "Silence";
3436
3437 case LayerType.SLICE:
3438 return "Slice";
3439
3440 case LayerType.SOFTMAX:
3441 return "Softmax";
3442
3443 case LayerType.SOFTMAXWITH_LOSS:
3444 return "SoftmaxWithLoss";
3445
3446 case LayerType.SMOOTHL1_LOSS:
3447 return "SmoothL1Loss";
3448
3449 case LayerType.SPLIT:
3450 return "Split";
3451
3452 case LayerType.SPP:
3453 return "SPP";
3454
3455 case LayerType.SWISH:
3456 return "Swish";
3457
3458 case LayerType.TANH:
3459 return "TanH";
3460
3461 case LayerType.MODEL_DATA:
3462 return "ModelData";
3463
3464 case LayerType.TEXT_DATA:
3465 return "TextData";
3466
3467 case LayerType.THRESHOLD:
3468 return "Threshold";
3469
3470 case LayerType.TILE:
3471 return "Tile";
3472
3473 case LayerType.TRANSPOSE:
3474 return "Transpose";
3475
3476 case LayerType.TRANSFORMER_BLOCK:
3477 return "TransformerBlock";
3478
3479 case LayerType.TOKENIZED_DATA:
3480 return "TokenizedData";
3481
3482 case LayerType.TOKENIZED_DATA_PAIRS:
3483 return "TokenizedDataPairs";
3484
3485 case LayerType.TOKENIZED_DATA_PAIRS_PY:
3486 return "TokenizedDataPairsPy";
3487
3488 case LayerType.TRIPLET_LOSS:
3489 return "TripletLoss";
3490
3491 case LayerType.TV_LOSS:
3492 return "TVLoss";
3493
3494 // DEPRECATED
3495 case LayerType.LSTM_SIMPLE:
3496 return "LstmSimple";
3497
3498 case LayerType.LSTM_ATTENTION:
3499 return "LstmAttention";
3500
3501 case LayerType.RNN:
3502 return "Rnn";
3503
3504 case LayerType.LSTM:
3505 return "Lstm";
3506
3507 case LayerType.LSTM_UNIT:
3508 return "Lstm_Unit";
3509
3510 case LayerType.VIDEO_DATA:
3511 return "VideoData";
3512
3513 case LayerType.VARSELNET:
3514 return "VarSelNet";
3515
3516 default:
3517 return "Unknown";
3518 }
3519 }
3520
3522 public override RawProto ToProto(string strName)
3523 {
3524 RawProtoCollection rgChildren = new RawProtoCollection();
3525
3526 rgChildren.Add("name", name, RawProto.TYPE.STRING);
3527 rgChildren.Add("type", getTypeString(type), RawProto.TYPE.STRING);
3528 rgChildren.Add<string>("bottom", bottom);
3529 rgChildren.Add<string>("top", top);
3530 rgChildren.Add<double>("loss_weight", loss_weight);
3531
3532 if (group_start)
3533 rgChildren.Add("group_start", group_start.ToString());
3534
3535 if (freeze_learning)
3536 rgChildren.Add("freeze_learning", freeze_learning.ToString());
3537
3538 if (connect_loss_event)
3539 rgChildren.Add("connect_loss_event", connect_loss_event.ToString());
3540
3541 if (use_halfsize)
3542 rgChildren.Add("use_halfsize", use_halfsize.ToString());
3543
3544 foreach (ParamSpec ps in parameters)
3545 {
3546 rgChildren.Add(ps.ToProto("param"));
3547 }
3548
3549 foreach (BlobProto bp in blobs)
3550 {
3551 rgChildren.Add(bp.ToProto("blobs"));
3552 }
3553
3554 rgChildren.Add<bool>("propagate_down", propagate_down);
3555
3556 foreach (NetStateRule nsr in include)
3557 {
3558 rgChildren.Add(nsr.ToProto("include"));
3559 }
3560
3561 foreach (NetStateRule nsr in exclude)
3562 {
3563 rgChildren.Add(nsr.ToProto("exclude"));
3564 }
3565
3566 foreach (KeyValuePair<Phase, int> kv in m_rgMaxBottomCount)
3567 {
3568 RawProtoCollection prChildren = new RawProtoCollection();
3569 prChildren.Add("phase", kv.Key.ToString());
3570 prChildren.Add("count", kv.Value.ToString());
3571 RawProto prMaxBottomCount = new RawProto("max_bottom_count", "", prChildren);
3572 rgChildren.Add(prMaxBottomCount);
3573 }
3574
3575 List<KeyValuePair<BaseParameter, string>> rgParam = new List<KeyValuePair<BaseParameter,string>>();
3576
3577 // Standard layers.
3578 rgParam.Add(new KeyValuePair<BaseParameter,string>(transform_param, "transform_param"));
3579 rgParam.Add(new KeyValuePair<BaseParameter,string>(loss_param, "loss_param"));
3580 rgParam.Add(new KeyValuePair<BaseParameter,string>(accuracy_param, "accuracy_param"));
3581 rgParam.Add(new KeyValuePair<BaseParameter, string>(argmax_param, "argmax_param"));
3582 rgParam.Add(new KeyValuePair<BaseParameter, string>(batch_norm_param, "batch_norm_param"));
3583 rgParam.Add(new KeyValuePair<BaseParameter, string>(bias_param, "bias_param"));
3584 rgParam.Add(new KeyValuePair<BaseParameter, string>(clip_param, "clip_param"));
3585 rgParam.Add(new KeyValuePair<BaseParameter, string>(concat_param, "concat_param"));
3586 rgParam.Add(new KeyValuePair<BaseParameter, string>(constant_param, "constant_param"));
3587 rgParam.Add(new KeyValuePair<BaseParameter, string>(contrastive_loss_param, "contrastive_loss_param"));
3588 rgParam.Add(new KeyValuePair<BaseParameter, string>(convolution_param, "convolution_param"));
3589 rgParam.Add(new KeyValuePair<BaseParameter, string>(crop_param, "crop_param"));
3590 rgParam.Add(new KeyValuePair<BaseParameter, string>(data_param, "data_param"));
3591 rgParam.Add(new KeyValuePair<BaseParameter, string>(debug_param, "debug_param"));
3592 rgParam.Add(new KeyValuePair<BaseParameter, string>(dropout_param, "dropout_param"));
3593 rgParam.Add(new KeyValuePair<BaseParameter, string>(dummy_data_param, "dummy_data_param"));
3594 rgParam.Add(new KeyValuePair<BaseParameter, string>(eltwise_param, "eltwise_param"));
3595 rgParam.Add(new KeyValuePair<BaseParameter, string>(elu_param, "elu_param"));
3596 rgParam.Add(new KeyValuePair<BaseParameter, string>(embed_param, "embed_param"));
3597 rgParam.Add(new KeyValuePair<BaseParameter, string>(exp_param, "exp_param"));
3598 rgParam.Add(new KeyValuePair<BaseParameter, string>(flatten_param, "flatten_param"));
3599 rgParam.Add(new KeyValuePair<BaseParameter, string>(gradient_scale_param, "gradient_scale_param"));
3600 rgParam.Add(new KeyValuePair<BaseParameter, string>(hinge_loss_param, "hinge_loss_param"));
3601 rgParam.Add(new KeyValuePair<BaseParameter, string>(image_data_param, "image_data_param"));
3602 rgParam.Add(new KeyValuePair<BaseParameter, string>(infogain_loss_param, "infogain_loss_param"));
3603 rgParam.Add(new KeyValuePair<BaseParameter, string>(inner_product_param, "inner_product_param"));
3604 rgParam.Add(new KeyValuePair<BaseParameter, string>(input_param, "input_param"));
3605 rgParam.Add(new KeyValuePair<BaseParameter, string>(labelmapping_param, "labelmapping_param"));
3606 rgParam.Add(new KeyValuePair<BaseParameter, string>(log_param, "log_param"));
3607 rgParam.Add(new KeyValuePair<BaseParameter, string>(lrn_param, "lrn_param"));
3608 rgParam.Add(new KeyValuePair<BaseParameter, string>(memory_data_param, "memory_data_param"));
3609 rgParam.Add(new KeyValuePair<BaseParameter, string>(mvn_param, "mvn_param"));
3610 rgParam.Add(new KeyValuePair<BaseParameter, string>(pooling_param, "pooling_param"));
3611 rgParam.Add(new KeyValuePair<BaseParameter, string>(parameter_param, "parameter_param"));
3612 rgParam.Add(new KeyValuePair<BaseParameter, string>(power_param, "power_param"));
3613 rgParam.Add(new KeyValuePair<BaseParameter, string>(prelu_param, "prelu_param"));
3614 rgParam.Add(new KeyValuePair<BaseParameter, string>(reduction_param, "reduction_param"));
3615 rgParam.Add(new KeyValuePair<BaseParameter, string>(relu_param, "relu_param"));
3616 rgParam.Add(new KeyValuePair<BaseParameter, string>(reshape_param, "reshape_param"));
3617 rgParam.Add(new KeyValuePair<BaseParameter, string>(scale_param, "scale_param"));
3618 rgParam.Add(new KeyValuePair<BaseParameter, string>(sigmoid_param, "sigmoid_param"));
3619 rgParam.Add(new KeyValuePair<BaseParameter, string>(softmax_param, "softmax_param"));
3620 rgParam.Add(new KeyValuePair<BaseParameter, string>(spp_param, "spp_param"));
3621 rgParam.Add(new KeyValuePair<BaseParameter, string>(slice_param, "slice_param"));
3622 rgParam.Add(new KeyValuePair<BaseParameter, string>(swish_param, "swish_param"));
3623 rgParam.Add(new KeyValuePair<BaseParameter, string>(tanh_param, "tanh_param"));
3624 rgParam.Add(new KeyValuePair<BaseParameter, string>(threshold_param, "threshold_param"));
3625 rgParam.Add(new KeyValuePair<BaseParameter, string>(tile_param, "tile_param"));
3626 rgParam.Add(new KeyValuePair<BaseParameter, string>(lstm_simple_param, "lstm_simple_param")); // DEPRECATED
3627 rgParam.Add(new KeyValuePair<BaseParameter, string>(recurrent_param, "recurrent_param"));
3628
3629 // Alpha layers.
3630
3631 // Beta layers.
3632 rgParam.Add(new KeyValuePair<BaseParameter, string>(attention_param, "attention_param"));
3633 rgParam.Add(new KeyValuePair<BaseParameter, string>(convolution_octave_param, "convolution_octave_param"));
3634 rgParam.Add(new KeyValuePair<BaseParameter, string>(data_sequence_param, "data_sequence_param"));
3635 rgParam.Add(new KeyValuePair<BaseParameter, string>(decode_param, "decode_param"));
3636 rgParam.Add(new KeyValuePair<BaseParameter, string>(gather_param, "gather_param"));
3637 rgParam.Add(new KeyValuePair<BaseParameter, string>(interp_param, "interp_param"));
3638 rgParam.Add(new KeyValuePair<BaseParameter, string>(knn_param, "knn_param"));
3639 rgParam.Add(new KeyValuePair<BaseParameter, string>(lstm_attention_param, "lstm_attention_param"));
3640 rgParam.Add(new KeyValuePair<BaseParameter, string>(mean_error_loss_param, "mean_error_loss_param"));
3641 rgParam.Add(new KeyValuePair<BaseParameter, string>(merge_param, "merge_param"));
3642 rgParam.Add(new KeyValuePair<BaseParameter, string>(mish_param, "mish_param"));
3643 rgParam.Add(new KeyValuePair<BaseParameter, string>(normalization1_param, "normalization_param"));
3644 rgParam.Add(new KeyValuePair<BaseParameter, string>(serf_param, "serf_param"));
3645 rgParam.Add(new KeyValuePair<BaseParameter, string>(squeeze_param, "squeeze_param"));
3646 rgParam.Add(new KeyValuePair<BaseParameter, string>(model_data_param, "model_data_param"));
3647 rgParam.Add(new KeyValuePair<BaseParameter, string>(text_data_param, "text_data_param"));
3648 rgParam.Add(new KeyValuePair<BaseParameter, string>(triplet_loss_param, "triplet_loss_param"));
3649 rgParam.Add(new KeyValuePair<BaseParameter, string>(unpooling_param, "unpooling_param"));
3650 rgParam.Add(new KeyValuePair<BaseParameter, string>(transpose_param, "transpose_param"));
3651
3652 // HDF5 layers.
3653 rgParam.Add(new KeyValuePair<BaseParameter, string>(hdf5_data_param, "hdf5_data_param"));
3654
3655 // GPT layers.
3656 rgParam.Add(new KeyValuePair<BaseParameter, string>(causal_self_attention_param, "causal_self_attention_param"));
3657 rgParam.Add(new KeyValuePair<BaseParameter, string>(multihead_attention_param, "multihead_attention_param"));
3658 rgParam.Add(new KeyValuePair<BaseParameter, string>(positional_encoder_param, "positional_encoder_param"));
3659 rgParam.Add(new KeyValuePair<BaseParameter, string>(gelu_param, "gelu_param"));
3660 rgParam.Add(new KeyValuePair<BaseParameter, string>(layer_norm_param, "layer_norm_param"));
3661 rgParam.Add(new KeyValuePair<BaseParameter, string>(transformer_block_param, "transformer_block_param"));
3662 rgParam.Add(new KeyValuePair<BaseParameter, string>(tokenized_data_param, "tokenized_data_param"));
3663 rgParam.Add(new KeyValuePair<BaseParameter, string>(tokenized_data_pairs_param, "tokenized_data_pairs_param"));
3664 rgParam.Add(new KeyValuePair<BaseParameter, string>(nll_loss_param, "nll_loss_param"));
3665
3666 // TFT layers.
3667 rgParam.Add(new KeyValuePair<BaseParameter, string>(data_temporal_param, "data_temporal_param"));
3668 rgParam.Add(new KeyValuePair<BaseParameter, string>(categorical_trans_param, "categorical_trans_param"));
3669 rgParam.Add(new KeyValuePair<BaseParameter, string>(numeric_trans_param, "numeric_trans_param"));
3670 rgParam.Add(new KeyValuePair<BaseParameter, string>(gateaddnorm_param, "gateaddnorm_param"));
3671 rgParam.Add(new KeyValuePair<BaseParameter, string>(glu_param, "glu_param"));
3672 rgParam.Add(new KeyValuePair<BaseParameter, string>(grn_param, "grn_param"));
3673 rgParam.Add(new KeyValuePair<BaseParameter, string>(varselnet_param, "varselnet_param"));
3674 rgParam.Add(new KeyValuePair<BaseParameter, string>(multihead_attention_interp_param, "multihead_attention_interp_param"));
3675 rgParam.Add(new KeyValuePair<BaseParameter, string>(reshape_temporal_param, "reshape_temporal_param"));
3676 rgParam.Add(new KeyValuePair<BaseParameter, string>(quantile_loss_param, "quantile_loss_param"));
3677 rgParam.Add(new KeyValuePair<BaseParameter, string>(quantile_accuracy_param, "quantile_accuracy_param"));
3678
3679 // LNN layers.
3680 rgParam.Add(new KeyValuePair<BaseParameter, string>(cfc_param, "cfc_param"));
3681 rgParam.Add(new KeyValuePair<BaseParameter, string>(cfc_unit_param, "cfc_unit_param"));
3682 rgParam.Add(new KeyValuePair<BaseParameter, string>(ltc_unit_param, "ltc_unit_param"));
3683
3684 // Nt layers.
3685 rgParam.Add(new KeyValuePair<BaseParameter, string>(gram_param, "gram_param"));
3686 rgParam.Add(new KeyValuePair<BaseParameter, string>(onehot_param, "onehot_param"));
3687 rgParam.Add(new KeyValuePair<BaseParameter, string>(scalar_param, "scalar_param"));
3688 rgParam.Add(new KeyValuePair<BaseParameter, string>(tv_loss_param, "tv_loss_param"));
3689
3690 // Ssd layers.
3691 rgParam.Add(new KeyValuePair<BaseParameter, string>(annotated_data_param, "annotated_data_param"));
3692 rgParam.Add(new KeyValuePair<BaseParameter, string>(detection_evaluate_param, "detection_evaluate_param"));
3693 rgParam.Add(new KeyValuePair<BaseParameter, string>(detection_output_param, "detection_output_param"));
3694 rgParam.Add(new KeyValuePair<BaseParameter, string>(multiboxloss_param, "multiboxloss_param"));
3695 rgParam.Add(new KeyValuePair<BaseParameter, string>(normalization2_param, "normalization2_param"));
3696 rgParam.Add(new KeyValuePair<BaseParameter, string>(permute_param, "permute_param"));
3697 rgParam.Add(new KeyValuePair<BaseParameter, string>(prior_box_param, "prior_box_param"));
3698 rgParam.Add(new KeyValuePair<BaseParameter, string>(video_data_param, "video_data_param"));
3699
3700 foreach (KeyValuePair<BaseParameter, string> kv in rgParam)
3701 {
3702 if (kv.Key != null)
3703 rgChildren.Add(kv.Key.ToProto(kv.Value));
3704 }
3705
3706 return new RawProto(strName, "", rgChildren);
3707 }
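A hedged sketch of rendering a layer to prototxt text. It assumes RawProto.ToString() emits the prototxt form (RawProto is documented as parsing and outputting prototxt data); the "prob" layer is illustrative only:

    LayerParameter p = new LayerParameter(LayerParameter.LayerType.SOFTMAX, "prob");
    p.bottom.Add("ip2");
    p.top.Add("prob");
    RawProto proto = p.ToProto("layer");
    string strPrototxt = proto.ToString(); // e.g. layer { name: "prob" type: "Softmax" ... }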
3708
3714 public static LayerParameter FromProto(RawProto rp)
3715 {
3716 string strVal;
3717 string strName = null;
3718 LayerType layerType;
3719
3720 if ((strVal = rp.FindValue("name")) != null)
3721 strName = strVal;
3722
3723 if ((strVal = rp.FindValue("type")) == null)
3724 throw new Exception("No layer type specified!");
3725
3726 layerType = parseLayerType(strVal);
3727
3728 LayerParameter p = new LayerParameter(layerType, strName);
3729
3730 p.bottom = rp.FindArray<string>("bottom");
3731 for (int i = 0; i < p.bottom.Count; i++)
3732 {
3733 p.bottom[i] = p.bottom[i].Trim('\"', ' ');
3734 }
3735 p.top = rp.FindArray<string>("top");
3736 for (int i = 0; i < p.top.Count; i++)
3737 {
3738 p.top[i] = p.top[i].Trim('\"', ' ');
3739 }
3740
3741 if ((strVal = rp.FindValue("phase")) != null)
3742 p.phase = parsePhase(strVal);
3743
3744 p.loss_weight = rp.FindArray<double>("loss_weight");
3745
3746 if ((strVal = rp.FindValue("group_start")) != null)
3747 p.group_start = bool.Parse(strVal);
3748
3749 if ((strVal = rp.FindValue("freeze_learning")) != null)
3750 p.freeze_learning = bool.Parse(strVal);
3751
3752 if ((strVal = rp.FindValue("connect_loss_event")) != null)
3753 p.connect_loss_event = bool.Parse(strVal);
3754
3755 if ((strVal = rp.FindValue("use_halfsize")) != null)
3756 p.use_halfsize = bool.Parse(strVal);
3757
3758 RawProtoCollection rgrp;
3759
3760 rgrp = rp.FindChildren("param");
3761 foreach (RawProto rpChild in rgrp)
3762 {
3763 p.parameters.Add(ParamSpec.FromProto(rpChild));
3764 }
3765
3766 rgrp = rp.FindChildren("blobs");
3767 foreach (RawProto rpChild in rgrp)
3768 {
3769 p.blobs.Add(BlobProto.FromProto(rpChild));
3770 }
3771
3772 p.propagate_down = rp.FindArray<bool>("propagate_down");
3773
3774 rgrp = rp.FindChildren("include");
3775 foreach (RawProto rpChild in rgrp)
3776 {
3777 p.include.Add(NetStateRule.FromProto(rpChild));
3778 }
3779
3780 rgrp = rp.FindChildren("exclude");
3781 foreach (RawProto rpChild in rgrp)
3782 {
3783 p.exclude.Add(NetStateRule.FromProto(rpChild));
3784 }
3785
3786 rgrp = rp.FindChildren("max_bottom_count");
3787 foreach (RawProto rpChild in rgrp)
3788 {
3789 RawProto prPhase = rpChild.FindChild("phase");
3790 if (prPhase != null)
3791 {
3792 Phase phase = parsePhase(prPhase.Value);
3793 if (!p.m_rgMaxBottomCount.ContainsKey(phase))
3794 {
3795 RawProto prCount = rpChild.FindChild("count");
3796 if (prCount != null)
3797 p.m_rgMaxBottomCount.Add(phase, int.Parse(prCount.Value));
3798 }
3799 }
3800 }
3801
3802 RawProto rpp;
3803
3804 // Standard layers
3805 if ((rpp = rp.FindChild("transform_param")) != null)
3806 p.transform_param = TransformationParameter.FromProto(rpp);
3807
3808 if ((rpp = rp.FindChild("loss_param")) != null)
3809 p.loss_param = LossParameter.FromProto(rpp);
3810
3811 if ((rpp = rp.FindChild("accuracy_param")) != null)
3812 p.accuracy_param = AccuracyParameter.FromProto(rpp);
3813
3814 if ((rpp = rp.FindChild("argmax_param")) != null)
3815 p.argmax_param = ArgMaxParameter.FromProto(rpp);
3816
3817 if ((rpp = rp.FindChild("batch_norm_param")) != null)
3818 p.batch_norm_param = BatchNormParameter.FromProto(rpp);
3819
3820 if ((rpp = rp.FindChild("bias_param")) != null)
3821 p.bias_param = BiasParameter.FromProto(rpp);
3822
3823 if ((rpp = rp.FindChild("clip_param")) != null)
3824 p.clip_param = ClipParameter.FromProto(rpp);
3825
3826 if ((rpp = rp.FindChild("concat_param")) != null)
3827 p.concat_param = ConcatParameter.FromProto(rpp);
3828
3829 if ((rpp = rp.FindChild("constant_param")) != null)
3830 p.constant_param = ConstantParameter.FromProto(rpp);
3831
3832 if ((rpp = rp.FindChild("contrastive_loss_param")) != null)
3833 p.contrastive_loss_param = ContrastiveLossParameter.FromProto(rpp);
3834
3835 if ((rpp = rp.FindChild("convolution_param")) != null)
3836 p.convolution_param = ConvolutionParameter.FromProto(rpp);
3837
3838 if ((rpp = rp.FindChild("convolution_octave_param")) != null)
3839 p.convolution_octave_param = ConvolutionOctaveParameter.FromProto(rpp);
3840
3841 if ((rpp = rp.FindChild("crop_param")) != null)
3842 p.crop_param = CropParameter.FromProto(rpp);
3843
3844 if ((rpp = rp.FindChild("data_param")) != null)
3845 p.data_param = DataParameter.FromProto(rpp);
3846
3847 if ((rpp = rp.FindChild("debug_param")) != null)
3848 p.debug_param = DebugParameter.FromProto(rpp);
3849
3850 if ((rpp = rp.FindChild("dropout_param")) != null)
3851 p.dropout_param = DropoutParameter.FromProto(rpp);
3852
3853 if ((rpp = rp.FindChild("dummy_data_param")) != null)
3854 p.dummy_data_param = DummyDataParameter.FromProto(rpp);
3855
3856 if ((rpp = rp.FindChild("eltwise_param")) != null)
3857 p.eltwise_param = EltwiseParameter.FromProto(rpp);
3858
3859 if ((rpp = rp.FindChild("elu_param")) != null)
3860 p.elu_param = EluParameter.FromProto(rpp);
3861
3862 if ((rpp = rp.FindChild("embed_param")) != null)
3863 p.embed_param = EmbedParameter.FromProto(rpp);
3864
3865 if ((rpp = rp.FindChild("exp_param")) != null)
3866 p.exp_param = ExpParameter.FromProto(rpp);
3867
3868 if ((rpp = rp.FindChild("flatten_param")) != null)
3869 p.flatten_param = FlattenParameter.FromProto(rpp);
3870
3871 if ((rpp = rp.FindChild("gradient_scale_param")) != null)
3872 p.gradient_scale_param = GradientScaleParameter.FromProto(rpp);
3873
3874 if ((rpp = rp.FindChild("hinge_loss_param")) != null)
3875 p.hinge_loss_param = HingeLossParameter.FromProto(rpp);
3876
3877 if ((rpp = rp.FindChild("image_data_param")) != null)
3878 p.image_data_param = ImageDataParameter.FromProto(rpp);
3879
3880 if ((rpp = rp.FindChild("infogain_loss_param")) != null)
3881 p.infogain_loss_param = InfogainLossParameter.FromProto(rpp);
3882
3883 if ((rpp = rp.FindChild("inner_product_param")) != null)
3884 p.inner_product_param = InnerProductParameter.FromProto(rpp);
3885
3886 if ((rpp = rp.FindChild("input_param")) != null)
3887 p.input_param = InputParameter.FromProto(rpp);
3888
3889 if ((rpp = rp.FindChild("labelmapping_param")) != null)
3890 p.labelmapping_param = LabelMappingParameter.FromProto(rpp);
3891
3892 if ((rpp = rp.FindChild("log_param")) != null)
3893 p.log_param = LogParameter.FromProto(rpp);
3894
3895 if ((rpp = rp.FindChild("lrn_param")) != null)
3896 p.lrn_param = LRNParameter.FromProto(rpp);
3897
3898 if ((rpp = rp.FindChild("mean_error_loss_param")) != null)
3899 p.mean_error_loss_param = MeanErrorLossParameter.FromProto(rpp);
3900
3901 if ((rpp = rp.FindChild("memory_data_param")) != null)
3902 p.memory_data_param = MemoryDataParameter.FromProto(rpp);
3903
3904 if ((rpp = rp.FindChild("mvn_param")) != null)
3905 p.mvn_param = MVNParameter.FromProto(rpp);
3906
3907 if ((rpp = rp.FindChild("pooling_param")) != null)
3908 p.pooling_param = PoolingParameter.FromProto(rpp);
3909
3910 if ((rpp = rp.FindChild("parameter_param")) != null)
3911 p.parameter_param = ParameterParameter.FromProto(rpp);
3912
3913 if ((rpp = rp.FindChild("power_param")) != null)
3914 p.power_param = PowerParameter.FromProto(rpp);
3915
3916 if ((rpp = rp.FindChild("prelu_param")) != null)
3917 p.prelu_param = PReLUParameter.FromProto(rpp);
3918
3919 if ((rpp = rp.FindChild("reduction_param")) != null)
3920 p.reduction_param = ReductionParameter.FromProto(rpp);
3921
3922 if ((rpp = rp.FindChild("relu_param")) != null)
3923 p.relu_param = ReLUParameter.FromProto(rpp);
3924
3925 if ((rpp = rp.FindChild("reshape_param")) != null)
3926 p.reshape_param = ReshapeParameter.FromProto(rpp);
3927
3928 if ((rpp = rp.FindChild("scale_param")) != null)
3929 p.scale_param = ScaleParameter.FromProto(rpp);
3930
3931 if ((rpp = rp.FindChild("sigmoid_param")) != null)
3932 p.sigmoid_param = SigmoidParameter.FromProto(rpp);
3933
3934 if ((rpp = rp.FindChild("softmax_param")) != null)
3935 p.softmax_param = SoftmaxParameter.FromProto(rpp);
3936
3937 if ((rpp = rp.FindChild("spp_param")) != null)
3938 p.spp_param = SPPParameter.FromProto(rpp);
3939
3940 if ((rpp = rp.FindChild("slice_param")) != null)
3941 p.slice_param = SliceParameter.FromProto(rpp);
3942
3943 if ((rpp = rp.FindChild("swish_param")) != null)
3944 p.swish_param = SwishParameter.FromProto(rpp);
3945
3946 if ((rpp = rp.FindChild("tanh_param")) != null)
3947 p.tanh_param = TanhParameter.FromProto(rpp);
3948
3949 if ((rpp = rp.FindChild("threshold_param")) != null)
3950 p.threshold_param = ThresholdParameter.FromProto(rpp);
3951
3952 if ((rpp = rp.FindChild("tile_param")) != null)
3953 p.tile_param = TileParameter.FromProto(rpp);
3954
3955 // DEPRECATED
3956 if ((rpp = rp.FindChild("lstm_simple_param")) != null)
3957 p.lstm_simple_param = LSTMSimpleParameter.FromProto(rpp);
3958
3959 if ((rpp = rp.FindChild("recurrent_param")) != null)
3960 p.recurrent_param = RecurrentParameter.FromProto(rpp);
3961
3962 // Alpha layers
3963
3964 // Beta layers.
3965 if ((rpp = rp.FindChild("attention_param")) != null)
3966 p.attention_param = AttentionParameter.FromProto(rpp);
3967
3968 if ((rpp = rp.FindChild("data_sequence_param")) != null)
3969 p.data_sequence_param = DataSequenceParameter.FromProto(rpp);
3970
3971 if ((rpp = rp.FindChild("decode_param")) != null)
3972 p.decode_param = DecodeParameter.FromProto(rpp);
3973
3974 if ((rpp = rp.FindChild("gather_param")) != null)
3975 p.gather_param = GatherParameter.FromProto(rpp);
3976
3977 if ((rpp = rp.FindChild("interp_param")) != null)
3978 p.interp_param = InterpParameter.FromProto(rpp);
3979
3980 if ((rpp = rp.FindChild("knn_param")) != null)
3981 p.knn_param = KnnParameter.FromProto(rpp);
3982
3983 if ((rpp = rp.FindChild("lstm_attention_param")) != null)
3984 p.lstm_attention_param = LSTMAttentionParameter.FromProto(rpp);
3985
3986 if ((rpp = rp.FindChild("merge_param")) != null)
3987 p.merge_param = MergeParameter.FromProto(rpp);
3988
3989 if ((rpp = rp.FindChild("mish_param")) != null)
3990 p.mish_param = MishParameter.FromProto(rpp);
3991
3992 if ((rpp = rp.FindChild("normalization_param")) != null)
3993 p.normalization1_param = Normalization1Parameter.FromProto(rpp);
3994
3995 if ((rpp = rp.FindChild("serf_param")) != null)
3996 p.serf_param = SerfParameter.FromProto(rpp);
3997
3998 if ((rpp = rp.FindChild("squeeze_param")) != null)
3999 p.squeeze_param = SqueezeParameter.FromProto(rpp);
4000
4001 if ((rpp = rp.FindChild("model_data_param")) != null)
4002 p.model_data_param = ModelDataParameter.FromProto(rpp);
4003
4004 if ((rpp = rp.FindChild("text_data_param")) != null)
4005 p.text_data_param = TextDataParameter.FromProto(rpp);
4006
4007 if ((rpp = rp.FindChild("triplet_loss_param")) != null)
4008 p.triplet_loss_param = TripletLossParameter.FromProto(rpp);
4009
4010 if ((rpp = rp.FindChild("transpose_param")) != null)
4011 p.transpose_param = TransposeParameter.FromProto(rpp);
4012
4013 if ((rpp = rp.FindChild("unpooling_param")) != null)
4014 p.unpooling_param = UnPoolingParameter.FromProto(rpp);
4015
4016 // HDF5 layers.
4017 if ((rpp = rp.FindChild("hdf5_data_param")) != null)
4018 p.hdf5_data_param = HDF5DataParameter.FromProto(rpp);
4019
4020 // GPT layers.
4021 if ((rpp = rp.FindChild("causal_self_attention_param")) != null)
4022 p.causal_self_attention_param = CausalSelfAttentionParameter.FromProto(rpp);
4023
4024 if ((rpp = rp.FindChild("multihead_attention_param")) != null)
4025 p.multihead_attention_param = MultiheadAttentionParameter.FromProto(rpp);
4026
4027 if ((rpp = rp.FindChild("positional_encoder_param")) != null)
4028 p.positional_encoder_param = PositionalEncoderParameter.FromProto(rpp);
4029
4030 if ((rpp = rp.FindChild("gelu_param")) != null)
4031 p.gelu_param = GeluParameter.FromProto(rpp);
4032
4033 if ((rpp = rp.FindChild("layer_norm_param")) != null)
4034 p.layer_norm_param = LayerNormParameter.FromProto(rpp);
4035
4036 if ((rpp = rp.FindChild("transformer_block_param")) != null)
4037 p.transformer_block_param = TransformerBlockParameter.FromProto(rpp);
4038
4039 if ((rpp = rp.FindChild("tokenized_data_param")) != null)
4040 p.tokenized_data_param = TokenizedDataParameter.FromProto(rpp);
4041
4042 if ((rpp = rp.FindChild("tokenized_data_pairs_param")) != null)
4043 p.tokenized_data_pairs_param = TokenizedDataPairsParameter.FromProto(rpp);
4044
4045 if ((rpp = rp.FindChild("nll_loss_param")) != null)
4046 p.nll_loss_param = NLLLossParameter.FromProto(rpp);
4047
4048 // TFT layers.
4049 if ((rpp = rp.FindChild("data_temporal_param")) != null)
4050 p.data_temporal_param = DataTemporalParameter.FromProto(rpp);
4051
4052 if ((rpp = rp.FindChild("categorical_trans_param")) != null)
4053 p.categorical_trans_param = CategoricalTransformationParameter.FromProto(rpp);
4054
4055 if ((rpp = rp.FindChild("numeric_trans_param")) != null)
4056 p.numeric_trans_param = NumericTransformationParameter.FromProto(rpp);
4057
4058 if ((rpp = rp.FindChild("gateaddnorm_param")) != null)
4059 p.gateaddnorm_param = GateAddNormParameter.FromProto(rpp);
4060
4061 if ((rpp = rp.FindChild("glu_param")) != null)
4062 p.glu_param = GluParameter.FromProto(rpp);
4063
4064 if ((rpp = rp.FindChild("grn_param")) != null)
4065 p.grn_param = GrnParameter.FromProto(rpp);
4066
4067 if ((rpp = rp.FindChild("varselnet_param")) != null)
4068 p.varselnet_param = VarSelNetParameter.FromProto(rpp);
4069
4070 if ((rpp = rp.FindChild("multihead_attention_interp_param")) != null)
4071 p.multihead_attention_interp_param = MultiHeadAttentionInterpParameter.FromProto(rpp);
4072
4073 if ((rpp = rp.FindChild("reshape_temporal_param")) != null)
4074 p.reshape_temporal_param = ReshapeTemporalParameter.FromProto(rpp);
4075
4076 if ((rpp = rp.FindChild("quantile_loss_param")) != null)
4077 p.quantile_loss_param = QuantileLossParameter.FromProto(rpp);
4078
4079 if ((rpp = rp.FindChild("quantile_accuracy_param")) != null)
4080 p.quantile_accuracy_param = QuantileAccuracyParameter.FromProto(rpp);
4081
4082 // LNN layers.
4083
4084 if ((rpp = rp.FindChild("cfc_param")) != null)
4085 p.cfc_param = CfcParameter.FromProto(rpp);
4086
4087 if ((rpp = rp.FindChild("cfc_unit_param")) != null)
4088 p.cfc_unit_param = CfcUnitParameter.FromProto(rpp);
4089
4090 if ((rpp = rp.FindChild("ltc_unit_param")) != null)
4091 p.ltc_unit_param = LtcUnitParameter.FromProto(rpp);
4092
4093 // Nt layers.
4094 if ((rpp = rp.FindChild("gram_param")) != null)
4095 p.gram_param = GramParameter.FromProto(rpp);
4096
4097 if ((rpp = rp.FindChild("onehot_param")) != null)
4098 p.onehot_param = OneHotParameter.FromProto(rpp);
4099
4100 if ((rpp = rp.FindChild("scalar_param")) != null)
4101 p.scalar_param = ScalarParameter.FromProto(rpp);
4102
4103 if ((rpp = rp.FindChild("tv_loss_param")) != null)
4104 p.tv_loss_param = TVLossParameter.FromProto(rpp);
4105
4106 // Ssd layers.
4107 if ((rpp = rp.FindChild("annotated_data_param")) != null)
4108 p.annotated_data_param = AnnotatedDataParameter.FromProto(rpp);
4109
4110 if ((rpp = rp.FindChild("detection_evaluate_param")) != null)
4111 p.detection_evaluate_param = DetectionEvaluateParameter.FromProto(rpp);
4112
4113 if ((rpp = rp.FindChild("detection_output_param")) != null)
4114 p.detection_output_param = DetectionOutputParameter.FromProto(rpp);
4115
4116 if ((rpp = rp.FindChild("multiboxloss_param")) != null)
4117 p.multiboxloss_param = MultiBoxLossParameter.FromProto(rpp);
4118
4119 if ((rpp = rp.FindChild("normalization2_param")) != null)
4120 p.normalization2_param = Normalization2Parameter.FromProto(rpp);
4121
4122 if ((rpp = rp.FindChild("permute_param")) != null)
4123 p.permute_param = PermuteParameter.FromProto(rpp);
4124
4125 if ((rpp = rp.FindChild("prior_box_param")) != null)
4126 p.prior_box_param = PriorBoxParameter.FromProto(rpp);
4127
4128 if ((rpp = rp.FindChild("video_data_param")) != null)
4129 p.video_data_param = VideoDataParameter.FromProto(rpp);
4130
4131 return p;
4132 }
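The reverse direction, again as a sketch; RawProto.Parse is assumed here to be the entry point that turns prototxt text into a RawProto tree, with FindChild used to reach the layer node:

    string strLayer = "layer { name: \"prob\" type: \"Softmax\" bottom: \"ip2\" top: \"prob\" }";
    RawProto rpRoot = RawProto.Parse(strLayer);
    LayerParameter p = LayerParameter.FromProto(rpRoot.FindChild("layer"));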
4133
4134 private static Phase parsePhase(string strVal)
4135 {
4136 switch (strVal)
4137 {
4138 case "TEST":
4139 return Phase.TEST;
4140
4141 case "TRAIN":
4142 return Phase.TRAIN;
4143
4144 case "RUN":
4145 return Phase.RUN;
4146
4147 case "NONE":
4148 return Phase.NONE;
4149
4150 default:
4151 throw new Exception("Unknown 'phase' value: " + strVal);
4152 }
4153 }
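parsePhase backs both the layer-level "phase" value and the "max_bottom_count" children read in FromProto above; on the prototxt side the bare enum names are used. A hypothetical fragment:

    string strFragment =
        "layer { name: \"data\" type: \"Data\" top: \"data\" " +
        "phase: TRAIN max_bottom_count { phase: RUN count: 1 } }";
    // "TEST", "TRAIN", "RUN" and "NONE" parse; any other value throws.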
4154
4160 public static LayerType? GetType(string strType)
4161 {
4162 try
4163 {
4164 return parseLayerType(strType);
4165 }
4166 catch (Exception)
4167 {
4168 return null;
4169 }
4170 }
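GetType wraps parseLayerType in a try/catch, giving a non-throwing way to probe a type string before use:

    LayerParameter.LayerType? type = LayerParameter.GetType("softmaxwithloss");
    if (type.HasValue)
        System.Console.WriteLine("Parsed: " + type.Value); // SOFTMAXWITH_LOSS
    else
        System.Console.WriteLine("Unknown layer type string.");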
4171
4172 private static LayerType parseLayerType(string str)
4173 {
4174 str = str.ToLower();
4175
4176 switch (str)
4177 {
4178 case "absval":
4179 return LayerType.ABSVAL;
4180
4181 case "accuracy":
4182 return LayerType.ACCURACY;
4183
4184 case "accuracydecode":
4185 case "accuracy_decode":
4186 return LayerType.ACCURACY_DECODE;
4187
4188 case "accuracyencoding":
4189 case "accuracy_encoding":
4190 return LayerType.ACCURACY_ENCODING;
4191
4192 case "argmax":
4193 return LayerType.ARGMAX;
4194
4195 case "annotateddata":
4196 return LayerType.ANNOTATED_DATA;
4197
4198 case "attention":
4199 return LayerType.ATTENTION;
4200
4201 case "batchnorm":
4202 return LayerType.BATCHNORM;
4203
4204 case "batchreindex":
4205 return LayerType.BATCHREINDEX;
4206
4207 case "bias":
4208 return LayerType.BIAS;
4209
4210 case "bnll":
4211 return LayerType.BNLL;
4212
4213 case "categoricaltrans":
4214 case "categorical_trans":
4215 return LayerType.CATEGORICAL_TRANS;
4216
4217 case "clip":
4218 return LayerType.CLIP;
4219
4220 case "causalselfattention":
4221 return LayerType.CAUSAL_SELF_ATTENTION;
4222
4223 case "channelembedding":
4224 return LayerType.CHANNEL_EMBEDDING;
4225
4226 case "cfc":
4227 return LayerType.CFC;
4228
4229 case "cfc_unit":
4230 return LayerType.CFC_UNIT;
4231
4232 case "concat":
4233 return LayerType.CONCAT;
4234
4235 case "constant":
4236 return LayerType.CONSTANT;
4237
4238 case "contrastiveloss":
4239 case "contrastive_loss":
4240 return LayerType.CONTRASTIVE_LOSS;
4241
4242 case "convolution":
4243 return LayerType.CONVOLUTION;
4244
4245 case "convolutionoctave":
4246 case "convolution_octave":
4247 return LayerType.CONVOLUTION_OCTAVE;
4248
4249 case "crop":
4250 return LayerType.CROP;
4251
4252 case "copy":
4253 return LayerType.COPY;
4254
4255 case "decode":
4256 return LayerType.DECODE;
4257
4258 case "data":
4259 return LayerType.DATA;
4260
4261 case "datanormalizer":
4262 case "data_normalizer":
4263 return LayerType.DATA_NORMALIZER;
4264
4265 case "datasequence":
4266 case "data_sequence":
4267 return LayerType.DATA_SEQUENCE;
4268
4269 case "datatemporal":
4270 case "data_temporal":
4271 return LayerType.DATA_TEMPORAL;
4272
4273 case "debug":
4274 return LayerType.DEBUG;
4275
4276 case "deconvolution":
4277 return LayerType.DECONVOLUTION;
4278
4279 case "detectionevaluate":
4280 case "detection_evaluate":
4281 return LayerType.DETECTION_EVALUATE;
4282
4283 case "detectionoutput":
4284 case "detection_output":
4285 return LayerType.DETECTION_OUTPUT;
4286
4287 case "dropout":
4288 return LayerType.DROPOUT;
4289
4290 case "dummydata":
4291 return LayerType.DUMMYDATA;
4292
4293 case "eltwise":
4294 return LayerType.ELTWISE;
4295
4296 case "elu":
4297 return LayerType.ELU;
4298
4299 case "embed":
4300 return LayerType.EMBED;
4301
4302 case "euclideanloss":
4303 case "euclidean_loss":
4304 return LayerType.EUCLIDEAN_LOSS;
4305
4306 case "event":
4307 return LayerType.EVENT;
4308
4309 case "exp":
4310 return LayerType.EXP;
4311
4312 case "filter":
4313 return LayerType.FILTER;
4314
4315 case "flatten":
4316 return LayerType.FLATTEN;
4317
4318 case "gather":
4319 return LayerType.GATHER;
4320
4321 case "gateaddnorm":
4322 return LayerType.GATEADDNORM;
4323
4324 case "gelu":
4325 return LayerType.GELU;
4326
4327 case "glu":
4328 return LayerType.GLU;
4329
4330 case "grn":
4331 return LayerType.GRN;
4332
4333 case "globresnet": case "globresnorm": // also accept the spelling emitted by getTypeString ("GlobResNorm")
4334 return LayerType.GLOBRES_NORM;
4335
4336 case "gsl":
4337 return LayerType.GRADIENTSCALER;
4338
4339 case "gram":
4340 return LayerType.GRAM;
4341
4342 case "hdf5data":
4343 return LayerType.HDF5_DATA;
4344
4345// case "hdf5output":
4346// return LayerType.HDF5OUTPUT;
4347
4348 case "hingeloss":
4349 case "hinge_loss":
4350 return LayerType.HINGE_LOSS;
4351
4352 case "im2col":
4353 return LayerType.IM2COL;
4354
4355 case "imagedata":
4356 return LayerType.IMAGE_DATA;
4357
4358 case "infogainloss":
4359 case "infogain_loss":
4360 return LayerType.INFOGAIN_LOSS;
4361
4362 case "innerproduct":
4363 case "inner_product":
4364 return LayerType.INNERPRODUCT;
4365
4366 case "input":
4367 return LayerType.INPUT;
4368
4369 case "interp":
4370 return LayerType.INTERP;
4371
4372 case "knn":
4373 return LayerType.KNN;
4374
4375 case "labelmapping":
4376 return LayerType.LABELMAPPING;
4377
4378 case "layernorm":
4379 return LayerType.LAYERNORM;
4380
4381 case "lecun":
4382 return LayerType.LECUN;
4383
4384 case "log":
4385 return LayerType.LOG;
4386
4387 case "lrn":
4388 return LayerType.LRN;
4389
4390 case "ltc_unit":
4391 return LayerType.LTC_UNIT;
4392
4393 case "mean_error_loss":
4394 case "meanerrorloss":
4395 return LayerType.MEAN_ERROR_LOSS;
4396
4397 case "math":
4398 return LayerType.MATH;
4399
4400 case "merge":
4401 return LayerType.MERGE;
4402
4403 case "memorydata":
4404 return LayerType.MEMORYDATA;
4405
4406 case "multiboxloss":
4407 case "multibox_loss":
4408 return LayerType.MULTIBOX_LOSS;
4409
4410 case "multiheadattention":
4411 return LayerType.MULTIHEAD_ATTENTION;
4412
4413 case "multiheadattentioninterp":
4414 return LayerType.MULTIHEAD_ATTENTION_INTERP;
4415
4416 case "memoryloss":
4417 case "memory_loss":
4418 return LayerType.MEMORY_LOSS;
4419
4420 case "mish":
4421 return LayerType.MISH;
4422
4423 case "multinomiallogisticloss":
4424 case "multinomiallogistic_loss":
4425 return LayerType.MULTINOMIALLOGISTIC_LOSS;
4426
4427 case "mvn":
4428 return LayerType.MVN;
4429
4430 case "nllloss":
4431 case "nll_loss":
4432 return LayerType.NLL_LOSS;
4433
4434 case "numerictrans":
4435 case "numeric_trans":
4436 return LayerType.NUMERIC_TRANS;
4437
4438 case "onehot":
4439 return LayerType.ONEHOT;
4440
4441 case "normalization1":
4442 return LayerType.NORMALIZATION1;
4443
4444 case "normalize":
4445 case "normalization2":
4446 return LayerType.NORMALIZATION2;
4447
4448 case "parameter":
4449 return LayerType.PARAMETER;
4450
4451 case "permute":
4452 return LayerType.PERMUTE;
4453
4454 case "positionalencoder":
4455 return LayerType.POSITIONAL_ENCODER;
4456
4457 case "pooling":
4458 return LayerType.POOLING;
4459
4460 case "unpooling1":
4461 return LayerType.UNPOOLING1;
4462
4463 case "unpooling":
4464 return LayerType.UNPOOLING;
4465
4466 case "power":
4467 return LayerType.POWER;
4468
4469 case "prelu":
4470 return LayerType.PRELU;
4471
4472 case "priorbox":
4473 return LayerType.PRIORBOX;
4474
4475 case "quantileaccuracy":
4476 case "quantile_accuracy":
4477 return LayerType.QUANTILE_ACCURACY;
4478
4479 case "quantileloss":
4480 case "quantile_loss":
4481 return LayerType.QUANTILE_LOSS;
4482
4483 case "reduction":
4484 return LayerType.REDUCTION;
4485
4486 case "relu":
4487 return LayerType.RELU;
4488
4489 case "reshape":
4490 return LayerType.RESHAPE;
4491
4492 case "reshapetemporal":
4493 return LayerType.RESHAPE_TEMPORAL;
4494
4495 case "squeeze":
4496 return LayerType.SQUEEZE;
4497
4498 case "unsqueeze":
4499 return LayerType.UNSQUEEZE;
4500
4501 case "scalar":
4502 return LayerType.SCALAR;
4503
4504 case "scale":
4505 return LayerType.SCALE;
4506
4507 case "serf":
4508 return LayerType.SERF;
4509
4510 case "sigmoid":
4511 return LayerType.SIGMOID;
4512
4513 case "sigmoidcrossentropyloss":
4514 case "sigmoidcrossentropy_loss":
4515 return LayerType.SIGMOIDCROSSENTROPY_LOSS;
4516
4517 case "silu":
4518 return LayerType.SILU;
4519
4520 case "softplus":
4521 return LayerType.SOFTPLUS;
4522
4523 case "softmaxcrossentropyloss":
4524 case "softmaxcrossentropy_loss":
4525 return LayerType.SOFTMAXCROSSENTROPY_LOSS;
4526
4527 case "softmaxcrossentropy2loss":
4528 case "softmaxcrossentropy2_loss":
4529 return LayerType.SOFTMAXCROSSENTROPY2_LOSS;
4530
4531 case "silence":
4532 return LayerType.SILENCE;
4533
4534 case "slice":
4535 return LayerType.SLICE;
4536
4537 case "softmax":
4538 return LayerType.SOFTMAX;
4539
4540 case "softmaxwithloss":
4541 case "softmaxwith_loss":
4542 case "softmax_loss":
4543 return LayerType.SOFTMAXWITH_LOSS;
4544
4545 case "smoothl1loss":
4546 case "smoothl1_loss":
4547 return LayerType.SMOOTHL1_LOSS;
4548
4549 case "split":
4550 return LayerType.SPLIT;
4551
4552 case "spp":
4553 return LayerType.SPP;
4554
4555 case "swish":
4556 return LayerType.SWISH;
4557
4558 case "tanh":
4559 return LayerType.TANH;
4560
4561 case "modeldata":
4562 case "model_data":
4563 return LayerType.MODEL_DATA;
4564
4565 case "textdata":
4566 case "text_data":
4567 return LayerType.TEXT_DATA;
4568
4569 case "threshold":
4570 return LayerType.THRESHOLD;
4571
4572 case "tile":
4573 return LayerType.TILE;
4574
4575 case "transpose":
4576 return LayerType.TRANSPOSE;
4577
4578 case "transformerblock":
4579 return LayerType.TRANSFORMER_BLOCK;
4580
4581 case "tokenizeddata":
4582 return LayerType.TOKENIZED_DATA;
4583
4584 case "tokenizeddatapairs":
4585 return LayerType.TOKENIZED_DATA_PAIRS;
4586
4587 case "tokenizeddatapairs_py":
4588 case "tokenizeddatapairspy":
4589 return LayerType.TOKENIZED_DATA_PAIRS_PY;
4590
4591 case "triplet_loss":
4592 case "tripletloss":
4593 return LayerType.TRIPLET_LOSS;
4594
4595 case "tvloss":
4596 case "tv_loss":
4597 return LayerType.TV_LOSS;
4598
4599 // case "windowdata":
4600 // return LayerType.WINDOWDATA;
4601
4602 // DEPRECATED
4603 case "lstmsimple":
4604 case "lstm_simple":
4605 return LayerType.LSTM_SIMPLE;
4606
4607 case "lstmattention":
4608 case "lstm_attention":
4609 return LayerType.LSTM_ATTENTION;
4610
4611 case "rnn":
4612 return LayerType.RNN;
4613
4614 case "lstm":
4615 return LayerType.LSTM;
4616
4617 case "lstm_unit":
4618 return LayerType.LSTM_UNIT;
4619
4620 case "videodata":
4621 case "video_data":
4622 return LayerType.VIDEO_DATA;
4623
4624 case "varselnet":
4625 return LayerType.VARSELNET;
4626
4627 default:
4628 throw new Exception("Unknown 'layertype' value: " + str);
4629 }
4630 }
4631
4636 public override string ToString()
4637 {
4638 string strOut = ((use_halfsize) ? "HALF " : "FULL ");
4639
4640 strOut += m_strName + " (" + m_type.ToString() + ")";
4641 strOut += " btm = " + Utility.ToString(m_rgstrBottom);
4642 strOut += " top = " + Utility.ToString(m_rgstrTop);
4643
4644 return strOut;
4645 }
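A sketch of the summary format; the exact list rendering comes from Utility.ToString, so the output shown is approximate:

    LayerParameter p = new LayerParameter(LayerParameter.LayerType.CONVOLUTION, "conv1");
    p.bottom.Add("data");
    p.top.Add("conv1");
    System.Console.WriteLine(p); // e.g. FULL conv1 (CONVOLUTION) btm = ... top = ...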
4646 }
4647}
The BaseParameter class is the base class for all other parameter classes.
virtual bool Compare(BaseParameter p)
Compare this parameter to another parameter.
The RawProtoCollection class is a list of RawProto objects.
void Add(RawProto p)
Adds a RawProto to the collection.
The RawProto class is used to parse and output Google prototxt file data.
Definition: RawProto.cs:17
TYPE
Defines the type of a RawProto node.
Definition: RawProto.cs:27
string Value
Get/set the value of the node.
Definition: RawProto.cs:79
RawProto FindChild(string strName)
Searches for a given node.
Definition: RawProto.cs:231
string FindValue(string strName)
Searches for a value of a node within this node's children.
Definition: RawProto.cs:105
RawProtoCollection FindChildren(params string[] rgstrName)
Searches for all children with a given name in this node's children.
Definition: RawProto.cs:263
The Utility class provides general utility functions.
Definition: Utility.cs:35
static void Save(BinaryWriter bw, List< double > rg)
Save a list of double to a binary writer.
Definition: Utility.cs:337
Specifies the parameters for the AccuracyLayer.
static AccuracyParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ArgMaxLayer
static ArgMaxParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
[DEPRECATED: use MultiHeadAttention layers instead.] Specifies the parameters for the AttentionLa...
static AttentionParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the BatchNormLayer.
static new BatchNormParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the BiasLayer
static BiasParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
The BlobProto contains the description of a blob.
Definition: BlobProto.cs:15
override RawProto ToProto(string strName)
Converts the BlobProto to a RawProto.
Definition: BlobProto.cs:219
static BlobProto FromProto(RawProto rp)
Parses a new BlobProto from a RawProto.
Definition: BlobProto.cs:243
Stores the parameters used by the ClipLayer
static ClipParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ConcatLayer
static ConcatParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ConstantLayer.
static ConstantParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ContrastiveLossLayer.
static ContrastiveLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ConvolutionOctaveLayer.
static ConvolutionOctaveParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ConvolutionLayer. The default weight filler is set to the XavierFill...
static new ConvolutionParameter FromProto(RawProto rp)
Parse a RawProto into a new instance of the parameter.
bool bias_term
Whether to have bias terms or not.
Specifies the parameters for the MyCaffe.CropLayer.
static CropParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameter for the data normalizer layer.
Specifies the parameter for the data layer.
DB
Defines the database type to use.
bool? enable_random_selection
(optional, default = null) Specifies whether or not to randomly query images from the data source....
DB backend
Specifies the backend database.
override LayerParameterBase Clone()
Creates a new copy of this instance of the parameter.
static DataParameter FromProto(RawProto rp, DataParameter p=null)
Parses the parameter from a RawProto.
Specifies the parameters used by the DebugLayer
static DebugParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters of the DropoutLayer.
static new DropoutParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
This layer produces N >= 1 top blobs. DummyDataParameter must specify 1 or N shape fields,...
static DummyDataParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the EltwiseLayer.
static EltwiseParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the EluLayer.
Definition: EluParameter.cs:20
static new EluParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters used by the EmbedLayer.
static EmbedParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ExpLayer.
Definition: ExpParameter.cs:26
static ExpParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the FlattenLayer.
static FlattenParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the GradientScaleLayer.
static GradientScaleParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameter for the HDF5 data layer.
static HDF5DataParameter FromProto(RawProto rp, HDF5DataParameter p=null)
Parses the parameter from a RawProto.
Specifies the parameters for the HingeLossLayer.
static HingeLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ImageDataLayer
static ImageDataParameter FromProto(RawProto rp, ImageDataParameter p=null)
Parses the parameter from a RawProto.
override LayerParameterBase Clone()
Creates a new copy of this instance of the parameter.
Specifies the parameters for the InfogainLossLayer.
static InfogainLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the InnerProductLayer.
bool bias_term
Whether to have bias terms or not.
static InnerProductParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the InputLayer.
static InputParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the InterpLayer.
static InterpParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameter for the LRNLayer.
Definition: LRNParameter.cs:20
static new LRNParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the LSTMAttentionLayer that provides an attention based LSTM layer used ...
static LSTMAttentionParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
[DEPRECATED - use LSTMAttentionParameter instead with enable_attention = false] Specifies the parame...
static LSTMSimpleParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
[DEPRECATED: use the DataLayer DataLabelMappingParameter instead.] Specifies the parameters for the Lab...
static LabelMappingParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
The LayerParameterBase is the base class for all other layer specific parameters.
abstract LayerParameterBase Clone()
Creates a new copy of this instance of the parameter.
Specifies the base parameter for all layers.
InterpParameter interp_param
Returns the parameter set when initializing the LayerType.INTERP
Normalization2Parameter normalization2_param
Returns the parameter set when initialized with LayerType.NORMALIZATION2
ConvolutionParameter convolution_param
Returns the parameter set when initialized with LayerType.CONVOLUTION
SerfParameter serf_param
Returns the parameter set when initialized with LayerType.SERF
ParameterParameter parameter_param
Returns the parameter set when initialized with LayerType.PARAMETER
CfcParameter cfc_param
Returns the parameter set when initialized with LayerType.CFC
LayerParameter()
Constructor for the parameter.
TokenizedDataParameter tokenized_data_param
Returns the parameter set when initialized with LayerType.TOKENIZED_DATA
List< ParamSpec > parameters
Specifies the ParamSpec parameters of the LayerParameter.
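In practice one ParamSpec is added per learnable blob. A minimal sketch, assuming the Caffe-style lr_mult learning-rate multiplier property on ParamSpec (not shown in this listing):

using MyCaffe.param;

// Hypothetical usage: one ParamSpec per learnable blob (weights, then bias).
LayerParameter ip = new LayerParameter(LayerParameter.LayerType.INNERPRODUCT, "ip1");
ip.parameters.Add(new ParamSpec() { lr_mult = 1.0 }); // weight multiplier (assumed property)
ip.parameters.Add(new ParamSpec() { lr_mult = 2.0 }); // bias multiplier (assumed property)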
SliceParameter slice_param
Returns the parameter set when initialized with LayerType.SLICE
ONNX_CONVERSION_SUPPORT m_onnxConversionSupport
Specifies the level of conversion support for the layer.
LogParameter log_param
Returns the parameter set when initialized with LayerType.LOG
void PrepareRunModel()
Prepare the layer settings for a run model.
string name
Specifies the name of this LayerParameter.
List< double > loss_weight
Specifies the loss weight.
ClipParameter clip_param
Returns the parameter set when initialized with LayerType.CLIP
TileParameter tile_param
Returns the parameter set when initialized with LayerType.TILE
LayerType type
Specifies the type of this LayerParameter.
TripletLossParameter triplet_loss_param
Returns the parameter set when initialized with LayerType.TRIPLET_LOSS
ConstantParameter constant_param
Returns the parameter set when initialized with LayerType.CONSTANT
MeanErrorLossParameter mean_error_loss_param
Returns the parameter set when initialized with LayerType.MEAN_ERROR_LOSS
DetectionOutputParameter detection_output_param
Returns the parameter set when initialized with LayerType.DETECTION_OUTPUT
MultiBoxLossParameter multiboxloss_param
Returns the parameter set when initialized with LayerType.MULTIBOX_LOSS
LtcUnitParameter ltc_unit_param
Returns the parameter set when initialized with LayerType.LTC_UNIT
SoftmaxParameter softmax_param
Returns the parameter set when initialized with LayerType.SOFTMAX
LRNParameter lrn_param
Returns the parameter set when initialized with LayerType.LRN
void CopyDefaults(LayerParameter p)
Copies the defaults from another LayerParameter.
ONNX_CONVERSION_SUPPORT onnx_conversion_support
Returns the level of ONNX conversion support.
List< bool > propagate_down
Specifies whether or not the LayerParameter (or portions of it) should be backpropagated.
MultiheadAttentionParameter multihead_attention_param
Returns the parameter set when initialized with LayerType.MULTIHEAD_ATTENTION
LSTMSimpleParameter lstm_simple_param
[DEPRECATED] Returns the parameter set when initialized with LayerType.LSTM_SIMPLE
SqueezeParameter squeeze_param
Returns the parameter set when initialized with LayerType.SQUEEZE
List< NetStateRule > include
Specifies the NetStateRule's for which this LayerParameter should be included.
MathParameter math_param
Returns the parameter set when initialized with LayerType.MATH
SPPParameter spp_param
Returns the parameter set when initialized with LayerType.SPP
ScaleParameter scale_param
Returns the parameter set when initialized with LayerType.SCALE
LayerNormParameter layer_norm_param
Returns the parameter set when initialized with LayerType.LAYERNORM
bool MeetsPhase(Phase phase)
Determines whether or not this LayerParameter meets a given Phase.
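A minimal sketch of how include rules and MeetsPhase interact, assuming a NetStateRule(Phase) constructor:

using MyCaffe.basecode;
using MyCaffe.param;

// Restrict a dropout layer to the TRAIN phase, then query the rule.
LayerParameter drop = new LayerParameter(LayerParameter.LayerType.DROPOUT, "drop1");
drop.include.Add(new NetStateRule(Phase.TRAIN)); // NetStateRule(Phase) constructor assumed
bool bTrain = drop.MeetsPhase(Phase.TRAIN);      // expected: true
bool bTest = drop.MeetsPhase(Phase.TEST);        // expected: false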
int GetParameterCount()
Returns the number of ParamSpec parameters used by the layer.
OneHotParameter onehot_param
Returns the parameter set when initialized with LayerType.ONEHOT
ThresholdParameter threshold_param
Returns the parameter set when initialized with LayerType.THRESHOLD
ReLUParameter relu_param
Returns the parameter set when initialized with LayerType.RELU
void clear_blobs()
Clears the collection of Blobs used by this layer.
bool freeze_learning
Get/set whether or not to freeze the learning for this layer globally.
bool group_start
Specifies whether or not this node is the start of a new group - this is only used when rendering mod...
List< NetStateRule > exclude
Specifies the NetStateRule's for which this LayerParameter should be excluded.
UnPoolingParameter unpooling_param
Returns the parameter set when initialized with LayerType.UNPOOLING
List< string > expected_top
Returns a list of expected top connections (in the bottom, out the top).
ArgMaxParameter argmax_param
Returns the parameter set when initialized with LayerType.ARGMAX
ReductionParameter reduction_param
Returns the parameter set when initialized with LayerType.REDUCTION
PoolingParameter pooling_param
Returns the parameter set when initialized with LayerType.POOLING
LayerParameter(LayerType lt, string strName=null)
The LayerParameter constructor.
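A minimal construction sketch; the ConvolutionParameter field names (num_output, kernel_size) follow the standard Caffe proto conventions and are assumed here:

using MyCaffe.param;

// Build a convolution layer parameter and wire its blob connections.
LayerParameter conv = new LayerParameter(LayerParameter.LayerType.CONVOLUTION, "conv1");
conv.bottom.Add("data");                   // input blob name
conv.top.Add("conv1");                     // output blob name
conv.convolution_param.num_output = 96;    // assumed Caffe-style field
conv.convolution_param.kernel_size.Add(7); // assumed Caffe-style field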
SigmoidParameter sigmoid_param
Returns the parameter set when initialized with LayerType.SIGMOID
bool connect_loss_event
Get/set whether or not to connect the loss event to this layer.
TokenizedDataPairsParameter tokenized_data_pairs_param
Returns the parameter set when initialized with LayerType.TOKENIZED_DATA_PAIRS
bool use_halfsize
Specifies whether or not to use half-sized memory.
GramParameter gram_param
Returns the parameter set when initialized with LayerType.GRAM
ExpParameter exp_param
Returns the parameter set when initialized with LayerType.EXP
static LayerParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
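A round-trip sketch using only the members documented on this page (ToProto and FromProto):

using MyCaffe.basecode;
using MyCaffe.param;

// Serialize a layer parameter to a RawProto tree and parse it back.
LayerParameter p1 = new LayerParameter(LayerParameter.LayerType.RELU, "relu1");
RawProto proto = p1.ToProto("layer");                // RawProto node named "layer"
LayerParameter p2 = LayerParameter.FromProto(proto); // p2 mirrors p1's type, name and settings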
ContrastiveLossParameter contrastive_loss_param
Returns the parameter set when initialized with LayerType.CONTRASTIVE_LOSS
DebugParameter debug_param
Returns the parameter set when initialized with LayerType.DEBUG
GluParameter glu_param
Returns the parameter set when initialized with LayerType.GLU
GrnParameter grn_param
Returns the parameter set when initialized with LayerType.GRN
DummyDataParameter dummy_data_param
Returns the parameter set when initialized with LayerType.DUMMYDATA
PositionalEncoderParameter positional_encoder_param
Returns the parameter set when initialized with LayerType.POSITIONAL_ENCODER
PermuteParameter permute_param
Returns the parameter set when initialized with LayerType.PERMUTE
EltwiseParameter eltwise_param
Returns the parameter set when initialized with LayerType.ELTWISE
void SetType(LayerType type, bool bNewParam=true)
Set the layer type.
MemoryDataParameter memory_data_param
Returns the parameter set when initialized with LayerType.MEMORY_DATA
MultiHeadAttentionInterpParameter multihead_attention_interp_param
Returns the parameter set when initialized with LayerType.MULTIHEAD_ATTENTION_INTERP
NLLLossParameter nll_loss_param
Returns the parameter set when initialized with LayerType.NLL_LOSS
PriorBoxParameter prior_box_param
Returns the parameter set when initialized with LayerType.PRIORBOX
CategoricalTransformationParameter categorical_trans_param
Returns the parameter set when initialized with LayerType.CATEGORICAL_TRANS
InputParameter input_param
Returns the parameter set when initialized with LayerType.INPUT
QuantileAccuracyParameter quantile_accuracy_param
Returns the parameter set when initialized with LayerType.QUANTILE_ACCURACY
PowerParameter power_param
Returns the parameter set when initialized with LayerType.POWER
List< string > expected_bottom
Returns a list of expected bottom connections (in the bottom, out the top).
List< string > top
Specifies the active top connections (in the bottom, out the top).
ReshapeParameter reshape_param
Returns the parameter set when initialized with LayerType.RESHAPE
int solver_count
Returns the number of Solvers participating in a multi-GPU session for which the Solver using this La...
EluParameter elu_param
Returns the parameter set when initialized with LayerType.ELU
MergeParameter merge_param
Returns the parameter set when initialized with LayerType.MERGE
EmbedParameter embed_param
Returns the parameter set when initialized with LayerType.EMBED
ReshapeTemporalParameter reshape_temporal_param
Returns the parameter set when initialized with LayerType.RESHAPE_TEMPORAL
GradientScaleParameter gradient_scale_param
Returns the parameter set when initialized with LayerType.GRADIENTSCALER
TextDataParameter text_data_param
Returns the parameter set when initialized with LayerType.TEXT_DATA
AnnotatedDataParameter annotated_data_param
Returns the parameter set when initialized with LayerType.ANNOTATED_DATA
HingeLossParameter hinge_loss_param
Returns the parameter set when initialized with LayerType.HINGE_LOSS
CausalSelfAttentionParameter causal_self_attention_param
Returns the parameter set when initialized with LayerType.CAUSAL_SELF_ATTENTION
int CompareTo(object obj)
Compares this LayerParameter to another object.
DataSequenceParameter data_sequence_param
Returns the parameter set when initialized with LayerType.DATA_SEQUENCE
BiasParameter bias_param
Returns the parameter set when initialized with LayerType.BIAS
DetectionEvaluateParameter detection_evaluate_param
Returns the parameter set when initialized with LayerType.DETECTION_EVALUATE
GateAddNormParameter gateaddnorm_param
Returns the parameter set when initialized with LayerType.GATEADDNORM
TVLossParameter tv_loss_param
Returns the parameter set when initialized with LayerType.TV_LOSS
NumericTransformationParameter numeric_trans_param
Returns the parameter set when initialized with LayerType.NUMERIC_TRANS
DataNormalizerParameter data_normalizer_param
Returns the parameter set when initialized with LayerType.DATA_NORMALIZER
int solver_rank
Returns the SolverRank of the Solver using this LayerParameter (if any).
InnerProductParameter inner_product_param
Returns the parameter set when initialized with LayerType.INNERPRODUCT
AccuracyParameter accuracy_param
Returns the parameter set when initialized with LayerType.ACCURACY
ConcatParameter concat_param
Returns the parameter set when initialized with LayerType.CONCAT
GatherParameter gather_param
Returns the parameter set when initialized with LayerType.GATHER
HDF5DataParameter hdf5_data_param
Returns the parameter set when initialized with LayerType.HDF5_DATA
TransformerBlockParameter transformer_block_param
Returns the parameter set when initialized with LayerType.TRANSFORMER_BLOCK
MVNParameter mvn_param
Returns the parameter set when initialized with LayerType.MVN
void CopyParameters(LayerParameter src)
Copy just the layer specific parameters to this layer parameter.
CfcUnitParameter cfc_unit_param
Returns the parameter set when initialized with LayerType.CFC_UNIT
string PrepareRunModelInputs()
Prepare model inputs for the run-net (if any are needed for the layer).
KnnParameter knn_param
Returns the parameter set when initialized with LayerType.KNN
QuantileLossParameter quantile_loss_param
Returns the parameter set when initialized with LayerType.QUANTILE_LOSS
MishParameter mish_param
Returns the parameter set when initialized with LayerType.MISH
object Load(BinaryReader br, bool bNewInstance)
Load the parameter from a binary reader.
ScalarParameter scalar_param
Returns the parameter set when initialized with LayerType.SCALAR
DataTemporalParameter data_temporal_param
Returns the parameter set when initialized with LayerType.DATA_TEMPORAL
TransformationParameter transform_param
Returns the parameter set when initialized with LayerType.TRANSFORM
TransposeParameter transpose_param
Returns the parameter set when initialized with LayerType.TRANSPOSE
Normalization1Parameter normalization1_param
Returns the parameter set when initialized with LayerType.NORMALIZATION1
DataParameter data_param
Returns the parameter set when initialized with LayerType.DATA
VarSelNetParameter varselnet_param
Returns the parameter set when initialized with LayerType.VARSELNET
AttentionParameter attention_param
Returns the parameter set when initialized with LayerType.ATTENTION
FlattenParameter flatten_param
Returns the parameter set when initialized with LayerType.FLATTEN
VideoDataParameter video_data_param
Returns the parameter set when initialized with LayerType.VIDEO_DATA
Phase phase
Specifies the Phase for which this LayerParameter is run.
ConvolutionOctaveParameter convolution_octave_param
Returns the parameter set when initialized with LayerType.CONVOLUTION_OCTAVE
DecodeParameter decode_param
Returns the parameter set when initialized with LayerType.DECODE or LayerType.ACCURACY_ENCODING.
RecurrentParameter recurrent_param
Returns the parameter set when initialized with LayerType.RECURRENT
BatchNormParameter batch_norm_param
Returns the parameter set when initialized with LayerType.BATCHNORM
List< string > bottom
Specifies the active bottom connections (in the bottom, out the top).
LayerType
Specifies the layer type.
override RawProto ToProto(string strName)
Converts the LayerParameter into a RawProto.
void Save(BinaryWriter bw)
Save this parameter to a binary writer.
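Save and Load form the IBinaryPersist pair noted at the end of this listing; an in-memory sketch (the stream handling is illustrative, not from the source):

using System.IO;
using System.Text;
using MyCaffe.param;

LayerParameter src = new LayerParameter(LayerParameter.LayerType.POOLING, "pool1");
using (MemoryStream ms = new MemoryStream())
{
    using (BinaryWriter bw = new BinaryWriter(ms, Encoding.UTF8, true))
        src.Save(bw);                       // persist the parameter
    ms.Position = 0;                        // rewind for reading
    using (BinaryReader br = new BinaryReader(ms))
    {
        LayerParameter dst = new LayerParameter(LayerParameter.LayerType.POOLING);
        object loaded = dst.Load(br, true); // bNewInstance = true returns a new instance
    }
}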
override string ToString()
Returns a string representation of the LayerParameter.
static LayerType? GetType(string strType)
Converts the string type into a LayerType, or null if no match is found.
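Because the return type is nullable, callers should check for null; a small sketch (the accepted string casing is an assumption):

using System;
using MyCaffe.param;

LayerParameter.LayerType? type = LayerParameter.GetType("softmax"); // casing assumed
if (type.HasValue)
    Console.WriteLine("Parsed type: " + type.Value);
else
    Console.WriteLine("No matching LayerType.");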
SwishParameter swish_param
Returns the parameter set when initialized with LayerType.SWISH
GeluParameter gelu_param
Returns the parameter set when initialized with LayerType.GELU
DropoutParameter dropout_param
Returns the parameter set when initialized with LayerType.DROPOUT
LSTMAttentionParameter lstm_attention_param
Returns the parameter set when initialized with LayerType.LSTM_ATTENTION
InfogainLossParameter infogain_loss_param
Returns the parameter set when initialized with LayerType.INFOGAIN_LOSS
PReLUParameter prelu_param
Returns the parameter set when initialized with LayerType.PRELU
LossParameter loss_param
Returns the parameter set when initialized with LayerType.LOSS
ModelDataParameter model_data_param
Returns the parameter set when initialized with LayerType.MODEL_DATA
ImageDataParameter image_data_param
Returns the parameter set when initialized with LayerType.IMAGE_DATA
LabelMappingParameter labelmapping_param
Returns the parameter set when initialized with LayerType.LABELMAPPING
List< BlobProto > blobs
Specifies the blobs of the LayerParameter.
virtual LayerParameter Clone(bool bCloneBlobs)
Creates a new copy of this instance of the parameter.
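The bCloneBlobs flag controls whether any stored blobs are copied along with the settings; a short sketch:

using MyCaffe.param;

LayerParameter original = new LayerParameter(LayerParameter.LayerType.INNERPRODUCT, "ip1");
LayerParameter cfgOnly = original.Clone(false); // settings only, skips blob data
LayerParameter full = original.Clone(true);     // settings plus any stored blobs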
TanhParameter tanh_param
Returns the parameter set when initialized with LayerType.TANH
ONNX_CONVERSION_SUPPORT
Defines whether a layer node has ONNX conversion support or not.
CropParameter crop_param
Returns the parameter set when initialized with LayerType.CROP
LayerParameter(LayerParameter p)
The LayerParameter constructor.
Specifies the parameters for the LogLayer.
Definition: LogParameter.cs:26
static LogParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Stores the parameters used by loss layers.
NormalizationMode
How to normalize the loss for loss layers that aggregate across batches, spatial dimensions,...
static LossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the MVNLayer.
Definition: MVNParameter.cs:21
static MVNParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the MathLayer.
Specifies the parameters for the MeanErrorLossLayer.
static MeanErrorLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters used by the MemoryDataLayer.
override LayerParameterBase Clone()
Creates a new copy of this instance of the parameter.
static MemoryDataParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameter for the model data layer.
static ModelDataParameter FromProto(RawProto rp, ModelDataParameter p=null)
Parses the parameter from a RawProto.
Specifies a NetStateRule used to determine whether a Net falls within a given include or exclude patt...
Definition: NetStateRule.cs:20
override RawProto ToProto(string strName)
Converts a NetStateRule into a RawProto.
static NetStateRule FromProto(RawProto rp)
Parses a RawProto representing a NetStateRule and creates a new instance of a NetStateRule from it.
Phase phase
Set phase to require the NetState to have a particular phase (TRAIN or TEST) to meet this rule.
Definition: NetStateRule.cs:99
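A hedged sketch of the NetStateRule round trip, again assuming the NetStateRule(Phase) constructor:

using MyCaffe.basecode;
using MyCaffe.param;

NetStateRule rule = new NetStateRule(Phase.TEST); // constructor assumed
RawProto rp = rule.ToProto("include");            // serialize the rule
NetStateRule rule2 = NetStateRule.FromProto(rp);  // parse it back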
Specifies the parameters for the PReLULayer.
static PReLUParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies training parameters (multipliers on global learning constants, and the name and other settin...
Definition: ParamSpec.cs:19
override RawProto ToProto(string strName)
Converts the ParamSpec into a RawProto.
Definition: ParamSpec.cs:194
static ParamSpec FromProto(RawProto rp)
Parses a new ParamSpec from a RawProto.
Definition: ParamSpec.cs:217
Specifies the parameters for the ParameterLayer.
static ParameterParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the PoolingLayer.
static new PoolingParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the PowerLayer.
static PowerParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ReLULayer.
static new ReLUParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters used by the RecurrentLayer.
static new RecurrentParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters used by ReductionLayer.
static ReductionParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ReshapeLayer.
static ReshapeParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
The SPPParameter specifies the parameters for the SPPLayer.
Definition: SPPParameter.cs:21
static new SPPParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ScaleLayer.
static new ScaleParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the SigmoidLayer.
static new SigmoidParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the SliceLayer.
static SliceParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the SoftmaxLayer.
static new SoftmaxParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Stores the parameters used by the SwishLayer.
static new SwishParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the TanhLayer.
static new TanhParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameter for the Text data layer.
static TextDataParameter FromProto(RawProto rp, TextDataParameter p=null)
Parses the parameter from a RawProto.
Stores the parameters used by the ThresholdLayer.
static ThresholdParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters used by the TileLayer.
static TileParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Stores parameters used to apply transformation to the data layer's data.
override LayerParameterBase Clone()
Creates a new copy of this instance of the parameter.
static TransformationParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the DataSequenceLayer.
static DataSequenceParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the DecodeLayer and the AccuracyEncodingLayer.
static DecodeParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the GatherLayer.
static GatherParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the KnnLayer.
Definition: KnnParameter.cs:22
static KnnParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the MergeLayer.
static MergeParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Stores the parameters used by the MishLayer.
static new MishParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the Normalization1Layer.
static Normalization1Parameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Stores the parameters used by the SerfLayer.
static new SerfParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the SqueezeLayer.
static SqueezeParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the TransposeLayer.
static TransposeParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the TripletLossLayer.
static TripletLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the UnPoolingLayer.
static new UnPoolingParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the CausalSelfAttentionLayer.
static CausalSelfAttentionParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the GeluLayer.
static GeluParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the LayerNormalizationLayer.
static LayerNormParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the MultiheadAttentionLayer.
static MultiheadAttentionParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the NLLLossLayer.
static NLLLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the PositionalEncoderLayer.
static PositionalEncoderParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the TokenizedDataPairsLayer.
static new TokenizedDataPairsParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the TokenizedDataLayer.
static TokenizedDataParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the TransformerBlockLayer.
static TransformerBlockParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters used by the CfcLayer. Note, you must also fill out the CfcUnitParameter.
Definition: CfcParameter.cs:21
static CfcParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the CfcUnitLayer used by the CfcLayer.
static CfcUnitParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the LtcUnitLayer used by the CfcLayer.
static LtcUnitParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the GramLayer.
static GramParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters used by the OneHotLayer.
static OneHotParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ScalarLayer.
static ScalarParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the TVLossLayer.
static TVLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the AnnotatedDataLayer.
override LayerParameterBase Clone()
Creates a new copy of this instance of the parameter.
static AnnotatedDataParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the DetectionEvaluateLayer.
static DetectionEvaluateParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the DetectionOutputLayer.
static DetectionOutputParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the MultiBoxLossLayer.
static MultiBoxLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the Normalization2Layer used in SSD.
static Normalization2Parameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the PermuteLayer.
static PermuteParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the PriorBoxLayer.
static PriorBoxParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the VideoDataLayer.
static VideoDataParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the CategoricalInputTransformationLayer.
static CategoricalTransformationParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the DataTemporalLayer (used in TFT models).
static DataTemporalParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the GateAddNormLayer (Gate Add Norm).
static GateAddNormParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the GluLayer (Gated Linear Unit).
Definition: GluParameter.cs:28
static GluParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the GrnLayer (Gated Residual Network).
Definition: GrnParameter.cs:28
static GrnParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the MultiHeadAttentionInterpLayer (Interpretable Multi-Head Attention La...
static MultiHeadAttentionInterpParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the NumericInputTransformationLayer.
static NumericTransformationParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the QuantileAccuracyLayer used in TFT models.
static QuantileAccuracyParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the QuantileLossLayer used in TFT models.
static QuantileLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ReshapeTemporalLayer.
static ReshapeTemporalParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the VarSelNetLayer (Variable Selection Network).
static VarSelNetParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
The IBinaryPersist interface provides generic save and load functionality.
Definition: Utility.cs:16
void Save(BinaryWriter bw)
Save to a binary writer.
object Load(BinaryReader br, bool bNewInstance=true)
Load from a binary reader.
The MyCaffe.basecode contains all generic types used throughout MyCaffe.
Definition: Annotation.cs:12
Phase
Defines the Phase under which to run a Net.
Definition: Interfaces.cs:61
@ DATA
Specifies a data gym that collects data from a data source, such as a database.
@ RECURRENT
Defines the recurrent training method.
@ NONE
No training category specified.
@ RNN
Run the trainer in RNN mode.
The MyCaffe.common namespace contains common MyCaffe classes.
Definition: BatchInput.cs:8
@ SOFTMAX
Specifies to use softmax.
@ TANH
Specifies to run the tanh function.
@ ATTENTION
The blob contains attention scores.
@ ACCURACY
The Blob holds Accuracy Data.
@ CLIP
The blob holds Clip data.
@ LOSS
The Blob holds Loss Data.
@ LSTM
Specifies to use a 4 gate LSTM Recurrent Learning unit.
@ LOG
Specifies to use the log algorithm.
@ BIAS
Bias weights are targeted.
The MyCaffe.param.beta parameters are used by the MyCaffe.layer.beta layers.
The MyCaffe.param.nt namespace defines the parameters used by the Neural Style Transfer layers.
The MyCaffe.param.ssd namespace contains all SSD related parameter objects that correspond to the nat...
The MyCaffe.param namespace contains parameters used to create models.
The MyCaffe namespace contains the main body of MyCaffe code that closely tracks the C++ Caffe open-...
Definition: Annotation.cs:12