MyCaffe  1.12.2.41
Deep learning software for Windows C# programmers.
MemoryDataLayer.cs
1using System;
2using System.Collections.Generic;
3using System.Linq;
4using System.Text;
5using System.Threading;
6using System.Diagnostics;
7using MyCaffe.basecode;
8using MyCaffe.db.image;
9using MyCaffe.param;
10using MyCaffe.common;
11using MyCaffe.data;
12
13namespace MyCaffe.layers
14{
// The MemoryDataLayer provides data to the Net directly from host memory via
// AddDatum/AddDatumVector/Reset; no bottom (input) blobs are used.
// NOTE(review): this file is a line-numbered extraction; the leading number on
// each code line is an artifact of the export, not part of the source.
20 public class MemoryDataLayer<T> : BaseDataLayer<T>
21 {
// Geometry of one data batch (read from memory_data_param in DataLayerSetUp).
22 int m_nBatchSize;
23 int m_nChannels;
24 int m_nHeight;
25 int m_nWidth;
// Geometry of the label blob (used only when a label top is produced).
26 int m_nLabelChannels = 0;
27 int m_nLabelHeight = 0;
28 int m_nLabelWidth = 0;
// Per-item element counts: channels * height * width for data and label.
29 int m_nDataSize;
30 int m_nLabelSize = 0;
// Clip blob shape (clip_length1 x clip_length2); both 0 means no clip top.
31 int m_nClipSize1 = 0;
32 int m_nClipSize2 = 0;
// Top indexes of the 'label' and 'clip' tops, resolved in DataLayerSetUp.
33 int m_nLabelIdx = -1;
34 int m_nClipIdx = -1;
// Host-side staging blobs copied into the tops on each forward pass.
35 Blob<T> m_blobData;
36 Blob<T> m_blobLabel;
37 Blob<T> m_blobClip = null;
// True from AddDatumVector/Copy until forward() has consumed all staged data.
38 bool m_bHasNewData;
// m_nPos: circular batch cursor into the staged data; m_nN: staged item count.
39 int m_nPos = 0;
40 int m_nN = 1;
41
// Fires on DataLayerSetUp and each time the data wraps around.
45 public event EventHandler<MemoryDataLayerGetDataArgs> OnGetData;
// Fires from AddDatumVector when clip data is supplied, letting the caller
// pack the data/clip/label blobs in the ordering expected by recurrent layers.
50 public event EventHandler<MemoryDataLayerPackDataArgs<T>> OnDataPack;
51
// The constructor.
// NOTE(review): the constructor signature line was dropped by the extraction;
// per the index below it is MemoryDataLayer(CudaDnn<T> cuda, Log log, LayerParameter p).
67 : base(cuda, log, p, null)
68 {
// Identify the layer type and create the host-side data/label staging blobs.
69 m_type = LayerParameter.LayerType.MEMORYDATA;
70 m_blobData = new Blob<T>(cuda, log);
71 m_blobData.Name = m_param.name + " data";
72 m_blobLabel = new Blob<T>(cuda, log);
73 m_blobLabel.Name = m_param.name + " label";
74 }
75
/// <summary>
/// Releases all GPU and host resources used by the Layer.
/// </summary>
protected override void dispose() => base.dispose();
81
// Setup the MemoryDataLayer: read the batch/data/label/clip geometry from
// memory_data_param, size the staging blobs, optionally pull initial data via
// OnGetData, and resolve which tops are the 'label' and 'clip' outputs.
// NOTE(review): the extraction dropped several source lines that contained
// cross-referenced symbols (original lines 97, 104, 108, 111, 131, 142, 166).
// These were guard 'if (...)' conditions or comments, so the orphaned '{'
// blocks below are conditional in the real source; confirm against the
// repository before editing this method.
87 protected override void DataLayerSetUp(BlobCollection<T> colBottom, BlobCollection<T> colTop)
88 {
// Cache the configured batch and data geometry; data size is C*H*W per item.
89 m_nBatchSize = (int)m_param.memory_data_param.batch_size;
90 m_nChannels = (int)m_param.memory_data_param.channels;
91 m_nHeight = (int)m_param.memory_data_param.height;
92 m_nWidth = (int)m_param.memory_data_param.width;
93 m_nDataSize = m_nChannels * m_nHeight * m_nWidth;
94
95 m_log.CHECK_GT(m_nBatchSize * m_nDataSize, 0, "batch_size, channels, height, and width must be specified and positive in memory_data_param.");
96
// (dropped guard at original line 97 - label geometry is only read when labels are in use)
98 {
99 m_nLabelChannels = (int)m_param.memory_data_param.label_channels;
100 m_nLabelHeight = (int)m_param.memory_data_param.label_height;
101 m_nLabelWidth = (int)m_param.memory_data_param.label_width;
102 m_nLabelSize = m_nLabelChannels * m_nLabelHeight * m_nLabelWidth;
103
// (dropped guard at 104 - per the message text this CHECK applies when label_type = MULTIPLE)
105 m_log.CHECK_GT(m_nBatchSize * m_nLabelSize, 0, "batch_size, label_channels, label_height, and label_width must be specified and positive in memory_data_param when using label_type = MULTIPLE.");
106 }
107
// Clip lengths (0 = unused); when only one clip dimension is set, the other
// defaults to 1 so the clip blob is always 2-D when present.
109 m_nClipSize1 = (int)m_param.memory_data_param.clip_length1;
110
112 m_nClipSize2 = (int)m_param.memory_data_param.clip_length2;
113
114 if (m_nClipSize1 > 1 && m_nClipSize2 == 0)
115 m_nClipSize2 = 1;
116
117 if (m_nClipSize2 > 1 && m_nClipSize1 == 0)
118 m_nClipSize1 = 1;
119
// Create the clip staging blob only when clipping is configured.
120 if (m_nClipSize1 > 0 || m_nClipSize2 > 0)
121 {
122 m_blobClip = new Blob<T>(m_cuda, m_log);
123 m_blobClip.Name = m_param.name + " clip";
124 m_blobClip.Reshape(new List<int>() { m_nClipSize1, m_nClipSize2 });
125 }
126
// If a data provider is attached, let it fill the staging blobs now and
// verify the label geometry it produced matches the configuration.
127 if (OnGetData != null)
128 {
129 OnGetData(this, new MemoryDataLayerGetDataArgs(true));
130
// (dropped guard at 131 - these label checks only apply when labels are in use)
132 {
// NOTE(review): the height/width messages below say 'label channels' /
// 'label_channels' - they look copy-pasted from the channels check
// (runtime strings left unchanged here).
133 m_log.CHECK_EQ(m_nLabelChannels, m_blobLabel.channels, "The actual label channels (" + m_blobLabel.channels.ToString() + ") do not match the 'memory_data_param.label_channels' setting of " + m_nLabelChannels.ToString() + ".");
134 m_log.CHECK_EQ(m_nLabelHeight, m_blobLabel.height, "The actual label channels (" + m_blobLabel.height.ToString() + ") do not match the 'memory_data_param.label_channels' setting of " + m_nLabelHeight.ToString() + ".");
135 m_log.CHECK_EQ(m_nLabelWidth, m_blobLabel.width, "The actual label channels (" + m_blobLabel.width.ToString() + ") do not match the 'memory_data_param.label_channels' setting of " + m_nLabelWidth.ToString() + ".");
136 }
137 }
138 else
139 {
// No provider attached: size the staging blobs from the configured geometry.
140 m_blobData.Reshape(m_nBatchSize, m_nChannels, m_nHeight, m_nWidth);
141
// (dropped guard at 142 - label reshape only applies when labels are in use)
143 {
// With SINGLE labels + clip (recurrent use) only one label is produced.
144 if (m_param.memory_data_param.label_type == LayerParameterBase.LABEL_TYPE.SINGLE && m_blobClip != null)
145 m_blobLabel.Reshape(1, m_nLabelChannels, m_nLabelHeight, m_nLabelWidth);
146 else
147 m_blobLabel.Reshape(m_nBatchSize, m_nLabelChannels, m_nLabelHeight, m_nLabelWidth);
148 }
149 }
150
// Shape the data top and sync the staging blob to the host.
151 colTop[0].Reshape(m_nBatchSize, m_nChannels, m_nHeight, m_nWidth);
152 m_blobData.update_cpu_data();
153
// Locate the 'label' and 'clip' tops by name.
154 int nLabelIdx = -1;
155 int nClipIdx = -1;
156
157 for (int i = 0; i < m_param.top.Count; i++)
158 {
159 if (m_param.top[i].ToLower() == "label")
160 nLabelIdx = i;
161
162 if (m_param.top[i].ToLower() == "clip")
163 nClipIdx = i;
164 }
165
// (dropped guard at 166 - label-top handling only applies when labels are in use)
167 {
// Default the label top to the slot after the clip top (or top[1]).
168 if (nLabelIdx == -1)
169 nLabelIdx = (nClipIdx == -1) ? 1 : nClipIdx + 1;
170
171 m_nLabelIdx = nLabelIdx;
172 colTop[nLabelIdx].ReshapeLike(m_blobLabel);
173 m_blobLabel.update_cpu_data();
174 }
175
// A configured clip blob requires an explicitly named 'clip' top.
176 if (m_blobClip != null)
177 {
178 if (nClipIdx == -1)
179 throw new Exception("Could not find a top named 'clip'!");
180
181 m_nClipIdx = nClipIdx;
182 colTop[nClipIdx].ReshapeLike(m_blobClip);
183 m_blobClip.update_cpu_data();
184 }
185 }
186
// Reshape the internal staging blobs and the top (output) blobs to the
// configured geometry.
// NOTE(review): the extraction dropped original line 201 - a guard 'if (...)'
// (label-use check) that owns the orphaned '{' block below; confirm against
// the repository before editing.
192 public override void Reshape(BlobCollection<T> colBottom, BlobCollection<T> colTop)
193 {
194 base.Reshape(colBottom, colTop);
195
196 m_blobData.Reshape(m_nBatchSize, m_nChannels, m_nHeight, m_nWidth);
197
198 colTop[0].Reshape(m_nBatchSize, m_nChannels, m_nHeight, m_nWidth);
199 m_blobData.update_cpu_data();
200
// (dropped guard - label reshape only applies when labels are in use)
202 {
// With SINGLE labels + clip (recurrent use) only one label is produced.
203 if (m_param.memory_data_param.label_type == LayerParameterBase.LABEL_TYPE.SINGLE && m_blobClip != null)
204 m_blobLabel.Reshape(1, m_nLabelChannels, m_nLabelHeight, m_nLabelWidth);
205 else
206 m_blobLabel.Reshape(m_nBatchSize, m_nLabelChannels, m_nLabelHeight, m_nLabelWidth);
207
208 colTop[m_nLabelIdx].ReshapeLike(m_blobLabel);
209 m_blobLabel.update_cpu_data();
210 }
211
// Keep the clip top in sync with the configured clip shape.
212 if (m_blobClip != null)
213 {
214 m_blobClip.Reshape(new List<int>() { m_nClipSize1, m_nClipSize2 });
215 colTop[m_nClipIdx].ReshapeLike(m_blobClip);
216 m_blobClip.update_cpu_data();
217 }
218 }
219
/// <summary>
/// No bottom (input) blobs are used by this layer.
/// </summary>
public override int ExactNumBottomBlobs => 0;
227
/// <summary>
/// Returns -1, indicating a variable number of top (output) Blobs.
/// </summary>
public override int ExactNumTopBlobs => -1;
235
/// <summary>
/// Returns the minimum number of top blobs: data.
/// </summary>
public override int MinTopBlobs => 1;
243
/// <summary>
/// Returns the maximum number of top blobs: data, clip, label.
/// </summary>
public override int MaxTopBlobs => 3;
251
/// <summary>
/// Add a single SimpleDatum to the memory by forwarding to AddDatumVector.
/// </summary>
/// <param name="sd">Specifies the SimpleDatum to add.</param>
/// <param name="nLblAxis">Optionally, specifies the axis on which multi-value labels are placed (default = 1).</param>
/// <param name="bReset">Optionally, specifies to discard any previously added, unconsumed data first (default = false).</param>
/// <param name="bResizeBatch">Optionally, specifies to resize the batch size to the added data count (default = false).</param>
public virtual void AddDatum(SimpleDatum sd, int nLblAxis = 1, bool bReset = false, bool bResizeBatch = false)
{
    AddDatumVector(new List<Datum>() { new Datum(sd) }, null, nLblAxis, bReset, bResizeBatch);
}
265
/// <summary>
/// Add an array of Datum (and optional clip data) to the memory by
/// forwarding to the List overload.
/// </summary>
/// <param name="rgData">Specifies the array of Datum to add.</param>
/// <param name="rgClip">Optionally, specifies the clip data, if any exists (default = null).</param>
/// <param name="nLblAxis">Optionally, specifies the axis on which multi-value labels are placed (default = 1).</param>
/// <param name="bReset">Optionally, specifies to discard any previously added, unconsumed data first (default = false).</param>
/// <param name="bResizeBatch">Optionally, specifies to resize the batch size to the added data count (default = false).</param>
public virtual void AddDatumVector(Datum[] rgData, Datum[] rgClip = null, int nLblAxis = 1, bool bReset = false, bool bResizeBatch = false)
{
    List<Datum> rgDataList = rgData.ToList();
    List<Datum> rgClipList = null;

    if (rgClip != null)
        rgClipList = rgClip.ToList();

    AddDatumVector(rgDataList, rgClipList, nLblAxis, bReset, bResizeBatch);
}
280
// Add a list of Datum to the memory: batch-align the item count, transform
// the data into m_blobData, and fill m_blobLabel; when clip data is given,
// packing is delegated to the OnDataPack event instead.
// NOTE(review): the extraction dropped original lines 326, 331, 340 and 352 -
// guard 'if (...)' conditions (label-type/clip checks), so the orphaned '{'
// blocks and the dangling 'nLabelNum = 1;' below are conditional in the real
// source; confirm against the repository before editing.
291 public virtual void AddDatumVector(List<Datum> rgData, List<Datum> rgClip = null, int nLblAxis = 1, bool bReset = false, bool bResizeBatch = false)
292 {
// Optionally discard any unconsumed data before adding more.
293 if (bReset)
294 m_bHasNewData = false;
295
296 m_log.CHECK(!m_bHasNewData, "Can't add data until current data has been consumed.");
297 int nNum = rgData.Count;
298 m_log.CHECK_GT(nNum, 0, "There are no datum to add.");
299
300 if (rgClip == null)
301 {
302 if (bResizeBatch)
303 m_nBatchSize = rgData.Count;
304
// Round the item count down to a whole number of batches and trim extras.
// NOTE(review): 'Three' in the message below is a typo for 'There' (runtime
// string left unchanged here).
305 int nNumAligned = (int)Math.Floor((double)rgData.Count / (double)m_nBatchSize) * m_nBatchSize;
306 m_log.CHECK_GT(nNumAligned, 0, "Three are not enough datum to add.");
307
308 if (nNumAligned < nNum)
309 {
310 m_log.WriteLine("WARNING: Clipping batch to batch aligned count of " + nNumAligned.ToString() + ".");
311
312 for (int i = nNumAligned; i < nNum; i++)
313 {
314 rgData.RemoveAt(rgData.Count - 1);
315 }
316 }
317
318 nNum = nNumAligned;
319 m_log.CHECK_EQ(nNum % m_nBatchSize, 0, "The added data must be a multiple of the batch size.");
320
321 m_blobData.Reshape(nNum, m_nChannels, m_nHeight, m_nWidth);
322
323 // Apply data transformations (mirror, scale, crop...)
324 m_transformer.Transform(rgData, m_blobData, m_cuda, m_log);
325
// (dropped guard at 326 - label handling below only applies when labels are in use)
327 {
328 List<int> rgLblShape = new List<int>();
329 int nLabelNum = nNum;
330
// (dropped guard at 331 - likely a label_type check; confirm against the repository)
332 nLabelNum = 1;
333
334 rgLblShape.Add(nLabelNum);
335 rgLblShape.Add(1);
336 rgLblShape.Add(1);
337 rgLblShape.Add(1);
338
339 // Reshape label blob depending on label type.
// (dropped guard at 340 - this block sizes the label axis for MULTIPLE labels)
341 {
342 m_log.CHECK_GE(nLblAxis, 1, "The label axis must be greater than or equal to 1.");
343 m_log.CHECK_LE(nLblAxis, 4, "The label axis must be less than 4.");
344
// Size the chosen axis from the first datum's unpacked DataCriteria labels.
345 List<float> rgLbl = BinaryData.UnPackFloatList(rgData[0].DataCriteria, rgData[0].DataCriteriaFormat);
346 rgLblShape[nLblAxis] = rgLbl.Count;
347 }
348
349 m_blobLabel.Reshape(rgLblShape);
350
351 // Copy labels - use DataCriteria for MULTIPLE labels
// (dropped guard at 352 - MULTIPLE-label branch; pairs with the 'else' at line 368)
353 {
354 T[] rgLabels = m_blobLabel.mutable_cpu_data;
355 int nIdx = 0;
356
357 for (int i = 0; i < nNum; i++)
358 {
359 List<float> rgLbl = BinaryData.UnPackFloatList(rgData[i].DataCriteria, rgData[i].DataCriteriaFormat);
360 for (int j = 0; j < rgLbl.Count; j++)
361 {
362 rgLabels[nIdx] = (T)Convert.ChangeType(rgLbl[j], typeof(T));
363 nIdx++;
364 }
365 }
366 m_blobLabel.mutable_cpu_data = rgLabels;
367 }
368 else
369 {
370 T[] rgLabels = m_blobLabel.mutable_cpu_data;
371
372 // For recurrent layers, single labels only use the last label in the sequence.
373 if (rgClip != null)
374 {
375 rgLabels[0] = (T)Convert.ChangeType(rgData[nNum - 1].label, typeof(T));
376 }
377 else
378 {
379 // Copy labels - use standard Datum label for SINGLE labels.
380 for (int i = 0; i < nNum; i++)
381 {
382 rgLabels[i] = (T)Convert.ChangeType(rgData[i].label, typeof(T));
383 }
384 }
385
386 m_blobLabel.mutable_cpu_data = rgLabels;
387 }
388 }
389 }
390 else
391 {
// Clip data supplied: the caller must pack data/clip/label via OnDataPack.
392 if (OnDataPack == null)
393 throw new Exception("To properly handle data packing, you must connect the OnDataPack event and properly fill out the data and clip blobs with the ordering expected by the recurrent layers.");
394
395 MemoryDataLayerPackDataArgs<T> args = new MemoryDataLayerPackDataArgs<T>(m_blobData, m_blobClip, m_blobLabel, rgData, rgClip);
396 OnDataPack(this, args);
397 }
398
// Mark the staged data ready for consumption by forward().
399 m_bHasNewData = true;
400 m_nN = nNum;
401 }
402
// Reset the staged data by copying the given data (and labels) blobs into the
// internal staging blobs; 'n' is the total number of staged items.
// NOTE(review): the extraction dropped original line 418 - a guard 'if (...)'
// (presumably a 'labels != null' or label-use check) that owns the orphaned
// '{' block below; confirm against the repository.
409 public void Reset(Blob<T> data, Blob<T> labels, int n)
410 {
411 m_log.CHECK_EQ(n % m_nBatchSize, 0, "'n' must be a multiple of batch size.");
412 m_nN = n;
413
414 m_log.CHECK_GT(m_blobData.count(), 0, "There is no data.");
415 m_blobData.ReshapeLike(data);
416 m_cuda.copy(m_blobData.count(), data.gpu_data, m_blobData.mutable_gpu_data);
417
// (dropped guard)
419 {
// NOTE(review): 'lables' in the message below is a typo for 'labels'
// (runtime string left unchanged here).
420 m_log.CHECK_GT(m_blobLabel.count(), 0, "There are no lables.");
421 m_blobLabel.ReshapeLike(labels);
422 m_cuda.copy(m_blobLabel.count(), labels.gpu_data, m_blobLabel.mutable_gpu_data);
423 }
424 }
425
// Copy the staged data, label and clip blobs from another MemoryDataLayer and
// mark the copied data as ready for consumption.
// NOTE(review): the extraction dropped original line 437 - a guard 'if (...)'
// (presumably a label-presence check) that owns the orphaned '{' block below.
430 public void Copy(MemoryDataLayer<T> src)
431 {
432 m_nN = src.m_nN;
433
434 m_blobData.ReshapeLike(src.m_blobData);
435 m_cuda.copy(src.m_blobData.count(), src.m_blobData.gpu_data, m_blobData.mutable_gpu_data);
436
// (dropped guard)
438 {
439 m_blobLabel.ReshapeLike(src.m_blobLabel);
440 m_cuda.copy(src.m_blobLabel.count(), src.m_blobLabel.gpu_data, m_blobLabel.mutable_gpu_data);
441 }
442
// NOTE(review): only this layer's m_blobClip is null-checked before
// dereferencing src.m_blobClip - a src without a clip blob would throw here;
// confirm intended usage.
443 if (m_blobClip != null)
444 {
445 m_blobClip.ReshapeLike(src.m_blobClip);
446 m_cuda.copy(src.m_blobClip.count(), src.m_blobClip.gpu_data, m_blobClip.mutable_gpu_data);
447 }
448
449 m_bHasNewData = true;
450 }
451
/// <summary>
/// Get/set the batch size. The batch size may only be changed after any
/// previously added data has been fully consumed by the forward pass.
/// </summary>
public int batch_size
{
    get { return m_nBatchSize; }
    set
    {
        m_log.CHECK(!m_bHasNewData, "Can't change the batch size until current data has been consumed.");
        m_nBatchSize = value;
    }
}
464
/// <summary>
/// Returns the data channels.
/// </summary>
public int channels => m_nChannels;
472
/// <summary>
/// Returns the data height.
/// </summary>
public int height => m_nHeight;
480
/// <summary>
/// Returns the data width.
/// </summary>
public int width => m_nWidth;
488
/// <summary>
/// Returns the clip 1 size, if any exists (0 = unused).
/// </summary>
public int clip_size1 => m_nClipSize1;
496
/// <summary>
/// Returns the clip 2 size, if any exists (0 = unused).
/// </summary>
public int clip_size2 => m_nClipSize2;
504
// The forward computation: copy the next batch from the staged data into the
// tops, advancing the circular cursor m_nPos; when the cursor wraps to 0 the
// staged data is marked consumed.
// NOTE(review): the extraction dropped original lines 535 and 541 - guard
// 'if (...)' conditions (label-use and single-label checks), so the orphaned
// '{' block and the dangling 'nLabelSize = 1;' below are conditional in the
// real source; confirm against the repository before editing.
513 protected override void forward(BlobCollection<T> colBottom, BlobCollection<T> colTop)
514 {
515 int nSrcOffset;
516
// Identify the data, clip and label tops; top[0] is always the data.
517 Blob<T> blobData = colTop[0];
518 Blob<T> blobClip = null;
519 Blob<T> blobLabel = null;
520
521 for (int i=1; i<colTop.Count; i++)
522 {
523 if (blobClip == null && (colTop[i].type == BLOB_TYPE.CLIP || colTop[i].Name.ToLower().Contains("clip")))
524 blobClip = colTop[i];
525 else
526 blobLabel = colTop[i];
527 }
528
529 blobData.Reshape(m_nBatchSize, m_nChannels, m_nHeight, m_nWidth);
530
// Copy one batch of data starting at the current cursor position.
531 nSrcOffset = m_nPos * m_nDataSize;
532 m_cuda.copy(blobData.count(), m_blobData.gpu_data, blobData.mutable_gpu_data, nSrcOffset, 0);
533
534
// (dropped guard - label copy only applies when a label top is produced)
536 {
537 if (blobLabel == null)
538 m_log.WriteError(new Exception("Could not find the MemoryDataLayer 'label' top!"));
539
540 int nLabelSize = m_nBatchSize;
// (dropped guard - single-label case; see DataLayerSetUp's SINGLE + clip handling)
542 nLabelSize = 1;
543
// Shape the label top from the staged label shape with the batch dimension
// replaced by nLabelSize.
544 List<int> rgLabelShape = Utility.Clone<int>(m_blobLabel.shape());
545 if (rgLabelShape.Count == 0)
546 rgLabelShape.Add(nLabelSize);
547 else
548 rgLabelShape[0] = nLabelSize;
549
550 blobLabel.Reshape(rgLabelShape);
551
// Copy the labels for the current batch position.
552 nSrcOffset = m_nPos * m_nLabelSize;
553 m_cuda.copy(blobLabel.count(), m_blobLabel.gpu_data, blobLabel.mutable_gpu_data, nSrcOffset, 0);
554 }
555
// The clip blob is copied whole (with reshape) on each pass.
556 if (m_blobClip != null)
557 {
558 if (blobClip == null)
559 m_log.WriteError(new Exception("Could not find the MemoryDataLayer 'clip' top!"));
560
561 blobClip.CopyFrom(m_blobClip, false, true);
562 }
563
// Advance the circular cursor; a wrap to 0 means all staged data was consumed.
564 m_nPos = (m_nPos + m_nBatchSize) % m_nN;
565
566 if (m_nPos == 0)
567 m_bHasNewData = false;
568 }
569 }
570
/// <summary>
/// The MemoryDataLayerGetDataArgs class is passed to the OnGetData event.
/// </summary>
public class MemoryDataLayerGetDataArgs : EventArgs
{
    readonly bool m_bDuringSetup;

    /// <summary>
    /// The constructor.
    /// </summary>
    /// <param name="bInit">Specifies whether or not the event fired during the DataLayerSetUp call.</param>
    public MemoryDataLayerGetDataArgs(bool bInit)
    {
        m_bDuringSetup = bInit;
    }

    /// <summary>
    /// Returns whether the event was fired during the DataLayerSetUp call or not.
    /// </summary>
    public bool Initialization
    {
        get { return m_bDuringSetup; }
    }
}
595
// The MemoryDataLayerPackDataArgs is passed to the OnDataPack event, handing
// the staging blobs and raw items to the caller for recurrent-style packing.
// NOTE(review): the extraction dropped original line 608 (the
// 'LayerParameter.LayerType m_lstmType;' field assigned at line 626 below)
// and the property signature lines preceding the orphaned '{' accessors;
// per the index these are 'LstmType', 'Data', 'Clip' and 'Label' - confirm
// against the repository before editing.
601 public class MemoryDataLayerPackDataArgs<T> : EventArgs
602 {
// Staging blobs to be filled in the ordering the recurrent layers expect.
603 Blob<T> m_blobData;
604 Blob<T> m_blobClip;
605 Blob<T> m_blobLabel;
// Raw items supplied to AddDatumVector.
606 List<Datum> m_rgData;
607 List<Datum> m_rgClip;
609
// The constructor; 'type' specifies the recurrent layer type (default = LSTM).
619 public MemoryDataLayerPackDataArgs(Blob<T> blobData, Blob<T> blobClip, Blob<T> blobLabel, List<Datum> rgData, List<Datum> rgClip, LayerParameter.LayerType type = LayerParameter.LayerType.LSTM)
620 {
621 m_blobData = blobData;
622 m_blobClip = blobClip;
623 m_blobLabel = blobLabel;
624 m_rgData = rgData;
625 m_rgClip = rgClip;
626 m_lstmType = type;
627 }
628
// (dropped signature - 'LstmType' per the index): returns the LSTM type.
633 {
634 get { return m_lstmType; }
635 }
636
// (dropped signature - 'Data' per the index): returns the data blob to fill.
641 {
642 get { return m_blobData; }
643 }
644
// (dropped signature - 'Clip' per the index): returns the clip blob to fill.
649 {
650 get { return m_blobClip; }
651 }
652
// (dropped signature - 'Label' per the index): returns the label blob to fill.
657 {
658 get { return m_blobLabel; }
659 }
660
// Returns the raw data items to use to fill the data blob.
664 public List<Datum> DataItems
665 {
666 get { return m_rgData; }
667 }
668
// Returns the raw clip items to use to fill the clip blob.
672 public List<Datum> ClipItems
673 {
674 get { return m_rgClip; }
675 }
676 }
677}
The BinaryData class is used to pack and unpack DataCriteria binary data, optionally stored within ea...
Definition: BinaryData.cs:15
static List< float > UnPackFloatList(byte[] rg, DATA_FORMAT fmtExpected)
Unpack the byte array into a list of float values.
Definition: BinaryData.cs:132
The Datum class is a simple wrapper to the SimpleDatum class to ensure compatibility with the origina...
Definition: Datum.cs:12
The Log class provides general output in text form.
Definition: Log.cs:13
void CHECK(bool b, string str)
Test a flag for true.
Definition: Log.cs:227
void WriteLine(string str, bool bOverrideEnabled=false, bool bHeader=false, bool bError=false, bool bDisable=false)
Write a line of output.
Definition: Log.cs:80
void CHECK_EQ(double df1, double df2, string str)
Test whether one number is equal to another.
Definition: Log.cs:239
void WriteError(Exception e)
Write an error as output.
Definition: Log.cs:130
void CHECK_GT(double df1, double df2, string str)
Test whether one number is greater than another.
Definition: Log.cs:299
void CHECK_LE(double df1, double df2, string str)
Test whether one number is less than or equal to another.
Definition: Log.cs:263
void CHECK_GE(double df1, double df2, string str)
Test whether one number is greater than or equal to another.
Definition: Log.cs:287
The SimpleDatum class holds a data input within host memory.
Definition: SimpleDatum.cs:161
The Utility class provides general utility functions.
Definition: Utility.cs:35
The BlobCollection contains a list of Blobs.
bool Contains(Blob< T > blob)
Returns whether or not the collection contains a given blob.
int Count
Returns the number of items in the collection.
void ReshapeLike(BlobCollection< T > src)
Reshapes all blobs in the collection to the sizes of the source.
void Reshape(int[] rgShape)
Reshapes all blobs in the collection to the given shape.
The Blob is the main holder of data that moves through the Layers of the Net.
Definition: Blob.cs:25
int channels
DEPRECIATED; legacy shape accessor channels: use shape(1) instead.
Definition: Blob.cs:800
int height
DEPRECIATED; legacy shape accessor height: use shape(2) instead.
Definition: Blob.cs:808
long mutable_gpu_data
Returns the data GPU handle used by the CudaDnn connection.
Definition: Blob.cs:1487
T[] mutable_cpu_data
Get data from the GPU and bring it over to the host, or Set data from the Host and send it over to th...
Definition: Blob.cs:1461
void Reshape(int nNum, int nChannels, int nHeight, int nWidth, bool? bUseHalfSize=null)
DEPRECIATED; use
Definition: Blob.cs:442
void CopyFrom(Blob< T > src, int nSrcOffset, int nDstOffset, int nCount, bool bCopyData, bool bCopyDiff)
Copy from a source Blob.
Definition: Blob.cs:903
int width
DEPRECIATED; legacy shape accessor width: use shape(3) instead.
Definition: Blob.cs:816
List< int > shape()
Returns an array where each element contains the shape of an axis of the Blob.
Definition: Blob.cs:684
T[] update_cpu_data()
Update the CPU data by transferring the GPU data over to the Host.
Definition: Blob.cs:1470
int count()
Returns the total number of items in the Blob.
Definition: Blob.cs:739
void ReshapeLike(Blob< T > b, bool? bUseHalfSize=null)
Reshape this Blob to have the same shape as another Blob.
Definition: Blob.cs:648
string Name
Get/set the name of the Blob.
Definition: Blob.cs:2184
long gpu_data
Returns the data GPU handle used by the CudaDnn connection.
Definition: Blob.cs:1479
The CudaDnn object is the main interface to the Low-Level Cuda C++ DLL.
Definition: CudaDnn.cs:969
The BaseDataLayer is the base class for data Layers that feed Blobs of data into the Net.
DataTransformer< T > m_transformer
Specifies the DataTransformer used to transform each data item as it loaded.
Log m_log
Specifies the Log for output.
Definition: Layer.cs:43
LayerParameter m_param
Specifies the LayerParameter describing the Layer.
Definition: Layer.cs:47
LayerParameter.LayerType type
Returns the LayerType of this Layer.
Definition: Layer.cs:927
CudaDnn< T > m_cuda
Specifies the CudaDnn connection to Cuda.
Definition: Layer.cs:39
LayerParameter.LayerType m_type
Specifies the Layer type.
Definition: Layer.cs:35
The MemoryDataLayerGetDataArgs class is passed to the OnGetData event.
MemoryDataLayerGetDataArgs(bool bInit)
The constructor.
bool Initialization
Returns whether the event was fired during the DataLayerSetup call or not.
The MemoryDataLayer provides data to the Net from memory. This layer is initialized with the MyCaffe....
int width
Returns the data width.
int height
Returns the data height.
override int MinTopBlobs
Returns the minimum number of top blobs: data
virtual void AddDatum(SimpleDatum sd, int nLblAxis=1, bool bReset=false, bool bResizeBatch=false)
This method is used to add a single Datum to the memory.
override void DataLayerSetUp(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Setup the MemoryDataLayer.
virtual void AddDatumVector(Datum[] rgData, Datum[] rgClip=null, int nLblAxis=1, bool bReset=false, bool bResizeBatch=false)
This method is used to add a list of Datums to the memory.
int channels
Returns the data channels.
void Reset(Blob< T > data, Blob< T > labels, int n)
Resets the data by copying the data from the specified parameters into the internal data.
override int MaxTopBlobs
Returns the maximum number of top blobs: data, clip, label
int clip_size2
Returns the clip 2 size, if any exists.
override int ExactNumTopBlobs
Returns the exact number of required top (output) Blobs.
override void Reshape(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Reshape the internal data and outputs.
override int ExactNumBottomBlobs
No bottom blobs are used by this layer.
int batch_size
Returns the batch size.
MemoryDataLayer(CudaDnn< T > cuda, Log log, LayerParameter p)
The BaseDataLayer constructor.
virtual void AddDatumVector(List< Datum > rgData, List< Datum > rgClip=null, int nLblAxis=1, bool bReset=false, bool bResizeBatch=false)
This method is used to add a list of Datums to the memory.
EventHandler< MemoryDataLayerGetDataArgs > OnGetData
The OnGetData event fires on the DataLayerSetup call and each time the data wraps around (e....
EventHandler< MemoryDataLayerPackDataArgs< T > > OnDataPack
The OnDataPack event fires from within the AddDatumVector method and is used to pack the data into a ...
override void forward(BlobCollection< T > colBottom, BlobCollection< T > colTop)
The forward computation which loads the data into the top (output) Blobs.
int clip_size1
Returns the clip 1 size, if any exists.
void Copy(MemoryDataLayer< T > src)
Copy the data by copying the src layer data and label to the parameters specified.
override void dispose()
Releases all GPU and host resources used by the Layer.
The MemoryDataLayerPackDataArgs is passed to the OnDataPack event which fires each time the data rece...
Blob< T > Label
Returns the label data to fill with ordered label information.
Blob< T > Clip
Returns the clip data to fill with ordered data for clipping.
List< Datum > ClipItems
Returns the raw clip items to use to fill.
LayerParameter.LayerType LstmType
Returns the LSTM type.
Blob< T > Data
Returns the blob data to fill with ordered data.
List< Datum > DataItems
Returns the raw data items to use to fill.
MemoryDataLayerPackDataArgs(Blob< T > blobData, Blob< T > blobClip, Blob< T > blobLabel, List< Datum > rgData, List< Datum > rgClip, LayerParameter.LayerType type=LayerParameter.LayerType.LSTM)
The constructor.
The LayerParameterBase is the base class for all other layer specific parameters.
LABEL_TYPE
Defines the label type.
Specifies the base parameter for all layers.
string name
Specifies the name of this LayerParameter.
MemoryDataParameter memory_data_param
Returns the parameter set when initialized with LayerType.MEMORY_DATA
List< string > top
Specifies the active top connections (in the bottom, out the top)
LayerType
Specifies the layer type.
uint clip_length1
Specifies the clip length 1 (default = 0, which means unused).
uint clip_length2
Specifies the clip length 2 (default = 0, which means unused).
uint width
The width of the data.
LABEL_TYPE label_type
(optional, default = SINGLE) Specifies the label type: SINGLE - the default which uses the 'Label' fi...
uint label_height
The height of the label.
uint label_width
The width of the label.
uint channels
The number of channels in the data.
uint label_channels
The number of channels in the label.
uint height
The height of the data.
The MyCaffe.basecode contains all generic types used throughout MyCaffe.
Definition: Annotation.cs:12
The MyCaffe.common namespace contains common MyCaffe classes.
Definition: BatchInput.cs:8
BLOB_TYPE
Defines the type of data held by a given Blob.
Definition: Interfaces.cs:62
The MyCaffe.data namespace contains dataset creators used to create common testing datasets such as M...
Definition: BinaryFile.cs:16
The MyCaffe.db.image namespace contains all image database related classes.
Definition: Database.cs:18
The MyCaffe.layers namespace contains all layers that have a solidified code base,...
Definition: LayerFactory.cs:15
The MyCaffe.param namespace contains parameters used to create models.
The MyCaffe namespace contains the main body of MyCaffe code that closesly tracks the C++ Caffe open-...
Definition: Annotation.cs:12