AllowForceBackward(int nBottomIdx) | MyCaffe.layers.Layer< T > | virtual |
AutoTopBlobs | MyCaffe.layers.Layer< T > | |
backward(BlobCollection< T > colTop, List< bool > rgbPropagateDown, BlobCollection< T > colBottom) | MyCaffe.layers.gpt.MultiheadAttentionLayer< T > | protectedvirtual |
Backward(BlobCollection< T > colTop, List< bool > rgbPropagateDown, BlobCollection< T > colBottom) | MyCaffe.layers.Layer< T > | |
backward_timing | MyCaffe.layers.Layer< T > | |
backward_timing_average | MyCaffe.layers.Layer< T > | |
blobs | MyCaffe.layers.Layer< T > | |
check_nan(Blob< T > b) | MyCaffe.layers.Layer< T > | protected |
CheckBlobCounts(BlobCollection< T > colBottom, BlobCollection< T > colTop) | MyCaffe.layers.Layer< T > | protected |
compareShapes(BlobCollection< T > colBottom, BlobCollection< T > colTop) | MyCaffe.layers.Layer< T > | protected |
ConnectLoss(LossLayer< T > layer) | MyCaffe.layers.Layer< T > | virtual |
convert(BlobCollection< T > col) | MyCaffe.layers.Layer< T > | protected |
convert(double df) | MyCaffe.layers.Layer< T > | protected |
convert(float f) | MyCaffe.layers.Layer< T > | protected |
convert(double[] rg) | MyCaffe.layers.Layer< T > | protected |
convert(float[] rg) | MyCaffe.layers.Layer< T > | protected |
convert_to_full(int nCount, long hMem) | MyCaffe.layers.Layer< T > | protected |
convertD(T df) | MyCaffe.layers.Layer< T > | protected |
convertD(T[] rg) | MyCaffe.layers.Layer< T > | protected |
convertF(T df) | MyCaffe.layers.Layer< T > | protected |
convertF(T[] rg) | MyCaffe.layers.Layer< T > | protected |
convertLayerParam(LayerParameter pChild, LayerParameter pParent) | MyCaffe.layers.Layer< T > | protected |
ConvertToBase(BlobCollection< T > col) | MyCaffe.layers.Layer< T > | |
Create(CudaDnn< T > cuda, Log log, LayerParameter p, CancelEvent evtCancel, IXDatabaseBase db=null, TransferInput trxinput=null) | MyCaffe.layers.Layer< T > | static |
Dispose() | MyCaffe.layers.Layer< T > | |
dispose() | MyCaffe.layers.gpt.MultiheadAttentionLayer< T > | protectedvirtual |
MyCaffe::layers::Layer.dispose(ref Layer< T > l) | MyCaffe.layers.Layer< T > | protected |
MyCaffe::layers::Layer.dispose(ref Blob< T > b) | MyCaffe.layers.Layer< T > | protected |
MyCaffe::layers::Layer.dispose(ref BlobCollection< T > rg, bool bSetToNull=true) | MyCaffe.layers.Layer< T > | protected |
EqualNumBottomTopBlobs | MyCaffe.layers.Layer< T > | |
ExactNumBottomBlobs | MyCaffe.layers.gpt.MultiheadAttentionLayer< T > | |
ExactNumTopBlobs | MyCaffe.layers.gpt.MultiheadAttentionLayer< T > | |
forward(BlobCollection< T > colBottom, BlobCollection< T > colTop) | MyCaffe.layers.gpt.MultiheadAttentionLayer< T > | protectedvirtual |
Forward(BlobCollection< T > colBottom, BlobCollection< T > colTop) | MyCaffe.layers.Layer< T > | |
forward_timing | MyCaffe.layers.Layer< T > | |
forward_timing_average | MyCaffe.layers.Layer< T > | |
getCurrentIteration() | MyCaffe.layers.Layer< T > | protected |
getWorkspace() | MyCaffe.layers.Layer< T > | protectedvirtual |
internal_blobs | MyCaffe.layers.Layer< T > | |
Layer(CudaDnn< T > cuda, Log log, LayerParameter p) | MyCaffe.layers.Layer< T > | |
layer_param | MyCaffe.layers.Layer< T > | |
LayerSetUp(BlobCollection< T > colBottom, BlobCollection< T > colTop) | MyCaffe.layers.gpt.MultiheadAttentionLayer< T > | virtual |
loss(int nTopIdx) | MyCaffe.layers.Layer< T > | |
m_bConvertBottom | MyCaffe.layers.Layer< T > | protected |
m_bConvertTopOnBwd | MyCaffe.layers.Layer< T > | protected |
m_bConvertTopOnFwd | MyCaffe.layers.Layer< T > | protected |
m_bEnablePassthrough | MyCaffe.layers.Layer< T > | protected |
m_bNetReshapeRequest | MyCaffe.layers.Layer< T > | protected |
m_bReshapeOnForwardNeeded | MyCaffe.layers.Layer< T > | protected |
m_bUseHalfSize | MyCaffe.layers.Layer< T > | protected |
m_colBlobs | MyCaffe.layers.Layer< T > | protected |
m_colInternalBlobs | MyCaffe.layers.Layer< T > | protected |
m_cuda | MyCaffe.layers.Layer< T > | protected |
m_log | MyCaffe.layers.Layer< T > | protected |
m_param | MyCaffe.layers.Layer< T > | protected |
m_parentLayerType | MyCaffe.layers.Layer< T > | protected |
m_phase | MyCaffe.layers.Layer< T > | protected |
m_rgbParamPropagateDown | MyCaffe.layers.Layer< T > | protected |
m_rgLoss | MyCaffe.layers.Layer< T > | protected |
m_tOne | MyCaffe.layers.Layer< T > | protected |
m_type | MyCaffe.layers.Layer< T > | protected |
m_tZero | MyCaffe.layers.Layer< T > | protected |
MaxBottomBlobs | MyCaffe.layers.Layer< T > | |
MaxTopBlobs | MyCaffe.layers.Layer< T > | |
MinBottomBlobs | MyCaffe.layers.Layer< T > | |
MinTopBlobs | MyCaffe.layers.Layer< T > | |
MultiheadAttentionLayer(CudaDnn< T > cuda, Log log, LayerParameter p) | MyCaffe.layers.gpt.MultiheadAttentionLayer< T > | |
OnDebug | MyCaffe.layers.Layer< T > | |
OnGetIteration | MyCaffe.layers.Layer< T > | |
OnGetWorkspace | MyCaffe.layers.Layer< T > | |
OnSetWorkspace | MyCaffe.layers.Layer< T > | |
param_propagate_down(int nParamIdx) | MyCaffe.layers.Layer< T > | |
parent_layer_type | MyCaffe.layers.Layer< T > | |
PostProcessFullOutput(Blob< T > blobSoftmax) | MyCaffe.layers.Layer< T > | virtual |
PostProcessLogitsOutput(int nCurIdx, Blob< T > blobLogits, Layer< T > softmax, int nAxis, int nK=1) | MyCaffe.layers.Layer< T > | virtual |
PostProcessOutput(Blob< T > blobSoftmax, int nK=1) | MyCaffe.layers.Layer< T > | virtual |
PostProcessOutput(int nIdx) | MyCaffe.layers.Layer< T > | virtual |
PreProcessInput(PropertySet customInput, out int nSeqLen, BlobCollection< T > colBottom=null) | MyCaffe.layers.Layer< T > | virtual |
PreProcessInput(string strEncInput, int? nDecInput, BlobCollection< T > colBottom) | MyCaffe.layers.Layer< T > | virtual |
ReInitializeParameters(WEIGHT_TARGET target) | MyCaffe.layers.gpt.MultiheadAttentionLayer< T > | virtual |
ResetOnDebug(EventHandler< GetWorkBlobArgs< T > > fn) | MyCaffe.layers.Layer< T > | virtual |
Reshape(BlobCollection< T > colBottom, BlobCollection< T > colTop) | MyCaffe.layers.gpt.MultiheadAttentionLayer< T > | virtual |
reshapeNeeded(BlobCollection< T > colBottom, BlobCollection< T > colTop, bool bReset=true) | MyCaffe.layers.Layer< T > | protectedvirtual |
set_loss(int nTopIdx, double dfLoss) | MyCaffe.layers.Layer< T > | |
set_param_propagate_down(int nParamIdx, bool bPropagate) | MyCaffe.layers.Layer< T > | |
SetEnablePassthrough(bool bEnable) | MyCaffe.layers.Layer< T > | |
SetLossWeights(BlobCollection< T > colTop) | MyCaffe.layers.Layer< T > | protected |
SetNetParameterUsed(NetParameter np) | MyCaffe.layers.Layer< T > | virtual |
SetNetReshapeRequest() | MyCaffe.layers.Layer< T > | |
SetOnDebug(EventHandler< GetWorkBlobArgs< T > > fn) | MyCaffe.layers.Layer< T > | virtual |
SetPhase(Phase phase) | MyCaffe.layers.Layer< T > | |
setShapes(BlobCollection< T > colBottom, BlobCollection< T > colTop) | MyCaffe.layers.Layer< T > | protected |
Setup(BlobCollection< T > colBottom, BlobCollection< T > colTop) | MyCaffe.layers.Layer< T > | |
setup_internal_blobs(BlobCollection< T > col) | MyCaffe.layers.gpt.MultiheadAttentionLayer< T > | protectedvirtual |
setWorkspace(ulong lSizeInBytes) | MyCaffe.layers.Layer< T > | protectedvirtual |
shareLayerBlob(Blob< T > b, List< int > rgMinShape) | MyCaffe.layers.Layer< T > | protected |
shareLayerBlobs(Layer< T > layer) | MyCaffe.layers.Layer< T > | protected |
shareParameter(Blob< T > b, List< int > rgMinShape, bool bAllowEndsWithComparison=false) | MyCaffe.layers.Layer< T > | protected |
size_at(Blob< T > b) | MyCaffe.layers.Layer< T > | protected |
SupportsPostProcessing | MyCaffe.layers.Layer< T > | |
SupportsPostProcessingFullOutput | MyCaffe.layers.Layer< T > | |
SupportsPostProcessingLogits | MyCaffe.layers.Layer< T > | |
SupportsPreProcessing | MyCaffe.layers.Layer< T > | |
type | MyCaffe.layers.Layer< T > | |
val_at(T[] rg, int nIdx) | MyCaffe.layers.Layer< T > | protected |