[MNN:Sync] Sync internal gitlab
xiaying committed Apr 16, 2021
1 parent 0753ea0 commit 3c4ba7c
Showing 22 changed files with 686 additions and 194 deletions.
37 changes: 28 additions & 9 deletions schema/current/CaffeOp_generated.h
@@ -1916,9 +1916,11 @@ struct Pool3DT : public flatbuffers::NativeTable {
std::vector<int32_t> pads;
PoolType type;
PoolPadType padType;
bool isGlobal;
Pool3DT()
: type(PoolType_MAXPOOL),
padType(PoolPadType_CAFFE) {
padType(PoolPadType_CAFFE),
isGlobal(false) {
}
};

@@ -1932,7 +1934,8 @@ struct Pool3D FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
VT_KERNELS = 6,
VT_PADS = 8,
VT_TYPE = 10,
VT_PADTYPE = 12
VT_PADTYPE = 12,
VT_ISGLOBAL = 14
};
const flatbuffers::Vector<int32_t> *strides() const {
return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_STRIDES);
@@ -1949,6 +1952,9 @@ struct Pool3D FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
PoolPadType padType() const {
return static_cast<PoolPadType>(GetField<int8_t>(VT_PADTYPE, 0));
}
bool isGlobal() const {
return GetField<uint8_t>(VT_ISGLOBAL, 0) != 0;
}
bool Verify(flatbuffers::Verifier &verifier) const {
return VerifyTableStart(verifier) &&
VerifyOffset(verifier, VT_STRIDES) &&
@@ -1959,6 +1965,7 @@ struct Pool3D FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
verifier.VerifyVector(pads()) &&
VerifyField<int8_t>(verifier, VT_TYPE) &&
VerifyField<int8_t>(verifier, VT_PADTYPE) &&
VerifyField<uint8_t>(verifier, VT_ISGLOBAL) &&
verifier.EndTable();
}
Pool3DT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
@@ -1984,6 +1991,9 @@ struct Pool3DBuilder {
void add_padType(PoolPadType padType) {
fbb_.AddElement<int8_t>(Pool3D::VT_PADTYPE, static_cast<int8_t>(padType), 0);
}
void add_isGlobal(bool isGlobal) {
fbb_.AddElement<uint8_t>(Pool3D::VT_ISGLOBAL, static_cast<uint8_t>(isGlobal), 0);
}
explicit Pool3DBuilder(flatbuffers::FlatBufferBuilder &_fbb)
: fbb_(_fbb) {
start_ = fbb_.StartTable();
@@ -2002,11 +2012,13 @@ inline flatbuffers::Offset<Pool3D> CreatePool3D(
flatbuffers::Offset<flatbuffers::Vector<int32_t>> kernels = 0,
flatbuffers::Offset<flatbuffers::Vector<int32_t>> pads = 0,
PoolType type = PoolType_MAXPOOL,
PoolPadType padType = PoolPadType_CAFFE) {
PoolPadType padType = PoolPadType_CAFFE,
bool isGlobal = false) {
Pool3DBuilder builder_(_fbb);
builder_.add_pads(pads);
builder_.add_kernels(kernels);
builder_.add_strides(strides);
builder_.add_isGlobal(isGlobal);
builder_.add_padType(padType);
builder_.add_type(type);
return builder_.Finish();
@@ -2018,7 +2030,8 @@ inline flatbuffers::Offset<Pool3D> CreatePool3DDirect(
const std::vector<int32_t> *kernels = nullptr,
const std::vector<int32_t> *pads = nullptr,
PoolType type = PoolType_MAXPOOL,
PoolPadType padType = PoolPadType_CAFFE) {
PoolPadType padType = PoolPadType_CAFFE,
bool isGlobal = false) {
auto strides__ = strides ? _fbb.CreateVector<int32_t>(*strides) : 0;
auto kernels__ = kernels ? _fbb.CreateVector<int32_t>(*kernels) : 0;
auto pads__ = pads ? _fbb.CreateVector<int32_t>(*pads) : 0;
@@ -2028,7 +2041,8 @@
kernels__,
pads__,
type,
padType);
padType,
isGlobal);
}

flatbuffers::Offset<Pool3D> CreatePool3D(flatbuffers::FlatBufferBuilder &_fbb, const Pool3DT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
@@ -4920,6 +4934,7 @@ inline void Pool3D::UnPackTo(Pool3DT *_o, const flatbuffers::resolver_function_t
{ auto _e = pads(); if (_e) { _o->pads.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->pads[_i] = _e->Get(_i); } } };
{ auto _e = type(); _o->type = _e; };
{ auto _e = padType(); _o->padType = _e; };
{ auto _e = isGlobal(); _o->isGlobal = _e; };
}

inline flatbuffers::Offset<Pool3D> Pool3D::Pack(flatbuffers::FlatBufferBuilder &_fbb, const Pool3DT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
@@ -4935,13 +4950,15 @@ inline flatbuffers::Offset<Pool3D> CreatePool3D(flatbuffers::FlatBufferBuilder &
auto _pads = _o->pads.size() ? _fbb.CreateVector(_o->pads) : 0;
auto _type = _o->type;
auto _padType = _o->padType;
auto _isGlobal = _o->isGlobal;
return MNN::CreatePool3D(
_fbb,
_strides,
_kernels,
_pads,
_type,
_padType);
_padType,
_isGlobal);
}

inline ReluT *Relu::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
@@ -6222,7 +6239,8 @@ inline const flatbuffers::TypeTable *Pool3DTypeTable() {
{ flatbuffers::ET_INT, 1, -1 },
{ flatbuffers::ET_INT, 1, -1 },
{ flatbuffers::ET_CHAR, 0, 0 },
{ flatbuffers::ET_CHAR, 0, 1 }
{ flatbuffers::ET_CHAR, 0, 1 },
{ flatbuffers::ET_BOOL, 0, -1 }
};
static const flatbuffers::TypeFunction type_refs[] = {
PoolTypeTypeTable,
@@ -6233,10 +6251,11 @@
"kernels",
"pads",
"type",
"padType"
"padType",
"isGlobal"
};
static const flatbuffers::TypeTable tt = {
flatbuffers::ST_TABLE, 5, type_codes, type_refs, nullptr, names
flatbuffers::ST_TABLE, 6, type_codes, type_refs, nullptr, names
};
return &tt;
}
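
The net effect of the Pool3D changes above is a single new optional field, isGlobal, appended after padType (vtable slot 14, default false). As a rough sketch, writing the extended table through the generated Direct helper might look like the following; the chosen values and the wrapping function are illustrative, and only the generated MNN API shown in the diff is assumed real:

#include "CaffeOp_generated.h"
#include <vector>

// Sketch only: serialize a Pool3D marked as global pooling.
flatbuffers::DetachedBuffer buildGlobalPool3D() {
    flatbuffers::FlatBufferBuilder fbb;
    std::vector<int32_t> strides = {1, 1, 1};
    std::vector<int32_t> kernels = {1, 1, 1};  // presumably ignored by the backend when isGlobal is set
    std::vector<int32_t> pads    = {0, 0, 0};
    auto pool = MNN::CreatePool3DDirect(
        fbb, &strides, &kernels, &pads,
        MNN::PoolType_MAXPOOL,
        MNN::PoolPadType_CAFFE,
        /*isGlobal=*/true);  // new field in this commit; defaults to false
    fbb.Finish(pool);
    return fbb.Release();
}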
77 changes: 64 additions & 13 deletions schema/current/TensorflowOp_generated.h
@@ -2610,18 +2610,22 @@ struct RNNParamT : public flatbuffers::NativeTable {
typedef RNNParam TableType;
int32_t numUnits;
bool isBidirectionalRNN;
bool linearBeforeReset;
bool keepAllOutputs;
std::unique_ptr<BlobT> fwGateWeight;
std::unique_ptr<BlobT> fwGateBias;
std::unique_ptr<BlobT> fwCandidateWeight;
std::unique_ptr<BlobT> fwCandidateBias;
std::unique_ptr<BlobT> fwRecurrentBias;
std::unique_ptr<BlobT> bwGateWeight;
std::unique_ptr<BlobT> bwGateBias;
std::unique_ptr<BlobT> bwCandidateWeight;
std::unique_ptr<BlobT> bwCandidateBias;
std::unique_ptr<BlobT> bwRecurrentBias;
RNNParamT()
: numUnits(0),
isBidirectionalRNN(false),
linearBeforeReset(false),
keepAllOutputs(false) {
}
};
@@ -2634,22 +2638,28 @@
enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
VT_NUMUNITS = 4,
VT_ISBIDIRECTIONALRNN = 6,
VT_KEEPALLOUTPUTS = 8,
VT_FWGATEWEIGHT = 10,
VT_FWGATEBIAS = 12,
VT_FWCANDIDATEWEIGHT = 14,
VT_FWCANDIDATEBIAS = 16,
VT_BWGATEWEIGHT = 18,
VT_BWGATEBIAS = 20,
VT_BWCANDIDATEWEIGHT = 22,
VT_BWCANDIDATEBIAS = 24
VT_LINEARBEFORERESET = 8,
VT_KEEPALLOUTPUTS = 10,
VT_FWGATEWEIGHT = 12,
VT_FWGATEBIAS = 14,
VT_FWCANDIDATEWEIGHT = 16,
VT_FWCANDIDATEBIAS = 18,
VT_FWRECURRENTBIAS = 20,
VT_BWGATEWEIGHT = 22,
VT_BWGATEBIAS = 24,
VT_BWCANDIDATEWEIGHT = 26,
VT_BWCANDIDATEBIAS = 28,
VT_BWRECURRENTBIAS = 30
};
int32_t numUnits() const {
return GetField<int32_t>(VT_NUMUNITS, 0);
}
bool isBidirectionalRNN() const {
return GetField<uint8_t>(VT_ISBIDIRECTIONALRNN, 0) != 0;
}
bool linearBeforeReset() const {
return GetField<uint8_t>(VT_LINEARBEFORERESET, 0) != 0;
}
bool keepAllOutputs() const {
return GetField<uint8_t>(VT_KEEPALLOUTPUTS, 0) != 0;
}
@@ -2665,6 +2675,9 @@ struct RNNParam FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
const Blob *fwCandidateBias() const {
return GetPointer<const Blob *>(VT_FWCANDIDATEBIAS);
}
const Blob *fwRecurrentBias() const {
return GetPointer<const Blob *>(VT_FWRECURRENTBIAS);
}
const Blob *bwGateWeight() const {
return GetPointer<const Blob *>(VT_BWGATEWEIGHT);
}
@@ -2677,10 +2690,14 @@ struct RNNParam FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
const Blob *bwCandidateBias() const {
return GetPointer<const Blob *>(VT_BWCANDIDATEBIAS);
}
const Blob *bwRecurrentBias() const {
return GetPointer<const Blob *>(VT_BWRECURRENTBIAS);
}
bool Verify(flatbuffers::Verifier &verifier) const {
return VerifyTableStart(verifier) &&
VerifyField<int32_t>(verifier, VT_NUMUNITS) &&
VerifyField<uint8_t>(verifier, VT_ISBIDIRECTIONALRNN) &&
VerifyField<uint8_t>(verifier, VT_LINEARBEFORERESET) &&
VerifyField<uint8_t>(verifier, VT_KEEPALLOUTPUTS) &&
VerifyOffset(verifier, VT_FWGATEWEIGHT) &&
verifier.VerifyTable(fwGateWeight()) &&
@@ -2690,6 +2707,8 @@ struct RNNParam FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
verifier.VerifyTable(fwCandidateWeight()) &&
VerifyOffset(verifier, VT_FWCANDIDATEBIAS) &&
verifier.VerifyTable(fwCandidateBias()) &&
VerifyOffset(verifier, VT_FWRECURRENTBIAS) &&
verifier.VerifyTable(fwRecurrentBias()) &&
VerifyOffset(verifier, VT_BWGATEWEIGHT) &&
verifier.VerifyTable(bwGateWeight()) &&
VerifyOffset(verifier, VT_BWGATEBIAS) &&
@@ -2698,6 +2717,8 @@ struct RNNParam FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
verifier.VerifyTable(bwCandidateWeight()) &&
VerifyOffset(verifier, VT_BWCANDIDATEBIAS) &&
verifier.VerifyTable(bwCandidateBias()) &&
VerifyOffset(verifier, VT_BWRECURRENTBIAS) &&
verifier.VerifyTable(bwRecurrentBias()) &&
verifier.EndTable();
}
RNNParamT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
@@ -2714,6 +2735,9 @@ struct RNNParamBuilder {
void add_isBidirectionalRNN(bool isBidirectionalRNN) {
fbb_.AddElement<uint8_t>(RNNParam::VT_ISBIDIRECTIONALRNN, static_cast<uint8_t>(isBidirectionalRNN), 0);
}
void add_linearBeforeReset(bool linearBeforeReset) {
fbb_.AddElement<uint8_t>(RNNParam::VT_LINEARBEFORERESET, static_cast<uint8_t>(linearBeforeReset), 0);
}
void add_keepAllOutputs(bool keepAllOutputs) {
fbb_.AddElement<uint8_t>(RNNParam::VT_KEEPALLOUTPUTS, static_cast<uint8_t>(keepAllOutputs), 0);
}
@@ -2729,6 +2753,9 @@ struct RNNParamBuilder {
void add_fwCandidateBias(flatbuffers::Offset<Blob> fwCandidateBias) {
fbb_.AddOffset(RNNParam::VT_FWCANDIDATEBIAS, fwCandidateBias);
}
void add_fwRecurrentBias(flatbuffers::Offset<Blob> fwRecurrentBias) {
fbb_.AddOffset(RNNParam::VT_FWRECURRENTBIAS, fwRecurrentBias);
}
void add_bwGateWeight(flatbuffers::Offset<Blob> bwGateWeight) {
fbb_.AddOffset(RNNParam::VT_BWGATEWEIGHT, bwGateWeight);
}
@@ -2741,6 +2768,9 @@ struct RNNParamBuilder {
void add_bwCandidateBias(flatbuffers::Offset<Blob> bwCandidateBias) {
fbb_.AddOffset(RNNParam::VT_BWCANDIDATEBIAS, bwCandidateBias);
}
void add_bwRecurrentBias(flatbuffers::Offset<Blob> bwRecurrentBias) {
fbb_.AddOffset(RNNParam::VT_BWRECURRENTBIAS, bwRecurrentBias);
}
explicit RNNParamBuilder(flatbuffers::FlatBufferBuilder &_fbb)
: fbb_(_fbb) {
start_ = fbb_.StartTable();
@@ -2757,26 +2787,32 @@ inline flatbuffers::Offset<RNNParam> CreateRNNParam(
flatbuffers::FlatBufferBuilder &_fbb,
int32_t numUnits = 0,
bool isBidirectionalRNN = false,
bool linearBeforeReset = false,
bool keepAllOutputs = false,
flatbuffers::Offset<Blob> fwGateWeight = 0,
flatbuffers::Offset<Blob> fwGateBias = 0,
flatbuffers::Offset<Blob> fwCandidateWeight = 0,
flatbuffers::Offset<Blob> fwCandidateBias = 0,
flatbuffers::Offset<Blob> fwRecurrentBias = 0,
flatbuffers::Offset<Blob> bwGateWeight = 0,
flatbuffers::Offset<Blob> bwGateBias = 0,
flatbuffers::Offset<Blob> bwCandidateWeight = 0,
flatbuffers::Offset<Blob> bwCandidateBias = 0) {
flatbuffers::Offset<Blob> bwCandidateBias = 0,
flatbuffers::Offset<Blob> bwRecurrentBias = 0) {
RNNParamBuilder builder_(_fbb);
builder_.add_bwRecurrentBias(bwRecurrentBias);
builder_.add_bwCandidateBias(bwCandidateBias);
builder_.add_bwCandidateWeight(bwCandidateWeight);
builder_.add_bwGateBias(bwGateBias);
builder_.add_bwGateWeight(bwGateWeight);
builder_.add_fwRecurrentBias(fwRecurrentBias);
builder_.add_fwCandidateBias(fwCandidateBias);
builder_.add_fwCandidateWeight(fwCandidateWeight);
builder_.add_fwGateBias(fwGateBias);
builder_.add_fwGateWeight(fwGateWeight);
builder_.add_numUnits(numUnits);
builder_.add_keepAllOutputs(keepAllOutputs);
builder_.add_linearBeforeReset(linearBeforeReset);
builder_.add_isBidirectionalRNN(isBidirectionalRNN);
return builder_.Finish();
}
@@ -4490,15 +4526,18 @@ inline void RNNParam::UnPackTo(RNNParamT *_o, const flatbuffers::resolver_functi
(void)_resolver;
{ auto _e = numUnits(); _o->numUnits = _e; };
{ auto _e = isBidirectionalRNN(); _o->isBidirectionalRNN = _e; };
{ auto _e = linearBeforeReset(); _o->linearBeforeReset = _e; };
{ auto _e = keepAllOutputs(); _o->keepAllOutputs = _e; };
{ auto _e = fwGateWeight(); if (_e) _o->fwGateWeight = std::unique_ptr<BlobT>(_e->UnPack(_resolver)); };
{ auto _e = fwGateBias(); if (_e) _o->fwGateBias = std::unique_ptr<BlobT>(_e->UnPack(_resolver)); };
{ auto _e = fwCandidateWeight(); if (_e) _o->fwCandidateWeight = std::unique_ptr<BlobT>(_e->UnPack(_resolver)); };
{ auto _e = fwCandidateBias(); if (_e) _o->fwCandidateBias = std::unique_ptr<BlobT>(_e->UnPack(_resolver)); };
{ auto _e = fwRecurrentBias(); if (_e) _o->fwRecurrentBias = std::unique_ptr<BlobT>(_e->UnPack(_resolver)); };
{ auto _e = bwGateWeight(); if (_e) _o->bwGateWeight = std::unique_ptr<BlobT>(_e->UnPack(_resolver)); };
{ auto _e = bwGateBias(); if (_e) _o->bwGateBias = std::unique_ptr<BlobT>(_e->UnPack(_resolver)); };
{ auto _e = bwCandidateWeight(); if (_e) _o->bwCandidateWeight = std::unique_ptr<BlobT>(_e->UnPack(_resolver)); };
{ auto _e = bwCandidateBias(); if (_e) _o->bwCandidateBias = std::unique_ptr<BlobT>(_e->UnPack(_resolver)); };
{ auto _e = bwRecurrentBias(); if (_e) _o->bwRecurrentBias = std::unique_ptr<BlobT>(_e->UnPack(_resolver)); };
}

inline flatbuffers::Offset<RNNParam> RNNParam::Pack(flatbuffers::FlatBufferBuilder &_fbb, const RNNParamT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
@@ -4511,28 +4550,34 @@ inline flatbuffers::Offset<RNNParam> CreateRNNParam(flatbuffers::FlatBufferBuild
struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const RNNParamT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
auto _numUnits = _o->numUnits;
auto _isBidirectionalRNN = _o->isBidirectionalRNN;
auto _linearBeforeReset = _o->linearBeforeReset;
auto _keepAllOutputs = _o->keepAllOutputs;
auto _fwGateWeight = _o->fwGateWeight ? CreateBlob(_fbb, _o->fwGateWeight.get(), _rehasher) : 0;
auto _fwGateBias = _o->fwGateBias ? CreateBlob(_fbb, _o->fwGateBias.get(), _rehasher) : 0;
auto _fwCandidateWeight = _o->fwCandidateWeight ? CreateBlob(_fbb, _o->fwCandidateWeight.get(), _rehasher) : 0;
auto _fwCandidateBias = _o->fwCandidateBias ? CreateBlob(_fbb, _o->fwCandidateBias.get(), _rehasher) : 0;
auto _fwRecurrentBias = _o->fwRecurrentBias ? CreateBlob(_fbb, _o->fwRecurrentBias.get(), _rehasher) : 0;
auto _bwGateWeight = _o->bwGateWeight ? CreateBlob(_fbb, _o->bwGateWeight.get(), _rehasher) : 0;
auto _bwGateBias = _o->bwGateBias ? CreateBlob(_fbb, _o->bwGateBias.get(), _rehasher) : 0;
auto _bwCandidateWeight = _o->bwCandidateWeight ? CreateBlob(_fbb, _o->bwCandidateWeight.get(), _rehasher) : 0;
auto _bwCandidateBias = _o->bwCandidateBias ? CreateBlob(_fbb, _o->bwCandidateBias.get(), _rehasher) : 0;
auto _bwRecurrentBias = _o->bwRecurrentBias ? CreateBlob(_fbb, _o->bwRecurrentBias.get(), _rehasher) : 0;
return MNN::CreateRNNParam(
_fbb,
_numUnits,
_isBidirectionalRNN,
_linearBeforeReset,
_keepAllOutputs,
_fwGateWeight,
_fwGateBias,
_fwCandidateWeight,
_fwCandidateBias,
_fwRecurrentBias,
_bwGateWeight,
_bwGateBias,
_bwCandidateWeight,
_bwCandidateBias);
_bwCandidateBias,
_bwRecurrentBias);
}

inline BatchMatMulParamT *BatchMatMulParam::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
@@ -5562,6 +5607,9 @@ inline const flatbuffers::TypeTable *RNNParamTypeTable() {
{ flatbuffers::ET_INT, 0, -1 },
{ flatbuffers::ET_BOOL, 0, -1 },
{ flatbuffers::ET_BOOL, 0, -1 },
{ flatbuffers::ET_BOOL, 0, -1 },
{ flatbuffers::ET_SEQUENCE, 0, 0 },
{ flatbuffers::ET_SEQUENCE, 0, 0 },
{ flatbuffers::ET_SEQUENCE, 0, 0 },
{ flatbuffers::ET_SEQUENCE, 0, 0 },
{ flatbuffers::ET_SEQUENCE, 0, 0 },
@@ -5577,18 +5625,21 @@
static const char * const names[] = {
"numUnits",
"isBidirectionalRNN",
"linearBeforeReset",
"keepAllOutputs",
"fwGateWeight",
"fwGateBias",
"fwCandidateWeight",
"fwCandidateBias",
"fwRecurrentBias",
"bwGateWeight",
"bwGateBias",
"bwCandidateWeight",
"bwCandidateBias"
"bwCandidateBias",
"bwRecurrentBias"
};
static const flatbuffers::TypeTable tt = {
flatbuffers::ST_TABLE, 11, type_codes, type_refs, nullptr, names
flatbuffers::ST_TABLE, 14, type_codes, type_refs, nullptr, names
};
return &tt;
}
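
Unlike isGlobal above, the new RNNParam members (linearBeforeReset, fwRecurrentBias, bwRecurrentBias) are inserted mid-table, so every vtable offset from VT_KEEPALLOUTPUTS onward is renumbered (8 becomes 10, and so on up to 30). That makes this change binary-incompatible for already-serialized models, which would need to be reconverted against the new schema. A hedged sketch of writing the new layout through the generated object API; the field values are illustrative, and the optional recurrent-bias blobs are simply left unset:

#include "TensorflowOp_generated.h"

// Sketch only: pack an RNNParamT carrying the new linearBeforeReset flag.
flatbuffers::DetachedBuffer packRNNParam() {
    MNN::RNNParamT param;
    param.numUnits           = 128;   // illustrative value
    param.isBidirectionalRNN = false;
    param.linearBeforeReset  = true;  // new flag, presumably mirroring ONNX GRU's linear_before_reset
    param.keepAllOutputs     = true;
    // fwRecurrentBias / bwRecurrentBias (also new) stay null here; Pack
    // serializes absent blobs as missing fields, so readers get nullptr back.
    flatbuffers::FlatBufferBuilder fbb;
    fbb.Finish(MNN::RNNParam::Pack(fbb, &param));
    return fbb.Release();
}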
1 change: 1 addition & 0 deletions schema/default/CaffeOp.fbs
@@ -142,6 +142,7 @@ table Pool3D {

type:PoolType;
padType:PoolPadType;
isGlobal:bool=false;
}

table Relu {
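
This one-line schema addition is what drives all the regenerated Pool3D code above. Because the field is appended after the last existing field and given a default, the change is backward compatible: a reader built from the new schema finds no vtable entry for isGlobal in an old buffer and falls back to false. A minimal sketch of the read side, assuming buf points at a buffer whose root is a Pool3D table:

#include "CaffeOp_generated.h"

// Sketch only: buffers written before this commit report isGlobal == false.
bool poolIsGlobal(const void* buf) {
    auto pool = flatbuffers::GetRoot<MNN::Pool3D>(buf);
    return pool->isGlobal();
}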
(The remaining changed files in this commit are not shown here.)
