// blob: 17378d602ad77f5ca93f9423d702b8d1b3da79d9 [file] [log] [blame]
// automatically generated by the FlatBuffers compiler, do not modify
#ifndef FLATBUFFERS_GENERATED_SPARSETENSOR_ORG_APACHE_ARROW_FLATBUF_H_
#define FLATBUFFERS_GENERATED_SPARSETENSOR_ORG_APACHE_ARROW_FLATBUF_H_
#include "flatbuffers/flatbuffers.h"
// Ensure the included flatbuffers.h is the same version as when this file was
// generated, otherwise it may not be compatible.
static_assert(FLATBUFFERS_VERSION_MAJOR == 23 &&
FLATBUFFERS_VERSION_MINOR == 5 &&
FLATBUFFERS_VERSION_REVISION == 26,
"Non-compatible flatbuffers version included");
#include "Tensor_generated.h"
namespace org {
namespace apache {
namespace arrow {
namespace flatbuf {
struct SparseTensorIndexCOO;
struct SparseTensorIndexCOOBuilder;
struct SparseMatrixIndexCSX;
struct SparseMatrixIndexCSXBuilder;
struct SparseTensorIndexCSF;
struct SparseTensorIndexCSFBuilder;
struct SparseTensor;
struct SparseTensorBuilder;
/// Which axis, row or column, of a SparseMatrixIndexCSX is compressed.
/// Values are fixed by the Arrow SparseTensor schema — do not renumber.
enum class SparseMatrixCompressedAxis : int16_t {
  Row = 0,
  Column = 1,
  MIN = Row,     // smallest valid value, used for range checks
  MAX = Column   // largest valid value, used for range checks
};
/// Returns a reference to a table containing every SparseMatrixCompressedAxis
/// value, in declaration order.
inline const SparseMatrixCompressedAxis (&EnumValuesSparseMatrixCompressedAxis())[2] {
  static const SparseMatrixCompressedAxis kValues[] = {
    SparseMatrixCompressedAxis::Row,
    SparseMatrixCompressedAxis::Column
  };
  return kValues;
}
/// Returns a null-terminated table of enum value names, indexed by the
/// numeric enum value.
inline const char * const *EnumNamesSparseMatrixCompressedAxis() {
  static const char * const kNames[3] = {"Row", "Column", nullptr};
  return kNames;
}
/// Returns the name of `e`, or "" for out-of-range values (so a corrupt
/// tag never indexes past the name table).
inline const char *EnumNameSparseMatrixCompressedAxis(SparseMatrixCompressedAxis e) {
  const bool in_range = !::flatbuffers::IsOutRange(
      e, SparseMatrixCompressedAxis::Row, SparseMatrixCompressedAxis::Column);
  if (!in_range) return "";
  return EnumNamesSparseMatrixCompressedAxis()[static_cast<size_t>(e)];
}
/// Union discriminator for SparseTensor.sparseIndex: selects which sparse
/// index table (COO, CSX, or CSF) the union field holds.
/// Values are fixed by the Arrow SparseTensor schema — do not renumber.
enum class SparseTensorIndex : uint8_t {
  NONE = 0,                   // union not set
  SparseTensorIndexCOO = 1,
  SparseMatrixIndexCSX = 2,
  SparseTensorIndexCSF = 3,
  MIN = NONE,                 // smallest valid tag, used for range checks
  MAX = SparseTensorIndexCSF  // largest valid tag, used for range checks
};
/// Returns a reference to a table containing every SparseTensorIndex union
/// tag, in declaration order.
inline const SparseTensorIndex (&EnumValuesSparseTensorIndex())[4] {
  static const SparseTensorIndex kValues[] = {
    SparseTensorIndex::NONE,
    SparseTensorIndex::SparseTensorIndexCOO,
    SparseTensorIndex::SparseMatrixIndexCSX,
    SparseTensorIndex::SparseTensorIndexCSF
  };
  return kValues;
}
/// Returns a null-terminated table of union tag names, indexed by the
/// numeric tag value.
inline const char * const *EnumNamesSparseTensorIndex() {
  static const char * const kNames[5] = {
    "NONE", "SparseTensorIndexCOO", "SparseMatrixIndexCSX",
    "SparseTensorIndexCSF", nullptr
  };
  return kNames;
}
/// Returns the name of tag `e`, or "" for out-of-range values (so a corrupt
/// tag never indexes past the name table).
inline const char *EnumNameSparseTensorIndex(SparseTensorIndex e) {
  if (::flatbuffers::IsOutRange(e, SparseTensorIndex::NONE,
                                SparseTensorIndex::SparseTensorIndexCSF)) {
    return "";
  }
  return EnumNamesSparseTensorIndex()[static_cast<size_t>(e)];
}
/// Compile-time mapping from a union member table type to its
/// SparseTensorIndex tag. The primary template yields NONE for any type
/// that is not a member of the union.
template<typename T> struct SparseTensorIndexTraits {
  static const SparseTensorIndex enum_value = SparseTensorIndex::NONE;
};
template<> struct SparseTensorIndexTraits<org::apache::arrow::flatbuf::SparseTensorIndexCOO> {
  static const SparseTensorIndex enum_value = SparseTensorIndex::SparseTensorIndexCOO;
};
template<> struct SparseTensorIndexTraits<org::apache::arrow::flatbuf::SparseMatrixIndexCSX> {
  static const SparseTensorIndex enum_value = SparseTensorIndex::SparseMatrixIndexCSX;
};
template<> struct SparseTensorIndexTraits<org::apache::arrow::flatbuf::SparseTensorIndexCSF> {
  static const SparseTensorIndex enum_value = SparseTensorIndex::SparseTensorIndexCSF;
};
// Verifies a single SparseTensorIndex union value against its type tag.
// (Forward declaration; the definition is presumably generated later in
// this file, past this chunk — TODO confirm.)
bool VerifySparseTensorIndex(::flatbuffers::Verifier &verifier, const void *obj, SparseTensorIndex type);
// Verifies a vector of union values against the parallel vector of type tags.
bool VerifySparseTensorIndexVector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset<void>> *values, const ::flatbuffers::Vector<SparseTensorIndex> *types);
/// ----------------------------------------------------------------------
/// EXPERIMENTAL: Data structures for sparse tensors
/// Coordinate (COO) format of sparse tensor index.
///
/// COO's index list are represented as a NxM matrix,
/// where N is the number of non-zero values,
/// and M is the number of dimensions of a sparse tensor.
///
/// indicesBuffer stores the location and size of the data of this indices
/// matrix. The value type and the stride of the indices matrix is
/// specified in indicesType and indicesStrides fields.
///
/// For example, let X be a 2x3x4x5 tensor, and it has the following
/// 6 non-zero values:
/// ```text
/// X[0, 1, 2, 0] := 1
/// X[1, 1, 2, 3] := 2
/// X[0, 2, 1, 0] := 3
/// X[0, 1, 3, 0] := 4
/// X[0, 1, 2, 1] := 5
/// X[1, 2, 0, 4] := 6
/// ```
/// In COO format, the index matrix of X is the following 4x6 matrix:
/// ```text
/// [[0, 0, 0, 0, 1, 1],
/// [1, 1, 1, 2, 1, 2],
/// [2, 2, 3, 1, 2, 0],
/// [0, 1, 0, 0, 3, 4]]
/// ```
/// When isCanonical is true, the indices is sorted in lexicographical order
/// (row-major order), and it does not have duplicated entries. Otherwise,
/// the indices may not be sorted, or may have duplicated entries.
struct SparseTensorIndexCOO FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
  typedef SparseTensorIndexCOOBuilder Builder;
  // Byte offsets of each field's slot in this table's vtable.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_INDICESTYPE = 4,
    VT_INDICESSTRIDES = 6,
    VT_INDICESBUFFER = 8,
    VT_ISCANONICAL = 10
  };
  /// The type of values in indicesBuffer
  const org::apache::arrow::flatbuf::Int *indicesType() const {
    return GetPointer<const org::apache::arrow::flatbuf::Int *>(VT_INDICESTYPE);
  }
  /// Non-negative byte offsets to advance one value cell along each dimension
  /// If omitted, default to row-major order (C-like).
  const ::flatbuffers::Vector<int64_t> *indicesStrides() const {
    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_INDICESSTRIDES);
  }
  /// The location and size of the indices matrix's data
  const org::apache::arrow::flatbuf::Buffer *indicesBuffer() const {
    return GetStruct<const org::apache::arrow::flatbuf::Buffer *>(VT_INDICESBUFFER);
  }
  /// This flag is true if and only if the indices matrix is sorted in
  /// row-major order, and does not have duplicated entries.
  /// This sort order is the same as of Tensorflow's SparseTensor,
  /// but it is inverse order of SciPy's canonical coo_matrix
  /// (SciPy employs column-major order for its coo_matrix).
  bool isCanonical() const {
    return GetField<uint8_t>(VT_ISCANONICAL, 0) != 0;
  }
  // Structural validation of this table within a buffer.
  // indicesType and indicesBuffer are required fields; indicesStrides and
  // isCanonical are optional.
  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffsetRequired(verifier, VT_INDICESTYPE) &&
           verifier.VerifyTable(indicesType()) &&
           VerifyOffset(verifier, VT_INDICESSTRIDES) &&
           verifier.VerifyVector(indicesStrides()) &&
           VerifyFieldRequired<org::apache::arrow::flatbuf::Buffer>(verifier, VT_INDICESBUFFER, 8) &&
           VerifyField<uint8_t>(verifier, VT_ISCANONICAL, 1) &&
           verifier.EndTable();
  }
};
// Incremental builder for SparseTensorIndexCOO tables.
// Call the add_* setters in any combination, then Finish() exactly once.
struct SparseTensorIndexCOOBuilder {
  typedef SparseTensorIndexCOO Table;
  ::flatbuffers::FlatBufferBuilder &fbb_;  // target buffer (not owned)
  ::flatbuffers::uoffset_t start_;         // where this table's fields began
  void add_indicesType(::flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType) {
    fbb_.AddOffset(SparseTensorIndexCOO::VT_INDICESTYPE, indicesType);
  }
  void add_indicesStrides(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> indicesStrides) {
    fbb_.AddOffset(SparseTensorIndexCOO::VT_INDICESSTRIDES, indicesStrides);
  }
  void add_indicesBuffer(const org::apache::arrow::flatbuf::Buffer *indicesBuffer) {
    fbb_.AddStruct(SparseTensorIndexCOO::VT_INDICESBUFFER, indicesBuffer);
  }
  void add_isCanonical(bool isCanonical) {
    // Stored as a uint8_t with default 0 (false).
    fbb_.AddElement<uint8_t>(SparseTensorIndexCOO::VT_ISCANONICAL, static_cast<uint8_t>(isCanonical), 0);
  }
  explicit SparseTensorIndexCOOBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
      : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Ends the table and checks that the schema-required fields
  // (indicesType, indicesBuffer) were added.
  ::flatbuffers::Offset<SparseTensorIndexCOO> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = ::flatbuffers::Offset<SparseTensorIndexCOO>(end);
    fbb_.Required(o, SparseTensorIndexCOO::VT_INDICESTYPE);
    fbb_.Required(o, SparseTensorIndexCOO::VT_INDICESBUFFER);
    return o;
  }
};
/// Builds a SparseTensorIndexCOO table from already-serialized parts.
/// indicesType and indicesBuffer are required by the schema (enforced in
/// Builder::Finish); indicesStrides and isCanonical are optional.
/// NOTE: the add_* call order below follows the flatc-generated layout;
/// do not reorder.
inline ::flatbuffers::Offset<SparseTensorIndexCOO> CreateSparseTensorIndexCOO(
    ::flatbuffers::FlatBufferBuilder &_fbb,
    ::flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType = 0,
    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> indicesStrides = 0,
    const org::apache::arrow::flatbuf::Buffer *indicesBuffer = nullptr,
    bool isCanonical = false) {
  SparseTensorIndexCOOBuilder builder_(_fbb);
  builder_.add_indicesBuffer(indicesBuffer);
  builder_.add_indicesStrides(indicesStrides);
  builder_.add_indicesType(indicesType);
  builder_.add_isCanonical(isCanonical);
  return builder_.Finish();
}
/// Convenience overload of CreateSparseTensorIndexCOO that serializes a
/// std::vector of strides for the caller. A null `indicesStrides` becomes a
/// zero (absent) offset.
inline ::flatbuffers::Offset<SparseTensorIndexCOO> CreateSparseTensorIndexCOODirect(
    ::flatbuffers::FlatBufferBuilder &_fbb,
    ::flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType = 0,
    const std::vector<int64_t> *indicesStrides = nullptr,
    const org::apache::arrow::flatbuf::Buffer *indicesBuffer = nullptr,
    bool isCanonical = false) {
  // The strides vector must be serialized before the table is started.
  ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> strides_offset = 0;
  if (indicesStrides) {
    strides_offset = _fbb.CreateVector<int64_t>(*indicesStrides);
  }
  return org::apache::arrow::flatbuf::CreateSparseTensorIndexCOO(
      _fbb, indicesType, strides_offset, indicesBuffer, isCanonical);
}
/// Compressed Sparse format, that is matrix-specific.
struct SparseMatrixIndexCSX FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
  typedef SparseMatrixIndexCSXBuilder Builder;
  // Byte offsets of each field's slot in this table's vtable.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_COMPRESSEDAXIS = 4,
    VT_INDPTRTYPE = 6,
    VT_INDPTRBUFFER = 8,
    VT_INDICESTYPE = 10,
    VT_INDICESBUFFER = 12
  };
  /// Which axis, row or column, is compressed
  org::apache::arrow::flatbuf::SparseMatrixCompressedAxis compressedAxis() const {
    return static_cast<org::apache::arrow::flatbuf::SparseMatrixCompressedAxis>(GetField<int16_t>(VT_COMPRESSEDAXIS, 0));
  }
  /// The type of values in indptrBuffer
  const org::apache::arrow::flatbuf::Int *indptrType() const {
    return GetPointer<const org::apache::arrow::flatbuf::Int *>(VT_INDPTRTYPE);
  }
  /// indptrBuffer stores the location and size of indptr array that
  /// represents the range of the rows.
  /// The i-th row spans from `indptr[i]` to `indptr[i+1]` in the data.
  /// The length of this array is 1 + (the number of rows), and the type
  /// of index value is long.
  ///
  /// For example, let X be the following 6x4 matrix:
  /// ```text
  /// X := [[0, 1, 2, 0],
  ///       [0, 0, 3, 0],
  ///       [0, 4, 0, 5],
  ///       [0, 0, 0, 0],
  ///       [6, 0, 7, 8],
  ///       [0, 9, 0, 0]].
  /// ```
  /// The array of non-zero values in X is:
  /// ```text
  /// values(X) = [1, 2, 3, 4, 5, 6, 7, 8, 9].
  /// ```
  /// And the indptr of X is:
  /// ```text
  /// indptr(X) = [0, 2, 3, 5, 5, 8, 10].
  /// ```
  const org::apache::arrow::flatbuf::Buffer *indptrBuffer() const {
    return GetStruct<const org::apache::arrow::flatbuf::Buffer *>(VT_INDPTRBUFFER);
  }
  /// The type of values in indicesBuffer
  const org::apache::arrow::flatbuf::Int *indicesType() const {
    return GetPointer<const org::apache::arrow::flatbuf::Int *>(VT_INDICESTYPE);
  }
  /// indicesBuffer stores the location and size of the array that
  /// contains the column indices of the corresponding non-zero values.
  /// The type of index value is long.
  ///
  /// For example, the indices of the above X is:
  /// ```text
  /// indices(X) = [1, 2, 2, 1, 3, 0, 2, 3, 1].
  /// ```
  /// Note that the indices are sorted in lexicographical order for each row.
  const org::apache::arrow::flatbuf::Buffer *indicesBuffer() const {
    return GetStruct<const org::apache::arrow::flatbuf::Buffer *>(VT_INDICESBUFFER);
  }
  // Structural validation of this table within a buffer.
  // All fields except compressedAxis are required.
  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int16_t>(verifier, VT_COMPRESSEDAXIS, 2) &&
           VerifyOffsetRequired(verifier, VT_INDPTRTYPE) &&
           verifier.VerifyTable(indptrType()) &&
           VerifyFieldRequired<org::apache::arrow::flatbuf::Buffer>(verifier, VT_INDPTRBUFFER, 8) &&
           VerifyOffsetRequired(verifier, VT_INDICESTYPE) &&
           verifier.VerifyTable(indicesType()) &&
           VerifyFieldRequired<org::apache::arrow::flatbuf::Buffer>(verifier, VT_INDICESBUFFER, 8) &&
           verifier.EndTable();
  }
};
// Incremental builder for SparseMatrixIndexCSX tables.
// Call the add_* setters in any combination, then Finish() exactly once.
struct SparseMatrixIndexCSXBuilder {
  typedef SparseMatrixIndexCSX Table;
  ::flatbuffers::FlatBufferBuilder &fbb_;  // target buffer (not owned)
  ::flatbuffers::uoffset_t start_;         // where this table's fields began
  void add_compressedAxis(org::apache::arrow::flatbuf::SparseMatrixCompressedAxis compressedAxis) {
    // Stored as an int16_t with default 0 (Row).
    fbb_.AddElement<int16_t>(SparseMatrixIndexCSX::VT_COMPRESSEDAXIS, static_cast<int16_t>(compressedAxis), 0);
  }
  void add_indptrType(::flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indptrType) {
    fbb_.AddOffset(SparseMatrixIndexCSX::VT_INDPTRTYPE, indptrType);
  }
  void add_indptrBuffer(const org::apache::arrow::flatbuf::Buffer *indptrBuffer) {
    fbb_.AddStruct(SparseMatrixIndexCSX::VT_INDPTRBUFFER, indptrBuffer);
  }
  void add_indicesType(::flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType) {
    fbb_.AddOffset(SparseMatrixIndexCSX::VT_INDICESTYPE, indicesType);
  }
  void add_indicesBuffer(const org::apache::arrow::flatbuf::Buffer *indicesBuffer) {
    fbb_.AddStruct(SparseMatrixIndexCSX::VT_INDICESBUFFER, indicesBuffer);
  }
  explicit SparseMatrixIndexCSXBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
      : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Ends the table and checks that the schema-required fields
  // (indptrType, indptrBuffer, indicesType, indicesBuffer) were added.
  ::flatbuffers::Offset<SparseMatrixIndexCSX> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = ::flatbuffers::Offset<SparseMatrixIndexCSX>(end);
    fbb_.Required(o, SparseMatrixIndexCSX::VT_INDPTRTYPE);
    fbb_.Required(o, SparseMatrixIndexCSX::VT_INDPTRBUFFER);
    fbb_.Required(o, SparseMatrixIndexCSX::VT_INDICESTYPE);
    fbb_.Required(o, SparseMatrixIndexCSX::VT_INDICESBUFFER);
    return o;
  }
};
/// Builds a SparseMatrixIndexCSX table from already-serialized parts.
/// All fields except compressedAxis are required by the schema (enforced
/// in Builder::Finish).
/// NOTE: the add_* call order below follows the flatc-generated layout;
/// do not reorder.
inline ::flatbuffers::Offset<SparseMatrixIndexCSX> CreateSparseMatrixIndexCSX(
    ::flatbuffers::FlatBufferBuilder &_fbb,
    org::apache::arrow::flatbuf::SparseMatrixCompressedAxis compressedAxis = org::apache::arrow::flatbuf::SparseMatrixCompressedAxis::Row,
    ::flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indptrType = 0,
    const org::apache::arrow::flatbuf::Buffer *indptrBuffer = nullptr,
    ::flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType = 0,
    const org::apache::arrow::flatbuf::Buffer *indicesBuffer = nullptr) {
  SparseMatrixIndexCSXBuilder builder_(_fbb);
  builder_.add_indicesBuffer(indicesBuffer);
  builder_.add_indicesType(indicesType);
  builder_.add_indptrBuffer(indptrBuffer);
  builder_.add_indptrType(indptrType);
  builder_.add_compressedAxis(compressedAxis);
  return builder_.Finish();
}
/// Compressed Sparse Fiber (CSF) sparse tensor index.
struct SparseTensorIndexCSF FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
  typedef SparseTensorIndexCSFBuilder Builder;
  // Byte offsets of each field's slot in this table's vtable.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_INDPTRTYPE = 4,
    VT_INDPTRBUFFERS = 6,
    VT_INDICESTYPE = 8,
    VT_INDICESBUFFERS = 10,
    VT_AXISORDER = 12
  };
  /// CSF is a generalization of compressed sparse row (CSR) index.
  /// See [smith2017knl](http://shaden.io/pub-files/smith2017knl.pdf)
  ///
  /// CSF index recursively compresses each dimension of a tensor into a set
  /// of prefix trees. Each path from a root to leaf forms one tensor
  /// non-zero index. CSF is implemented with two arrays of buffers and one
  /// arrays of integers.
  ///
  /// For example, let X be a 2x3x4x5 tensor and let it have the following
  /// 8 non-zero values:
  /// ```text
  ///   X[0, 0, 0, 1] := 1
  ///   X[0, 0, 0, 2] := 2
  ///   X[0, 1, 0, 0] := 3
  ///   X[0, 1, 0, 2] := 4
  ///   X[0, 1, 1, 0] := 5
  ///   X[1, 1, 1, 0] := 6
  ///   X[1, 1, 1, 1] := 7
  ///   X[1, 1, 1, 2] := 8
  /// ```
  /// As a prefix tree this would be represented as:
  /// ```text
  ///         0          1
  ///        / \         |
  ///       0   1        1
  ///      /   / \       |
  ///     0   0   1      1
  ///    /|  /|   |    /| |
  ///   1 2 0 2   0   0 1 2
  /// ```
  /// The type of values in indptrBuffers
  const org::apache::arrow::flatbuf::Int *indptrType() const {
    return GetPointer<const org::apache::arrow::flatbuf::Int *>(VT_INDPTRTYPE);
  }
  /// indptrBuffers stores the sparsity structure.
  /// Each two consecutive dimensions in a tensor correspond to a buffer in
  /// indptrBuffers. A pair of consecutive values at `indptrBuffers[dim][i]`
  /// and `indptrBuffers[dim][i + 1]` signify a range of nodes in
  /// `indicesBuffers[dim + 1]` who are children of `indicesBuffers[dim][i]` node.
  ///
  /// For example, the indptrBuffers for the above X is:
  /// ```text
  ///   indptrBuffer(X) = [
  ///                       [0, 2, 3],
  ///                       [0, 1, 3, 4],
  ///                       [0, 2, 4, 5, 8]
  ///                     ].
  /// ```
  const ::flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *> *indptrBuffers() const {
    return GetPointer<const ::flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *> *>(VT_INDPTRBUFFERS);
  }
  /// The type of values in indicesBuffers
  const org::apache::arrow::flatbuf::Int *indicesType() const {
    return GetPointer<const org::apache::arrow::flatbuf::Int *>(VT_INDICESTYPE);
  }
  /// indicesBuffers stores values of nodes.
  /// Each tensor dimension corresponds to a buffer in indicesBuffers.
  /// For example, the indicesBuffers for the above X is:
  /// ```text
  ///   indicesBuffer(X) = [
  ///                        [0, 1],
  ///                        [0, 1, 1],
  ///                        [0, 0, 1, 1],
  ///                        [1, 2, 0, 2, 0, 0, 1, 2]
  ///                      ].
  /// ```
  const ::flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *> *indicesBuffers() const {
    return GetPointer<const ::flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *> *>(VT_INDICESBUFFERS);
  }
  /// axisOrder stores the sequence in which dimensions were traversed to
  /// produce the prefix tree.
  /// For example, the axisOrder for the above X is:
  /// ```text
  ///   axisOrder(X) = [0, 1, 2, 3].
  /// ```
  const ::flatbuffers::Vector<int32_t> *axisOrder() const {
    return GetPointer<const ::flatbuffers::Vector<int32_t> *>(VT_AXISORDER);
  }
  // Structural validation of this table within a buffer.
  // Every field of this table is required.
  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffsetRequired(verifier, VT_INDPTRTYPE) &&
           verifier.VerifyTable(indptrType()) &&
           VerifyOffsetRequired(verifier, VT_INDPTRBUFFERS) &&
           verifier.VerifyVector(indptrBuffers()) &&
           VerifyOffsetRequired(verifier, VT_INDICESTYPE) &&
           verifier.VerifyTable(indicesType()) &&
           VerifyOffsetRequired(verifier, VT_INDICESBUFFERS) &&
           verifier.VerifyVector(indicesBuffers()) &&
           VerifyOffsetRequired(verifier, VT_AXISORDER) &&
           verifier.VerifyVector(axisOrder()) &&
           verifier.EndTable();
  }
};
// Incremental builder for SparseTensorIndexCSF tables.
// Call the add_* setters in any combination, then Finish() exactly once.
struct SparseTensorIndexCSFBuilder {
  typedef SparseTensorIndexCSF Table;
  ::flatbuffers::FlatBufferBuilder &fbb_;  // target buffer (not owned)
  ::flatbuffers::uoffset_t start_;         // where this table's fields began
  void add_indptrType(::flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indptrType) {
    fbb_.AddOffset(SparseTensorIndexCSF::VT_INDPTRTYPE, indptrType);
  }
  void add_indptrBuffers(::flatbuffers::Offset<::flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *>> indptrBuffers) {
    fbb_.AddOffset(SparseTensorIndexCSF::VT_INDPTRBUFFERS, indptrBuffers);
  }
  void add_indicesType(::flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType) {
    fbb_.AddOffset(SparseTensorIndexCSF::VT_INDICESTYPE, indicesType);
  }
  void add_indicesBuffers(::flatbuffers::Offset<::flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *>> indicesBuffers) {
    fbb_.AddOffset(SparseTensorIndexCSF::VT_INDICESBUFFERS, indicesBuffers);
  }
  void add_axisOrder(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> axisOrder) {
    fbb_.AddOffset(SparseTensorIndexCSF::VT_AXISORDER, axisOrder);
  }
  explicit SparseTensorIndexCSFBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
      : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Ends the table and checks that all five schema-required fields were added.
  ::flatbuffers::Offset<SparseTensorIndexCSF> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = ::flatbuffers::Offset<SparseTensorIndexCSF>(end);
    fbb_.Required(o, SparseTensorIndexCSF::VT_INDPTRTYPE);
    fbb_.Required(o, SparseTensorIndexCSF::VT_INDPTRBUFFERS);
    fbb_.Required(o, SparseTensorIndexCSF::VT_INDICESTYPE);
    fbb_.Required(o, SparseTensorIndexCSF::VT_INDICESBUFFERS);
    fbb_.Required(o, SparseTensorIndexCSF::VT_AXISORDER);
    return o;
  }
};
/// Builds a SparseTensorIndexCSF table from already-serialized parts.
/// Every field is required by the schema (enforced in Builder::Finish).
/// NOTE: the add_* call order below follows the flatc-generated layout;
/// do not reorder.
inline ::flatbuffers::Offset<SparseTensorIndexCSF> CreateSparseTensorIndexCSF(
    ::flatbuffers::FlatBufferBuilder &_fbb,
    ::flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indptrType = 0,
    ::flatbuffers::Offset<::flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *>> indptrBuffers = 0,
    ::flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType = 0,
    ::flatbuffers::Offset<::flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *>> indicesBuffers = 0,
    ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> axisOrder = 0) {
  SparseTensorIndexCSFBuilder builder_(_fbb);
  builder_.add_axisOrder(axisOrder);
  builder_.add_indicesBuffers(indicesBuffers);
  builder_.add_indicesType(indicesType);
  builder_.add_indptrBuffers(indptrBuffers);
  builder_.add_indptrType(indptrType);
  return builder_.Finish();
}
/// Convenience overload of CreateSparseTensorIndexCSF that serializes the
/// std::vector arguments for the caller. A null pointer becomes a zero
/// (absent) offset.
inline ::flatbuffers::Offset<SparseTensorIndexCSF> CreateSparseTensorIndexCSFDirect(
    ::flatbuffers::FlatBufferBuilder &_fbb,
    ::flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indptrType = 0,
    const std::vector<org::apache::arrow::flatbuf::Buffer> *indptrBuffers = nullptr,
    ::flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType = 0,
    const std::vector<org::apache::arrow::flatbuf::Buffer> *indicesBuffers = nullptr,
    const std::vector<int32_t> *axisOrder = nullptr) {
  // Vectors are serialized in the same order as the flatc-generated code
  // (indptr, indices, axisOrder), before the table itself is started.
  ::flatbuffers::Offset<::flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *>> indptr_vec = 0;
  if (indptrBuffers) {
    indptr_vec = _fbb.CreateVectorOfStructs<org::apache::arrow::flatbuf::Buffer>(*indptrBuffers);
  }
  ::flatbuffers::Offset<::flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *>> indices_vec = 0;
  if (indicesBuffers) {
    indices_vec = _fbb.CreateVectorOfStructs<org::apache::arrow::flatbuf::Buffer>(*indicesBuffers);
  }
  ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> axis_vec = 0;
  if (axisOrder) {
    axis_vec = _fbb.CreateVector<int32_t>(*axisOrder);
  }
  return org::apache::arrow::flatbuf::CreateSparseTensorIndexCSF(
      _fbb, indptrType, indptr_vec, indicesType, indices_vec, axis_vec);
}
/// A sparse tensor: the logical value type, the tensor shape, the number of
/// non-zero values, a sparse index (COO / CSX / CSF union), and the buffer
/// holding the non-zero data.
struct SparseTensor FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
  typedef SparseTensorBuilder Builder;
  // Byte offsets of each field's slot in this table's vtable.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_TYPE_TYPE = 4,
    VT_TYPE = 6,
    VT_SHAPE = 8,
    VT_NON_ZERO_LENGTH = 10,
    VT_SPARSEINDEX_TYPE = 12,
    VT_SPARSEINDEX = 14,
    VT_DATA = 16
  };
  // Discriminator for the `type` union (see Schema's Type union).
  org::apache::arrow::flatbuf::Type type_type() const {
    return static_cast<org::apache::arrow::flatbuf::Type>(GetField<uint8_t>(VT_TYPE_TYPE, 0));
  }
  /// The type of data contained in a value cell.
  /// Currently only fixed-width value types are supported,
  /// no strings or nested types.
  const void *type() const {
    return GetPointer<const void *>(VT_TYPE);
  }
  // Typed union accessor; specializations are defined after this struct.
  template<typename T> const T *type_as() const;
  // Union-member accessors: each returns the `type` table cast to the
  // requested member, or nullptr when type_type() holds a different member.
  const org::apache::arrow::flatbuf::Null *type_as_Null() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Null ? static_cast<const org::apache::arrow::flatbuf::Null *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Int *type_as_Int() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Int ? static_cast<const org::apache::arrow::flatbuf::Int *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::FloatingPoint *type_as_FloatingPoint() const {
    return type_type() == org::apache::arrow::flatbuf::Type::FloatingPoint ? static_cast<const org::apache::arrow::flatbuf::FloatingPoint *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Binary *type_as_Binary() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Binary ? static_cast<const org::apache::arrow::flatbuf::Binary *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Utf8 *type_as_Utf8() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Utf8 ? static_cast<const org::apache::arrow::flatbuf::Utf8 *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Bool *type_as_Bool() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Bool ? static_cast<const org::apache::arrow::flatbuf::Bool *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Decimal *type_as_Decimal() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Decimal ? static_cast<const org::apache::arrow::flatbuf::Decimal *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Date *type_as_Date() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Date ? static_cast<const org::apache::arrow::flatbuf::Date *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Time *type_as_Time() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Time ? static_cast<const org::apache::arrow::flatbuf::Time *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Timestamp *type_as_Timestamp() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Timestamp ? static_cast<const org::apache::arrow::flatbuf::Timestamp *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Interval *type_as_Interval() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Interval ? static_cast<const org::apache::arrow::flatbuf::Interval *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::List *type_as_List() const {
    return type_type() == org::apache::arrow::flatbuf::Type::List ? static_cast<const org::apache::arrow::flatbuf::List *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Struct_ *type_as_Struct_() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Struct_ ? static_cast<const org::apache::arrow::flatbuf::Struct_ *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Union *type_as_Union() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Union ? static_cast<const org::apache::arrow::flatbuf::Union *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::FixedSizeBinary *type_as_FixedSizeBinary() const {
    return type_type() == org::apache::arrow::flatbuf::Type::FixedSizeBinary ? static_cast<const org::apache::arrow::flatbuf::FixedSizeBinary *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::FixedSizeList *type_as_FixedSizeList() const {
    return type_type() == org::apache::arrow::flatbuf::Type::FixedSizeList ? static_cast<const org::apache::arrow::flatbuf::FixedSizeList *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Map *type_as_Map() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Map ? static_cast<const org::apache::arrow::flatbuf::Map *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Duration *type_as_Duration() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Duration ? static_cast<const org::apache::arrow::flatbuf::Duration *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::LargeBinary *type_as_LargeBinary() const {
    return type_type() == org::apache::arrow::flatbuf::Type::LargeBinary ? static_cast<const org::apache::arrow::flatbuf::LargeBinary *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::LargeUtf8 *type_as_LargeUtf8() const {
    return type_type() == org::apache::arrow::flatbuf::Type::LargeUtf8 ? static_cast<const org::apache::arrow::flatbuf::LargeUtf8 *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::LargeList *type_as_LargeList() const {
    return type_type() == org::apache::arrow::flatbuf::Type::LargeList ? static_cast<const org::apache::arrow::flatbuf::LargeList *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::RunEndEncoded *type_as_RunEndEncoded() const {
    return type_type() == org::apache::arrow::flatbuf::Type::RunEndEncoded ? static_cast<const org::apache::arrow::flatbuf::RunEndEncoded *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::BinaryView *type_as_BinaryView() const {
    return type_type() == org::apache::arrow::flatbuf::Type::BinaryView ? static_cast<const org::apache::arrow::flatbuf::BinaryView *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Utf8View *type_as_Utf8View() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Utf8View ? static_cast<const org::apache::arrow::flatbuf::Utf8View *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::ListView *type_as_ListView() const {
    return type_type() == org::apache::arrow::flatbuf::Type::ListView ? static_cast<const org::apache::arrow::flatbuf::ListView *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::LargeListView *type_as_LargeListView() const {
    return type_type() == org::apache::arrow::flatbuf::Type::LargeListView ? static_cast<const org::apache::arrow::flatbuf::LargeListView *>(type()) : nullptr;
  }
  /// The dimensions of the tensor, optionally named.
  const ::flatbuffers::Vector<::flatbuffers::Offset<org::apache::arrow::flatbuf::TensorDim>> *shape() const {
    return GetPointer<const ::flatbuffers::Vector<::flatbuffers::Offset<org::apache::arrow::flatbuf::TensorDim>> *>(VT_SHAPE);
  }
  /// The number of non-zero values in a sparse tensor.
  int64_t non_zero_length() const {
    return GetField<int64_t>(VT_NON_ZERO_LENGTH, 0);
  }
  // Discriminator for the `sparseIndex` union.
  org::apache::arrow::flatbuf::SparseTensorIndex sparseIndex_type() const {
    return static_cast<org::apache::arrow::flatbuf::SparseTensorIndex>(GetField<uint8_t>(VT_SPARSEINDEX_TYPE, 0));
  }
  /// Sparse tensor index
  const void *sparseIndex() const {
    return GetPointer<const void *>(VT_SPARSEINDEX);
  }
  // Typed union accessor for sparseIndex; specializations follow the struct.
  template<typename T> const T *sparseIndex_as() const;
  // Union-member accessors: each returns the `sparseIndex` table cast to the
  // requested member, or nullptr when sparseIndex_type() holds another member.
  const org::apache::arrow::flatbuf::SparseTensorIndexCOO *sparseIndex_as_SparseTensorIndexCOO() const {
    return sparseIndex_type() == org::apache::arrow::flatbuf::SparseTensorIndex::SparseTensorIndexCOO ? static_cast<const org::apache::arrow::flatbuf::SparseTensorIndexCOO *>(sparseIndex()) : nullptr;
  }
  const org::apache::arrow::flatbuf::SparseMatrixIndexCSX *sparseIndex_as_SparseMatrixIndexCSX() const {
    return sparseIndex_type() == org::apache::arrow::flatbuf::SparseTensorIndex::SparseMatrixIndexCSX ? static_cast<const org::apache::arrow::flatbuf::SparseMatrixIndexCSX *>(sparseIndex()) : nullptr;
  }
  const org::apache::arrow::flatbuf::SparseTensorIndexCSF *sparseIndex_as_SparseTensorIndexCSF() const {
    return sparseIndex_type() == org::apache::arrow::flatbuf::SparseTensorIndex::SparseTensorIndexCSF ? static_cast<const org::apache::arrow::flatbuf::SparseTensorIndexCSF *>(sparseIndex()) : nullptr;
  }
  /// The location and size of the tensor's data
  const org::apache::arrow::flatbuf::Buffer *data() const {
    return GetStruct<const org::apache::arrow::flatbuf::Buffer *>(VT_DATA);
  }
  // Structural validation of this table within a buffer.
  // type, shape, sparseIndex, and data are required fields.
  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<uint8_t>(verifier, VT_TYPE_TYPE, 1) &&
           VerifyOffsetRequired(verifier, VT_TYPE) &&
           VerifyType(verifier, type(), type_type()) &&
           VerifyOffsetRequired(verifier, VT_SHAPE) &&
           verifier.VerifyVector(shape()) &&
           verifier.VerifyVectorOfTables(shape()) &&
           VerifyField<int64_t>(verifier, VT_NON_ZERO_LENGTH, 8) &&
           VerifyField<uint8_t>(verifier, VT_SPARSEINDEX_TYPE, 1) &&
           VerifyOffsetRequired(verifier, VT_SPARSEINDEX) &&
           VerifySparseTensorIndex(verifier, sparseIndex(), sparseIndex_type()) &&
           VerifyFieldRequired<org::apache::arrow::flatbuf::Buffer>(verifier, VT_DATA, 8) &&
           verifier.EndTable();
  }
};
// Typed accessors for the `type` union field of SparseTensor.
// Each specialization forwards to the corresponding generated type_as_X()
// helper, which returns the union payload as X* when type_type() matches,
// and nullptr otherwise. Allows `tensor->type_as<Int>()` style access.
template<> inline const org::apache::arrow::flatbuf::Null *SparseTensor::type_as<org::apache::arrow::flatbuf::Null>() const {
return type_as_Null();
}
template<> inline const org::apache::arrow::flatbuf::Int *SparseTensor::type_as<org::apache::arrow::flatbuf::Int>() const {
return type_as_Int();
}
template<> inline const org::apache::arrow::flatbuf::FloatingPoint *SparseTensor::type_as<org::apache::arrow::flatbuf::FloatingPoint>() const {
return type_as_FloatingPoint();
}
template<> inline const org::apache::arrow::flatbuf::Binary *SparseTensor::type_as<org::apache::arrow::flatbuf::Binary>() const {
return type_as_Binary();
}
template<> inline const org::apache::arrow::flatbuf::Utf8 *SparseTensor::type_as<org::apache::arrow::flatbuf::Utf8>() const {
return type_as_Utf8();
}
template<> inline const org::apache::arrow::flatbuf::Bool *SparseTensor::type_as<org::apache::arrow::flatbuf::Bool>() const {
return type_as_Bool();
}
template<> inline const org::apache::arrow::flatbuf::Decimal *SparseTensor::type_as<org::apache::arrow::flatbuf::Decimal>() const {
return type_as_Decimal();
}
template<> inline const org::apache::arrow::flatbuf::Date *SparseTensor::type_as<org::apache::arrow::flatbuf::Date>() const {
return type_as_Date();
}
template<> inline const org::apache::arrow::flatbuf::Time *SparseTensor::type_as<org::apache::arrow::flatbuf::Time>() const {
return type_as_Time();
}
template<> inline const org::apache::arrow::flatbuf::Timestamp *SparseTensor::type_as<org::apache::arrow::flatbuf::Timestamp>() const {
return type_as_Timestamp();
}
template<> inline const org::apache::arrow::flatbuf::Interval *SparseTensor::type_as<org::apache::arrow::flatbuf::Interval>() const {
return type_as_Interval();
}
template<> inline const org::apache::arrow::flatbuf::List *SparseTensor::type_as<org::apache::arrow::flatbuf::List>() const {
return type_as_List();
}
template<> inline const org::apache::arrow::flatbuf::Struct_ *SparseTensor::type_as<org::apache::arrow::flatbuf::Struct_>() const {
return type_as_Struct_();
}
template<> inline const org::apache::arrow::flatbuf::Union *SparseTensor::type_as<org::apache::arrow::flatbuf::Union>() const {
return type_as_Union();
}
template<> inline const org::apache::arrow::flatbuf::FixedSizeBinary *SparseTensor::type_as<org::apache::arrow::flatbuf::FixedSizeBinary>() const {
return type_as_FixedSizeBinary();
}
template<> inline const org::apache::arrow::flatbuf::FixedSizeList *SparseTensor::type_as<org::apache::arrow::flatbuf::FixedSizeList>() const {
return type_as_FixedSizeList();
}
template<> inline const org::apache::arrow::flatbuf::Map *SparseTensor::type_as<org::apache::arrow::flatbuf::Map>() const {
return type_as_Map();
}
template<> inline const org::apache::arrow::flatbuf::Duration *SparseTensor::type_as<org::apache::arrow::flatbuf::Duration>() const {
return type_as_Duration();
}
template<> inline const org::apache::arrow::flatbuf::LargeBinary *SparseTensor::type_as<org::apache::arrow::flatbuf::LargeBinary>() const {
return type_as_LargeBinary();
}
template<> inline const org::apache::arrow::flatbuf::LargeUtf8 *SparseTensor::type_as<org::apache::arrow::flatbuf::LargeUtf8>() const {
return type_as_LargeUtf8();
}
template<> inline const org::apache::arrow::flatbuf::LargeList *SparseTensor::type_as<org::apache::arrow::flatbuf::LargeList>() const {
return type_as_LargeList();
}
template<> inline const org::apache::arrow::flatbuf::RunEndEncoded *SparseTensor::type_as<org::apache::arrow::flatbuf::RunEndEncoded>() const {
return type_as_RunEndEncoded();
}
template<> inline const org::apache::arrow::flatbuf::BinaryView *SparseTensor::type_as<org::apache::arrow::flatbuf::BinaryView>() const {
return type_as_BinaryView();
}
template<> inline const org::apache::arrow::flatbuf::Utf8View *SparseTensor::type_as<org::apache::arrow::flatbuf::Utf8View>() const {
return type_as_Utf8View();
}
template<> inline const org::apache::arrow::flatbuf::ListView *SparseTensor::type_as<org::apache::arrow::flatbuf::ListView>() const {
return type_as_ListView();
}
template<> inline const org::apache::arrow::flatbuf::LargeListView *SparseTensor::type_as<org::apache::arrow::flatbuf::LargeListView>() const {
return type_as_LargeListView();
}
// Typed accessors for the `sparseIndex` union field (COO / CSX / CSF layouts).
template<> inline const org::apache::arrow::flatbuf::SparseTensorIndexCOO *SparseTensor::sparseIndex_as<org::apache::arrow::flatbuf::SparseTensorIndexCOO>() const {
return sparseIndex_as_SparseTensorIndexCOO();
}
template<> inline const org::apache::arrow::flatbuf::SparseMatrixIndexCSX *SparseTensor::sparseIndex_as<org::apache::arrow::flatbuf::SparseMatrixIndexCSX>() const {
return sparseIndex_as_SparseMatrixIndexCSX();
}
template<> inline const org::apache::arrow::flatbuf::SparseTensorIndexCSF *SparseTensor::sparseIndex_as<org::apache::arrow::flatbuf::SparseTensorIndexCSF>() const {
return sparseIndex_as_SparseTensorIndexCSF();
}
// Low-level builder for SparseTensor tables. Each add_* method writes one
// field into the table currently under construction in fbb_; Finish() closes
// the table and enforces the schema's `required` fields. Prefer the
// CreateSparseTensor() convenience function over using this directly.
struct SparseTensorBuilder {
typedef SparseTensor Table;
::flatbuffers::FlatBufferBuilder &fbb_;
// Offset of the table start returned by StartTable(), consumed by Finish().
::flatbuffers::uoffset_t start_;
// Union discriminant for `type`; stored as uint8_t with default 0 (NONE).
void add_type_type(org::apache::arrow::flatbuf::Type type_type) {
fbb_.AddElement<uint8_t>(SparseTensor::VT_TYPE_TYPE, static_cast<uint8_t>(type_type), 0);
}
// Union payload for `type`; must match the discriminant set above.
void add_type(::flatbuffers::Offset<void> type) {
fbb_.AddOffset(SparseTensor::VT_TYPE, type);
}
void add_shape(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<org::apache::arrow::flatbuf::TensorDim>>> shape) {
fbb_.AddOffset(SparseTensor::VT_SHAPE, shape);
}
void add_non_zero_length(int64_t non_zero_length) {
fbb_.AddElement<int64_t>(SparseTensor::VT_NON_ZERO_LENGTH, non_zero_length, 0);
}
// Union discriminant for `sparseIndex` (COO / CSX / CSF).
void add_sparseIndex_type(org::apache::arrow::flatbuf::SparseTensorIndex sparseIndex_type) {
fbb_.AddElement<uint8_t>(SparseTensor::VT_SPARSEINDEX_TYPE, static_cast<uint8_t>(sparseIndex_type), 0);
}
void add_sparseIndex(::flatbuffers::Offset<void> sparseIndex) {
fbb_.AddOffset(SparseTensor::VT_SPARSEINDEX, sparseIndex);
}
// `data` is a Buffer struct (inline value), hence AddStruct not AddOffset.
void add_data(const org::apache::arrow::flatbuf::Buffer *data) {
fbb_.AddStruct(SparseTensor::VT_DATA, data);
}
explicit SparseTensorBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
: fbb_(_fbb) {
start_ = fbb_.StartTable();
}
// Ends the table and asserts the schema-required fields were set; builders
// assert (in debug) if a required field is missing.
::flatbuffers::Offset<SparseTensor> Finish() {
const auto end = fbb_.EndTable(start_);
auto o = ::flatbuffers::Offset<SparseTensor>(end);
fbb_.Required(o, SparseTensor::VT_TYPE);
fbb_.Required(o, SparseTensor::VT_SHAPE);
fbb_.Required(o, SparseTensor::VT_SPARSEINDEX);
fbb_.Required(o, SparseTensor::VT_DATA);
return o;
}
};
// Convenience wrapper that builds a complete SparseTensor table in one call.
// `type`/`type_type` and `sparseIndex`/`sparseIndex_type` are union
// payload/discriminant pairs and must be set consistently by the caller.
// NOTE: fields are added largest-to-smallest (int64, then 4-byte offsets and
// the Buffer struct, then the uint8 discriminants) — the FlatBuffers
// generator's ordering to minimize vtable/table padding; do not reorder.
inline ::flatbuffers::Offset<SparseTensor> CreateSparseTensor(
::flatbuffers::FlatBufferBuilder &_fbb,
org::apache::arrow::flatbuf::Type type_type = org::apache::arrow::flatbuf::Type::NONE,
::flatbuffers::Offset<void> type = 0,
::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<org::apache::arrow::flatbuf::TensorDim>>> shape = 0,
int64_t non_zero_length = 0,
org::apache::arrow::flatbuf::SparseTensorIndex sparseIndex_type = org::apache::arrow::flatbuf::SparseTensorIndex::NONE,
::flatbuffers::Offset<void> sparseIndex = 0,
const org::apache::arrow::flatbuf::Buffer *data = nullptr) {
SparseTensorBuilder builder_(_fbb);
builder_.add_non_zero_length(non_zero_length);
builder_.add_data(data);
builder_.add_sparseIndex(sparseIndex);
builder_.add_shape(shape);
builder_.add_type(type);
builder_.add_sparseIndex_type(sparseIndex_type);
builder_.add_type_type(type_type);
return builder_.Finish();
}
// Variant of CreateSparseTensor that accepts the shape as a std::vector and
// serializes it into the builder before delegating. A null `shape` pointer
// yields a zero offset (field omitted), mirroring CreateSparseTensor's default.
inline ::flatbuffers::Offset<SparseTensor> CreateSparseTensorDirect(
    ::flatbuffers::FlatBufferBuilder &_fbb,
    org::apache::arrow::flatbuf::Type type_type = org::apache::arrow::flatbuf::Type::NONE,
    ::flatbuffers::Offset<void> type = 0,
    const std::vector<::flatbuffers::Offset<org::apache::arrow::flatbuf::TensorDim>> *shape = nullptr,
    int64_t non_zero_length = 0,
    org::apache::arrow::flatbuf::SparseTensorIndex sparseIndex_type = org::apache::arrow::flatbuf::SparseTensorIndex::NONE,
    ::flatbuffers::Offset<void> sparseIndex = 0,
    const org::apache::arrow::flatbuf::Buffer *data = nullptr) {
  // Child offsets must be written before the enclosing table is started.
  ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<org::apache::arrow::flatbuf::TensorDim>>> shape_offset = 0;
  if (shape) {
    shape_offset = _fbb.CreateVector<::flatbuffers::Offset<org::apache::arrow::flatbuf::TensorDim>>(*shape);
  }
  return org::apache::arrow::flatbuf::CreateSparseTensor(
      _fbb, type_type, type, shape_offset, non_zero_length, sparseIndex_type,
      sparseIndex, data);
}
// Verifies one `sparseIndex` union value against its discriminant.
// NONE and unrecognized discriminants (e.g. from a newer schema) are treated
// as valid so that readers stay forward-compatible; known discriminants
// require `obj` to verify as the corresponding table.
inline bool VerifySparseTensorIndex(::flatbuffers::Verifier &verifier, const void *obj, SparseTensorIndex type) {
  if (type == SparseTensorIndex::SparseTensorIndexCOO) {
    return verifier.VerifyTable(
        reinterpret_cast<const org::apache::arrow::flatbuf::SparseTensorIndexCOO *>(obj));
  }
  if (type == SparseTensorIndex::SparseMatrixIndexCSX) {
    return verifier.VerifyTable(
        reinterpret_cast<const org::apache::arrow::flatbuf::SparseMatrixIndexCSX *>(obj));
  }
  if (type == SparseTensorIndex::SparseTensorIndexCSF) {
    return verifier.VerifyTable(
        reinterpret_cast<const org::apache::arrow::flatbuf::SparseTensorIndexCSF *>(obj));
  }
  // NONE or an unknown future variant: nothing to check.
  return true;
}
// Verifies a parallel pair of vectors holding union values and their
// discriminants. Both vectors absent is valid (field omitted); one present
// without the other, or mismatched lengths, is a malformed buffer.
inline bool VerifySparseTensorIndexVector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset<void>> *values, const ::flatbuffers::Vector<SparseTensorIndex> *types) {
  if (values == nullptr || types == nullptr) {
    return values == nullptr && types == nullptr;
  }
  const ::flatbuffers::uoffset_t count = values->size();
  if (count != types->size()) {
    return false;
  }
  // Every element must verify against its paired discriminant.
  for (::flatbuffers::uoffset_t idx = 0; idx < count; ++idx) {
    const bool element_ok = VerifySparseTensorIndex(
        verifier, values->Get(idx), types->GetEnum<SparseTensorIndex>(idx));
    if (!element_ok) {
      return false;
    }
  }
  return true;
}
// Root-access helpers: interpret a finished buffer whose root table is a
// SparseTensor. The size-prefixed variants expect a 4-byte length prefix
// before the buffer contents.
inline const org::apache::arrow::flatbuf::SparseTensor *GetSparseTensor(const void *buf) {
return ::flatbuffers::GetRoot<org::apache::arrow::flatbuf::SparseTensor>(buf);
}
inline const org::apache::arrow::flatbuf::SparseTensor *GetSizePrefixedSparseTensor(const void *buf) {
return ::flatbuffers::GetSizePrefixedRoot<org::apache::arrow::flatbuf::SparseTensor>(buf);
}
// Buffer-level verification entry points; run these before trusting a buffer
// from an untrusted source. The nullptr argument means "no file identifier
// check" (the SparseTensor schema declares none).
inline bool VerifySparseTensorBuffer(
::flatbuffers::Verifier &verifier) {
return verifier.VerifyBuffer<org::apache::arrow::flatbuf::SparseTensor>(nullptr);
}
inline bool VerifySizePrefixedSparseTensorBuffer(
::flatbuffers::Verifier &verifier) {
return verifier.VerifySizePrefixedBuffer<org::apache::arrow::flatbuf::SparseTensor>(nullptr);
}
// Finalize a buffer with `root` as its root table, with or without the
// 4-byte size prefix; after this the builder's buffer is complete.
inline void FinishSparseTensorBuffer(
::flatbuffers::FlatBufferBuilder &fbb,
::flatbuffers::Offset<org::apache::arrow::flatbuf::SparseTensor> root) {
fbb.Finish(root);
}
inline void FinishSizePrefixedSparseTensorBuffer(
::flatbuffers::FlatBufferBuilder &fbb,
::flatbuffers::Offset<org::apache::arrow::flatbuf::SparseTensor> root) {
fbb.FinishSizePrefixed(root);
}
} // namespace flatbuf
} // namespace arrow
} // namespace apache
} // namespace org
#endif // FLATBUFFERS_GENERATED_SPARSETENSOR_ORG_APACHE_ARROW_FLATBUF_H_