// <auto-generated>
// automatically generated by the FlatBuffers compiler, do not modify
// </auto-generated>
namespace Apache.Arrow.Flatbuf
{
using global::System;
using global::System.Collections.Generic;
using global::Google.FlatBuffers;
/// Compressed Sparse Fiber (CSF) sparse tensor index.
internal struct SparseTensorIndexCSF : IFlatbufferObject
{
private Table __p;
public ByteBuffer ByteBuffer { get { return __p.bb; } }
public static void ValidateVersion() { FlatBufferConstants.FLATBUFFERS_23_5_9(); }
public static SparseTensorIndexCSF GetRootAsSparseTensorIndexCSF(ByteBuffer _bb) { return GetRootAsSparseTensorIndexCSF(_bb, new SparseTensorIndexCSF()); }
public static SparseTensorIndexCSF GetRootAsSparseTensorIndexCSF(ByteBuffer _bb, SparseTensorIndexCSF obj) { return (obj.__assign(_bb.GetInt(_bb.Position) + _bb.Position, _bb)); }
public void __init(int _i, ByteBuffer _bb) { __p = new Table(_i, _bb); }
public SparseTensorIndexCSF __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }
/// CSF is a generalization of the compressed sparse row (CSR) index.
/// See [smith2017knl](http://shaden.io/pub-files/smith2017knl.pdf)
///
/// The CSF index recursively compresses each dimension of a tensor into a set
/// of prefix trees. Each path from a root to a leaf forms one tensor
/// non-zero index. CSF is implemented with two arrays of buffers and one
/// array of integers.
///
/// For example, let X be a 2x3x4x5 tensor and let it have the following
/// 8 non-zero values:
/// ```text
/// X[0, 0, 0, 1] := 1
/// X[0, 0, 0, 2] := 2
/// X[0, 1, 0, 0] := 3
/// X[0, 1, 0, 2] := 4
/// X[0, 1, 1, 0] := 5
/// X[1, 1, 1, 0] := 6
/// X[1, 1, 1, 1] := 7
/// X[1, 1, 1, 2] := 8
/// ```
/// As a prefix tree this would be represented as:
/// ```text
///         0          1
///        / \         |
///       0   1        1
///      /   / \       |
///     0   0   1      1
///    /|  /|   |     /|\
///   1 2 0 2   0    0 1 2
/// ```
/// The type of values in indptrBuffers
public Int? IndptrType { get { int o = __p.__offset(4); return o != 0 ? (Int?)(new Int()).__assign(__p.__indirect(o + __p.bb_pos), __p.bb) : null; } }
/// indptrBuffers stores the sparsity structure.
/// Each pair of consecutive dimensions in a tensor corresponds to a buffer in
/// indptrBuffers. A pair of consecutive values at `indptrBuffers[dim][i]`
/// and `indptrBuffers[dim][i + 1]` signifies a range of nodes in
/// `indicesBuffers[dim + 1]` that are children of the `indicesBuffers[dim][i]` node.
///
/// For example, the indptrBuffers for the above X are:
/// ```text
/// indptrBuffer(X) = [
/// [0, 2, 3],
/// [0, 1, 3, 4],
/// [0, 2, 4, 5, 8]
/// ].
/// ```
public Buffer? IndptrBuffers(int j) { int o = __p.__offset(6); return o != 0 ? (Buffer?)(new Buffer()).__assign(__p.__vector(o) + j * 16, __p.bb) : null; }
public int IndptrBuffersLength { get { int o = __p.__offset(6); return o != 0 ? __p.__vector_len(o) : 0; } }
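// Worked illustration (editorial note, not flatc output): with the indptr and
// indices arrays documented above, the children of node i of dimension `dim`
// occupy positions indptrBuffers[dim][i] .. indptrBuffers[dim][i + 1] of
// indicesBuffers[dim + 1]. For node 1 of dimension 1 (the value 1 under root 0),
// indptrBuffers[1][1] = 1 and indptrBuffers[1][2] = 3, so its children are
// indicesBuffers[2][1 .. 3] = [0, 1].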
/// The type of values in indicesBuffers
public Int? IndicesType { get { int o = __p.__offset(8); return o != 0 ? (Int?)(new Int()).__assign(__p.__indirect(o + __p.bb_pos), __p.bb) : null; } }
/// indicesBuffers stores the values of the nodes.
/// Each tensor dimension corresponds to a buffer in indicesBuffers.
/// For example, the indicesBuffers for the above X are:
/// ```text
/// indicesBuffer(X) = [
/// [0, 1],
/// [0, 1, 1],
/// [0, 0, 1, 1],
/// [1, 2, 0, 2, 0, 0, 1, 2]
/// ].
/// ```
public Buffer? IndicesBuffers(int j) { int o = __p.__offset(10); return o != 0 ? (Buffer?)(new Buffer()).__assign(__p.__vector(o) + j * 16, __p.bb) : null; }
public int IndicesBuffersLength { get { int o = __p.__offset(10); return o != 0 ? __p.__vector_len(o) : 0; } }
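// Worked illustration (editorial note, not flatc output): walking indptr and
// indices from the root recovers every non-zero coordinate. Starting at root 0
// (indicesBuffers[0][0]), taking its second child 1 (indicesBuffers[1][1]), that
// node's first child 0 (indicesBuffers[2][1]), and that node's second child 2
// (indicesBuffers[3][3]) reconstructs the non-zero X[0, 1, 0, 2] = 4. A runnable
// traversal sketch over plain arrays follows the generated struct below.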
/// axisOrder stores the sequence in which dimensions were traversed to
/// produce the prefix tree.
/// For example, the axisOrder for the above X is:
/// ```text
/// axisOrder(X) = [0, 1, 2, 3].
/// ```
public int AxisOrder(int j) { int o = __p.__offset(12); return o != 0 ? __p.bb.GetInt(__p.__vector(o) + j * 4) : (int)0; }
public int AxisOrderLength { get { int o = __p.__offset(12); return o != 0 ? __p.__vector_len(o) : 0; } }
#if ENABLE_SPAN_T
public Span<int> GetAxisOrderBytes() { return __p.__vector_as_span<int>(12, 4); }
#else
public ArraySegment<byte>? GetAxisOrderBytes() { return __p.__vector_as_arraysegment(12); }
#endif
public int[] GetAxisOrderArray() { return __p.__vector_as_array<int>(12); }
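// Editorial note: axisOrder maps tree depth to tensor axis, i.e. the coordinate
// recovered at depth d of the prefix tree belongs to axis AxisOrder(d). In the
// documented example the traversal order is the identity [0, 1, 2, 3]; a
// permuted order such as [1, 0, 2, 3] would mean the root level indexes axis 1.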
public static Offset<SparseTensorIndexCSF> CreateSparseTensorIndexCSF(FlatBufferBuilder builder,
Offset<Int> indptrTypeOffset = default(Offset<Int>),
VectorOffset indptrBuffersOffset = default(VectorOffset),
Offset<Int> indicesTypeOffset = default(Offset<Int>),
VectorOffset indicesBuffersOffset = default(VectorOffset),
VectorOffset axisOrderOffset = default(VectorOffset)) {
builder.StartTable(5);
SparseTensorIndexCSF.AddAxisOrder(builder, axisOrderOffset);
SparseTensorIndexCSF.AddIndicesBuffers(builder, indicesBuffersOffset);
SparseTensorIndexCSF.AddIndicesType(builder, indicesTypeOffset);
SparseTensorIndexCSF.AddIndptrBuffers(builder, indptrBuffersOffset);
SparseTensorIndexCSF.AddIndptrType(builder, indptrTypeOffset);
return SparseTensorIndexCSF.EndSparseTensorIndexCSF(builder);
}
public static void StartSparseTensorIndexCSF(FlatBufferBuilder builder) { builder.StartTable(5); }
public static void AddIndptrType(FlatBufferBuilder builder, Offset<Int> indptrTypeOffset) { builder.AddOffset(0, indptrTypeOffset.Value, 0); }
public static void AddIndptrBuffers(FlatBufferBuilder builder, VectorOffset indptrBuffersOffset) { builder.AddOffset(1, indptrBuffersOffset.Value, 0); }
public static void StartIndptrBuffersVector(FlatBufferBuilder builder, int numElems) { builder.StartVector(16, numElems, 8); }
public static void AddIndicesType(FlatBufferBuilder builder, Offset<Int> indicesTypeOffset) { builder.AddOffset(2, indicesTypeOffset.Value, 0); }
public static void AddIndicesBuffers(FlatBufferBuilder builder, VectorOffset indicesBuffersOffset) { builder.AddOffset(3, indicesBuffersOffset.Value, 0); }
public static void StartIndicesBuffersVector(FlatBufferBuilder builder, int numElems) { builder.StartVector(16, numElems, 8); }
public static void AddAxisOrder(FlatBufferBuilder builder, VectorOffset axisOrderOffset) { builder.AddOffset(4, axisOrderOffset.Value, 0); }
public static VectorOffset CreateAxisOrderVector(FlatBufferBuilder builder, int[] data) { builder.StartVector(4, data.Length, 4); for (int i = data.Length - 1; i >= 0; i--) builder.AddInt(data[i]); return builder.EndVector(); }
public static VectorOffset CreateAxisOrderVectorBlock(FlatBufferBuilder builder, int[] data) { builder.StartVector(4, data.Length, 4); builder.Add(data); return builder.EndVector(); }
public static VectorOffset CreateAxisOrderVectorBlock(FlatBufferBuilder builder, ArraySegment<int> data) { builder.StartVector(4, data.Count, 4); builder.Add(data); return builder.EndVector(); }
public static VectorOffset CreateAxisOrderVectorBlock(FlatBufferBuilder builder, IntPtr dataPtr, int sizeInBytes) { builder.StartVector(1, sizeInBytes, 1); builder.Add<int>(dataPtr, sizeInBytes); return builder.EndVector(); }
public static void StartAxisOrderVector(FlatBufferBuilder builder, int numElems) { builder.StartVector(4, numElems, 4); }
public static Offset<SparseTensorIndexCSF> EndSparseTensorIndexCSF(FlatBufferBuilder builder) {
int o = builder.EndTable();
builder.Required(o, 4); // indptrType
builder.Required(o, 6); // indptrBuffers
builder.Required(o, 8); // indicesType
builder.Required(o, 10); // indicesBuffers
builder.Required(o, 12); // axisOrder
return new Offset<SparseTensorIndexCSF>(o);
}
}
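// Editorial sketch, not produced by flatc: the first helper makes the traversal
// described in the field comments above concrete, using plain int arrays in place
// of the contents of indptrBuffers/indicesBuffers. The second shows one way to
// assemble a SparseTensorIndexCSF with the builder API; it assumes the companion
// generated types Int and Buffer expose the usual CreateInt and CreateBuffer
// helpers, and uses placeholder buffer offsets/lengths (real values point into the
// body of the enclosing SparseTensor message).
internal static class SparseTensorIndexCSFExample
{
  // Enumerate the non-zero coordinates encoded by CSF indptr/indices arrays.
  // Passing the arrays documented above yields the eight coordinates of X in order.
  internal static IEnumerable<int[]> EnumerateNonZeros(int[][] indptr, int[][] indices)
  {
    int ndim = indices.Length;
    var coords = new int[ndim];
    IEnumerable<int[]> Walk(int dim, int lo, int hi)
    {
      for (int i = lo; i < hi; i++)
      {
        coords[dim] = indices[dim][i];
        if (dim == ndim - 1)
        {
          // A complete root-to-leaf path is one non-zero index.
          yield return (int[])coords.Clone();
        }
        else
        {
          // indptr[dim][i] .. indptr[dim][i + 1] delimits the children of this
          // node in the next dimension's indices.
          foreach (var c in Walk(dim + 1, indptr[dim][i], indptr[dim][i + 1]))
            yield return c;
        }
      }
    }
    return Walk(0, 0, indices[0].Length);
  }
  // Assemble a SparseTensorIndexCSF table for a 4-dimensional tensor.
  internal static Offset<SparseTensorIndexCSF> Build(FlatBufferBuilder builder)
  {
    // Index value types (here: signed 32-bit integers).
    var indptrType = Int.CreateInt(builder, 32, true);
    var indicesType = Int.CreateInt(builder, 32, true);
    // Struct vectors are written in reverse order; offsets/lengths are placeholders.
    SparseTensorIndexCSF.StartIndptrBuffersVector(builder, 3);
    for (int i = 2; i >= 0; i--) Buffer.CreateBuffer(builder, 0, 0);
    var indptrBuffers = builder.EndVector();
    SparseTensorIndexCSF.StartIndicesBuffersVector(builder, 4);
    for (int i = 3; i >= 0; i--) Buffer.CreateBuffer(builder, 0, 0);
    var indicesBuffers = builder.EndVector();
    var axisOrder = SparseTensorIndexCSF.CreateAxisOrderVector(builder, new int[] { 0, 1, 2, 3 });
    return SparseTensorIndexCSF.CreateSparseTensorIndexCSF(
        builder, indptrType, indptrBuffers, indicesType, indicesBuffers, axisOrder);
  }
}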
static internal class SparseTensorIndexCSFVerify
{
static public bool Verify(Google.FlatBuffers.Verifier verifier, uint tablePos)
{
return verifier.VerifyTableStart(tablePos)
&& verifier.VerifyTable(tablePos, 4 /*IndptrType*/, IntVerify.Verify, true)
&& verifier.VerifyVectorOfData(tablePos, 6 /*IndptrBuffers*/, 16 /*Buffer*/, true)
&& verifier.VerifyTable(tablePos, 8 /*IndicesType*/, IntVerify.Verify, true)
&& verifier.VerifyVectorOfData(tablePos, 10 /*IndicesBuffers*/, 16 /*Buffer*/, true)
&& verifier.VerifyVectorOfData(tablePos, 12 /*AxisOrder*/, 4 /*int*/, true)
&& verifier.VerifyTableEnd(tablePos);
}
}
}