1// automatically generated by the FlatBuffers compiler, do not modify
2
3
4#ifndef FLATBUFFERS_GENERATED_TENSOR_ORG_APACHE_ARROW_FLATBUF_H_
5#define FLATBUFFERS_GENERATED_TENSOR_ORG_APACHE_ARROW_FLATBUF_H_
6
7#include "flatbuffers/flatbuffers.h"
8
9#include "Schema_generated.h"
10
11namespace org {
12namespace apache {
13namespace arrow {
14namespace flatbuf {
15
16struct TensorDim;
17
18struct Tensor;
19
20struct SparseTensorIndexCOO;
21
22struct SparseMatrixIndexCSR;
23
24struct SparseTensor;
25
/// Discriminator for the SparseTensor.sparseIndex union: identifies which
/// concrete index table (COO or CSR) the `sparseIndex` field holds.
enum SparseTensorIndex {
  SparseTensorIndex_NONE = 0,
  SparseTensorIndex_SparseTensorIndexCOO = 1,
  SparseTensorIndex_SparseMatrixIndexCSR = 2,
  // MIN/MAX bracket the valid numeric range of this enum.
  SparseTensorIndex_MIN = SparseTensorIndex_NONE,
  SparseTensorIndex_MAX = SparseTensorIndex_SparseMatrixIndexCSR
};
33
34inline const SparseTensorIndex (&EnumValuesSparseTensorIndex())[3] {
35 static const SparseTensorIndex values[] = {
36 SparseTensorIndex_NONE,
37 SparseTensorIndex_SparseTensorIndexCOO,
38 SparseTensorIndex_SparseMatrixIndexCSR
39 };
40 return values;
41}
42
/// Returns a nullptr-terminated table of enumerator names, indexed by the
/// numeric value of SparseTensorIndex.
inline const char * const *EnumNamesSparseTensorIndex() {
  static const char * const kEnumNames[] = {
    "NONE",
    "SparseTensorIndexCOO",
    "SparseMatrixIndexCSR",
    nullptr
  };
  return kEnumNames;
}
52
53inline const char *EnumNameSparseTensorIndex(SparseTensorIndex e) {
54 const size_t index = static_cast<int>(e);
55 return EnumNamesSparseTensorIndex()[index];
56}
57
/// Compile-time mapping from an index table type to its SparseTensorIndex
/// enumerator. The unspecialized template yields NONE for unknown types.
template<typename T> struct SparseTensorIndexTraits {
  static const SparseTensorIndex enum_value = SparseTensorIndex_NONE;
};

template<> struct SparseTensorIndexTraits<SparseTensorIndexCOO> {
  static const SparseTensorIndex enum_value = SparseTensorIndex_SparseTensorIndexCOO;
};

template<> struct SparseTensorIndexTraits<SparseMatrixIndexCSR> {
  static const SparseTensorIndex enum_value = SparseTensorIndex_SparseMatrixIndexCSR;
};
69
70bool VerifySparseTensorIndex(flatbuffers::Verifier &verifier, const void *obj, SparseTensorIndex type);
71bool VerifySparseTensorIndexVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector<flatbuffers::Offset<void>> *values, const flatbuffers::Vector<uint8_t> *types);
72
73/// ----------------------------------------------------------------------
74/// Data structures for dense tensors
75/// Shape data for a single axis in a tensor
/// ----------------------------------------------------------------------
/// Data structures for dense tensors
/// Shape data for a single axis in a tensor
struct TensorDim FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  // vtable offsets of this table's fields.
  enum {
    VT_SIZE = 4,
    VT_NAME = 6
  };
  /// Length of dimension
  int64_t size() const {
    return GetField<int64_t>(VT_SIZE, 0);
  }
  /// Name of the dimension, optional
  const flatbuffers::String *name() const {
    return GetPointer<const flatbuffers::String *>(VT_NAME);
  }
  // Structural validation of this table against the buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int64_t>(verifier, VT_SIZE) &&
           VerifyOffset(verifier, VT_NAME) &&
           verifier.VerifyString(name()) &&
           verifier.EndTable();
  }
};
97
/// Incrementally assembles a TensorDim table in a FlatBufferBuilder.
/// Typical use is via CreateTensorDim() rather than direct construction.
struct TensorDimBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // table start position returned by StartTable()
  void add_size(int64_t size) {
    fbb_.AddElement<int64_t>(TensorDim::VT_SIZE, size, 0);
  }
  void add_name(flatbuffers::Offset<flatbuffers::String> name) {
    fbb_.AddOffset(TensorDim::VT_NAME, name);
  }
  explicit TensorDimBuilder(flatbuffers::FlatBufferBuilder &_fbb)
      : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but not defined: builders are not assignable (fbb_ is a reference).
  TensorDimBuilder &operator=(const TensorDimBuilder &);
  // Finalizes the table and returns its offset within the buffer.
  flatbuffers::Offset<TensorDim> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<TensorDim>(end);
    return o;
  }
};
118
/// Convenience helper: builds a complete TensorDim table in one call.
/// `name` is an offset to a string already serialized into `_fbb` (0 = absent).
inline flatbuffers::Offset<TensorDim> CreateTensorDim(
    flatbuffers::FlatBufferBuilder &_fbb,
    int64_t size = 0,
    flatbuffers::Offset<flatbuffers::String> name = 0) {
  TensorDimBuilder builder_(_fbb);
  builder_.add_size(size);
  builder_.add_name(name);
  return builder_.Finish();
}
128
/// Like CreateTensorDim, but serializes a C string for `name` on the fly
/// (nullptr = field absent).
inline flatbuffers::Offset<TensorDim> CreateTensorDimDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    int64_t size = 0,
    const char *name = nullptr) {
  return org::apache::arrow::flatbuf::CreateTensorDim(
      _fbb,
      size,
      name ? _fbb.CreateString(name) : 0);
}
138
/// A dense tensor: element type (a Type union), named dimensions, optional
/// byte strides, and the location of the raw data buffer.
struct Tensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  // vtable offsets of this table's fields.
  enum {
    VT_TYPE_TYPE = 4,
    VT_TYPE = 6,
    VT_SHAPE = 8,
    VT_STRIDES = 10,
    VT_DATA = 12
  };
  // Discriminator telling which concrete table the `type` union field holds.
  Type type_type() const {
    return static_cast<Type>(GetField<uint8_t>(VT_TYPE_TYPE, 0));
  }
  /// The type of data contained in a value cell. Currently only fixed-width
  /// value types are supported, no strings or nested types
  const void *type() const {
    return GetPointer<const void *>(VT_TYPE);
  }
  template<typename T> const T *type_as() const;
  // Checked union accessors: each returns the payload as the requested table,
  // or nullptr when type_type() does not match.
  const Null *type_as_Null() const {
    return type_type() == Type_Null ? static_cast<const Null *>(type()) : nullptr;
  }
  const Int *type_as_Int() const {
    return type_type() == Type_Int ? static_cast<const Int *>(type()) : nullptr;
  }
  const FloatingPoint *type_as_FloatingPoint() const {
    return type_type() == Type_FloatingPoint ? static_cast<const FloatingPoint *>(type()) : nullptr;
  }
  const Binary *type_as_Binary() const {
    return type_type() == Type_Binary ? static_cast<const Binary *>(type()) : nullptr;
  }
  const Utf8 *type_as_Utf8() const {
    return type_type() == Type_Utf8 ? static_cast<const Utf8 *>(type()) : nullptr;
  }
  const Bool *type_as_Bool() const {
    return type_type() == Type_Bool ? static_cast<const Bool *>(type()) : nullptr;
  }
  const Decimal *type_as_Decimal() const {
    return type_type() == Type_Decimal ? static_cast<const Decimal *>(type()) : nullptr;
  }
  const Date *type_as_Date() const {
    return type_type() == Type_Date ? static_cast<const Date *>(type()) : nullptr;
  }
  const Time *type_as_Time() const {
    return type_type() == Type_Time ? static_cast<const Time *>(type()) : nullptr;
  }
  const Timestamp *type_as_Timestamp() const {
    return type_type() == Type_Timestamp ? static_cast<const Timestamp *>(type()) : nullptr;
  }
  const Interval *type_as_Interval() const {
    return type_type() == Type_Interval ? static_cast<const Interval *>(type()) : nullptr;
  }
  const List *type_as_List() const {
    return type_type() == Type_List ? static_cast<const List *>(type()) : nullptr;
  }
  const Struct_ *type_as_Struct_() const {
    return type_type() == Type_Struct_ ? static_cast<const Struct_ *>(type()) : nullptr;
  }
  const Union *type_as_Union() const {
    return type_type() == Type_Union ? static_cast<const Union *>(type()) : nullptr;
  }
  const FixedSizeBinary *type_as_FixedSizeBinary() const {
    return type_type() == Type_FixedSizeBinary ? static_cast<const FixedSizeBinary *>(type()) : nullptr;
  }
  const FixedSizeList *type_as_FixedSizeList() const {
    return type_type() == Type_FixedSizeList ? static_cast<const FixedSizeList *>(type()) : nullptr;
  }
  const Map *type_as_Map() const {
    return type_type() == Type_Map ? static_cast<const Map *>(type()) : nullptr;
  }
  /// The dimensions of the tensor, optionally named
  const flatbuffers::Vector<flatbuffers::Offset<TensorDim>> *shape() const {
    return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<TensorDim>> *>(VT_SHAPE);
  }
  /// Non-negative byte offsets to advance one value cell along each dimension
  const flatbuffers::Vector<int64_t> *strides() const {
    return GetPointer<const flatbuffers::Vector<int64_t> *>(VT_STRIDES);
  }
  /// The location and size of the tensor's data
  const Buffer *data() const {
    return GetStruct<const Buffer *>(VT_DATA);
  }
  // Structural validation of this table (including the nested type union and
  // vectors) against the buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<uint8_t>(verifier, VT_TYPE_TYPE) &&
           VerifyOffset(verifier, VT_TYPE) &&
           VerifyType(verifier, type(), type_type()) &&
           VerifyOffset(verifier, VT_SHAPE) &&
           verifier.VerifyVector(shape()) &&
           verifier.VerifyVectorOfTables(shape()) &&
           VerifyOffset(verifier, VT_STRIDES) &&
           verifier.VerifyVector(strides()) &&
           VerifyField<Buffer>(verifier, VT_DATA) &&
           verifier.EndTable();
  }
};
233
// Template forms of Tensor's union accessors: Tensor::type_as<T>() dispatches
// to the corresponding type_as_T() member above.
template<> inline const Null *Tensor::type_as<Null>() const {
  return type_as_Null();
}

template<> inline const Int *Tensor::type_as<Int>() const {
  return type_as_Int();
}

template<> inline const FloatingPoint *Tensor::type_as<FloatingPoint>() const {
  return type_as_FloatingPoint();
}

template<> inline const Binary *Tensor::type_as<Binary>() const {
  return type_as_Binary();
}

template<> inline const Utf8 *Tensor::type_as<Utf8>() const {
  return type_as_Utf8();
}

template<> inline const Bool *Tensor::type_as<Bool>() const {
  return type_as_Bool();
}

template<> inline const Decimal *Tensor::type_as<Decimal>() const {
  return type_as_Decimal();
}

template<> inline const Date *Tensor::type_as<Date>() const {
  return type_as_Date();
}

template<> inline const Time *Tensor::type_as<Time>() const {
  return type_as_Time();
}

template<> inline const Timestamp *Tensor::type_as<Timestamp>() const {
  return type_as_Timestamp();
}

template<> inline const Interval *Tensor::type_as<Interval>() const {
  return type_as_Interval();
}

template<> inline const List *Tensor::type_as<List>() const {
  return type_as_List();
}

template<> inline const Struct_ *Tensor::type_as<Struct_>() const {
  return type_as_Struct_();
}

template<> inline const Union *Tensor::type_as<Union>() const {
  return type_as_Union();
}

template<> inline const FixedSizeBinary *Tensor::type_as<FixedSizeBinary>() const {
  return type_as_FixedSizeBinary();
}

template<> inline const FixedSizeList *Tensor::type_as<FixedSizeList>() const {
  return type_as_FixedSizeList();
}

template<> inline const Map *Tensor::type_as<Map>() const {
  return type_as_Map();
}
301
/// Incrementally assembles a Tensor table in a FlatBufferBuilder.
/// Typical use is via CreateTensor() rather than direct construction.
struct TensorBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // table start position returned by StartTable()
  void add_type_type(Type type_type) {
    fbb_.AddElement<uint8_t>(Tensor::VT_TYPE_TYPE, static_cast<uint8_t>(type_type), 0);
  }
  // `type` is the union payload; callers must also set the matching type_type.
  void add_type(flatbuffers::Offset<void> type) {
    fbb_.AddOffset(Tensor::VT_TYPE, type);
  }
  void add_shape(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<TensorDim>>> shape) {
    fbb_.AddOffset(Tensor::VT_SHAPE, shape);
  }
  void add_strides(flatbuffers::Offset<flatbuffers::Vector<int64_t>> strides) {
    fbb_.AddOffset(Tensor::VT_STRIDES, strides);
  }
  void add_data(const Buffer *data) {
    fbb_.AddStruct(Tensor::VT_DATA, data);
  }
  explicit TensorBuilder(flatbuffers::FlatBufferBuilder &_fbb)
      : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but not defined: builders are not assignable (fbb_ is a reference).
  TensorBuilder &operator=(const TensorBuilder &);
  // Finalizes the table and returns its offset within the buffer.
  flatbuffers::Offset<Tensor> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<Tensor>(end);
    return o;
  }
};
331
/// Convenience helper: builds a complete Tensor table in one call. All offset
/// parameters refer to data already serialized into `_fbb` (0 = field absent).
/// Fields are added largest-first (struct/offsets before bytes), the order
/// emitted by flatc to pack the table tightly.
inline flatbuffers::Offset<Tensor> CreateTensor(
    flatbuffers::FlatBufferBuilder &_fbb,
    Type type_type = Type_NONE,
    flatbuffers::Offset<void> type = 0,
    flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<TensorDim>>> shape = 0,
    flatbuffers::Offset<flatbuffers::Vector<int64_t>> strides = 0,
    const Buffer *data = 0) {
  TensorBuilder builder_(_fbb);
  builder_.add_data(data);
  builder_.add_strides(strides);
  builder_.add_shape(shape);
  builder_.add_type(type);
  builder_.add_type_type(type_type);
  return builder_.Finish();
}
347
/// Like CreateTensor, but serializes the `shape` and `strides` std::vectors
/// into the buffer on the fly (nullptr = field absent).
inline flatbuffers::Offset<Tensor> CreateTensorDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    Type type_type = Type_NONE,
    flatbuffers::Offset<void> type = 0,
    const std::vector<flatbuffers::Offset<TensorDim>> *shape = nullptr,
    const std::vector<int64_t> *strides = nullptr,
    const Buffer *data = 0) {
  return org::apache::arrow::flatbuf::CreateTensor(
      _fbb,
      type_type,
      type,
      shape ? _fbb.CreateVector<flatbuffers::Offset<TensorDim>>(*shape) : 0,
      strides ? _fbb.CreateVector<int64_t>(*strides) : 0,
      data);
}
363
364/// ----------------------------------------------------------------------
365/// EXPERIMENTAL: Data structures for sparse tensors
/// Coordinate format of sparse tensor index.
/// Sparse-tensor index in coordinate (COO) layout: one buffer holding an
/// NxM matrix of int64 coordinates for the N non-zero values.
struct SparseTensorIndexCOO FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  // vtable offset of this table's single field.
  enum {
    VT_INDICESBUFFER = 4
  };
  /// COO's index list are represented as a NxM matrix,
  /// where N is the number of non-zero values,
  /// and M is the number of dimensions of a sparse tensor.
  /// indicesBuffer stores the location and size of this index matrix.
  /// The type of index value is long, so the stride for the index matrix is unnecessary.
  ///
  /// For example, let X be a 2x3x4x5 tensor, and it has the following 6 non-zero values:
  ///
  ///   X[0, 1, 2, 0] := 1
  ///   X[1, 1, 2, 3] := 2
  ///   X[0, 2, 1, 0] := 3
  ///   X[0, 1, 3, 0] := 4
  ///   X[0, 1, 2, 1] := 5
  ///   X[1, 2, 0, 4] := 6
  ///
  /// In COO format, the index matrix of X is the following 4x6 matrix:
  ///
  ///   [[0, 0, 0, 0, 1, 1],
  ///    [1, 1, 1, 2, 1, 2],
  ///    [2, 2, 3, 1, 2, 0],
  ///    [0, 1, 0, 0, 3, 4]]
  ///
  /// Note that the indices are sorted in lexicographical order.
  const Buffer *indicesBuffer() const {
    return GetStruct<const Buffer *>(VT_INDICESBUFFER);
  }
  // Structural validation of this table against the buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<Buffer>(verifier, VT_INDICESBUFFER) &&
           verifier.EndTable();
  }
};
403
/// Incrementally assembles a SparseTensorIndexCOO table in a FlatBufferBuilder.
/// Typical use is via CreateSparseTensorIndexCOO().
struct SparseTensorIndexCOOBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // table start position returned by StartTable()
  void add_indicesBuffer(const Buffer *indicesBuffer) {
    fbb_.AddStruct(SparseTensorIndexCOO::VT_INDICESBUFFER, indicesBuffer);
  }
  explicit SparseTensorIndexCOOBuilder(flatbuffers::FlatBufferBuilder &_fbb)
      : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but not defined: builders are not assignable (fbb_ is a reference).
  SparseTensorIndexCOOBuilder &operator=(const SparseTensorIndexCOOBuilder &);
  // Finalizes the table and returns its offset within the buffer.
  flatbuffers::Offset<SparseTensorIndexCOO> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<SparseTensorIndexCOO>(end);
    return o;
  }
};
421
/// Convenience helper: builds a complete SparseTensorIndexCOO table in one call.
inline flatbuffers::Offset<SparseTensorIndexCOO> CreateSparseTensorIndexCOO(
    flatbuffers::FlatBufferBuilder &_fbb,
    const Buffer *indicesBuffer = 0) {
  SparseTensorIndexCOOBuilder builder_(_fbb);
  builder_.add_indicesBuffer(indicesBuffer);
  return builder_.Finish();
}
429
430/// Compressed Sparse Row format, that is matrix-specific.
/// Compressed Sparse Row format, that is matrix-specific.
struct SparseMatrixIndexCSR FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  // vtable offsets of this table's fields.
  enum {
    VT_INDPTRBUFFER = 4,
    VT_INDICESBUFFER = 6
  };
  /// indptrBuffer stores the location and size of indptr array that
  /// represents the range of the rows.
  /// The i-th row spans from indptr[i] to indptr[i+1] in the data.
  /// The length of this array is 1 + (the number of rows), and the type
  /// of index value is long.
  ///
  /// For example, let X be the following 6x4 matrix:
  ///
  ///   X := [[0, 1, 2, 0],
  ///         [0, 0, 3, 0],
  ///         [0, 4, 0, 5],
  ///         [0, 0, 0, 0],
  ///         [6, 0, 7, 8],
  ///         [0, 9, 0, 0]].
  ///
  /// The array of non-zero values in X is:
  ///
  ///   values(X) = [1, 2, 3, 4, 5, 6, 7, 8, 9].
  ///
  /// And the indptr of X is:
  ///
  ///   indptr(X) = [0, 2, 3, 5, 5, 8, 9].
  const Buffer *indptrBuffer() const {
    return GetStruct<const Buffer *>(VT_INDPTRBUFFER);
  }
  /// indicesBuffer stores the location and size of the array that
  /// contains the column indices of the corresponding non-zero values.
  /// The type of index value is long.
  ///
  /// For example, the indices of the above X is:
  ///
  ///   indices(X) = [1, 2, 2, 1, 3, 0, 2, 3, 1].
  const Buffer *indicesBuffer() const {
    return GetStruct<const Buffer *>(VT_INDICESBUFFER);
  }
  // Structural validation of this table against the buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<Buffer>(verifier, VT_INDPTRBUFFER) &&
           VerifyField<Buffer>(verifier, VT_INDICESBUFFER) &&
           verifier.EndTable();
  }
};
478
/// Incrementally assembles a SparseMatrixIndexCSR table in a FlatBufferBuilder.
/// Typical use is via CreateSparseMatrixIndexCSR().
struct SparseMatrixIndexCSRBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // table start position returned by StartTable()
  void add_indptrBuffer(const Buffer *indptrBuffer) {
    fbb_.AddStruct(SparseMatrixIndexCSR::VT_INDPTRBUFFER, indptrBuffer);
  }
  void add_indicesBuffer(const Buffer *indicesBuffer) {
    fbb_.AddStruct(SparseMatrixIndexCSR::VT_INDICESBUFFER, indicesBuffer);
  }
  explicit SparseMatrixIndexCSRBuilder(flatbuffers::FlatBufferBuilder &_fbb)
      : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but not defined: builders are not assignable (fbb_ is a reference).
  SparseMatrixIndexCSRBuilder &operator=(const SparseMatrixIndexCSRBuilder &);
  // Finalizes the table and returns its offset within the buffer.
  flatbuffers::Offset<SparseMatrixIndexCSR> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<SparseMatrixIndexCSR>(end);
    return o;
  }
};
499
/// Convenience helper: builds a complete SparseMatrixIndexCSR table in one call.
inline flatbuffers::Offset<SparseMatrixIndexCSR> CreateSparseMatrixIndexCSR(
    flatbuffers::FlatBufferBuilder &_fbb,
    const Buffer *indptrBuffer = 0,
    const Buffer *indicesBuffer = 0) {
  SparseMatrixIndexCSRBuilder builder_(_fbb);
  builder_.add_indicesBuffer(indicesBuffer);
  builder_.add_indptrBuffer(indptrBuffer);
  return builder_.Finish();
}
509
/// A sparse tensor: element type (a Type union), named dimensions, the count
/// of non-zero values, a sparse index (COO or CSR union), and the location of
/// the non-zero data buffer.
struct SparseTensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  // vtable offsets of this table's fields.
  enum {
    VT_TYPE_TYPE = 4,
    VT_TYPE = 6,
    VT_SHAPE = 8,
    VT_NON_ZERO_LENGTH = 10,
    VT_SPARSEINDEX_TYPE = 12,
    VT_SPARSEINDEX = 14,
    VT_DATA = 16
  };
  // Discriminator telling which concrete table the `type` union field holds.
  Type type_type() const {
    return static_cast<Type>(GetField<uint8_t>(VT_TYPE_TYPE, 0));
  }
  /// The type of data contained in a value cell.
  /// Currently only fixed-width value types are supported,
  /// no strings or nested types.
  const void *type() const {
    return GetPointer<const void *>(VT_TYPE);
  }
  template<typename T> const T *type_as() const;
  // Checked union accessors: each returns the payload as the requested table,
  // or nullptr when type_type() does not match.
  const Null *type_as_Null() const {
    return type_type() == Type_Null ? static_cast<const Null *>(type()) : nullptr;
  }
  const Int *type_as_Int() const {
    return type_type() == Type_Int ? static_cast<const Int *>(type()) : nullptr;
  }
  const FloatingPoint *type_as_FloatingPoint() const {
    return type_type() == Type_FloatingPoint ? static_cast<const FloatingPoint *>(type()) : nullptr;
  }
  const Binary *type_as_Binary() const {
    return type_type() == Type_Binary ? static_cast<const Binary *>(type()) : nullptr;
  }
  const Utf8 *type_as_Utf8() const {
    return type_type() == Type_Utf8 ? static_cast<const Utf8 *>(type()) : nullptr;
  }
  const Bool *type_as_Bool() const {
    return type_type() == Type_Bool ? static_cast<const Bool *>(type()) : nullptr;
  }
  const Decimal *type_as_Decimal() const {
    return type_type() == Type_Decimal ? static_cast<const Decimal *>(type()) : nullptr;
  }
  const Date *type_as_Date() const {
    return type_type() == Type_Date ? static_cast<const Date *>(type()) : nullptr;
  }
  const Time *type_as_Time() const {
    return type_type() == Type_Time ? static_cast<const Time *>(type()) : nullptr;
  }
  const Timestamp *type_as_Timestamp() const {
    return type_type() == Type_Timestamp ? static_cast<const Timestamp *>(type()) : nullptr;
  }
  const Interval *type_as_Interval() const {
    return type_type() == Type_Interval ? static_cast<const Interval *>(type()) : nullptr;
  }
  const List *type_as_List() const {
    return type_type() == Type_List ? static_cast<const List *>(type()) : nullptr;
  }
  const Struct_ *type_as_Struct_() const {
    return type_type() == Type_Struct_ ? static_cast<const Struct_ *>(type()) : nullptr;
  }
  const Union *type_as_Union() const {
    return type_type() == Type_Union ? static_cast<const Union *>(type()) : nullptr;
  }
  const FixedSizeBinary *type_as_FixedSizeBinary() const {
    return type_type() == Type_FixedSizeBinary ? static_cast<const FixedSizeBinary *>(type()) : nullptr;
  }
  const FixedSizeList *type_as_FixedSizeList() const {
    return type_type() == Type_FixedSizeList ? static_cast<const FixedSizeList *>(type()) : nullptr;
  }
  const Map *type_as_Map() const {
    return type_type() == Type_Map ? static_cast<const Map *>(type()) : nullptr;
  }
  /// The dimensions of the tensor, optionally named.
  const flatbuffers::Vector<flatbuffers::Offset<TensorDim>> *shape() const {
    return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<TensorDim>> *>(VT_SHAPE);
  }
  /// The number of non-zero values in a sparse tensor.
  int64_t non_zero_length() const {
    return GetField<int64_t>(VT_NON_ZERO_LENGTH, 0);
  }
  // Discriminator telling which concrete table the `sparseIndex` union holds.
  SparseTensorIndex sparseIndex_type() const {
    return static_cast<SparseTensorIndex>(GetField<uint8_t>(VT_SPARSEINDEX_TYPE, 0));
  }
  /// Sparse tensor index
  const void *sparseIndex() const {
    return GetPointer<const void *>(VT_SPARSEINDEX);
  }
  template<typename T> const T *sparseIndex_as() const;
  // Checked union accessors for the sparse index payload.
  const SparseTensorIndexCOO *sparseIndex_as_SparseTensorIndexCOO() const {
    return sparseIndex_type() == SparseTensorIndex_SparseTensorIndexCOO ? static_cast<const SparseTensorIndexCOO *>(sparseIndex()) : nullptr;
  }
  const SparseMatrixIndexCSR *sparseIndex_as_SparseMatrixIndexCSR() const {
    return sparseIndex_type() == SparseTensorIndex_SparseMatrixIndexCSR ? static_cast<const SparseMatrixIndexCSR *>(sparseIndex()) : nullptr;
  }
  /// The location and size of the tensor's data
  const Buffer *data() const {
    return GetStruct<const Buffer *>(VT_DATA);
  }
  // Structural validation of this table (including both nested unions and the
  // shape vector) against the buffer bounds.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<uint8_t>(verifier, VT_TYPE_TYPE) &&
           VerifyOffset(verifier, VT_TYPE) &&
           VerifyType(verifier, type(), type_type()) &&
           VerifyOffset(verifier, VT_SHAPE) &&
           verifier.VerifyVector(shape()) &&
           verifier.VerifyVectorOfTables(shape()) &&
           VerifyField<int64_t>(verifier, VT_NON_ZERO_LENGTH) &&
           VerifyField<uint8_t>(verifier, VT_SPARSEINDEX_TYPE) &&
           VerifyOffset(verifier, VT_SPARSEINDEX) &&
           VerifySparseTensorIndex(verifier, sparseIndex(), sparseIndex_type()) &&
           VerifyField<Buffer>(verifier, VT_DATA) &&
           verifier.EndTable();
  }
};
623
// Template forms of SparseTensor's union accessors: type_as<T>() and
// sparseIndex_as<T>() dispatch to the corresponding *_as_T() members above.
template<> inline const Null *SparseTensor::type_as<Null>() const {
  return type_as_Null();
}

template<> inline const Int *SparseTensor::type_as<Int>() const {
  return type_as_Int();
}

template<> inline const FloatingPoint *SparseTensor::type_as<FloatingPoint>() const {
  return type_as_FloatingPoint();
}

template<> inline const Binary *SparseTensor::type_as<Binary>() const {
  return type_as_Binary();
}

template<> inline const Utf8 *SparseTensor::type_as<Utf8>() const {
  return type_as_Utf8();
}

template<> inline const Bool *SparseTensor::type_as<Bool>() const {
  return type_as_Bool();
}

template<> inline const Decimal *SparseTensor::type_as<Decimal>() const {
  return type_as_Decimal();
}

template<> inline const Date *SparseTensor::type_as<Date>() const {
  return type_as_Date();
}

template<> inline const Time *SparseTensor::type_as<Time>() const {
  return type_as_Time();
}

template<> inline const Timestamp *SparseTensor::type_as<Timestamp>() const {
  return type_as_Timestamp();
}

template<> inline const Interval *SparseTensor::type_as<Interval>() const {
  return type_as_Interval();
}

template<> inline const List *SparseTensor::type_as<List>() const {
  return type_as_List();
}

template<> inline const Struct_ *SparseTensor::type_as<Struct_>() const {
  return type_as_Struct_();
}

template<> inline const Union *SparseTensor::type_as<Union>() const {
  return type_as_Union();
}

template<> inline const FixedSizeBinary *SparseTensor::type_as<FixedSizeBinary>() const {
  return type_as_FixedSizeBinary();
}

template<> inline const FixedSizeList *SparseTensor::type_as<FixedSizeList>() const {
  return type_as_FixedSizeList();
}

template<> inline const Map *SparseTensor::type_as<Map>() const {
  return type_as_Map();
}

template<> inline const SparseTensorIndexCOO *SparseTensor::sparseIndex_as<SparseTensorIndexCOO>() const {
  return sparseIndex_as_SparseTensorIndexCOO();
}

template<> inline const SparseMatrixIndexCSR *SparseTensor::sparseIndex_as<SparseMatrixIndexCSR>() const {
  return sparseIndex_as_SparseMatrixIndexCSR();
}
699
/// Incrementally assembles a SparseTensor table in a FlatBufferBuilder.
/// Typical use is via CreateSparseTensor().
struct SparseTensorBuilder {
  flatbuffers::FlatBufferBuilder &fbb_;
  flatbuffers::uoffset_t start_;  // table start position returned by StartTable()
  void add_type_type(Type type_type) {
    fbb_.AddElement<uint8_t>(SparseTensor::VT_TYPE_TYPE, static_cast<uint8_t>(type_type), 0);
  }
  // `type` is the union payload; callers must also set the matching type_type.
  void add_type(flatbuffers::Offset<void> type) {
    fbb_.AddOffset(SparseTensor::VT_TYPE, type);
  }
  void add_shape(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<TensorDim>>> shape) {
    fbb_.AddOffset(SparseTensor::VT_SHAPE, shape);
  }
  void add_non_zero_length(int64_t non_zero_length) {
    fbb_.AddElement<int64_t>(SparseTensor::VT_NON_ZERO_LENGTH, non_zero_length, 0);
  }
  void add_sparseIndex_type(SparseTensorIndex sparseIndex_type) {
    fbb_.AddElement<uint8_t>(SparseTensor::VT_SPARSEINDEX_TYPE, static_cast<uint8_t>(sparseIndex_type), 0);
  }
  // `sparseIndex` is the union payload; set the matching sparseIndex_type too.
  void add_sparseIndex(flatbuffers::Offset<void> sparseIndex) {
    fbb_.AddOffset(SparseTensor::VT_SPARSEINDEX, sparseIndex);
  }
  void add_data(const Buffer *data) {
    fbb_.AddStruct(SparseTensor::VT_DATA, data);
  }
  explicit SparseTensorBuilder(flatbuffers::FlatBufferBuilder &_fbb)
      : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but not defined: builders are not assignable (fbb_ is a reference).
  SparseTensorBuilder &operator=(const SparseTensorBuilder &);
  // Finalizes the table and returns its offset within the buffer.
  flatbuffers::Offset<SparseTensor> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<SparseTensor>(end);
    return o;
  }
};
735
/// Convenience helper: builds a complete SparseTensor table in one call. All
/// offset parameters refer to data already serialized into `_fbb` (0 = field
/// absent). Fields are added largest-first (int64/struct before offsets before
/// bytes), the order emitted by flatc to pack the table tightly.
inline flatbuffers::Offset<SparseTensor> CreateSparseTensor(
    flatbuffers::FlatBufferBuilder &_fbb,
    Type type_type = Type_NONE,
    flatbuffers::Offset<void> type = 0,
    flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<TensorDim>>> shape = 0,
    int64_t non_zero_length = 0,
    SparseTensorIndex sparseIndex_type = SparseTensorIndex_NONE,
    flatbuffers::Offset<void> sparseIndex = 0,
    const Buffer *data = 0) {
  SparseTensorBuilder builder_(_fbb);
  builder_.add_non_zero_length(non_zero_length);
  builder_.add_data(data);
  builder_.add_sparseIndex(sparseIndex);
  builder_.add_shape(shape);
  builder_.add_type(type);
  builder_.add_sparseIndex_type(sparseIndex_type);
  builder_.add_type_type(type_type);
  return builder_.Finish();
}
755
/// Like CreateSparseTensor, but serializes the `shape` std::vector into the
/// buffer on the fly (nullptr = field absent).
inline flatbuffers::Offset<SparseTensor> CreateSparseTensorDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    Type type_type = Type_NONE,
    flatbuffers::Offset<void> type = 0,
    const std::vector<flatbuffers::Offset<TensorDim>> *shape = nullptr,
    int64_t non_zero_length = 0,
    SparseTensorIndex sparseIndex_type = SparseTensorIndex_NONE,
    flatbuffers::Offset<void> sparseIndex = 0,
    const Buffer *data = 0) {
  return org::apache::arrow::flatbuf::CreateSparseTensor(
      _fbb,
      type_type,
      type,
      shape ? _fbb.CreateVector<flatbuffers::Offset<TensorDim>>(*shape) : 0,
      non_zero_length,
      sparseIndex_type,
      sparseIndex,
      data);
}
775
/// Verifies a SparseTensorIndex union payload `obj` against its declared
/// discriminator `type`. NONE is trivially valid (no payload); unknown
/// discriminator values are rejected.
inline bool VerifySparseTensorIndex(flatbuffers::Verifier &verifier, const void *obj, SparseTensorIndex type) {
  switch (type) {
    case SparseTensorIndex_NONE: {
      return true;
    }
    case SparseTensorIndex_SparseTensorIndexCOO: {
      auto ptr = reinterpret_cast<const SparseTensorIndexCOO *>(obj);
      return verifier.VerifyTable(ptr);
    }
    case SparseTensorIndex_SparseMatrixIndexCSR: {
      auto ptr = reinterpret_cast<const SparseMatrixIndexCSR *>(obj);
      return verifier.VerifyTable(ptr);
    }
    default: return false;
  }
}
792
/// Verifies a vector of SparseTensorIndex union values against the parallel
/// vector of discriminators. Both vectors must be present (with equal length)
/// or both absent; each element is checked with VerifySparseTensorIndex.
inline bool VerifySparseTensorIndexVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector<flatbuffers::Offset<void>> *values, const flatbuffers::Vector<uint8_t> *types) {
  if (!values || !types) return !values && !types;
  if (values->size() != types->size()) return false;
  for (flatbuffers::uoffset_t i = 0; i < values->size(); ++i) {
    if (!VerifySparseTensorIndex(
        verifier,  values->Get(i), types->GetEnum<SparseTensorIndex>(i))) {
      return false;
    }
  }
  return true;
}
804
/// Interprets `buf` as a FlatBuffer whose root table is a SparseTensor.
/// Performs no validation; use VerifySparseTensorBuffer for untrusted input.
inline const org::apache::arrow::flatbuf::SparseTensor *GetSparseTensor(const void *buf) {
  return flatbuffers::GetRoot<org::apache::arrow::flatbuf::SparseTensor>(buf);
}

/// Same as GetSparseTensor, but for buffers carrying a 32-bit size prefix.
inline const org::apache::arrow::flatbuf::SparseTensor *GetSizePrefixedSparseTensor(const void *buf) {
  return flatbuffers::GetSizePrefixedRoot<org::apache::arrow::flatbuf::SparseTensor>(buf);
}
812
/// Verifies that the buffer held by `verifier` contains a structurally valid
/// SparseTensor root table (no file identifier is checked: nullptr).
inline bool VerifySparseTensorBuffer(
    flatbuffers::Verifier &verifier) {
  return verifier.VerifyBuffer<org::apache::arrow::flatbuf::SparseTensor>(nullptr);
}

/// Same as VerifySparseTensorBuffer, but for size-prefixed buffers.
inline bool VerifySizePrefixedSparseTensorBuffer(
    flatbuffers::Verifier &verifier) {
  return verifier.VerifySizePrefixedBuffer<org::apache::arrow::flatbuf::SparseTensor>(nullptr);
}
822
/// Finalizes `fbb` with `root` as the buffer's root SparseTensor table.
inline void FinishSparseTensorBuffer(
    flatbuffers::FlatBufferBuilder &fbb,
    flatbuffers::Offset<org::apache::arrow::flatbuf::SparseTensor> root) {
  fbb.Finish(root);
}

/// Same as FinishSparseTensorBuffer, but writes a 32-bit size prefix.
inline void FinishSizePrefixedSparseTensorBuffer(
    flatbuffers::FlatBufferBuilder &fbb,
    flatbuffers::Offset<org::apache::arrow::flatbuf::SparseTensor> root) {
  fbb.FinishSizePrefixed(root);
}
834
835} // namespace flatbuf
836} // namespace arrow
837} // namespace apache
838} // namespace org
839
840#endif // FLATBUFFERS_GENERATED_TENSOR_ORG_APACHE_ARROW_FLATBUF_H_
841