Line data Source code
/* NOTE(review): this file is a line-coverage (gcov/lcov-style) listing of the
 * flatcc-generated header flatbuffers_common_reader.h; the leading
 * "line count :" columns are listing artifacts, not C code. */
/* NOTE(review): no matching "#pragma GCC diagnostic pop" is visible in this
 * chunk -- confirm one exists at the end of the file. */
1 : #pragma GCC diagnostic push
2 : #pragma GCC diagnostic ignored "-Wmisleading-indentation"
3 :
4 : #ifndef FLATBUFFERS_COMMON_READER_H
5 : #define FLATBUFFERS_COMMON_READER_H
6 :
7 : /* Generated by flatcc 0.6.2 FlatBuffers schema compiler for C by dvide.com */
8 :
9 : /* Common FlatBuffers read functionality for C. */
10 :
11 : #include "flatcc/flatcc_prologue.h"
12 : #include "flatcc/flatcc_flatbuffers.h"
13 :
14 :
/* Load a scalar of type-prefix N located o bytes past p, via the generated
 * N ## _read_from_pe accessor ("pe" presumably "protocol endian" -- confirm
 * against flatcc_flatbuffers.h). */
15 0 : #define __flatbuffers_read_scalar_at_byteoffset(N, p, o) N ## _read_from_pe((uint8_t *)(p) + (o))
16 0 : #define __flatbuffers_read_scalar(N, p) N ## _read_from_pe(p)
/* Resolve field ID of table t through its vtable. Declares a voffset variable
 * named `offset`, 0 when the field is absent. The table starts with an soffset
 * pointing back to its vtable; the vtable's first word is its own byte size,
 * and the entry for field ID lives at word index ID + 2, hence the
 * (ID + 3) * sizeof(voffset) bounds check, which makes missing trailing
 * entries (schema evolution) read as "absent" rather than out of bounds. */
17 0 : #define __flatbuffers_read_vt(ID, offset, t)\
18 0 : flatbuffers_voffset_t offset = 0;\
19 0 : { flatbuffers_voffset_t id__tmp, *vt__tmp;\
20 0 : FLATCC_ASSERT(t != 0 && "null pointer table access");\
21 0 : id__tmp = ID;\
22 0 : vt__tmp = (flatbuffers_voffset_t *)((uint8_t *)(t) -\
23 0 : __flatbuffers_soffset_read_from_pe(t));\
24 0 : if (__flatbuffers_voffset_read_from_pe(vt__tmp) >= sizeof(vt__tmp[0]) * (id__tmp + 3u)) {\
25 0 : offset = __flatbuffers_voffset_read_from_pe(vt__tmp + id__tmp + 2);\
26 0 : }\
27 0 : }
/* Function body: true when field ID has a non-zero vtable entry. */
28 0 : #define __flatbuffers_field_present(ID, t) { __flatbuffers_read_vt(ID, offset__tmp, t) return offset__tmp != 0; }
/* Function body: pointer to the in-place scalar field, or NULL when absent. */
29 0 : #define __flatbuffers_scalar_field(T, ID, t)\
30 0 : {\
31 0 : __flatbuffers_read_vt(ID, offset__tmp, t)\
32 0 : if (offset__tmp) {\
33 0 : return (const T *)((uint8_t *)(t) + offset__tmp);\
34 0 : }\
35 0 : return 0;\
36 0 : }
/* Generate the accessor set for scalar field NK (vtable id ID) of table N:
 * N_NK_get / N_NK (same: value or default V when absent), N_NK_get_ptr
 * (pointer into the buffer or NULL), N_NK_is_present, plus the scan-by-field
 * helpers. TK is the type-prefix used for endian-aware reads, T the C type. */
37 : #define __flatbuffers_define_scalar_field(ID, N, NK, TK, T, V)\
38 0 : static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
39 0 : { __flatbuffers_read_vt(ID, offset__tmp, t__tmp)\
40 0 : return offset__tmp ? __flatbuffers_read_scalar_at_byteoffset(TK, t__tmp, offset__tmp) : V;\
41 0 : }\
42 0 : static inline T N ## _ ## NK(N ## _table_t t__tmp)\
43 0 : { __flatbuffers_read_vt(ID, offset__tmp, t__tmp)\
44 0 : return offset__tmp ? __flatbuffers_read_scalar_at_byteoffset(TK, t__tmp, offset__tmp) : V;\
45 0 : }\
46 : static inline const T *N ## _ ## NK ## _get_ptr(N ## _table_t t__tmp)\
47 : __flatbuffers_scalar_field(T, ID, t__tmp)\
48 : static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
49 : __flatbuffers_field_present(ID, t__tmp)\
50 : __flatbuffers_define_scan_by_scalar_field(N, NK, T)
/* As above, plus N_NK_option returning { is_null, value } so callers can
 * distinguish "absent" from "present with default value". */
51 : #define __flatbuffers_define_scalar_optional_field(ID, N, NK, TK, T, V)\
52 : __flatbuffers_define_scalar_field(ID, N, NK, TK, T, V)\
53 : static inline TK ## _option_t N ## _ ## NK ## _option(N ## _table_t t__tmp)\
54 : { TK ## _option_t ret; __flatbuffers_read_vt(ID, offset__tmp, t__tmp)\
55 : ret.is_null = offset__tmp == 0; ret.value = offset__tmp ?\
56 : __flatbuffers_read_scalar_at_byteoffset(TK, t__tmp, offset__tmp) : V;\
57 : return ret; }
/* Function body: struct fields are stored inline in the table, so the result
 * is simply table base + vtable offset. Asserts (debug) when a required
 * field (r non-zero) is missing; returns NULL otherwise. */
58 0 : #define __flatbuffers_struct_field(T, ID, t, r)\
59 0 : {\
60 0 : __flatbuffers_read_vt(ID, offset__tmp, t)\
61 0 : if (offset__tmp) {\
62 0 : return (T)((uint8_t *)(t) + offset__tmp);\
63 0 : }\
64 0 : FLATCC_ASSERT(!(r) && "required field missing");\
65 0 : return 0;\
66 0 : }
/* Function body: reference fields store a uoffset at the field slot; follow
 * it, then add `adjust` (used below to skip the vector length header). */
67 0 : #define __flatbuffers_offset_field(T, ID, t, r, adjust)\
68 0 : {\
69 0 : flatbuffers_uoffset_t *elem__tmp;\
70 0 : __flatbuffers_read_vt(ID, offset__tmp, t)\
71 0 : if (offset__tmp) {\
72 0 : elem__tmp = (flatbuffers_uoffset_t *)((uint8_t *)(t) + offset__tmp);\
73 0 : /* Add sizeof so C api can have raw access past header field. */\
74 0 : return (T)((uint8_t *)(elem__tmp) + adjust +\
75 0 : __flatbuffers_uoffset_read_from_pe(elem__tmp));\
76 0 : }\
77 0 : FLATCC_ASSERT(!(r) && "required field missing");\
78 0 : return 0;\
79 0 : }
/* Vectors/strings point at their length header; skip it. Tables do not. */
80 : #define __flatbuffers_vector_field(T, ID, t, r) __flatbuffers_offset_field(T, ID, t, r, sizeof(flatbuffers_uoffset_t))
81 : #define __flatbuffers_table_field(T, ID, t, r) __flatbuffers_offset_field(T, ID, t, r, 0)
/* The following generators each emit N_NK_get, the N_NK alias, and
 * N_NK_is_present for struct, vector, table and string fields respectively. */
82 : #define __flatbuffers_define_struct_field(ID, N, NK, T, r)\
83 : static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
84 : __flatbuffers_struct_field(T, ID, t__tmp, r)\
85 : static inline T N ## _ ## NK(N ## _table_t t__tmp)\
86 : __flatbuffers_struct_field(T, ID, t__tmp, r)\
87 : static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
88 : __flatbuffers_field_present(ID, t__tmp)
89 : #define __flatbuffers_define_vector_field(ID, N, NK, T, r)\
90 : static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
91 : __flatbuffers_vector_field(T, ID, t__tmp, r)\
92 : static inline T N ## _ ## NK(N ## _table_t t__tmp)\
93 : __flatbuffers_vector_field(T, ID, t__tmp, r)\
94 : static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
95 : __flatbuffers_field_present(ID, t__tmp)
96 : #define __flatbuffers_define_table_field(ID, N, NK, T, r)\
97 : static inline T N ## _ ## NK ## _get(N ## _table_t t__tmp)\
98 : __flatbuffers_table_field(T, ID, t__tmp, r)\
99 : static inline T N ## _ ## NK(N ## _table_t t__tmp)\
100 : __flatbuffers_table_field(T, ID, t__tmp, r)\
101 : static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
102 : __flatbuffers_field_present(ID, t__tmp)
/* String fields additionally get scan-by-string helpers. */
103 : #define __flatbuffers_define_string_field(ID, N, NK, r)\
104 : static inline flatbuffers_string_t N ## _ ## NK ## _get(N ## _table_t t__tmp)\
105 : __flatbuffers_vector_field(flatbuffers_string_t, ID, t__tmp, r)\
106 : static inline flatbuffers_string_t N ## _ ## NK(N ## _table_t t__tmp)\
107 : __flatbuffers_vector_field(flatbuffers_string_t, ID, t__tmp, r)\
108 : static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
109 : __flatbuffers_field_present(ID, t__tmp)\
110 : __flatbuffers_define_scan_by_string_field(N, NK)
/* Function body: a vector's element count is the uoffset word stored
 * immediately before its data; NULL vectors have length 0. */
111 0 : #define __flatbuffers_vec_len(vec)\
112 0 : { return (vec) ? (size_t)__flatbuffers_uoffset_read_from_pe((flatbuffers_uoffset_t *)vec - 1) : 0; }
113 : #define __flatbuffers_string_len(s) __flatbuffers_vec_len(s)
114 : static inline size_t flatbuffers_vec_len(const void *vec)
115 : __flatbuffers_vec_len(vec)
/* Element access bodies; all bounds checks are debug-only FLATCC_ASSERTs. */
116 0 : #define __flatbuffers_scalar_vec_at(N, vec, i)\
117 0 : { FLATCC_ASSERT(flatbuffers_vec_len(vec) > (i) && "index out of range");\
118 0 : return __flatbuffers_read_scalar(N, &(vec)[i]); }
119 0 : #define __flatbuffers_struct_vec_at(vec, i)\
120 0 : { FLATCC_ASSERT(flatbuffers_vec_len(vec) > (i) && "index out of range"); return (vec) + (i); }
121 : /* `adjust` skips past the header for string vectors. */
/* Reference vectors store per-element uoffsets relative to the element slot. */
122 0 : #define __flatbuffers_offset_vec_at(T, vec, i, adjust)\
123 0 : { const flatbuffers_uoffset_t *elem__tmp = (vec) + (i);\
124 0 : FLATCC_ASSERT(flatbuffers_vec_len(vec) > (i) && "index out of range");\
125 0 : return (T)((uint8_t *)(elem__tmp) + (size_t)__flatbuffers_uoffset_read_from_pe(elem__tmp) + (adjust)); }
126 : #define __flatbuffers_define_scalar_vec_len(N)\
127 0 : static inline size_t N ## _vec_len(N ##_vec_t vec__tmp)\
128 0 : { return flatbuffers_vec_len(vec__tmp); }
129 : #define __flatbuffers_define_scalar_vec_at(N, T) \
130 : static inline T N ## _vec_at(N ## _vec_t vec__tmp, size_t i__tmp)\
131 : __flatbuffers_scalar_vec_at(N, vec__tmp, i__tmp)
/* Strings are length-prefixed like vectors and NUL terminated; the typedef is
 * a plain const char * so they interoperate with C string functions. */
132 : typedef const char *flatbuffers_string_t;
133 : static inline size_t flatbuffers_string_len(flatbuffers_string_t s)
134 : __flatbuffers_string_len(s)
135 : typedef const flatbuffers_uoffset_t *flatbuffers_string_vec_t;
136 : typedef flatbuffers_uoffset_t *flatbuffers_string_mutable_vec_t;
137 : static inline size_t flatbuffers_string_vec_len(flatbuffers_string_vec_t vec)
138 : __flatbuffers_vec_len(vec)
139 : static inline flatbuffers_string_t flatbuffers_string_vec_at(flatbuffers_string_vec_t vec, size_t i)
140 : __flatbuffers_offset_vec_at(flatbuffers_string_t, vec, i, sizeof(vec[0]))
/* flatbuffers_generic_t points at a table/struct; casting to string skips the
 * string's uoffset length header first. */
141 : typedef const void *flatbuffers_generic_t;
142 : typedef void *flatbuffers_mutable_generic_t;
143 : static inline flatbuffers_string_t flatbuffers_string_cast_from_generic(const flatbuffers_generic_t p)
144 0 : { return p ? ((const char *)p) + __flatbuffers_uoffset__size() : 0; }
145 : typedef const flatbuffers_uoffset_t *flatbuffers_generic_vec_t;
146 : typedef flatbuffers_uoffset_t *flatbuffers_generic_table_mutable_vec_t;
147 : static inline size_t flatbuffers_generic_vec_len(flatbuffers_generic_vec_t vec)
148 : __flatbuffers_vec_len(vec)
149 : static inline flatbuffers_generic_t flatbuffers_generic_vec_at(flatbuffers_generic_vec_t vec, size_t i)
150 : __flatbuffers_offset_vec_at(flatbuffers_generic_t, vec, i, 0)
/* Same element, but adjusted past the length header so it reads as a string. */
151 : static inline flatbuffers_generic_t flatbuffers_generic_vec_at_as_string(flatbuffers_generic_vec_t vec, size_t i)
152 : __flatbuffers_offset_vec_at(flatbuffers_generic_t, vec, i, sizeof(vec[0]))
/* A union is a (type tag, value pointer) pair; a union vector is a pair of
 * parallel vectors: one of type tags, one of value offsets. */
153 : typedef struct flatbuffers_union {
154 : flatbuffers_union_type_t type;
155 : flatbuffers_generic_t value;
156 : } flatbuffers_union_t;
157 : typedef struct flatbuffers_union_vec {
158 : const flatbuffers_union_type_t *type;
159 : const flatbuffers_uoffset_t *value;
160 : } flatbuffers_union_vec_t;
161 : typedef struct flatbuffers_mutable_union {
162 : flatbuffers_union_type_t type;
163 : flatbuffers_mutable_generic_t value;
164 : } flatbuffers_mutable_union_t;
165 : typedef struct flatbuffers_mutable_union_vec {
166 : flatbuffers_union_type_t *type;
167 : flatbuffers_uoffset_t *value;
168 : } flatbuffers_mutable_union_vec_t;
/* Const-to-mutable casts. NOTE(review): the trailing backslashes on these
 * non-macro declarations are generator quirks; they only splice lines and do
 * not change meaning. */
169 0 : static inline flatbuffers_mutable_union_t flatbuffers_mutable_union_cast(flatbuffers_union_t u__tmp)\
170 0 : { flatbuffers_mutable_union_t mu = { u__tmp.type, (flatbuffers_mutable_generic_t)u__tmp.value };\
171 0 : return mu; }
172 0 : static inline flatbuffers_mutable_union_vec_t flatbuffers_mutable_union_vec_cast(flatbuffers_union_vec_t uv__tmp)\
173 0 : { flatbuffers_mutable_union_vec_t muv =\
174 0 : { (flatbuffers_union_type_t *)uv__tmp.type, (flatbuffers_uoffset_t *)uv__tmp.value }; return muv; }
/* Function body: read the union's type tag (a utype scalar), 0 (NONE) when
 * the field is absent. */
175 : #define __flatbuffers_union_type_field(ID, t)\
176 : {\
177 : __flatbuffers_read_vt(ID, offset__tmp, t)\
178 : return offset__tmp ? __flatbuffers_read_scalar_at_byteoffset(__flatbuffers_utype, t, offset__tmp) : 0;\
179 : }
180 0 : static inline flatbuffers_string_t flatbuffers_string_cast_from_union(const flatbuffers_union_t u__tmp)\
181 0 : { return flatbuffers_string_cast_from_generic(u__tmp.value); }
/* Generate union field accessors. A union occupies two vtable slots: the
 * hidden type field at ID - 1 and the value field at ID. N_NK_union packs
 * both into a T_union_t, returning { 0, 0 } when the type tag is NONE. */
182 : #define __flatbuffers_define_union_field(NS, ID, N, NK, T, r)\
183 : static inline T ## _union_type_t N ## _ ## NK ## _type_get(N ## _table_t t__tmp)\
184 : __## NS ## union_type_field(((ID) - 1), t__tmp)\
185 : static inline NS ## generic_t N ## _ ## NK ## _get(N ## _table_t t__tmp)\
186 : __## NS ## table_field(NS ## generic_t, ID, t__tmp, r)\
187 : static inline T ## _union_type_t N ## _ ## NK ## _type(N ## _table_t t__tmp)\
188 : __## NS ## union_type_field(((ID) - 1), t__tmp)\
189 : static inline NS ## generic_t N ## _ ## NK(N ## _table_t t__tmp)\
190 : __## NS ## table_field(NS ## generic_t, ID, t__tmp, r)\
191 : static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
192 : __## NS ## field_present(ID, t__tmp)\
193 : static inline T ## _union_t N ## _ ## NK ## _union(N ## _table_t t__tmp)\
194 : { T ## _union_t u__tmp = { 0, 0 }; u__tmp.type = N ## _ ## NK ## _type_get(t__tmp);\
195 : if (u__tmp.type == 0) { return u__tmp; } u__tmp.value = N ## _ ## NK ## _get(t__tmp); return u__tmp; }\
196 : static inline NS ## string_t N ## _ ## NK ## _as_string(N ## _table_t t__tmp)\
197 : { return NS ## string_cast_from_generic(N ## _ ## NK ## _get(t__tmp)); }\
198 :
/* Generate union-vector operations: length is taken from the type vector,
 * element access reads the tag first and only dereferences the value vector
 * for known (non-NONE) tags. */
199 : #define __flatbuffers_define_union_vector_ops(NS, T)\
200 : static inline size_t T ## _union_vec_len(T ## _union_vec_t uv__tmp)\
201 : { return NS ## vec_len(uv__tmp.type); }\
202 : static inline T ## _union_t T ## _union_vec_at(T ## _union_vec_t uv__tmp, size_t i__tmp)\
203 : { T ## _union_t u__tmp = { 0, 0 }; size_t n__tmp = NS ## vec_len(uv__tmp.type);\
204 : FLATCC_ASSERT(n__tmp > (i__tmp) && "index out of range"); u__tmp.type = uv__tmp.type[i__tmp];\
205 : /* Unknown type is treated as NONE for schema evolution. */\
206 : if (u__tmp.type == 0) return u__tmp;\
207 : u__tmp.value = NS ## generic_vec_at(uv__tmp.value, i__tmp); return u__tmp; }\
208 : static inline NS ## string_t T ## _union_vec_at_as_string(T ## _union_vec_t uv__tmp, size_t i__tmp)\
209 : { return (NS ## string_t) NS ## generic_vec_at_as_string(uv__tmp.value, i__tmp); }\
210 :
/* Per-union-type typedefs plus the ops above. */
211 : #define __flatbuffers_define_union_vector(NS, T)\
212 : typedef NS ## union_vec_t T ## _union_vec_t;\
213 : typedef NS ## mutable_union_vec_t T ## _mutable_union_vec_t;\
214 : static inline T ## _mutable_union_vec_t T ## _mutable_union_vec_cast(T ## _union_vec_t u__tmp)\
215 : { return NS ## mutable_union_vec_cast(u__tmp); }\
216 : __## NS ## define_union_vector_ops(NS, T)
217 : #define __flatbuffers_define_union(NS, T)\
218 : typedef NS ## union_t T ## _union_t;\
219 : typedef NS ## mutable_union_t T ## _mutable_union_t;\
220 : static inline T ## _mutable_union_t T ## _mutable_union_cast(T ## _union_t u__tmp)\
221 : { return NS ## mutable_union_cast(u__tmp); }\
222 : __## NS ## define_union_vector(NS, T)
/* Union vector field: type vector at vtable id ID - 1, value vector at ID.
 * N_NK_union asserts (debug) that the two vectors have equal length. */
223 : #define __flatbuffers_define_union_vector_field(NS, ID, N, NK, T, r)\
224 : __## NS ## define_vector_field(ID - 1, N, NK ## _type, T ## _vec_t, r)\
225 : __## NS ## define_vector_field(ID, N, NK, flatbuffers_generic_vec_t, r)\
226 : static inline T ## _union_vec_t N ## _ ## NK ## _union(N ## _table_t t__tmp)\
227 : { T ## _union_vec_t uv__tmp; uv__tmp.type = N ## _ ## NK ## _type_get(t__tmp);\
228 : uv__tmp.value = N ## _ ## NK(t__tmp);\
229 : FLATCC_ASSERT(NS ## vec_len(uv__tmp.type) == NS ## vec_len(uv__tmp.value)\
230 : && "union vector type length mismatch"); return uv__tmp; }
231 : #include <string.h>
/* Both sentinels are SIZE_MAX: `not_found` for failed searches, `end` for
 * open-ended ranges. */
232 : static const size_t flatbuffers_not_found = (size_t)-1;
233 : static const size_t flatbuffers_end = (size_t)-1;
234 : #define __flatbuffers_identity(n) (n)
235 : #define __flatbuffers_min(a, b) ((a) < (b) ? (a) : (b))
236 : /* Subtraction doesn't work for unsigned types. */
/* Three-way compare yielding -1/0/1 without subtracting (safe for unsigned). */
237 : #define __flatbuffers_scalar_cmp(x, y, n) ((x) < (y) ? -1 : (x) > (y))
/* Compare string v against the n leading bytes of s; compares the common
 * prefix, then breaks ties by length. */
238 : static inline int __flatbuffers_string_n_cmp(flatbuffers_string_t v, const char *s, size_t n)
239 0 : { size_t nv = flatbuffers_string_len(v); int x = strncmp(v, s, nv < n ? nv : n);
240 0 : return x != 0 ? x : nv < n ? -1 : nv > n; }
241 : /* `n` arg unused, but needed by string find macro expansion. */
242 0 : static inline int __flatbuffers_string_cmp(flatbuffers_string_t v, const char *s, size_t n) { (void)n; return strcmp(v, s); }
243 : /* A = identity if searching scalar vectors rather than key fields. */
244 : /* Returns lowest matching index or not_found. */
245 0 : #define __flatbuffers_find_by_field(A, V, E, L, K, Kn, T, D)\
246 0 : { T v__tmp; size_t a__tmp = 0, b__tmp, m__tmp; if (!(b__tmp = L(V))) { return flatbuffers_not_found; }\
247 0 : --b__tmp;\
248 0 : while (a__tmp < b__tmp) {\
249 0 : m__tmp = a__tmp + ((b__tmp - a__tmp) >> 1);\
250 0 : v__tmp = A(E(V, m__tmp));\
251 0 : if ((D(v__tmp, (K), (Kn))) < 0) {\
252 0 : a__tmp = m__tmp + 1;\
253 0 : } else {\
254 0 : b__tmp = m__tmp;\
255 0 : }\
256 0 : }\
257 0 : if (a__tmp == b__tmp) {\
258 0 : v__tmp = A(E(V, a__tmp));\
259 0 : if (D(v__tmp, (K), (Kn)) == 0) {\
260 0 : return a__tmp;\
261 0 : }\
262 0 : }\
263 0 : return flatbuffers_not_found;\
264 0 : }
265 : #define __flatbuffers_find_by_scalar_field(A, V, E, L, K, T)\
266 : __flatbuffers_find_by_field(A, V, E, L, K, 0, T, __flatbuffers_scalar_cmp)
267 : #define __flatbuffers_find_by_string_field(A, V, E, L, K)\
268 : __flatbuffers_find_by_field(A, V, E, L, K, 0, flatbuffers_string_t, __flatbuffers_string_cmp)
269 : #define __flatbuffers_find_by_string_n_field(A, V, E, L, K, Kn)\
270 : __flatbuffers_find_by_field(A, V, E, L, K, Kn, flatbuffers_string_t, __flatbuffers_string_n_cmp)
271 : #define __flatbuffers_define_find_by_scalar_field(N, NK, TK)\
272 : static inline size_t N ## _vec_find_by_ ## NK(N ## _vec_t vec__tmp, TK key__tmp)\
273 : __flatbuffers_find_by_scalar_field(N ## _ ## NK, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, TK)
274 : #define __flatbuffers_define_scalar_find(N, T)\
275 : static inline size_t N ## _vec_find(N ## _vec_t vec__tmp, T key__tmp)\
276 : __flatbuffers_find_by_scalar_field(__flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)
277 : #define __flatbuffers_define_find_by_string_field(N, NK) \
278 : /* Note: find only works on vectors sorted by this field. */\
279 : static inline size_t N ## _vec_find_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp)\
280 : __flatbuffers_find_by_string_field(N ## _ ## NK, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
281 : static inline size_t N ## _vec_find_n_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
282 : __flatbuffers_find_by_string_n_field(N ## _ ## NK, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)
283 : #define __flatbuffers_define_default_find_by_scalar_field(N, NK, TK)\
284 : static inline size_t N ## _vec_find(N ## _vec_t vec__tmp, TK key__tmp)\
285 : { return N ## _vec_find_by_ ## NK(vec__tmp, key__tmp); }
286 : #define __flatbuffers_define_default_find_by_string_field(N, NK) \
287 : static inline size_t N ## _vec_find(N ## _vec_t vec__tmp, const char *s__tmp)\
288 : { return N ## _vec_find_by_ ## NK(vec__tmp, s__tmp); }\
289 : static inline size_t N ## _vec_find_n(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
290 : { return N ## _vec_find_n_by_ ## NK(vec__tmp, s__tmp, n__tmp); }
291 : /* A = identity if searching scalar vectors rather than key fields. */
292 : /* Returns lowest matching index or not_found. */
/* Function body: linear forward scan of indices [b, e) for the first element
 * whose key (via accessor A) compares equal. Works on unsorted vectors,
 * unlike find. L is unused here (kept for a uniform parameter list). */
293 0 : #define __flatbuffers_scan_by_field(b, e, A, V, E, L, K, Kn, T, D)\
294 0 : { T v__tmp; size_t i__tmp;\
295 0 : for (i__tmp = b; i__tmp < e; ++i__tmp) {\
296 0 : v__tmp = A(E(V, i__tmp));\
297 0 : if (D(v__tmp, (K), (Kn)) == 0) {\
298 0 : return i__tmp;\
299 0 : }\
300 0 : }\
301 0 : return flatbuffers_not_found;\
302 0 : }
/* Reverse scan of [b, e): the `while (i-- > b)` form safely counts down with
 * an unsigned index, returning the highest matching index. */
303 0 : #define __flatbuffers_rscan_by_field(b, e, A, V, E, L, K, Kn, T, D)\
304 0 : { T v__tmp; size_t i__tmp = e;\
305 0 : while (i__tmp-- > b) {\
306 0 : v__tmp = A(E(V, i__tmp));\
307 0 : if (D(v__tmp, (K), (Kn)) == 0) {\
308 0 : return i__tmp;\
309 0 : }\
310 0 : }\
311 0 : return flatbuffers_not_found;\
312 0 : }
/* Scalar / string / length-bounded-string specializations of both scans. */
313 : #define __flatbuffers_scan_by_scalar_field(b, e, A, V, E, L, K, T)\
314 : __flatbuffers_scan_by_field(b, e, A, V, E, L, K, 0, T, __flatbuffers_scalar_cmp)
315 : #define __flatbuffers_scan_by_string_field(b, e, A, V, E, L, K)\
316 : __flatbuffers_scan_by_field(b, e, A, V, E, L, K, 0, flatbuffers_string_t, __flatbuffers_string_cmp)
317 : #define __flatbuffers_scan_by_string_n_field(b, e, A, V, E, L, K, Kn)\
318 : __flatbuffers_scan_by_field(b, e, A, V, E, L, K, Kn, flatbuffers_string_t, __flatbuffers_string_n_cmp)
319 : #define __flatbuffers_rscan_by_scalar_field(b, e, A, V, E, L, K, T)\
320 : __flatbuffers_rscan_by_field(b, e, A, V, E, L, K, 0, T, __flatbuffers_scalar_cmp)
321 : #define __flatbuffers_rscan_by_string_field(b, e, A, V, E, L, K)\
322 : __flatbuffers_rscan_by_field(b, e, A, V, E, L, K, 0, flatbuffers_string_t, __flatbuffers_string_cmp)
323 : #define __flatbuffers_rscan_by_string_n_field(b, e, A, V, E, L, K, Kn)\
324 : __flatbuffers_rscan_by_field(b, e, A, V, E, L, K, Kn, flatbuffers_string_t, __flatbuffers_string_n_cmp)
/* Generate the public scan API for scalar key field NK of N: full-range and
 * _ex (begin/end clamped to the vector length) variants, forward and
 * reverse. */
325 : #define __flatbuffers_define_scan_by_scalar_field(N, NK, T)\
326 : static inline size_t N ## _vec_scan_by_ ## NK(N ## _vec_t vec__tmp, T key__tmp)\
327 : __flatbuffers_scan_by_scalar_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
328 : static inline size_t N ## _vec_scan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
329 : __flatbuffers_scan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
330 : static inline size_t N ## _vec_rscan_by_ ## NK(N ## _vec_t vec__tmp, T key__tmp)\
331 : __flatbuffers_rscan_by_scalar_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
332 : static inline size_t N ## _vec_rscan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
333 : __flatbuffers_rscan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)
/* Same family for plain scalar vectors (element is its own key). */
334 : #define __flatbuffers_define_scalar_scan(N, T)\
335 : static inline size_t N ## _vec_scan(N ## _vec_t vec__tmp, T key__tmp)\
336 : __flatbuffers_scan_by_scalar_field(0, N ## _vec_len(vec__tmp), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
337 : static inline size_t N ## _vec_scan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
338 : __flatbuffers_scan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
339 : static inline size_t N ## _vec_rscan(N ## _vec_t vec__tmp, T key__tmp)\
340 : __flatbuffers_rscan_by_scalar_field(0, N ## _vec_len(vec__tmp), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)\
341 : static inline size_t N ## _vec_rscan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, T key__tmp)\
342 : __flatbuffers_rscan_by_scalar_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), __flatbuffers_identity, vec__tmp, N ## _vec_at, N ## _vec_len, key__tmp, T)
/* String key fields additionally get _n (length-bounded key) variants. */
343 : #define __flatbuffers_define_scan_by_string_field(N, NK) \
344 : static inline size_t N ## _vec_scan_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp)\
345 : __flatbuffers_scan_by_string_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
346 : static inline size_t N ## _vec_scan_n_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
347 : __flatbuffers_scan_by_string_n_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)\
348 : static inline size_t N ## _vec_scan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
349 : __flatbuffers_scan_by_string_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
350 : static inline size_t N ## _vec_scan_ex_n_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, size_t n__tmp)\
351 : __flatbuffers_scan_by_string_n_field(begin__tmp, __flatbuffers_min( end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)\
352 : static inline size_t N ## _vec_rscan_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp)\
353 : __flatbuffers_rscan_by_string_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
354 : static inline size_t N ## _vec_rscan_n_by_ ## NK(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
355 : __flatbuffers_rscan_by_string_n_field(0, N ## _vec_len(vec__tmp), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)\
356 : static inline size_t N ## _vec_rscan_ex_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
357 : __flatbuffers_rscan_by_string_field(begin__tmp, __flatbuffers_min(end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp)\
358 : static inline size_t N ## _vec_rscan_ex_n_by_ ## NK(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, size_t n__tmp)\
359 : __flatbuffers_rscan_by_string_n_field(begin__tmp, __flatbuffers_min( end__tmp, N ## _vec_len(vec__tmp)), N ## _ ## NK ## _get, vec__tmp, N ## _vec_at, N ## _vec_len, s__tmp, n__tmp)
/* Forward the unsuffixed scan entry points to the default key field NK. */
360 : #define __flatbuffers_define_default_scan_by_scalar_field(N, NK, TK)\
361 : static inline size_t N ## _vec_scan(N ## _vec_t vec__tmp, TK key__tmp)\
362 : { return N ## _vec_scan_by_ ## NK(vec__tmp, key__tmp); }\
363 : static inline size_t N ## _vec_scan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, TK key__tmp)\
364 : { return N ## _vec_scan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, key__tmp); }\
365 : static inline size_t N ## _vec_rscan(N ## _vec_t vec__tmp, TK key__tmp)\
366 : { return N ## _vec_rscan_by_ ## NK(vec__tmp, key__tmp); }\
367 : static inline size_t N ## _vec_rscan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, TK key__tmp)\
368 : { return N ## _vec_rscan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, key__tmp); }
369 : #define __flatbuffers_define_default_scan_by_string_field(N, NK) \
370 : static inline size_t N ## _vec_scan(N ## _vec_t vec__tmp, const char *s__tmp)\
371 : { return N ## _vec_scan_by_ ## NK(vec__tmp, s__tmp); }\
372 : static inline size_t N ## _vec_scan_n(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
373 : { return N ## _vec_scan_n_by_ ## NK(vec__tmp, s__tmp, n__tmp); }\
374 : static inline size_t N ## _vec_scan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
375 : { return N ## _vec_scan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp); }\
376 : static inline size_t N ## _vec_scan_ex_n(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, size_t n__tmp)\
377 : { return N ## _vec_scan_ex_n_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp, n__tmp); }\
378 : static inline size_t N ## _vec_rscan(N ## _vec_t vec__tmp, const char *s__tmp)\
379 : { return N ## _vec_rscan_by_ ## NK(vec__tmp, s__tmp); }\
380 : static inline size_t N ## _vec_rscan_n(N ## _vec_t vec__tmp, const char *s__tmp, size_t n__tmp)\
381 : { return N ## _vec_rscan_n_by_ ## NK(vec__tmp, s__tmp, n__tmp); }\
382 : static inline size_t N ## _vec_rscan_ex(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp)\
383 : { return N ## _vec_rscan_ex_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp); }\
384 : static inline size_t N ## _vec_rscan_ex_n(N ## _vec_t vec__tmp, size_t begin__tmp, size_t end__tmp, const char *s__tmp, size_t n__tmp)\
385 : { return N ## _vec_rscan_ex_n_by_ ## NK(vec__tmp, begin__tmp, end__tmp, s__tmp, n__tmp); }
/* Generate an in-place heapsort over N_mutable_vec_t. A extracts the sort
 * key, E/L are element-at/length, TK the key type, TE the element type for
 * swapping, D a three-way comparator, S the swap macro. X distinguishes the
 * generated helper names per key field. NOTE(review): the sift-down uses
 * `root << 1` child indexing with an inclusive end index -- the exact
 * index convention is subtle; do not modify without upstream flatcc tests. */
386 : #define __flatbuffers_heap_sort(N, X, A, E, L, TK, TE, D, S)\
387 : static inline void __ ## N ## X ## __heap_sift_down(\
388 0 : N ## _mutable_vec_t vec__tmp, size_t start__tmp, size_t end__tmp)\
389 0 : { size_t child__tmp, root__tmp; TK v1__tmp, v2__tmp, vroot__tmp;\
390 0 : root__tmp = start__tmp;\
391 0 : while ((root__tmp << 1) <= end__tmp) {\
392 0 : child__tmp = root__tmp << 1;\
393 0 : if (child__tmp < end__tmp) {\
394 0 : v1__tmp = A(E(vec__tmp, child__tmp));\
395 0 : v2__tmp = A(E(vec__tmp, child__tmp + 1));\
396 0 : if (D(v1__tmp, v2__tmp) < 0) {\
397 0 : child__tmp++;\
398 0 : }\
399 0 : }\
400 0 : vroot__tmp = A(E(vec__tmp, root__tmp));\
401 0 : v1__tmp = A(E(vec__tmp, child__tmp));\
402 0 : if (D(vroot__tmp, v1__tmp) < 0) {\
403 0 : S(vec__tmp, root__tmp, child__tmp, TE);\
404 0 : root__tmp = child__tmp;\
405 0 : } else {\
406 0 : return;\
407 0 : }\
408 0 : }\
409 0 : }\
410 0 : static inline void __ ## N ## X ## __heap_sort(N ## _mutable_vec_t vec__tmp)\
411 0 : { size_t start__tmp, end__tmp, size__tmp;\
412 0 : size__tmp = L(vec__tmp); if (size__tmp == 0) return; end__tmp = size__tmp - 1; start__tmp = size__tmp >> 1;\
413 0 : do { __ ## N ## X ## __heap_sift_down(vec__tmp, start__tmp, end__tmp); } while (start__tmp--);\
414 0 : while (end__tmp > 0) { \
415 0 : S(vec__tmp, 0, end__tmp, TE);\
416 0 : __ ## N ## X ## __heap_sift_down(vec__tmp, 0, --end__tmp); } }
/* Public entry points: sort by a named key field, or sort a plain vector
 * where the element is its own key. */
417 : #define __flatbuffers_define_sort_by_field(N, NK, TK, TE, D, S)\
418 : __flatbuffers_heap_sort(N, _sort_by_ ## NK, N ## _ ## NK ## _get, N ## _vec_at, N ## _vec_len, TK, TE, D, S)\
419 : static inline void N ## _vec_sort_by_ ## NK(N ## _mutable_vec_t vec__tmp)\
420 : { __ ## N ## _sort_by_ ## NK ## __heap_sort(vec__tmp); }
421 : #define __flatbuffers_define_sort(N, TK, TE, D, S)\
422 : __flatbuffers_heap_sort(N, , __flatbuffers_identity, N ## _vec_at, N ## _vec_len, TK, TE, D, S)\
423 0 : static inline void N ## _vec_sort(N ## _mutable_vec_t vec__tmp) { __ ## N ## __heap_sort(vec__tmp); }
/* Comparators for the sort generators. */
424 : #define __flatbuffers_scalar_diff(x, y) ((x) < (y) ? -1 : (x) > (y))
425 : #define __flatbuffers_string_diff(x, y) __flatbuffers_string_n_cmp((x), (const char *)(y), flatbuffers_string_len(y))
/* Plain value swap for scalar/struct elements. */
426 : #define __flatbuffers_value_swap(vec, a, b, TE) { TE x__tmp = vec[b]; vec[b] = vec[a]; vec[a] = x__tmp; }
/* Swapping uoffset elements must also re-bias each offset, since uoffsets are
 * relative to their own storage slot: d__tmp is the byte distance between the
 * two slots and is subtracted/added accordingly before writing back. */
427 : #define __flatbuffers_uoffset_swap(vec, a, b, TE)\
428 : { TE ta__tmp, tb__tmp, d__tmp;\
429 : d__tmp = (TE)((a - b) * sizeof(vec[0]));\
430 : ta__tmp = __flatbuffers_uoffset_read_from_pe(vec + b) - d__tmp;\
431 : tb__tmp = __flatbuffers_uoffset_read_from_pe(vec + a) + d__tmp;\
432 : __flatbuffers_uoffset_write_to_pe(vec + a, ta__tmp);\
433 : __flatbuffers_uoffset_write_to_pe(vec + b, tb__tmp); }
434 : #define __flatbuffers_scalar_swap(vec, a, b, TE) __flatbuffers_value_swap(vec, a, b, TE)
435 : #define __flatbuffers_string_swap(vec, a, b, TE) __flatbuffers_uoffset_swap(vec, a, b, TE)
436 : #define __flatbuffers_struct_swap(vec, a, b, TE) __flatbuffers_value_swap(vec, a, b, TE)
437 : #define __flatbuffers_table_swap(vec, a, b, TE) __flatbuffers_uoffset_swap(vec, a, b, TE)
/* Sort generators wired to the appropriate diff/swap pairs. */
438 : #define __flatbuffers_define_struct_sort_by_scalar_field(N, NK, TK, TE)\
439 : __flatbuffers_define_sort_by_field(N, NK, TK, TE, __flatbuffers_scalar_diff, __flatbuffers_struct_swap)
440 : #define __flatbuffers_define_table_sort_by_scalar_field(N, NK, TK)\
441 : __flatbuffers_define_sort_by_field(N, NK, TK, flatbuffers_uoffset_t, __flatbuffers_scalar_diff, __flatbuffers_table_swap)
442 : #define __flatbuffers_define_table_sort_by_string_field(N, NK)\
443 : __flatbuffers_define_sort_by_field(N, NK, flatbuffers_string_t, flatbuffers_uoffset_t, __flatbuffers_string_diff, __flatbuffers_table_swap)
444 : #define __flatbuffers_define_scalar_sort(N, T) __flatbuffers_define_sort(N, T, T, __flatbuffers_scalar_diff, __flatbuffers_scalar_swap)
445 : #define __flatbuffers_define_string_sort() __flatbuffers_define_sort(flatbuffers_string, flatbuffers_string_t, flatbuffers_uoffset_t, __flatbuffers_string_diff, __flatbuffers_string_swap)
/* Statement bodies used by generated recursive-sort code to sort nested
 * vector/table/union fields and their elements in place. */
446 : #define __flatbuffers_sort_vector_field(N, NK, T, t)\
447 : { T ## _mutable_vec_t v__tmp = (T ## _mutable_vec_t) N ## _ ## NK ## _get(t);\
448 : if (v__tmp) T ## _vec_sort(v__tmp); }
449 : #define __flatbuffers_sort_table_field(N, NK, T, t)\
450 : { T ## _sort((T ## _mutable_table_t)N ## _ ## NK ## _get(t)); }
451 : #define __flatbuffers_sort_union_field(N, NK, T, t)\
452 : { T ## _sort(T ## _mutable_union_cast(N ## _ ## NK ## _union(t))); }
453 : #define __flatbuffers_sort_table_vector_field_elements(N, NK, T, t)\
454 : { T ## _vec_t v__tmp = N ## _ ## NK ## _get(t); size_t i__tmp, n__tmp;\
455 : n__tmp = T ## _vec_len(v__tmp); for (i__tmp = 0; i__tmp < n__tmp; ++i__tmp) {\
456 : T ## _sort((T ## _mutable_table_t)T ## _vec_at(v__tmp, i__tmp)); }}
457 : #define __flatbuffers_sort_union_vector_field_elements(N, NK, T, t)\
458 : { T ## _union_vec_t v__tmp = N ## _ ## NK ## _union(t); size_t i__tmp, n__tmp;\
459 : n__tmp = T ## _union_vec_len(v__tmp); for (i__tmp = 0; i__tmp < n__tmp; ++i__tmp) {\
460 : T ## _sort(T ## _mutable_union_cast(T ## _union_vec_at(v__tmp, i__tmp))); }}
461 : #define __flatbuffers_define_scalar_vector(N, T)\
462 : typedef const T *N ## _vec_t;\
463 : typedef T *N ## _mutable_vec_t;\
464 : __flatbuffers_define_scalar_vec_len(N)\
465 : __flatbuffers_define_scalar_vec_at(N, T)\
466 : __flatbuffers_define_scalar_find(N, T)\
467 : __flatbuffers_define_scalar_scan(N, T)\
468 : __flatbuffers_define_scalar_sort(N, T)
469 :
470 : #define __flatbuffers_define_integer_type(N, T, W)\
471 : __flatcc_define_integer_accessors(N, T, W, flatbuffers_endian)\
472 : __flatbuffers_define_scalar_vector(N, T)
/* Instantiate vector types and accessors for every built-in scalar type,
 * including char, bool, and the union type discriminator vector. */
__flatbuffers_define_scalar_vector(flatbuffers_bool, flatbuffers_bool_t)
__flatbuffers_define_scalar_vector(flatbuffers_char, char)
__flatbuffers_define_scalar_vector(flatbuffers_uint8, uint8_t)
__flatbuffers_define_scalar_vector(flatbuffers_int8, int8_t)
__flatbuffers_define_scalar_vector(flatbuffers_uint16, uint16_t)
__flatbuffers_define_scalar_vector(flatbuffers_int16, int16_t)
__flatbuffers_define_scalar_vector(flatbuffers_uint32, uint32_t)
__flatbuffers_define_scalar_vector(flatbuffers_int32, int32_t)
__flatbuffers_define_scalar_vector(flatbuffers_uint64, uint64_t)
__flatbuffers_define_scalar_vector(flatbuffers_int64, int64_t)
__flatbuffers_define_scalar_vector(flatbuffers_float, float)
__flatbuffers_define_scalar_vector(flatbuffers_double, double)
__flatbuffers_define_scalar_vector(flatbuffers_union_type, flatbuffers_union_type_t)
/* String vector lookup API. The function bodies below are supplied by the
 * __flatbuffers_find/scan/rscan macros defined earlier in this file, so the
 * exact search strategy and not-found sentinel are defined there.
 * Conventions visible here:
 *   - plain variants take a NUL-terminated key s;
 *   - _n variants take an explicit key length n instead;
 *   - _ex variants restrict the search to [begin, end), with end clamped
 *     to the vector length via __flatbuffers_min;
 *   - rscan variants use the reverse-scan macro family
 *     (presumably searching back-to-front -- defined elsewhere). */
static inline size_t flatbuffers_string_vec_find(flatbuffers_string_vec_t vec, const char *s)
__flatbuffers_find_by_string_field(__flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
static inline size_t flatbuffers_string_vec_find_n(flatbuffers_string_vec_t vec, const char *s, size_t n)
__flatbuffers_find_by_string_n_field(__flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
static inline size_t flatbuffers_string_vec_scan(flatbuffers_string_vec_t vec, const char *s)
__flatbuffers_scan_by_string_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
static inline size_t flatbuffers_string_vec_scan_n(flatbuffers_string_vec_t vec, const char *s, size_t n)
__flatbuffers_scan_by_string_n_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
static inline size_t flatbuffers_string_vec_scan_ex(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s)
__flatbuffers_scan_by_string_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
static inline size_t flatbuffers_string_vec_scan_ex_n(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s, size_t n)
__flatbuffers_scan_by_string_n_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
static inline size_t flatbuffers_string_vec_rscan(flatbuffers_string_vec_t vec, const char *s)
__flatbuffers_rscan_by_string_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
static inline size_t flatbuffers_string_vec_rscan_n(flatbuffers_string_vec_t vec, const char *s, size_t n)
__flatbuffers_rscan_by_string_n_field(0, flatbuffers_string_vec_len(vec), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
static inline size_t flatbuffers_string_vec_rscan_ex(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s)
__flatbuffers_rscan_by_string_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s)
static inline size_t flatbuffers_string_vec_rscan_ex_n(flatbuffers_string_vec_t vec, size_t begin, size_t end, const char *s, size_t n)
__flatbuffers_rscan_by_string_n_field(begin, __flatbuffers_min(end, flatbuffers_string_vec_len(vec)), __flatbuffers_identity, vec, flatbuffers_string_vec_at, flatbuffers_string_vec_len, s, n)
__flatbuffers_define_string_sort()
/* Accessors for a fixed-length scalar array field NK[L] embedded in struct N:
 *   _get       bounds-checked element read (0 when t is null or i >= L),
 *              converting from protocol endianness;
 *   _get_ptr   pointer to the first element, or null when t is null;
 *   _get_len   the compile-time element count L;
 *   plain name alias for _get. */
#define __flatbuffers_define_struct_scalar_fixed_array_field(N, NK, TK, T, L)\
static inline T N ## _ ## NK ## _get(N ## _struct_t t__tmp, size_t i__tmp)\
{ if (!t__tmp || i__tmp >= L) return 0;\
  return __flatbuffers_read_scalar(TK, &(t__tmp->NK[i__tmp])); }\
static inline const T *N ## _ ## NK ## _get_ptr(N ## _struct_t t__tmp)\
{ return t__tmp ? t__tmp->NK : 0; }\
static inline size_t N ## _ ## NK ## _get_len(void) { return L; }\
static inline T N ## _ ## NK (N ## _struct_t t__tmp, size_t i__tmp)\
{ return N ## _ ## NK ## _get(t__tmp, i__tmp); }
/* Same interface for a fixed-length array of sub-structs. Here T is a
 * pointer (element handle) type, so _get yields the address of element i
 * (t->NK + i) rather than a copied value; out-of-range or null t yields
 * a null handle. */
#define __flatbuffers_define_struct_struct_fixed_array_field(N, NK, T, L)\
static inline T N ## _ ## NK ## _get(N ## _struct_t t__tmp, size_t i__tmp)\
{ if (!t__tmp || i__tmp >= L) return 0; return t__tmp->NK + i__tmp; }\
static inline T N ## _ ## NK ## _get_ptr(N ## _struct_t t__tmp)\
{ return t__tmp ? t__tmp->NK : 0; }\
static inline size_t N ## _ ## NK ## _get_len(void) { return L; }\
static inline T N ## _ ## NK(N ## _struct_t t__tmp, size_t i__tmp)\
{ if (!t__tmp || i__tmp >= L) return 0; return t__tmp->NK + i__tmp; }
/* Accessors for a scalar field NK of struct N: _get/plain-name read the
 * value via the protocol-endian reader (0 when t is null), _get_ptr
 * returns the field's address or null. Also instantiates the
 * scan-by-field helpers for this field. */
#define __flatbuffers_define_struct_scalar_field(N, NK, TK, T)\
static inline T N ## _ ## NK ## _get(N ## _struct_t t__tmp)\
{ return t__tmp ? __flatbuffers_read_scalar(TK, &(t__tmp->NK)) : 0; }\
static inline const T *N ## _ ## NK ## _get_ptr(N ## _struct_t t__tmp)\
{ return t__tmp ? &(t__tmp->NK) : 0; }\
static inline T N ## _ ## NK (N ## _struct_t t__tmp)\
{ return t__tmp ? __flatbuffers_read_scalar(TK, &(t__tmp->NK)) : 0; }\
__flatbuffers_define_scan_by_scalar_field(N, NK, T)
/* Accessor for an embedded sub-struct field NK: returns the field's
 * address as the sub-struct handle T, or null when t is null. */
#define __flatbuffers_define_struct_struct_field(N, NK, T)\
static inline T N ## _ ## NK ## _get(N ## _struct_t t__tmp) { return t__tmp ? &(t__tmp->NK) : 0; }\
static inline T N ## _ ## NK (N ## _struct_t t__tmp) { return t__tmp ? &(t__tmp->NK) : 0; }
534 : /* If fid is null, the function returns true without testing as buffer is not expected to have any id. */
535 : static inline int flatbuffers_has_identifier(const void *buffer, const char *fid)
536 0 : { flatbuffers_thash_t id, id2 = 0; if (fid == 0) { return 1; };
537 0 : id2 = flatbuffers_type_hash_from_string(fid);
538 0 : id = __flatbuffers_thash_read_from_pe(((flatbuffers_uoffset_t *)buffer) + 1);
539 0 : return id2 == 0 || id == id2; }
540 : static inline int flatbuffers_has_type_hash(const void *buffer, flatbuffers_thash_t thash)
541 0 : { return thash == 0 || (__flatbuffers_thash_read_from_pe((flatbuffers_uoffset_t *)buffer + 1) == thash); }
542 :
543 : static inline flatbuffers_thash_t flatbuffers_get_type_hash(const void *buffer)
544 0 : { return __flatbuffers_thash_read_from_pe((flatbuffers_uoffset_t *)buffer + 1); }
545 :
/* Endianness self-test: builds a minimal mock buffer whose identifier slot
 * holds "1234" and checks it reads back as matching the identifier "1234"
 * through the protocol-endian accessors. */
#define flatbuffers_verify_endian() flatbuffers_has_identifier("\x00\x00\x00\x00" "1234", "1234")
547 : static inline void *flatbuffers_read_size_prefix(void *b, size_t *size_out)
548 0 : { if (size_out) { *size_out = (size_t)__flatbuffers_uoffset_read_from_pe(b); }
549 0 : return (uint8_t *)b + sizeof(flatbuffers_uoffset_t); }
/* Null file identifier accepts anything, otherwise fid should be 4 characters. */
/* Resolves the buffer's root object: follows the leading uoffset and casts
 * to T's K handle type; yields 0 for a null buffer or identifier mismatch.
 * NOTE: buffer is evaluated more than once -- pass a plain pointer, not an
 * expression with side effects. */
#define __flatbuffers_read_root(T, K, buffer, fid)\
    ((!buffer || !flatbuffers_has_identifier(buffer, fid)) ? 0 :\
    ((T ## _ ## K ## t)(((uint8_t *)buffer) +\
    __flatbuffers_uoffset_read_from_pe(buffer))))
/* As __flatbuffers_read_root, but matched against a precomputed type hash
 * instead of a 4-character file identifier. Same multiple-evaluation
 * caveat for buffer. */
#define __flatbuffers_read_typed_root(T, K, buffer, thash)\
    ((!buffer || !flatbuffers_has_type_hash(buffer, thash)) ? 0 :\
    ((T ## _ ## K ## t)(((uint8_t *)buffer) +\
    __flatbuffers_uoffset_read_from_pe(buffer))))
/* Generates readers that treat the ubyte-vector field N of container table C
 * as a nested FlatBuffer with root type T (K selects struct_/table_ handle):
 *   _as_root_with_identifier  checks a caller-supplied file identifier;
 *   _as_typed_root            checks the container's type identifier;
 *   _as_root                  checks T's own file identifier. */
#define __flatbuffers_nested_buffer_as_root(C, N, T, K)\
static inline T ## _ ## K ## t C ## _ ## N ## _as_root_with_identifier(C ## _ ## table_t t__tmp, const char *fid__tmp)\
{ const uint8_t *buffer__tmp = C ## _ ## N(t__tmp); return __flatbuffers_read_root(T, K, buffer__tmp, fid__tmp); }\
static inline T ## _ ## K ## t C ## _ ## N ## _as_typed_root(C ## _ ## table_t t__tmp)\
{ const uint8_t *buffer__tmp = C ## _ ## N(t__tmp); return __flatbuffers_read_root(T, K, buffer__tmp, C ## _ ## type_identifier); }\
static inline T ## _ ## K ## t C ## _ ## N ## _as_root(C ## _ ## table_t t__tmp)\
{ const char *fid__tmp = T ## _file_identifier;\
  const uint8_t *buffer__tmp = C ## _ ## N(t__tmp); return __flatbuffers_read_root(T, K, buffer__tmp, fid__tmp); }
/* Generates the top-level root readers for type N (K selects the struct_/
 * table_ handle suffix):
 *   _as_root_with_identifier  verify a caller-supplied file identifier;
 *   _as_root_with_type_hash   verify a caller-supplied type hash;
 *   _as_root                  verify N's generated file identifier;
 *   _as_typed_root            verify N's generated type hash.
 * All return 0 on a null buffer or failed verification. */
#define __flatbuffers_buffer_as_root(N, K)\
static inline N ## _ ## K ## t N ## _as_root_with_identifier(const void *buffer__tmp, const char *fid__tmp)\
{ return __flatbuffers_read_root(N, K, buffer__tmp, fid__tmp); }\
static inline N ## _ ## K ## t N ## _as_root_with_type_hash(const void *buffer__tmp, flatbuffers_thash_t thash__tmp)\
{ return __flatbuffers_read_typed_root(N, K, buffer__tmp, thash__tmp); }\
static inline N ## _ ## K ## t N ## _as_root(const void *buffer__tmp)\
{ const char *fid__tmp = N ## _file_identifier;\
  return __flatbuffers_read_root(N, K, buffer__tmp, fid__tmp); }\
static inline N ## _ ## K ## t N ## _as_typed_root(const void *buffer__tmp)\
{ return __flatbuffers_read_typed_root(N, K, buffer__tmp, N ## _type_hash); }
/* Convenience wrappers choosing the struct or table handle suffix. */
#define __flatbuffers_struct_as_root(N) __flatbuffers_buffer_as_root(N, struct_)
#define __flatbuffers_table_as_root(N) __flatbuffers_buffer_as_root(N, table_)
579 :
580 : #include "flatcc/flatcc_epilogue.h"
581 : #endif /* FLATBUFFERS_COMMON_H */
582 :
583 : #pragma GCC diagnostic pop
|