26 #define GRN_RA_SEGMENT_SIZE (1 << 22)
29 _grn_ra_create(grn_ctx *ctx, grn_ra *ra, const char *path, unsigned int element_size)
32 int max_segments, n_elm, w_elm;
34 unsigned int actual_size;
39 for (actual_size = 1; actual_size < element_size; actual_size *= 2) ;
44 if (!io) { return NULL; }
49 for (w_elm = 22; (1 << w_elm) > n_elm; w_elm--);
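For orientation, here is a small self-contained sketch of the fixed-width (ra) segment geometry the lines above compute; it assumes n_elm is the per-segment element count (GRN_RA_SEGMENT_SIZE / actual_size), which is not shown in this excerpt:

#include <stdio.h>
#define GRN_RA_SEGMENT_SIZE (1 << 22)
int
main(void)
{
  unsigned int element_size = 6, actual_size, n_elm;
  int w_elm;
  /* round the element size up to the next power of two, as above */
  for (actual_size = 1; actual_size < element_size; actual_size *= 2) ;
  /* assumed: elements per 4 MiB segment */
  n_elm = GRN_RA_SEGMENT_SIZE / actual_size;
  /* find the largest w_elm with (1 << w_elm) <= n_elm */
  for (w_elm = 22; (1 << w_elm) > n_elm; w_elm--) ;
  printf("actual_size=%u n_elm=%u w_elm=%d\n", actual_size, n_elm, w_elm);
  /* prints: actual_size=8 n_elm=524288 w_elm=19 */
  return 0;
}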
65 if (!_grn_ra_create(ctx, ra, path, element_size)) {
80 if (!io) { return NULL; }
92 for (w_elm = 22; (1 << w_elm) > n_elm; w_elm--);
145 if (!_grn_ra_create(ctx, ra, path, element_size)) {
161 if (!p) { return NULL; }
182 if (seg == cache->seg) {
190 if (!p) { return NULL; }
206 #define GRN_JA_W_SEGREGATE_THRESH 7
207 #define GRN_JA_W_CAPACITY 38
208 #define GRN_JA_W_SEGMENT 22
210 #define JA_ESEG_VOID (0xffffffffU)
211 #define JA_SEGMENT_SIZE (1U << GRN_JA_W_SEGMENT)
213 #define JA_W_SEGMENTS_MAX (GRN_JA_W_CAPACITY - GRN_JA_W_SEGMENT)
214 #define JA_W_EINFO_IN_A_SEGMENT (GRN_JA_W_SEGMENT - JA_W_EINFO)
215 #define JA_N_EINFO_IN_A_SEGMENT (1U << JA_W_EINFO_IN_A_SEGMENT)
216 #define JA_M_EINFO_IN_A_SEGMENT (JA_N_EINFO_IN_A_SEGMENT - 1)
217 #define JA_N_GARBAGES_IN_A_SEGMENT ((1U << (GRN_JA_W_SEGMENT - 3)) - 2)
218 #define JA_N_ELEMENT_VARIATION (GRN_JA_W_SEGREGATE_THRESH - JA_W_EINFO + 1)
219 #define JA_N_DSEGMENTS (1U << JA_W_SEGMENTS_MAX)
220 #define JA_N_ESEGMENTS (1U << (GRN_ID_WIDTH - JA_W_EINFO_IN_A_SEGMENT))
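The constants above fix the variable-width (ja) column geometry. A rough summary of the derived values, assuming JA_W_EINFO == 3 (an 8-byte element info) and GRN_ID_WIDTH == 30, neither of which appears in this excerpt:

/* Derived geometry (illustrative arithmetic, not part of the source):
 *   JA_SEGMENT_SIZE         = 1 << 22          -> 4 MiB per data segment
 *   JA_W_SEGMENTS_MAX       = 38 - 22 = 16     -> JA_N_DSEGMENTS = 65536 segments
 *                             65536 * 4 MiB    -> 2^38 bytes of addressable data
 *   JA_W_EINFO_IN_A_SEGMENT = 22 - 3 = 19      -> 524288 element infos per einfo segment
 *   JA_N_ESEGMENTS          = 1 << (30 - 19)   -> 2048 einfo segments
 *   JA_N_ELEMENT_VARIATION  = 7 - 3 + 1 = 5    -> segregated size classes 2^3 .. 2^7 bytes
 */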
245 #define ETINY_P(e) ((e)->u.c[7] & ETINY)
246 #define ETINY_ENC(e,_size) ((e)->u.c[7] = (_size) + ETINY)
247 #define ETINY_DEC(e,_size) ((_size) = (e)->u.c[7] & ~(ETINY|EHUGE))
248 #define EHUGE_P(e) ((e)->u.c[7] & EHUGE)
249 #define EHUGE_ENC(e,_seg,_size) do {\
251 (e)->u.h.c2 = EHUGE;\
252 (e)->u.h.seg = (_seg);\
253 (e)->u.h.size = (_size);\
255 #define EHUGE_DEC(e,_seg,_size) do {\
256 (_seg) = (e)->u.h.seg;\
257 (_size) = (e)->u.h.size;\
259 #define EINFO_ENC(e,_seg,_pos,_size) do {\
260 (e)->u.n.c1 = (_pos) >> 16;\
261 (e)->u.n.c2 = ((_size) >> 16);\
262 (e)->u.n.seg = (_seg);\
263 (e)->u.n.pos = (_pos);\
264 (e)->u.n.size = (_size);\
266 #define EINFO_DEC(e,_seg,_pos,_size) do {\
267 (_seg) = (e)->u.n.seg;\
268 (_pos) = ((e)->u.n.c1 << 16) + (e)->u.n.pos;\
269 (_size) = ((e)->u.n.c2 << 16) + (e)->u.n.size;\
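Taken together, these macros pack an element descriptor into 8 bytes: ETINY keeps values of up to 8 bytes inline in the einfo itself, EHUGE records only the starting segment and total size of a value that spans whole segments, and EINFO records a (seg, pos, size) triple with the low 16 bits of pos and size in 16-bit fields and their high bits in the c1/c2 bytes. A minimal round-trip sketch; the struct below is a hypothetical stand-in for the real grn_ja_einfo layout, which is not part of this excerpt:

#include <stdint.h>
#include <stdio.h>
typedef struct {
  union {
    struct { uint16_t seg; uint16_t pos; uint16_t size; uint8_t c1; uint8_t c2; } n;
  } u;
} einfo_sketch;   /* hypothetical; mirrors only the fields EINFO_ENC/DEC touch */
int
main(void)
{
  einfo_sketch e;
  uint32_t seg = 12, pos = 0x345678, size = 0x29abc;   /* pos/size can exceed 16 bits */
  e.u.n.c1 = pos >> 16; e.u.n.c2 = size >> 16;         /* high bits */
  e.u.n.seg = seg; e.u.n.pos = pos; e.u.n.size = size; /* low 16 bits (truncating stores) */
  printf("%u %x %x\n", e.u.n.seg,
         ((uint32_t)e.u.n.c1 << 16) + e.u.n.pos,
         ((uint32_t)e.u.n.c2 << 16) + e.u.n.size);     /* prints: 12 345678 29abc */
  return 0;
}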
297 #define SEG_SEQ (0x10000000U)
298 #define SEG_HUGE (0x20000000U)
299 #define SEG_EINFO (0x30000000U)
300 #define SEG_GINFO (0x40000000U)
301 #define SEG_MASK (0xf0000000U)
303 #define SEGMENTS_AT(ja,seg) ((ja)->header->dsegs[seg])
304 #define SEGMENTS_SEGRE_ON(ja,seg,width) (SEGMENTS_AT(ja,seg) = width)
305 #define SEGMENTS_SEQ_ON(ja,seg) (SEGMENTS_AT(ja,seg) = SEG_SEQ)
306 #define SEGMENTS_HUGE_ON(ja,seg) (SEGMENTS_AT(ja,seg) = SEG_HUGE)
307 #define SEGMENTS_EINFO_ON(ja,seg,lseg) (SEGMENTS_AT(ja,seg) = SEG_EINFO|(lseg))
308 #define SEGMENTS_GINFO_ON(ja,seg,width) (SEGMENTS_AT(ja,seg) = SEG_GINFO|(width))
309 #define SEGMENTS_OFF(ja,seg) (SEGMENTS_AT(ja,seg) = 0)
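Each header->dsegs[] entry uses its high nibble as a tag describing what the data segment is used for; the low bits carry per-kind payload (fill count for SEQ segments, logical einfo segment number for EINFO segments, element width for GINFO segments, and the bare width for segregated free-list segments, which carry no tag). A small classifier sketch that relies on the SEG_* macros above; dseg_kind and dseg_classify are illustrative names, not part of the source:

typedef enum {
  DSEG_UNUSED, DSEG_SEGRE, DSEG_SEQ, DSEG_HUGE, DSEG_EINFO, DSEG_GINFO
} dseg_kind;

static dseg_kind
dseg_classify(uint32_t v)
{
  switch (v & SEG_MASK) {
  case 0 :         return v ? DSEG_SEGRE : DSEG_UNUSED; /* value itself is the element width */
  case SEG_SEQ :   return DSEG_SEQ;                     /* low bits: bytes consumed so far */
  case SEG_HUGE :  return DSEG_HUGE;
  case SEG_EINFO : return DSEG_EINFO;                   /* low bits: logical einfo segment */
  case SEG_GINFO : return DSEG_GINFO;                   /* low bits: element width */
  default :        return DSEG_UNUSED;
  }
}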
315 unsigned int max_element_size, uint32_t flags)
323 if (!io) { return NULL; }
333 header->esegs[0] = 0;
345 if (!_grn_ja_create(ctx, ja, path, max_element_size, flags)) {
359 if (!io) { return NULL; }
422 if (!_grn_ja_create(ctx, ja, path, max_element_size, flags)) {
447 iw->addr = (void *)ei;
449 uint32_t jag, vpos, vsize;
462 *value_len = iw->size;
475 #define DELETED 0x80000000
481 uint32_t seg, pos, element_size, aligned_size, m, *gseg;
490 EINFO_DEC(einfo, seg, pos, element_size);
493 int es = element_size - 1;
501 aligned_size = (element_size + sizeof(grn_id) - 1) & ~(sizeof(grn_id) - 1);
502 *(uint32_t *)(addr + pos - sizeof(grn_id)) = DELETED|aligned_size;
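On free, the element's size is first rounded up to a multiple of sizeof(grn_id) (4 bytes), and the id word that was stamped just before the element body at allocation time is overwritten with DELETED plus that rounded size, so a later defrag pass can recognize and skip the dead slot. Illustrative arithmetic (not from the source):

uint32_t element_size = 13;
uint32_t aligned_size = (element_size + sizeof(grn_id) - 1) & ~(sizeof(grn_id) - 1); /* 16 */
uint32_t marker = DELETED | aligned_size;  /* 0x80000010: high bit marks the slot as dead */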
517 uint32_t lseg = 0, lseg_;
519 while ((lseg_ = *gseg)) {
566 uint32_t lseg, *pseg, pos;
597 if (cas && *cas != *((uint64_t *)&eback)) {
605 uint64_t *location = (uint64_t *)(einfo + pos);
606 uint64_t value = *((uint64_t *)ei);
607 GRN_SET_64BIT(location, value);
610 grn_ja_free(ctx, ja, &eback);
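Replacing an element info is done with a single 64-bit store (GRN_SET_64BIT), so a concurrent reader sees either the complete old descriptor or the complete new one, never a torn mix; the optional cas value lets the caller detect that the einfo changed between its read and this write. A rough C11 analogue of the publish step (illustrative; the real macro's exact ordering guarantees are platform-specific):

#include <stdatomic.h>
#include <stdint.h>

static void
publish_einfo(_Atomic uint64_t *slot, uint64_t packed_einfo)
{
  /* one 8-byte store: readers never observe a half-updated element info */
  atomic_store(slot, packed_einfo);
}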
616 #define JA_N_GARBAGES_TH 10
627 if (element_size < 8) {
630 iw->addr = (void *)einfo;
660 int m, aligned_size, es = element_size - 1;
683 *(grn_id *)(addr + pos) = id;
684 aligned_size = (element_size + sizeof(grn_id) - 1) & ~(sizeof(grn_id) - 1);
690 EINFO_ENC(einfo, seg, pos, element_size);
692 iw->addr = addr + pos;
697 uint32_t lseg = 0, lseg_;
698 aligned_size = 1 << m;
701 uint32_t seg, pos, *gseg;
703 while ((lseg_ = *gseg)) {
720 EINFO_ENC(einfo, seg, pos, element_size);
722 iw->addr = addr + pos;
783 if ((rc = grn_ja_alloc(ctx, ja, id, value_len + sizeof(uint32_t), einfo, &iw))) {
786 memcpy(iw.addr, value, value_len);
787 memset((byte *)iw.addr + value_len, 0, sizeof(uint32_t));
790 if ((rc = grn_ja_alloc(ctx, ja, id, value_len, einfo, &iw))) { return rc; }
791 memcpy(iw.addr, value, value_len);
799 void *value, uint32_t value_len, int flags, uint64_t *cas)
814 old_value = grn_ja_ref(ctx, ja, id, &jw, &old_len);
815 if (value_len == old_len && memcmp(value, old_value, value_len) == 0) {
824 switch (flags & GRN_OBJ_SET_MASK) {
829 void *oldvalue = grn_ja_ref(ctx, ja, id, &jw, &old_len);
835 uint32_t el = old_len - sizeof(uint32_t);
836 uint32_t pos = *((uint32_t *)(b + el));
838 if (el <= pos + value_len) {
839 uint32_t rest = el - pos;
840 memcpy(b + pos, value, rest);
841 memcpy(b, (byte *)value + rest, value_len - rest);
842 *((uint32_t *)(b + el)) = value_len - rest;
844 memcpy(b + pos, value, value_len);
845 *((uint32_t *)(b + el)) = pos + value_len;
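This branch treats the existing value as a fixed-capacity ring buffer: the last uint32_t of the stored value is the write cursor, el is the usable capacity, and an append that would run past the end wraps around to the start. The same wrap logic as a standalone sketch (ring_append is an illustrative name):

#include <stdint.h>
#include <string.h>

/* b points at el bytes of payload followed by a uint32_t write cursor. */
static void
ring_append(uint8_t *b, uint32_t el, const void *value, uint32_t value_len)
{
  uint32_t pos = *(uint32_t *)(b + el);
  if (el <= pos + value_len) {                       /* would run past the end: wrap */
    uint32_t rest = el - pos;
    memcpy(b + pos, value, rest);
    memcpy(b, (const uint8_t *)value + rest, value_len - rest);
    *(uint32_t *)(b + el) = value_len - rest;
  } else {
    memcpy(b + pos, value, value_len);
    *(uint32_t *)(b + el) = pos + value_len;
  }
}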
849 if ((rc = grn_ja_alloc(ctx, ja, id,
850 value_len + old_len + sizeof(uint32_t),
855 memcpy(iw.addr, oldvalue, old_len);
856 memcpy((byte *)iw.addr + old_len, value, value_len);
857 memset((byte *)iw.addr + old_len + value_len, 0, sizeof(uint32_t));
861 if ((rc = grn_ja_alloc(ctx, ja, id, value_len + old_len, &einfo, &iw))) {
865 memcpy(iw.addr, oldvalue, old_len);
866 memcpy((byte *)iw.addr + old_len, value, value_len);
871 set_value(ctx, ja, id, value, value_len, &einfo);
879 void *oldvalue = grn_ja_ref(ctx, ja, id, &jw, &old_len);
885 uint32_t el = old_len - sizeof(uint32_t);
886 uint32_t pos = *((uint32_t *)(b + el));
888 if (pos < value_len) {
889 uint32_t rest = value_len - pos;
890 memcpy(b, (byte *)value + rest, pos);
891 memcpy(b + el - rest, value, rest);
892 *((uint32_t *)(b + el)) = el - rest;
894 memcpy(b + pos - value_len, value, value_len);
895 *((uint32_t *)(b + el)) = pos - value_len;
899 if ((rc = grn_ja_alloc(ctx, ja, id,
900 value_len + old_len + sizeof(uint32_t),
905 memcpy(iw.addr, value, value_len);
906 memcpy((byte *)iw.addr + value_len, oldvalue, old_len);
907 memset((byte *)iw.addr + value_len + old_len, 0, sizeof(uint32_t));
911 if ((rc = grn_ja_alloc(ctx, ja, id, value_len + old_len, &einfo, &iw))) {
915 memcpy(iw.addr, value, value_len);
916 memcpy((byte *)iw.addr + value_len, oldvalue, old_len);
921 set_value(ctx, ja, id, value, value_len, &einfo);
926 if (value_len == sizeof(int64_t)) {
927 int64_t *v = (int64_t *)&buf;
928 *v = -(*(int64_t *)value);
930 } else if (value_len == sizeof(int32_t)) {
931 int32_t *v = (int32_t *)&buf;
932 *v = -(*(int32_t *)value);
942 void *oldvalue = grn_ja_ref(ctx, ja, id, &jw, &old_len);
943 if (oldvalue && old_len) {
945 if (old_len == sizeof(int64_t) && value_len == sizeof(int64_t)) {
946 (*(int64_t *)oldvalue) += (*(int64_t *)value);
948 } else if (old_len == sizeof(int32_t) && value_len == sizeof(int32_t)) {
949 (*(int32_t *)oldvalue) += (*(int32_t *)value);
959 set_value(ctx, ja, id, value, value_len, &einfo);
970 grn_ja_free(ctx, ja, &einfo);
987 for (i = 0, vp = vector->u.v.sections; i < n; i++, vp++) {
992 for (i = 0, vp = vector->u.v.sections; i < n; i++, vp++) {
1004 if ((rc = grn_ja_alloc(ctx, ja, id, sizeh + sizev + sizef, &einfo, &iw))) { goto exit; }
1007 if (f) { memcpy((char *)iw.addr + sizeh + sizev, GRN_BULK_HEAD(&footer), sizef); }
1021 uint32_t lseg, *pseg, pos, size;
1039 size = ei->u.h.size;
1041 size = (ei->u.n.c2 << 16) + ei->u.n.size;
1050 uint64_t *cas, uint32_t *pos, uint32_t *size)
1082 #ifdef GRN_WITH_ZLIB
1091 void *value, *zvalue;
1092 uint32_t zvalue_len;
1093 if (!(zvalue = grn_ja_ref_raw(ctx, ja, id, iw, &zvalue_len))) {
1097 zstream.next_in = (Bytef *)(((uint64_t *)zvalue) + 1);
1098 zstream.avail_in = zvalue_len + sizeof(uint64_t);
1099 zstream.zalloc = Z_NULL;
1100 zstream.zfree = Z_NULL;
1101 if (inflateInit2(&zstream, 15) != Z_OK) {
1105 if (!(value = GRN_MALLOC(*((uint64_t *)zvalue)))) {
1106 inflateEnd(&zstream);
1110 zstream.next_out = (Bytef *)value;
1111 zstream.avail_out = *(uint64_t *)zvalue;
1112 if (inflate(&zstream, Z_FINISH) != Z_STREAM_END) {
1113 inflateEnd(&zstream);
1118 *value_len = zstream.total_out;
1119 if (inflateEnd(&zstream) != Z_OK) {
1129 #include <lzo/lzo1x.h>
1136 void *value, *lvalue;
1137 uint32_t lvalue_len;
1139 if (!(lvalue = grn_ja_ref_raw(ctx, ja, id, iw, &lvalue_len))) {
1143 if (!(value = GRN_MALLOC(*((uint64_t *)lvalue)))) {
1147 lout_len = *((uint64_t *)lvalue);
1148 switch (lzo1x_decompress((lzo_bytep)(((uint64_t *)lvalue) + 1),
1154 case LZO_E_INPUT_NOT_CONSUMED :
1161 *value_len = lout_len;
1169 #ifdef GRN_WITH_ZLIB
1171 return grn_ja_ref_zlib(ctx, ja, id, iw, value_len);
1176 return grn_ja_ref_lzo(ctx, ja, id, iw, value_len);
1179 return grn_ja_ref_raw(ctx, ja, id, iw, value_len);
1194 if ((v = grn_ja_ref(ctx, ja, id, &iw, &len))) {
1198 uint32_t el = len - sizeof(uint32_t);
1199 uint32_t pos = *((uint32_t *)(b + el));
1212 #ifdef GRN_WITH_ZLIB
1215 void *value, uint32_t value_len, int flags, uint64_t *cas)
1222 if (value_len == 0) {
1223 return grn_ja_put_raw(ctx, ja, id, value, value_len, flags, cas);
1226 zstream.next_in = value;
1227 zstream.avail_in = value_len;
1228 zstream.zalloc = Z_NULL;
1229 zstream.zfree = Z_NULL;
1230 if (deflateInit2(&zstream, Z_DEFAULT_COMPRESSION, Z_DEFLATED,
1233 Z_DEFAULT_STRATEGY) != Z_OK) {
1237 zvalue_len = deflateBound(&zstream, value_len);
1239 zstream.next_out = (Bytef *)(((uint64_t *)zvalue) + 1);
1240 zstream.avail_out = zvalue_len;
1241 if (deflate(&zstream, Z_FINISH) != Z_STREAM_END) {
1242 deflateEnd(&zstream);
1247 zvalue_len = zstream.total_out;
1248 if (deflateEnd(&zstream) != Z_OK) {
1253 *(uint64_t *)zvalue = value_len;
1254 rc = grn_ja_put_raw(ctx, ja, id, zvalue, zvalue_len + sizeof(uint64_t), flags, cas);
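The compressed record written here carries its own uncompressed length: a leading uint64_t (the *(uint64_t *)zvalue = value_len assignment above) followed by the deflate stream, which is why zvalue_len + sizeof(uint64_t) bytes are handed to grn_ja_put_raw and why grn_ja_ref_zlib can size its output buffer from *(uint64_t *)zvalue before inflating. Layout sketch:

/* Stored form of a zlib-compressed value (illustrative diagram):
 *   +-------------------------------+------------------------------------+
 *   | uint64_t: uncompressed length | deflate stream (zvalue_len bytes)  |
 *   +-------------------------------+------------------------------------+
 *   bytes passed to grn_ja_put_raw = sizeof(uint64_t) + zvalue_len
 */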
1263 void *value, uint32_t value_len, int flags, uint64_t *cas)
1266 void *lvalue, *lwork;
1267 lzo_uint lvalue_len = value_len + value_len / 16 + 64 + 3;
1269 if (value_len == 0) {
1270 return grn_ja_put_raw(ctx, ja, id, value, value_len, flags, cas);
1275 if (lzo1x_1_compress(value, value_len, (lzo_bytep)((uint64_t *)lvalue + 1), &lvalue_len, lwork) != LZO_E_OK) {
1282 *(uint64_t *)lvalue = value_len;
1283 rc = grn_ja_put_raw(ctx, ja, id, lvalue, lvalue_len + sizeof(uint64_t), flags, cas);
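The LZO path mirrors the zlib layout (leading uint64_t with the uncompressed length, then the compressed bytes). The buffer it compresses into is sized with value_len + value_len / 16 + 64 + 3, which is the worst-case output bound LZO documents for lzo1x_1_compress. A small helper expressing that bound (illustrative name):

#include <lzo/lzo1x.h>

/* Worst-case lzo1x output size for in_len input bytes, per the LZO documentation. */
static lzo_uint
lzo1x_worst_case(lzo_uint in_len)
{
  return in_len + in_len / 16 + 64 + 3;
}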
1291 int flags, uint64_t *cas)
1293 #ifdef GRN_WITH_ZLIB
1295 return grn_ja_put_zlib(ctx, ja, id, value, value_len, flags, cas);
1300 return grn_ja_put_lzo(ctx, ja, id, value, value_len, flags, cas);
1303 return grn_ja_put_raw(ctx, ja, id, value, value_len, flags, cas);
1309 byte *v = NULL, *ve;
1310 uint32_t element_size, cum = 0, *seginfo = &SEGMENTS_AT(ja,seg), sum;
1315 while (v < ve && cum < sum) {
1319 element_size = (id & ~DELETED);
1324 if (v + sizeof(uint32_t) != ve - JA_SEGMENT_SIZE + pos) {
1326 "dseges[%d] = pos unmatch (%d != %" GRN_FMT_LLD ")",
1327 seg, pos, (long long int)(v + sizeof(uint32_t) + JA_SEGMENT_SIZE - ve));
1332 "dseges[%d] = put failed (%d)", seg, id);
1335 element_size = (element_size + sizeof(grn_id) - 1) & ~(sizeof(grn_id) - 1);
1336 cum += sizeof(uint32_t) + element_size;
1338 v += sizeof(uint32_t) + element_size;
1356 if (!grn_ja_defrag_seg(ctx, ja, seg)) { nsegs++; }
1391 if ((dseg & SEG_MASK) == SEG_SEQ) {
1392 byte *v = NULL, *ve;
1393 uint32_t element_size, cum = 0, sum = dseg & ~SEG_MASK;
1394 uint32_t n_del_elements = 0, n_elements = 0, s_del_elements = 0, s_elements = 0;
1402 while (v < ve && cum < sum) {
1411 element_size = (id & ~DELETED);
1413 s_del_elements += element_size;
1416 element_size = (element_size + sizeof(grn_id) - 1) & ~(sizeof(grn_id) - 1);
1417 cum += sizeof(uint32_t) + element_size;
1419 s_elements += sizeof(uint32_t) + element_size;
1421 v += sizeof(uint32_t) + element_size;