#include <glog/logging.h>
MasstreeCursor::MasstreeCursor(MasstreeStorage storage, thread::Thread* context)
  : engine_(storage.get_engine()),
    storage_(storage.get_engine(), storage.get_control_block()),
    // ...
    current_xct_(&context->get_current_xct()) {
  // Reset all cursor state. The cursor is unusable until open() is called.
  forward_cursor_ = true;
  should_skip_cur_route_ = false;
  end_inclusive_ = false;
  // ...
  end_key_slices_ = nullptr;
  // ...
  cur_route_prefix_slices_ = nullptr;
  cur_route_prefix_be_ = nullptr;
  // ...
  cur_key_owner_id_address = nullptr;
  cur_key_suffix_ = nullptr;
  cur_key_in_layer_slice_ = 0;
  cur_key_in_layer_remainder_ = 0;
  cur_key_next_layer_ = false;
  cur_key_location_.clear();
  // ...
  cur_payload_length_ = 0;
  // ...
  search_key_length_ = 0;
  search_key_ = nullptr;
  search_key_slices_ = nullptr;
}
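// A minimal usage sketch (not part of this file): how a cursor is typically
// driven, assuming a thread/transaction context is already set up. The
// argument list follows the declaration of open(); its defaults (nullptr keys
// with kKeyLengthExtremum) denote an unbounded scan.
//
//   MasstreeCursor cursor(storage, context);
//   CHECK_ERROR_CODE(cursor.open(
//     begin_key, begin_key_length,
//     end_key, end_key_length,
//     true,     // forward_cursor
//     false,    // for_writes
//     true,     // begin_inclusive
//     false));  // end_inclusive
//   while (cursor.is_valid_record()) {
//     char key_buffer[kMaxKeyLength];
//     cursor.copy_combined_key(key_buffer);
//     // ... use the key/payload of the current record ...
//     CHECK_ERROR_CODE(cursor.next());
//   }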
void MasstreeCursor::copy_combined_key(char* buffer) const {
  // Reassembles the full big-endian key: the prefix slices of the layers
  // above, then this layer's slice, then the record's suffix (if any).
  const Route* route = cur_route();
  const Layer layer = route->layer_;  // (reconstructed)
  ASSERT_ND(cur_key_length_ == cur_key_in_layer_remainder_ + layer * sizeof(KeySlice));
  std::memcpy(buffer, cur_route_prefix_be_, layer * sizeof(KeySlice));  // (reconstructed)
  assorted::write_bigendian<KeySlice>(cur_key_in_layer_slice_, buffer + layer * sizeof(KeySlice));
  const KeyLength suffix_length
    = calculate_suffix_length(cur_key_in_layer_remainder_);  // (reconstructed)
  if (suffix_length > 0) {
    std::memcpy(
      buffer + (layer + 1U) * sizeof(KeySlice),
      cur_key_suffix_,  // (reconstructed)
      suffix_length);   // (reconstructed)
  }
}
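// Layout of the combined key produced above, for a record in layer L (each
// layer consumes one 8-byte KeySlice of the key; see get_layer()):
//
//   buffer: [ slice 0 | slice 1 | ... | slice L-1 | slice L | suffix ]
//            '------- cur_route_prefix_be_ ------'           (cur_key_suffix_)
//
// cur_key_length_ = L * sizeof(KeySlice) + cur_key_in_layer_remainder_, and
// the suffix exists only when the remainder exceeds sizeof(KeySlice).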
void MasstreeCursor::copy_combined_key_part(KeyLength offset, KeyLength len, char* buffer) const {
  ASSERT_ND(offset + len <= cur_key_length_);
  const Route* route = cur_route();
  const Layer layer = route->layer_;  // (reconstructed)
  ASSERT_ND(cur_key_length_ == cur_key_in_layer_remainder_ + layer * sizeof(KeySlice));
  const KeyLength prefix_len = layer * sizeof(KeySlice);  // (reconstructed)
  KeyLength buffer_pos = 0;      // (reconstructed)
  KeyLength len_remaining = len;  // (reconstructed)
  // Part 1: bytes that fall in the big-endian prefix of the layers above.
  if (offset < prefix_len) {
    KeyLength prefix_copy_len = std::min<KeyLength>(prefix_len - offset, len_remaining);
    std::memcpy(buffer, cur_route_prefix_be_ + offset, prefix_copy_len);
    buffer_pos += prefix_copy_len;
    len_remaining -= prefix_copy_len;
    offset += prefix_copy_len;  // (reconstructed: offset now points at the slice part)
  }
  // Part 2: bytes that fall in the current layer's 8-byte slice.
  if (len_remaining > 0) {
    if (offset < prefix_len + sizeof(KeySlice)) {
      char cur_slice_be[sizeof(KeySlice)];
      assorted::write_bigendian<KeySlice>(cur_key_in_layer_slice_, cur_slice_be);
      KeyLength cur_slice_offset = offset - prefix_len;
      KeyLength cur_slice_copy_len
        = std::min<KeyLength>(sizeof(KeySlice) - cur_slice_offset, len_remaining);
      std::memcpy(buffer + buffer_pos, cur_slice_be + cur_slice_offset, cur_slice_copy_len);
      buffer_pos += cur_slice_copy_len;
      len_remaining -= cur_slice_copy_len;
      offset += cur_slice_copy_len;  // (reconstructed)
    }
  }
  // Part 3: bytes that fall in the record's suffix.
  if (len_remaining > 0) {
    // ... (elided: suffix_length and suffix_offset setup)
    KeyLength suffix_copy_len = std::min<KeyLength>(suffix_length - suffix_offset, len_remaining);
    std::memcpy(
      buffer + buffer_pos,  // (reconstructed)
      cur_key_suffix_ + suffix_offset,
      suffix_copy_len);     // (reconstructed)
    buffer_pos += suffix_copy_len;
    len_remaining -= suffix_copy_len;
  }
  // ...
}
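// Worked example (hedged): take a record in layer 1 with remainder 11, i.e.
// cur_key_length_ = 8 (layer-0 prefix) + 8 (in-layer slice) + 3 (suffix) = 19.
// copy_combined_key_part(6, 8, buf) then copies:
//   part 1: prefix bytes [6, 8)  -> buf[0..1]
//   part 2: slice  bytes [0, 6)  -> buf[2..7]   (len_remaining reaches 0)
//   part 3: skipped (nothing remains)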
template <typename T>
inline ErrorCode MasstreeCursor::allocate_if_not_exist(T** pointer) {
  // ... (elided: returns immediately if *pointer is already set; otherwise
  //      obtains "out" from thread-local work memory, see
  //      acquire_local_work_memory())
  *pointer = reinterpret_cast<T*>(out);
  // ...
}
MasstreePage* MasstreeCursor::resolve_volatile(VolatilePagePointer ptr) const {
  // ... (elided: fetches the global volatile page resolver)
  MasstreePage* ret = reinterpret_cast<MasstreePage*>(resolver.resolve_offset(ptr));
  // ...
}
ErrorCode MasstreeCursor::next() {
  // ...
  while (should_skip_cur_route_ ||
    (is_valid_record() && cur_key_location_.observed_.is_deleted())) {  // (reconstructed)
    DVLOG(0) << "Rare. next() needs to move on to find non-deleted records/pages";
    should_skip_cur_route_ = false;
    // ... (elided: advances the route)
    if (route_count_ == 0) {
      LOG(INFO) << "The empty page was the last page that might have had the record. We are done";
      // ...
    }
  }
  // ...
}
inline ErrorCode MasstreeCursor::proceed_route() {
  // ...
  if (cur_route()->page_->is_border()) {
    return proceed_route_border();
  } else {
    return proceed_route_intermediate();
  }
}
ErrorCode MasstreeCursor::proceed_route_border() {
  Route* route = cur_route();
  // ...
  MasstreeBorderPage* page = reinterpret_cast<MasstreeBorderPage*>(route->page_);
  // Advance the in-page index in the cursor's direction.
  if (forward_cursor_) {
    // ...
  }
  // ...
  if (route->index_ < route->key_count_) {
    CHECK_ERROR_CODE(fetch_cur_record_logical(page, route->get_cur_original_index()));
    // ...
    if (is_cur_key_next_layer()) {
      // ...
    }
  }
  // ...
}
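// Note on ordering (hedged, inferred from Route::order_ being described as the
// border page's key-sorted view): records inside a border page are physically
// unsorted, so the route advances index_ through the logical order and maps it
// to a physical slot via get_cur_original_index() before fetching the record.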
void MasstreeCursor::proceed_route_intermediate_rebase_separator() {
  // A concurrent adoption reorganized this intermediate page. Re-find, based
  // on the last separator we followed, where to resume in the new layout.
  Route* route = cur_route();
  // ...
  MasstreeIntermediatePage* page = reinterpret_cast<MasstreeIntermediatePage*>(route->page_);
  // ...
  route->key_count_ = route->page_->get_key_count();
  // ...
  const KeySlice last = route->latest_separator_;
  ASSERT_ND(last <= page->get_high_fence() && last >= page->get_low_fence());
  // First level: find the first separator that is at least the last-seen one.
  for (route->index_ = 0; route->index_ < route->key_count_; ++route->index_) {
    if (last <= page->get_separator(route->index_)) {
      break;  // (reconstructed)
    }
  }
  ASSERT_ND(route->index_ == route->key_count_ || last <= page->get_separator(route->index_));
  DVLOG(0) << "rebase. new index=" << route->index_ << "/" << route->key_count_;
  if (route->index_ < route->key_count_ && last == page->get_separator(route->index_)) {
    DVLOG(0) << "oh, matched with first level separator";
    // ...
    if (forward_cursor_) {
      // ...
      const MasstreeIntermediatePage::MiniPage& minipage = page->get_minipage(route->index_);
      route->key_count_mini_ = minipage.key_count_;
      route->index_mini_ = 0;
    } else {  // (reconstructed)
      const MasstreeIntermediatePage::MiniPage& minipage = page->get_minipage(route->index_);
      route->key_count_mini_ = minipage.key_count_;
      route->index_mini_ = minipage.key_count_;
    }
    // ...
  }
  // Second level: scan the mini-page, retrying if its key count changes under us.
  const MasstreeIntermediatePage::MiniPage& minipage = page->get_minipage(route->index_);
  do {  // (reconstructed)
    route->key_count_mini_ = minipage.key_count_;
    DVLOG(0) << "checking second level... count=" << route->key_count_mini_;
    for (route->index_mini_ = 0;
      route->index_mini_ < route->key_count_mini_;
      ++route->index_mini_) {
      ASSERT_ND(last >= minipage.separators_[route->index_mini_]);
      if (last == minipage.separators_[route->index_mini_]) {
        break;  // (reconstructed)
      }
    }
    ASSERT_ND(route->index_mini_ <= route->key_count_mini_);
  } while (route->key_count_mini_ != minipage.key_count_ ||
    route->index_mini_ == route->key_count_mini_);
  DVLOG(0) << "checked second level... index_mini=" << route->index_mini_;
  ASSERT_ND(route->index_mini_ < route->key_count_mini_);
  if (forward_cursor_) {
    // ...
    ++route->index_mini_;
    // ...
  }
  // ...
}
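// Background (hedged): intermediate pages can be reorganized by concurrent
// "adoption" (a split child being adopted into the parent), which shifts
// separators and mini-page boundaries. The cursor therefore remembers
// latest_separator_, and when a fence/separator check fails it rebases its
// position from that value instead of restarting the whole descent.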
ErrorCode MasstreeCursor::proceed_route_intermediate() {
  // ...
  Route* route = cur_route();
  // ...
  ASSERT_ND(route->index_ <= route->key_count_);
  MasstreeIntermediatePage* page = reinterpret_cast<MasstreeIntermediatePage*>(route->page_);
  // ...
  if (forward_cursor_) {
    ++route->index_mini_;
  } else {  // (reconstructed)
    --route->index_mini_;
  }
  // The indexes are unsigned; decrementing past 0 wraps around and is caught here.
  if (route->index_mini_ > route->key_count_mini_) {
    // The mini-page is exhausted; move to the next first-level entry.
    if (forward_cursor_) {
      // ... (elided: advances index_; mirrored for the backward direction)
    }
    // ...
    if (route->index_ > route->key_count_) {
      // This page, too, is exhausted.
      return proceed_pop();
    }
    // ...
    route->key_count_mini_ = page->get_minipage(route->index_).key_count_;
    if (forward_cursor_) {
      route->index_mini_ = 0;
    } else {  // (reconstructed)
      route->index_mini_ = route->key_count_mini_;
    }
  }
  // ...
  ASSERT_ND(route->latest_separator_ >= page->get_low_fence());
  ASSERT_ND(route->latest_separator_ <= page->get_high_fence());
  MasstreeIntermediatePage::MiniPage& minipage = page->get_minipage(route->index_);
  DualPagePointer& pointer = minipage.pointers_[route->index_mini_];
  MasstreePage* next;  // (reconstructed)
  CHECK_ERROR_CODE(    // (reconstructed)
    MasstreeStoragePimpl(&storage_).follow_page(context_, for_writes_, &pointer, &next));
  KeySlice new_separator;  // (reconstructed)
  if (forward_cursor_) {
    // The page we are about to enter must start exactly at the last separator.
    if (UNLIKELY(next->get_low_fence() != route->latest_separator_)) {
      VLOG(0) << "Interesting3A. separator doesn't match. concurrent adoption. local retry.";
      proceed_route_intermediate_rebase_separator();
      // ... (elided: retry)
    }
    // ...
    new_separator = next->get_high_fence();
  } else {  // (reconstructed)
    if (UNLIKELY(next->get_high_fence() != route->latest_separator_)) {
      VLOG(0) << "Interesting3B. separator doesn't match. concurrent adoption. local retry.";
      proceed_route_intermediate_rebase_separator();
      // ... (elided: retry)
    }
    // ...
    new_separator = next->get_low_fence();
  }
  // ...
  route->latest_separator_ = new_separator;
  // ...
  return proceed_deeper();
}
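// Structure recap (hedged): an intermediate page holds up to
// kMaxIntermediateSeparators first-level entries, each pointing to a MiniPage
// with up to kMaxIntermediateMiniSeparators second-level separators. A cursor
// position in such a page is thus the pair (index_, index_mini_), and
// proceeding means incrementing index_mini_ first, then index_.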
inline ErrorCode MasstreeCursor::proceed_pop() {
  // ... (elided: pops the current route)
  if (route_count_ == 0) {
    // ... (the route stack is empty; the cursor is exhausted)
  }
  // ...
  Route& route = routes_[route_count_ - 1];
  if (route.page_->is_border() && route.was_stably_moved()) {
    // The border page split while the cursor was below it; continue in the
    // appropriate foster twin.
    // ...
    const auto left = route.page_->get_foster_minor();
    const auto right = route.page_->get_foster_major();
    MasstreePage* next = resolve_volatile(forward_cursor_ ? right : left);
    // ...
    return proceed_deeper();
  }
  // ...
  return proceed_route();
}
inline ErrorCode MasstreeCursor::proceed_next_layer() {
  Route* route = cur_route();
  // ...
  MasstreeBorderPage* page = reinterpret_cast<MasstreeBorderPage*>(route->page_);
  Layer layer = page->get_layer();
  // Record this layer's slice as part of the key prefix before diving deeper.
  KeySlice record_slice = page->get_slice(route->get_cur_original_index());
  cur_route_prefix_slices_[layer] = record_slice;
  assorted::write_bigendian<KeySlice>(
    record_slice,  // (reconstructed)
    cur_route_prefix_be_ + (layer * sizeof(KeySlice)));
  DualPagePointer* pointer = page->get_next_layer(route->get_cur_original_index());
  MasstreePage* next;  // (reconstructed)
  CHECK_ERROR_CODE(    // (reconstructed)
    MasstreeStoragePimpl(&storage_).follow_page(context_, for_writes_, pointer, &next));
  // ...
  return proceed_deeper();
}
inline ErrorCode MasstreeCursor::proceed_deeper() {
  Route* cur = cur_route();
  if (cur->page_->is_border()) {
    // If the border page has split, descend into the foster twin on the side
    // the cursor scans first, repeating until we reach a stable page.
    while (UNLIKELY(cur->was_stably_moved())) {
      const auto left = cur->page_->get_foster_minor();
      const auto right = cur->page_->get_foster_major();
      MasstreePage* next_page = resolve_volatile(forward_cursor_ ? left : right);
      // ...
    }
    // ...
    return proceed_deeper_border();
  } else {  // (reconstructed)
    // ...
    return proceed_deeper_intermediate();
  }
}
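// Background (hedged): in this tree, a split does not immediately replace a
// page. The old page keeps "foster twin" pointers (get_foster_minor() /
// get_foster_major()) to the two halves, divided at get_foster_fence().
// was_stably_moved() tells the cursor such twins exist, so descents and pops
// route through the twin covering the side currently being scanned.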
inline ErrorCode MasstreeCursor::proceed_deeper_border() {
  Route* route = cur_route();
  // ...
  MasstreeBorderPage* page = reinterpret_cast<MasstreeBorderPage*>(cur_route()->page_);
  // ...
  if (route->key_count_ == 0) {
    LOG(INFO) << "Huh, rare but possible. A cursor hit an empty border page. Just skips.";
    set_should_skip_cur_route();
    // ...
  }
  // ...
  route->index_ = forward_cursor_ ? 0 : route->key_count_ - 1;
  SlotIndex record = route->get_original_index(route->index_);
  CHECK_ERROR_CODE(fetch_cur_record_logical(page, record));  // (reconstructed)
  if (is_cur_key_next_layer()) {
    return proceed_next_layer();
  }
  // ...
}
inline ErrorCode MasstreeCursor::proceed_deeper_intermediate() {
  Route* route = cur_route();
  // ...
  MasstreeIntermediatePage* page = reinterpret_cast<MasstreeIntermediatePage*>(cur_route()->page_);
  route->index_ = forward_cursor_ ? 0 : route->key_count_;
  MasstreeIntermediatePage::MiniPage& minipage = page->get_minipage(route->index_);
  // ...
  KeySlice separator_low, separator_high;
  // ...
  route->key_count_mini_ = minipage.key_count_;
  // ...
  route->index_mini_ = forward_cursor_ ? 0 : route->key_count_mini_;
  // ...
  extract_separators(&separator_low, &separator_high);
  DualPagePointer& pointer = minipage.pointers_[route->index_mini_];
  MasstreePage* next;  // (reconstructed)
  CHECK_ERROR_CODE(    // (reconstructed)
    MasstreeStoragePimpl(&storage_).follow_page(context_, for_writes_, &pointer, &next));
  if (UNLIKELY(next->get_low_fence() != separator_low ||
    next->get_high_fence() != separator_high)) {
    VLOG(0) << "Interesting4. first sep doesn't match. concurrent adoption. local retry.";
    // ... (elided: re-reads the counts and retries)
  }
  // ...
  route->latest_separator_ = forward_cursor_ ? separator_high : separator_low;
  // ...
  return proceed_deeper();
}
ErrorCode MasstreeCursor::fetch_cur_record_logical(MasstreeBorderPage* page, SlotIndex record) {
  // Loads the cursor's "current record" state from the given slot and logically
  // observes its TID (see RecordLocation::populate_logical()).
  // ...
  ASSERT_ND(record < page->get_key_count());
  cur_key_owner_id_address = page->get_owner_id(record);
  // ...
  if (!page->header().snapshot_) {
    // ... (elided: assert_within_valid_volatile_page(resolver, cur_key_owner_id_address))
  }
  // ...
  KeyLength remainder = page->get_remainder_length(record);
  cur_key_in_layer_remainder_ = remainder;
  cur_key_in_layer_slice_ = page->get_slice(record);
  Layer layer = page->get_layer();
  cur_key_length_ = layer * sizeof(KeySlice) + remainder;
  // ... (elided: populates cur_key_location_; checks whether this is a next-layer record)
  if (/* ... not a next-layer record ... */) {  // (reconstructed)
    cur_key_suffix_ = page->get_record(record);
    cur_payload_length_ = page->get_payload_length(record);
    cur_payload_ = page->get_record_payload(record);
  } else {
    cur_key_suffix_ = nullptr;
    cur_payload_length_ = 0;
    cur_payload_ = nullptr;
  }
  // ...
}
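// Worked example (hedged): a 10-byte key stored in layer 1 had 8 bytes consumed
// by layer 0, so its remainder here is 2 and
//   cur_key_length_ = 1 * sizeof(KeySlice) + 2 = 10.
// A remainder above sizeof(KeySlice) means the record also stores a suffix
// beyond its in-layer slice (see copy_combined_key() above).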
// (Fragment, presumably from the routine that builds Route::order_, the
//  key-sorted view of a border page's physically unsorted records.)
  if (page->is_consecutive_inserts()) {
    DVLOG(2) << "lucky, an already sorted border page.";
    // ...
  }
  // ...
  page->prefetch_additional_if_needed(key_count_);
  // ...
// (Fragment of the sort comparator: orders by slice first, then by remainder
//  length.)
  KeySlice left_slice = target_->get_slice(left);
  KeySlice right_slice = target_->get_slice(right);
  if (left_slice < right_slice) {
    return true;  // (reconstructed)
  } else if (left_slice == right_slice) {
    return target_->get_remainder_length(left) < target_->get_remainder_length(right);
  }
  // ...
// (Fragment, presumably from the routine that pushes a newly visited page onto
//  the routes_ stack and snapshots its stable version and key count.)
  const bool is_border = page->is_border();
  Route& route = routes_[route_count_];
  // ...
  if (is_border && route.was_stably_moved()) {
    // ... (a moved border page: its records will be read via the foster twins)
  }
  // ...
  if (is_border && !route.was_stably_moved()) {
    // ... (elided: builds the order_ index for this border page)
  }
  // ...
  if (!is_border || page->header().snapshot_ || route.was_stably_moved()) {
    // ...
  }
  // ...
// (Fragment, presumably from the helper that follows a border page's foster
//  twins until it reaches the page covering the given slice.)
  Route* route = cur_route();
  // ...
  ASSERT_ND(/* ... */
    || route->page_->within_fences(slice)
    || route->page_->get_high_fence() == slice);
  if (LIKELY(!route->was_stably_moved())) {
    // ... (common case: no concurrent split, nothing to do)
  }
  // ...
  KeySlice foster_fence = route->page_->get_foster_fence();
  MasstreePage* left = resolve_volatile(route->page_->get_foster_minor());
  MasstreePage* right = resolve_volatile(route->page_->get_foster_major());
  // ...
  if (slice < foster_fence) {
    // ... (the slice belongs to the minor twin)
  } else if (slice > foster_fence) {
    // ... (the slice belongs to the major twin)
  }
  // ...
  if ((forward_cursor_ && page == left) || (!forward_cursor_ && page == right)) {
    // ...
  }
  // ...
inline void MasstreeCursor::extract_separators(
  KeySlice* separator_low,   // (parameter names reconstructed from the call sites)
  KeySlice* separator_high) const {
  // Computes the [low, high] slice range covered by the pointer at the current
  // (index_, index_mini_) position, falling back to first-level separators and
  // page fences at mini-page and page boundaries.
  const Route* route = cur_route();
  // ...
  ASSERT_ND(route->index_ <= route->key_count_);
  ASSERT_ND(route->index_mini_ <= route->key_count_mini_);
  MasstreeIntermediatePage* cur = reinterpret_cast<MasstreeIntermediatePage*>(route->page_);
  const MasstreeIntermediatePage::MiniPage& minipage = cur->get_minipage(route->index_);
  if (route->index_mini_ == 0) {
    if (route->index_ == 0) {
      *separator_low = cur->get_low_fence();
    } else {  // (reconstructed)
      *separator_low = cur->get_separator(route->index_ - 1U);
    }
  } else {  // (reconstructed)
    *separator_low = minipage.separators_[route->index_mini_ - 1U];
  }
  if (route->index_mini_ == route->key_count_mini_) {
    if (route->index_ == route->key_count_) {
      *separator_high = cur->get_high_fence();
    } else {  // (reconstructed)
      *separator_high = cur->get_separator(route->index_);
    }
  } else {  // (reconstructed)
    *separator_high = minipage.separators_[route->index_mini_];
  }
}
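// Example (hedged): with key_count_ = 2 first-level entries and the cursor at
// index_ = 0, index_mini_ = 0, the window starts at the page's low fence; at
// index_ = 2 (== key_count_) with index_mini_ == key_count_mini_, the high
// side falls back to the page's high fence. Every pointer thus gets a
// well-defined [separator_low, separator_high] window to validate fences with.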
inline void MasstreeCursor::check_end_key() {
  // Compares the current key against the end key and, depending on the scan
  // direction and end_inclusive_, decides whether the cursor reached its end.
  // ...
  if (!forward_cursor_) {
    // ... (a backward cursor ends when the current key drops below the end key)
  }
  // ...
  if (forward_cursor_) {
    // ... (a forward cursor ends when the current key exceeds the end key)
  }
  // ...
  if (!end_inclusive_) {
    // ... (an exact match also ends an exclusive range)
  }
  // ...
}
// Compares the current key against the search (begin) key in the given layer.
// (The beginning of the signature is elided in this listing.)
  /* ... */
  uint8_t layer) const {
  if (is_search_key_extremum() || search_key_length_ <= layer * sizeof(KeySlice)) {
    // The search key puts no constraint in this layer; the answer depends only
    // on the scan direction.
    if (forward_cursor_) {
      // ...
    }
    // ...
  }
  // ...
  // The prefixes of the layers above must already agree with the search key.
  for (Layer i = 0; i < layer && sizeof(KeySlice) * i < search_key_length_; ++i) {
    if (forward_cursor_) {  // (reconstructed)
      ASSERT_ND(search_key_slices_[i] <= cur_route_prefix_slices_[i]);
    } else {
      ASSERT_ND(search_key_slices_[i] >= cur_route_prefix_slices_[i]);
    }
  }
  // ...
  return compare_cur_key(slice, layer, search_key_, search_key_length_);
}
// Compares the current key against the end key in the given layer.
// (The beginning of the signature is elided in this listing.)
  if (is_end_key_supremum()) {
    // ... (no end key: the scan never ends by key comparison)
  }
  // ...
  // First, the layer prefixes decide the order if they differ anywhere.
  for (Layer i = 0; i < layer && sizeof(KeySlice) * i < end_key_length_; ++i) {
    if (end_key_slices_[i] > cur_route_prefix_slices_[i]) {
      // ... (current key is smaller)
    } else if (end_key_slices_[i] < cur_route_prefix_slices_[i]) {
      // ... (current key is larger)
    }
  }
  // ...
  if (sizeof(KeySlice) * layer > end_key_length_) {
    // The end key was exhausted above this layer, so the current key is longer.
    ASSERT_ND(cur_key_length_ > end_key_length_);
    // ...
  }
  // ...
  // Then, the slice in this layer.
  KeySlice end_slice = end_key_slices_[layer];
  if (cur_key_in_layer_slice_ < end_slice) {
    // ...
  } else if (cur_key_in_layer_slice_ > end_slice) {
    // ...
  }
  // ...
  // Equal slices: if either side has no suffix, remainder lengths decide.
  if (cur_key_in_layer_remainder_ <= sizeof(KeySlice) || end_remainder <= sizeof(KeySlice)) {
    if (cur_key_in_layer_remainder_ == end_remainder) {
      // ...
    } else if (cur_key_in_layer_remainder_ >= end_remainder) {
      // ...
    }
    // ...
  }
  // ...
  // Both have suffixes: compare them byte-wise, then by length.
  // ... (elided: end_remainder and suffix-length computations)
  KeyLength min_suffix_len = std::min(cur_suffix_len, end_suffix_len);
  int cmp = std::memcmp(
    cur_key_suffix_,  // (reconstructed)
    end_key_ + (layer + 1U) * sizeof(KeySlice),
    min_suffix_len);  // (reconstructed)
  if (cmp < 0) {      // (reconstructed)
    // ...
  } else if (cmp > 0) {
    // ...
  }
  // ...
  if (cur_key_length_ > end_key_length_) {
    // ...
  } else if (cur_key_length_ < end_key_length_) {
    // ...
  }
  // ...
}
// Compares the current key against an arbitrary full key; used for both the
// search key and the end key. (The beginning of the signature is elided.)
  /* ... */
  const char* full_key,
  /* ... */
  // ...
  if (is_cur_key_next_layer()) {
    // A next-layer record stands for every key sharing its 8-byte slice and
    // extending beyond it, so only the slice and the length matter here.
    if (cur_key_in_layer_slice_ < slice) {
      // ...
    } else if (cur_key_in_layer_slice_ > slice) {
      // ...
    } else if (remainder < sizeof(KeySlice)) {
      // ...
    } else if (remainder == sizeof(KeySlice)) {
      // ...
    }
    // ...
  } else if (cur_key_in_layer_slice_ < slice) {
    // ...
  } else if (cur_key_in_layer_slice_ > slice) {
    // ...
  }
  // ...
  // Equal slices in this layer.
  if (cur_key_in_layer_remainder_ <= sizeof(KeySlice)) {
    // The current key has no suffix; remainder lengths decide the order.
    if (cur_key_in_layer_remainder_ < remainder) {
      // ...
    } else if (cur_key_in_layer_remainder_ == remainder) {
      // ...
    }
    // ...
  } else if (remainder <= sizeof(KeySlice)) {
    // ... (only the other key lacks a suffix)
  }
  // ...
  // Both have suffixes: compare byte-wise, then by length.
  KeyLength min_length = std::min<KeyLength>(remainder, cur_key_in_layer_remainder_);
  int cmp = std::memcmp(
    cur_key_suffix_,  // (reconstructed)
    full_key + (layer + 1U) * sizeof(KeySlice),
    /* ... */);
  if (cmp < 0) {      // (reconstructed)
    // ...
  } else if (cmp > 0) {
    // ...
  }
  // ...
  if (cur_key_in_layer_remainder_ < remainder) {
    // ...
  } else if (cur_key_in_layer_remainder_ == remainder) {
    // ...
  }
  // ...
}
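// Example (hedged): "abcdefgh" (remainder 8) and "abcdefghZ" (remainder 9)
// share the same 8-byte slice, so the shorter remainder orders first. Suffix
// bytes are only memcmp'ed when both remainders exceed sizeof(KeySlice), e.g.
// "abcdefghXY" vs. "abcdefghXZ".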
ErrorCode MasstreeCursor::open(
  const char* begin_key,
  KeyLength begin_key_length,  // (parameter list reconstructed from the declaration)
  const char* end_key,
  KeyLength end_key_length,
  bool forward_cursor,
  bool for_writes,
  bool begin_inclusive,
  bool end_inclusive) {
  // ... (elided: precondition checks, e.g. the transaction must be active)
  forward_cursor_ = forward_cursor;
  reached_end_ = false;
  for_writes_ = for_writes;
  end_inclusive_ = end_inclusive;
  end_key_length_ = end_key_length;
  // ...
  if (!is_end_key_supremum()) {
    copy_input_key(end_key, end_key_length, end_key_, end_key_slices_);
  }
  // ...
  search_key_length_ = begin_key_length;
  // ...
  if (!is_search_key_extremum()) {
    copy_input_key(begin_key, begin_key_length, search_key_, search_key_slices_);
  }
  // ... (elided: descends from the root, starting with locate_layer(0))
  ASSERT_ND(cur_route()->page_->is_border());
  // ...
  // Same skip loop as in next(): move on until a non-deleted record is found.
  while (should_skip_cur_route_ ||
    (is_valid_record() && cur_key_location_.observed_.is_deleted())) {  // (reconstructed)
    DVLOG(0) << "Rare. open() needs to move on to find non-deleted records/pages";
    should_skip_cur_route_ = false;
    // ...
    if (route_count_ == 0) {
      LOG(INFO) << "The empty page was the last page that might have had the record. We are done";
      // ...
    }
  }
  // ...
}
inline ErrorCode MasstreeCursor::locate_layer(uint8_t layer) {
  // Decide which slice to search for within this layer.
  KeySlice slice;  // (reconstructed)
  search_key_in_layer_extremum_ = false;
  if (is_search_key_extremum() || search_key_length_ <= layer * sizeof(KeySlice)) {
    // The search key puts no constraint in this layer: start from the extremum.
    // ... (elided: slice = kInfimumSlice or kSupremumSlice by direction)
    search_key_in_layer_extremum_ = true;
  } else if (search_key_length_ >= (layer + 1U) * sizeof(KeySlice)) {
    slice = search_key_slices_[layer];
  } else {  // (reconstructed)
    // The search key ends partway through this layer's slice: pad the missing
    // bytes toward the extremum of the scan direction.
    if (forward_cursor_) {
      // ...
    }
    // ...
    std::memcpy(  // (reconstructed)
      &slice,     // (reconstructed)
      search_key_ + layer * sizeof(KeySlice),
      search_key_length_ - layer * sizeof(KeySlice));
    slice = assorted::read_bigendian<KeySlice>(&slice);
  }
  // ... (elided: descends to the border page covering the slice)
  ASSERT_ND(cur_route()->page_->is_border());
  // ...
  // Sanity check: the located key must be on the searched side of the slice.
  KeySlice cur_key_slice_this_layer;  // (reconstructed)
  if (cur_route()->layer_ == layer) {
    cur_key_slice_this_layer = cur_key_in_layer_slice_;
  } else {  // (reconstructed)
    cur_key_slice_this_layer = cur_route_prefix_slices_[layer];
  }
  if (forward_cursor_) {
    ASSERT_ND(cur_key_slice_this_layer >= slice);
  } else {  // (reconstructed)
    ASSERT_ND(cur_key_slice_this_layer <= slice);
  }
  // ...
}
// Locates the starting record in a border page for the given slice.
// (The beginning of the signature is elided in this listing.)
  Route* route = cur_route();
  MasstreeBorderPage* border = reinterpret_cast<MasstreeBorderPage*>(route->page_);
  ASSERT_ND(border->get_low_fence() <= slice);
  ASSERT_ND(border->get_high_fence() >= slice);
  // ...
  ASSERT_ND(/* ... */
    || border->within_fences(slice)
    || border->get_high_fence() == slice);
  // ...
  if (route->key_count_ == 0) {
    LOG(INFO) << "Huh, rare but possible. Cursor's initial locate() hits an empty border page.";
    set_should_skip_cur_route();
    // ...
  }
  // ...
  uint8_t layer = border->get_layer();
  // ...
  SlotIndex index;  // (reconstructed)
  if (forward_cursor_) {  // (branch structure reconstructed)
    // Find the first record at or after the slice, in logical (sorted) order.
    for (index = 0; index < route->key_count_; ++index) {
      SlotIndex record = route->get_original_index(index);
      if (border->get_slice(record) < slice) {
        continue;  // (reconstructed)
      }
      // ...
    }
    ASSERT_ND(cur_key_in_layer_slice_ >= slice);
    // ...
  } else {
    // Find the last record at or before the slice. index is unsigned, so
    // decrementing below zero wraps past key_count_ and terminates the loop.
    for (index = route->key_count_ - 1; index < route->key_count_; --index) {
      SlotIndex record = route->get_original_index(index);
      if (border->get_slice(record) > slice) {
        continue;  // (reconstructed)
      }
      // ...
    }
    ASSERT_ND(cur_key_in_layer_slice_ <= slice);
    // ...
  }
  // ...
  route->index_ = index;
  // ...
  if (route->index_ >= route->key_count_) {
    DVLOG(2) << "Initial locate() hits page boundary.";
    set_should_skip_cur_route();
    // ...
  }
  // ...
  // (when the located record turns out to be a next-layer pointer:)
  return locate_next_layer();
}
ErrorCode MasstreeCursor::locate_next_layer() {
  Route* route = cur_route();
  MasstreeBorderPage* border = reinterpret_cast<MasstreeBorderPage*>(route->page_);
  Layer layer = border->get_layer();
  // Record this layer's slice as part of the key prefix before diving deeper.
  KeySlice record_slice = border->get_slice(route->get_cur_original_index());
  cur_route_prefix_slices_[layer] = record_slice;
  assorted::write_bigendian<KeySlice>(
    record_slice,  // (reconstructed)
    cur_route_prefix_be_ + (layer * sizeof(KeySlice)));
  DualPagePointer* pointer = border->get_next_layer(route->get_cur_original_index());
  MasstreePage* next;  // (reconstructed)
  CHECK_ERROR_CODE(    // (reconstructed)
    MasstreeStoragePimpl(&storage_).follow_page(context_, for_writes_, pointer, &next));
  // ...
  return locate_layer(layer + 1U);
}
// Descends from an intermediate page toward the given slice.
// (The beginning of the signature is elided in this listing.)
  Route* route = cur_route();
  // ...
  MasstreeIntermediatePage* cur = reinterpret_cast<MasstreeIntermediatePage*>(route->page_);
  // ...
  ASSERT_ND(/* ... */
    || cur->within_fences(slice)
    || cur->get_high_fence() == slice);
  // ...
  // First level: pick the entry covering the slice (or the extremum for
  // unbounded scans).
  SlotIndex index = 0;  // (reconstructed)
  if (search_key_in_layer_extremum_) {
    if (forward_cursor_) {
      // ... (index stays 0)
    } else {  // (reconstructed)
      index = route->key_count_;
    }
  } else if (forward_cursor_) {  // (reconstructed)
    for (; index < route->key_count_; ++index) {
      if (slice < cur->get_separator(index)) {
        break;  // (reconstructed)
      }
    }
  } else {  // (reconstructed)
    for (; index < route->key_count_; ++index) {
      if (slice <= cur->get_separator(index)) {
        break;  // (reconstructed)
      }
    }
  }
  route->index_ = index;
  MasstreeIntermediatePage::MiniPage& minipage = cur->get_minipage(route->index_);
  // ...
  minipage.prefetch();
  route->key_count_mini_ = minipage.key_count_;
  // ...
  // Second level: the same search within the mini-page.
  SlotIndex index_mini = 0;  // (reconstructed)
  if (search_key_in_layer_extremum_) {
    // ...
    if (forward_cursor_) {
      // ... (index_mini stays 0)
    } else {  // (reconstructed)
      index_mini = route->key_count_mini_;
    }
  } else if (forward_cursor_) {  // (reconstructed)
    for (; index_mini < route->key_count_mini_; ++index_mini) {
      if (slice < minipage.separators_[index_mini]) {
        break;  // (reconstructed)
      }
    }
  } else {  // (reconstructed)
    for (; index_mini < route->key_count_mini_; ++index_mini) {
      if (slice <= minipage.separators_[index_mini]) {
        break;  // (reconstructed)
      }
    }
  }
  route->index_mini_ = index_mini;
  // ...
  KeySlice separator_low, separator_high;
  extract_separators(&separator_low, &separator_high);
  if (UNLIKELY(slice < separator_low || slice > separator_high)) {
    VLOG(0) << "Interesting5. separator doesn't match. concurrent adopt. local retry.";
    // ...
    route->key_count_ = cur->get_key_count();
    // ... (elided: retries the search in this page)
  }
  ASSERT_ND(separator_low <= slice && slice <= separator_high);
  // ...
  DualPagePointer& pointer = minipage.pointers_[route->index_mini_];
  MasstreePage* next;  // (reconstructed)
  CHECK_ERROR_CODE(    // (reconstructed)
    MasstreeStoragePimpl(&storage_).follow_page(context_, for_writes_, &pointer, &next));
  // ...
  if (UNLIKELY(next->get_low_fence() != separator_low ||
    next->get_high_fence() != separator_high)) {
    VLOG(0) << "Interesting. separator doesn't match. concurrent adopt. local retry.";
    // ...
    route->key_count_ = cur->get_key_count();
    // ... (elided: retries the search in this page)
  }
  // ...
  route->latest_separator_ = forward_cursor_ ? separator_high : separator_low;
  ASSERT_ND(next->get_low_fence() <= slice);
  ASSERT_ND(next->get_high_fence() >= slice);
  // ...
  if (next->is_border()) {
    // ...
  }
  // ...
}
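// Note (hedged): this is the optimistic part of the descent. Instead of
// locking, the cursor computes the [separator_low, separator_high] window it
// expects the child to cover, follows the pointer, and re-checks the child's
// fences. A mismatch means a concurrent split/adoption slipped in, and the
// search is simply redone against the page's refreshed key count.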
// (Heavily elided: the record-modification family, which delegates to the
//  *_general() implementations with the current record's location and key.)
ErrorCode MasstreeCursor::overwrite_record(
  const void* payload,
  PayloadLength payload_offset,  // (remaining parameters reconstructed from the declaration)
  PayloadLength payload_count) {
  // ...
}

template <typename PAYLOAD>
// ... (elided: presumably overwrite_record_primitive(), the typed single-value variant)

template <typename PAYLOAD>
// ... (elided: presumably increment_record(), see increment_general())
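// Usage sketch (hedged): modifying the record under the cursor, using the
// declarations visible in this listing. The cursor presumably must have been
// opened with for_writes = true; uint64_t is an arbitrary example payload type.
//
//   uint64_t value = 42;
//   CHECK_ERROR_CODE(cursor.overwrite_record(&value, 0, sizeof(value)));
//   // or, for typed single-value payloads / in-place increments:
//   CHECK_ERROR_CODE(cursor.overwrite_record_primitive<uint64_t>(42, 0));
//   CHECK_ERROR_CODE(cursor.increment_record<uint64_t>(&value, 0));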
void MasstreeCursor::assert_route_impl() const {
  for (uint16_t i = 0; i + 1U < route_count_; ++i) {
    const Route* route = routes_ + i;
    // ...
    ASSERT_ND(route->layer_ == route->page_->get_layer());
    if (route->was_stably_moved()) {
      // ... (a moved page: its indexes are not meaningful)
    } else if (reinterpret_cast<Page*>(route->page_)->get_header().get_page_type()
      == kMasstreeBorderPageType) {  // (reconstructed)
      ASSERT_ND(route->index_ < route->key_count_);
    } else {  // (reconstructed)
      ASSERT_ND(route->index_ <= route->key_count_);
      // ...
      ASSERT_ND(route->index_mini_ <= route->key_count_mini_);
    }
  }
}
void MasstreeCursor::set_should_skip_cur_route() {
  Route* route = cur_route();
  // ...
  should_skip_cur_route_ = true;
  // Fast-forward the index past the page's records so that the next
  // proceed_route() immediately pops this route.
  if (forward_cursor_) {
    route->index_ = route->key_count_;
  }
  // ... (mirrored for the backward direction)
}