+ // Tail of nbd_free(x): return block <x> to the allocator.
+ // NOTE(review): the parameter <x> and the block pointer <b> are declared above
+ // this hunk; presumably b == (block_t *)x — confirm against the full function.
+ header_t *h = get_header(x);
+ int b_scale = h->scale;
+ // NOTE(review): %llu with an int argument is a printf format mismatch unless
+ // the TRACE macro widens its arguments internally — verify the TRACE macro.
+ TRACE("m1", "nbd_free: header %p scale %llu", h, b_scale);
+ ASSERT(b_scale && b_scale <= MAX_SCALE);
+#ifdef RECYCLE_PAGES
+ // Blocks larger than a page were mapped directly; unmap them here.
+ if (b_scale > PAGE_SCALE) {
+ int rc = munmap(x, 1ULL << b_scale);
+ ASSERT(rc == 0);
+ rc = rc; // silences the unused-variable warning when ASSERT compiles away under NDEBUG
+ }
+ // NOTE(review): there is no return after the munmap above, so for
+ // b_scale > PAGE_SCALE the memset and free-list code below would touch
+ // memory that has just been unmapped — confirm whether a return is missing.
+#endif
+#ifndef NDEBUG
+ // Poison the freed block so any stale read is obvious in a debugger.
+ memset(b, 0xcd, (1ULL << b_scale)); // bear trap
+#endif
+ tl_t *tl = &tl_[tid_]; // this thread's allocator state (thread-local data)
+ // Fast path: the freeing thread owns the block, so no cross-thread queue is needed.
+ if (h->owner == tid_) {
+ TRACE("m1", "nbd_free: private block, old free list head %p", tl->free_list[b_scale], 0);
+
+#ifndef RECYCLE_PAGES
+ // Push <b> onto this thread's per-size-class free list.
+ b->next = tl->free_list[b_scale];
+ tl->free_list[b_scale] = b;
+#else //RECYCLE_PAGES
+ // Push <b> onto its page's free list and track the page's occupancy.
+ b->next = h->free_list;
+ h->free_list = b;
+ h->num_in_use--;
+ size_class_t *sc = &tl->size_class[b_scale];
+ // The active page stays where it is; only non-active pages migrate between lists.
+ if (sc->active_page != h) {
+ if (h->num_in_use == 0) {
+ // remove <h> from the partial-page list
+ if (h->next != NULL) { h->next->prev = h->prev; }
+ if (h->prev != NULL) { h->prev->next = h->next; }
+ // put <h> on the free-page list (pushed via <next>)
+ // NOTE(review): h->prev is not cleared here — confirm the free-page list
+ // is only ever walked through <next>.
+ h->next = tl->free_pages;
+ tl->free_pages = h;
+ } else {
+ // move <h> to the top of the partial-page list
+ if (h->next != NULL) {
+ h->next->prev = h->prev;
+ if (h->prev != NULL) { h->prev->next = h->next; }
+ // NOTE(review): the old newest_partial's <next> is never pointed back at
+ // <h>, which leaves the doubly-linked list one-directional here — verify
+ // against how the partial-page list is traversed elsewhere.
+ h->prev = sc->newest_partial;
+ h->next = NULL;
+ sc->newest_partial = h;
+ }
+ }
+ }
+#endif//RECYCLE_PAGES
+ } else {
+ // Slow path: push <b> onto its owner's incoming queue; the owner reclaims it
+ // the next time it drains its queues (see process_incoming_blocks).
+ int b_owner = h->owner;
+ // NOTE(review): %llu with an int argument — same TRACE format concern as above.
+ TRACE("m1", "nbd_free: owner %llu", b_owner, 0);
+
+ // The assignment statements are volatile to prevent the compiler from reordering them.
+ // (volatile constrains only the compiler, not the CPU — presumably acceptable for
+ // this single-producer/single-consumer queue; confirm on weakly-ordered targets.)
+ VOLATILE_DEREF(b).next = NULL;
+ VOLATILE_DEREF(tl->blocks_to[b_owner]).next = b;
+
+ tl->blocks_to[b_owner] = b; // <b> is now the queue's tail
+ }
+}
+
+// Drain the queues of blocks that other threads freed back to this thread,
+// returning each reclaimed block to the appropriate local free list.
+// NOTE(review): the function's closing brace lies beyond this hunk.
+static inline void process_incoming_blocks (tl_t *tl) {
+ for (int p = 0; p < MAX_NUM_THREADS; ++p) {
+ block_t *b = tl->blocks_from[p]; // head of producer <p>'s incoming queue
+ if (EXPECT_FALSE(b == NULL)) continue; // the queue is completely empty
+
+ // Leave the last block on the queue. Removing the last block on the queue would create a
+ // race with the producer thread putting a new block on the queue.
+ // (<next> is saved before the body clobbers b->next, so the walk stays valid.)
+ for (block_t *next = b->next; next != NULL; b = next, next = b->next) {
+ // push <b> onto the appropriate free list
+#ifndef RECYCLE_PAGES
+ int b_scale = get_header(b)->scale;
+ b->next = tl->free_list[b_scale];
+ tl->free_list[b_scale] = b;
+#else //RECYCLE_PAGES
+ // With page recycling, blocks return to their own page's free list instead.
+ header_t *h = get_header(b);
+ b->next = h->free_list;
+ h->free_list = b;
+#endif//RECYCLE_PAGES
+ }
+ tl->blocks_from[p] = b; // new head = the single block left on the queue
+ }