Lines Matching +full:depth +full:-

1 // SPDX-License-Identifier: GPL-2.0-only
4 * Copyright (C) 2013-2014 Jens Axboe
14 unsigned depth = sb->depth; in init_alloc_hint() local
16 sb->alloc_hint = alloc_percpu_gfp(unsigned int, flags); in init_alloc_hint()
17 if (!sb->alloc_hint) in init_alloc_hint()
18 return -ENOMEM; in init_alloc_hint()
20 if (depth && !sb->round_robin) { in init_alloc_hint()
24 *per_cpu_ptr(sb->alloc_hint, i) = get_random_u32_below(depth); in init_alloc_hint()
30 unsigned int depth) in update_alloc_hint_before_get() argument
34 hint = this_cpu_read(*sb->alloc_hint); in update_alloc_hint_before_get()
35 if (unlikely(hint >= depth)) { in update_alloc_hint_before_get()
36 hint = depth ? get_random_u32_below(depth) : 0; in update_alloc_hint_before_get()
37 this_cpu_write(*sb->alloc_hint, hint); in update_alloc_hint_before_get()
44 unsigned int depth, in update_alloc_hint_after_get() argument
48 if (nr == -1) { in update_alloc_hint_after_get()
50 this_cpu_write(*sb->alloc_hint, 0); in update_alloc_hint_after_get()
51 } else if (nr == hint || unlikely(sb->round_robin)) { in update_alloc_hint_after_get()
54 if (hint >= depth - 1) in update_alloc_hint_after_get()
56 this_cpu_write(*sb->alloc_hint, hint); in update_alloc_hint_after_get()
64 unsigned int depth, unsigned int alloc_hint, bool wrap) in sbitmap_deferred_clear() argument
68 guard(raw_spinlock_irqsave)(&map->swap_lock); in sbitmap_deferred_clear()
70 if (!map->cleared) { in sbitmap_deferred_clear()
71 if (depth == 0) in sbitmap_deferred_clear()
74 word_mask = (~0UL) >> (BITS_PER_LONG - depth); in sbitmap_deferred_clear()
77 * ->cleared to word, and we change it to retry in case in sbitmap_deferred_clear()
83 word_mask &= ~((1UL << alloc_hint) - 1); in sbitmap_deferred_clear()
85 return (READ_ONCE(map->word) & word_mask) != word_mask; in sbitmap_deferred_clear()
91 mask = xchg(&map->cleared, 0); in sbitmap_deferred_clear()
96 atomic_long_andnot(mask, (atomic_long_t *)&map->word); in sbitmap_deferred_clear()
97 BUILD_BUG_ON(sizeof(atomic_long_t) != sizeof(map->word)); in sbitmap_deferred_clear()
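Aside (not from lib/sbitmap.c): a minimal user-space sketch of the two-word scheme that the deferred clear above relies on. Freed bits only accumulate in ->cleared at first; an atomic exchange plus and-not later folds them back into ->word so they become allocatable again. All names and values here are invented for illustration, and C11 atomics stand in for the kernel's xchg()/atomic_long_andnot().

#include <assert.h>
#include <stdatomic.h>

/* Toy model: a set bit in "word" means allocated, a set bit in "cleared" means
 * the bit was freed but not yet returned to "word". */
struct toy_word {
	_Atomic unsigned long word;
	_Atomic unsigned long cleared;
};

/* Mirrors the xchg() + andnot step: grab all pending frees at once and drop
 * them from the allocation word. */
static void toy_deferred_clear(struct toy_word *m)
{
	unsigned long mask = atomic_exchange(&m->cleared, 0UL);

	atomic_fetch_and(&m->word, ~mask);
}

int main(void)
{
	struct toy_word m = { .word = 0xFFUL, .cleared = 0UL };

	/* Freeing bits 0 and 4 only marks them in ->cleared. */
	atomic_fetch_or(&m.cleared, (1UL << 0) | (1UL << 4));
	assert(atomic_load(&m.word) == 0xFFUL);

	/* The deferred clear folds them back: 0xFF & ~0x11 == 0xEE. */
	toy_deferred_clear(&m);
	assert(atomic_load(&m.word) == 0xEEUL);
	return 0;
}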
101 int sbitmap_init_node(struct sbitmap *sb, unsigned int depth, int shift, in sbitmap_init_node() argument
109 shift = sbitmap_calculate_shift(depth); in sbitmap_init_node()
113 return -EINVAL; in sbitmap_init_node()
115 sb->shift = shift; in sbitmap_init_node()
116 sb->depth = depth; in sbitmap_init_node()
117 sb->map_nr = DIV_ROUND_UP(sb->depth, bits_per_word); in sbitmap_init_node()
118 sb->round_robin = round_robin; in sbitmap_init_node()
120 if (depth == 0) { in sbitmap_init_node()
121 sb->map = NULL; in sbitmap_init_node()
127 return -ENOMEM; in sbitmap_init_node()
129 sb->alloc_hint = NULL; in sbitmap_init_node()
132 sb->map = kvzalloc_node(sb->map_nr * sizeof(*sb->map), flags, node); in sbitmap_init_node()
133 if (!sb->map) { in sbitmap_init_node()
134 free_percpu(sb->alloc_hint); in sbitmap_init_node()
135 return -ENOMEM; in sbitmap_init_node()
138 for (i = 0; i < sb->map_nr; i++) in sbitmap_init_node()
139 raw_spin_lock_init(&sb->map[i].swap_lock); in sbitmap_init_node()
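For reference, a stand-alone check of the geometry arithmetic used above: bits_per_word is 1 << shift and map_nr rounds the requested depth up to whole words. The depth and shift values below are arbitrary examples, not taken from the file.

#include <assert.h>

#define TOY_DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
	unsigned int depth = 100, shift = 6;          /* example values only */
	unsigned int bits_per_word = 1U << shift;     /* 64 bits per map word */
	unsigned int map_nr = TOY_DIV_ROUND_UP(depth, bits_per_word);

	/* 100 bits need two 64-bit words: one full, one holding the last 36 bits. */
	assert(bits_per_word == 64);
	assert(map_nr == 2);
	return 0;
}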
145 void sbitmap_resize(struct sbitmap *sb, unsigned int depth) in sbitmap_resize() argument
147 unsigned int bits_per_word = 1U << sb->shift; in sbitmap_resize()
150 for (i = 0; i < sb->map_nr; i++) in sbitmap_resize()
151 sbitmap_deferred_clear(&sb->map[i], 0, 0, 0); in sbitmap_resize()
153 sb->depth = depth; in sbitmap_resize()
154 sb->map_nr = DIV_ROUND_UP(sb->depth, bits_per_word); in sbitmap_resize()
158 static int __sbitmap_get_word(unsigned long *word, unsigned long depth, in __sbitmap_get_word() argument
167 nr = find_next_zero_bit(word, depth, hint); in __sbitmap_get_word()
168 if (unlikely(nr >= depth)) { in __sbitmap_get_word()
178 return -1; in __sbitmap_get_word()
185 if (hint >= depth - 1) in __sbitmap_get_word()
193 unsigned int depth, in sbitmap_find_bit_in_word() argument
200 nr = __sbitmap_get_word(&map->word, depth, in sbitmap_find_bit_in_word()
202 if (nr != -1) in sbitmap_find_bit_in_word()
204 if (!sbitmap_deferred_clear(map, depth, alloc_hint, wrap)) in sbitmap_find_bit_in_word()
212 unsigned int depth, in sbitmap_find_bit() argument
218 int nr = -1; in sbitmap_find_bit()
220 for (i = 0; i < sb->map_nr; i++) { in sbitmap_find_bit()
221 nr = sbitmap_find_bit_in_word(&sb->map[index], in sbitmap_find_bit()
224 depth), in sbitmap_find_bit()
227 if (nr != -1) { in sbitmap_find_bit()
228 nr += index << sb->shift; in sbitmap_find_bit()
234 if (++index >= sb->map_nr) in sbitmap_find_bit()
252 if (sb->round_robin) in __sbitmap_get()
258 !sb->round_robin); in __sbitmap_get()
264 unsigned int hint, depth; in sbitmap_get() local
266 if (WARN_ON_ONCE(unlikely(!sb->alloc_hint))) in sbitmap_get()
267 return -1; in sbitmap_get()
269 depth = READ_ONCE(sb->depth); in sbitmap_get()
270 hint = update_alloc_hint_before_get(sb, depth); in sbitmap_get()
272 update_alloc_hint_after_get(sb, depth, hint, nr); in sbitmap_get()
293 unsigned int hint, depth; in sbitmap_get_shallow() local
295 if (WARN_ON_ONCE(unlikely(!sb->alloc_hint))) in sbitmap_get_shallow()
296 return -1; in sbitmap_get_shallow()
298 depth = READ_ONCE(sb->depth); in sbitmap_get_shallow()
299 hint = update_alloc_hint_before_get(sb, depth); in sbitmap_get_shallow()
301 update_alloc_hint_after_get(sb, depth, hint, nr); in sbitmap_get_shallow()
311 for (i = 0; i < sb->map_nr; i++) { in sbitmap_any_bit_set()
312 if (sb->map[i].word & ~sb->map[i].cleared) in sbitmap_any_bit_set()
323 for (i = 0; i < sb->map_nr; i++) { in __sbitmap_weight()
324 const struct sbitmap_word *word = &sb->map[i]; in __sbitmap_weight()
328 weight += bitmap_weight(&word->word, word_depth); in __sbitmap_weight()
330 weight += bitmap_weight(&word->cleared, word_depth); in __sbitmap_weight()
342 return __sbitmap_weight(sb, true) - sbitmap_cleared(sb); in sbitmap_weight()
348 seq_printf(m, "depth=%u\n", sb->depth); in sbitmap_show()
351 seq_printf(m, "bits_per_word=%u\n", 1U << sb->shift); in sbitmap_show()
352 seq_printf(m, "map_nr=%u\n", sb->map_nr); in sbitmap_show()
375 for (i = 0; i < sb->map_nr; i++) { in sbitmap_bitmap_show()
376 unsigned long word = READ_ONCE(sb->map[i].word); in sbitmap_bitmap_show()
377 unsigned long cleared = READ_ONCE(sb->map[i].cleared); in sbitmap_bitmap_show()
383 unsigned int bits = min(8 - byte_bits, word_bits); in sbitmap_bitmap_show()
385 byte |= (word & (BIT(bits) - 1)) << byte_bits; in sbitmap_bitmap_show()
394 word_bits -= bits; in sbitmap_bitmap_show()
407 unsigned int depth) in sbq_calc_wake_batch() argument
414 * batch size is small enough that the full depth of the bitmap, in sbq_calc_wake_batch()
415 * potentially limited by a shallow depth, is enough to wake up all of in sbq_calc_wake_batch()
419 * be a partial word. There are depth / bits_per_word full words and in sbq_calc_wake_batch()
420 * depth % bits_per_word bits left over. In bitwise arithmetic: in sbq_calc_wake_batch()
423 * depth / bits_per_word = depth >> shift in sbq_calc_wake_batch()
424 * depth % bits_per_word = depth & ((1 << shift) - 1) in sbq_calc_wake_batch()
426 * Each word can be limited to sbq->min_shallow_depth bits. in sbq_calc_wake_batch()
428 shallow_depth = min(1U << sbq->sb.shift, sbq->min_shallow_depth); in sbq_calc_wake_batch()
429 depth = ((depth >> sbq->sb.shift) * shallow_depth + in sbq_calc_wake_batch()
430 min(depth & ((1U << sbq->sb.shift) - 1), shallow_depth)); in sbq_calc_wake_batch()
431 wake_batch = clamp_t(unsigned int, depth / SBQ_WAIT_QUEUES, 1, in sbq_calc_wake_batch()
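A worked check of the derivation in the comment above, as a hedged stand-alone sketch: the usable depth is rebuilt from the full words plus the partial word, each capped at the shallow depth, and then divided across the wait queues. The constants mirror SBQ_WAIT_QUEUES and SBQ_WAKE_BATCH (both 8 in the kernel header); the sample depths are arbitrary.

#include <assert.h>
#include <limits.h>

#define TOY_SBQ_WAIT_QUEUES 8
#define TOY_SBQ_WAKE_BATCH  8

static unsigned int toy_min(unsigned int a, unsigned int b)
{
	return a < b ? a : b;
}

static unsigned int toy_wake_batch(unsigned int depth, unsigned int shift,
				   unsigned int min_shallow_depth)
{
	unsigned int shallow = toy_min(1U << shift, min_shallow_depth);
	unsigned int usable = (depth >> shift) * shallow +
			      toy_min(depth & ((1U << shift) - 1), shallow);
	unsigned int batch = usable / TOY_SBQ_WAIT_QUEUES;

	if (batch < 1)
		batch = 1;
	if (batch > TOY_SBQ_WAKE_BATCH)
		batch = TOY_SBQ_WAKE_BATCH;
	return batch;
}

int main(void)
{
	/* 256 tags in 64-bit words, but only 8 bits usable per word:
	 * 4 words * 8 = 32 usable bits, and 32 / 8 wait queues = a batch of 4. */
	assert(toy_wake_batch(256, 6, 8) == 4);
	/* With no shallow limit, 256 / 8 = 32 is clamped down to 8. */
	assert(toy_wake_batch(256, 6, UINT_MAX) == 8);
	return 0;
}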
437 int sbitmap_queue_init_node(struct sbitmap_queue *sbq, unsigned int depth, in sbitmap_queue_init_node() argument
443 ret = sbitmap_init_node(&sbq->sb, depth, shift, flags, node, in sbitmap_queue_init_node()
448 sbq->min_shallow_depth = UINT_MAX; in sbitmap_queue_init_node()
449 sbq->wake_batch = sbq_calc_wake_batch(sbq, depth); in sbitmap_queue_init_node()
450 atomic_set(&sbq->wake_index, 0); in sbitmap_queue_init_node()
451 atomic_set(&sbq->ws_active, 0); in sbitmap_queue_init_node()
452 atomic_set(&sbq->completion_cnt, 0); in sbitmap_queue_init_node()
453 atomic_set(&sbq->wakeup_cnt, 0); in sbitmap_queue_init_node()
455 sbq->ws = kzalloc_node(SBQ_WAIT_QUEUES * sizeof(*sbq->ws), flags, node); in sbitmap_queue_init_node()
456 if (!sbq->ws) { in sbitmap_queue_init_node()
457 sbitmap_free(&sbq->sb); in sbitmap_queue_init_node()
458 return -ENOMEM; in sbitmap_queue_init_node()
462 init_waitqueue_head(&sbq->ws[i].wait); in sbitmap_queue_init_node()
469 unsigned int depth) in sbitmap_queue_update_wake_batch() argument
473 wake_batch = sbq_calc_wake_batch(sbq, depth); in sbitmap_queue_update_wake_batch()
474 if (sbq->wake_batch != wake_batch) in sbitmap_queue_update_wake_batch()
475 WRITE_ONCE(sbq->wake_batch, wake_batch); in sbitmap_queue_update_wake_batch()
482 unsigned int depth = (sbq->sb.depth + users - 1) / users; in sbitmap_queue_recalculate_wake_batch() local
484 wake_batch = clamp_val(depth / SBQ_WAIT_QUEUES, in sbitmap_queue_recalculate_wake_batch()
487 WRITE_ONCE(sbq->wake_batch, wake_batch); in sbitmap_queue_recalculate_wake_batch()
491 void sbitmap_queue_resize(struct sbitmap_queue *sbq, unsigned int depth) in sbitmap_queue_resize() argument
493 sbitmap_queue_update_wake_batch(sbq, depth); in sbitmap_queue_resize()
494 sbitmap_resize(&sbq->sb, depth); in sbitmap_queue_resize()
500 return sbitmap_get(&sbq->sb); in __sbitmap_queue_get()
507 struct sbitmap *sb = &sbq->sb; in __sbitmap_queue_get_batch()
508 unsigned int hint, depth; in __sbitmap_queue_get_batch() local
512 if (unlikely(sb->round_robin)) in __sbitmap_queue_get_batch()
515 depth = READ_ONCE(sb->depth); in __sbitmap_queue_get_batch()
516 hint = update_alloc_hint_before_get(sb, depth); in __sbitmap_queue_get_batch()
520 for (i = 0; i < sb->map_nr; i++) { in __sbitmap_queue_get_batch()
521 struct sbitmap_word *map = &sb->map[index]; in __sbitmap_queue_get_batch()
527 val = READ_ONCE(map->word); in __sbitmap_queue_get_batch()
528 if (val == (1UL << (map_depth - 1)) - 1) in __sbitmap_queue_get_batch()
533 atomic_long_t *ptr = (atomic_long_t *) &map->word; in __sbitmap_queue_get_batch()
535 get_mask = ((1UL << nr_tags) - 1) << nr; in __sbitmap_queue_get_batch()
541 *offset = nr + (index << sb->shift); in __sbitmap_queue_get_batch()
542 update_alloc_hint_after_get(sb, depth, hint, in __sbitmap_queue_get_batch()
543 *offset + nr_tags - 1); in __sbitmap_queue_get_batch()
549 if (++index >= sb->map_nr) in __sbitmap_queue_get_batch()
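As an aside, a hedged user-space sketch of the batch-claim idea seen above: build a contiguous mask of nr_tags bits at offset nr and take it with one compare-and-swap. The helper name and the all-or-nothing policy are inventions of this sketch; the in-kernel loop instead keeps whichever subset of the mask it actually wins.

#include <assert.h>
#include <stdatomic.h>

/* Try to claim nr_tags contiguous bits starting at bit nr; returns 1 on success. */
static int toy_get_batch(_Atomic unsigned long *word, unsigned int nr,
			 unsigned int nr_tags)
{
	unsigned long get_mask = ((1UL << nr_tags) - 1) << nr;
	unsigned long val = atomic_load(word);

	do {
		if (val & get_mask)     /* some bit already taken: give up */
			return 0;
	} while (!atomic_compare_exchange_weak(word, &val, val | get_mask));
	return 1;
}

int main(void)
{
	_Atomic unsigned long word = 0x0FUL;       /* bits 0-3 already allocated */

	assert(toy_get_batch(&word, 4, 4) == 1);   /* bits 4-7 free: claimed */
	assert(atomic_load(&word) == 0xFFUL);
	assert(toy_get_batch(&word, 6, 2) == 0);   /* overlaps taken bits: fails */
	return 0;
}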
559 WARN_ON_ONCE(shallow_depth < sbq->min_shallow_depth); in sbitmap_queue_get_shallow()
561 return sbitmap_get_shallow(&sbq->sb, shallow_depth); in sbitmap_queue_get_shallow()
568 sbq->min_shallow_depth = min_shallow_depth; in sbitmap_queue_min_shallow_depth()
569 sbitmap_queue_update_wake_batch(sbq, sbq->sb.depth); in sbitmap_queue_min_shallow_depth()
577 if (!atomic_read(&sbq->ws_active)) in __sbitmap_queue_wake_up()
580 wake_index = atomic_read(&sbq->wake_index); in __sbitmap_queue_wake_up()
582 struct sbq_wait_state *ws = &sbq->ws[wake_index]; in __sbitmap_queue_wake_up()
592 if (waitqueue_active(&ws->wait)) { in __sbitmap_queue_wake_up()
593 woken = wake_up_nr(&ws->wait, nr); in __sbitmap_queue_wake_up()
596 nr -= woken; in __sbitmap_queue_wake_up()
600 if (wake_index != atomic_read(&sbq->wake_index)) in __sbitmap_queue_wake_up()
601 atomic_set(&sbq->wake_index, wake_index); in __sbitmap_queue_wake_up()
606 unsigned int wake_batch = READ_ONCE(sbq->wake_batch); in sbitmap_queue_wake_up()
609 if (!atomic_read(&sbq->ws_active)) in sbitmap_queue_wake_up()
612 atomic_add(nr, &sbq->completion_cnt); in sbitmap_queue_wake_up()
613 wakeups = atomic_read(&sbq->wakeup_cnt); in sbitmap_queue_wake_up()
616 if (atomic_read(&sbq->completion_cnt) - wakeups < wake_batch) in sbitmap_queue_wake_up()
618 } while (!atomic_try_cmpxchg(&sbq->wakeup_cnt, in sbitmap_queue_wake_up()
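A single-threaded sketch of the completion_cnt / wakeup_cnt pacing used just above: a wakeup round fires only once per wake_batch completions. The struct and helper are invented for illustration; the real code uses atomics and a try_cmpxchg loop so that concurrent completers race for the same batch.

#include <assert.h>

struct toy_sbq {
	unsigned int completion_cnt;
	unsigned int wakeup_cnt;
	unsigned int wake_batch;
};

/* Returns 1 when the caller should wake up wake_batch waiters. */
static int toy_complete(struct toy_sbq *q, unsigned int nr)
{
	q->completion_cnt += nr;
	if (q->completion_cnt - q->wakeup_cnt < q->wake_batch)
		return 0;                   /* not enough new completions yet */
	q->wakeup_cnt += q->wake_batch;     /* account for one wakeup round */
	return 1;
}

int main(void)
{
	struct toy_sbq q = { .wake_batch = 4 };
	unsigned int i, rounds = 0;

	for (i = 0; i < 10; i++)            /* ten single-tag completions */
		rounds += toy_complete(&q, 1);

	assert(rounds == 2);                /* only two full batches of 4 so far */
	return 0;
}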
627 if (likely(!sb->round_robin && tag < sb->depth)) in sbitmap_update_cpu_hint()
628 data_race(*per_cpu_ptr(sb->alloc_hint, cpu) = tag); in sbitmap_update_cpu_hint()
634 struct sbitmap *sb = &sbq->sb; in sbitmap_queue_clear_batch()
641 const int tag = tags[i] - offset; in sbitmap_queue_clear_batch()
645 this_addr = &sb->map[SB_NR_TO_INDEX(sb, tag)].word; in sbitmap_queue_clear_batch()
661 sbitmap_update_cpu_hint(&sbq->sb, raw_smp_processor_id(), in sbitmap_queue_clear_batch()
662 tags[nr_tags - 1] - offset); in sbitmap_queue_clear_batch()
672 * of blk_mq) by this bit for avoiding race with re-allocation, in sbitmap_queue_clear()
679 sbitmap_deferred_clear_bit(&sbq->sb, nr); in sbitmap_queue_clear()
689 sbitmap_update_cpu_hint(&sbq->sb, cpu, nr); in sbitmap_queue_clear()
702 wake_index = atomic_read(&sbq->wake_index); in sbitmap_queue_wake_all()
704 struct sbq_wait_state *ws = &sbq->ws[wake_index]; in sbitmap_queue_wake_all()
706 if (waitqueue_active(&ws->wait)) in sbitmap_queue_wake_all()
707 wake_up(&ws->wait); in sbitmap_queue_wake_all()
719 sbitmap_show(&sbq->sb, m); in sbitmap_queue_show()
727 seq_printf(m, "%u", *per_cpu_ptr(sbq->sb.alloc_hint, i)); in sbitmap_queue_show()
731 seq_printf(m, "wake_batch=%u\n", sbq->wake_batch); in sbitmap_queue_show()
732 seq_printf(m, "wake_index=%d\n", atomic_read(&sbq->wake_index)); in sbitmap_queue_show()
733 seq_printf(m, "ws_active=%d\n", atomic_read(&sbq->ws_active)); in sbitmap_queue_show()
737 struct sbq_wait_state *ws = &sbq->ws[i]; in sbitmap_queue_show()
739 waitqueue_active(&ws->wait) ? "active" : "inactive"); in sbitmap_queue_show()
743 seq_printf(m, "round_robin=%d\n", sbq->sb.round_robin); in sbitmap_queue_show()
744 seq_printf(m, "min_shallow_depth=%u\n", sbq->min_shallow_depth); in sbitmap_queue_show()
752 if (!sbq_wait->sbq) { in sbitmap_add_wait_queue()
753 sbq_wait->sbq = sbq; in sbitmap_add_wait_queue()
754 atomic_inc(&sbq->ws_active); in sbitmap_add_wait_queue()
755 add_wait_queue(&ws->wait, &sbq_wait->wait); in sbitmap_add_wait_queue()
762 list_del_init(&sbq_wait->wait.entry); in sbitmap_del_wait_queue()
763 if (sbq_wait->sbq) { in sbitmap_del_wait_queue()
764 atomic_dec(&sbq_wait->sbq->ws_active); in sbitmap_del_wait_queue()
765 sbq_wait->sbq = NULL; in sbitmap_del_wait_queue()
774 if (!sbq_wait->sbq) { in sbitmap_prepare_to_wait()
775 atomic_inc(&sbq->ws_active); in sbitmap_prepare_to_wait()
776 sbq_wait->sbq = sbq; in sbitmap_prepare_to_wait()
778 prepare_to_wait_exclusive(&ws->wait, &sbq_wait->wait, state); in sbitmap_prepare_to_wait()
785 finish_wait(&ws->wait, &sbq_wait->wait); in sbitmap_finish_wait()
786 if (sbq_wait->sbq) { in sbitmap_finish_wait()
787 atomic_dec(&sbq->ws_active); in sbitmap_finish_wait()
788 sbq_wait->sbq = NULL; in sbitmap_finish_wait()