Lines matching refs: mt (lib/maple_tree.c)

378 static inline bool mt_is_alloc(struct maple_tree *mt)  in mt_is_alloc()  argument
380 return (mt->ma_flags & MT_FLAGS_ALLOC_RANGE); in mt_is_alloc()
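
mt_is_alloc() distinguishes a plain range tree from an allocation tree, which additionally tracks gap sizes for the allocating search further down this listing. A minimal sketch of declaring both kinds, assuming the initializers from include/linux/maple_tree.h (the demo_* names are illustrative):

#include <linux/maple_tree.h>

/* Plain range tree: no gap tracking, internal spinlock. */
static DEFINE_MTREE(demo_tree);

/* Allocation tree: MT_FLAGS_ALLOC_RANGE makes mt_is_alloc() true and
 * enables the gap metadata consumed by mtree_alloc_range() below. */
static struct maple_tree demo_alloc_tree;

static void demo_init(void)
{
	mt_init_flags(&demo_alloc_tree, MT_FLAGS_ALLOC_RANGE);
}
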
783 static inline void __rcu **ma_slots(struct maple_node *mn, enum maple_type mt) in ma_slots() argument
785 switch (mt) { in ma_slots()
797 static inline bool mt_write_locked(const struct maple_tree *mt) in mt_write_locked() argument
799 return mt_external_lock(mt) ? mt_write_lock_is_held(mt) : in mt_write_locked()
800 lockdep_is_held(&mt->ma_lock); in mt_write_locked()
803 static inline bool mt_locked(const struct maple_tree *mt) in mt_locked() argument
805 return mt_external_lock(mt) ? mt_lock_is_held(mt) : in mt_locked()
806 lockdep_is_held(&mt->ma_lock); in mt_locked()
809 static inline void *mt_slot(const struct maple_tree *mt, in mt_slot() argument
812 return rcu_dereference_check(slots[offset], mt_locked(mt)); in mt_slot()
815 static inline void *mt_slot_locked(struct maple_tree *mt, void __rcu **slots, in mt_slot_locked() argument
818 return rcu_dereference_protected(slots[offset], mt_write_locked(mt)); in mt_slot_locked()
859 static inline void *mt_root_locked(struct maple_tree *mt) in mt_root_locked() argument
861 return rcu_dereference_protected(mt->ma_root, mt_write_locked(mt)); in mt_root_locked()
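
mt_locked() and mt_write_locked() exist as lockdep expressions for the rcu_dereference_check()/rcu_dereference_protected() calls above: a slot may be read under rcu_read_lock() or the tree lock, but may only be written while the (internal or external, per mt_external_lock()) tree lock is held for writing. A hedged sketch of the two access modes using the public API, which takes these locks internally:

#include <linux/maple_tree.h>
#include <linux/rcupdate.h>
#include <linux/gfp.h>

static void *demo_read(struct maple_tree *mt, unsigned long index)
{
	void *entry;

	/* Read side: mtree_load() already takes rcu_read_lock(); the
	 * explicit pair here only illustrates a wider read section. */
	rcu_read_lock();
	entry = mtree_load(mt, index);
	rcu_read_unlock();
	return entry;
}

static int demo_write(struct maple_tree *mt, unsigned long index, void *p)
{
	/* Write side: mtree_store() takes mtree_lock() itself, which is
	 * what satisfies the mt_write_locked() lockdep assertion. */
	return mtree_store(mt, index, p, GFP_KERNEL);
}
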
876 enum maple_type mt) in ma_meta() argument
878 switch (mt) { in ma_meta()
893 static inline void ma_set_meta(struct maple_node *mn, enum maple_type mt, in ma_set_meta() argument
896 struct maple_metadata *meta = ma_meta(mn, mt); in ma_set_meta()
910 static inline void mt_clear_meta(struct maple_tree *mt, struct maple_node *mn, in mt_clear_meta() argument
923 next = mt_slot_locked(mt, slots, in mt_clear_meta()
947 enum maple_type mt) in ma_meta_end() argument
949 struct maple_metadata *meta = ma_meta(mn, mt); in ma_meta_end()
960 enum maple_type mt) in ma_meta_gap() argument
971 static inline void ma_set_meta_gap(struct maple_node *mn, enum maple_type mt, in ma_set_meta_gap() argument
975 struct maple_metadata *meta = ma_meta(mn, mt); in ma_set_meta_gap()
1002 static void mt_destroy_walk(struct maple_enode *enode, struct maple_tree *mt,
1494 enum maple_type mt; in mas_leaf_max_gap() local
1502 mt = mte_node_type(mas->node); in mas_leaf_max_gap()
1504 slots = ma_slots(mn, mt); in mas_leaf_max_gap()
1506 if (unlikely(ma_is_dense(mt))) { in mas_leaf_max_gap()
1508 for (i = 0; i < mt_slots[mt]; i++) { in mas_leaf_max_gap()
1526 pivots = ma_pivots(mn, mt); in mas_leaf_max_gap()
1535 max_piv = ma_data_end(mn, mt, pivots, mas->max) - 1; in mas_leaf_max_gap()
1574 ma_max_gap(struct maple_node *node, unsigned long *gaps, enum maple_type mt, in ma_max_gap() argument
1580 i = offset = ma_meta_end(node, mt); in ma_max_gap()
1602 enum maple_type mt; in mas_max_gap() local
1605 mt = mte_node_type(mas->node); in mas_max_gap()
1606 if (ma_is_leaf(mt)) in mas_max_gap()
1610 MAS_BUG_ON(mas, mt != maple_arange_64); in mas_max_gap()
1611 offset = ma_meta_gap(node, mt); in mas_max_gap()
1612 gaps = ma_gaps(node, mt); in mas_max_gap()
1772 enum maple_type mt; in mas_find_child() local
1780 mt = mte_node_type(mas->node); in mas_find_child()
1782 slots = ma_slots(node, mt); in mas_find_child()
1783 pivots = ma_pivots(node, mt); in mas_find_child()
1784 end = ma_data_end(node, mt, pivots, mas->max); in mas_find_child()
1946 enum maple_type mt; in mas_mab_cp() local
1954 mt = mte_node_type(mas->node); in mas_mab_cp()
1955 pivots = ma_pivots(node, mt); in mas_mab_cp()
1963 piv_end = min(mas_end, mt_pivots[mt]); in mas_mab_cp()
1974 b_node->pivot[j] = mas_safe_pivot(mas, pivots, i, mt); in mas_mab_cp()
1979 slots = ma_slots(node, mt); in mas_mab_cp()
1981 if (!ma_is_leaf(mt) && mt_is_alloc(mas->tree)) { in mas_mab_cp()
1982 gaps = ma_gaps(node, mt); in mas_mab_cp()
2002 enum maple_type mt, unsigned char end) in mas_leaf_set_meta() argument
2005 if (mt_pivots[mt] <= end) in mas_leaf_set_meta()
2011 if (end < mt_slots[mt] - 1) in mas_leaf_set_meta()
2012 ma_set_meta(node, mt, 0, end); in mas_leaf_set_meta()
2027 enum maple_type mt = mte_node_type(mas->node); in mab_mas_cp() local
2029 void __rcu **slots = ma_slots(node, mt); in mab_mas_cp()
2030 unsigned long *pivots = ma_pivots(node, mt); in mab_mas_cp()
2034 if (mab_end - mab_start > mt_pivots[mt]) in mab_mas_cp()
2037 if (!pivots[mt_pivots[mt] - 1]) in mab_mas_cp()
2038 slots[mt_pivots[mt]] = NULL; in mab_mas_cp()
2052 if (likely(!ma_is_leaf(mt) && mt_is_alloc(mas->tree))) { in mab_mas_cp()
2056 gaps = ma_gaps(node, mt); in mab_mas_cp()
2065 ma_set_meta(node, mt, offset, end); in mab_mas_cp()
2067 mas_leaf_set_meta(mas, node, pivots, mt, end); in mab_mas_cp()
2078 enum maple_type mt) in mas_bulk_rebalance() argument
2086 if (end > mt_min_slots[mt]) { in mas_bulk_rebalance()
3051 enum maple_type mt = mte_node_type(mas->node); in mas_destroy_rebalance() local
3054 unsigned char offset, tmp, split = mt_slots[mt] / 2; in mas_destroy_rebalance()
3078 slots = ma_slots(newnode, mt); in mas_destroy_rebalance()
3079 pivs = ma_pivots(newnode, mt); in mas_destroy_rebalance()
3081 l_slots = ma_slots(left, mt); in mas_destroy_rebalance()
3082 l_pivs = ma_pivots(left, mt); in mas_destroy_rebalance()
3090 memcpy(slots + tmp, ma_slots(node, mt), sizeof(void *) * end); in mas_destroy_rebalance()
3091 memcpy(pivs + tmp, ma_pivots(node, mt), sizeof(unsigned long) * end); in mas_destroy_rebalance()
3099 unsigned char max_p = mt_pivots[mt]; in mas_destroy_rebalance()
3100 unsigned char max_s = mt_slots[mt]; in mas_destroy_rebalance()
3106 if (tmp < mt_slots[mt]) in mas_destroy_rebalance()
3110 ma_set_meta(node, mt, 0, tmp - 1); in mas_destroy_rebalance()
3118 ma_set_meta(left, mt, 0, split); in mas_destroy_rebalance()
3125 mas->node = mt_mk_node(newnode, mt); in mas_destroy_rebalance()
3126 ma_set_meta(newnode, mt, 0, tmp); in mas_destroy_rebalance()
3130 mt = mte_node_type(l_mas.node); in mas_destroy_rebalance()
3131 slots = ma_slots(new_left, mt); in mas_destroy_rebalance()
3132 pivs = ma_pivots(new_left, mt); in mas_destroy_rebalance()
3135 ma_set_meta(new_left, mt, 0, split); in mas_destroy_rebalance()
3136 l_mas.node = mt_mk_node(new_left, mt); in mas_destroy_rebalance()
3140 mt = mas_parent_type(&l_mas, l_mas.node); in mas_destroy_rebalance()
3142 slots = ma_slots(parent, mt); in mas_destroy_rebalance()
3143 pivs = ma_pivots(parent, mt); in mas_destroy_rebalance()
3148 eparent = mt_mk_node(parent, mt); in mas_destroy_rebalance()
4361 enum maple_type mt; in mas_prev_node() local
4390 mt = mte_node_type(mas->node); in mas_prev_node()
4393 slots = ma_slots(node, mt); in mas_prev_node()
4398 mt = mte_node_type(mas->node); in mas_prev_node()
4400 pivots = ma_pivots(node, mt); in mas_prev_node()
4401 offset = ma_data_end(node, mt, pivots, max); in mas_prev_node()
4406 slots = ma_slots(node, mt); in mas_prev_node()
4408 pivots = ma_pivots(node, mt); in mas_prev_node()
4527 enum maple_type mt; in mas_next_node() local
4545 mt = mte_node_type(mas->node); in mas_next_node()
4546 pivots = ma_pivots(node, mt); in mas_next_node()
4547 node_end = ma_data_end(node, mt, pivots, mas->max); in mas_next_node()
4553 slots = ma_slots(node, mt); in mas_next_node()
4566 mt = mte_node_type(mas->node); in mas_next_node()
4567 slots = ma_slots(node, mt); in mas_next_node()
4574 pivots = ma_pivots(node, mt); in mas_next_node()
4576 mas->max = mas_safe_pivot(mas, pivots, mas->offset, mt); in mas_next_node()
5028 enum maple_type mt; in mas_empty_area() local
5059 mt = mte_node_type(mas->node); in mas_empty_area()
5060 pivots = ma_pivots(mas_mn(mas), mt); in mas_empty_area()
5140 unsigned char mte_dead_leaves(struct maple_enode *enode, struct maple_tree *mt, in mte_dead_leaves() argument
5149 entry = mt_slot(mt, slots, offset); in mte_dead_leaves()
5236 struct maple_tree *mt, struct maple_enode *prev, unsigned char offset) in mte_destroy_descend() argument
5249 next = mt_slot_locked(mt, slots, next_offset); in mte_destroy_descend()
5251 next = mt_slot_locked(mt, slots, ++next_offset); in mte_destroy_descend()
5265 static void mt_destroy_walk(struct maple_enode *enode, struct maple_tree *mt, in mt_destroy_walk() argument
5278 slots = mte_destroy_descend(&enode, mt, start, 0); in mt_destroy_walk()
5285 node->slot_len = mte_dead_leaves(enode, mt, slots); in mt_destroy_walk()
5298 tmp = mt_slot_locked(mt, slots, offset); in mt_destroy_walk()
5302 slots = mte_destroy_descend(&enode, mt, parent, offset); in mt_destroy_walk()
5309 node->slot_len = mte_dead_leaves(enode, mt, slots); in mt_destroy_walk()
5317 mt_clear_meta(mt, node, node->type); in mt_destroy_walk()
5328 struct maple_tree *mt) in mte_destroy_walk() argument
5332 if (mt_in_rcu(mt)) { in mte_destroy_walk()
5333 mt_destroy_walk(enode, mt, false); in mte_destroy_walk()
5336 mt_destroy_walk(enode, mt, true); in mte_destroy_walk()
5761 void *mt_next(struct maple_tree *mt, unsigned long index, unsigned long max) in mt_next() argument
5764 MA_STATE(mas, mt, index, index); in mt_next()
5884 void *mt_prev(struct maple_tree *mt, unsigned long index, unsigned long min) in mt_prev() argument
5887 MA_STATE(mas, mt, index, index); in mt_prev()
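
mt_next() and mt_prev() step to the neighbouring entry of an index without keeping iteration state. A minimal sketch, assuming the kernel-doc semantics (NULL when nothing is found within the limit):

#include <linux/maple_tree.h>
#include <linux/limits.h>
#include <linux/printk.h>

static void demo_neighbours(struct maple_tree *mt, unsigned long index)
{
	void *next = mt_next(mt, index, ULONG_MAX); /* first entry above @index */
	void *prev = mt_prev(mt, index, 0);         /* last entry below @index */

	pr_info("next=%p prev=%p\n", next, prev);
}
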
6239 void *mtree_load(struct maple_tree *mt, unsigned long index) in mtree_load() argument
6241 MA_STATE(mas, mt, index, index); in mtree_load()
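
mtree_load() is the basic RCU-safe point lookup. A minimal sketch:

#include <linux/maple_tree.h>

static bool demo_contains(struct maple_tree *mt, unsigned long index)
{
	/* Returns the entry whose range covers @index, or NULL; the
	 * rcu_read_lock()/unlock() pair is taken internally. */
	return mtree_load(mt, index) != NULL;
}
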
6281 int mtree_store_range(struct maple_tree *mt, unsigned long index, in mtree_store_range() argument
6284 MA_STATE(mas, mt, index, last); in mtree_store_range()
6294 mtree_lock(mt); in mtree_store_range()
6300 mtree_unlock(mt); in mtree_store_range()
6318 int mtree_store(struct maple_tree *mt, unsigned long index, void *entry, in mtree_store() argument
6321 return mtree_store_range(mt, index, index, entry, gfp); in mtree_store()
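
mtree_store_range() overwrites whatever occupies [index, last] with a single entry; mtree_store() is the single-index wrapper shown at line 6321. A minimal sketch, assuming GFP_KERNEL context:

#include <linux/maple_tree.h>
#include <linux/gfp.h>

static int demo_store(struct maple_tree *mt, void *entry)
{
	int ret;

	/* One pointer spanning [10, 19]; replaces any prior contents of
	 * the range. Returns 0, or -EINVAL/-ENOMEM on failure. */
	ret = mtree_store_range(mt, 10, 19, entry, GFP_KERNEL);
	if (ret)
		return ret;

	/* Equivalent to mtree_store_range(mt, 42, 42, ...). */
	return mtree_store(mt, 42, entry, GFP_KERNEL);
}
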
6336 int mtree_insert_range(struct maple_tree *mt, unsigned long first, in mtree_insert_range() argument
6339 MA_STATE(ms, mt, first, last); in mtree_insert_range()
6347 mtree_lock(mt); in mtree_insert_range()
6353 mtree_unlock(mt); in mtree_insert_range()
6371 int mtree_insert(struct maple_tree *mt, unsigned long index, void *entry, in mtree_insert() argument
6374 return mtree_insert_range(mt, index, index, entry, gfp); in mtree_insert()
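
mtree_insert_range()/mtree_insert() differ from the store variants only in that they refuse to overwrite: if any part of the range is already occupied they fail rather than replace. A minimal sketch:

#include <linux/maple_tree.h>
#include <linux/gfp.h>

static int demo_insert(struct maple_tree *mt, void *entry)
{
	/* Fails with -EEXIST if [100, 199] overlaps an existing entry,
	 * unlike mtree_store_range(), which would overwrite it. */
	return mtree_insert_range(mt, 100, 199, entry, GFP_KERNEL);
}
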
6378 int mtree_alloc_range(struct maple_tree *mt, unsigned long *startp, in mtree_alloc_range() argument
6384 MA_STATE(mas, mt, 0, 0); in mtree_alloc_range()
6385 if (!mt_is_alloc(mt)) in mtree_alloc_range()
6391 mtree_lock(mt); in mtree_alloc_range()
6411 mtree_unlock(mt); in mtree_alloc_range()
6416 int mtree_alloc_rrange(struct maple_tree *mt, unsigned long *startp, in mtree_alloc_rrange() argument
6422 MA_STATE(mas, mt, 0, 0); in mtree_alloc_rrange()
6423 if (!mt_is_alloc(mt)) in mtree_alloc_rrange()
6429 mtree_lock(mt); in mtree_alloc_rrange()
6449 mtree_unlock(mt); in mtree_alloc_rrange()
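
Both allocators require an allocation tree (hence the mt_is_alloc() checks at lines 6385 and 6423): they search the gap metadata for a free span of the requested size, store the entry there, and report its start. A hedged sketch, assuming the argument order from include/linux/maple_tree.h (start pointer, entry, size, then the min/max search window):

#include <linux/maple_tree.h>
#include <linux/gfp.h>

static int demo_alloc(struct maple_tree *mt, void *entry)
{
	unsigned long start;
	int ret;

	/* Bottom-up search for a 16-slot gap inside [0, 1023]. On
	 * success @start holds the allocated range's first index;
	 * -EBUSY is returned when no gap fits. */
	ret = mtree_alloc_range(mt, &start, entry, 16, 0, 1023, GFP_KERNEL);
	if (ret)
		return ret;

	/* The rrange variant runs the same search top-down, preferring
	 * the highest fitting gap. */
	return mtree_alloc_rrange(mt, &start, entry, 16, 0, 1023, GFP_KERNEL);
}
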
6464 void *mtree_erase(struct maple_tree *mt, unsigned long index) in mtree_erase() argument
6468 MA_STATE(mas, mt, index, index); in mtree_erase()
6471 mtree_lock(mt); in mtree_erase()
6473 mtree_unlock(mt); in mtree_erase()
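
mtree_erase() removes the entire range covering an index and hands back what was stored there. A minimal sketch:

#include <linux/maple_tree.h>

static void *demo_erase(struct maple_tree *mt, unsigned long index)
{
	/* Clears whatever range spans @index; returns the erased entry,
	 * or NULL if the index was already empty. */
	return mtree_erase(mt, index);
}
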
6485 void __mt_destroy(struct maple_tree *mt) in __mt_destroy() argument
6487 void *root = mt_root_locked(mt); in __mt_destroy()
6489 rcu_assign_pointer(mt->ma_root, NULL); in __mt_destroy()
6491 mte_destroy_walk(root, mt); in __mt_destroy()
6493 mt->ma_flags = 0; in __mt_destroy()
6503 void mtree_destroy(struct maple_tree *mt) in mtree_destroy() argument
6505 mtree_lock(mt); in mtree_destroy()
6506 __mt_destroy(mt); in mtree_destroy()
6507 mtree_unlock(mt); in mtree_destroy()
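
__mt_destroy() detaches the root and frees every node through mte_destroy_walk(); mtree_destroy() is the locked wrapper. Only tree nodes are freed: anything the stored pointers reference must be released by the caller first, as in this sketch (which assumes kmalloc()ed entries and uses the mt_for_each() helper built on mt_find(), listed below):

#include <linux/maple_tree.h>
#include <linux/limits.h>
#include <linux/slab.h>

static void demo_teardown(struct maple_tree *mt)
{
	unsigned long index = 0;
	void *entry;

	/* Free the pointees before dropping the nodes that hold them. */
	mt_for_each(mt, entry, index, ULONG_MAX)
		kfree(entry);

	/* Takes mtree_lock(), walks and frees all nodes, and resets
	 * ma_flags to 0, leaving the tree empty. */
	mtree_destroy(mt);
}
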
6527 void *mt_find(struct maple_tree *mt, unsigned long *index, unsigned long max) in mt_find() argument
6529 MA_STATE(mas, mt, *index, *index); in mt_find()
6565 if (MT_WARN_ON(mt, (*index) && ((*index) <= copy))) in mt_find()
6587 void *mt_find_after(struct maple_tree *mt, unsigned long *index, in mt_find_after() argument
6593 return mt_find(mt, index, max); in mt_find_after()
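
mt_find() returns the first entry at or above *index up to max and advances *index just past the range it found (the MT_WARN_ON at line 6565 guards the wrap-around case); mt_find_after() simply excludes *index itself. Together they back the mt_for_each() iteration helper. A minimal sketch:

#include <linux/maple_tree.h>
#include <linux/limits.h>
#include <linux/printk.h>

static void demo_iterate(struct maple_tree *mt)
{
	unsigned long index = 0;
	void *entry;

	/* Visits every entry in ascending order; @index is advanced
	 * past each returned range by mt_find()/mt_find_after(). */
	mt_for_each(mt, entry, index, ULONG_MAX)
		pr_info("entry %p found below index %lu\n", entry, index);
}
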
6717 static void mt_dump_node(const struct maple_tree *mt, void *entry,
6757 static void mt_dump_range64(const struct maple_tree *mt, void *entry, in mt_dump_range64() argument
6788 mt_dump_entry(mt_slot(mt, node->slot, i), in mt_dump_range64()
6791 mt_dump_node(mt, mt_slot(mt, node->slot, i), in mt_dump_range64()
6812 static void mt_dump_arange64(const struct maple_tree *mt, void *entry, in mt_dump_arange64() argument
6854 mt_dump_entry(mt_slot(mt, node->slot, i), in mt_dump_arange64()
6857 mt_dump_node(mt, mt_slot(mt, node->slot, i), in mt_dump_arange64()
6871 static void mt_dump_node(const struct maple_tree *mt, void *entry, in mt_dump_node() argument
6889 mt_dump_entry(mt_slot(mt, node->slot, i), in mt_dump_node()
6895 mt_dump_range64(mt, entry, min, max, depth, format); in mt_dump_node()
6898 mt_dump_arange64(mt, entry, min, max, depth, format); in mt_dump_node()
6906 void mt_dump(const struct maple_tree *mt, enum mt_dump_format format) in mt_dump() argument
6908 void *entry = rcu_dereference_check(mt->ma_root, mt_locked(mt)); in mt_dump()
6911 mt, mt->ma_flags, mt_height(mt), entry); in mt_dump()
6915 mt_dump_node(mt, entry, 0, mt_node_max(entry), 0, format); in mt_dump()
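
mt_dump() prints the whole topology to the kernel log, dispatching per node type through mt_dump_node() to the range64/arange64 dumpers above. A hedged sketch, assuming the mt_dump_dec/mt_dump_hex values of enum mt_dump_format and a CONFIG_DEBUG_MAPLE_TREE build:

#include <linux/maple_tree.h>

static void demo_dump(const struct maple_tree *mt)
{
	/* Decimal pivots; pass mt_dump_hex for hexadecimal output. */
	mt_dump(mt, mt_dump_dec);
}
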
6927 enum maple_type mt = mte_node_type(mas->node); in mas_validate_gaps() local
6932 unsigned long *pivots = ma_pivots(node, mt); in mas_validate_gaps()
6935 if (ma_is_dense(mt)) { in mas_validate_gaps()
6948 gaps = ma_gaps(node, mt); in mas_validate_gaps()
6950 p_end = mas_safe_pivot(mas, pivots, i, mt); in mas_validate_gaps()
6978 if (mt == maple_arange_64) { in mas_validate_gaps()
6979 offset = ma_meta_gap(node, mt); in mas_validate_gaps()
7161 static void mt_validate_nulls(struct maple_tree *mt) in mt_validate_nulls() argument
7166 MA_STATE(mas, mt, 0, 0); in mt_validate_nulls()
7182 MT_BUG_ON(mt, !last && !entry); in mt_validate_nulls()
7203 void mt_validate(struct maple_tree *mt) in mt_validate() argument
7207 MA_STATE(mas, mt, 0, 0); in mt_validate()
7227 if (mt_is_alloc(mt)) in mt_validate()
7231 mt_validate_nulls(mt); in mt_validate()
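
mt_validate() is the debug-build consistency check: it walks every node verifying pivot ordering and limits, gap metadata for allocation trees (line 7227), and finally NULL-entry placement via mt_validate_nulls() (line 7231), firing MT_BUG_ON() on the first violation. A minimal sketch of its use, assuming CONFIG_DEBUG_MAPLE_TREE is enabled:

#include <linux/maple_tree.h>

static void demo_check(struct maple_tree *mt)
{
	/* Full-tree walk; intended for debug and test code only. */
	mt_validate(mt);
}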