xref: /openbmc/linux/include/linux/interval_tree.h (revision 5fe93786)
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _LINUX_INTERVAL_TREE_H
#define _LINUX_INTERVAL_TREE_H

#include <linux/rbtree.h>

struct interval_tree_node {
	struct rb_node rb;
	unsigned long start;	/* Start of interval */
	unsigned long last;	/* Last location _in_ interval */
	unsigned long __subtree_last;
};

extern void
interval_tree_insert(struct interval_tree_node *node,
		     struct rb_root_cached *root);

extern void
interval_tree_remove(struct interval_tree_node *node,
		     struct rb_root_cached *root);

extern struct interval_tree_node *
interval_tree_iter_first(struct rb_root_cached *root,
			 unsigned long start, unsigned long last);

extern struct interval_tree_node *
interval_tree_iter_next(struct interval_tree_node *node,
			unsigned long start, unsigned long last);

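/*
 * Illustrative sketch (not part of the original header): a typical caller
 * keeps nodes in an rb_root_cached tree and visits every node overlapping
 * the inclusive range [first, last] with the iter_first/iter_next pair.
 * The identifiers itree, node, cur, first, last and process() below are
 * placeholders invented for this example.
 *
 *	struct rb_root_cached itree = RB_ROOT_CACHED;
 *	struct interval_tree_node node = { .start = 0x1000, .last = 0x1fff };
 *	struct interval_tree_node *cur;
 *
 *	interval_tree_insert(&node, &itree);
 *	for (cur = interval_tree_iter_first(&itree, first, last); cur;
 *	     cur = interval_tree_iter_next(cur, first, last))
 *		process(cur);
 *	interval_tree_remove(&node, &itree);
 */
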
/**
 * struct interval_tree_span_iter - Find used and unused spans.
 * @start_hole: Start of an interval for a hole when is_hole == 1
 * @last_hole: Inclusive end of an interval for a hole when is_hole == 1
 * @start_used: Start of a used interval when is_hole == 0
 * @last_used: Inclusive end of a used interval when is_hole == 0
 * @is_hole: 0 == used, 1 == hole, -1 == iteration done
 *
 * This iterator walks over the spans in an interval tree. It does not return
 * nodes but classifies each span as either a hole, where no nodes intersect,
 * or used, where the span is fully covered by nodes. Each iteration step
 * toggles between hole and used until the entire range is covered. The
 * returned spans always fully cover the requested range.
 *
 * The iterator is greedy: it always returns the largest possible hole or used
 * span, consolidating all consecutive nodes.
 *
 * Use interval_tree_span_iter_done() to detect the end of iteration.
 */
struct interval_tree_span_iter {
	/* private: not for use by the caller */
	struct interval_tree_node *nodes[2];
	unsigned long first_index;
	unsigned long last_index;

	/* public: */
	union {
		unsigned long start_hole;
		unsigned long start_used;
	};
	union {
		unsigned long last_hole;
		unsigned long last_used;
	};
	int is_hole;
};

void interval_tree_span_iter_first(struct interval_tree_span_iter *state,
				   struct rb_root_cached *itree,
				   unsigned long first_index,
				   unsigned long last_index);
void interval_tree_span_iter_advance(struct interval_tree_span_iter *iter,
				     struct rb_root_cached *itree,
				     unsigned long new_index);
void interval_tree_span_iter_next(struct interval_tree_span_iter *state);

static inline bool
interval_tree_span_iter_done(struct interval_tree_span_iter *state)
{
	return state->is_hole == -1;
}

#define interval_tree_for_each_span(span, itree, first_index, last_index)      \
	for (interval_tree_span_iter_first(span, itree,                        \
					   first_index, last_index);           \
	     !interval_tree_span_iter_done(span);                              \
	     interval_tree_span_iter_next(span))

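/*
 * Illustrative sketch (not part of the original header): walking the
 * inclusive range [0, last] and handling holes and used spans differently.
 * The identifiers span, itree, last, handle_hole() and handle_used() are
 * placeholders invented for this example.
 *
 *	struct interval_tree_span_iter span;
 *
 *	interval_tree_for_each_span(&span, &itree, 0, last) {
 *		if (span.is_hole)
 *			handle_hole(span.start_hole, span.last_hole);
 *		else
 *			handle_used(span.start_used, span.last_used);
 *	}
 */
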
#endif	/* _LINUX_INTERVAL_TREE_H */