author	Hauke Mehrtens	2013-10-05 08:36:40 -0500
committer	Hauke Mehrtens	2013-10-23 12:24:40 -0500
commit	ee8018a9dc07a6a94d5f76edf75f558217b9c21a (patch)
tree	33e8189363ec67e1ac645919b841bcb61c35fa54
parent	c303995ba11009c4d49fcf6531122245e602e6b9 (diff)
backports: remove BACKPORT_BUILD_RADIX_HELPERS
This was only needed for some of the DRM drivers; remove it.

Signed-off-by: Hauke Mehrtens <hauke@hauke-m.de>
-rw-r--r--	backport/backport-include/linux/radix-tree.h	218
-rw-r--r--	backport/compat/Kconfig	7
-rw-r--r--	backport/compat/Makefile	1
-rw-r--r--	backport/compat/lib-radix-tree-helpers.c	266
4 files changed, 0 insertions, 492 deletions
diff --git a/backport/backport-include/linux/radix-tree.h b/backport/backport-include/linux/radix-tree.h
deleted file mode 100644
index 6ca7c052..00000000
--- a/backport/backport-include/linux/radix-tree.h
+++ /dev/null
@@ -1,218 +0,0 @@
-/*
- * Copyright (C) 2013 Konstantin Khlebnikov
- * Copyright (C) 2013 Luis R. Rodriguez <mcgrof@do-not-panic.com>
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License as
- * published by the Free Software Foundation; either version 2, or (at
- * your option) any later version.
- *
- */
-#ifndef BACKPORT_LINUX_RADIX_TREE_H
-#define BACKPORT_LINUX_RADIX_TREE_H
-
-#include_next <linux/radix-tree.h>
-
-#ifdef CPTCFG_BACKPORT_BUILD_RADIX_HELPERS
-
-/**
- * struct radix_tree_iter - radix tree iterator state
- *
- * @index: index of current slot
- * @next_index: next-to-last index for this chunk
- * @tags: bit-mask for tag-iterating
- *
- * This radix tree iterator works in terms of "chunks" of slots. A chunk is a
- * subinterval of slots contained within one radix tree leaf node. It is
- * described by a pointer to its first slot and a struct radix_tree_iter
- * which holds the chunk's position in the tree and its size. For tagged
- * iteration radix_tree_iter also holds the slots' bit-mask for one chosen
- * radix tree tag.
- */
-struct radix_tree_iter {
-	unsigned long index;
-	unsigned long next_index;
-	unsigned long tags;
-};
-
-#define RADIX_TREE_ITER_TAG_MASK 0x00FF	/* tag index in lower byte */
-#define RADIX_TREE_ITER_TAGGED 0x0100	/* lookup tagged slots */
-#define RADIX_TREE_ITER_CONTIG 0x0200	/* stop at first hole */
-
-/**
- * radix_tree_iter_init - initialize radix tree iterator
- *
- * @iter: pointer to iterator state
- * @start: iteration starting index
- * Returns: NULL
- */
-static __always_inline void **
-radix_tree_iter_init(struct radix_tree_iter *iter, unsigned long start)
-{
-	/*
-	 * Leave iter->tags uninitialized. radix_tree_next_chunk() will fill it
-	 * in the case of a successful tagged chunk lookup. If the lookup was
-	 * unsuccessful or non-tagged then nobody cares about ->tags.
-	 *
-	 * Set index to zero to bypass next_index overflow protection.
-	 * See the comment in radix_tree_next_chunk() for details.
-	 */
-	iter->index = 0;
-	iter->next_index = start;
-	return NULL;
-}
-
-/**
- * radix_tree_next_chunk - find next chunk of slots for iteration
- *
- * @root: radix tree root
- * @iter: iterator state
- * @flags: RADIX_TREE_ITER_* flags and tag index
- * Returns: pointer to chunk first slot, or NULL if there are no more left
- *
- * This function looks up the next chunk in the radix tree starting from
- * @iter->next_index. It returns a pointer to the chunk's first slot.
- * Also it fills @iter with data about chunk: position in the tree (index),
- * its end (next_index), and constructs a bit mask for tagged iterating (tags).
- */
-void **radix_tree_next_chunk(struct radix_tree_root *root,
-			     struct radix_tree_iter *iter, unsigned flags);
-
-/**
- * radix_tree_chunk_size - get current chunk size
- *
- * @iter: pointer to radix tree iterator
- * Returns: current chunk size
- */
-static __always_inline unsigned
-radix_tree_chunk_size(struct radix_tree_iter *iter)
-{
-	return iter->next_index - iter->index;
-}
-
-/**
- * radix_tree_next_slot - find next slot in chunk
- *
- * @slot: pointer to current slot
- * @iter: pointer to iterator state
- * @flags: RADIX_TREE_ITER_*, should be constant
- * Returns: pointer to next slot, or NULL if there are no more left
- *
- * This function updates @iter->index in the case of a successful lookup.
- * For tagged lookup it also eats @iter->tags.
- */
-static __always_inline void **
-radix_tree_next_slot(void **slot, struct radix_tree_iter *iter, unsigned flags)
-{
-	if (flags & RADIX_TREE_ITER_TAGGED) {
-		iter->tags >>= 1;
-		if (likely(iter->tags & 1ul)) {
-			iter->index++;
-			return slot + 1;
-		}
-		if (!(flags & RADIX_TREE_ITER_CONTIG) && likely(iter->tags)) {
-			unsigned offset = __ffs(iter->tags);
-
-			iter->tags >>= offset;
-			iter->index += offset + 1;
-			return slot + offset + 1;
-		}
-	} else {
-		unsigned size = radix_tree_chunk_size(iter) - 1;
-
-		while (size--) {
-			slot++;
-			iter->index++;
-			if (likely(*slot))
-				return slot;
-			if (flags & RADIX_TREE_ITER_CONTIG) {
-				/* forbid switching to the next chunk */
-				iter->next_index = 0;
-				break;
-			}
-		}
-	}
-	return NULL;
-}
-
-/**
- * radix_tree_for_each_chunk - iterate over chunks
- *
- * @slot: the void** variable for pointer to chunk first slot
- * @root: the struct radix_tree_root pointer
- * @iter: the struct radix_tree_iter pointer
- * @start: iteration starting index
- * @flags: RADIX_TREE_ITER_* and tag index
- *
- * Locks can be released and reacquired between iterations.
- */
-#define radix_tree_for_each_chunk(slot, root, iter, start, flags) \
-	for (slot = radix_tree_iter_init(iter, start) ; \
-	     (slot = radix_tree_next_chunk(root, iter, flags)) ;)
-
-/**
- * radix_tree_for_each_chunk_slot - iterate over slots in one chunk
- *
- * @slot: the void** variable, at the beginning points to chunk first slot
- * @iter: the struct radix_tree_iter pointer
- * @flags: RADIX_TREE_ITER_*, should be constant
- *
- * This macro is designed to be nested inside radix_tree_for_each_chunk().
- * @slot points to the radix tree slot, @iter->index contains its index.
- */
-#define radix_tree_for_each_chunk_slot(slot, iter, flags) \
-	for (; slot ; slot = radix_tree_next_slot(slot, iter, flags))
-
-/**
- * radix_tree_for_each_slot - iterate over non-empty slots
- *
- * @slot: the void** variable for pointer to slot
- * @root: the struct radix_tree_root pointer
- * @iter: the struct radix_tree_iter pointer
- * @start: iteration starting index
- *
- * @slot points to radix tree slot, @iter->index contains its index.
- */
-#define radix_tree_for_each_slot(slot, root, iter, start) \
-	for (slot = radix_tree_iter_init(iter, start) ; \
-	     slot || (slot = radix_tree_next_chunk(root, iter, 0)) ; \
-	     slot = radix_tree_next_slot(slot, iter, 0))
-
-/**
- * radix_tree_for_each_contig - iterate over contiguous slots
- *
- * @slot: the void** variable for pointer to slot
- * @root: the struct radix_tree_root pointer
- * @iter: the struct radix_tree_iter pointer
- * @start: iteration starting index
- *
- * @slot points to radix tree slot, @iter->index contains its index.
- */
-#define radix_tree_for_each_contig(slot, root, iter, start) \
-	for (slot = radix_tree_iter_init(iter, start) ; \
-	     slot || (slot = radix_tree_next_chunk(root, iter, \
-				RADIX_TREE_ITER_CONTIG)) ; \
-	     slot = radix_tree_next_slot(slot, iter, \
-				RADIX_TREE_ITER_CONTIG))
-
-/**
- * radix_tree_for_each_tagged - iterate over tagged slots
- *
- * @slot: the void** variable for pointer to slot
- * @root: the struct radix_tree_root pointer
- * @iter: the struct radix_tree_iter pointer
- * @start: iteration starting index
- * @tag: tag index
- *
- * @slot points to radix tree slot, @iter->index contains its index.
- */
-#define radix_tree_for_each_tagged(slot, root, iter, start, tag) \
-	for (slot = radix_tree_iter_init(iter, start) ; \
-	     slot || (slot = radix_tree_next_chunk(root, iter, \
-				RADIX_TREE_ITER_TAGGED | tag)) ; \
-	     slot = radix_tree_next_slot(slot, iter, \
-				RADIX_TREE_ITER_TAGGED))
-
-#endif /* CPTCFG_BACKPORT_BUILD_RADIX_HELPERS */
-
-#endif /* BACKPORT_LINUX_RADIX_TREE_H */
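
For context, a minimal usage sketch of the iterator API carried by the deleted header (not part of this commit; my_tree, struct my_item and walk_all_slots are hypothetical names, and the kernel is assumed to provide the upstream radix-tree iterator API that these helpers backported):

#include <linux/radix-tree.h>

/* Hypothetical item type and tree, for illustration only. */
struct my_item {
	unsigned long id;
};

static RADIX_TREE(my_tree, GFP_KERNEL);

static void walk_all_slots(void)
{
	struct radix_tree_iter iter;
	void **slot;

	rcu_read_lock();
	/* Visit every non-empty slot, starting at index 0. */
	radix_tree_for_each_slot(slot, &my_tree, &iter, 0) {
		struct my_item *item = radix_tree_deref_slot(slot);

		pr_info("index %lu -> item %lu\n", iter.index, item->id);
	}
	rcu_read_unlock();
}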
diff --git a/backport/compat/Kconfig b/backport/compat/Kconfig
index e2f0cdd0..215de8e5 100644
--- a/backport/compat/Kconfig
+++ b/backport/compat/Kconfig
@@ -185,13 +185,6 @@ config BACKPORT_LEDS_CLASS
 config BACKPORT_LEDS_TRIGGERS
 	bool
 
-config BACKPORT_BUILD_RADIX_HELPERS
-	bool
-	# You have selected to build backported DRM drivers
-	# Build only if on < 3.4
-	depends on DRM && BACKPORT_KERNEL_3_4
-	default y if BACKPORT_USERSEL_BUILD_ALL
-
 config BACKPORT_USERSEL_BUILD_ALL
 	bool "Build all compat code"
 	help
diff --git a/backport/compat/Makefile b/backport/compat/Makefile
index a74507e4..2d4928cc 100644
--- a/backport/compat/Makefile
+++ b/backport/compat/Makefile
@@ -40,4 +40,3 @@ compat-$(CPTCFG_BACKPORT_KERNEL_3_12) += backport-3.12.o
 compat-$(CPTCFG_BACKPORT_BUILD_KFIFO) += kfifo.o
 compat-$(CPTCFG_BACKPORT_BUILD_GENERIC_ATOMIC64) += compat_atomic.o
 compat-$(CPTCFG_BACKPORT_BUILD_DMA_SHARED_HELPERS) += dma-shared-helpers.o
-compat-$(CPTCFG_BACKPORT_BUILD_RADIX_HELPERS) += lib-radix-tree-helpers.o
diff --git a/backport/compat/lib-radix-tree-helpers.c b/backport/compat/lib-radix-tree-helpers.c
deleted file mode 100644
index 837de046..00000000
--- a/backport/compat/lib-radix-tree-helpers.c
+++ /dev/null
@@ -1,266 +0,0 @@
-/*
- * Copyright (C) 2013 Konstantin Khlebnikov
- * Copyright (c) 2013 Luis R. Rodriguez <mcgrof@do-not-panic.com>
- *
- * Backports radix_tree_next_chunk()
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License as
- * published by the Free Software Foundation; either version 2, or (at
- * your option) any later version.
- */
-
-#include <linux/errno.h>
-#include <linux/init.h>
-#include <linux/kernel.h>
-#include <linux/export.h>
-#include <linux/radix-tree.h>
-#include <linux/percpu.h>
-#include <linux/slab.h>
-#include <linux/notifier.h>
-#include <linux/cpu.h>
-#include <linux/string.h>
-#include <linux/bitops.h>
-#include <linux/rcupdate.h>
-
-#ifdef __KERNEL__
-#define RADIX_TREE_MAP_SHIFT	(CONFIG_BASE_SMALL ? 4 : 6)
-#else
-#define RADIX_TREE_MAP_SHIFT	3	/* For more stressful testing */
-#endif
-
-#define RADIX_TREE_MAP_SIZE	(1UL << RADIX_TREE_MAP_SHIFT)
-#define RADIX_TREE_MAP_MASK	(RADIX_TREE_MAP_SIZE-1)
-
-#define RADIX_TREE_TAG_LONGS	\
-	((RADIX_TREE_MAP_SIZE + BITS_PER_LONG - 1) / BITS_PER_LONG)
-
-struct radix_tree_node {
-	unsigned int	height;		/* Height from the bottom */
-	unsigned int	count;
-	union {
-		struct radix_tree_node *parent;	/* Used when ascending tree */
-		struct rcu_head	rcu_head;	/* Used when freeing node */
-	};
-	void __rcu	*slots[RADIX_TREE_MAP_SIZE];
-	unsigned long	tags[RADIX_TREE_MAX_TAGS][RADIX_TREE_TAG_LONGS];
-};
-
-static inline void *ptr_to_indirect(void *ptr)
-{
-	return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR);
-}
-
-static inline void *indirect_to_ptr(void *ptr)
-{
-	return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR);
-}
-
-static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
-{
-	return root->gfp_mask & __GFP_BITS_MASK;
-}
-
-static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
-		int offset)
-{
-	__set_bit(offset, node->tags[tag]);
-}
-
-static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
-		int offset)
-{
-	__clear_bit(offset, node->tags[tag]);
-}
-
-static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
-		int offset)
-{
-	return test_bit(offset, node->tags[tag]);
-}
-
-static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
-{
-	root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
-}
-
-static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag)
-{
-	root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
-}
-
-static inline void root_tag_clear_all(struct radix_tree_root *root)
-{
-	root->gfp_mask &= __GFP_BITS_MASK;
-}
-
-static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
-{
-	return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
-}
-
-/*
- * Returns 1 if any slot in the node has this tag set.
- * Otherwise returns 0.
- */
-static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
-{
-	int idx;
-	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
-		if (node->tags[tag][idx])
-			return 1;
-	}
-	return 0;
-}
-
-/**
- * radix_tree_find_next_bit - find the next set bit in a memory region
- *
- * @addr: The address to base the search on
- * @size: The bitmap size in bits
- * @offset: The bitnumber to start searching at
- *
- * Unrollable variant of find_next_bit() for constant size arrays.
- * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
- * Returns next bit offset, or size if nothing found.
- */
-static __always_inline unsigned long
-radix_tree_find_next_bit(const unsigned long *addr,
-			 unsigned long size, unsigned long offset)
-{
-	if (!__builtin_constant_p(size))
-		return find_next_bit(addr, size, offset);
-
-	if (offset < size) {
-		unsigned long tmp;
-
-		addr += offset / BITS_PER_LONG;
-		tmp = *addr >> (offset % BITS_PER_LONG);
-		if (tmp)
-			return __ffs(tmp) + offset;
-		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
-		while (offset < size) {
-			tmp = *++addr;
-			if (tmp)
-				return __ffs(tmp) + offset;
-			offset += BITS_PER_LONG;
-		}
-	}
-	return size;
-}
-
-/**
- * radix_tree_next_chunk - find next chunk of slots for iteration
- *
- * @root: radix tree root
- * @iter: iterator state
- * @flags: RADIX_TREE_ITER_* flags and tag index
- * Returns: pointer to chunk first slot, or NULL if iteration is over
- */
-void **radix_tree_next_chunk(struct radix_tree_root *root,
-			     struct radix_tree_iter *iter, unsigned flags)
-{
-	unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
-	struct radix_tree_node *rnode, *node;
-	unsigned long index, offset;
-
-	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
-		return NULL;
-
-	/*
-	 * Catch next_index overflow after ~0UL. iter->index never overflows
-	 * during iterating; it can be zero only at the beginning.
-	 * And we cannot overflow iter->next_index in a single step,
-	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
-	 *
-	 * This condition is also used by radix_tree_next_slot() to stop
-	 * contiguous iterating, and forbids switching to the next chunk.
178 */
179 index = iter->next_index;
180 if (!index && iter->index)
181 return NULL;
182
183 rnode = rcu_dereference_raw(root->rnode);
184 if (radix_tree_is_indirect_ptr(rnode)) {
185 rnode = indirect_to_ptr(rnode);
186 } else if (rnode && !index) {
187 /* Single-slot tree */
188 iter->index = 0;
189 iter->next_index = 1;
190 iter->tags = 1;
191 return (void **)&root->rnode;
192 } else
193 return NULL;
194
195restart:
196 shift = (rnode->height - 1) * RADIX_TREE_MAP_SHIFT;
197 offset = index >> shift;
198
199 /* Index outside of the tree */
200 if (offset >= RADIX_TREE_MAP_SIZE)
201 return NULL;
202
203 node = rnode;
204 while (1) {
205 if ((flags & RADIX_TREE_ITER_TAGGED) ?
206 !test_bit(offset, node->tags[tag]) :
207 !node->slots[offset]) {
208 /* Hole detected */
209 if (flags & RADIX_TREE_ITER_CONTIG)
210 return NULL;
211
212 if (flags & RADIX_TREE_ITER_TAGGED)
213 offset = radix_tree_find_next_bit(
214 node->tags[tag],
215 RADIX_TREE_MAP_SIZE,
216 offset + 1);
217 else
218 while (++offset < RADIX_TREE_MAP_SIZE) {
219 if (node->slots[offset])
220 break;
221 }
222 index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
223 index += offset << shift;
224 /* Overflow after ~0UL */
225 if (!index)
226 return NULL;
227 if (offset == RADIX_TREE_MAP_SIZE)
228 goto restart;
229 }
230
231 /* This is leaf-node */
232 if (!shift)
233 break;
234
235 node = rcu_dereference_raw(node->slots[offset]);
236 if (node == NULL)
237 goto restart;
238 shift -= RADIX_TREE_MAP_SHIFT;
239 offset = (index >> shift) & RADIX_TREE_MAP_MASK;
240 }
241
242 /* Update the iterator state */
243 iter->index = index;
244 iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;
245
246 /* Construct iter->tags bit-mask from node->tags[tag] array */
247 if (flags & RADIX_TREE_ITER_TAGGED) {
248 unsigned tag_long, tag_bit;
249
250 tag_long = offset / BITS_PER_LONG;
251 tag_bit = offset % BITS_PER_LONG;
252 iter->tags = node->tags[tag][tag_long] >> tag_bit;
253 /* This never happens if RADIX_TREE_TAG_LONGS == 1 */
254 if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
255 /* Pick tags from next element */
256 if (tag_bit)
257 iter->tags |= node->tags[tag][tag_long + 1] <<
258 (BITS_PER_LONG - tag_bit);
259 /* Clip chunk size, here only BITS_PER_LONG tags */
260 iter->next_index = index + BITS_PER_LONG;
261 }
262 }
263
264 return node->slots + offset;
265}
266EXPORT_SYMBOL_GPL(radix_tree_next_chunk);
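
And a similarly hedged sketch of the chunked iteration that the removed radix_tree_next_chunk() backport enabled, nesting radix_tree_for_each_chunk_slot() inside radix_tree_for_each_chunk() as the kerneldoc above describes (illustrative only; my_tree is the same hypothetical tree as in the earlier sketch):

static void walk_in_chunks(void)
{
	struct radix_tree_iter iter;
	void **slot;

	rcu_read_lock();
	/* The outer loop fetches one leaf-node chunk at a time ... */
	radix_tree_for_each_chunk(slot, &my_tree, &iter, 0, 0) {
		/* ... the inner loop walks the non-empty slots of that chunk. */
		radix_tree_for_each_chunk_slot(slot, &iter, 0) {
			pr_info("index %lu -> %p\n",
				iter.index, radix_tree_deref_slot(slot));
		}
	}
	rcu_read_unlock();
}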