Matthew Wilcox wrote:
+/**
+ * xb_find_set - find the next set bit in a range of bits
+ * @xb: the xbitmap to search from
+ * @size: the size of the range
+ * @offset: the offset in the range to start searching
+ *
+ * Returns: the found bit, or @size if no set bit is found.
+ */
+unsigned long xb_find_set(struct xb *xb, unsigned long size,
+			  unsigned long offset)
+{
+	struct radix_tree_root *root = &xb->xbrt;
+	struct radix_tree_node *node;
+	void __rcu **slot;
+	struct ida_bitmap *bitmap;
+	unsigned long index = offset / IDA_BITMAP_BITS;
+	unsigned long index_end = size / IDA_BITMAP_BITS;
+	unsigned long bit = offset % IDA_BITMAP_BITS;
+
+	if (unlikely(offset >= size))
+		return size;
+
+	while (index <= index_end) {
+		unsigned long ret;
+		unsigned int nbits = size - index * IDA_BITMAP_BITS;
+
+		bitmap = __radix_tree_lookup(root, index, &node, &slot);
+
+		if (!node && !bitmap)
+			return size;
+
+		if (bitmap) {
+			if (nbits > IDA_BITMAP_BITS)
+				nbits = IDA_BITMAP_BITS;
+
+			ret = find_next_bit(bitmap->bitmap, nbits, bit);
+			if (ret != nbits)
+				return ret + index * IDA_BITMAP_BITS;
+		}
+		bit = 0;
+		index++;
+	}
+
+	return size;
+}
+EXPORT_SYMBOL(xb_find_set);

This is going to be slower than the implementation I sent yesterday. If I call:

	xb_init(xb);
	xb_set_bit(xb, ULONG_MAX);
	xb_find_set(xb, ULONG_MAX, 0);

it's going to call __radix_tree_lookup() 16 quadrillion times.
My implementation will walk the tree precisely once.

Yes, Wei's patch still cannot work: the loop probes the tree one IDA_BITMAP_BITS-sized index at a time, so a lone bit set at ULONG_MAX forces it to walk essentially the whole index space. We should start reviewing Matthew's implementation.
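
For reference, here is a minimal sketch of the kind of single-walk lookup Matthew describes, written against the same struct xb / struct ida_bitmap layout as the patch above and using the stock radix_tree_for_each_slot() iterator from <linux/radix-tree.h>. It is only an illustration of the idea, not Matthew's actual implementation; the function name is made up and, like the patch above, it assumes every populated slot holds a struct ida_bitmap:

	/*
	 * Illustrative sketch only: let the radix tree iterator jump from one
	 * populated ida_bitmap to the next instead of probing every index.
	 */
	unsigned long xb_find_set_sketch(struct xb *xb, unsigned long size,
					 unsigned long offset)
	{
		struct radix_tree_iter iter;
		void __rcu **slot;
		unsigned long start = offset / IDA_BITMAP_BITS;

		if (unlikely(offset >= size))
			return size;

		radix_tree_for_each_slot(slot, &xb->xbrt, &iter, start) {
			struct ida_bitmap *bitmap = radix_tree_deref_slot(slot);
			unsigned long base = iter.index * IDA_BITMAP_BITS;
			unsigned long bit = 0, nbits, ret;

			if (base >= size)
				break;
			if (iter.index == start)
				bit = offset % IDA_BITMAP_BITS;
			nbits = min(size - base, (unsigned long)IDA_BITMAP_BITS);
			ret = find_next_bit(bitmap->bitmap, nbits, bit);
			if (ret < nbits)
				return base + ret;
		}
		return size;
	}

With only the bit at ULONG_MAX set, the loop body above runs only for the populated slots, because the iterator skips the empty parts of the tree instead of probing every possible index.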