Here, when the result is LT_ZERO, you will saturate the r->refs.counter.

+static __always_inline __must_check
+int __refcount_add_unless(refcount_t *r, int a, int u)
+{
+ int c, new;
+
+ c = atomic_read(&(r->refs));
+ do {
+ if (unlikely(c == u))
+ break;
+
+ asm volatile("addl %2,%0\n\t"
+ REFCOUNT_CHECK_LT_ZERO
+ : "=r" (new)
+ : "0" (c), "ir" (a),
+ [counter] "m" (r->refs.counter)
+ : "cc", "cx");
+
+ } while (!atomic_try_cmpxchg(&(r->refs), &c, new));
+
+ return c;
+}
+
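For reference, here is a rough plain-C sketch of what the asm plus its exception fixup amount to. It is illustrative only: struct refcount_sketch and refcount_add_unless_sketch are made-up names, C11 atomics stand in for the kernel's atomic_t / atomic_try_cmpxchg(), and the open-coded store of INT_MIN / 2 approximates what the REFCOUNT_CHECK_LT_ZERO fixup handler writes into r->refs.counter when the addition goes negative.

#include <limits.h>
#include <stdatomic.h>
#include <stdio.h>

struct refcount_sketch {
        _Atomic int counter;
};

/* Roughly: add 'a' to the counter unless it currently equals 'u';
 * return the value observed before the attempted update. */
static int refcount_add_unless_sketch(struct refcount_sketch *r, int a, int u)
{
        int c = atomic_load(&r->counter);
        int new;

        do {
                if (c == u)             /* matching value: refuse to add */
                        break;

                new = c + a;
                if (new < 0) {
                        /*
                         * The asm version traps here (js -> UD2) and its
                         * fixup saturates the counter in memory, regardless
                         * of whether the cmpxchg below ever succeeds.
                         */
                        atomic_store(&r->counter, INT_MIN / 2);
                }
                /* try_cmpxchg semantics: on failure, c is reloaded from memory. */
        } while (!atomic_compare_exchange_strong(&r->counter, &c, new));

        return c;               /* old value, as seen before the update */
}

int main(void)
{
        struct refcount_sketch r = { .counter = 1 };
        int old = refcount_add_unless_sketch(&r, 1, 0);

        /* With u == 0 this behaves like refcount_add_not_zero(). */
        printf("old = %d, now = %d\n", old, atomic_load(&r.counter));
        return 0;
}

Note that in both the sketch and the real asm, the saturating store happens as a side effect of the LT_ZERO check itself, before the cmpxchg commits anything.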