@@ -660,6 +660,7 @@ look_up_lock_class(struct lockdep_map *lock, unsigned int subclass)
 	struct lockdep_subclass_key *key;
 	struct hlist_head *hash_head;
 	struct lock_class *class;
+	bool is_static = false;
 
 	if (unlikely(subclass >= MAX_LOCKDEP_SUBCLASSES)) {
 		debug_locks_off();
@@ -673,10 +674,23 @@ look_up_lock_class(struct lockdep_map *lock, unsigned int subclass)
 
 	/*
 	 * Static locks do not have their class-keys yet - for them the key
-	 * is the lock object itself:
+	 * is the lock object itself. If the lock is in the per cpu area,
+	 * the canonical address of the lock (per cpu offset removed) is
+	 * used.
 	 */
-	if (unlikely(!lock->key))
-		lock->key = (void *)lock;
+	if (unlikely(!lock->key)) {
+		unsigned long can_addr, addr = (unsigned long)lock;
+
+		if (__is_kernel_percpu_address(addr, &can_addr))
+			lock->key = (void *)can_addr;
+		else if (__is_module_percpu_address(addr, &can_addr))
+			lock->key = (void *)can_addr;
+		else if (static_obj(lock))
+			lock->key = (void *)lock;
+		else
+			return ERR_PTR(-EINVAL);
+		is_static = true;
+	}
 
 	/*
 	 * NOTE: the class-key must be unique. For dynamic locks, a static
@@ -708,7 +722,7 @@ look_up_lock_class(struct lockdep_map *lock, unsigned int subclass)
 		}
 	}
 
-	return NULL;
+	return is_static || static_obj(lock->key) ? NULL : ERR_PTR(-EINVAL);
 }
 
 /*
@@ -726,19 +740,18 @@ register_lock_class(struct lockdep_map *lock, unsigned int subclass, int force)
 	DEBUG_LOCKS_WARN_ON(!irqs_disabled());
 
 	class = look_up_lock_class(lock, subclass);
-	if (likely(class))
+	if (likely(!IS_ERR_OR_NULL(class)))
 		goto out_set_class_cache;
 
 	/*
 	 * Debug-check: all keys must be persistent!
-	 */
-	if (!static_obj(lock->key)) {
+	 */
+	if (IS_ERR(class)) {
 		debug_locks_off();
 		printk("INFO: trying to register non-static key.\n");
 		printk("the code is fine but needs lockdep annotation.\n");
 		printk("turning off the locking correctness validator.\n");
 		dump_stack();
-
 		return NULL;
 	}
 
@@ -3419,7 +3432,7 @@ static int match_held_lock(struct held_lock *hlock, struct lockdep_map *lock)
 	 * Clearly if the lock hasn't been acquired _ever_, we're not
 	 * holding it either, so report failure.
 	 */
-	if (!class)
+	if (IS_ERR_OR_NULL(class))
 		return 0;
 
 	/*
@@ -4225,7 +4238,7 @@ void lockdep_reset_lock(struct lockdep_map *lock)
 		 * If the class exists we look it up and zap it:
 		 */
 		class = look_up_lock_class(lock, j);
-		if (class)
+		if (!IS_ERR_OR_NULL(class))
 			zap_class(class);
 	}
 	/*
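
For readers less familiar with the error-pointer idiom the new return contract of look_up_lock_class() relies on, here is a minimal userspace sketch. The ERR_PTR()/PTR_ERR()/IS_ERR()/IS_ERR_OR_NULL() helpers below are simplified copies of the real macros in include/linux/err.h, written out so the example compiles on its own, and fake_look_up() is a made-up stand-in for look_up_lock_class(). The point is only to show why callers such as register_lock_class(), match_held_lock() and lockdep_reset_lock() now have to distinguish "no class registered yet" (NULL) from "invalid, non-static key" (ERR_PTR(-EINVAL)) instead of doing a plain NULL check.

/*
 * Userspace illustration only: the real macros live in
 * include/linux/err.h, and fake_look_up() is a hypothetical
 * stand-in for look_up_lock_class().
 */
#include <errno.h>
#include <stdio.h>

#define MAX_ERRNO	4095
#define IS_ERR_VALUE(x)	((unsigned long)(x) >= (unsigned long)-MAX_ERRNO)

static inline void *ERR_PTR(long error)		{ return (void *)error; }
static inline long PTR_ERR(const void *ptr)	{ return (long)ptr; }
static inline int IS_ERR(const void *ptr)	{ return IS_ERR_VALUE((unsigned long)ptr); }
static inline int IS_ERR_OR_NULL(const void *ptr) { return !ptr || IS_ERR(ptr); }

/*
 * Three possible outcomes, mirroring the patched look_up_lock_class():
 *  - valid pointer     : class already registered
 *  - NULL              : key is fine, but no class exists yet
 *  - ERR_PTR(-EINVAL)  : key is not a static object, report and bail out
 */
static void *fake_look_up(int mode)
{
	static int dummy_class;

	switch (mode) {
	case 0:  return &dummy_class;
	case 1:  return NULL;
	default: return ERR_PTR(-EINVAL);
	}
}

int main(void)
{
	for (int mode = 0; mode < 3; mode++) {
		void *class = fake_look_up(mode);

		if (!IS_ERR_OR_NULL(class))
			printf("mode %d: found class %p\n", mode, class);
		else if (IS_ERR(class))
			printf("mode %d: non-static key, error %ld\n", mode, PTR_ERR(class));
		else
			printf("mode %d: no class yet, would register one\n", mode);
	}
	return 0;
}

Encoding the failure inside the returned pointer keeps look_up_lock_class()'s signature unchanged while still letting register_lock_class() print its "trying to register non-static key" diagnostic only for genuinely bad keys, rather than for locks that simply have not been registered yet.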