@@ -1104,7 +1104,7 @@ extern void __error_too_big_MAX_LOCKDEP_SUBCLASSES(void);
  * itself, so actual lookup of the hash should be once per lock object.
  */
 static inline struct lock_class *
-register_lock_class(struct lockdep_map *lock, unsigned int subclass)
+look_up_lock_class(struct lockdep_map *lock, unsigned int subclass)
 {
 	struct lockdep_subclass_key *key;
 	struct list_head *hash_head;
@@ -1148,7 +1148,26 @@ register_lock_class(struct lockdep_map *lock, unsigned int subclass)
 	 */
 	list_for_each_entry(class, hash_head, hash_entry)
 		if (class->key == key)
-			goto out_set;
+			return class;
+
+	return NULL;
+}
+
+/*
+ * Register a lock's class in the hash-table, if the class is not present
+ * yet. Otherwise we look it up. We cache the result in the lock object
+ * itself, so actual lookup of the hash should be once per lock object.
+ */
+static inline struct lock_class *
+register_lock_class(struct lockdep_map *lock, unsigned int subclass)
+{
+	struct lockdep_subclass_key *key;
+	struct list_head *hash_head;
+	struct lock_class *class;
+
+	class = look_up_lock_class(lock, subclass);
+	if (likely(class))
+		return class;
 
 	/*
 	 * Debug-check: all keys must be persistent!
@@ -1163,6 +1182,9 @@ register_lock_class(struct lockdep_map *lock, unsigned int subclass)
 		return NULL;
 	}
 
+	key = lock->key->subkeys + subclass;
+	hash_head = classhashentry(key);
+
 	__raw_spin_lock(&hash_lock);
 	/*
 	 * We have to do the hash-walk again, to avoid races
@@ -1209,8 +1231,8 @@ register_lock_class(struct lockdep_map *lock, unsigned int subclass)
 out_unlock_set:
 	__raw_spin_unlock(&hash_lock);
 
-out_set:
-	lock->class[subclass] = class;
+	if (!subclass)
+		lock->class_cache = class;
 
 	DEBUG_LOCKS_WARN_ON(class->subclass != subclass);
 
@@ -1914,7 +1936,7 @@ void lockdep_init_map(struct lockdep_map *lock, const char *name,
 	}
 	lock->name = name;
 	lock->key = key;
-	memset(lock->class, 0, sizeof(lock->class[0])*MAX_LOCKDEP_SUBCLASSES);
+	lock->class_cache = NULL;
 }
 
 EXPORT_SYMBOL_GPL(lockdep_init_map);
@@ -1928,8 +1950,8 @@ static int __lock_acquire(struct lockdep_map *lock, unsigned int subclass,
 			  unsigned long ip)
 {
 	struct task_struct *curr = current;
+	struct lock_class *class = NULL;
 	struct held_lock *hlock;
-	struct lock_class *class;
 	unsigned int depth, id;
 	int chain_head = 0;
 	u64 chain_key;
@@ -1947,8 +1969,11 @@ static int __lock_acquire(struct lockdep_map *lock, unsigned int subclass,
 		return 0;
 	}
 
-	class = lock->class[subclass];
-	/* not cached yet? */
+	if (!subclass)
+		class = lock->class_cache;
+	/*
+	 * Not cached yet or subclass?
+	 */
 	if (unlikely(!class)) {
 		class = register_lock_class(lock, subclass);
 		if (!class)
@@ -2449,48 +2474,44 @@ void lockdep_free_key_range(void *start, unsigned long size)
 
 void lockdep_reset_lock(struct lockdep_map *lock)
 {
-	struct lock_class *class, *next, *entry;
+	struct lock_class *class, *next;
 	struct list_head *head;
 	unsigned long flags;
 	int i, j;
 
 	raw_local_irq_save(flags);
-	__raw_spin_lock(&hash_lock);
 
 	/*
-	 * Remove all classes this lock has:
+	 * Remove all classes this lock might have:
+	 */
+	for (j = 0; j < MAX_LOCKDEP_SUBCLASSES; j++) {
+		/*
+		 * If the class exists we look it up and zap it:
+		 */
+		class = look_up_lock_class(lock, j);
+		if (class)
+			zap_class(class);
+	}
+	/*
+	 * Debug check: in the end all mapped classes should
+	 * be gone.
 	 */
+	__raw_spin_lock(&hash_lock);
 	for (i = 0; i < CLASSHASH_SIZE; i++) {
 		head = classhash_table + i;
 		if (list_empty(head))
 			continue;
 		list_for_each_entry_safe(class, next, head, hash_entry) {
-			for (j = 0; j < MAX_LOCKDEP_SUBCLASSES; j++) {
-				entry = lock->class[j];
-				if (class == entry) {
-					zap_class(class);
-					lock->class[j] = NULL;
-					break;
-				}
+			if (unlikely(class == lock->class_cache)) {
+				__raw_spin_unlock(&hash_lock);
+				DEBUG_LOCKS_WARN_ON(1);
+				goto out_restore;
 			}
 		}
 	}
-
-	/*
-	 * Debug check: in the end all mapped classes should
-	 * be gone.
-	 */
-	for (j = 0; j < MAX_LOCKDEP_SUBCLASSES; j++) {
-		entry = lock->class[j];
-		if (!entry)
-			continue;
-		__raw_spin_unlock(&hash_lock);
-		DEBUG_LOCKS_WARN_ON(1);
-		raw_local_irq_restore(flags);
-		return;
-	}
-
 	__raw_spin_unlock(&hash_lock);
+
+out_restore:
 	raw_local_irq_restore(flags);
 }
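
For reference, the shape of the change -- a plain look_up pass on the fast path, a re-check under the lock before inserting on the slow path, and a single cached class pointer stored in the map itself -- can be sketched outside the kernel. The snippet below is a user-space analogue for illustration only, not part of the patch: every name in it (my_class, my_map, hashfn, ...) is invented for the example, and it deliberately ignores the memory-ordering details the real lockdep hash relies on.

/*
 * Illustrative sketch only -- not part of the patch above. It mirrors the
 * look_up_lock_class()/register_lock_class() split: a hash lookup on the
 * fast path, a re-check under the lock before inserting on the slow path,
 * and one class_cache-style pointer in the map. All names are invented,
 * and the kernel's memory-ordering guarantees are not modelled here.
 */
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

#define HASH_SIZE 64

struct my_class {
	const void *key;
	struct my_class *next;
};

struct my_map {
	const void *key;
	struct my_class *class_cache;	/* one cached slot, like lock->class_cache */
};

static struct my_class *hash_table[HASH_SIZE];
static pthread_mutex_t hash_lock = PTHREAD_MUTEX_INITIALIZER;

static unsigned int hashfn(const void *key)
{
	return ((unsigned long)key >> 4) % HASH_SIZE;
}

/* Fast path: walk the hash chain, return NULL if the class is not there. */
static struct my_class *look_up_class(const void *key)
{
	struct my_class *c;

	for (c = hash_table[hashfn(key)]; c; c = c->next)
		if (c->key == key)
			return c;
	return NULL;
}

/* Slow path: re-check under the lock, insert only if still missing. */
static struct my_class *register_class(struct my_map *map)
{
	struct my_class *c = look_up_class(map->key);

	if (!c) {
		pthread_mutex_lock(&hash_lock);
		c = look_up_class(map->key);	/* avoid double registration */
		if (!c) {
			c = calloc(1, sizeof(*c));
			c->key = map->key;
			c->next = hash_table[hashfn(map->key)];
			hash_table[hashfn(map->key)] = c;
		}
		pthread_mutex_unlock(&hash_lock);
	}
	map->class_cache = c;	/* cache the result in the map itself */
	return c;
}

int main(void)
{
	static int key;			/* stands in for a static lock key */
	struct my_map map = { .key = &key };

	/* The first acquisition registers the class, later ones hit the cache. */
	register_class(&map);
	printf("cached class for key %p: %p\n", (void *)&key, (void *)map.class_cache);
	return 0;
}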