@@ -4136,7 +4136,7 @@ static void ext4_mb_add_n_trim(struct ext4_allocation_context *ac)
 		/* The max size of hash table is PREALLOC_TB_SIZE */
 		order = PREALLOC_TB_SIZE - 1;
 	/* Add the prealloc space to lg */
-	rcu_read_lock();
+	spin_lock(&lg->lg_prealloc_lock);
 	list_for_each_entry_rcu(tmp_pa, &lg->lg_prealloc_list[order],
 				pa_inode_list) {
 		spin_lock(&tmp_pa->pa_lock);
@@ -4160,12 +4160,12 @@ static void ext4_mb_add_n_trim(struct ext4_allocation_context *ac)
 	if (!added)
 		list_add_tail_rcu(&pa->pa_inode_list,
 					&lg->lg_prealloc_list[order]);
-	rcu_read_unlock();
+	spin_unlock(&lg->lg_prealloc_lock);
 
 	/* Now trim the list to be not more than 8 elements */
 	if (lg_prealloc_count > 8) {
 		ext4_mb_discard_lg_preallocations(sb, lg,
-						order, lg_prealloc_count);
+						  order, lg_prealloc_count);
 		return;
 	}
 	return ;
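
For illustration only, here is a minimal userspace sketch of the locking pattern the hunks switch to: ext4_mb_add_n_trim() both walks the locality-group list and inserts into it, so the walk now runs under lg->lg_prealloc_lock rather than only rcu_read_lock(). A pthread mutex stands in for the spinlock, and every identifier in the sketch (struct prealloc, lg_list, lg_lock, MAX_LG_PREALLOCS) is invented for the example; only the "insert sorted, then maybe trim" shape mirrors the kernel code.

/* Sketch only: a mutex-protected, sorted list insert analogous to the
 * spin_lock(&lg->lg_prealloc_lock) section above.  All identifiers here
 * are invented for the example.
 */
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

#define MAX_LG_PREALLOCS 8			/* mirrors the "not more than 8 elements" trim point */

struct prealloc {
	unsigned int free_blocks;		/* plays the role of pa->pa_free */
	struct prealloc *next;
};

static struct prealloc *lg_list;		/* plays the role of lg_prealloc_list[order] */
static pthread_mutex_t lg_lock = PTHREAD_MUTEX_INITIALIZER;

/* Insert @pa keeping the list sorted by free_blocks and return the new
 * list length, so the caller can decide whether to trim, much like the
 * kernel code does with lg_prealloc_count.
 */
static int lg_add_prealloc(struct prealloc *pa)
{
	struct prealloc **link, *cur;
	int count = 0;

	pthread_mutex_lock(&lg_lock);		/* stands in for spin_lock(&lg->lg_prealloc_lock) */
	for (link = &lg_list; *link; link = &(*link)->next)
		if (pa->free_blocks < (*link)->free_blocks)
			break;			/* first larger entry found; insert before it */
	pa->next = *link;
	*link = pa;
	for (cur = lg_list; cur; cur = cur->next)
		count++;
	pthread_mutex_unlock(&lg_lock);

	return count;
}

int main(void)
{
	for (unsigned int i = 0; i < 12; i++) {
		struct prealloc *pa = calloc(1, sizeof(*pa));

		if (!pa)
			return 1;
		pa->free_blocks = (i * 7) % 12;
		if (lg_add_prealloc(pa) > MAX_LG_PREALLOCS)
			printf("list longer than %d entries, would trim\n",
			       MAX_LG_PREALLOCS);
	}
	return 0;
}

Built with cc -pthread, the trim message fires once the list grows past eight entries, matching the lg_prealloc_count > 8 check in the second hunk.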