|
@@ -780,20 +780,18 @@ hit_next:
|
|
|
if (err)
|
|
|
goto out;
|
|
|
|
|
|
+ next_node = rb_next(node);
|
|
|
cache_state(state, cached_state);
|
|
|
merge_state(tree, state);
|
|
|
if (last_end == (u64)-1)
|
|
|
goto out;
|
|
|
|
|
|
start = last_end + 1;
|
|
|
- if (start < end && prealloc && !need_resched()) {
|
|
|
- next_node = rb_next(node);
|
|
|
- if (next_node) {
|
|
|
- state = rb_entry(next_node, struct extent_state,
|
|
|
- rb_node);
|
|
|
- if (state->start == start)
|
|
|
- goto hit_next;
|
|
|
- }
|
|
|
+ if (next_node && start < end && prealloc && !need_resched()) {
|
|
|
+ state = rb_entry(next_node, struct extent_state,
|
|
|
+ rb_node);
|
|
|
+ if (state->start == start)
|
|
|
+ goto hit_next;
|
|
|
}
|
|
|
goto search_again;
|
|
|
}
|
|
@@ -856,14 +854,22 @@ hit_next:
|
|
|
|
|
|
prealloc = alloc_extent_state_atomic(prealloc);
|
|
|
BUG_ON(!prealloc);
|
|
|
+
|
|
|
+ /*
|
|
|
+ * Avoid freeing 'prealloc' if it can be merged with
|
|
|
+ * the later extent.
|
|
|
+ */
|
|
|
+ atomic_inc(&prealloc->refs);
|
|
|
err = insert_state(tree, prealloc, start, this_end,
|
|
|
&bits);
|
|
|
BUG_ON(err == -EEXIST);
|
|
|
if (err) {
|
|
|
+ free_extent_state(prealloc);
|
|
|
prealloc = NULL;
|
|
|
goto out;
|
|
|
}
|
|
|
cache_state(prealloc, cached_state);
|
|
|
+ free_extent_state(prealloc);
|
|
|
prealloc = NULL;
|
|
|
start = this_end + 1;
|
|
|
goto search_again;
|