@@ -388,17 +388,23 @@ static inline void
 __vma_link_list(struct mm_struct *mm, struct vm_area_struct *vma,
 		struct vm_area_struct *prev, struct rb_node *rb_parent)
 {
+	struct vm_area_struct *next;
+
+	vma->vm_prev = prev;
 	if (prev) {
-		vma->vm_next = prev->vm_next;
+		next = prev->vm_next;
 		prev->vm_next = vma;
 	} else {
 		mm->mmap = vma;
 		if (rb_parent)
-			vma->vm_next = rb_entry(rb_parent,
+			next = rb_entry(rb_parent,
 					struct vm_area_struct, vm_rb);
 		else
-			vma->vm_next = NULL;
+			next = NULL;
 	}
+	vma->vm_next = next;
+	if (next)
+		next->vm_prev = vma;
 }
 
 void __vma_link_rb(struct mm_struct *mm, struct vm_area_struct *vma,
@@ -483,7 +489,11 @@ static inline void
 __vma_unlink(struct mm_struct *mm, struct vm_area_struct *vma,
 		struct vm_area_struct *prev)
 {
-	prev->vm_next = vma->vm_next;
+	struct vm_area_struct *next = vma->vm_next;
+
+	prev->vm_next = next;
+	if (next)
+		next->vm_prev = prev;
 	rb_erase(&vma->vm_rb, &mm->mm_rb);
 	if (mm->mmap_cache == vma)
 		mm->mmap_cache = prev;
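As a rough userspace illustration (not part of the patch), the two hunks above boil down to ordinary doubly-linked-list insertion and removal: whenever vm_next is set, the corresponding vm_prev must be set to match. The sketch below replays that bookkeeping against a hypothetical two-pointer "struct node"; the rbtree lookup used when prev is NULL is replaced here by simply taking the old list head, and names like link_node/unlink_node are invented for the example.

#include <assert.h>
#include <stddef.h>

struct node {
	struct node *next;
	struct node *prev;
};

/* Insert @n after @prev, or at the head (*head) when @prev is NULL. */
static void link_node(struct node **head, struct node *n, struct node *prev)
{
	struct node *next;

	n->prev = prev;
	if (prev) {
		next = prev->next;
		prev->next = n;
	} else {
		next = *head;		/* stands in for the rb_entry() lookup */
		*head = n;
	}
	n->next = next;
	if (next)
		next->prev = n;		/* keep the back-pointer consistent */
}

/* Remove @n, which is known to sit right after @prev (as in __vma_unlink). */
static void unlink_node(struct node *n, struct node *prev)
{
	struct node *next = n->next;

	prev->next = next;
	if (next)
		next->prev = prev;
}

int main(void)
{
	struct node a = {0}, b = {0}, c = {0};
	struct node *head = NULL;

	link_node(&head, &a, NULL);	/* list: a */
	link_node(&head, &c, &a);	/* list: a, c */
	link_node(&head, &b, &a);	/* list: a, b, c */
	assert(head == &a && a.next == &b && b.prev == &a && c.prev == &b);

	unlink_node(&b, &a);		/* list: a, c */
	assert(a.next == &c && c.prev == &a);
	return 0;
}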
@@ -1915,6 +1925,7 @@ detach_vmas_to_be_unmapped(struct mm_struct *mm, struct vm_area_struct *vma,
 	unsigned long addr;
 
 	insertion_point = (prev ? &prev->vm_next : &mm->mmap);
+	vma->vm_prev = NULL;
 	do {
 		rb_erase(&vma->vm_rb, &mm->mm_rb);
 		mm->map_count--;
@@ -1922,6 +1933,8 @@ detach_vmas_to_be_unmapped(struct mm_struct *mm, struct vm_area_struct *vma,
 		vma = vma->vm_next;
 	} while (vma && vma->vm_start < end);
 	*insertion_point = vma;
+	if (vma)
+		vma->vm_prev = prev;
 	tail_vma->vm_next = NULL;
 	if (mm->unmap_area == arch_unmap_area)
 		addr = prev ? prev->vm_end : mm->mmap_base;
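The two detach_vmas_to_be_unmapped() hunks do the list-surgery counterpart: the detached run starts with a NULL vm_prev, and the first VMA left behind in the main list gets its vm_prev pointed back at prev. A standalone sketch of just that surgery, reusing the hypothetical "struct node" from the earlier example and leaving out the rbtree, map_count and unmap_area details, might look like this (detach_range and its parameters are invented names):

#include <assert.h>
#include <stddef.h>

struct node {
	struct node *next;
	struct node *prev;
};

/*
 * Detach every node from @first up to (but not including) @stop.
 * @prev is the node before @first, or NULL when @first is the list head.
 * Returns the head of the detached chain; its last node gets a NULL next.
 */
static struct node *detach_range(struct node **head, struct node *prev,
				 struct node *first, struct node *stop)
{
	struct node **insertion_point = prev ? &prev->next : head;
	struct node *tail = first;

	first->prev = NULL;		/* detached chain has no back link */
	while (tail->next != stop)
		tail = tail->next;

	*insertion_point = stop;	/* survivors skip the detached run */
	if (stop)
		stop->prev = prev;	/* repair the surviving back-pointer */
	tail->next = NULL;		/* terminate the detached chain */
	return first;
}

int main(void)
{
	struct node a, b, c, d;
	struct node *head = &a, *cut;

	a = (struct node){ .next = &b, .prev = NULL };
	b = (struct node){ .next = &c, .prev = &a };
	c = (struct node){ .next = &d, .prev = &b };
	d = (struct node){ .next = NULL, .prev = &c };

	cut = detach_range(&head, &a, &b, &d);	/* remove b..c, keep a and d */
	assert(head == &a && a.next == &d && d.prev == &a);
	assert(cut == &b && b.next == &c && c.next == NULL);
	return 0;
}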