@@ ... @@
}
/*
- * walk_page_range - walk the page tables of a VMA with a callback
- * @vma - VMA to walk
+ * walk_page_range - walk a memory map's page tables with a callback
+ * @mm - memory map to walk
+ * @addr - starting address of the range to walk
+ * @end - ending address of the range (exclusive)
* @action - callback invoked for every bottom-level (PTE) page table
* @private - private data passed to the callback function
*
- * Recursively walk the page table for the memory area in a VMA, calling
- * a callback for every bottom-level (PTE) page table.
+ * Recursively walk the page tables covering [@addr, @end) in @mm,
+ * calling a callback for every bottom-level (PTE) page table.
*/
-static void walk_page_range(struct vm_area_struct *vma,
+static void walk_page_range(struct mm_struct *mm,
+ unsigned long addr, unsigned long end,
void (*action)(pmd_t *, unsigned long,
unsigned long, void *),
void *private)
{
- unsigned long addr = vma->vm_start;
- unsigned long end = vma->vm_end;
pgd_t *pgd;
unsigned long next;
- for (pgd = pgd_offset(vma->vm_mm, addr); addr != end;
+ for (pgd = pgd_offset(mm, addr); addr != end;
pgd++, addr = next) {
next = pgd_addr_end(addr, end);
if (pgd_none_or_clear_bad(pgd))
@@ ... @@ static int show_smap(struct seq_file *m, void *v)
	memset(&mss, 0, sizeof mss);
mss.vma = vma;
if (vma->vm_mm && !is_vm_hugetlb_page(vma))
- walk_page_range(vma, smaps_pte_range, &mss);
+ walk_page_range(vma->vm_mm, vma->vm_start, vma->vm_end,
+ smaps_pte_range, &mss);
return show_map_internal(m, v, &mss);
}
@@ ... @@ void clear_refs_smap(struct mm_struct *mm)
	down_read(&mm->mmap_sem);
for (vma = mm->mmap; vma; vma = vma->vm_next)
if (vma->vm_mm && !is_vm_hugetlb_page(vma))
- walk_page_range(vma, clear_refs_pte_range, vma);
+ walk_page_range(vma->vm_mm, vma->vm_start, vma->vm_end,
+ clear_refs_pte_range, vma);
flush_tlb_mm(mm);
up_read(&mm->mmap_sem);
}
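
For illustration only, not part of the patch: a minimal sketch of a caller
written against the new interface. The callback matches the action signature
above; count_pte_tables and count_vma_pte_tables are hypothetical names, and
the hugetlb guard mirrors the existing callers.

static void count_pte_tables(pmd_t *pmd, unsigned long addr,
			     unsigned long end, void *private)
{
	unsigned long *count = private;

	/* invoked once per bottom-level (PTE) page table in [addr, end) */
	(*count)++;
}

static unsigned long count_vma_pte_tables(struct vm_area_struct *vma)
{
	unsigned long count = 0;

	if (vma->vm_mm && !is_vm_hugetlb_page(vma))
		walk_page_range(vma->vm_mm, vma->vm_start, vma->vm_end,
				count_pte_tables, &count);
	return count;
}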