 							unsigned int alloc_flags)
 {
 	post_alloc_hook(page, order, gfp_flags);
-	set_page_refcounted(page);
 	if (order && (gfp_flags & __GFP_COMP))
 		prep_compound_page(page, order);
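After this hunk, prep_new_page() hands back a page whose refcount is still zero; every caller touched below becomes responsible for calling set_page_refcounted() once the page is fully prepared. For reference, a minimal sketch of that helper as defined in mm/internal.h in recent mainline (exact assertions may vary by version):

static inline void set_page_refcounted(struct page *page)
{
	/* must not be a tail page, and must still be frozen (refcount == 0) */
	VM_BUG_ON_PAGE(PageTail(page), page);
	VM_BUG_ON_PAGE(page_ref_count(page), page);
	set_page_count(page, 1);
}

The next hunk picks up at the rmqueue() call site in get_page_from_freelist():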
 				gfp_mask, alloc_flags, ac->migratetype);
 		if (page) {
 			prep_new_page(page, order, gfp_mask, alloc_flags);
+			set_page_refcounted(page);
 			/*
 			 * If this is a high-order atomic allocation then check
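Here the page returned by rmqueue() is prepped and then given its initial reference before the high-order atomic check that the comment above introduces. Assuming the surrounding context matches current mainline, the call site ends up reading roughly (remaining checks elided):

		if (page) {
			prep_new_page(page, order, gfp_mask, alloc_flags);
			set_page_refcounted(page);

			/* ... high-order atomic pageblock reservation ... */

			return page;
		}

The following hunk is in __alloc_pages_direct_compact():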
 	count_vm_event(COMPACTSTALL);
 	/* Prep a captured page if available */
-	if (page)
+	if (page) {
 		prep_new_page(page, order, gfp_mask, alloc_flags);
+		set_page_refcounted(page);
+	}
 	/* Try get a page from the freelist if available */
 	if (!page)
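A page captured directly by compaction is prepped and refcounted on the spot; when nothing was captured, the `if (!page)` fallback above calls get_page_from_freelist(), which now sets the refcount itself (previous hunk), so both exits hand back a page with a refcount of one. A sketch of the resulting flow, assuming current mainline context:

	/* Prep a captured page if available */
	if (page) {
		prep_new_page(page, order, gfp_mask, alloc_flags);
		set_page_refcounted(page);
	}

	/* Try get a page from the freelist if available */
	if (!page)
		page = get_page_from_freelist(gfp_mask, order, alloc_flags, ac);

The next hunk is the per-page loop of the bulk allocator (alloc_pages_bulk_noprof() in current trees):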
 		nr_account++;
 		prep_new_page(page, 0, gfp, 0);
+		set_page_refcounted(page);
 		if (page_list)
 			list_add(&page->lru, page_list);
 		else
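Each page allocated by the bulk path is prepped and given its reference inside the per-page loop before being handed to the caller, either on the supplied list or in the page array; the array branch below is reconstructed from current mainline and is only illustrative:

		nr_account++;

		prep_new_page(page, 0, gfp, 0);
		set_page_refcounted(page);
		if (page_list)
			list_add(&page->lru, page_list);
		else
			page_array[nr_populated] = page;	/* reconstructed else branch */

The final hunk below appears to be alloc_contig_range()'s __GFP_COMP path, where the prepared compound head receives its refcount: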
 		check_new_pages(head, order);
 		prep_new_page(head, order, gfp_mask, 0);
+		set_page_refcounted(head);
 	} else {
 		ret = -EINVAL;
 		WARN(true, "PFN range: requested [%lu, %lu), allocated [%lu, %lu)\n",