mm/hugetlb: try preferred node first when alloc gigantic page from cma
Since commit cf11e85fc0 ("mm: hugetlb: optionally allocate gigantic hugepages using cma"), a gigantic page may be allocated from a node other than the preferred node, even though pages are available on the preferred node. The reason is that the nid parameter is ignored in alloc_gigantic_page(). In addition, __GFP_THISNODE needs to be honored when the caller requires allocation only from the preferred node.

After this patch, the preferred node is tried first before the other allowed nodes, and no other node is tried when __GFP_THISNODE is specified. If the caller does not specify a preferred node, the current node is used as the preferred node, which keeps the behavior of allocating gigantic and non-gigantic hugetlb pages consistent.

Fixes: cf11e85fc0 ("mm: hugetlb: optionally allocate gigantic hugepages using cma")
Signed-off-by: Li Xinhai <lixinhai.lxh@gmail.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Reviewed-by: Mike Kravetz <mike.kravetz@oracle.com>
Acked-by: Michal Hocko <mhocko@suse.com>
Cc: Roman Gushchin <guro@fb.com>
Link: https://lkml.kernel.org/r/20200902025016.697260-1-lixinhai.lxh@gmail.com
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
parent 3d321bf82c
commit 953f064aa6

 mm/hugetlb.c | 23 +++++++++++++++++------
 1 file changed, 17 insertions(+), 6 deletions(-)
@@ -1250,21 +1250,32 @@ static struct page *alloc_gigantic_page(struct hstate *h, gfp_t gfp_mask,
 		int nid, nodemask_t *nodemask)
 {
 	unsigned long nr_pages = 1UL << huge_page_order(h);
+	if (nid == NUMA_NO_NODE)
+		nid = numa_mem_id();
 
 #ifdef CONFIG_CMA
 	{
 		struct page *page;
 		int node;
 
-		for_each_node_mask(node, *nodemask) {
-			if (!hugetlb_cma[node])
-				continue;
-
-			page = cma_alloc(hugetlb_cma[node], nr_pages,
-					 huge_page_order(h), true);
+		if (hugetlb_cma[nid]) {
+			page = cma_alloc(hugetlb_cma[nid], nr_pages,
+					huge_page_order(h), true);
 			if (page)
 				return page;
 		}
+
+		if (!(gfp_mask & __GFP_THISNODE)) {
+			for_each_node_mask(node, *nodemask) {
+				if (node == nid || !hugetlb_cma[node])
+					continue;
+
+				page = cma_alloc(hugetlb_cma[node], nr_pages,
+						huge_page_order(h), true);
+				if (page)
+					return page;
+			}
+		}
 	}
 #endif
 