Path: tools/testing/memblock/tests/alloc_nid_api.c
// SPDX-License-Identifier: GPL-2.0-or-later
#include "alloc_nid_api.h"

static int alloc_nid_test_flags = TEST_F_NONE;

/*
 * contains the fraction of MEM_SIZE contained in each node in basis point
 * units (one hundredth of 1% or 1/10000)
 */
static const unsigned int node_fractions[] = {
        2500, /* 1/4  */
         625, /* 1/16 */
        1250, /* 1/8  */
        1250, /* 1/8  */
         625, /* 1/16 */
         625, /* 1/16 */
        2500, /* 1/4  */
         625, /* 1/16 */
};

static inline const char * const get_memblock_alloc_nid_name(int flags)
{
        if (flags & TEST_F_EXACT)
                return "memblock_alloc_exact_nid_raw";
        if (flags & TEST_F_RAW)
                return "memblock_alloc_try_nid_raw";
        return "memblock_alloc_try_nid";
}

static inline void *run_memblock_alloc_nid(phys_addr_t size,
                                           phys_addr_t align,
                                           phys_addr_t min_addr,
                                           phys_addr_t max_addr, int nid)
{
        assert(!(alloc_nid_test_flags & TEST_F_EXACT) ||
               (alloc_nid_test_flags & TEST_F_RAW));
        /*
         * TEST_F_EXACT should be checked before TEST_F_RAW since
         * memblock_alloc_exact_nid_raw() performs raw allocations.
         */
        if (alloc_nid_test_flags & TEST_F_EXACT)
                return memblock_alloc_exact_nid_raw(size, align, min_addr,
                                                    max_addr, nid);
        if (alloc_nid_test_flags & TEST_F_RAW)
                return memblock_alloc_try_nid_raw(size, align, min_addr,
                                                  max_addr, nid);
        return memblock_alloc_try_nid(size, align, min_addr, max_addr, nid);
}

/*
 * A simple test that tries to allocate a memory region within min_addr and
 * max_addr range:
 *
 *        +                   +
 *   |    +       +-----------+      |
 *   |    |       |    rgn    |      |
 *   +----+-------+-----------+------+
 *        ^                   ^
 *        |                   |
 *        min_addr            max_addr
 *
 * Expect to allocate a region that ends at max_addr.
 */
static int alloc_nid_top_down_simple_check(void)
{
        struct memblock_region *rgn = &memblock.reserved.regions[0];
        void *allocated_ptr = NULL;
        phys_addr_t size = SZ_128;
        phys_addr_t min_addr;
        phys_addr_t max_addr;
        phys_addr_t rgn_end;

        PREFIX_PUSH();
        setup_memblock();

        min_addr = memblock_start_of_DRAM() + SMP_CACHE_BYTES * 2;
        max_addr = min_addr + SZ_512;

        allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,
                                               min_addr, max_addr,
                                               NUMA_NO_NODE);
        rgn_end = rgn->base + rgn->size;

        ASSERT_NE(allocated_ptr, NULL);
        assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);

        ASSERT_EQ(rgn->size, size);
        ASSERT_EQ(rgn->base, max_addr - size);
        ASSERT_EQ(rgn_end, max_addr);

        ASSERT_EQ(memblock.reserved.cnt, 1);
        ASSERT_EQ(memblock.reserved.total_size, size);

        test_pass_pop();

        return 0;
}

/*
 * A simple test that tries to allocate a memory region within min_addr and
 * max_addr range, where the end address is misaligned:
 *
 *        +                 +  +
 *   |    +       +---------+  +    |
 *   |    |       |   rgn   |  |    |
 *   +----+-------+---------+--+----+
 *        ^                 ^  ^
 *        |                 |  |
 *        min_addr          |  max_addr
 *                          |
 *                          Aligned address
 *                          boundary
 *
 * Expect to allocate an aligned region that ends before max_addr.
 */
static int alloc_nid_top_down_end_misaligned_check(void)
{
        struct memblock_region *rgn = &memblock.reserved.regions[0];
        void *allocated_ptr = NULL;
        phys_addr_t size = SZ_128;
        phys_addr_t misalign = SZ_2;
        phys_addr_t min_addr;
        phys_addr_t max_addr;
        phys_addr_t rgn_end;

        PREFIX_PUSH();
        setup_memblock();

        min_addr = memblock_start_of_DRAM() + SMP_CACHE_BYTES * 2;
        max_addr = min_addr + SZ_512 + misalign;

        allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,
                                               min_addr, max_addr,
                                               NUMA_NO_NODE);
        rgn_end = rgn->base + rgn->size;

        ASSERT_NE(allocated_ptr, NULL);
        assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);

        ASSERT_EQ(rgn->size, size);
        ASSERT_EQ(rgn->base, max_addr - size - misalign);
        ASSERT_LT(rgn_end, max_addr);

        ASSERT_EQ(memblock.reserved.cnt, 1);
        ASSERT_EQ(memblock.reserved.total_size, size);

        test_pass_pop();

        return 0;
}

/*
 * A simple test that tries to allocate a memory region, which spans over the
 * min_addr and max_addr range:
 *
 *        +               +
 *   |    +---------------+       |
 *   |    |      rgn      |       |
 *   +----+---------------+-------+
 *        ^               ^
 *        |               |
 *        min_addr        max_addr
 *
 * Expect to allocate a region that starts at min_addr and ends at
 * max_addr, given that min_addr is aligned.
 */
static int alloc_nid_exact_address_generic_check(void)
{
        struct memblock_region *rgn = &memblock.reserved.regions[0];
        void *allocated_ptr = NULL;
        phys_addr_t size = SZ_1K;
        phys_addr_t min_addr;
        phys_addr_t max_addr;
        phys_addr_t rgn_end;

        PREFIX_PUSH();
        setup_memblock();

        min_addr = memblock_start_of_DRAM() + SMP_CACHE_BYTES;
        max_addr = min_addr + size;

        allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,
                                               min_addr, max_addr,
                                               NUMA_NO_NODE);
        rgn_end = rgn->base + rgn->size;

        ASSERT_NE(allocated_ptr, NULL);
        assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);

        ASSERT_EQ(rgn->size, size);
        ASSERT_EQ(rgn->base, min_addr);
        ASSERT_EQ(rgn_end, max_addr);

        ASSERT_EQ(memblock.reserved.cnt, 1);
        ASSERT_EQ(memblock.reserved.total_size, size);

        test_pass_pop();

        return 0;
}

/*
 * A test that tries to allocate a memory region, which can't fit into
 * min_addr and max_addr range:
 *
 *          +          +     +
 *   |      +----------+-----+    |
 *   |      |   rgn    +     |    |
 *   +------+----------+-----+----+
 *          ^          ^     ^
 *          |          |     |
 *          Aligned    |     max_addr
 *          address    |
 *          boundary   min_addr
 *
 * Expect to drop the lower limit and allocate a memory region which
 * ends at max_addr (if the address is aligned).
 */
static int alloc_nid_top_down_narrow_range_check(void)
{
        struct memblock_region *rgn = &memblock.reserved.regions[0];
        void *allocated_ptr = NULL;
        phys_addr_t size = SZ_256;
        phys_addr_t min_addr;
        phys_addr_t max_addr;

        PREFIX_PUSH();
        setup_memblock();

        min_addr = memblock_start_of_DRAM() + SZ_512;
        max_addr = min_addr + SMP_CACHE_BYTES;

        allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,
                                               min_addr, max_addr,
                                               NUMA_NO_NODE);

        ASSERT_NE(allocated_ptr, NULL);
        assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);

        ASSERT_EQ(rgn->size, size);
        ASSERT_EQ(rgn->base, max_addr - size);

        ASSERT_EQ(memblock.reserved.cnt, 1);
        ASSERT_EQ(memblock.reserved.total_size, size);

        test_pass_pop();

        return 0;
}

/*
 * A test that tries to allocate a memory region, which can't fit into
 * min_addr and max_addr range, with the latter being too close to the beginning
 * of the available memory:
 *
 *         +-------------+
 *         |     new     |
 *         +-------------+
 *   +       +
 *   |       +             |
 *   |       |             |
 *   +-------+-------------+
 *   ^       ^
 *   |       |
 *   |       max_addr
 *   |
 *   min_addr
 *
 * Expect no allocation to happen.
 */
static int alloc_nid_low_max_generic_check(void)
{
        void *allocated_ptr = NULL;
        phys_addr_t size = SZ_1K;
        phys_addr_t min_addr;
        phys_addr_t max_addr;

        PREFIX_PUSH();
        setup_memblock();

        min_addr = memblock_start_of_DRAM();
        max_addr = min_addr + SMP_CACHE_BYTES;

        allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,
                                               min_addr, max_addr,
                                               NUMA_NO_NODE);

        ASSERT_EQ(allocated_ptr, NULL);

        test_pass_pop();

        return 0;
}

/*
 * A test that tries to allocate a memory region within min_addr and max_addr
 * range, with min_addr being so close that it's next to an allocated region:
 *
 *                  +               +
 *   |      +--------+---------------|
 *   |      |   r1   |      rgn      |
 *   +------+--------+---------------+
 *                   ^               ^
 *                   |               |
 *                   min_addr        max_addr
 *
 * Expect a merge of both regions. Only the region size gets updated.
 */
static int alloc_nid_min_reserved_generic_check(void)
{
        struct memblock_region *rgn = &memblock.reserved.regions[0];
        void *allocated_ptr = NULL;
        phys_addr_t r1_size = SZ_128;
        phys_addr_t r2_size = SZ_64;
        phys_addr_t total_size = r1_size + r2_size;
        phys_addr_t min_addr;
        phys_addr_t max_addr;
        phys_addr_t reserved_base;

        PREFIX_PUSH();
        setup_memblock();

        max_addr = memblock_end_of_DRAM();
        min_addr = max_addr - r2_size;
        reserved_base = min_addr - r1_size;

        memblock_reserve_kern(reserved_base, r1_size);

        allocated_ptr = run_memblock_alloc_nid(r2_size, SMP_CACHE_BYTES,
                                               min_addr, max_addr,
                                               NUMA_NO_NODE);

        ASSERT_NE(allocated_ptr, NULL);
        assert_mem_content(allocated_ptr, r2_size, alloc_nid_test_flags);

        ASSERT_EQ(rgn->size, total_size);
        ASSERT_EQ(rgn->base, reserved_base);

        ASSERT_EQ(memblock.reserved.cnt, 1);
        ASSERT_EQ(memblock.reserved.total_size, total_size);

        test_pass_pop();

        return 0;
}

/*
 * A test that tries to allocate a memory region within min_addr and max_addr,
 * with max_addr being so close that it's next to an allocated region:
 *
 *              +             +
 *   |          +-------------+--------|
 *   |          |     rgn     |   r1   |
 *   +----------+-------------+--------+
 *              ^             ^
 *              |             |
 *              min_addr      max_addr
 *
 * Expect a merge of regions. Only the region size gets updated.
 */
static int alloc_nid_max_reserved_generic_check(void)
{
        struct memblock_region *rgn = &memblock.reserved.regions[0];
        void *allocated_ptr = NULL;
        phys_addr_t r1_size = SZ_64;
        phys_addr_t r2_size = SZ_128;
        phys_addr_t total_size = r1_size + r2_size;
        phys_addr_t min_addr;
        phys_addr_t max_addr;

        PREFIX_PUSH();
        setup_memblock();

        max_addr = memblock_end_of_DRAM() - r1_size;
        min_addr = max_addr - r2_size;

        memblock_reserve_kern(max_addr, r1_size);

        allocated_ptr = run_memblock_alloc_nid(r2_size, SMP_CACHE_BYTES,
                                               min_addr, max_addr,
                                               NUMA_NO_NODE);

        ASSERT_NE(allocated_ptr, NULL);
        assert_mem_content(allocated_ptr, r2_size, alloc_nid_test_flags);

        ASSERT_EQ(rgn->size, total_size);
        ASSERT_EQ(rgn->base, min_addr);

        ASSERT_EQ(memblock.reserved.cnt, 1);
        ASSERT_EQ(memblock.reserved.total_size, total_size);

        test_pass_pop();

        return 0;
}

/*
 * A test that tries to allocate memory within min_addr and max_addr range, when
 * there are two reserved regions at the borders, with a gap big enough to fit
 * a new region:
 *
 *                 +           +
 *   |    +--------+   +-------+------+  |
 *   |    |   r2   |   |  rgn  |  r1  |  |
 *   +----+--------+---+-------+------+--+
 *                 ^           ^
 *                 |           |
 *                 min_addr    max_addr
 *
 * Expect to merge the new region with r1. The second region does not get
 * updated. The total size field gets updated.
 */
static int alloc_nid_top_down_reserved_with_space_check(void)
{
        struct memblock_region *rgn1 = &memblock.reserved.regions[1];
        struct memblock_region *rgn2 = &memblock.reserved.regions[0];
        void *allocated_ptr = NULL;
        struct region r1, r2;
        phys_addr_t r3_size = SZ_64;
        phys_addr_t gap_size = SMP_CACHE_BYTES;
        phys_addr_t total_size;
        phys_addr_t max_addr;
        phys_addr_t min_addr;

        PREFIX_PUSH();
        setup_memblock();

        r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;
        r1.size = SMP_CACHE_BYTES;

        r2.size = SZ_128;
        r2.base = r1.base - (r3_size + gap_size + r2.size);

        total_size = r1.size + r2.size + r3_size;
        min_addr = r2.base + r2.size;
        max_addr = r1.base;

        memblock_reserve_kern(r1.base, r1.size);
        memblock_reserve_kern(r2.base, r2.size);

        allocated_ptr = run_memblock_alloc_nid(r3_size, SMP_CACHE_BYTES,
                                               min_addr, max_addr,
                                               NUMA_NO_NODE);

        ASSERT_NE(allocated_ptr, NULL);
        assert_mem_content(allocated_ptr, r3_size, alloc_nid_test_flags);

        ASSERT_EQ(rgn1->size, r1.size + r3_size);
        ASSERT_EQ(rgn1->base, max_addr - r3_size);

        ASSERT_EQ(rgn2->size, r2.size);
        ASSERT_EQ(rgn2->base, r2.base);

        ASSERT_EQ(memblock.reserved.cnt, 2);
        ASSERT_EQ(memblock.reserved.total_size, total_size);

        test_pass_pop();

        return 0;
}

/*
 * A test that tries to allocate memory within min_addr and max_addr range, when
 * there are two reserved regions at the borders, with a gap of a size equal to
 * the size of the new region:
 *
 *                  +        +
 *   |    +--------+--------+--------+    |
 *   |    |   r2   |   r3   |   r1   |    |
 *   +----+--------+--------+--------+----+
 *                 ^        ^
 *                 |        |
 *                 min_addr max_addr
 *
 * Expect to merge all of the regions into one. The region counter and total
 * size fields get updated.
 */
static int alloc_nid_reserved_full_merge_generic_check(void)
{
        struct memblock_region *rgn = &memblock.reserved.regions[0];
        void *allocated_ptr = NULL;
        struct region r1, r2;
        phys_addr_t r3_size = SZ_64;
        phys_addr_t total_size;
        phys_addr_t max_addr;
        phys_addr_t min_addr;

        PREFIX_PUSH();
        setup_memblock();

        r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;
        r1.size = SMP_CACHE_BYTES;

        r2.size = SZ_128;
        r2.base = r1.base - (r3_size + r2.size);

        total_size = r1.size + r2.size + r3_size;
        min_addr = r2.base + r2.size;
        max_addr = r1.base;

        memblock_reserve_kern(r1.base, r1.size);
        memblock_reserve_kern(r2.base, r2.size);

        allocated_ptr = run_memblock_alloc_nid(r3_size, SMP_CACHE_BYTES,
                                               min_addr, max_addr,
                                               NUMA_NO_NODE);

        ASSERT_NE(allocated_ptr, NULL);
        assert_mem_content(allocated_ptr, r3_size, alloc_nid_test_flags);

        ASSERT_EQ(rgn->size, total_size);
        ASSERT_EQ(rgn->base, r2.base);

        ASSERT_EQ(memblock.reserved.cnt, 1);
        ASSERT_EQ(memblock.reserved.total_size, total_size);

        test_pass_pop();

        return 0;
}

/*
 * A test that tries to allocate memory within min_addr and max_addr range, when
 * there are two reserved regions at the borders, with a gap that can't fit
 * a new region:
 *
 *                       +    +
 *   |  +----------+------+    +------+   |
 *   |  |    r3    |  r2  |    |  r1  |   |
 *   +--+----------+------+----+------+---+
 *                        ^    ^
 *                        |    |
 *                        |    max_addr
 *                        |
 *                        min_addr
 *
 * Expect to merge the new region with r2. The second region does not get
 * updated.
The total size counter gets updated.539*/540static int alloc_nid_top_down_reserved_no_space_check(void)541{542struct memblock_region *rgn1 = &memblock.reserved.regions[1];543struct memblock_region *rgn2 = &memblock.reserved.regions[0];544void *allocated_ptr = NULL;545struct region r1, r2;546phys_addr_t r3_size = SZ_256;547phys_addr_t gap_size = SMP_CACHE_BYTES;548phys_addr_t total_size;549phys_addr_t max_addr;550phys_addr_t min_addr;551552PREFIX_PUSH();553setup_memblock();554555r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;556r1.size = SMP_CACHE_BYTES;557558r2.size = SZ_128;559r2.base = r1.base - (r2.size + gap_size);560561total_size = r1.size + r2.size + r3_size;562min_addr = r2.base + r2.size;563max_addr = r1.base;564565memblock_reserve_kern(r1.base, r1.size);566memblock_reserve_kern(r2.base, r2.size);567568allocated_ptr = run_memblock_alloc_nid(r3_size, SMP_CACHE_BYTES,569min_addr, max_addr,570NUMA_NO_NODE);571572ASSERT_NE(allocated_ptr, NULL);573assert_mem_content(allocated_ptr, r3_size, alloc_nid_test_flags);574575ASSERT_EQ(rgn1->size, r1.size);576ASSERT_EQ(rgn1->base, r1.base);577578ASSERT_EQ(rgn2->size, r2.size + r3_size);579ASSERT_EQ(rgn2->base, r2.base - r3_size);580581ASSERT_EQ(memblock.reserved.cnt, 2);582ASSERT_EQ(memblock.reserved.total_size, total_size);583584test_pass_pop();585586return 0;587}588589/*590* A test that tries to allocate memory within min_addr and max_add range, but591* it's too narrow and everything else is reserved:592*593* +-----------+594* | new |595* +-----------+596* + +597* |--------------+ +----------|598* | r2 | | r1 |599* +--------------+------+----------+600* ^ ^601* | |602* | max_addr603* |604* min_addr605*606* Expect no allocation to happen.607*/608609static int alloc_nid_reserved_all_generic_check(void)610{611void *allocated_ptr = NULL;612struct region r1, r2;613phys_addr_t r3_size = SZ_256;614phys_addr_t gap_size = SMP_CACHE_BYTES;615phys_addr_t max_addr;616phys_addr_t min_addr;617618PREFIX_PUSH();619setup_memblock();620621r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES;622r1.size = SMP_CACHE_BYTES;623624r2.size = MEM_SIZE - (r1.size + gap_size);625r2.base = memblock_start_of_DRAM();626627min_addr = r2.base + r2.size;628max_addr = r1.base;629630memblock_reserve(r1.base, r1.size);631memblock_reserve(r2.base, r2.size);632633allocated_ptr = run_memblock_alloc_nid(r3_size, SMP_CACHE_BYTES,634min_addr, max_addr,635NUMA_NO_NODE);636637ASSERT_EQ(allocated_ptr, NULL);638639test_pass_pop();640641return 0;642}643644/*645* A test that tries to allocate a memory region, where max_addr is646* bigger than the end address of the available memory. 
Expect to allocate647* a region that ends before the end of the memory.648*/649static int alloc_nid_top_down_cap_max_check(void)650{651struct memblock_region *rgn = &memblock.reserved.regions[0];652void *allocated_ptr = NULL;653phys_addr_t size = SZ_256;654phys_addr_t min_addr;655phys_addr_t max_addr;656657PREFIX_PUSH();658setup_memblock();659660min_addr = memblock_end_of_DRAM() - SZ_1K;661max_addr = memblock_end_of_DRAM() + SZ_256;662663allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,664min_addr, max_addr,665NUMA_NO_NODE);666667ASSERT_NE(allocated_ptr, NULL);668assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);669670ASSERT_EQ(rgn->size, size);671ASSERT_EQ(rgn->base, memblock_end_of_DRAM() - size);672673ASSERT_EQ(memblock.reserved.cnt, 1);674ASSERT_EQ(memblock.reserved.total_size, size);675676test_pass_pop();677678return 0;679}680681/*682* A test that tries to allocate a memory region, where min_addr is683* smaller than the start address of the available memory. Expect to allocate684* a region that ends before the end of the memory.685*/686static int alloc_nid_top_down_cap_min_check(void)687{688struct memblock_region *rgn = &memblock.reserved.regions[0];689void *allocated_ptr = NULL;690phys_addr_t size = SZ_1K;691phys_addr_t min_addr;692phys_addr_t max_addr;693694PREFIX_PUSH();695setup_memblock();696697min_addr = memblock_start_of_DRAM() - SZ_256;698max_addr = memblock_end_of_DRAM();699700allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,701min_addr, max_addr,702NUMA_NO_NODE);703704ASSERT_NE(allocated_ptr, NULL);705assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);706707ASSERT_EQ(rgn->size, size);708ASSERT_EQ(rgn->base, memblock_end_of_DRAM() - size);709710ASSERT_EQ(memblock.reserved.cnt, 1);711ASSERT_EQ(memblock.reserved.total_size, size);712713test_pass_pop();714715return 0;716}717718/*719* A simple test that tries to allocate a memory region within min_addr and720* max_addr range:721*722* + +723* | +-----------+ | |724* | | rgn | | |725* +----+-----------+-----------+------+726* ^ ^727* | |728* min_addr max_addr729*730* Expect to allocate a region that ends before max_addr.731*/732static int alloc_nid_bottom_up_simple_check(void)733{734struct memblock_region *rgn = &memblock.reserved.regions[0];735void *allocated_ptr = NULL;736phys_addr_t size = SZ_128;737phys_addr_t min_addr;738phys_addr_t max_addr;739phys_addr_t rgn_end;740741PREFIX_PUSH();742setup_memblock();743744min_addr = memblock_start_of_DRAM() + SMP_CACHE_BYTES * 2;745max_addr = min_addr + SZ_512;746747allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,748min_addr, max_addr,749NUMA_NO_NODE);750rgn_end = rgn->base + rgn->size;751752ASSERT_NE(allocated_ptr, NULL);753assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);754755ASSERT_EQ(rgn->size, size);756ASSERT_EQ(rgn->base, min_addr);757ASSERT_LT(rgn_end, max_addr);758759ASSERT_EQ(memblock.reserved.cnt, 1);760ASSERT_EQ(memblock.reserved.total_size, size);761762test_pass_pop();763764return 0;765}766767/*768* A simple test that tries to allocate a memory region within min_addr and769* max_addr range, where the start address is misaligned:770*771* + +772* | + +-----------+ + |773* | | | rgn | | |774* +-----+---+-----------+-----+-----+775* ^ ^----. 
^776* | | |777* min_add | max_addr778* |779* Aligned address780* boundary781*782* Expect to allocate an aligned region that ends before max_addr.783*/784static int alloc_nid_bottom_up_start_misaligned_check(void)785{786struct memblock_region *rgn = &memblock.reserved.regions[0];787void *allocated_ptr = NULL;788phys_addr_t size = SZ_128;789phys_addr_t misalign = SZ_2;790phys_addr_t min_addr;791phys_addr_t max_addr;792phys_addr_t rgn_end;793794PREFIX_PUSH();795setup_memblock();796797min_addr = memblock_start_of_DRAM() + misalign;798max_addr = min_addr + SZ_512;799800allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,801min_addr, max_addr,802NUMA_NO_NODE);803rgn_end = rgn->base + rgn->size;804805ASSERT_NE(allocated_ptr, NULL);806assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);807808ASSERT_EQ(rgn->size, size);809ASSERT_EQ(rgn->base, min_addr + (SMP_CACHE_BYTES - misalign));810ASSERT_LT(rgn_end, max_addr);811812ASSERT_EQ(memblock.reserved.cnt, 1);813ASSERT_EQ(memblock.reserved.total_size, size);814815test_pass_pop();816817return 0;818}819820/*821* A test that tries to allocate a memory region, which can't fit into min_addr822* and max_addr range:823*824* + +825* |---------+ + + |826* | rgn | | | |827* +---------+---------+----+------+828* ^ ^829* | |830* | max_addr831* |832* min_add833*834* Expect to drop the lower limit and allocate a memory region which835* starts at the beginning of the available memory.836*/837static int alloc_nid_bottom_up_narrow_range_check(void)838{839struct memblock_region *rgn = &memblock.reserved.regions[0];840void *allocated_ptr = NULL;841phys_addr_t size = SZ_256;842phys_addr_t min_addr;843phys_addr_t max_addr;844845PREFIX_PUSH();846setup_memblock();847848min_addr = memblock_start_of_DRAM() + SZ_512;849max_addr = min_addr + SMP_CACHE_BYTES;850851allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,852min_addr, max_addr,853NUMA_NO_NODE);854855ASSERT_NE(allocated_ptr, NULL);856assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);857858ASSERT_EQ(rgn->size, size);859ASSERT_EQ(rgn->base, memblock_start_of_DRAM());860861ASSERT_EQ(memblock.reserved.cnt, 1);862ASSERT_EQ(memblock.reserved.total_size, size);863864test_pass_pop();865866return 0;867}868869/*870* A test that tries to allocate memory within min_addr and max_add range, when871* there are two reserved regions at the borders, with a gap big enough to fit872* a new region:873*874* + +875* | +--------+-------+ +------+ |876* | | r2 | rgn | | r1 | |877* +----+--------+-------+---+------+--+878* ^ ^879* | |880* min_addr max_addr881*882* Expect to merge the new region with r2. The second region does not get883* updated. 
The total size field gets updated.884*/885886static int alloc_nid_bottom_up_reserved_with_space_check(void)887{888struct memblock_region *rgn1 = &memblock.reserved.regions[1];889struct memblock_region *rgn2 = &memblock.reserved.regions[0];890void *allocated_ptr = NULL;891struct region r1, r2;892phys_addr_t r3_size = SZ_64;893phys_addr_t gap_size = SMP_CACHE_BYTES;894phys_addr_t total_size;895phys_addr_t max_addr;896phys_addr_t min_addr;897898PREFIX_PUSH();899setup_memblock();900901r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;902r1.size = SMP_CACHE_BYTES;903904r2.size = SZ_128;905r2.base = r1.base - (r3_size + gap_size + r2.size);906907total_size = r1.size + r2.size + r3_size;908min_addr = r2.base + r2.size;909max_addr = r1.base;910911memblock_reserve_kern(r1.base, r1.size);912memblock_reserve_kern(r2.base, r2.size);913914allocated_ptr = run_memblock_alloc_nid(r3_size, SMP_CACHE_BYTES,915min_addr, max_addr,916NUMA_NO_NODE);917918ASSERT_NE(allocated_ptr, NULL);919assert_mem_content(allocated_ptr, r3_size, alloc_nid_test_flags);920921ASSERT_EQ(rgn1->size, r1.size);922ASSERT_EQ(rgn1->base, max_addr);923924ASSERT_EQ(rgn2->size, r2.size + r3_size);925ASSERT_EQ(rgn2->base, r2.base);926927ASSERT_EQ(memblock.reserved.cnt, 2);928ASSERT_EQ(memblock.reserved.total_size, total_size);929930test_pass_pop();931932return 0;933}934935/*936* A test that tries to allocate memory within min_addr and max_add range, when937* there are two reserved regions at the borders, with a gap of a size equal to938* the size of the new region:939*940* + +941* |----------+ +------+ +----+ |942* | r3 | | r2 | | r1 | |943* +----------+----+------+---+----+--+944* ^ ^945* | |946* | max_addr947* |948* min_addr949*950* Expect to drop the lower limit and allocate memory at the beginning of the951* available memory. The region counter and total size fields get updated.952* Other regions are not modified.953*/954955static int alloc_nid_bottom_up_reserved_no_space_check(void)956{957struct memblock_region *rgn1 = &memblock.reserved.regions[2];958struct memblock_region *rgn2 = &memblock.reserved.regions[1];959struct memblock_region *rgn3 = &memblock.reserved.regions[0];960void *allocated_ptr = NULL;961struct region r1, r2;962phys_addr_t r3_size = SZ_256;963phys_addr_t gap_size = SMP_CACHE_BYTES;964phys_addr_t total_size;965phys_addr_t max_addr;966phys_addr_t min_addr;967968PREFIX_PUSH();969setup_memblock();970971r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;972r1.size = SMP_CACHE_BYTES;973974r2.size = SZ_128;975r2.base = r1.base - (r2.size + gap_size);976977total_size = r1.size + r2.size + r3_size;978min_addr = r2.base + r2.size;979max_addr = r1.base;980981memblock_reserve(r1.base, r1.size);982memblock_reserve(r2.base, r2.size);983984allocated_ptr = run_memblock_alloc_nid(r3_size, SMP_CACHE_BYTES,985min_addr, max_addr,986NUMA_NO_NODE);987988ASSERT_NE(allocated_ptr, NULL);989assert_mem_content(allocated_ptr, r3_size, alloc_nid_test_flags);990991ASSERT_EQ(rgn3->size, r3_size);992ASSERT_EQ(rgn3->base, memblock_start_of_DRAM());993994ASSERT_EQ(rgn2->size, r2.size);995ASSERT_EQ(rgn2->base, r2.base);996997ASSERT_EQ(rgn1->size, r1.size);998ASSERT_EQ(rgn1->base, r1.base);9991000ASSERT_EQ(memblock.reserved.cnt, 3);1001ASSERT_EQ(memblock.reserved.total_size, total_size);10021003test_pass_pop();10041005return 0;1006}10071008/*1009* A test that tries to allocate a memory region, where max_addr is1010* bigger than the end address of the available memory. 
Expect to allocate1011* a region that starts at the min_addr.1012*/1013static int alloc_nid_bottom_up_cap_max_check(void)1014{1015struct memblock_region *rgn = &memblock.reserved.regions[0];1016void *allocated_ptr = NULL;1017phys_addr_t size = SZ_256;1018phys_addr_t min_addr;1019phys_addr_t max_addr;10201021PREFIX_PUSH();1022setup_memblock();10231024min_addr = memblock_start_of_DRAM() + SZ_1K;1025max_addr = memblock_end_of_DRAM() + SZ_256;10261027allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1028min_addr, max_addr,1029NUMA_NO_NODE);10301031ASSERT_NE(allocated_ptr, NULL);1032assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);10331034ASSERT_EQ(rgn->size, size);1035ASSERT_EQ(rgn->base, min_addr);10361037ASSERT_EQ(memblock.reserved.cnt, 1);1038ASSERT_EQ(memblock.reserved.total_size, size);10391040test_pass_pop();10411042return 0;1043}10441045/*1046* A test that tries to allocate a memory region, where min_addr is1047* smaller than the start address of the available memory. Expect to allocate1048* a region at the beginning of the available memory.1049*/1050static int alloc_nid_bottom_up_cap_min_check(void)1051{1052struct memblock_region *rgn = &memblock.reserved.regions[0];1053void *allocated_ptr = NULL;1054phys_addr_t size = SZ_1K;1055phys_addr_t min_addr;1056phys_addr_t max_addr;10571058PREFIX_PUSH();1059setup_memblock();10601061min_addr = memblock_start_of_DRAM();1062max_addr = memblock_end_of_DRAM() - SZ_256;10631064allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1065min_addr, max_addr,1066NUMA_NO_NODE);10671068ASSERT_NE(allocated_ptr, NULL);1069assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);10701071ASSERT_EQ(rgn->size, size);1072ASSERT_EQ(rgn->base, memblock_start_of_DRAM());10731074ASSERT_EQ(memblock.reserved.cnt, 1);1075ASSERT_EQ(memblock.reserved.total_size, size);10761077test_pass_pop();10781079return 0;1080}10811082/* Test case wrappers for range tests */1083static int alloc_nid_simple_check(void)1084{1085test_print("\tRunning %s...\n", __func__);1086memblock_set_bottom_up(false);1087alloc_nid_top_down_simple_check();1088memblock_set_bottom_up(true);1089alloc_nid_bottom_up_simple_check();10901091return 0;1092}10931094static int alloc_nid_misaligned_check(void)1095{1096test_print("\tRunning %s...\n", __func__);1097memblock_set_bottom_up(false);1098alloc_nid_top_down_end_misaligned_check();1099memblock_set_bottom_up(true);1100alloc_nid_bottom_up_start_misaligned_check();11011102return 0;1103}11041105static int alloc_nid_narrow_range_check(void)1106{1107test_print("\tRunning %s...\n", __func__);1108memblock_set_bottom_up(false);1109alloc_nid_top_down_narrow_range_check();1110memblock_set_bottom_up(true);1111alloc_nid_bottom_up_narrow_range_check();11121113return 0;1114}11151116static int alloc_nid_reserved_with_space_check(void)1117{1118test_print("\tRunning %s...\n", __func__);1119memblock_set_bottom_up(false);1120alloc_nid_top_down_reserved_with_space_check();1121memblock_set_bottom_up(true);1122alloc_nid_bottom_up_reserved_with_space_check();11231124return 0;1125}11261127static int alloc_nid_reserved_no_space_check(void)1128{1129test_print("\tRunning %s...\n", __func__);1130memblock_set_bottom_up(false);1131alloc_nid_top_down_reserved_no_space_check();1132memblock_set_bottom_up(true);1133alloc_nid_bottom_up_reserved_no_space_check();11341135return 0;1136}11371138static int alloc_nid_cap_max_check(void)1139{1140test_print("\tRunning %s...\n", 
__func__);1141memblock_set_bottom_up(false);1142alloc_nid_top_down_cap_max_check();1143memblock_set_bottom_up(true);1144alloc_nid_bottom_up_cap_max_check();11451146return 0;1147}11481149static int alloc_nid_cap_min_check(void)1150{1151test_print("\tRunning %s...\n", __func__);1152memblock_set_bottom_up(false);1153alloc_nid_top_down_cap_min_check();1154memblock_set_bottom_up(true);1155alloc_nid_bottom_up_cap_min_check();11561157return 0;1158}11591160static int alloc_nid_min_reserved_check(void)1161{1162test_print("\tRunning %s...\n", __func__);1163run_top_down(alloc_nid_min_reserved_generic_check);1164run_bottom_up(alloc_nid_min_reserved_generic_check);11651166return 0;1167}11681169static int alloc_nid_max_reserved_check(void)1170{1171test_print("\tRunning %s...\n", __func__);1172run_top_down(alloc_nid_max_reserved_generic_check);1173run_bottom_up(alloc_nid_max_reserved_generic_check);11741175return 0;1176}11771178static int alloc_nid_exact_address_check(void)1179{1180test_print("\tRunning %s...\n", __func__);1181run_top_down(alloc_nid_exact_address_generic_check);1182run_bottom_up(alloc_nid_exact_address_generic_check);11831184return 0;1185}11861187static int alloc_nid_reserved_full_merge_check(void)1188{1189test_print("\tRunning %s...\n", __func__);1190run_top_down(alloc_nid_reserved_full_merge_generic_check);1191run_bottom_up(alloc_nid_reserved_full_merge_generic_check);11921193return 0;1194}11951196static int alloc_nid_reserved_all_check(void)1197{1198test_print("\tRunning %s...\n", __func__);1199run_top_down(alloc_nid_reserved_all_generic_check);1200run_bottom_up(alloc_nid_reserved_all_generic_check);12011202return 0;1203}12041205static int alloc_nid_low_max_check(void)1206{1207test_print("\tRunning %s...\n", __func__);1208run_top_down(alloc_nid_low_max_generic_check);1209run_bottom_up(alloc_nid_low_max_generic_check);12101211return 0;1212}12131214static int memblock_alloc_nid_range_checks(void)1215{1216test_print("Running %s range tests...\n",1217get_memblock_alloc_nid_name(alloc_nid_test_flags));12181219alloc_nid_simple_check();1220alloc_nid_misaligned_check();1221alloc_nid_narrow_range_check();1222alloc_nid_reserved_with_space_check();1223alloc_nid_reserved_no_space_check();1224alloc_nid_cap_max_check();1225alloc_nid_cap_min_check();12261227alloc_nid_min_reserved_check();1228alloc_nid_max_reserved_check();1229alloc_nid_exact_address_check();1230alloc_nid_reserved_full_merge_check();1231alloc_nid_reserved_all_check();1232alloc_nid_low_max_check();12331234return 0;1235}12361237/*1238* A test that tries to allocate a memory region in a specific NUMA node that1239* has enough memory to allocate a region of the requested size.1240* Expect to allocate an aligned region at the end of the requested node.1241*/1242static int alloc_nid_top_down_numa_simple_check(void)1243{1244int nid_req = 3;1245struct memblock_region *new_rgn = &memblock.reserved.regions[0];1246struct memblock_region *req_node = &memblock.memory.regions[nid_req];1247void *allocated_ptr = NULL;1248phys_addr_t size;1249phys_addr_t min_addr;1250phys_addr_t max_addr;12511252PREFIX_PUSH();1253setup_numa_memblock(node_fractions);12541255ASSERT_LE(SZ_4, req_node->size);1256size = req_node->size / SZ_4;1257min_addr = memblock_start_of_DRAM();1258max_addr = memblock_end_of_DRAM();12591260allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1261min_addr, max_addr, nid_req);12621263ASSERT_NE(allocated_ptr, NULL);1264assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);12651266ASSERT_EQ(new_rgn->size, 
size);1267ASSERT_EQ(new_rgn->base, region_end(req_node) - size);1268ASSERT_LE(req_node->base, new_rgn->base);12691270ASSERT_EQ(memblock.reserved.cnt, 1);1271ASSERT_EQ(memblock.reserved.total_size, size);12721273test_pass_pop();12741275return 0;1276}12771278/*1279* A test that tries to allocate a memory region in a specific NUMA node that1280* does not have enough memory to allocate a region of the requested size:1281*1282* | +-----+ +------------------+ |1283* | | req | | expected | |1284* +---+-----+----------+------------------+-----+1285*1286* | +---------+ |1287* | | rgn | |1288* +-----------------------------+---------+-----+1289*1290* Expect to allocate an aligned region at the end of the last node that has1291* enough memory (in this case, nid = 6) after falling back to NUMA_NO_NODE.1292*/1293static int alloc_nid_top_down_numa_small_node_check(void)1294{1295int nid_req = 1;1296int nid_exp = 6;1297struct memblock_region *new_rgn = &memblock.reserved.regions[0];1298struct memblock_region *req_node = &memblock.memory.regions[nid_req];1299struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];1300void *allocated_ptr = NULL;1301phys_addr_t size;1302phys_addr_t min_addr;1303phys_addr_t max_addr;13041305PREFIX_PUSH();1306setup_numa_memblock(node_fractions);13071308size = SZ_2 * req_node->size;1309min_addr = memblock_start_of_DRAM();1310max_addr = memblock_end_of_DRAM();13111312allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1313min_addr, max_addr, nid_req);13141315ASSERT_NE(allocated_ptr, NULL);1316assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);13171318ASSERT_EQ(new_rgn->size, size);1319ASSERT_EQ(new_rgn->base, region_end(exp_node) - size);1320ASSERT_LE(exp_node->base, new_rgn->base);13211322ASSERT_EQ(memblock.reserved.cnt, 1);1323ASSERT_EQ(memblock.reserved.total_size, size);13241325test_pass_pop();13261327return 0;1328}13291330/*1331* A test that tries to allocate a memory region in a specific NUMA node that1332* is fully reserved:1333*1334* | +---------+ +------------------+ |1335* | |requested| | expected | |1336* +--------------+---------+------------+------------------+-----+1337*1338* | +---------+ +---------+ |1339* | | reserved| | new | |1340* +--------------+---------+---------------------+---------+-----+1341*1342* Expect to allocate an aligned region at the end of the last node that is1343* large enough and has enough unreserved memory (in this case, nid = 6) after1344* falling back to NUMA_NO_NODE. 
The region count and total size get updated.1345*/1346static int alloc_nid_top_down_numa_node_reserved_check(void)1347{1348int nid_req = 2;1349int nid_exp = 6;1350struct memblock_region *new_rgn = &memblock.reserved.regions[1];1351struct memblock_region *req_node = &memblock.memory.regions[nid_req];1352struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];1353void *allocated_ptr = NULL;1354phys_addr_t size;1355phys_addr_t min_addr;1356phys_addr_t max_addr;13571358PREFIX_PUSH();1359setup_numa_memblock(node_fractions);13601361size = req_node->size;1362min_addr = memblock_start_of_DRAM();1363max_addr = memblock_end_of_DRAM();13641365memblock_reserve(req_node->base, req_node->size);1366allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1367min_addr, max_addr, nid_req);13681369ASSERT_NE(allocated_ptr, NULL);1370assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);13711372ASSERT_EQ(new_rgn->size, size);1373ASSERT_EQ(new_rgn->base, region_end(exp_node) - size);1374ASSERT_LE(exp_node->base, new_rgn->base);13751376ASSERT_EQ(memblock.reserved.cnt, 2);1377ASSERT_EQ(memblock.reserved.total_size, size + req_node->size);13781379test_pass_pop();13801381return 0;1382}13831384/*1385* A test that tries to allocate a memory region in a specific NUMA node that1386* is partially reserved but has enough memory for the allocated region:1387*1388* | +---------------------------------------+ |1389* | | requested | |1390* +-----------+---------------------------------------+----------+1391*1392* | +------------------+ +-----+ |1393* | | reserved | | new | |1394* +-----------+------------------+--------------+-----+----------+1395*1396* Expect to allocate an aligned region at the end of the requested node. The1397* region count and total size get updated.1398*/1399static int alloc_nid_top_down_numa_part_reserved_check(void)1400{1401int nid_req = 4;1402struct memblock_region *new_rgn = &memblock.reserved.regions[1];1403struct memblock_region *req_node = &memblock.memory.regions[nid_req];1404void *allocated_ptr = NULL;1405struct region r1;1406phys_addr_t size;1407phys_addr_t min_addr;1408phys_addr_t max_addr;14091410PREFIX_PUSH();1411setup_numa_memblock(node_fractions);14121413ASSERT_LE(SZ_8, req_node->size);1414r1.base = req_node->base;1415r1.size = req_node->size / SZ_2;1416size = r1.size / SZ_4;1417min_addr = memblock_start_of_DRAM();1418max_addr = memblock_end_of_DRAM();14191420memblock_reserve(r1.base, r1.size);1421allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1422min_addr, max_addr, nid_req);14231424ASSERT_NE(allocated_ptr, NULL);1425assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);14261427ASSERT_EQ(new_rgn->size, size);1428ASSERT_EQ(new_rgn->base, region_end(req_node) - size);1429ASSERT_LE(req_node->base, new_rgn->base);14301431ASSERT_EQ(memblock.reserved.cnt, 2);1432ASSERT_EQ(memblock.reserved.total_size, size + r1.size);14331434test_pass_pop();14351436return 0;1437}14381439/*1440* A test that tries to allocate a memory region in a specific NUMA node that1441* is partially reserved and does not have enough contiguous memory for the1442* allocated region:1443*1444* | +-----------------------+ +----------------------|1445* | | requested | | expected |1446* +-----------+-----------------------+---------+----------------------+1447*1448* | +----------+ +-----------|1449* | | reserved | | new |1450* +-----------------+----------+---------------------------+-----------+1451*1452* Expect to allocate an aligned region at the end of the last node that 
is1453* large enough and has enough unreserved memory (in this case,1454* nid = NUMA_NODES - 1) after falling back to NUMA_NO_NODE. The region count1455* and total size get updated.1456*/1457static int alloc_nid_top_down_numa_part_reserved_fallback_check(void)1458{1459int nid_req = 4;1460int nid_exp = NUMA_NODES - 1;1461struct memblock_region *new_rgn = &memblock.reserved.regions[1];1462struct memblock_region *req_node = &memblock.memory.regions[nid_req];1463struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];1464void *allocated_ptr = NULL;1465struct region r1;1466phys_addr_t size;1467phys_addr_t min_addr;1468phys_addr_t max_addr;14691470PREFIX_PUSH();1471setup_numa_memblock(node_fractions);14721473ASSERT_LE(SZ_4, req_node->size);1474size = req_node->size / SZ_2;1475r1.base = req_node->base + (size / SZ_2);1476r1.size = size;14771478min_addr = memblock_start_of_DRAM();1479max_addr = memblock_end_of_DRAM();14801481memblock_reserve(r1.base, r1.size);1482allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1483min_addr, max_addr, nid_req);14841485ASSERT_NE(allocated_ptr, NULL);1486assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);14871488ASSERT_EQ(new_rgn->size, size);1489ASSERT_EQ(new_rgn->base, region_end(exp_node) - size);1490ASSERT_LE(exp_node->base, new_rgn->base);14911492ASSERT_EQ(memblock.reserved.cnt, 2);1493ASSERT_EQ(memblock.reserved.total_size, size + r1.size);14941495test_pass_pop();14961497return 0;1498}14991500/*1501* A test that tries to allocate a memory region that spans over the min_addr1502* and max_addr range and overlaps with two different nodes, where the first1503* node is the requested node:1504*1505* min_addr1506* | max_addr1507* | |1508* v v1509* | +-----------------------+-----------+ |1510* | | requested | node3 | |1511* +-----------+-----------------------+-----------+--------------+1512* + +1513* | +-----------+ |1514* | | rgn | |1515* +-----------------------+-----------+--------------------------+1516*1517* Expect to drop the lower limit and allocate a memory region that ends at1518* the end of the requested node.1519*/1520static int alloc_nid_top_down_numa_split_range_low_check(void)1521{1522int nid_req = 2;1523struct memblock_region *new_rgn = &memblock.reserved.regions[0];1524struct memblock_region *req_node = &memblock.memory.regions[nid_req];1525void *allocated_ptr = NULL;1526phys_addr_t size = SZ_512;1527phys_addr_t min_addr;1528phys_addr_t max_addr;1529phys_addr_t req_node_end;15301531PREFIX_PUSH();1532setup_numa_memblock(node_fractions);15331534req_node_end = region_end(req_node);1535min_addr = req_node_end - SZ_256;1536max_addr = min_addr + size;15371538allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1539min_addr, max_addr, nid_req);15401541ASSERT_NE(allocated_ptr, NULL);1542assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);15431544ASSERT_EQ(new_rgn->size, size);1545ASSERT_EQ(new_rgn->base, req_node_end - size);1546ASSERT_LE(req_node->base, new_rgn->base);15471548ASSERT_EQ(memblock.reserved.cnt, 1);1549ASSERT_EQ(memblock.reserved.total_size, size);15501551test_pass_pop();15521553return 0;1554}15551556/*1557* A test that tries to allocate a memory region that spans over the min_addr1558* and max_addr range and overlaps with two different nodes, where the second1559* node is the requested node:1560*1561* min_addr1562* | max_addr1563* | |1564* v v1565* | +--------------------------+---------+ |1566* | | expected |requested| |1567* 
+------+--------------------------+---------+----------------+1568* + +1569* | +---------+ |1570* | | rgn | |1571* +-----------------------+---------+--------------------------+1572*1573* Expect to drop the lower limit and allocate a memory region that1574* ends at the end of the first node that overlaps with the range.1575*/1576static int alloc_nid_top_down_numa_split_range_high_check(void)1577{1578int nid_req = 3;1579int nid_exp = nid_req - 1;1580struct memblock_region *new_rgn = &memblock.reserved.regions[0];1581struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];1582void *allocated_ptr = NULL;1583phys_addr_t size = SZ_512;1584phys_addr_t min_addr;1585phys_addr_t max_addr;1586phys_addr_t exp_node_end;15871588PREFIX_PUSH();1589setup_numa_memblock(node_fractions);15901591exp_node_end = region_end(exp_node);1592min_addr = exp_node_end - SZ_256;1593max_addr = min_addr + size;15941595allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1596min_addr, max_addr, nid_req);15971598ASSERT_NE(allocated_ptr, NULL);1599assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);16001601ASSERT_EQ(new_rgn->size, size);1602ASSERT_EQ(new_rgn->base, exp_node_end - size);1603ASSERT_LE(exp_node->base, new_rgn->base);16041605ASSERT_EQ(memblock.reserved.cnt, 1);1606ASSERT_EQ(memblock.reserved.total_size, size);16071608test_pass_pop();16091610return 0;1611}16121613/*1614* A test that tries to allocate a memory region that spans over the min_addr1615* and max_addr range and overlaps with two different nodes, where the requested1616* node ends before min_addr:1617*1618* min_addr1619* | max_addr1620* | |1621* v v1622* | +---------------+ +-------------+---------+ |1623* | | requested | | node1 | node2 | |1624* +----+---------------+--------+-------------+---------+----------+1625* + +1626* | +---------+ |1627* | | rgn | |1628* +----------+---------+-------------------------------------------+1629*1630* Expect to drop the lower limit and allocate a memory region that ends at1631* the end of the requested node.1632*/1633static int alloc_nid_top_down_numa_no_overlap_split_check(void)1634{1635int nid_req = 2;1636struct memblock_region *new_rgn = &memblock.reserved.regions[0];1637struct memblock_region *req_node = &memblock.memory.regions[nid_req];1638struct memblock_region *node2 = &memblock.memory.regions[6];1639void *allocated_ptr = NULL;1640phys_addr_t size;1641phys_addr_t min_addr;1642phys_addr_t max_addr;16431644PREFIX_PUSH();1645setup_numa_memblock(node_fractions);16461647size = SZ_512;1648min_addr = node2->base - SZ_256;1649max_addr = min_addr + size;16501651allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1652min_addr, max_addr, nid_req);16531654ASSERT_NE(allocated_ptr, NULL);1655assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);16561657ASSERT_EQ(new_rgn->size, size);1658ASSERT_EQ(new_rgn->base, region_end(req_node) - size);1659ASSERT_LE(req_node->base, new_rgn->base);16601661ASSERT_EQ(memblock.reserved.cnt, 1);1662ASSERT_EQ(memblock.reserved.total_size, size);16631664test_pass_pop();16651666return 0;1667}16681669/*1670* A test that tries to allocate memory within min_addr and max_add range when1671* the requested node and the range do not overlap, and requested node ends1672* before min_addr. The range overlaps with multiple nodes along node1673* boundaries:1674*1675* min_addr1676* | max_addr1677* | |1678* v v1679* |-----------+ +----------+----...----+----------+ |1680* | requested | | min node | ... 
| max node | |1681* +-----------+-----------+----------+----...----+----------+------+1682* + +1683* | +-----+ |1684* | | rgn | |1685* +---------------------------------------------------+-----+------+1686*1687* Expect to allocate a memory region at the end of the final node in1688* the range after falling back to NUMA_NO_NODE.1689*/1690static int alloc_nid_top_down_numa_no_overlap_low_check(void)1691{1692int nid_req = 0;1693struct memblock_region *new_rgn = &memblock.reserved.regions[0];1694struct memblock_region *min_node = &memblock.memory.regions[2];1695struct memblock_region *max_node = &memblock.memory.regions[5];1696void *allocated_ptr = NULL;1697phys_addr_t size = SZ_64;1698phys_addr_t max_addr;1699phys_addr_t min_addr;17001701PREFIX_PUSH();1702setup_numa_memblock(node_fractions);17031704min_addr = min_node->base;1705max_addr = region_end(max_node);17061707allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1708min_addr, max_addr, nid_req);17091710ASSERT_NE(allocated_ptr, NULL);1711assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);17121713ASSERT_EQ(new_rgn->size, size);1714ASSERT_EQ(new_rgn->base, max_addr - size);1715ASSERT_LE(max_node->base, new_rgn->base);17161717ASSERT_EQ(memblock.reserved.cnt, 1);1718ASSERT_EQ(memblock.reserved.total_size, size);17191720test_pass_pop();17211722return 0;1723}17241725/*1726* A test that tries to allocate memory within min_addr and max_add range when1727* the requested node and the range do not overlap, and requested node starts1728* after max_addr. The range overlaps with multiple nodes along node1729* boundaries:1730*1731* min_addr1732* | max_addr1733* | |1734* v v1735* | +----------+----...----+----------+ +-----------+ |1736* | | min node | ... | max node | | requested | |1737* +-----+----------+----...----+----------+--------+-----------+---+1738* + +1739* | +-----+ |1740* | | rgn | |1741* +---------------------------------+-----+------------------------+1742*1743* Expect to allocate a memory region at the end of the final node in1744* the range after falling back to NUMA_NO_NODE.1745*/1746static int alloc_nid_top_down_numa_no_overlap_high_check(void)1747{1748int nid_req = 7;1749struct memblock_region *new_rgn = &memblock.reserved.regions[0];1750struct memblock_region *min_node = &memblock.memory.regions[2];1751struct memblock_region *max_node = &memblock.memory.regions[5];1752void *allocated_ptr = NULL;1753phys_addr_t size = SZ_64;1754phys_addr_t max_addr;1755phys_addr_t min_addr;17561757PREFIX_PUSH();1758setup_numa_memblock(node_fractions);17591760min_addr = min_node->base;1761max_addr = region_end(max_node);17621763allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1764min_addr, max_addr, nid_req);17651766ASSERT_NE(allocated_ptr, NULL);1767assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);17681769ASSERT_EQ(new_rgn->size, size);1770ASSERT_EQ(new_rgn->base, max_addr - size);1771ASSERT_LE(max_node->base, new_rgn->base);17721773ASSERT_EQ(memblock.reserved.cnt, 1);1774ASSERT_EQ(memblock.reserved.total_size, size);17751776test_pass_pop();17771778return 0;1779}17801781/*1782* A test that tries to allocate a memory region in a specific NUMA node that1783* has enough memory to allocate a region of the requested size.1784* Expect to allocate an aligned region at the beginning of the requested node.1785*/1786static int alloc_nid_bottom_up_numa_simple_check(void)1787{1788int nid_req = 3;1789struct memblock_region *new_rgn = &memblock.reserved.regions[0];1790struct memblock_region *req_node = 
&memblock.memory.regions[nid_req];1791void *allocated_ptr = NULL;1792phys_addr_t size;1793phys_addr_t min_addr;1794phys_addr_t max_addr;17951796PREFIX_PUSH();1797setup_numa_memblock(node_fractions);17981799ASSERT_LE(SZ_4, req_node->size);1800size = req_node->size / SZ_4;1801min_addr = memblock_start_of_DRAM();1802max_addr = memblock_end_of_DRAM();18031804allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1805min_addr, max_addr, nid_req);18061807ASSERT_NE(allocated_ptr, NULL);1808assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);18091810ASSERT_EQ(new_rgn->size, size);1811ASSERT_EQ(new_rgn->base, req_node->base);1812ASSERT_LE(region_end(new_rgn), region_end(req_node));18131814ASSERT_EQ(memblock.reserved.cnt, 1);1815ASSERT_EQ(memblock.reserved.total_size, size);18161817test_pass_pop();18181819return 0;1820}18211822/*1823* A test that tries to allocate a memory region in a specific NUMA node that1824* does not have enough memory to allocate a region of the requested size:1825*1826* |----------------------+-----+ |1827* | expected | req | |1828* +----------------------+-----+----------------+1829*1830* |---------+ |1831* | rgn | |1832* +---------+-----------------------------------+1833*1834* Expect to allocate an aligned region at the beginning of the first node that1835* has enough memory (in this case, nid = 0) after falling back to NUMA_NO_NODE.1836*/1837static int alloc_nid_bottom_up_numa_small_node_check(void)1838{1839int nid_req = 1;1840int nid_exp = 0;1841struct memblock_region *new_rgn = &memblock.reserved.regions[0];1842struct memblock_region *req_node = &memblock.memory.regions[nid_req];1843struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];1844void *allocated_ptr = NULL;1845phys_addr_t size;1846phys_addr_t min_addr;1847phys_addr_t max_addr;18481849PREFIX_PUSH();1850setup_numa_memblock(node_fractions);18511852size = SZ_2 * req_node->size;1853min_addr = memblock_start_of_DRAM();1854max_addr = memblock_end_of_DRAM();18551856allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1857min_addr, max_addr, nid_req);18581859ASSERT_NE(allocated_ptr, NULL);1860assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);18611862ASSERT_EQ(new_rgn->size, size);1863ASSERT_EQ(new_rgn->base, exp_node->base);1864ASSERT_LE(region_end(new_rgn), region_end(exp_node));18651866ASSERT_EQ(memblock.reserved.cnt, 1);1867ASSERT_EQ(memblock.reserved.total_size, size);18681869test_pass_pop();18701871return 0;1872}18731874/*1875* A test that tries to allocate a memory region in a specific NUMA node that1876* is fully reserved:1877*1878* |----------------------+ +-----------+ |1879* | expected | | requested | |1880* +----------------------+-----+-----------+--------------------+1881*1882* |-----------+ +-----------+ |1883* | new | | reserved | |1884* +-----------+----------------+-----------+--------------------+1885*1886* Expect to allocate an aligned region at the beginning of the first node that1887* is large enough and has enough unreserved memory (in this case, nid = 0)1888* after falling back to NUMA_NO_NODE. 
The region count and total size get1889* updated.1890*/1891static int alloc_nid_bottom_up_numa_node_reserved_check(void)1892{1893int nid_req = 2;1894int nid_exp = 0;1895struct memblock_region *new_rgn = &memblock.reserved.regions[0];1896struct memblock_region *req_node = &memblock.memory.regions[nid_req];1897struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];1898void *allocated_ptr = NULL;1899phys_addr_t size;1900phys_addr_t min_addr;1901phys_addr_t max_addr;19021903PREFIX_PUSH();1904setup_numa_memblock(node_fractions);19051906size = req_node->size;1907min_addr = memblock_start_of_DRAM();1908max_addr = memblock_end_of_DRAM();19091910memblock_reserve(req_node->base, req_node->size);1911allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1912min_addr, max_addr, nid_req);19131914ASSERT_NE(allocated_ptr, NULL);1915assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);19161917ASSERT_EQ(new_rgn->size, size);1918ASSERT_EQ(new_rgn->base, exp_node->base);1919ASSERT_LE(region_end(new_rgn), region_end(exp_node));19201921ASSERT_EQ(memblock.reserved.cnt, 2);1922ASSERT_EQ(memblock.reserved.total_size, size + req_node->size);19231924test_pass_pop();19251926return 0;1927}19281929/*1930* A test that tries to allocate a memory region in a specific NUMA node that1931* is partially reserved but has enough memory for the allocated region:1932*1933* | +---------------------------------------+ |1934* | | requested | |1935* +-----------+---------------------------------------+---------+1936*1937* | +------------------+-----+ |1938* | | reserved | new | |1939* +-----------+------------------+-----+------------------------+1940*1941* Expect to allocate an aligned region in the requested node that merges with1942* the existing reserved region. 
The total size gets updated.1943*/1944static int alloc_nid_bottom_up_numa_part_reserved_check(void)1945{1946int nid_req = 4;1947struct memblock_region *new_rgn = &memblock.reserved.regions[0];1948struct memblock_region *req_node = &memblock.memory.regions[nid_req];1949void *allocated_ptr = NULL;1950struct region r1;1951phys_addr_t size;1952phys_addr_t min_addr;1953phys_addr_t max_addr;1954phys_addr_t total_size;19551956PREFIX_PUSH();1957setup_numa_memblock(node_fractions);19581959ASSERT_LE(SZ_8, req_node->size);1960r1.base = req_node->base;1961r1.size = req_node->size / SZ_2;1962size = r1.size / SZ_4;1963min_addr = memblock_start_of_DRAM();1964max_addr = memblock_end_of_DRAM();1965total_size = size + r1.size;19661967memblock_reserve(r1.base, r1.size);1968allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,1969min_addr, max_addr, nid_req);19701971ASSERT_NE(allocated_ptr, NULL);1972assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);19731974ASSERT_EQ(new_rgn->size, total_size);1975ASSERT_EQ(new_rgn->base, req_node->base);1976ASSERT_LE(region_end(new_rgn), region_end(req_node));19771978ASSERT_EQ(memblock.reserved.cnt, 1);1979ASSERT_EQ(memblock.reserved.total_size, total_size);19801981test_pass_pop();19821983return 0;1984}19851986/*1987* A test that tries to allocate a memory region in a specific NUMA node that1988* is partially reserved and does not have enough contiguous memory for the1989* allocated region:1990*1991* |----------------------+ +-----------------------+ |1992* | expected | | requested | |1993* +----------------------+-------+-----------------------+---------+1994*1995* |-----------+ +----------+ |1996* | new | | reserved | |1997* +-----------+------------------------+----------+----------------+1998*1999* Expect to allocate an aligned region at the beginning of the first2000* node that is large enough and has enough unreserved memory (in this case,2001* nid = 0) after falling back to NUMA_NO_NODE. 
The region count and total size2002* get updated.2003*/2004static int alloc_nid_bottom_up_numa_part_reserved_fallback_check(void)2005{2006int nid_req = 4;2007int nid_exp = 0;2008struct memblock_region *new_rgn = &memblock.reserved.regions[0];2009struct memblock_region *req_node = &memblock.memory.regions[nid_req];2010struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];2011void *allocated_ptr = NULL;2012struct region r1;2013phys_addr_t size;2014phys_addr_t min_addr;2015phys_addr_t max_addr;20162017PREFIX_PUSH();2018setup_numa_memblock(node_fractions);20192020ASSERT_LE(SZ_4, req_node->size);2021size = req_node->size / SZ_2;2022r1.base = req_node->base + (size / SZ_2);2023r1.size = size;20242025min_addr = memblock_start_of_DRAM();2026max_addr = memblock_end_of_DRAM();20272028memblock_reserve(r1.base, r1.size);2029allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,2030min_addr, max_addr, nid_req);20312032ASSERT_NE(allocated_ptr, NULL);2033assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);20342035ASSERT_EQ(new_rgn->size, size);2036ASSERT_EQ(new_rgn->base, exp_node->base);2037ASSERT_LE(region_end(new_rgn), region_end(exp_node));20382039ASSERT_EQ(memblock.reserved.cnt, 2);2040ASSERT_EQ(memblock.reserved.total_size, size + r1.size);20412042test_pass_pop();20432044return 0;2045}20462047/*2048* A test that tries to allocate a memory region that spans over the min_addr2049* and max_addr range and overlaps with two different nodes, where the first2050* node is the requested node:2051*2052* min_addr2053* | max_addr2054* | |2055* v v2056* | +-----------------------+-----------+ |2057* | | requested | node3 | |2058* +-----------+-----------------------+-----------+--------------+2059* + +2060* | +-----------+ |2061* | | rgn | |2062* +-----------+-----------+--------------------------------------+2063*2064* Expect to drop the lower limit and allocate a memory region at the beginning2065* of the requested node.2066*/2067static int alloc_nid_bottom_up_numa_split_range_low_check(void)2068{2069int nid_req = 2;2070struct memblock_region *new_rgn = &memblock.reserved.regions[0];2071struct memblock_region *req_node = &memblock.memory.regions[nid_req];2072void *allocated_ptr = NULL;2073phys_addr_t size = SZ_512;2074phys_addr_t min_addr;2075phys_addr_t max_addr;2076phys_addr_t req_node_end;20772078PREFIX_PUSH();2079setup_numa_memblock(node_fractions);20802081req_node_end = region_end(req_node);2082min_addr = req_node_end - SZ_256;2083max_addr = min_addr + size;20842085allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,2086min_addr, max_addr, nid_req);20872088ASSERT_NE(allocated_ptr, NULL);2089assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);20902091ASSERT_EQ(new_rgn->size, size);2092ASSERT_EQ(new_rgn->base, req_node->base);2093ASSERT_LE(region_end(new_rgn), req_node_end);20942095ASSERT_EQ(memblock.reserved.cnt, 1);2096ASSERT_EQ(memblock.reserved.total_size, size);20972098test_pass_pop();20992100return 0;2101}21022103/*2104* A test that tries to allocate a memory region that spans over the min_addr2105* and max_addr range and overlaps with two different nodes, where the second2106* node is the requested node:2107*2108* min_addr2109* | max_addr2110* | |2111* v v2112* |------------------+ +----------------------+---------+ |2113* | expected | | previous |requested| |2114* +------------------+--------+----------------------+---------+------+2115* + +2116* |---------+ |2117* | rgn | |2118* 
 *  +---------+---------------------------------------------------------+
 *
 * Expect to drop the lower limit and allocate a memory region at the beginning
 * of the first node that has enough memory.
 */
static int alloc_nid_bottom_up_numa_split_range_high_check(void)
{
	int nid_req = 3;
	int nid_exp = 0;
	struct memblock_region *new_rgn = &memblock.reserved.regions[0];
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];
	void *allocated_ptr = NULL;
	phys_addr_t size = SZ_512;
	phys_addr_t min_addr;
	phys_addr_t max_addr;
	phys_addr_t exp_node_end;

	PREFIX_PUSH();
	setup_numa_memblock(node_fractions);

	exp_node_end = region_end(req_node);
	min_addr = req_node->base - SZ_256;
	max_addr = min_addr + size;

	allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,
					       min_addr, max_addr, nid_req);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, exp_node->base);
	ASSERT_LE(region_end(new_rgn), exp_node_end);

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate a memory region that spans over the min_addr
 * and max_addr range and overlaps with two different nodes, where the
 * requested node ends before min_addr:
 *
 *                                         min_addr
 *                                         |         max_addr
 *                                         |         |
 *                                         v         v
 *  |    +---------------+        +-------------+---------+         |
 *  |    |   requested   |        |    node1    |  node2  |         |
 *  +----+---------------+--------+-------------+---------+---------+
 *                                         +         +
 *  |    +---------+                                                |
 *  |    |   rgn   |                                                |
 *  +----+---------+------------------------------------------------+
 *
 * Expect to drop the lower limit and allocate a memory region that starts at
 * the beginning of the requested node.
 */
static int alloc_nid_bottom_up_numa_no_overlap_split_check(void)
{
	int nid_req = 2;
	struct memblock_region *new_rgn = &memblock.reserved.regions[0];
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	struct memblock_region *node2 = &memblock.memory.regions[6];
	void *allocated_ptr = NULL;
	phys_addr_t size;
	phys_addr_t min_addr;
	phys_addr_t max_addr;

	PREFIX_PUSH();
	setup_numa_memblock(node_fractions);

	size = SZ_512;
	min_addr = node2->base - SZ_256;
	max_addr = min_addr + size;

	allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,
					       min_addr, max_addr, nid_req);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, req_node->base);
	ASSERT_LE(region_end(new_rgn), region_end(req_node));

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory within min_addr and max_addr range when
 * the requested node and the range do not overlap, and the requested node ends
 * before min_addr. The range overlaps with multiple nodes along node
 * boundaries:
 *
 *                          min_addr
 *                          |                                 max_addr
 *                          |                                 |
 *                          v                                 v
 *  |-----------+           +----------+----...----+----------+      |
 *  | requested |           | min node |    ...    | max node |      |
 *  +-----------+-----------+----------+----...----+----------+------+
 *                          +                                 +
 *  |                       +-----+                                  |
 *  |                       | rgn |                                  |
 *  +-----------------------+-----+----------------------------------+
 *
 * Expect to allocate a memory region at the beginning of the first node
 * in the range after falling back to NUMA_NO_NODE.
 */
static int alloc_nid_bottom_up_numa_no_overlap_low_check(void)
{
	int nid_req = 0;
	struct memblock_region *new_rgn = &memblock.reserved.regions[0];
	struct memblock_region *min_node = &memblock.memory.regions[2];
	struct memblock_region *max_node = &memblock.memory.regions[5];
	void *allocated_ptr = NULL;
	phys_addr_t size = SZ_64;
	phys_addr_t max_addr;
	phys_addr_t min_addr;

	PREFIX_PUSH();
	setup_numa_memblock(node_fractions);

	min_addr = min_node->base;
	max_addr = region_end(max_node);

	allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,
					       min_addr, max_addr, nid_req);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, min_addr);
	ASSERT_LE(region_end(new_rgn), region_end(min_node));

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory within min_addr and max_addr range when
 * the requested node and the range do not overlap, and the requested node
 * starts after max_addr. The range overlaps with multiple nodes along node
 * boundaries:
 *
 *        min_addr
 *        |                                 max_addr
 *        |                                 |
 *        v                                 v
 *  |     +----------+----...----+----------+         +---------+   |
 *  |     | min node |    ...    | max node |         |requested|   |
 *  +-----+----------+----...----+----------+---------+---------+---+
 *        +                                 +
 *  |     +-----+                                                   |
 *  |     | rgn |                                                   |
 *  +-----+-----+---------------------------------------------------+
 *
 * Expect to allocate a memory region at the beginning of the first node
 * in the range after falling back to NUMA_NO_NODE.
 */
static int alloc_nid_bottom_up_numa_no_overlap_high_check(void)
{
	int nid_req = 7;
	struct memblock_region *new_rgn = &memblock.reserved.regions[0];
	struct memblock_region *min_node = &memblock.memory.regions[2];
	struct memblock_region *max_node = &memblock.memory.regions[5];
	void *allocated_ptr = NULL;
	phys_addr_t size = SZ_64;
	phys_addr_t max_addr;
	phys_addr_t min_addr;

	PREFIX_PUSH();
	setup_numa_memblock(node_fractions);

	min_addr = min_node->base;
	max_addr = region_end(max_node);

	allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,
					       min_addr, max_addr, nid_req);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, min_addr);
	ASSERT_LE(region_end(new_rgn), region_end(min_node));

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate a memory region in a specific NUMA node that
 * does not have enough memory to allocate a region of the requested size.
 * Additionally, none of the nodes have enough memory to allocate the region:
 *
 *                +-----------------------------------+
 *                |                new                |
 *                +-----------------------------------+
 *  |-------+-------+-------+-------+-------+-------+-------+-------|
 *  | node0 | node1 | node2 | node3 | node4 | node5 | node6 | node7 |
 *  +-------+-------+-------+-------+-------+-------+-------+-------+
 *
 * Expect no allocation to happen.
 */
static int alloc_nid_numa_large_region_generic_check(void)
{
	int nid_req = 3;
	void *allocated_ptr = NULL;
	phys_addr_t size = MEM_SIZE / SZ_2;
	phys_addr_t min_addr;
	phys_addr_t max_addr;

	PREFIX_PUSH();
	setup_numa_memblock(node_fractions);

	min_addr = memblock_start_of_DRAM();
	max_addr = memblock_end_of_DRAM();

	allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,
					       min_addr, max_addr, nid_req);
	ASSERT_EQ(allocated_ptr, NULL);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory within min_addr and max_addr range when
 * there are two reserved regions at the borders. The requested node starts at
 * min_addr and ends at max_addr and is the same size as the region to be
 * allocated:
 *
 *                  min_addr
 *                  |                       max_addr
 *                  |                       |
 *                  v                       v
 *  |     +-----------+-----------------------+-----------------------|
 *  |     |   node5   |       requested       |         node7         |
 *  +-----+-----------+-----------------------+-----------------------+
 *                  +                       +
 *  |            +----+-----------------------+----+                  |
 *  |            | r2 |          new          | r1 |                  |
 *  +------------+----+-----------------------+----+------------------+
 *
 * Expect to merge all of the regions into one. The region counter and total
 * size fields get updated.
 */
static int alloc_nid_numa_reserved_full_merge_generic_check(void)
{
	int nid_req = 6;
	int nid_next = nid_req + 1;
	struct memblock_region *new_rgn = &memblock.reserved.regions[0];
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	struct memblock_region *next_node = &memblock.memory.regions[nid_next];
	void *allocated_ptr = NULL;
	struct region r1, r2;
	phys_addr_t size = req_node->size;
	phys_addr_t total_size;
	phys_addr_t max_addr;
	phys_addr_t min_addr;

	PREFIX_PUSH();
	setup_numa_memblock(node_fractions);

	/*
	 * Reserve r1 at the very beginning of the node that follows the
	 * requested one, and r2 in the SZ_128 bytes immediately below the
	 * requested node, so the requested node itself is the only free gap
	 * between the two reserved regions.
	 */
	r1.base = next_node->base;
	r1.size = SZ_128;

	r2.size = SZ_128;
	r2.base = r1.base - (size + r2.size);

	total_size = r1.size + r2.size + size;
	min_addr = r2.base + r2.size;
	max_addr = r1.base;

	memblock_reserve(r1.base, r1.size);
	memblock_reserve(r2.base, r2.size);

	allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,
					       min_addr, max_addr, nid_req);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);

	ASSERT_EQ(new_rgn->size, total_size);
	ASSERT_EQ(new_rgn->base, r2.base);

	ASSERT_LE(new_rgn->base, req_node->base);
	ASSERT_LE(region_end(req_node), region_end(new_rgn));

	ASSERT_EQ(memblock.reserved.cnt, 1);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}

/*
 * A test that tries to allocate memory within min_addr and max_addr range,
 * where the total range can fit the region, but it is split between two nodes
 * and everything else is reserved.
 * Additionally, nid is set to NUMA_NO_NODE instead of requesting a specific
 * node:
 *
 *                       +-----------+
 *                       |    new    |
 *                       +-----------+
 *  |      +---------------------+-----------|
 *  |      |      prev node      | next node |
 *  +------+---------------------+-----------+
 *                       +           +
 *  |----------------------+           +-----|
 *  |          r1          |           | r2  |
 *  +----------------------+-----------+-----+
 *                       ^           ^
 *                       |           |
 *                       |           max_addr
 *                       |
 *                       min_addr
 *
 * Expect no allocation to happen.
 */
static int alloc_nid_numa_split_all_reserved_generic_check(void)
{
	void *allocated_ptr = NULL;
	struct memblock_region *next_node = &memblock.memory.regions[7];
	struct region r1, r2;
	phys_addr_t size = SZ_256;
	phys_addr_t max_addr;
	phys_addr_t min_addr;

	PREFIX_PUSH();
	setup_numa_memblock(node_fractions);

	/*
	 * Reserve everything except a size-sized gap that straddles the
	 * boundary between the last two nodes.
	 */
	r2.base = next_node->base + SZ_128;
	r2.size = memblock_end_of_DRAM() - r2.base;

	r1.size = MEM_SIZE - (r2.size + size);
	r1.base = memblock_start_of_DRAM();

	min_addr = r1.base + r1.size;
	max_addr = r2.base;

	memblock_reserve(r1.base, r1.size);
	memblock_reserve(r2.base, r2.size);

	allocated_ptr = run_memblock_alloc_nid(size, SMP_CACHE_BYTES,
					       min_addr, max_addr,
					       NUMA_NO_NODE);

	ASSERT_EQ(allocated_ptr, NULL);

	test_pass_pop();

	return 0;
}

/*
 * A simple test that tries to allocate a memory region via
 * memblock_alloc_node() on a NUMA node with id `nid`. Expect the new region
 * to have the correct NUMA node set.
 */
static int alloc_node_on_correct_nid(void)
{
	int nid_req = 2;
	void *allocated_ptr = NULL;
#ifdef CONFIG_NUMA
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
#endif
	phys_addr_t size = SZ_512;

	PREFIX_PUSH();
	setup_numa_memblock(node_fractions);

	allocated_ptr = memblock_alloc_node(size, SMP_CACHE_BYTES, nid_req);

	ASSERT_NE(allocated_ptr, NULL);
#ifdef CONFIG_NUMA
	/* the nid member of struct memblock_region is only present with CONFIG_NUMA */
	ASSERT_EQ(nid_req, req_node->nid);
#endif

	test_pass_pop();

	return 0;
}

/* Test case wrappers for NUMA tests */
static int alloc_nid_numa_simple_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_nid_top_down_numa_simple_check();
	memblock_set_bottom_up(true);
	alloc_nid_bottom_up_numa_simple_check();

	return 0;
}

static int alloc_nid_numa_small_node_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_nid_top_down_numa_small_node_check();
	memblock_set_bottom_up(true);
	alloc_nid_bottom_up_numa_small_node_check();

	return 0;
}

static int alloc_nid_numa_node_reserved_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_nid_top_down_numa_node_reserved_check();
	memblock_set_bottom_up(true);
	alloc_nid_bottom_up_numa_node_reserved_check();

	return 0;
}

static int alloc_nid_numa_part_reserved_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_nid_top_down_numa_part_reserved_check();
	memblock_set_bottom_up(true);
	alloc_nid_bottom_up_numa_part_reserved_check();

	return 0;
}

static int alloc_nid_numa_part_reserved_fallback_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_nid_top_down_numa_part_reserved_fallback_check();
	memblock_set_bottom_up(true);
	alloc_nid_bottom_up_numa_part_reserved_fallback_check();

	return 0;
}

static int alloc_nid_numa_split_range_low_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_nid_top_down_numa_split_range_low_check();
	memblock_set_bottom_up(true);
	alloc_nid_bottom_up_numa_split_range_low_check();

	return 0;
}

static int alloc_nid_numa_split_range_high_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_nid_top_down_numa_split_range_high_check();
	memblock_set_bottom_up(true);
	alloc_nid_bottom_up_numa_split_range_high_check();

	return 0;
}

static int alloc_nid_numa_no_overlap_split_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_nid_top_down_numa_no_overlap_split_check();
	memblock_set_bottom_up(true);
	alloc_nid_bottom_up_numa_no_overlap_split_check();

	return 0;
}

static int alloc_nid_numa_no_overlap_low_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_nid_top_down_numa_no_overlap_low_check();
	memblock_set_bottom_up(true);
	alloc_nid_bottom_up_numa_no_overlap_low_check();

	return 0;
}

static int alloc_nid_numa_no_overlap_high_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	memblock_set_bottom_up(false);
	alloc_nid_top_down_numa_no_overlap_high_check();
	memblock_set_bottom_up(true);
	alloc_nid_bottom_up_numa_no_overlap_high_check();

	return 0;
}

static int alloc_nid_numa_large_region_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_nid_numa_large_region_generic_check);
	run_bottom_up(alloc_nid_numa_large_region_generic_check);

	return 0;
}

static int alloc_nid_numa_reserved_full_merge_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_nid_numa_reserved_full_merge_generic_check);
	run_bottom_up(alloc_nid_numa_reserved_full_merge_generic_check);

	return 0;
}

static int alloc_nid_numa_split_all_reserved_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_nid_numa_split_all_reserved_generic_check);
	run_bottom_up(alloc_nid_numa_split_all_reserved_generic_check);

	return 0;
}

static int alloc_node_numa_on_correct_nid(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_node_on_correct_nid);
	run_bottom_up(alloc_node_on_correct_nid);

	return 0;
}

int __memblock_alloc_nid_numa_checks(void)
{
	test_print("Running %s NUMA tests...\n",
		   get_memblock_alloc_nid_name(alloc_nid_test_flags));

	alloc_nid_numa_simple_check();
	alloc_nid_numa_small_node_check();
	alloc_nid_numa_node_reserved_check();
	alloc_nid_numa_part_reserved_check();
	alloc_nid_numa_part_reserved_fallback_check();
	alloc_nid_numa_split_range_low_check();
	alloc_nid_numa_split_range_high_check();

	alloc_nid_numa_no_overlap_split_check();
	alloc_nid_numa_no_overlap_low_check();
	alloc_nid_numa_no_overlap_high_check();
	alloc_nid_numa_large_region_check();
	alloc_nid_numa_reserved_full_merge_check();
	alloc_nid_numa_split_all_reserved_check();

	alloc_node_numa_on_correct_nid();

	return 0;
}

static int memblock_alloc_nid_checks_internal(int flags)
{
	alloc_nid_test_flags = flags;

	prefix_reset();
	prefix_push(get_memblock_alloc_nid_name(flags));

	reset_memblock_attributes();
	dummy_physical_memory_init();

	memblock_alloc_nid_range_checks();
	memblock_alloc_nid_numa_checks();

	dummy_physical_memory_cleanup();

	prefix_pop();

	return 0;
}

int memblock_alloc_nid_checks(void)
{
	memblock_alloc_nid_checks_internal(TEST_F_NONE);
	memblock_alloc_nid_checks_internal(TEST_F_RAW);

	return 0;
}

int memblock_alloc_exact_nid_range_checks(void)
{
	alloc_nid_test_flags = (TEST_F_RAW | TEST_F_EXACT);

	memblock_alloc_nid_range_checks();

	return 0;
}
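
/*
 * Illustrative sketch (not part of the suite): how the entry points defined
 * above are intended to be driven. The run_alloc_nid_example() wrapper below
 * is hypothetical and compiled out; the real callers live elsewhere in the
 * memblock test suite.
 */
#if 0
static int run_alloc_nid_example(void)
{
	/* runs the range and NUMA checks twice: plain and raw allocations */
	memblock_alloc_nid_checks();

	/* runs only the range checks, with TEST_F_RAW | TEST_F_EXACT set */
	memblock_alloc_exact_nid_range_checks();

	return 0;
}
#endif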