@@ -51,39 +51,39 @@ u64 memblock_find_in_range(u64 start, u64 end, u64 size, u64 align);
 int memblock_free_reserved_regions(void);
 int memblock_reserve_reserved_regions(void);
 
-extern void __init memblock_init(void);
-extern void __init memblock_analyze(void);
+extern void memblock_init(void);
+extern void memblock_analyze(void);
 extern long memblock_add(phys_addr_t base, phys_addr_t size);
 extern long memblock_remove(phys_addr_t base, phys_addr_t size);
-extern long __init memblock_free(phys_addr_t base, phys_addr_t size);
-extern long __init memblock_reserve(phys_addr_t base, phys_addr_t size);
+extern long memblock_free(phys_addr_t base, phys_addr_t size);
+extern long memblock_reserve(phys_addr_t base, phys_addr_t size);
 
 /* The numa aware allocator is only available if
  * CONFIG_ARCH_POPULATES_NODE_MAP is set
  */
-extern phys_addr_t __init memblock_alloc_nid(phys_addr_t size, phys_addr_t align,
+extern phys_addr_t memblock_alloc_nid(phys_addr_t size, phys_addr_t align,
 					 int nid);
-extern phys_addr_t __init memblock_alloc_try_nid(phys_addr_t size, phys_addr_t align,
+extern phys_addr_t memblock_alloc_try_nid(phys_addr_t size, phys_addr_t align,
 					 int nid);
 
-extern phys_addr_t __init memblock_alloc(phys_addr_t size, phys_addr_t align);
+extern phys_addr_t memblock_alloc(phys_addr_t size, phys_addr_t align);
 
 /* Flags for memblock_alloc_base() amd __memblock_alloc_base() */
 #define MEMBLOCK_ALLOC_ANYWHERE	(~(phys_addr_t)0)
 #define MEMBLOCK_ALLOC_ACCESSIBLE	0
 
-extern phys_addr_t __init memblock_alloc_base(phys_addr_t size,
+extern phys_addr_t memblock_alloc_base(phys_addr_t size,
 					 phys_addr_t align,
 					 phys_addr_t max_addr);
-extern phys_addr_t __init __memblock_alloc_base(phys_addr_t size,
+extern phys_addr_t __memblock_alloc_base(phys_addr_t size,
 					 phys_addr_t align,
 					 phys_addr_t max_addr);
-extern phys_addr_t __init memblock_phys_mem_size(void);
+extern phys_addr_t memblock_phys_mem_size(void);
 extern phys_addr_t memblock_end_of_DRAM(void);
-extern void __init memblock_enforce_memory_limit(phys_addr_t memory_limit);
+extern void memblock_enforce_memory_limit(phys_addr_t memory_limit);
 extern int memblock_is_memory(phys_addr_t addr);
 extern int memblock_is_region_memory(phys_addr_t base, phys_addr_t size);
-extern int __init memblock_is_reserved(phys_addr_t addr);
+extern int memblock_is_reserved(phys_addr_t addr);
 extern int memblock_is_region_reserved(phys_addr_t base, phys_addr_t size);
 
 extern void memblock_dump_all(void);
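
For context only: the prototypes touched above form the early-boot memory API, and the
change strips their __init markers without altering any signature. The sketch below is
not part of this patch; it shows roughly how a hypothetical arch setup path would drive
these calls. The function name early_mem_setup() and every address/size constant are
invented for illustration.

#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/memblock.h>

void __init early_mem_setup(void)
{
	phys_addr_t scratch;

	/* Start with empty memory and reserved region lists. */
	memblock_init();

	/* Register the RAM reported by firmware (made-up 1GB range at 0). */
	memblock_add(0x00000000, 0x40000000);

	/* Keep a firmware-owned block out of the allocator's hands
	 * (made-up 1MB region). */
	memblock_reserve(0x01000000, 0x00100000);

	/* Finalize the region lists before allocating from them. */
	memblock_analyze();

	/* Carve out one page of early scratch memory with no upper
	 * address limit, checking the returned physical address. */
	scratch = __memblock_alloc_base(PAGE_SIZE, PAGE_SIZE,
					MEMBLOCK_ALLOC_ANYWHERE);
	if (!scratch)
		panic("early_mem_setup: no memory for scratch page");
}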