author | Zhao Qiang <qiang.zhao@freescale.com> | 2015-11-30 10:48:52 +0800
---|---|---
committer | Scott Wood <scottwood@freescale.com> | 2015-12-22 17:10:17 -0600
commit | de2dd0eb30af55d3893979d5641c50c7a8969c99 (patch) |
tree | dcfd94f13f29b4ad8bcf32b2849876467aa3626c |
parent | e9d764f803964a54ca7da4a67d124fe824ebd80a (diff) |
genalloc: support memory-allocation with bytes-alignment to genalloc
Byte alignment is required to manage some special RAM, so add
gen_pool_first_fit_align to genalloc. Also add gen_pool_alloc_algo,
which takes the algorithm and its data as arguments, so that callers
using more than one algorithm can pass gen_pool_first_fit_align and
its alignment data explicitly (gen_pool_alloc becomes a wrapper
around gen_pool_alloc_algo).
Signed-off-by: Zhao Qiang <qiang.zhao@freescale.com>
Signed-off-by: Scott Wood <scottwood@freescale.com>
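
For readers unfamiliar with the new interface, here is a minimal sketch (not part of the patch) of how a caller might request byte-aligned memory from an existing gen_pool using the gen_pool_alloc_algo/gen_pool_first_fit_align pair this patch adds. The pool setup and the chosen size/alignment values are assumptions for illustration only.

```c
/*
 * Illustrative use of the new API (not part of this patch): allocate
 * 64 bytes aligned to a 32-byte boundary from an existing gen_pool.
 * The pool is assumed to have been set up with gen_pool_create() and
 * gen_pool_add() as usual; error handling is trimmed for brevity.
 */
#include <linux/genalloc.h>

static unsigned long example_aligned_alloc(struct gen_pool *pool)
{
	struct genpool_data_align align_data = { .align = 32 };

	/* Pass the algorithm and its data per call instead of relying on
	 * the pool-wide default installed via gen_pool_set_algo(). */
	return gen_pool_alloc_algo(pool, 64, gen_pool_first_fit_align,
				   &align_data);
}
```

Alternatively, gen_pool_set_algo() can install gen_pool_first_fit_align together with a longer-lived genpool_data_align as the pool-wide default, after which plain gen_pool_alloc() takes the same path, since gen_pool_alloc() is now a wrapper around gen_pool_alloc_algo().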
-rw-r--r-- | include/linux/genalloc.h | 27
-rw-r--r-- | lib/genalloc.c | 61
2 files changed, 78 insertions(+), 10 deletions(-)
diff --git a/include/linux/genalloc.h b/include/linux/genalloc.h
index 7ff168d06967..3c676ce46ee0 100644
--- a/include/linux/genalloc.h
+++ b/include/linux/genalloc.h
@@ -30,10 +30,12 @@
 #ifndef __GENALLOC_H__
 #define __GENALLOC_H__
 
+#include <linux/types.h>
 #include <linux/spinlock_types.h>
 
 struct device;
 struct device_node;
+struct gen_pool;
 
 /**
  * Allocation callback function type definition
@@ -47,7 +49,7 @@ typedef unsigned long (*genpool_algo_t)(unsigned long *map,
 			unsigned long size,
 			unsigned long start,
 			unsigned int nr,
-			void *data);
+			void *data, struct gen_pool *pool);
 
 /*
  *  General purpose special memory pool descriptor.
@@ -75,6 +77,13 @@ struct gen_pool_chunk {
 	unsigned long bits[0];		/* bitmap for allocating memory chunk */
 };
 
+/*
+ * gen_pool data descriptor for gen_pool_first_fit_align.
+ */
+struct genpool_data_align {
+	int align;		/* alignment by bytes for starting address */
+};
+
 extern struct gen_pool *gen_pool_create(int, int);
 extern phys_addr_t gen_pool_virt_to_phys(struct gen_pool *pool, unsigned long);
 extern int gen_pool_add_virt(struct gen_pool *, unsigned long, phys_addr_t,
@@ -98,6 +107,8 @@ static inline int gen_pool_add(struct gen_pool *pool, unsigned long addr,
 }
 extern void gen_pool_destroy(struct gen_pool *);
 extern unsigned long gen_pool_alloc(struct gen_pool *, size_t);
+extern unsigned long gen_pool_alloc_algo(struct gen_pool *, size_t,
+		genpool_algo_t algo, void *data);
 extern void *gen_pool_dma_alloc(struct gen_pool *pool, size_t size,
 		dma_addr_t *dma);
 extern void gen_pool_free(struct gen_pool *, unsigned long, size_t);
@@ -110,14 +121,22 @@ extern void gen_pool_set_algo(struct gen_pool *pool, genpool_algo_t algo,
 		void *data);
 
 extern unsigned long gen_pool_first_fit(unsigned long *map, unsigned long size,
-		unsigned long start, unsigned int nr, void *data);
+		unsigned long start, unsigned int nr, void *data,
+		struct gen_pool *pool);
+
+extern unsigned long gen_pool_first_fit_align(unsigned long *map,
+		unsigned long size, unsigned long start, unsigned int nr,
+		void *data, struct gen_pool *pool);
+
 
 extern unsigned long gen_pool_first_fit_order_align(unsigned long *map,
 		unsigned long size, unsigned long start, unsigned int nr,
-		void *data);
+		void *data, struct gen_pool *pool);
 
 extern unsigned long gen_pool_best_fit(unsigned long *map, unsigned long size,
-		unsigned long start, unsigned int nr, void *data);
+		unsigned long start, unsigned int nr, void *data,
+		struct gen_pool *pool);
+
 
 extern struct gen_pool *devm_gen_pool_create(struct device *dev,
 		int min_alloc_order, int nid, const char *name);
diff --git a/lib/genalloc.c b/lib/genalloc.c
index 116a166b096f..b8cf89d9e17d 100644
--- a/lib/genalloc.c
+++ b/lib/genalloc.c
@@ -270,6 +270,25 @@ EXPORT_SYMBOL(gen_pool_destroy);
  */
 unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
 {
+	return gen_pool_alloc_algo(pool, size, pool->algo, pool->data);
+}
+EXPORT_SYMBOL(gen_pool_alloc);
+
+/**
+ * gen_pool_alloc_algo - allocate special memory from the pool
+ * @pool: pool to allocate from
+ * @size: number of bytes to allocate from the pool
+ * @algo: algorithm passed from caller
+ * @data: data passed to algorithm
+ *
+ * Allocate the requested number of bytes from the specified pool.
+ * Uses the pool allocation function (with first-fit algorithm by default).
+ * Can not be used in NMI handler on architectures without
+ * NMI-safe cmpxchg implementation.
+ */
+unsigned long gen_pool_alloc_algo(struct gen_pool *pool, size_t size,
+		genpool_algo_t algo, void *data)
+{
 	struct gen_pool_chunk *chunk;
 	unsigned long addr = 0;
 	int order = pool->min_alloc_order;
@@ -290,8 +309,8 @@ unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
 		start_bit = 0;
 		end_bit = chunk_size(chunk) >> order;
 retry:
-		start_bit = pool->algo(chunk->bits, end_bit, start_bit, nbits,
-				pool->data);
+		start_bit = algo(chunk->bits, end_bit, start_bit,
+				 nbits, data, pool);
 		if (start_bit >= end_bit)
 			continue;
 		remain = bitmap_set_ll(chunk->bits, start_bit, nbits);
@@ -310,7 +329,7 @@ retry:
 	rcu_read_unlock();
 	return addr;
 }
-EXPORT_SYMBOL(gen_pool_alloc);
+EXPORT_SYMBOL(gen_pool_alloc_algo);
 
 /**
  * gen_pool_dma_alloc - allocate special memory from the pool for DMA usage
@@ -501,15 +520,42 @@ EXPORT_SYMBOL(gen_pool_set_algo);
  * @start: The bitnumber to start searching at
  * @nr: The number of zeroed bits we're looking for
  * @data: additional data - unused
+ * @pool: pool to find the fit region memory from
  */
 unsigned long gen_pool_first_fit(unsigned long *map, unsigned long size,
-		unsigned long start, unsigned int nr, void *data)
+		unsigned long start, unsigned int nr, void *data,
+		struct gen_pool *pool)
 {
 	return bitmap_find_next_zero_area(map, size, start, nr, 0);
 }
 EXPORT_SYMBOL(gen_pool_first_fit);
 
 /**
+ * gen_pool_first_fit_align - find the first available region
+ * of memory matching the size requirement (alignment constraint)
+ * @map: The address to base the search on
+ * @size: The bitmap size in bits
+ * @start: The bitnumber to start searching at
+ * @nr: The number of zeroed bits we're looking for
+ * @data: data for alignment
+ * @pool: pool to get order from
+ */
+unsigned long gen_pool_first_fit_align(unsigned long *map, unsigned long size,
+		unsigned long start, unsigned int nr, void *data,
+		struct gen_pool *pool)
+{
+	struct genpool_data_align *alignment;
+	unsigned long align_mask;
+	int order;
+
+	alignment = data;
+	order = pool->min_alloc_order;
+	align_mask = ((alignment->align + (1UL << order) - 1) >> order) - 1;
+	return bitmap_find_next_zero_area(map, size, start, nr, align_mask);
+}
+EXPORT_SYMBOL(gen_pool_first_fit_align);
+
+/**
  * gen_pool_first_fit_order_align - find the first available region
  * of memory matching the size requirement. The region will be aligned
  * to the order of the size specified.
@@ -518,10 +564,11 @@ EXPORT_SYMBOL(gen_pool_first_fit);
  * @start: The bitnumber to start searching at
  * @nr: The number of zeroed bits we're looking for
  * @data: additional data - unused
+ * @pool: pool to find the fit region memory from
  */
 unsigned long gen_pool_first_fit_order_align(unsigned long *map,
 		unsigned long size, unsigned long start,
-		unsigned int nr, void *data)
+		unsigned int nr, void *data, struct gen_pool *pool)
 {
 	unsigned long align_mask = roundup_pow_of_two(nr) - 1;
 
@@ -537,12 +584,14 @@ EXPORT_SYMBOL(gen_pool_first_fit_order_align);
  * @start: The bitnumber to start searching at
  * @nr: The number of zeroed bits we're looking for
  * @data: additional data - unused
+ * @pool: pool to find the fit region memory from
  *
  * Iterate over the bitmap to find the smallest free region
  * which we can allocate the memory.
  */
 unsigned long gen_pool_best_fit(unsigned long *map, unsigned long size,
-		unsigned long start, unsigned int nr, void *data)
+		unsigned long start, unsigned int nr, void *data,
+		struct gen_pool *pool)
 {
 	unsigned long start_bit = size;
 	unsigned long len = size + 1;
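
As a side note (not part of the patch), the align_mask computation in gen_pool_first_fit_align() converts the requested byte alignment into units of the pool's minimum allocation order before handing it to bitmap_find_next_zero_area(). A small standalone sketch of the arithmetic, assuming min_alloc_order = 3 (8-byte granularity) and a 32-byte alignment request:

```c
#include <stdio.h>

int main(void)
{
	int order = 3;   /* stands in for pool->min_alloc_order: 8-byte bits */
	int align = 32;  /* stands in for genpool_data_align.align */

	/* Same expression as in gen_pool_first_fit_align(): round the byte
	 * alignment up to whole bitmap units, minus one. */
	unsigned long align_mask = ((align + (1UL << order) - 1) >> order) - 1;

	/* (32 + 7) >> 3 = 4 units, so align_mask = 3: the bitmap search only
	 * returns start bits that are multiples of 4, i.e. chunk offsets
	 * aligned to 32 bytes. */
	printf("align_mask = %lu\n", align_mask);
	return 0;
}
```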