Diffstat (limited to 'mm')
 mm/slub.c | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)
diff --git a/mm/slub.c b/mm/slub.c
index 332d4b459a90..9bf44955c4f1 100644
--- a/mm/slub.c
+++ b/mm/slub.c
@@ -3533,6 +3533,7 @@ static int calculate_sizes(struct kmem_cache *s, int forced_order)
 {
 	slab_flags_t flags = s->flags;
 	unsigned int size = s->object_size;
+	unsigned int freepointer_area;
 	unsigned int order;
 
 	/*
@@ -3541,6 +3542,13 @@ static int calculate_sizes(struct kmem_cache *s, int forced_order)
 	 * the possible location of the free pointer.
 	 */
 	size = ALIGN(size, sizeof(void *));
+	/*
+	 * This is the area of the object where a freepointer can be
+	 * safely written. If redzoning adds more to the inuse size, we
+	 * can't use that portion for writing the freepointer, so
+	 * s->offset must be limited within this for the general case.
+	 */
+	freepointer_area = size;
 
 #ifdef CONFIG_SLUB_DEBUG
 	/*
@@ -3582,13 +3590,13 @@ static int calculate_sizes(struct kmem_cache *s, int forced_order)
 		 */
 		s->offset = size;
 		size += sizeof(void *);
-	} else if (size > sizeof(void *)) {
+	} else if (freepointer_area > sizeof(void *)) {
 		/*
 		 * Store freelist pointer near middle of object to keep
 		 * it away from the edges of the object to avoid small
 		 * sized over/underflows from neighboring allocations.
 		 */
-		s->offset = ALIGN(size / 2, sizeof(void *));
+		s->offset = ALIGN(freepointer_area / 2, sizeof(void *));
 	}
 
 #ifdef CONFIG_SLUB_DEBUG
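
For illustration only (not part of the patch): a minimal userspace C sketch of the offset
choice made above, assuming a power-of-two pointer size and a red zone that is appended
to the object after the free-pointer area is recorded. The helper name
pick_freepointer_offset and the standalone ALIGN macro are hypothetical stand-ins for the
kernel code; the point is that the midpoint is taken from freepointer_area (the size
before redzoning), so the free pointer cannot land in the red zone.

	#include <stdio.h>

	/* Round x up to the next multiple of a (a must be a power of two). */
	#define ALIGN(x, a)	(((x) + (a) - 1) & ~((a) - 1))

	static unsigned int pick_freepointer_offset(unsigned int object_size,
						    unsigned int redzone_bytes)
	{
		/* Pointer-align the object size, as calculate_sizes() does. */
		unsigned int size = ALIGN(object_size, sizeof(void *));
		/* Area where the free pointer may safely be written. */
		unsigned int freepointer_area = size;

		/* Redzoning grows the inuse size, but not the usable area. */
		size += redzone_bytes;

		/* Midpoint of the usable area, pointer-aligned. */
		return ALIGN(freepointer_area / 2, sizeof(void *));
	}

	int main(void)
	{
		/*
		 * With a 64-byte object and 16 bytes of red zone, the offset
		 * stays at 32; taking the midpoint of the redzoned size (80)
		 * would have drifted it toward the red zone instead.
		 */
		printf("offset = %u\n", pick_freepointer_offset(64, 16));
		return 0;
	}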