#include <stdlib.h>
#include <errno.h>
#include "meta.h"

void *aligned_alloc(size_t align, size_t len)
{
    if ((align & -align) != align) { errno = EINVAL; return 0; }
    if (len > SIZE_MAX - align || align >= (1ULL<<31)*UNIT) { errno = ENOMEM; return 0; }
    if (DISABLE_ALIGNED_ALLOC) { errno = ENOMEM; return 0; }
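
    // Alignments up to UNIT need no special handling: plain malloc results
    // are already UNIT-aligned.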
    if (align <= UNIT) align = UNIT;
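
    // Over-allocate by align-UNIT: somewhere in the first align-UNIT bytes of
    // the slot there is a suitably aligned point with len bytes after it.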
    unsigned char *p = malloc(len + align - UNIT);
    if (!p) return 0;
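
    // Locate the slot the allocation landed in: g is the owning group's
    // metadata, idx the slot index, and start/end bound the slot's usable
    // space (IB bytes at the end are reserved for in-band header use).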
    struct meta *g = get_meta(p);
    int idx = get_slot_index(p);
    size_t stride = get_stride(g);
    unsigned char *start = g->mem->storage + stride*idx;
    unsigned char *end = g->mem->storage + stride*(idx+1) - IB;
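
    // adj is how far p must be advanced to reach the requested alignment;
    // if it is already aligned, no header rewriting is needed.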
    size_t adj = -(uintptr_t)p & (align-1);
    if (!adj) {
        set_size(p, end, len);
        return p;
    }
    p += adj;
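
    // Record the bumped pointer's distance from the group's storage base, in
    // UNITs, in the slot header so free() can recover the slot's real start.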
    uint32_t offset = (size_t)(p-g->mem->storage)/UNIT;
    if (offset <= 0xffff) {
        *(uint16_t *)(p-2) = offset;
        p[-4] = 0;
    } else {
        // use a 32-bit offset if 16-bit doesn't fit. for this,
        // 16-bit field must be zero, [-4] byte nonzero.
        *(uint16_t *)(p-2) = 0;
        *(uint32_t *)(p-8) = offset;
        p[-4] = 1;
    }
    set_size(p, end, len);
    // store offset to aligned enframing. this facilitates cycling
    // offset and also iteration of heap for debugging/measurement.
    // for extreme overalignment it won't fit but these are classless
    // allocations anyway.
    *(uint16_t *)(start - 2) = (size_t)(p-start)/UNIT;

    return p;
}
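
The offsets written above are consumed when the pointer is later freed: the allocator has to map the aligned pointer back to its slot. The sketch below is illustrative only, not musl code (the real decoding happens inside mallocng's get_meta()); slot_offset_units is a hypothetical name, and it simply reverses the 16-bit/32-bit encoding stored here.

    #include <stdint.h>

    // Hypothetical helper (not part of musl): decode the slot-offset header
    // written by aligned_alloc. Normally the 16-bit field at p-2 holds the
    // offset in UNITs; a nonzero byte at p[-4] means the 16-bit field is zero
    // and the real offset is the 32-bit value at p-8.
    static uint32_t slot_offset_units(const unsigned char *p)
    {
        uint32_t off = *(const uint16_t *)(p - 2);   // common case: fits in 16 bits
        if (p[-4])                                   // marker byte: 16-bit field didn't fit
            off = *(const uint32_t *)(p - 8);        // use the 32-bit fallback field
        return off;                                  // distance from g->mem->storage, in UNITs
    }

Measuring the offset in UNITs rather than bytes is what lets the common 16-bit field cover slots up to 0xffff*UNIT bytes from the storage base before the 32-bit fallback is needed.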