Diffstat (limited to 'src')
-rw-r--r--  src/string/memset.c | 89 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++------------
1 file changed, 77 insertions(+), 12 deletions(-)
diff --git a/src/string/memset.c b/src/string/memset.c
index 20e47c45..f438b073 100644
--- a/src/string/memset.c
+++ b/src/string/memset.c
@@ -1,21 +1,86 @@
 #include <string.h>
-#include <stdlib.h>
 #include <stdint.h>
-#include <limits.h>
-
-#define SS (sizeof(size_t))
-#define ALIGN (sizeof(size_t)-1)
-#define ONES ((size_t)-1/UCHAR_MAX)
 
 void *memset(void *dest, int c, size_t n)
 {
 	unsigned char *s = dest;
-	c = (unsigned char)c;
-	for (; ((uintptr_t)s & ALIGN) && n; n--) *s++ = c;
-	if (n) {
-		size_t *w, k = ONES * c;
-		for (w = (void *)s; n>=SS; n-=SS, w++) *w = k;
-		for (s = (void *)w; n; n--, s++) *s = c;
+	size_t k;
+
+	/* Fill head and tail with minimal branching. Each
+	 * conditional ensures that all the subsequently used
+	 * offsets are well-defined and in the dest region. */
+
+	if (!n) return dest;
+	s[0] = s[n-1] = c;
+	if (n <= 2) return dest;
+	s[1] = s[n-2] = c;
+	s[2] = s[n-3] = c;
+	if (n <= 6) return dest;
+	s[3] = s[n-4] = c;
+	if (n <= 8) return dest;
+
+	/* Advance pointer to align it at a 4-byte boundary,
+	 * and truncate n to a multiple of 4. The previous code
+	 * already took care of any head/tail that get cut off
+	 * by the alignment. */
+
+	k = -(uintptr_t)s & 3;
+	s += k;
+	n -= k;
+	n &= -4;
+
+#ifdef __GNUC__
+	typedef uint32_t __attribute__((__may_alias__)) u32;
+	typedef uint64_t __attribute__((__may_alias__)) u64;
+
+	u32 c32 = ((u32)-1)/255 * (unsigned char)c;
+
+	/* In preparation to fill 32 bytes at a time, aligned on
+	 * an 8-byte boundary, fill head/tail up to 28 bytes each.
+	 * As in the initial byte-based head/tail fill, each
+	 * conditional below ensures that the subsequent offsets
+	 * are valid (e.g. !(n<=24) implies n>=28). */
+
+	*(u32 *)(s+0) = c32;
+	*(u32 *)(s+n-4) = c32;
+	if (n <= 8) return dest;
+	*(u32 *)(s+4) = c32;
+	*(u32 *)(s+8) = c32;
+	*(u32 *)(s+n-12) = c32;
+	*(u32 *)(s+n-8) = c32;
+	if (n <= 24) return dest;
+	*(u32 *)(s+12) = c32;
+	*(u32 *)(s+16) = c32;
+	*(u32 *)(s+20) = c32;
+	*(u32 *)(s+24) = c32;
+	*(u32 *)(s+n-28) = c32;
+	*(u32 *)(s+n-24) = c32;
+	*(u32 *)(s+n-20) = c32;
+	*(u32 *)(s+n-16) = c32;
+
+	/* Align to a multiple of 8 so we can fill 64 bits at a time,
+	 * and avoid writing the same bytes twice as much as is
+	 * practical without introducing additional branching. */
+
+	k = 24 + ((uintptr_t)s & 4);
+	s += k;
+	n -= k;
+
+	/* If this loop is reached, 28 tail bytes have already been
+	 * filled, so any remainder when n drops below 32 can be
+	 * safely ignored. */
+
+	u64 c64 = c32 | ((u64)c32 << 32);
+	for (; n >= 32; n-=32, s+=32) {
+		*(u64 *)(s+0) = c64;
+		*(u64 *)(s+8) = c64;
+		*(u64 *)(s+16) = c64;
+		*(u64 *)(s+24) = c64;
 	}
+#else
+	/* Pure C fallback with no aliasing violations. */
+	for (; n; n--, s++) *s = c;
+#endif
+
 	return dest;
 }
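
The alignment arithmetic in the new code is compact enough to deserve a worked illustration: -(uintptr_t)s & 3 is the number of bytes from s up to the next 4-byte boundary (0 if already aligned), and 24 + ((uintptr_t)s & 4) later skips either 24 or 28 already-filled bytes so that s lands on an 8-byte boundary. A minimal standalone sketch, not part of the commit (show_align is a hypothetical helper, and the small integers stand in for addresses):

#include <stdint.h>
#include <stdio.h>

/* Hypothetical demo: print the two alignment adjustments the
 * new memset computes for a given starting address. */
static void show_align(uintptr_t p)
{
	size_t k4 = -p & 3;        /* bytes to the next 4-byte boundary */
	uintptr_t q = p + k4;      /* now q % 4 == 0 */
	size_t k8 = 24 + (q & 4);  /* 24 or 28, so q+k8 is 8-aligned */
	printf("p%%8=%d: k4=%zu k8=%zu (p+k4+k8)%%8=%d\n",
	       (int)(p & 7), k4, k8, (int)((p + k4 + k8) & 7));
}

int main(void)
{
	for (uintptr_t p = 0; p < 8; p++) show_align(p);
	return 0;
}

For every residue of p mod 8, the combined adjustment ends on an 8-byte boundary, and every skipped byte was already written by the 28-byte head fill, so nothing is missed.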
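The constant ((u32)-1)/255 evaluates to 0x01010101, so multiplying it by the fill byte replicates that byte into every byte of the word, and c64 doubles the result to 64 bits; the may_alias typedefs tell GCC these word-sized stores may alias the char buffer, which is why the #else branch falls back to a plain byte loop. A quick check of the replication trick (a standalone sketch, not from the commit):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	unsigned char c = 0xab;
	uint32_t c32 = ((uint32_t)-1)/255 * c;        /* 0x01010101 * 0xab */
	uint64_t c64 = c32 | ((uint64_t)c32 << 32);
	printf("%08x\n", c32);                        /* abababab */
	printf("%016llx\n", (unsigned long long)c64); /* abababababababab */
	return 0;
}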
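The overlapping head/tail scheme is the part most prone to off-by-one mistakes: each unconditional pair of writes like s[2] = s[n-3] = c is valid only because the preceding size check ruled out smaller n. A hypothetical exhaustive harness for this logic (test_memset, the guard pattern, and the size/offset bounds are all assumptions, not part of the commit) compares every small size at every misalignment against guard bytes:

#include <assert.h>
#include <stdio.h>
#include <string.h>

void *test_memset(void *dest, int c, size_t n); /* the new implementation */

int main(void)
{
	unsigned char buf[16 + 64 + 16];
	for (size_t off = 0; off < 8; off++)
	for (size_t n = 0; n <= 64; n++) {
		memset(buf, 0x55, sizeof buf);   /* guard pattern everywhere */
		test_memset(buf + 16 + off, 0xab, n);
		for (size_t i = 0; i < sizeof buf; i++) {
			int inside = i >= 16 + off && i < 16 + off + n;
			/* filled bytes must be 0xab; guards untouched */
			assert(buf[i] == (inside ? 0xab : 0x55));
		}
	}
	puts("ok");
	return 0;
}

Any write past dest+n or any byte left unfilled trips the assert, which covers both failure modes the commit's conditionals are guarding against.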
