fix hash generation for 64bit processors

git-svn-id: svn+ssh://svn.gna.org/svn/gnustep/libs/base/trunk@38294 72102866-910b-0410-8b05-ffd578937521
Richard Frith-MacDonald 2015-01-16 14:29:38 +00:00
parent dd75f89d22
commit 13e4d77cec

@@ -1902,13 +1902,16 @@ static id gs_weak_load(id obj)
 - (NSUInteger) hash
 {
   /*
-   * Ideally we would shift left to lose any zero bits produced by the
-   * alignment of the object in memory ... but that depends on the
-   * processor architecture and the memory allocation implementation.
-   * In the absence of detailed information, pick a reasonable value
-   * assuming the object will be aligned to an eight byte boundary.
+   * malloc() must return pointers aligned to point to any data type
    */
-  return (NSUInteger)(uintptr_t)self >> 3;
+#define MAXALIGN (__alignof__(_Complex long double))
+  static int shift = MAXALIGN==16 ? 4 : (MAXALIGN==8 ? 3 : 2);
+
+  /* We shift left to lose any zero bits produced by the
+   * alignment of the object in memory.
+   */
+  return (NSUInteger)((uintptr_t)self >> shift);
 }
 
 /**
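The change replaces the fixed ">> 3" with a shift derived from the strictest alignment malloc() must honour, so that on targets where allocations are 16-byte aligned the extra always-zero low bit of the pointer no longer weakens the hash. The stand-alone C sketch below illustrates the same idea outside of NSObject; pointer_hash and the small test harness are illustrative names and are not part of the GNUstep sources, while MAXALIGN mirrors the macro introduced by the commit.

  #include <stdint.h>
  #include <stdio.h>
  #include <stdlib.h>

  /* Strictest scalar alignment the allocator must satisfy; the commit uses
   * _Complex long double as a proxy for the most strictly aligned type.
   */
  #define MAXALIGN (__alignof__(_Complex long double))

  static uintptr_t
  pointer_hash(const void *ptr)
  {
    /* 16-byte alignment guarantees four zero low bits, 8-byte guarantees
     * three; anything smaller is assumed to give at least two.
     */
    static const int shift = MAXALIGN == 16 ? 4 : (MAXALIGN == 8 ? 3 : 2);

    /* Shift right so the guaranteed-zero low bits do not bias the hash. */
    return (uintptr_t)ptr >> shift;
  }

  int
  main(void)
  {
    void *p = malloc(32);

    printf("MAXALIGN=%zu hash=%#lx\n",
      (size_t)MAXALIGN, (unsigned long)pointer_hash(p));
    free(p);
    return 0;
  }

Using the alignment of _Complex long double works because malloc() is required to return memory suitably aligned for any object type, so the hash can safely discard at least that many low-order bits regardless of the word size of the processor.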