Mirror of https://github.com/gnustep/libs-base.git (synced 2025-04-25 17:51:01 +00:00)
fix hash generation for 64bit processors
git-svn-id: svn+ssh://svn.gna.org/svn/gnustep/libs/base/trunk@38294 72102866-910b-0410-8b05-ffd578937521
This commit is contained in:
parent 50201762bf
commit e158dc8d68

1 changed file with 9 additions and 6 deletions
@@ -1902,13 +1902,16 @@ static id gs_weak_load(id obj)
 - (NSUInteger) hash
 {
   /*
-   * Ideally we would shift left to lose any zero bits produced by the
-   * alignment of the object in memory ... but that depends on the
-   * processor architecture and the memory allocatiion implementation.
-   * In the absence of detailed information, pick a reasonable value
-   * assuming the object will be aligned to an eight byte boundary.
+   * malloc() must return pointers aligned to point to any data type
   */
-  return (NSUInteger)(uintptr_t)self >> 3;
+#define MAXALIGN (__alignof__(_Complex long double))
+
+  static int shift = MAXALIGN==16 ? 4 : (MAXALIGN==8 ? 3 : 2);
+
+  /* We shift left to lose any zero bits produced by the
+   * alignment of the object in memory.
+   */
+  return (NSUInteger)((uintptr_t)self >> shift);
 }
 
 /**
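For context, a minimal standalone C sketch of the technique the commit adopts (the pointer_hash helper and the main driver are illustrative, not part of GNUstep): because malloc() returns pointers aligned for any data type, the low log2(alignment) bits of an object's address are always zero, so shifting them out before using the address as a hash spreads objects across hash buckets instead of leaving those bits permanently zero.

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Maximal fundamental alignment, as in the commit: malloc() must return
     * pointers suitably aligned for any data type, including the most
     * strictly aligned one (_Complex long double). */
    #define MAXALIGN (__alignof__(_Complex long double))

    /* Hypothetical helper mirroring -hash: drop the alignment-induced zero
     * bits by shifting the pointer value right (16-byte alignment -> 4 bits,
     * 8-byte -> 3 bits, otherwise 2 bits). */
    static uintptr_t pointer_hash(const void *p)
    {
      static const int shift = MAXALIGN == 16 ? 4 : (MAXALIGN == 8 ? 3 : 2);

      return (uintptr_t)p >> shift;
    }

    int main(void)
    {
      void *a = malloc(32);
      void *b = malloc(32);

      /* Without the shift, the low bits of every malloc'd address are zero,
       * which wastes hash buckets whose index is taken from those bits. */
      printf("a: %p  hash: %#zx\n", a, (size_t)pointer_hash(a));
      printf("b: %p  hash: %#zx\n", b, (size_t)pointer_hash(b));

      free(a);
      free(b);
      return 0;
    }

On a typical 64-bit system MAXALIGN is 16, so the shift becomes 4 instead of the fixed 3 that the old code used, which is the 64-bit behaviour the commit message refers to.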