Potential atomic property/init deadlock fix #285

Open · wants to merge 1 commit into master
associate.m (12 changes: 7 additions & 5 deletions)
@@ -12,6 +12,8 @@
#include "lock.h"
#include "gc_ops.h"

PRIVATE int associate_spinlocks[spinlock_count];

/**
* A single associative reference. Contains the key, value, and association
* policy.
@@ -140,7 +142,7 @@ static void setReference(struct reference_list *list,
break;
}
// While inserting into the list, we need to lock it temporarily.
volatile int *lock = lock_for_pointer(list);
volatile int *lock = associate_lock_for_pointer(list);
lock_spinlock(lock);
struct reference *r = findReference(list, key);
// If there's an existing reference, then we can update it, otherwise we
@@ -165,7 +167,7 @@ static void setReference(struct reference_list *list,
BOOL needLock = isAtomic(r->policy) || isAtomic(policy);
if (needLock)
{
lock = lock_for_pointer(r);
lock = associate_lock_for_pointer(r);
lock_spinlock(lock);
}
@try
@@ -289,7 +291,7 @@ static void deallocHiddenClass(id obj, SEL _cmd)
Class cls = (Class)object;
if ((NULL == cls->extra_data) && create)
{
volatile int *lock = lock_for_pointer(cls);
volatile int *lock = associate_lock_for_pointer(cls);
struct reference_list *list = gc->malloc(sizeof(struct reference_list));
lock_spinlock(lock);
if (NULL == cls->extra_data)
@@ -309,7 +311,7 @@ static void deallocHiddenClass(id obj, SEL _cmd)
Class hiddenClass = findHiddenClass(object);
if ((NULL == hiddenClass) && create)
{
volatile int *lock = lock_for_pointer(object);
volatile int *lock = associate_lock_for_pointer(object);
lock_spinlock(lock);
hiddenClass = findHiddenClass(object);
if (NULL == hiddenClass)
@@ -423,7 +425,7 @@ static Class hiddenClassForObject(id object)
Class hiddenClass = findHiddenClass(object);
if (NULL == hiddenClass)
{
volatile int *lock = lock_for_pointer(object);
volatile int *lock = associate_lock_for_pointer(object);
lock_spinlock(lock);
hiddenClass = findHiddenClass(object);
if (NULL == hiddenClass)
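The hunks above give the associated-object machinery its own lock table, associate_spinlocks, instead of sharing the single table used by the atomic property accessors. Below is a minimal, self-contained sketch (not the library's code) of the hazard a shared table creates: two unrelated pointers can hash to the same bucket, and if one subsystem re-enters the other while holding that bucket, the second lock attempt spins on the thread itself. LOCK_COUNT, lock_for() and the two addresses are illustrative assumptions, not the real hash or table size.

#include <stdio.h>
#include <stdint.h>

#define LOCK_COUNT 128                 /* assumed power of two, for illustration only */
#define LOCK_MASK  (LOCK_COUNT - 1)

static int shared_locks[LOCK_COUNT];   /* stand-in for the old, shared spinlocks[] table */

/* Simplified stand-in for lock_for_pointer(): any many-to-one hash behaves the same way. */
static volatile int *lock_for(const void *ptr)
{
    uintptr_t h = (uintptr_t)ptr >> 4;
    return shared_locks + (h & LOCK_MASK);
}

int main(void)
{
    /* Two fabricated addresses, one standing in for a property ivar and one for an
     * associated-object list, chosen so that they collide in the shared table. */
    const void *property_addr = (const void *)(uintptr_t)0x1000;
    const void *assoc_addr    = (const void *)(uintptr_t)(0x1000 + 16 * LOCK_COUNT);

    volatile int *a = lock_for(property_addr);
    volatile int *b = lock_for(assoc_addr);
    printf("same bucket: %s\n", a == b ? "yes" : "no");   /* prints "yes" */

    /* If the property code held *a and then re-entered the associated-object code,
     * which in turn tried to take *b (the same word), the thread would spin forever.
     * Giving each subsystem its own table rules out this cross-subsystem collision. */
    return 0;
}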
properties.m (39 changes: 22 additions & 17 deletions)
@@ -12,7 +12,7 @@
#include "gc_ops.h"
#include "lock.h"

PRIVATE int spinlocks[spinlock_count];
PRIVATE int prop_spinlocks[spinlock_count];

/**
* Public function for getting a property.
@@ -26,7 +26,7 @@ id objc_getProperty(id obj, SEL _cmd, ptrdiff_t offset, BOOL isAtomic)
id ret;
if (isAtomic)
{
volatile int *lock = lock_for_pointer(addr);
volatile int *lock = prop_lock_for_pointer(addr);
lock_spinlock(lock);
ret = *(id*)addr;
ret = objc_retain(ret);
@@ -59,7 +59,7 @@ void objc_setProperty(id obj, SEL _cmd, ptrdiff_t offset, id arg, BOOL isAtomic,
id old;
if (isAtomic)
{
volatile int *lock = lock_for_pointer(addr);
volatile int *lock = prop_lock_for_pointer(addr);
lock_spinlock(lock);
old = *(id*)addr;
*(id*)addr = arg;
@@ -79,7 +79,7 @@ void objc_setProperty_atomic(id obj, SEL _cmd, id arg, ptrdiff_t offset)
char *addr = (char*)obj;
addr += offset;
arg = objc_retain(arg);
volatile int *lock = lock_for_pointer(addr);
volatile int *lock = prop_lock_for_pointer(addr);
lock_spinlock(lock);
id old = *(id*)addr;
*(id*)addr = arg;
@@ -94,7 +94,7 @@ void objc_setProperty_atomic_copy(id obj, SEL _cmd, id arg, ptrdiff_t offset)
addr += offset;

arg = [arg copy];
volatile int *lock = lock_for_pointer(addr);
volatile int *lock = prop_lock_for_pointer(addr);
lock_spinlock(lock);
id old = *(id*)addr;
*(id*)addr = arg;
@@ -127,20 +127,25 @@ void objc_setProperty_nonatomic_copy(id obj, SEL _cmd, id arg, ptrdiff_t offset)
void objc_copyCppObjectAtomic(void *dest, const void *src,
void (*copyHelper) (void *dest, const void *source))
{
volatile int *lock = lock_for_pointer(src < dest ? src : dest);
volatile int *lock2 = lock_for_pointer(src < dest ? dest : src);
volatile int *lock = prop_lock_for_pointer(src < dest ? src : dest);
volatile int *lock2 = prop_lock_for_pointer(src < dest ? dest : src);
lock_spinlock(lock);
lock_spinlock(lock2);
if (lock != lock2) {
// This might look odd, but whenever we take two spinlocks simultaneously, there
// is a non-zero chance that the two pointers hash to the same spinlock.
// In that case, locking one locks both, and locking it twice would deadlock us.
lock_spinlock(lock2);
}
copyHelper(dest, src);
unlock_spinlock(lock);
unlock_spinlock(lock2);
if (lock != lock2) { unlock_spinlock(lock2); }
}

OBJC_PUBLIC
void objc_getCppObjectAtomic(void *dest, const void *src,
void (*copyHelper) (void *dest, const void *source))
{
volatile int *lock = lock_for_pointer(src);
volatile int *lock = prop_lock_for_pointer(src);
lock_spinlock(lock);
copyHelper(dest, src);
unlock_spinlock(lock);
Expand All @@ -150,7 +155,7 @@ void objc_getCppObjectAtomic(void *dest, const void *src,
void objc_setCppObjectAtomic(void *dest, const void *src,
void (*copyHelper) (void *dest, const void *source))
{
volatile int *lock = lock_for_pointer(dest);
volatile int *lock = prop_lock_for_pointer(dest);
lock_spinlock(lock);
copyHelper(dest, src);
unlock_spinlock(lock);
@@ -172,13 +177,13 @@ void objc_copyPropertyStruct(void *dest,
{
if (atomic)
{
volatile int *lock = lock_for_pointer(src < dest ? src : dest);
volatile int *lock2 = lock_for_pointer(src < dest ? dest : src);
volatile int *lock = prop_lock_for_pointer(src < dest ? src : dest);
volatile int *lock2 = prop_lock_for_pointer(src < dest ? dest : src);
lock_spinlock(lock);
lock_spinlock(lock2);
if (lock != lock2) { lock_spinlock(lock2); }
memcpy(dest, src, size);
unlock_spinlock(lock);
unlock_spinlock(lock2);
if (lock != lock2) { unlock_spinlock(lock2); }
}
else
{
@@ -199,7 +204,7 @@ void objc_getPropertyStruct(void *dest,
{
if (atomic)
{
volatile int *lock = lock_for_pointer(src);
volatile int *lock = prop_lock_for_pointer(src);
lock_spinlock(lock);
memcpy(dest, src, size);
unlock_spinlock(lock);
@@ -223,7 +228,7 @@ void objc_setPropertyStruct(void *dest,
{
if (atomic)
{
volatile int *lock = lock_for_pointer(dest);
volatile int *lock = prop_lock_for_pointer(dest);
lock_spinlock(lock);
memcpy(dest, src, size);
unlock_spinlock(lock);
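The lock != lock2 guards added in objc_copyCppObjectAtomic and objc_copyPropertyStruct above cover the case where both pointers hash to the same bucket of a single table. The sketch below shows that double-lock pattern in isolation, under stated assumptions: bucket(), my_lock(), my_unlock() and LOCK_COUNT are stand-ins for lock_for_pointer(), lock_spinlock(), unlock_spinlock() and the real table size, and the locking is built on the GCC/Clang __sync builtins rather than the library's own spinlock implementation.

#include <stdint.h>
#include <string.h>

#define LOCK_COUNT 128                 /* illustrative size */
static int locks[LOCK_COUNT];

static volatile int *bucket(const void *p)
{
    return locks + (((uintptr_t)p >> 4) & (LOCK_COUNT - 1));
}

static void my_lock(volatile int *l)
{
    while (__sync_lock_test_and_set(l, 1)) { /* spin until the word is released */ }
}

static void my_unlock(volatile int *l)
{
    __sync_lock_release(l);
}

static void copy_struct_atomic(void *dest, const void *src, size_t size)
{
    /* Always acquire in address order, so two threads copying between the same pair
     * of locations in opposite directions cannot deadlock each other. */
    volatile int *lo = bucket(src < dest ? src : dest);
    volatile int *hi = bucket(src < dest ? dest : src);
    my_lock(lo);
    /* Only take the second lock when it is a different bucket: these spinlocks are
     * not recursive, so locking the same word twice would spin forever. */
    if (lo != hi) { my_lock(hi); }
    memcpy(dest, src, size);
    if (lo != hi) { my_unlock(hi); }
    my_unlock(lo);
}

int main(void)
{
    struct { int x, y; } a = { 1, 2 }, b = { 0, 0 };
    copy_struct_atomic(&b, &a, sizeof(a));
    return (b.x == 1 && b.y == 2) ? 0 : 1;
}

The two guards address different deadlocks: the address ordering handles two threads contending on two buckets, while the lock != lock2 check handles a single thread and a single bucket.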
spinlock.h (30 changes: 21 additions & 9 deletions)
@@ -17,7 +17,20 @@ static const int spinlock_mask = spinlock_count - 1;
/**
* Integers used as spinlocks for atomic property access.
*/
extern int spinlocks[spinlock_count];
extern int prop_spinlocks[spinlock_count];
extern int associate_spinlocks[spinlock_count];

static inline intptr_t hash_for_pointer(const void *ptr)
{
intptr_t hash = (intptr_t)ptr;
// Most properties will be pointers, so disregard the lowest few bits
hash >>= sizeof(void*) == 4 ? 2 : 8;
intptr_t low = hash & spinlock_mask;
hash >>= 16;
hash |= low;
return (hash & spinlock_mask);
}

/**
* Get a spin lock from a pointer. We want to prevent lock contention between
* properties in the same object - if someone is stupid enough to be using
@@ -26,15 +39,14 @@ extern int spinlocks[spinlock_count];
* contention between the same property in different objects, so we can't just
* use the ivar offset.
*/
static inline volatile int *lock_for_pointer(const void *ptr)
static inline volatile int *prop_lock_for_pointer(const void *ptr)
{
intptr_t hash = (intptr_t)ptr;
// Most properties will be pointers, so disregard the lowest few bits
hash >>= sizeof(void*) == 4 ? 2 : 8;
intptr_t low = hash & spinlock_mask;
hash >>= 16;
hash |= low;
return spinlocks + (hash & spinlock_mask);
return prop_spinlocks + hash_for_pointer(ptr);
}

static inline volatile int *associate_lock_for_pointer(const void *ptr)
{
return associate_spinlocks + hash_for_pointer(ptr);
}

/**
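For reference, the sketch below (a standalone toy, not part of the patch) reuses the hash body shown above to illustrate the new layout: the slot index is computed once by hash_for_pointer(), but the property code and the associated-object code now index separate tables, so a cross-subsystem collision can no longer alias to the same lock word. spinlock_count is assumed to be 128 here purely for illustration; the real value is defined elsewhere in spinlock.h.

#include <stdio.h>
#include <stdint.h>

enum { spinlock_count = 128, spinlock_mask = spinlock_count - 1 };  /* assumed size */

static int prop_spinlocks[spinlock_count];
static int associate_spinlocks[spinlock_count];

/* Hash body copied from the spinlock.h hunk above. */
static inline intptr_t hash_for_pointer(const void *ptr)
{
    intptr_t hash = (intptr_t)ptr;
    // Most properties will be pointers, so disregard the lowest few bits
    hash >>= sizeof(void*) == 4 ? 2 : 8;
    intptr_t low = hash & spinlock_mask;
    hash >>= 16;
    hash |= low;
    return (hash & spinlock_mask);
}

int main(void)
{
    const void *p = (const void *)(uintptr_t)0x12345678;
    intptr_t slot = hash_for_pointer(p);
    /* Same slot index, two distinct lock words: a collision between the property
     * code and the associated-object code can no longer contend on the same lock. */
    printf("slot %ld, property lock %p, associate lock %p\n",
           (long)slot, (void *)(prop_spinlocks + slot), (void *)(associate_spinlocks + slot));
    return 0;
}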