rtime.felk.cvut.cz Git - lisovros/linux_canprio.git/blobdiff - arch/blackfin/include/asm/spinlock.h
locking: Convert raw_rwlock functions to arch_rwlock
[lisovros/linux_canprio.git] / arch / blackfin / include / asm / spinlock.h
index 7e1c56b0a571c7e4e03cb105dc9dfc7e96b10b2f..1942ccfedbe01fc0b9dfe5681aec33e5e2258a8b 100644 (file)
@@ -17,12 +17,12 @@ asmlinkage int __raw_spin_is_locked_asm(volatile int *ptr);
 asmlinkage void __raw_spin_lock_asm(volatile int *ptr);
 asmlinkage int __raw_spin_trylock_asm(volatile int *ptr);
 asmlinkage void __raw_spin_unlock_asm(volatile int *ptr);
-asmlinkage void __raw_read_lock_asm(volatile int *ptr);
-asmlinkage int __raw_read_trylock_asm(volatile int *ptr);
-asmlinkage void __raw_read_unlock_asm(volatile int *ptr);
-asmlinkage void __raw_write_lock_asm(volatile int *ptr);
-asmlinkage int __raw_write_trylock_asm(volatile int *ptr);
-asmlinkage void __raw_write_unlock_asm(volatile int *ptr);
+asmlinkage void arch_read_lock_asm(volatile int *ptr);
+asmlinkage int arch_read_trylock_asm(volatile int *ptr);
+asmlinkage void arch_read_unlock_asm(volatile int *ptr);
+asmlinkage void arch_write_lock_asm(volatile int *ptr);
+asmlinkage int arch_write_trylock_asm(volatile int *ptr);
+asmlinkage void arch_write_unlock_asm(volatile int *ptr);
 
 static inline int arch_spin_is_locked(arch_spinlock_t *lock)
 {
@@ -52,44 +52,44 @@ static inline void arch_spin_unlock_wait(arch_spinlock_t *lock)
                cpu_relax();
 }
 
-static inline int __raw_read_can_lock(arch_rwlock_t *rw)
+static inline int arch_read_can_lock(arch_rwlock_t *rw)
 {
        return __raw_uncached_fetch_asm(&rw->lock) > 0;
 }
 
-static inline int __raw_write_can_lock(arch_rwlock_t *rw)
+static inline int arch_write_can_lock(arch_rwlock_t *rw)
 {
        return __raw_uncached_fetch_asm(&rw->lock) == RW_LOCK_BIAS;
 }
 
-static inline void __raw_read_lock(arch_rwlock_t *rw)
+static inline void arch_read_lock(arch_rwlock_t *rw)
 {
-       __raw_read_lock_asm(&rw->lock);
+       arch_read_lock_asm(&rw->lock);
 }
 
-static inline int __raw_read_trylock(arch_rwlock_t *rw)
+static inline int arch_read_trylock(arch_rwlock_t *rw)
 {
-       return __raw_read_trylock_asm(&rw->lock);
+       return arch_read_trylock_asm(&rw->lock);
 }
 
-static inline void __raw_read_unlock(arch_rwlock_t *rw)
+static inline void arch_read_unlock(arch_rwlock_t *rw)
 {
-       __raw_read_unlock_asm(&rw->lock);
+       arch_read_unlock_asm(&rw->lock);
 }
 
-static inline void __raw_write_lock(arch_rwlock_t *rw)
+static inline void arch_write_lock(arch_rwlock_t *rw)
 {
-       __raw_write_lock_asm(&rw->lock);
+       arch_write_lock_asm(&rw->lock);
 }
 
-static inline int __raw_write_trylock(arch_rwlock_t *rw)
+static inline int arch_write_trylock(arch_rwlock_t *rw)
 {
-       return __raw_write_trylock_asm(&rw->lock);
+       return arch_write_trylock_asm(&rw->lock);
 }
 
-static inline void __raw_write_unlock(arch_rwlock_t *rw)
+static inline void arch_write_unlock(arch_rwlock_t *rw)
 {
-       __raw_write_unlock_asm(&rw->lock);
+       arch_write_unlock_asm(&rw->lock);
 }
 
 #define arch_spin_relax(lock)          cpu_relax()