5 Author: Pekka Riikonen <priikone@silcnet.org>
7 Copyright (C) 2006 Pekka Riikonen
9 This program is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; version 2 of the License.
13 This program is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
/****h* silcutil/SILC Atomic Operations Interface
 *
 * DESCRIPTION
 *
 * SILC Atomic operations interface provides utility functions to perform
 * simple operations with integers atomically.  This enables fast integer
 * additions and subtractions safely in multithreaded environment.  It is
 * especially suited for reference counters and similar and is much faster
 * than using locking.  This interface supports 8, 16 and 32 bit integers
 * and 32 or 64 bit pointers.
 *
 * On some platforms this interface actually uses a mutual exclusion lock
 * instead of true atomic operations, leading to some performance penalty.
 * Also on some platforms the 8 and 16 bit integers are actually stored in
 * 32 bit integers.
 *
 * Fast operations are supported on: x86, x86_64, ia64, PPC
 *
 ***/
/****s* silcutil/SilcAtomicAPI/SilcAtomic32
 *
 * NAME
 *
 *    typedef struct { ... } SilcAtomic32;
 *
 * DESCRIPTION
 *
 *    The atomic operation structure given as argument to all atomic
 *    operation functions.  It holds the actual 32-bit atomic variable.
 *
 * EXAMPLE
 *
 *    SilcAtomic32 refcnt;
 *
 *    // Initialize atomic variable
 *    silc_atomic_init32(&refcnt, 0);
 *
 *    // Increment reference counter
 *    silc_atomic_add_int32(&refcnt, 1);
 *
 *    // Uninitialize atomic variable
 *    silc_atomic_uninit32(&refcnt);
 *
 ***/
/****s* silcutil/SilcAtomicAPI/SilcAtomic16
 *
 * NAME
 *
 *    typedef struct { ... } SilcAtomic16;
 *
 * DESCRIPTION
 *
 *    The atomic operation structure given as argument to all atomic
 *    operation functions.  It holds the actual 16-bit atomic variable.
 *
 * EXAMPLE
 *
 *    SilcAtomic16 refcnt;
 *
 *    // Initialize atomic variable
 *    silc_atomic_init16(&refcnt, 0);
 *
 *    // Increment reference counter
 *    silc_atomic_add_int16(&refcnt, 1);
 *
 *    // Uninitialize atomic variable
 *    silc_atomic_uninit16(&refcnt);
 *
 ***/
/****s* silcutil/SilcAtomicAPI/SilcAtomic8
 *
 * NAME
 *
 *    typedef struct { ... } SilcAtomic8;
 *
 * DESCRIPTION
 *
 *    The atomic operation structure given as argument to all atomic
 *    operation functions.  It holds the actual 8-bit atomic variable.
 *
 * EXAMPLE
 *
 *    SilcAtomic8 refcnt;
 *
 *    // Initialize atomic variable
 *    silc_atomic_init8(&refcnt, 0);
 *
 *    // Increment reference counter
 *    silc_atomic_add_int8(&refcnt, 1);
 *
 *    // Uninitialize atomic variable
 *    silc_atomic_uninit8(&refcnt);
 *
 ***/
/****s* silcutil/SilcAtomicAPI/SilcAtomicPointer
 *
 * NAME
 *
 *    typedef struct { ... } SilcAtomicPointer;
 *
 * DESCRIPTION
 *
 *    The atomic operation structure given as argument to all atomic
 *    operation functions.  It holds the actual pointer variable.
 *
 * EXAMPLE
 *
 *    SilcAtomicPointer ptr;
 *
 *    // Initialize atomic variable
 *    silc_atomic_init_pointer(&ptr, NULL);
 *
 *    // Set a pointer
 *    silc_atomic_set_pointer(&ptr, context);
 *
 *    // Uninitialize atomic variable
 *    silc_atomic_uninit_pointer(&ptr);
 *
 ***/
155 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || (defined(__GNUC__) && \
156 (defined(SILC_I486) || defined(SILC_X86_64) || defined(SILC_IA64) || \
157 defined(SILC_POWERPC)))
159 volatile SilcUInt32 value;
162 volatile void *pointer;
165 #define SILC_ATOMIC_MUTEX
168 volatile SilcUInt32 value;
172 volatile void *pointer;
176 #if !defined(SILC_THREADS) || (defined(__GNUC__) && (defined(SILC_I486) || \
177 defined(SILC_X86_64)))
179 volatile SilcUInt16 value;
181 #elif defined(SILC_WIN32) || (defined(__GNUC__) && (defined(SILC_IA64) || \
182 defined(SILC_POWERPC)))
184 volatile SilcUInt32 value;
189 volatile SilcUInt16 value;
193 #if !defined(SILC_THREADS) || (defined(__GNUC__) && (defined(SILC_I486) || \
194 defined(SILC_X86_64)))
196 volatile SilcUInt8 value;
198 #elif defined(SILC_WIN32) || (defined(__GNUC__) && (defined(SILC_IA64) || \
199 defined(SILC_POWERPC)))
201 volatile SilcUInt32 value;
206 volatile SilcUInt8 value;
210 /****f* silcutil/SilcAtomicAPI/silc_atomic_init32
215 * SilcBool silc_atomic_init32(SilcAtomic32 *atomic, SilcUInt32 value);
219 * Initializes the atomic variable `atomic', and sets the `value' as its
220 * inital value. Returns FALSE on error. To uninitialize call the
221 * silc_atomic_uninit32 function.
226 SilcBool silc_atomic_init32(SilcAtomic32 *atomic, SilcUInt32 value)
228 atomic->value = value;
230 #if defined(SILC_ATOMIC_MUTEX)
231 if (!silc_mutex_alloc(&atomic->lock))
233 #endif /* SILC_ATOMIC_MUTEX */
238 /****f* silcutil/SilcAtomicAPI/silc_atomic_init16
243 * SilcBool silc_atomic_init16(SilcAtomic16 *atomic, SilcUInt16 value);
247 * Initializes the atomic variable `atomic', and sets the `value' as its
248 * inital value. Returns FALSE on error. To uninitialize call the
249 * silc_atomic_uninit32 function.
254 SilcBool silc_atomic_init16(SilcAtomic16 *atomic, SilcUInt16 value)
256 atomic->value = value;
258 #if defined(SILC_ATOMIC_MUTEX)
259 if (!silc_mutex_alloc(&atomic->lock))
261 #endif /* SILC_ATOMIC_MUTEX */
266 /****f* silcutil/SilcAtomicAPI/silc_atomic_init8
271 * SilcBool silc_atomic_init8(SilcAtomic8 *atomic, SilcUInt8 value);
275 * Initializes the atomic variable `atomic', and sets the `value' as its
276 * inital value. Returns FALSE on error. To uninitialize call the
277 * silc_atomic_uninit8 function.
282 SilcBool silc_atomic_init8(SilcAtomic8 *atomic, SilcUInt8 value)
284 atomic->value = value;
286 #if defined(SILC_ATOMIC_MUTEX)
287 if (!silc_mutex_alloc(&atomic->lock))
289 #endif /* SILC_ATOMIC_MUTEX */
294 /****f* silcutil/SilcAtomicAPI/silc_atomic_init_pointer
299 * SilcBool silc_atomic_init_pointer(SilcAtomicPointer *atomic,
304 * Initializes the atomic pointer variable `atomic', and sets the `pointer'
305 * as its inital pointer. Returns FALSE on error. To uninitialize call
306 * the silc_atomic_uninit_pointer function.
311 SilcBool silc_atomic_init_pointer(SilcAtomicPointer *atomic, void *pointer)
313 atomic->pointer = pointer;
315 #if defined(SILC_ATOMIC_MUTEX)
316 if (!silc_mutex_alloc(&atomic->lock))
318 #endif /* SILC_ATOMIC_MUTEX */
323 /****f* silcutil/SilcAtomicAPI/silc_atomic_uninit32
328 * void silc_atomic_uninit32(SilcAtomic32 *atomic);
332 * Uninitializes the atomic variable `atomic'. This should alwyas be
333 * called after the atomic variable is not used anymore.
338 void silc_atomic_uninit32(SilcAtomic32 *atomic)
341 #if defined(SILC_ATOMIC_MUTEX)
342 silc_mutex_free(atomic->lock);
343 #endif /* SILC_ATOMIC_MUTEX */
346 /****f* silcutil/SilcAtomicAPI/silc_atomic_uninit16
351 * void silc_atomic_uninit16(SilcAtomic16 *atomic);
355 * Uninitializes the atomic variable `atomic'. This should alwyas be
356 * called after the atomic variable is not used anymore.
361 void silc_atomic_uninit16(SilcAtomic16 *atomic)
364 #if defined(SILC_ATOMIC_MUTEX)
365 silc_mutex_free(atomic->lock);
366 #endif /* SILC_ATOMIC_MUTEX */
369 /****f* silcutil/SilcAtomicAPI/silc_atomic_uninit8
374 * void silc_atomic_uninit8(SilcAtomic8 *atomic);
378 * Uninitializes the atomic variable `atomic'. This should alwyas be
379 * called after the atomic variable is not used anymore.
384 void silc_atomic_uninit8(SilcAtomic8 *atomic)
387 #if defined(SILC_ATOMIC_MUTEX)
388 silc_mutex_free(atomic->lock);
389 #endif /* SILC_ATOMIC_MUTEX */
392 /****f* silcutil/SilcAtomicAPI/silc_atomic_uninit_pointer
397 * void silc_atomic_uninit_pointer(SilcAtomicPointer *atomic);
401 * Uninitializes the atomic variable `atomic'. This should alwyas be
402 * called after the atomic variable is not used anymore.
407 void silc_atomic_uninit_pointer(SilcAtomicPointer *atomic)
409 atomic->pointer = NULL;
410 #if defined(SILC_ATOMIC_MUTEX)
411 silc_mutex_free(atomic->lock);
412 #endif /* SILC_ATOMIC_MUTEX */
415 /****f* silcutil/SilcAtomicAPI/silc_atomic_set_int32
420 * void silc_atomic_set_int32(SilcAtomic32 *atomic, SilcUInt32 value);
424 * Atomically sets `value' to 32-bit integer.
429 void silc_atomic_set_int32(SilcAtomic32 *atomic, SilcUInt32 value)
431 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
432 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
433 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
434 atomic->value = value;
436 #elif defined(__GNUC__) && defined(SILC_IA64)
437 /* IA64, memory barrier needed */
438 atomic->value = value;
439 __sync_synchronize();
441 #elif defined(__GNUC__) && defined(SILC_POWERPC)
442 /* PowerPC, memory barrier needed */
443 atomic->value = value;
444 __asm("sync" : : : "memory");
448 silc_mutex_lock(atomic->lock);
449 atomic->value = value;
450 silc_mutex_unlock(atomic->lock);
454 /****f* silcutil/SilcAtomicAPI/silc_atomic_set_int16
459 * void silc_atomic_set_int16(SilcAtomic16 *atomic, SilcUInt16 value);
463 * Atomically sets `value' to 16-bit integer.
468 void silc_atomic_set_int16(SilcAtomic16 *atomic, SilcUInt16 value)
470 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
471 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
472 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
473 atomic->value = value;
475 #elif defined(__GNUC__) && defined(SILC_IA64)
476 /* IA64, memory barrier needed */
477 atomic->value = value;
478 __sync_synchronize();
480 #elif defined(__GNUC__) && defined(SILC_POWERPC)
481 /* PowerPC, memory barrier needed */
482 atomic->value = value;
483 __asm("sync" : : : "memory");
487 silc_mutex_lock(atomic->lock);
488 atomic->value = value;
489 silc_mutex_unlock(atomic->lock);
493 /****f* silcutil/SilcAtomicAPI/silc_atomic_set_int8
498 * void silc_atomic_set_int8(SilcAtomic8 *atomic, SilcUInt8 value);
502 * Atomically sets `value' to 8-bit integer.
507 void silc_atomic_set_int8(SilcAtomic8 *atomic, SilcUInt8 value)
509 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
510 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
511 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
512 atomic->value = value;
514 #elif defined(__GNUC__) && defined(SILC_IA64)
515 /* IA64, memory barrier needed */
516 atomic->value = value;
517 __sync_synchronize();
519 #elif defined(__GNUC__) && defined(SILC_POWERPC)
520 /* PowerPC, memory barrier needed */
521 atomic->value = value;
522 __asm("sync" : : : "memory");
526 silc_mutex_lock(atomic->lock);
527 atomic->value = value;
528 silc_mutex_unlock(atomic->lock);
532 /****f* silcutil/SilcAtomicAPI/silc_atomic_set_pointer
537 * void silc_atomic_set_pointer(SilcAtomicPointer *atomic, void *pointer);
541 * Atomically sets `pointer' to the atomic variable.
546 void silc_atomic_set_pointer(SilcAtomicPointer *atomic, void *pointer)
548 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
549 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
550 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
551 atomic->pointer = pointer;
553 #elif defined(__GNUC__) && defined(SILC_IA64)
554 /* IA64, memory barrier needed */
555 atomic->pointer = pointer;
556 __sync_synchronize();
558 #elif defined(__GNUC__) && defined(SILC_POWERPC)
559 /* PowerPC, memory barrier needed */
560 atomic->pointer = pointer;
561 __asm("sync" : : : "memory");
565 silc_mutex_lock(atomic->lock);
566 atomic->pointer = pointer;
567 silc_mutex_unlock(atomic->lock);
571 /****f* silcutil/SilcAtomicAPI/silc_atomic_get_int32
576 * SilcUInt32 silc_atomic_get_int32(SilcAtomic32 *atomic);
580 * Returns the current value of the atomic variable.
585 SilcUInt32 silc_atomic_get_int32(SilcAtomic32 *atomic)
589 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
590 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
591 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
595 #elif defined(__GNUC__) && defined(SILC_IA64)
596 /* IA64, memory barrier needed */
597 __sync_synchronize();
601 #elif defined(__GNUC__) && defined(SILC_POWERPC)
602 /* PowerPC, memory barrier needed */
603 __asm("sync" : : : "memory");
609 silc_mutex_lock(atomic->lock);
611 silc_mutex_unlock(atomic->lock);
616 /****f* silcutil/SilcAtomicAPI/silc_atomic_get_int16
621 * SilcUInt32 silc_atomic_get_int16(SilcAtomic16 *atomic);
625 * Returns the current value of the atomic variable.
630 SilcUInt16 silc_atomic_get_int16(SilcAtomic16 *atomic)
634 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
635 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
636 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
637 ret = atomic->value & 0xffff;
640 #elif defined(__GNUC__) && defined(SILC_IA64)
641 /* IA64, memory barrier needed */
642 __sync_synchronize();
643 ret = atomic->value & 0xffff;
646 #elif defined(__GNUC__) && defined(SILC_POWERPC)
647 /* PowerPC, memory barrier needed */
648 __asm("sync" : : : "memory");
649 ret = atomic->value & 0xffff;
654 silc_mutex_lock(atomic->lock);
655 ret = atomic->value & 0xffff;
656 silc_mutex_unlock(atomic->lock);
661 /****f* silcutil/SilcAtomicAPI/silc_atomic_get_int8
666 * SilcUInt32 silc_atomic_get_int8(SilcAtomic8 *atomic);
670 * Returns the current value of the atomic variable.
675 SilcUInt8 silc_atomic_get_int8(SilcAtomic8 *atomic)
679 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
680 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
681 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
682 ret = atomic->value & 0xff;
685 #elif defined(__GNUC__) && defined(SILC_IA64)
686 /* IA64, memory barrier needed */
687 __sync_synchronize();
688 ret = atomic->value & 0xff;
691 #elif defined(__GNUC__) && defined(SILC_POWERPC)
692 /* PowerPC, memory barrier needed */
693 __asm("sync" : : : "memory");
694 ret = atomic->value & 0xff;
699 silc_mutex_lock(atomic->lock);
700 ret = atomic->value & 0xff;
701 silc_mutex_unlock(atomic->lock);
706 /****f* silcutil/SilcAtomicAPI/silc_atomic_get_pointer
711 * SilcUInt8 silc_atomic_get_pointer(SilcAtomicPointer *atomic)
715 * Returns the current pointer value of the atomic variable.
720 void *silc_atomic_get_pointer(SilcAtomicPointer *atomic)
724 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
725 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
726 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
727 ret = (void *)atomic->pointer;
730 #elif defined(__GNUC__) && defined(SILC_IA64)
731 /* IA64, memory barrier needed */
732 __sync_synchronize();
733 ret = (void *)atomic->pointer;
736 #elif defined(__GNUC__) && defined(SILC_POWERPC)
737 /* PowerPC, memory barrier needed */
738 __asm("sync" : : : "memory");
739 ret = (void *)atomic->pointer;
744 silc_mutex_lock(atomic->lock);
745 ret = (void *)atomic->pointer;
746 silc_mutex_unlock(atomic->lock);
751 /****f* silcutil/SilcAtomicAPI/silc_atomic_add_int32
756 * SilcUInt32 silc_atomic_add_int32(SilcAtomic32 *atomic, SilcInt32 value);
760 * Atomically adds `value' to 32-bit integer. Returns the value after
766 SilcUInt32 silc_atomic_add_int32(SilcAtomic32 *atomic, SilcInt32 value)
770 #if !defined(SILC_THREADS)
771 /* No atomic operations */
773 atomic->value += value;
775 #elif defined(SILC_WIN32)
777 ret = InterlockedExchangeAdd(&atomic->value, (LONG)value);
779 #elif defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64))
780 /* GCC + i486 or x86_64 */
781 __asm __volatile("lock; xaddl %0, %1"
782 : "=r" (ret), "+m" (atomic->value)
785 #elif defined(__GNUC__) && defined(SILC_IA64)
786 /* GCC + IA64 (GCC builtin atomic operations) */
787 ret = __sync_fetch_and_add(&atomic->value, value);
789 #elif defined(__GNUC__) && defined(SILC_POWERPC)
790 /* GCC + PowerPC (code adapted from IBM's documentation) */
791 __asm __volatile("0: lwarx %0, 0, %2\n"
793 " stwcx. %0, 0, %2\n"
796 : "r" (value), "r" (&atomic->value)
802 silc_mutex_lock(atomic->lock);
804 atomic->value += value;
805 silc_mutex_unlock(atomic->lock);
811 /****f* silcutil/SilcAtomicAPI/silc_atomic_add_int32
816 * SilcUInt16 silc_atomic_add_int16(SilcAtomic16 *atomic, SilcInt16 value);
820 * Atomically adds `value' to 16-bit integer. Returns the value after
826 SilcUInt16 silc_atomic_add_int16(SilcAtomic16 *atomic, SilcInt16 value)
830 #if !defined(SILC_THREADS)
831 /* No atomic operations */
833 atomic->value += value;
835 #elif defined(SILC_WIN32)
838 ret = InterlockedExchangeAdd(&atomic->value, v);
840 #elif defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64))
841 /* GCC + i486 or x86_64 */
842 __asm __volatile("lock; xaddw %0, %1"
843 : "=c" (ret), "+m" (atomic->value)
846 #elif defined(__GNUC__) && defined(SILC_IA64)
847 /* GCC + IA64 (GCC builtin atomic operations) */
849 ret = __sync_fetch_and_add(&atomic->value, v);
851 #elif defined(__GNUC__) && defined(SILC_POWERPC)
852 /* GCC + PowerPC (code adapted from IBM's documentation) */
855 __asm __volatile("0: lwarx %0, 0, %2\n"
857 " stwcx. %0, 0, %2\n"
860 : "r" (v), "r" (&atomic->value)
862 return ret32 & 0xffff;
866 silc_mutex_lock(atomic->lock);
868 atomic->value += value;
869 silc_mutex_unlock(atomic->lock);
875 /****f* silcutil/SilcAtomicAPI/silc_atomic_add_int8
880 * SilcUInt8 silc_atomic_add_int8(SilcAtomic8 *atomic, SilcInt8 value);
884 * Atomically adds `value' to 8-bit integer. Returns the value after
890 SilcUInt8 silc_atomic_add_int8(SilcAtomic8 *atomic, SilcInt8 value)
894 #if !defined(SILC_THREADS)
895 /* No atomic operations */
897 atomic->value += value;
899 #elif defined(SILC_WIN32)
902 ret = InterlockedExchangeAdd(&atomic->value, v);
904 #elif defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64))
905 /* GCC + i486 or x86_64 */
906 __asm __volatile("lock; xaddb %0, %1"
907 : "=c" (ret), "+m" (atomic->value)
910 #elif defined(__GNUC__) && defined(SILC_IA64)
911 /* GCC + IA64 (GCC builtin atomic operations) */
913 ret = __sync_fetch_and_add(&atomic->value, v);
915 #elif defined(__GNUC__) && defined(SILC_POWERPC)
916 /* GCC + PowerPC (code adapted from IBM's documentation) */
919 __asm __volatile("0: lwarx %0, 0, %2\n"
921 " stwcx. %0, 0, %2\n"
924 : "r" (v), "r" (&atomic->value)
930 silc_mutex_lock(atomic->lock);
932 atomic->value += value;
933 silc_mutex_unlock(atomic->lock);
939 /****f* silcutil/SilcAtomicAPI/silc_atomic_sub_int32
944 * SilcUInt32 silc_atomic_sub_int32(SilcAtomic32 *atomic, SilcInt32 value);
948 * Atomically subtracts `value' from 32-bit integer. Returns the value
954 SilcUInt32 silc_atomic_sub_int32(SilcAtomic32 *atomic, SilcInt32 value)
956 return silc_atomic_add_int32(atomic, -value);
959 /****f* silcutil/SilcAtomicAPI/silc_atomic_sub_int16
964 * SilcUInt16 silc_atomic_sub_int16(SilcAtomic16 *atomic, SilcInt16 value);
968 * Atomically subtracts `value' from 16-bit integer. Returns the value
974 SilcUInt16 silc_atomic_sub_int16(SilcAtomic16 *atomic, SilcInt16 value)
976 return silc_atomic_add_int16(atomic, -value);
979 /****f* silcutil/SilcAtomicAPI/silc_atomic_sub_int8
984 * SilcUInt8 silc_atomic_sub_int8(SilcAtomic8 *atomic, SilcInt8 value);
988 * Atomically subtracts `value' from 8-bit integer. Returns the value
994 SilcUInt8 silc_atomic_sub_int8(SilcAtomic8 *atomic, SilcInt8 value)
996 return silc_atomic_add_int8(atomic, -value);
999 /****f* silcutil/SilcAtomicAPI/silc_atomic_cas32
1004 * SilcBool silc_atomic_cas32(SilcAtomic32 *atomic, SilcUInt32 old_val,
1005 * SilcUInt32 new_val)
1009 * Performs compare and swap (CAS). Atomically compares if the variable
1010 * `atomic' has the value `old_val' and in that case swaps it with the
1011 * value `new_val'. Returns TRUE if the old value was same and it was
1012 * swapped and FALSE if it differed and was not swapped.
1017 SilcBool silc_atomic_cas32(SilcAtomic32 *atomic, SilcUInt32 old_val,
1020 #if !defined(SILC_THREADS)
1021 /* No atomic operations */
1022 if (atomic->value == old_val) {
1023 atomic->value = new_val;
1028 #elif defined(SILC_WIN32)
1030 return InterlockedCompareExchange(&atomic->value, (LONG)new_val,
1031 (LONG)old_val) == old_val;
1033 #elif defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64))
1034 /* GCC + i486 or x86_64 */
1036 __asm __volatile("lock; cmpxchgl %2, %1"
1037 : "=a" (ret), "=m" (atomic->value)
1038 : "r" (new_val), "m" (atomic->value), "0" (old_val));
1039 return ret == old_val;
1041 #elif defined(__GNUC__) && defined(SILC_IA64)
1042 /* GCC + IA64 (GCC builtin atomic operations) */
1043 return __sync_bool_compare_and_swap(&atomic->value, old_val, new_val);
1045 #elif defined(__GNUC__) && defined(SILC_POWERPC)
1051 silc_mutex_lock(atomic->lock);
1052 if (atomic->value == old_val) {
1053 atomic->value = new_val;
1054 silc_mutex_unlock(atomic->lock);
1057 silc_mutex_unlock(atomic->lock);
1062 /****f* silcutil/SilcAtomicAPI/silc_atomic_cas16
1067 * SilcBool silc_atomic_cas16(SilcAtomic16 *atomic, SilcUInt16 old_val,
1068 * SilcUInt16 new_val)
1072 * Performs compare and swap (CAS). Atomically compares if the variable
1073 * `atomic' has the value `old_val' and in that case swaps it with the
1074 * value `new_val'. Returns TRUE if the old value was same and it was
1075 * swapped and FALSE if it differed and was not swapped.
1080 SilcBool silc_atomic_cas16(SilcAtomic16 *atomic, SilcUInt16 old_val,
1083 #if !defined(SILC_THREADS)
1084 /* No atomic operations */
1085 if (atomic->value == old_val) {
1086 atomic->value = new_val;
1091 #elif defined(SILC_WIN32)
1093 LONG o = old_val, n = new_val;
1094 return InterlockedCompareExchange(&atomic->value, n, o) == o;
1096 #elif defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64))
1097 /* GCC + i486 or x86_64 */
1099 __asm __volatile("lock; cmpxchgw %2, %1"
1100 : "=a" (ret), "=m" (atomic->value)
1101 : "c" (new_val), "m" (atomic->value), "0" (old_val));
1102 return ret == old_val;
1104 #elif defined(__GNUC__) && defined(SILC_IA64)
1105 /* GCC + IA64 (GCC builtin atomic operations) */
1106 SilcUInt32 o = old_val, n = new_val;
1107 return __sync_bool_compare_and_swap(&atomic->value, o, n);
1109 #elif defined(__GNUC__) && defined(SILC_POWERPC)
1115 silc_mutex_lock(atomic->lock);
1116 if (atomic->value == old_val) {
1117 atomic->value = new_val;
1118 silc_mutex_unlock(atomic->lock);
1121 silc_mutex_unlock(atomic->lock);
1126 /****f* silcutil/SilcAtomicAPI/silc_atomic_cas8
1131 * SilcBool silc_atomic_cas8(SilcAtomic8 *atomic, SilcUInt8 old_val,
1132 * SilcUInt8 new_val)
1136 * Performs compare and swap (CAS). Atomically compares if the variable
1137 * `atomic' has the value `old_val' and in that case swaps it with the
1138 * value `new_val'. Returns TRUE if the old value was same and it was
1139 * swapped and FALSE if it differed and was not swapped.
1144 SilcBool silc_atomic_cas8(SilcAtomic8 *atomic, SilcUInt8 old_val,
1147 #if !defined(SILC_THREADS)
1148 /* No atomic operations */
1149 if (atomic->value == old_val) {
1150 atomic->value = new_val;
1155 #elif defined(SILC_WIN32)
1157 LONG o = old_val, n = new_val;
1158 return InterlockedCompareExchange(&atomic->value, n, o) == o;
1160 #elif defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64))
1161 /* GCC + i486 or x86_64 */
1163 __asm __volatile("lock; cmpxchgb %2, %1"
1164 : "=a" (ret), "=m" (atomic->value)
1165 : "c" (new_val), "m" (atomic->value), "0" (old_val));
1166 return ret == old_val;
1168 #elif defined(__GNUC__) && defined(SILC_IA64)
1169 /* GCC + IA64 (GCC builtin atomic operations) */
1170 SilcUInt32 o = old_val, n = new_val;
1171 return __sync_bool_compare_and_swap(&atomic->value, o, n);
1173 #elif defined(__GNUC__) && defined(SILC_POWERPC)
1179 silc_mutex_lock(atomic->lock);
1180 if (atomic->value == old_val) {
1181 atomic->value = new_val;
1182 silc_mutex_unlock(atomic->lock);
1185 silc_mutex_unlock(atomic->lock);
1190 /****f* silcutil/SilcAtomicAPI/silc_atomic_cas_pointer
1195 * SilcBool silc_atomic_cas_pointer(SilcAtomicPointer *atomic,
1196 * void *old_ptr, void *new_ptr);
1200 * Performs compare and swap (CAS). Atomically compares if the variable
1201 * `atomic' has the pointer `old_ptr' and in that case swaps it with the
1202 * pointer `new_ptr'. Returns TRUE if the old pointer was same and it was
1203 * swapped and FALSE if it differed and was not swapped.
1208 SilcBool silc_atomic_cas_pointer(SilcAtomicPointer *atomic, void *old_val,
1211 #if !defined(SILC_THREADS)
1212 /* No atomic operations */
1213 if (atomic->pointer == old_val) {
1214 atomic->pointer = new_val;
1219 #elif defined(SILC_WIN32)
1221 return InterlockedCompareExchangePointer(&atomic->pointer, n, o) == o;
1223 #elif defined(__GNUC__) && defined(SILC_I486)
1226 __asm __volatile("lock; cmpxchgl %2, %1"
1227 : "=a" (ret), "=m" (atomic->pointer)
1228 : "c" (new_val), "m" (atomic->pointer), "0" (old_val));
1229 return ret == old_val;
1231 #elif defined(__GNUC__) && defined(SILC_X86_64)
1234 __asm __volatile("lock; cmpxchgq %q2, %1"
1235 : "=a" (ret), "=m" (atomic->pointer)
1236 : "c" (new_val), "m" (atomic->pointer), "0" (old_val));
1237 return ret == old_val;
1239 #elif defined(__GNUC__) && defined(SILC_IA64)
1240 /* GCC + IA64 (GCC builtin atomic operations) */
1241 return __sync_bool_compare_and_swap((long)&atomic->pointer, (long)old_val,
1244 #elif defined(__GNUC__) && defined(SILC_POWERPC)
1250 silc_mutex_lock(atomic->lock);
1251 if (atomic->pointer == old_val) {
1252 atomic->pointer = new_val;
1253 silc_mutex_unlock(atomic->lock);
1256 silc_mutex_unlock(atomic->lock);
1261 #endif /* SILCATOMIC_H */