5 Author: Pekka Riikonen <priikone@silcnet.org>
7 Copyright (C) 2006 Pekka Riikonen
9 This program is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; version 2 of the License.
13 This program is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
20 /****h* silcutil/SILC Atomic Operations Interface
24 * SILC Atomic operations interface provides utility functions to perform
25 * simple operations with integers atomically. This enables fast integer
26 * additions and subtractions safely in multithreaded environment. It is
27 * especially suited for reference counters and similar and is much faster
28 * than using locking. This interface supports 8, 16 and 32 bit integers
29 * and 32 or 64 bit pointers.
31 * On some platforms this interface actually uses a mutual exclusion lock
32 * instead of true atomic operations, leading to some performance penalty.
33 * Also on some platforms the 8 and 16 bit integers are actually 32 bit
36 * Fast operations are supported on: x86, x86_64, ia64, PPC
43 /****s* silcutil/SilcAtomicAPI/SilcAtomic32
47 * typedef struct { ... } SilcAtomic32;
51 * The atomic operation structure given as argument to all atomic
52 * operation functions. It holds the actual 32-bit atomic variable.
56 * SilcAtomic32 refcnt;
58 * // Initialize atomic variable
59 * silc_atomic_init32(&refcnt, 0);
62 * // Increment reference counter
63 * silc_atomic_add_int32(&refcnt, 1);
66 * // Uninitialize atomic variable
67 * silc_atomic_uninit32(&refcnt);
71 /****s* silcutil/SilcAtomicAPI/SilcAtomic16
75 * typedef struct { ... } SilcAtomic16;
79 * The atomic operation structure given as argument to all atomic
80 * operation functions. It holds the actual 16-bit atomic variable.
84 * SilcAtomic16 refcnt;
86 * // Initialize atomic variable
87 * silc_atomic_init16(&refcnt, 0);
90 * // Increment reference counter
91 * silc_atomic_add_int16(&refcnt, 1);
94 * // Uninitialize atomic variable
95 * silc_atomic_uninit16(&refcnt);
99 /****s* silcutil/SilcAtomicAPI/SilcAtomic8
103 * typedef struct { ... } SilcAtomic8;
107 * The atomic operation structure given as argument to all atomic
108 * operation functions. It holds the actual 8-bit atomic variable.
112 * SilcAtomic8 refcnt;
114 * // Initialize atomic variable
115 * silc_atomic_init8(&refcnt, 0);
118 * // Increment reference counter
119 * silc_atomic_add_int8(&refcnt, 1);
122 * // Uninitialize atomic variable
123 * silc_atomic_uninit8(&refcnt);
127 /****s* silcutil/SilcAtomicAPI/SilcAtomicPointer
131 * typedef struct { ... } SilcAtomicPointer;
135 * The atomic operation structure given as argument to all atomic
136 * operation functions. It holds the actual pointer variable.
140 * SilcAtomicPointer ptr;
142 * // Initialize atomic variable
143 * silc_atomic_init_pointer(&ptr, NULL);
147 * silc_atomic_set_pointer(&ptr, context);
150 * // Uninitialize atomic variable
151 * silc_atomic_uninit_pointer(&ptr);
155 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || (defined(__GNUC__) && \
156 (defined(SILC_I486) || defined(SILC_X86_64) || defined(SILC_IA64) || \
157 defined(SILC_POWERPC)))
159 volatile SilcUInt32 value;
162 volatile void *pointer;
165 #define SILC_ATOMIC_MUTEX
168 volatile SilcUInt32 value;
172 volatile void *pointer;
176 #if !defined(SILC_THREADS) || (defined(__GNUC__) && (defined(SILC_I486) || \
177 defined(SILC_X86_64)))
179 volatile SilcUInt16 value;
181 #elif defined(SILC_WIN32) || (defined(__GNUC__) && (defined(SILC_IA64) || \
182 defined(SILC_POWERPC)))
184 volatile SilcUInt32 value;
189 volatile SilcUInt16 value;
193 #if !defined(SILC_THREADS) || (defined(__GNUC__) && (defined(SILC_I486) || \
194 defined(SILC_X86_64)))
196 volatile SilcUInt8 value;
198 #elif defined(SILC_WIN32) || (defined(__GNUC__) && (defined(SILC_IA64) || \
199 defined(SILC_POWERPC)))
201 volatile SilcUInt32 value;
206 volatile SilcUInt8 value;
210 /****f* silcutil/SilcAtomicAPI/silc_atomic_init32
215 * SilcBool silc_atomic_init32(SilcAtomic32 *atomic, SilcUInt32 value);
219 * Initializes the atomic variable `atomic', and sets the `value' as its
220 * inital value. Returns FALSE on error. To uninitialize call the
221 * silc_atomic_uninit32 function.
226 SilcBool silc_atomic_init32(SilcAtomic32 *atomic, SilcUInt32 value)
228 atomic->value = value;
230 #if defined(SILC_ATOMIC_MUTEX)
231 if (!silc_mutex_alloc(&atomic->lock))
233 #endif /* SILC_ATOMIC_MUTEX */
238 /****f* silcutil/SilcAtomicAPI/silc_atomic_init16
243 * SilcBool silc_atomic_init16(SilcAtomic16 *atomic, SilcUInt16 value);
247 * Initializes the atomic variable `atomic', and sets the `value' as its
248 * inital value. Returns FALSE on error. To uninitialize call the
249 * silc_atomic_uninit32 function.
254 SilcBool silc_atomic_init16(SilcAtomic16 *atomic, SilcUInt16 value)
256 atomic->value = value;
258 #if defined(SILC_ATOMIC_MUTEX)
259 if (!silc_mutex_alloc(&atomic->lock))
261 #endif /* SILC_ATOMIC_MUTEX */
266 /****f* silcutil/SilcAtomicAPI/silc_atomic_init8
271 * SilcBool silc_atomic_init8(SilcAtomic8 *atomic, SilcUInt8 value);
275 * Initializes the atomic variable `atomic', and sets the `value' as its
276 * inital value. Returns FALSE on error. To uninitialize call the
277 * silc_atomic_uninit8 function.
282 SilcBool silc_atomic_init8(SilcAtomic8 *atomic, SilcUInt8 value)
284 atomic->value = value;
286 #if defined(SILC_ATOMIC_MUTEX)
287 if (!silc_mutex_alloc(&atomic->lock))
289 #endif /* SILC_ATOMIC_MUTEX */
294 /****f* silcutil/SilcAtomicAPI/silc_atomic_init_pointer
299 * SilcBool silc_atomic_init_pointer(SilcAtomicPointer *atomic,
304 * Initializes the atomic pointer variable `atomic', and sets the `pointer'
305 * as its inital pointer. Returns FALSE on error. To uninitialize call
306 * the silc_atomic_uninit_pointer function.
311 SilcBool silc_atomic_init_pointer(SilcAtomicPointer *atomic, void *pointer)
313 atomic->pointer = pointer;
315 #if defined(SILC_ATOMIC_MUTEX)
316 if (!silc_mutex_alloc(&atomic->lock))
318 #endif /* SILC_ATOMIC_MUTEX */
323 /****f* silcutil/SilcAtomicAPI/silc_atomic_uninit32
328 * void silc_atomic_uninit32(SilcAtomic32 *atomic);
332 * Uninitializes the atomic variable `atomic'. This should alwyas be
333 * called after the atomic variable is not used anymore.
338 void silc_atomic_uninit32(SilcAtomic32 *atomic)
341 #if defined(SILC_ATOMIC_MUTEX)
342 silc_mutex_free(atomic->lock);
343 #endif /* SILC_ATOMIC_MUTEX */
346 /****f* silcutil/SilcAtomicAPI/silc_atomic_uninit16
351 * void silc_atomic_uninit16(SilcAtomic16 *atomic);
355 * Uninitializes the atomic variable `atomic'. This should alwyas be
356 * called after the atomic variable is not used anymore.
361 void silc_atomic_uninit16(SilcAtomic16 *atomic)
364 #if defined(SILC_ATOMIC_MUTEX)
365 silc_mutex_free(atomic->lock);
366 #endif /* SILC_ATOMIC_MUTEX */
369 /****f* silcutil/SilcAtomicAPI/silc_atomic_uninit8
374 * void silc_atomic_uninit8(SilcAtomic8 *atomic);
378 * Uninitializes the atomic variable `atomic'. This should alwyas be
379 * called after the atomic variable is not used anymore.
384 void silc_atomic_uninit8(SilcAtomic8 *atomic)
387 #if defined(SILC_ATOMIC_MUTEX)
388 silc_mutex_free(atomic->lock);
389 #endif /* SILC_ATOMIC_MUTEX */
392 /****f* silcutil/SilcAtomicAPI/silc_atomic_uninit_pointer
397 * void silc_atomic_uninit_pointer(SilcAtomicPointer *atomic);
401 * Uninitializes the atomic variable `atomic'. This should alwyas be
402 * called after the atomic variable is not used anymore.
407 void silc_atomic_uninit_pointer(SilcAtomicPointer *atomic)
409 atomic->pointer = NULL;
410 #if defined(SILC_ATOMIC_MUTEX)
411 silc_mutex_free(atomic->lock);
412 #endif /* SILC_ATOMIC_MUTEX */
415 /****f* silcutil/SilcAtomicAPI/silc_atomic_set_int32
420 * void silc_atomic_set_int32(SilcAtomic32 *atomic, SilcUInt32 value);
424 * Atomically sets `value' to 32-bit integer.
429 void silc_atomic_set_int32(SilcAtomic32 *atomic, SilcUInt32 value)
431 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
432 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
433 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
434 atomic->value = value;
436 #elif defined(__GNUC__) && defined(SILC_IA64)
437 /* IA64, memory barrier needed */
438 atomic->value = value;
439 __sync_synchronize();
441 #elif defined(__GNUC__) && defined(SILC_POWERPC)
442 /* PowerPC, memory barrier needed */
443 atomic->value = value;
444 __asm("sync" : : : "memory");
448 silc_mutex_lock(atomic->lock);
449 atomic->value = value;
450 silc_mutex_unlock(atomic->lock);
454 /****f* silcutil/SilcAtomicAPI/silc_atomic_set_int16
459 * void silc_atomic_set_int16(SilcAtomic16 *atomic, SilcUInt16 value);
463 * Atomically sets `value' to 16-bit integer.
468 void silc_atomic_set_int16(SilcAtomic16 *atomic, SilcUInt16 value)
470 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
471 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
472 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
473 atomic->value = value;
475 #elif defined(__GNUC__) && defined(SILC_IA64)
476 /* IA64, memory barrier needed */
477 atomic->value = value;
478 __sync_synchronize();
480 #elif defined(__GNUC__) && defined(SILC_POWERPC)
481 /* PowerPC, memory barrier needed */
482 atomic->value = value;
483 __asm("sync" : : : "memory");
487 silc_mutex_lock(atomic->lock);
488 atomic->value = value;
489 silc_mutex_unlock(atomic->lock);
493 /****f* silcutil/SilcAtomicAPI/silc_atomic_set_int8
498 * void silc_atomic_set_int8(SilcAtomic8 *atomic, SilcUInt8 value);
502 * Atomically sets `value' to 8-bit integer.
507 void silc_atomic_set_int8(SilcAtomic8 *atomic, SilcUInt8 value)
509 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
510 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
511 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
512 atomic->value = value;
514 #elif defined(__GNUC__) && defined(SILC_IA64)
515 /* IA64, memory barrier needed */
516 atomic->value = value;
517 __sync_synchronize();
519 #elif defined(__GNUC__) && defined(SILC_POWERPC)
520 /* PowerPC, memory barrier needed */
521 atomic->value = value;
522 __asm("sync" : : : "memory");
526 silc_mutex_lock(atomic->lock);
527 atomic->value = value;
528 silc_mutex_unlock(atomic->lock);
532 /****f* silcutil/SilcAtomicAPI/silc_atomic_set_pointer
537 * void silc_atomic_set_pointer(SilcAtomicPointer *atomic, void *pointer);
541 * Atomically sets `pointer' to the atomic variable.
546 void silc_atomic_set_pointer(SilcAtomicPointer *atomic, void *pointer)
548 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
549 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
550 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
551 atomic->pointer = pointer;
553 #elif defined(__GNUC__) && defined(SILC_IA64)
554 /* IA64, memory barrier needed */
555 atomic->pointer = pointer;
556 __sync_synchronize();
558 #elif defined(__GNUC__) && defined(SILC_POWERPC)
559 /* PowerPC, memory barrier needed */
560 atomic->pointer = pointer;
561 __asm("sync" : : : "memory");
565 silc_mutex_lock(atomic->lock);
566 atomic->pointer = pointer;
567 silc_mutex_unlock(atomic->lock);
571 /****f* silcutil/SilcAtomicAPI/silc_atomic_get_int32
576 * SilcUInt32 silc_atomic_get_int32(SilcAtomic32 *atomic);
580 * Returns the current value of the atomic variable.
585 SilcUInt32 silc_atomic_get_int32(SilcAtomic32 *atomic)
589 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
590 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
591 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
595 #elif defined(__GNUC__) && defined(SILC_IA64)
596 /* IA64, memory barrier needed */
597 __sync_synchronize();
601 #elif defined(__GNUC__) && defined(SILC_POWERPC)
602 /* PowerPC, memory barrier needed */
603 __asm("sync" : : : "memory");
609 silc_mutex_lock(atomic->lock);
611 silc_mutex_unlock(atomic->lock);
616 /****f* silcutil/SilcAtomicAPI/silc_atomic_get_int16
621 * SilcUInt32 silc_atomic_get_int16(SilcAtomic16 *atomic);
625 * Returns the current value of the atomic variable.
630 SilcUInt16 silc_atomic_get_int16(SilcAtomic16 *atomic)
634 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
635 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
636 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
637 ret = atomic->value & 0xffff;
640 #elif defined(__GNUC__) && defined(SILC_IA64)
641 /* IA64, memory barrier needed */
642 __sync_synchronize();
643 ret = atomic->value & 0xffff;
646 #elif defined(__GNUC__) && defined(SILC_POWERPC)
647 /* PowerPC, memory barrier needed */
648 __asm("sync" : : : "memory");
649 ret = atomic->value & 0xffff;
654 silc_mutex_lock(atomic->lock);
655 ret = atomic->value & 0xffff;
656 silc_mutex_unlock(atomic->lock);
661 /****f* silcutil/SilcAtomicAPI/silc_atomic_get_int8
666 * SilcUInt32 silc_atomic_get_int8(SilcAtomic8 *atomic);
670 * Returns the current value of the atomic variable.
675 SilcUInt8 silc_atomic_get_int8(SilcAtomic8 *atomic)
679 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
680 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
681 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
682 ret = atomic->value & 0xff;
685 #elif defined(__GNUC__) && defined(SILC_IA64)
686 /* IA64, memory barrier needed */
687 __sync_synchronize();
688 ret = atomic->value & 0xff;
691 #elif defined(__GNUC__) && defined(SILC_POWERPC)
692 /* PowerPC, memory barrier needed */
693 __asm("sync" : : : "memory");
694 ret = atomic->value & 0xff;
699 silc_mutex_lock(atomic->lock);
700 ret = atomic->value & 0xff;
701 silc_mutex_unlock(atomic->lock);
706 /****f* silcutil/SilcAtomicAPI/silc_atomic_get_pointer
711 * SilcUInt8 silc_atomic_get_pointer(SilcAtomicPointer *atomic)
715 * Returns the current pointer value of the atomic variable.
720 void *silc_atomic_get_pointer(SilcAtomicPointer *atomic)
724 #if !defined(SILC_THREADS) || defined(SILC_WIN32) || \
725 (defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64)))
726 /* No threads, Windows, i486 or x86_64, no memory barrier needed */
727 ret = (void *)atomic->pointer;
730 #elif defined(__GNUC__) && defined(SILC_IA64)
731 /* IA64, memory barrier needed */
732 __sync_synchronize();
733 ret = (void *)atomic->pointer;
736 #elif defined(__GNUC__) && defined(SILC_POWERPC)
737 /* PowerPC, memory barrier needed */
738 __asm("sync" : : : "memory");
739 ret = (void *)atomic->pointer;
744 silc_mutex_lock(atomic->lock);
745 ret = (void *)atomic->pointer;
746 silc_mutex_unlock(atomic->lock);
751 /****f* silcutil/SilcAtomicAPI/silc_atomic_add_int32
756 * SilcUInt32 silc_atomic_add_int32(SilcAtomic32 *atomic, SilcInt32 value);
760 * Atomically adds `value' to 32-bit integer. Returns the value after
766 SilcUInt32 silc_atomic_add_int32(SilcAtomic32 *atomic, SilcInt32 value)
770 #if !defined(SILC_THREADS)
771 /* No atomic operations */
773 atomic->value += value;
775 #elif defined(SILC_WIN32)
777 ret = InterlockedExchangeAdd(&atomic->value, (LONG)value);
779 #elif defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64))
780 /* GCC + i486 or x86_64 */
781 __asm __volatile("lock; xaddl %0, %1"
782 : "=r" (ret), "+m" (atomic->value)
785 #elif defined(__GNUC__) && defined(SILC_IA64)
786 /* GCC + IA64 (GCC builtin atomic operations) */
787 ret = __sync_fetch_and_add(&atomic->value, value);
789 #elif defined(__GNUC__) && defined(SILC_POWERPC)
790 /* GCC + PowerPC (code adapted from IBM's documentation) */
791 __asm __volatile("0: lwarx %0, 0, %2\n"
793 " stwcx. %0, 0, %2\n"
796 : "r" (value), "r" (&atomic->value)
802 silc_mutex_lock(atomic->lock);
804 atomic->value += value;
805 silc_mutex_unlock(atomic->lock);
811 /****f* silcutil/SilcAtomicAPI/silc_atomic_add_int32
816 * SilcUInt16 silc_atomic_add_int16(SilcAtomic16 *atomic, SilcInt16 value);
820 * Atomically adds `value' to 16-bit integer. Returns the value after
826 SilcUInt16 silc_atomic_add_int16(SilcAtomic16 *atomic, SilcInt16 value)
830 #if !defined(SILC_THREADS)
831 /* No atomic operations */
833 atomic->value += value;
835 #elif defined(SILC_WIN32)
838 ret = InterlockedExchangeAdd(&atomic->value, v);
840 #elif defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64))
841 /* GCC + i486 or x86_64 */
842 __asm __volatile("lock; xaddw %0, %1"
843 : "=c" (ret), "+m" (atomic->value)
846 #elif defined(__GNUC__) && defined(SILC_IA64)
847 /* GCC + IA64 (GCC builtin atomic operations) */
849 ret = __sync_fetch_and_add(&atomic->value, v);
851 #elif defined(__GNUC__) && defined(SILC_POWERPC)
852 /* GCC + PowerPC (code adapted from IBM's documentation) */
855 __asm __volatile("0: lwarx %0, 0, %2\n"
857 " stwcx. %0, 0, %2\n"
860 : "r" (v), "r" (&atomic->value)
862 return ret32 & 0xffff;
866 silc_mutex_lock(atomic->lock);
868 atomic->value += value;
869 silc_mutex_unlock(atomic->lock);
875 /****f* silcutil/SilcAtomicAPI/silc_atomic_add_int8
880 * SilcUInt8 silc_atomic_add_int8(SilcAtomic8 *atomic, SilcInt8 value);
884 * Atomically adds `value' to 8-bit integer. Returns the value after
890 SilcUInt8 silc_atomic_add_int8(SilcAtomic8 *atomic, SilcInt8 value)
894 #if !defined(SILC_THREADS)
895 /* No atomic operations */
897 atomic->value += value;
899 #elif defined(SILC_WIN32)
902 ret = InterlockedExchangeAdd(&atomic->value, v);
904 #elif defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64))
905 /* GCC + i486 or x86_64 */
906 __asm __volatile("lock; xaddb %0, %1"
907 : "=c" (ret), "+m" (atomic->value)
910 #elif defined(__GNUC__) && defined(SILC_IA64)
911 /* GCC + IA64 (GCC builtin atomic operations) */
913 ret = __sync_fetch_and_add(&atomic->value, v);
915 #elif defined(__GNUC__) && defined(SILC_POWERPC)
916 /* GCC + PowerPC (code adapted from IBM's documentation) */
919 __asm __volatile("0: lwarx %0, 0, %2\n"
921 " stwcx. %0, 0, %2\n"
924 : "r" (v), "r" (&atomic->value)
930 silc_mutex_lock(atomic->lock);
932 atomic->value += value;
933 silc_mutex_unlock(atomic->lock);
939 /****f* silcutil/SilcAtomicAPI/silc_atomic_sub_int32
944 * SilcUInt32 silc_atomic_sub_int32(SilcAtomic32 *atomic, SilcInt32 value);
948 * Atomically subtracts `value' from 32-bit integer. Returns the value
954 SilcUInt32 silc_atomic_sub_int32(SilcAtomic32 *atomic, SilcInt32 value)
956 return silc_atomic_add_int32(atomic, -value);
959 /****f* silcutil/SilcAtomicAPI/silc_atomic_sub_int16
964 * SilcUInt16 silc_atomic_sub_int16(SilcAtomic16 *atomic, SilcInt16 value);
968 * Atomically subtracts `value' from 16-bit integer. Returns the value
974 SilcUInt16 silc_atomic_sub_int16(SilcAtomic16 *atomic, SilcInt16 value)
976 return silc_atomic_add_int16(atomic, -value);
979 /****f* silcutil/SilcAtomicAPI/silc_atomic_sub_int8
984 * SilcUInt8 silc_atomic_sub_int8(SilcAtomic8 *atomic, SilcInt8 value);
988 * Atomically subtracts `value' from 8-bit integer. Returns the value
994 SilcUInt8 silc_atomic_sub_int8(SilcAtomic8 *atomic, SilcInt8 value)
996 return silc_atomic_add_int8(atomic, -value);
999 /****f* silcutil/SilcAtomicAPI/silc_atomic_cas32
1004 * SilcBool silc_atomic_cas32(SilcAtomic32 *atomic, SilcUInt32 old_val,
1005 * SilcUInt32 new_val)
1009 * Performs compare and swap (CAS). Atomically compares if the variable
1010 * `atomic' has the value `old_val' and in that case swaps it with the
1011 * value `new_val'. Returns TRUE if the old value was same and it was
1012 * swapped and FALSE if it differed and was not swapped.
1017 SilcBool silc_atomic_cas32(SilcAtomic32 *atomic, SilcUInt32 old_val,
1022 #if !defined(SILC_THREADS)
1023 /* No atomic operations */
1024 if (atomic->value == old_val) {
1025 atomic->value = new_val;
1030 #elif defined(SILC_WIN32)
1032 return InterlockedCompareExchange(&atomic->value, (LONG)new_val,
1033 (LONG)old_val) == old_val;
1035 #elif defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64))
1036 /* GCC + i486 or x86_64 */
1037 __asm __volatile("lock; cmpxchgl %2, %1"
1038 : "=a" (ret), "=m" (atomic->value)
1039 : "r" (new_val), "m" (atomic->value), "0" (old_val));
1040 return ret == old_val;
1042 #elif defined(__GNUC__) && defined(SILC_IA64)
1043 /* GCC + IA64 (GCC builtin atomic operations) */
1044 return __sync_bool_compare_and_swap(&atomic->value, old_val, new_val);
1046 #elif defined(__GNUC__) && defined(SILC_POWERPC)
1052 silc_mutex_lock(atomic->lock);
1053 if (atomic->value == old_val) {
1054 atomic->value = new_val;
1055 silc_mutex_unlock(atomic->lock);
1058 silc_mutex_unlock(atomic->lock);
1063 /****f* silcutil/SilcAtomicAPI/silc_atomic_cas16
1068 * SilcBool silc_atomic_cas16(SilcAtomic16 *atomic, SilcUInt16 old_val,
1069 * SilcUInt16 new_val)
1073 * Performs compare and swap (CAS). Atomically compares if the variable
1074 * `atomic' has the value `old_val' and in that case swaps it with the
1075 * value `new_val'. Returns TRUE if the old value was same and it was
1076 * swapped and FALSE if it differed and was not swapped.
1081 SilcBool silc_atomic_cas16(SilcAtomic16 *atomic, SilcUInt16 old_val,
1086 #if !defined(SILC_THREADS)
1087 /* No atomic operations */
1088 if (atomic->value == old_val) {
1089 atomic->value = new_val;
1094 #elif defined(SILC_WIN32)
1096 LONG o = old_val, n = new_val;
1097 return InterlockedCompareExchange(&atomic->value, n, o) == o;
1099 #elif defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64))
1100 /* GCC + i486 or x86_64 */
1101 __asm __volatile("lock; cmpxchgw %2, %1"
1102 : "=a" (ret), "=m" (atomic->value)
1103 : "c" (new_val), "m" (atomic->value), "0" (old_val));
1104 return ret == old_val;
1106 #elif defined(__GNUC__) && defined(SILC_IA64)
1107 /* GCC + IA64 (GCC builtin atomic operations) */
1108 SilcUInt32 o = old_val, n = new_val;
1109 return __sync_bool_compare_and_swap(&atomic->value, o, n);
1111 #elif defined(__GNUC__) && defined(SILC_POWERPC)
1117 silc_mutex_lock(atomic->lock);
1118 if (atomic->value == old_val) {
1119 atomic->value = new_val;
1120 silc_mutex_unlock(atomic->lock);
1123 silc_mutex_unlock(atomic->lock);
1128 /****f* silcutil/SilcAtomicAPI/silc_atomic_cas8
1133 * SilcBool silc_atomic_cas8(SilcAtomic8 *atomic, SilcUInt8 old_val,
1134 * SilcUInt8 new_val)
1138 * Performs compare and swap (CAS). Atomically compares if the variable
1139 * `atomic' has the value `old_val' and in that case swaps it with the
1140 * value `new_val'. Returns TRUE if the old value was same and it was
1141 * swapped and FALSE if it differed and was not swapped.
1146 SilcBool silc_atomic_cas8(SilcAtomic8 *atomic, SilcUInt8 old_val,
1151 #if !defined(SILC_THREADS)
1152 /* No atomic operations */
1153 if (atomic->value == old_val) {
1154 atomic->value = new_val;
1159 #elif defined(SILC_WIN32)
1161 LONG o = old_val, n = new_val;
1162 return InterlockedCompareExchange(&atomic->value, n, o) == o;
1164 #elif defined(__GNUC__) && (defined(SILC_I486) || defined(SILC_X86_64))
1165 /* GCC + i486 or x86_64 */
1166 __asm __volatile("lock; cmpxchgb %2, %1"
1167 : "=a" (ret), "=m" (atomic->value)
1168 : "c" (new_val), "m" (atomic->value), "0" (old_val));
1169 return ret == old_val;
1171 #elif defined(__GNUC__) && defined(SILC_IA64)
1172 /* GCC + IA64 (GCC builtin atomic operations) */
1173 SilcUInt32 o = old_val, n = new_val;
1174 return __sync_bool_compare_and_swap(&atomic->value, o, n);
1176 #elif defined(__GNUC__) && defined(SILC_POWERPC)
1182 silc_mutex_lock(atomic->lock);
1183 if (atomic->value == old_val) {
1184 atomic->value = new_val;
1185 silc_mutex_unlock(atomic->lock);
1188 silc_mutex_unlock(atomic->lock);
1193 /****f* silcutil/SilcAtomicAPI/silc_atomic_cas_pointer
1198 * SilcBool silc_atomic_cas_pointer(SilcAtomicPointer *atomic,
1199 * void *old_ptr, void *new_ptr);
1203 * Performs compare and swap (CAS). Atomically compares if the variable
1204 * `atomic' has the pointer `old_ptr' and in that case swaps it with the
1205 * pointer `new_ptr'. Returns TRUE if the old pointer was same and it was
1206 * swapped and FALSE if it differed and was not swapped.
1211 SilcBool silc_atomic_cas_pointer(SilcAtomicPointer *atomic, void *old_val,
1216 #if !defined(SILC_THREADS)
1217 /* No atomic operations */
1218 if (atomic->pointer == old_val) {
1219 atomic->pointer = new_val;
1224 #elif defined(SILC_WIN32)
1226 return InterlockedCompareExchangePointer(&atomic->pointer, n, o) == o;
1228 #elif defined(__GNUC__) && defined(SILC_I486)
1230 __asm __volatile("lock; cmpxchgl %2, %1"
1231 : "=a" (ret), "=m" (atomic->pointer)
1232 : "c" (new_val), "m" (atomic->pointer), "0" (old_val));
1233 return ret == old_val;
1235 #elif defined(__GNUC__) && defined(SILC_X86_64)
1237 __asm __volatile("lock; cmpxchgq %q2, %1"
1238 : "=a" (ret), "=m" (atomic->pointer)
1239 : "c" (new_val), "m" (atomic->pointer), "0" (old_val));
1240 return ret == old_val;
1242 #elif defined(__GNUC__) && defined(SILC_IA64)
1243 /* GCC + IA64 (GCC builtin atomic operations) */
1244 return __sync_bool_compare_and_swap((long)&atomic->pointer, (long)old_val,
1247 #elif defined(__GNUC__) && defined(SILC_POWERPC)
1253 silc_mutex_lock(atomic->lock);
1254 if (atomic->pointer == old_val) {
1255 atomic->pointer = new_val;
1256 silc_mutex_unlock(atomic->lock);
1259 silc_mutex_unlock(atomic->lock);
1264 #endif /* SILCATOMIC_H */