5 Author: Pekka Riikonen <priikone@silcnet.org>
7 Copyright (C) 2002 - 2007 Pekka Riikonen
9 This program is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; version 2 of the License.
13 This program is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
20 /****h* silcutil/SILC Types
24 * This header file includes basic types and definitions used in SILC Toolkits.
25 * It contains all types, and many utility macros and functions.
32 /****d* silcutil/SILCTypes/SilcBool
 * typedef unsigned char SilcBool;
40 * Boolean value, and is always 8-bits. Represents value 0 or 1.
/* Boolean value — always 8 bits, holding 0 (FALSE) or 1 (TRUE). */
typedef unsigned char SilcBool;
/* The bool macro is deprecated. Use SilcBool instead. */
/* NOTE(review): defining `bool' as a macro clashes with C99 <stdbool.h>
   and with C++; presumably this is guarded in the elided surrounding
   context — verify before relying on it. */
#define bool unsigned char
55 /****d* silcutil/SILCTypes/TRUE
63 * Boolean true value indicator.
72 /****d* silcutil/SILCTypes/FALSE
80 * Boolean false value indicator.
/* Our offsetof macro */
/* Classic null-pointer offsetof idiom: technically undefined behavior in
   ISO C, but handled by the compilers this library targets.  The standard
   offsetof() from <stddef.h> is the portable equivalent. */
#define silc_offsetof(TYPE, MEMBER) ((size_t) &((TYPE *)0)->MEMBER)
/* silc_likely and silc_unlikely GCC branch prediction macros. Use only if
   you have profiled the code first. */
#if defined(__GNUC__) && __GNUC__ >= 3
/* GCC 3+: pass the expected truth value to the branch predictor. */
#define silc_likely(expr) __builtin_expect(!!(expr), 1)
#define silc_unlikely(expr) __builtin_expect(!!(expr), 0)
#else
/* Other compilers: plain pass-through, no prediction hint. */
#define silc_likely(expr) (expr)
#define silc_unlikely(expr) (expr)
#endif /* __GNUC__ >= 3 */
/* Compile-time sanity check: the 16-bit types below require a 2-byte short. */
#if SILC_SIZEOF_SHORT > 2
#error "size of the short must be 2 bytes"
#endif /* SILC_SIZEOF_SHORT > 2 */
/****d* silcutil/SILCTypes/SilcUInt8
 * typedef unsigned char SilcUInt8;
 * 8-bit unsigned integer.
 ***/
typedef unsigned char SilcUInt8;
/****d* silcutil/SILCTypes/SilcInt8
 * typedef signed char SilcInt8;
 * 8-bit signed integer.
 ***/
typedef signed char SilcInt8;
/****d* silcutil/SILCTypes/SilcUInt16
 * typedef unsigned short SilcUInt16;
 * 16-bit unsigned integer. Guaranteed to be 16-bits.
 ***/
/* Guarantee enforced by the SILC_SIZEOF_SHORT check above. */
typedef unsigned short SilcUInt16;
/****d* silcutil/SILCTypes/SilcInt16
 * typedef signed short SilcInt16;
 * 16-bit signed integer. Guaranteed to be 16-bits.
 ***/
typedef signed short SilcInt16;
166 /****d* silcutil/SILCTypes/SilcUInt32
170 * typedef unsigned long SilcUInt32;
174 * 32-bit unsigned integer. Guaranteed to be 32-bits.
/* Select a native 4-byte type for SilcUInt32/SilcInt32.  The #elif chain
   ensures exactly one pair of typedefs is compiled. */
#if SILC_SIZEOF_LONG == 4
typedef unsigned long SilcUInt32;
typedef signed long SilcInt32;
#elif SILC_SIZEOF_INT == 4
typedef unsigned int SilcUInt32;
typedef signed int SilcInt32;
#elif SILC_SIZEOF_LONG_LONG >= 4
/* NOTE(review): a >=4-byte long long may be wider than 32 bits despite
   the documented 32-bit guarantee — kept from the original selection. */
typedef unsigned long long SilcUInt32;
typedef signed long long SilcInt32;
#endif /* SILC_SIZEOF_LONG == 4 */
196 /****d* silcutil/SILCTypes/SilcInt32
200 * typedef signed long SilcInt32;
204 * 32-bit signed integer. Guaranteed to be 32-bits.
/****d* silcutil/SILCTypes/SilcUInt64
 * typedef unsigned long long SilcUInt64;
 * 64-bit unsigned integer. Guaranteed to be 64-bits on systems that
 ***/
/* NOTE(review): the conditional structure below appears truncated — the
   four alternative typedef sets (long / long long / MSVC __int64 /
   32-bit fallback) are not separated by visible #else/#endif lines, so
   as written every set would be compiled.  Verify against the canonical
   source. */
#if SILC_SIZEOF_LONG >= 8
typedef unsigned long SilcUInt64;
typedef signed long SilcInt64;
#if SILC_SIZEOF_LONG_LONG >= 8
typedef unsigned long long SilcUInt64;
typedef signed long long SilcInt64;
typedef unsigned __int64 SilcUInt64;
typedef signed __int64 SilcInt64;
typedef SilcUInt32 SilcUInt64;
typedef SilcInt32 SilcInt64;
240 /****d* silcutil/SILCTypes/SilcInt64
244 * typedef signed long long SilcInt64;
248 * 64-bit signed integer. Guaranteed to be 64-bits on systems that
253 #if SILC_SIZEOF_VOID_P < 4
254 typedef SilcUInt32 * void *;
257 /****d* silcutil/SILCTypes/SilcSocket
265 * Platform specific socket. On POSIX compliant systems this is simply
266 * an integer, representing the socket. On other systems it is platform
267 * specific socket context. Access it only through routines that can
268 * handle SilcSocket types, unless you know what you are doing.
/* Platform specific socket type; see the SilcSocket documentation above. */
#if defined(SILC_UNIX)
typedef int SilcSocket;
#elif defined(SILC_WIN32)
typedef SOCKET SilcSocket;
#elif defined(SILC_SYMBIAN)
typedef void * SilcSocket;
#endif /* SILC_UNIX */
/* NOTE(review): x86 fast path — loads 4 bytes and byte-swaps with bswapl.
   This fragment appears truncated: a GCC statement expression normally
   wraps the declaration and asm so the macro yields _result_; verify
   against the canonical source.  Unaligned `cp' is tolerated on x86
   only. */
#if (defined(SILC_I486) || defined(SILC_X86_64)) && defined(__GNUC__)
#define SILC_GET_WORD(cp) \
SilcUInt32 _result_; \
asm volatile ("movl (%1), %0; bswapl %0" \
: "=q" (_result_) : "q" (cp)); \
/* Portable big-endian 32-bit load.  The whole expansion is parenthesized
   so the macro can be used safely inside larger expressions (the
   original left the outer | chain exposed to caller precedence). */
#define SILC_GET_WORD(cp) (((SilcUInt32)(SilcUInt8)(cp)[0] << 24)	\
			   | ((SilcUInt32)(SilcUInt8)(cp)[1] << 16)	\
			   | ((SilcUInt32)(SilcUInt8)(cp)[2] << 8)	\
			   | ((SilcUInt32)(SilcUInt8)(cp)[3]))
296 #endif /* (SILC_I486 || SILC_X86_64) && __GNUC__ */
/****d* silcutil/SILCTypes/SILC_GET16_MSB
 * #define SILC_GET16_MSB(dest, src)
 * Return two 8-bit bytes, most significant bytes first.
 ***/
/* x86 fast path: 16-bit load, then rolw 8 swaps the two bytes to give a
   big-endian read; "memory" and "cc" clobbers are declared. */
#if (defined(SILC_I386) || defined(SILC_X86_64)) && defined(__GNUC__)
#define SILC_GET16_MSB(l, cp) \
asm volatile ("movw (%1), %w0; rolw $8, %w0" \
: "=q" (l) : "q" (cp) : "memory", "cc");
/* Portable big-endian 16-bit load.  Wrapped in do/while(0) so the macro
   is a single statement (safe in unbraced if/else); the original ended
   with a dangling line-continuation that spliced the following #endif
   into the macro. */
#define SILC_GET16_MSB(l, cp)				\
do {							\
  (l) = ((SilcUInt32)(SilcUInt8)(cp)[0] << 8)		\
    | ((SilcUInt32)(SilcUInt8)(cp)[1]);			\
} while(0)
320 #endif /* (SILC_I386 || SILC_X86_64) && __GNUC__ */
/****d* silcutil/SILCTypes/SILC_GET32_MSB
 * #define SILC_GET32_MSB(dest, src)
 * Return four 8-bit bytes, most significant bytes first.
 ***/
/* x86 fast path: 4-byte load + bswapl; the "memory" clobber keeps the
   compiler from caching *cp across the asm. */
#if (defined(SILC_I486) || defined(SILC_X86_64)) && defined(__GNUC__)
#define SILC_GET32_MSB(l, cp) \
asm volatile ("movl (%1), %0; bswapl %0" \
: "=q" (l) : "q" (cp) : "memory", "cc");
/* Portable big-endian 32-bit load as a single do/while(0) statement;
   the original's trailing line-continuation spliced the following
   #endif into the macro. */
#define SILC_GET32_MSB(l, cp)				\
do {							\
  (l) = ((SilcUInt32)(SilcUInt8)(cp)[0] << 24)		\
    | ((SilcUInt32)(SilcUInt8)(cp)[1] << 16)		\
    | ((SilcUInt32)(SilcUInt8)(cp)[2] << 8)		\
    | ((SilcUInt32)(SilcUInt8)(cp)[3]);			\
} while(0)
347 #endif /* (SILC_I486 || SILC_X86_64) && __GNUC__ */
/* Same as upper but XOR the result always. Special purpose macro. */
/* NOTE(review): the asm fast path below appears truncated (no visible
   statement braces or clobber list), and it passes the uninitialized
   scratch variable _x_ as an input operand — verify against the
   canonical source before use. */
#if (defined(SILC_I486) || defined(SILC_X86_64)) && defined(__GNUC__)
#define SILC_GET32_X_MSB(l, cp) \
register volatile SilcUInt32 _x_; \
asm volatile ("movl %1, %3; movl (%2), %0;\n\t" \
"bswapl %0; xorl %3, %0" \
: "=r" (l) : "0" (l), "r" (cp), "r" (_x_) \
/* Portable variant: big-endian load XORed into the destination. */
#define SILC_GET32_X_MSB(l, cp) \
(l) ^= ((SilcUInt32)(SilcUInt8)(cp)[0]) << 24 \
| ((SilcUInt32)(SilcUInt8)(cp)[1] << 16) \
| ((SilcUInt32)(SilcUInt8)(cp)[2] << 8) \
| ((SilcUInt32)(SilcUInt8)(cp)[3]);
#endif /* (SILC_I486 || SILC_X86_64) && __GNUC__ */
/****d* silcutil/SILCTypes/SILC_GET64_MSB
 * #define SILC_GET64_MSB(dest, src)
 * Return eight 8-bit bytes, most significant bytes first.
 ***/
/* x86-64 fast path: single 8-byte load + bswapq. */
#if defined(SILC_X86_64) && defined(__GNUC__)
#define SILC_GET64_MSB(l, cp) \
asm volatile ("movq (%1), %0; bswapq %0" \
: "=r" (l) : "r" (cp) : "memory", "cc");
/* Portable big-endian 64-bit load built from two 32-bit SILC_GET_WORD
   loads; wrapped in do/while(0) — the original's trailing
   line-continuation spliced the following #endif into the macro. */
#define SILC_GET64_MSB(l, cp)					\
do {								\
  (l) = ((((SilcUInt64)SILC_GET_WORD((cp))) << 32) |		\
	 ((SilcUInt64)SILC_GET_WORD((cp) + 4)));		\
} while(0)
390 #endif /* SILC_X86_64 && __GNUC__ */
393 /****d* silcutil/SILCTypes/SILC_GET16_LSB
 * #define SILC_GET16_LSB(dest, src)
401 * Return two 8-bit bytes, least significant bytes first.
/* x86 fast path: direct 16-bit load; x86 tolerates unaligned access.
   NOTE(review): reading through a cast SilcUInt16 * breaks strict
   aliasing rules — presumably the project builds with aliasing-safe
   compiler flags; verify. */
#if defined(SILC_I386) || defined(SILC_X86_64)
#define SILC_GET16_LSB(l, cp) (l) = (*(SilcUInt16 *)(cp))
/* Portable little-endian 16-bit load as a single do/while(0) statement;
   the original's trailing line-continuation spliced the following
   #endif into the macro. */
#define SILC_GET16_LSB(l, cp)				\
do {							\
  (l) = ((SilcUInt32)(SilcUInt8)(cp)[0])		\
    | ((SilcUInt32)(SilcUInt8)(cp)[1] << 8);		\
} while(0)
413 #endif /* SILC_I386 || SILC_X86_64 */
416 /****d* silcutil/SILCTypes/SILC_GET32_LSB
420 * #define SILC_GET32_LSB(dest, src)
424 * Return four 8-bit bytes, least significant bytes first.
/* x86 fast path: direct 32-bit load; x86 tolerates unaligned access.
   NOTE(review): reading through a cast SilcUInt32 * breaks strict
   aliasing rules — verify the project's compiler flags. */
#if defined(SILC_I386) || defined(SILC_X86_64)
#define SILC_GET32_LSB(l, cp) (l) = (*(SilcUInt32 *)(cp))
/* Portable little-endian 32-bit load as a single do/while(0) statement;
   the original's trailing line-continuation spliced the following
   #endif into the macro. */
#define SILC_GET32_LSB(l, cp)				\
do {							\
  (l) = ((SilcUInt32)(SilcUInt8)(cp)[0])		\
    | ((SilcUInt32)(SilcUInt8)(cp)[1] << 8)		\
    | ((SilcUInt32)(SilcUInt8)(cp)[2] << 16)		\
    | ((SilcUInt32)(SilcUInt8)(cp)[3] << 24);		\
} while(0)
438 #endif /* SILC_I386 || SILC_X86_64 */
/* Same as upper but XOR the result always. Special purpose macro. */
/* NOTE(review): an #else separating the two variants below appears to be
   missing in this fragment; as written both would be defined.  Verify
   against the canonical source. */
#if defined(SILC_I386) || defined(SILC_X86_64)
/* x86 fast path: XOR with a direct (possibly unaligned) 32-bit load. */
#define SILC_GET32_X_LSB(l, cp) (l) ^= (*(SilcUInt32 *)(cp))
/* Portable variant; expands to an expression statement — the caller
   supplies the terminating semicolon. */
#define SILC_GET32_X_LSB(l, cp) \
(l) ^= ((SilcUInt32)(SilcUInt8)(cp)[0]) \
| ((SilcUInt32)(SilcUInt8)(cp)[1] << 8) \
| ((SilcUInt32)(SilcUInt8)(cp)[2] << 16) \
| ((SilcUInt32)(SilcUInt8)(cp)[3] << 24)
#endif /* SILC_I386 || SILC_X86_64 */
/****d* silcutil/SILCTypes/SILC_PUT16_MSB
 * #define SILC_PUT16_MSB(dest, src)
 * Put two 8-bit bytes, most significant bytes first.
 ***/
/* x86 fast path: rolw swaps the bytes, then a 16-bit store.
   NOTE(review): the rolw also modifies the register holding `l', which
   the operand list declares input-only (unlike the PUT32 variant, which
   swaps back) — verify against the canonical source. */
#if (defined(SILC_I386) || defined(SILC_X86_64)) && defined(__GNUC__)
#define SILC_PUT16_MSB(l, cp) \
asm volatile ("rolw $8, %w1; movw %w1, (%0)" \
: : "q" (cp), "q" (l) : "memory", "cc");
/* Portable big-endian 16-bit store as a single do/while(0) statement;
   the original's trailing line-continuation spliced the following
   #endif into the macro. */
#define SILC_PUT16_MSB(l, cp)			\
do {						\
  (cp)[0] = (SilcUInt8)((l) >> 8);		\
  (cp)[1] = (SilcUInt8)(l);			\
} while(0)
474 #endif /* (SILC_I386 || SILC_X86_64) && __GNUC__ */
/****d* silcutil/SILCTypes/SILC_PUT32_MSB
 * #define SILC_PUT32_MSB(dest, src)
 * Put four 8-bit bytes, most significant bytes first.
 ***/
/* x86 fast path: bswap, store, bswap back so the register holding `l'
   is restored to its original value. */
#if (defined(SILC_I486) || defined(SILC_X86_64)) && defined(__GNUC__)
#define SILC_PUT32_MSB(l, cp) \
asm volatile ("bswapl %1; movl %1, (%0); bswapl %1" \
: : "q" (cp), "q" (l) : "memory", "cc");
/* Portable big-endian 32-bit store as a single do/while(0) statement;
   the original's trailing line-continuation spliced the following
   #endif into the macro. */
#define SILC_PUT32_MSB(l, cp)			\
do {						\
  (cp)[0] = (SilcUInt8)((l) >> 24);		\
  (cp)[1] = (SilcUInt8)((l) >> 16);		\
  (cp)[2] = (SilcUInt8)((l) >> 8);		\
  (cp)[3] = (SilcUInt8)(l);			\
} while(0)
501 #endif /* (SILC_I486 || SILC_X86_64) && __GNUC__ */
/****d* silcutil/SILCTypes/SILC_PUT64_MSB
 * #define SILC_PUT64_MSB(dest, src)
 * Put eight 8-bit bytes, most significant bytes first.
 ***/
/* x86-64 fast path: bswapq, 8-byte store, bswapq back to restore the
   register holding `l'. */
#if defined(SILC_X86_64) && defined(__GNUC__)
#define SILC_PUT64_MSB(l, cp) \
asm volatile ("bswapq %1; movq %1, (%0); bswapq %1" \
: : "r" (cp), "r" (l) : "memory", "cc");
/* Portable big-endian 64-bit store via two 32-bit SILC_PUT32_MSB stores;
   wrapped in do/while(0) — the original's trailing line-continuation
   spliced the following #endif into the macro. */
#define SILC_PUT64_MSB(l, cp)					\
do {								\
  SILC_PUT32_MSB((SilcUInt32)((SilcUInt64)(l) >> 32), (cp));	\
  SILC_PUT32_MSB((SilcUInt32)(l), (cp) + 4);			\
} while(0)
526 #endif /* SILC_X86_64 && __GNUC__ */
529 /****d* silcutil/SILCTypes/SILC_PUT16_LSB
533 * #define SILC_PUT16_LSB(dest, src)
537 * Put two 8-bit bytes, least significant bytes first.
/* x86 fast path: direct 16-bit store; x86 tolerates unaligned access.
   NOTE(review): writing through a cast SilcUInt16 * breaks strict
   aliasing rules — verify the project's compiler flags. */
#if defined(SILC_I386) || defined(SILC_X86_64)
#define SILC_PUT16_LSB(l, cp) (*(SilcUInt16 *)(cp)) = (l)
/* Portable little-endian 16-bit store as a single do/while(0) statement;
   the original's trailing line-continuation spliced the following
   #endif into the macro. */
#define SILC_PUT16_LSB(l, cp)			\
do {						\
  (cp)[0] = (SilcUInt8)(l);			\
  (cp)[1] = (SilcUInt8)((l) >> 8);		\
} while(0)
549 #endif /* SILC_I386 || SILC_X86_64 */
552 /****d* silcutil/SILCTypes/SILC_PUT32_LSB
556 * #define SILC_PUT32_LSB(dest, src)
560 * Put four 8-bit bytes, least significant bytes first.
/* x86 fast path: direct 32-bit store; x86 tolerates unaligned access.
   NOTE(review): writing through a cast SilcUInt32 * breaks strict
   aliasing rules — verify the project's compiler flags. */
#if defined(SILC_I386) || defined(SILC_X86_64)
#define SILC_PUT32_LSB(l, cp) (*(SilcUInt32 *)(cp)) = (l)
/* Portable little-endian 32-bit store as a single do/while(0) statement;
   the original's trailing line-continuation spliced the following
   #endif into the macro. */
#define SILC_PUT32_LSB(l, cp)			\
do {						\
  (cp)[0] = (SilcUInt8)(l);			\
  (cp)[1] = (SilcUInt8)((l) >> 8);		\
  (cp)[2] = (SilcUInt8)((l) >> 16);		\
  (cp)[3] = (SilcUInt8)((l) >> 24);		\
} while(0)
574 #endif /* SILC_I386 || SILC_X86_64 */
/****d* silcutil/SILCTypes/SILC_SWAB_16
 * #define SILC_SWAB_16(integer)
 * Swabs 16-bit unsigned integer byte order. Returns the new value.
 ***/
/* NOTE(review): the x86 fast path below appears truncated — a GCC
   statement expression normally wraps the declaration and asm so the
   macro yields _result_; verify against the canonical source. */
#if (defined(SILC_I386) || defined(SILC_X86_64)) && defined(__GNUC__)
#define SILC_SWAB_16(l) \
SilcUInt16 _result_; \
asm volatile ("movw %w1, %w0; rolw $8, %w0" \
: "=q" (_result_): "q" (l)); \
/* Portable variant: mask and cross-shift the two bytes. */
#define SILC_SWAB_16(l) \
((SilcUInt16)(((SilcUInt16)(l) & (SilcUInt16)0x00FFU) << 8) | \
(((SilcUInt16)(l) & (SilcUInt16)0xFF00U) >> 8))
#endif /* (SILC_I386 || SILC_X86_64) && __GNUC__ */
/****d* silcutil/SILCTypes/SILC_SWAB_32
 * #define SILC_SWAB_32(integer)
 * Swabs 32-bit unsigned integer byte order. Returns the new value.
 ***/
/* NOTE(review): same truncation concern as SILC_SWAB_16's fast path. */
#if (defined(SILC_I486) || defined(SILC_X86_64)) && defined(__GNUC__)
#define SILC_SWAB_32(l) \
SilcUInt32 _result_; \
asm volatile ("movl %1, %0; bswapl %0" \
: "=q" (_result_): "q" (l)); \
/* Portable variant: mask and cross-shift the four bytes. */
#define SILC_SWAB_32(l) \
((SilcUInt32)(((SilcUInt32)(l) & (SilcUInt32)0x000000FFUL) << 24) | \
(((SilcUInt32)(l) & (SilcUInt32)0x0000FF00UL) << 8) | \
(((SilcUInt32)(l) & (SilcUInt32)0x00FF0000UL) >> 8) | \
(((SilcUInt32)(l) & (SilcUInt32)0xFF000000UL) >> 24))
#endif /* (SILC_I486 || SILC_X86_64) && __GNUC__ */
/****d* silcutil/SILCTypes/SILC_PTR_TO_32
 * #define SILC_PTR_TO_32(ptr)
 * Type casts a pointer's value into a 32-bit integer. Use this to
 * avoid compiler warnings when type casting pointers to integers
 ***/
/* NOTE(review): in each SILC_SIZEOF_VOID_P conditional below, the #else
   and #endif lines separating the 32-bit and 64-bit branches are not
   visible in this fragment; as written both branches would be compiled.
   Verify against the canonical source. */
#if SILC_SIZEOF_VOID_P < 8
#define SILC_PTR_TO_32(_ptr__) ((SilcUInt32)(_ptr__))
/* 64-bit pointers: truncate to the low 32 bits. */
#define SILC_PTR_TO_32(_ptr__) \
((SilcUInt32)((SilcUInt64)(_ptr__) & (SilcUInt32)0xFFFFFFFFUL))
/****d* silcutil/SILCTypes/SILC_PTR_TO_64
 * #define SILC_PTR_TO_64(ptr)
 * Type casts a pointer's value into a 64-bit integer. Use this to
 * avoid compiler warnings when type casting pointers to integers
 ***/
#if SILC_SIZEOF_VOID_P < 8
#define SILC_PTR_TO_64(_ptr__) ((SilcUInt64)((SilcUInt32)(_ptr__)))
/* NOTE(review): the doubled (SilcUInt64) cast here is redundant. */
#define SILC_PTR_TO_64(_ptr__) ((SilcUInt64)((SilcUInt64)(_ptr__)))
/****d* silcutil/SILCTypes/SILC_32_TO_PTR
 * #define SILC_32_TO_PTR(ptr)
 * Type casts a 32-bit integer value into a pointer. Use this to
 * avoid compiler warnings when type casting integers to pointers of
 ***/
#if SILC_SIZEOF_VOID_P < 8
#define SILC_32_TO_PTR(_ival__) ((void *)((SilcUInt32)(_ival__)))
#define SILC_32_TO_PTR(_ival__) ((void *)((SilcUInt64)(_ival__)))
/****d* silcutil/SILCTypes/SILC_64_TO_PTR
 * #define SILC_64_TO_PTR(ptr)
 * Type casts a 64-bit integer value into a pointer. Use this to
 * avoid compiler warnings when type casting integers to pointers of
 ***/
#if SILC_SIZEOF_VOID_P < 8
/* 32-bit pointers: keep only the low 32 bits of the 64-bit value. */
#define SILC_64_TO_PTR(_ival__) \
((void *)((SilcUInt32)((SilcUInt64)(_ival__) & (SilcUInt32)0xFFFFFFFFUL)))
#define SILC_64_TO_PTR(_ival__) ((void *)((SilcUInt64)(_ival__)))
/****d* silcutil/SILCTypes/silc_rol
 *
 * SYNOPSIS
 *
 *    static inline SilcUInt32 silc_rol(SilcUInt32 val, int num);
 *
 * DESCRIPTION
 *
 *    Rotate 32-bit integer's bits to left `num' times. Bits pushed to the
 *    left will appear from the right side of the integer, thus rotating.
 *    Returns the rotated value.
 *
 ***/
static inline SilcUInt32 silc_rol(SilcUInt32 val, int num)
{
#if (defined(SILC_I386) || defined(SILC_X86_64)) && defined(__GNUC__)
  /* x86 rotate instruction; the CPU masks the count in cl modulo 32. */
  asm volatile ("roll %%cl, %0"
		: "=q" (val) : "0" (val), "c" (num));
  return val;
#else
  /* Mask the count: shifting a 32-bit value by 32 is undefined behavior
     in C, so counts that are 0 (mod 32) return the value unchanged. */
  num &= 31;
  if (!num)
    return val;
  return ((val << (SilcUInt32)num) | (val >> (32 - (SilcUInt32)num)));
#endif /* (SILC_I386 || SILC_X86_64) && __GNUC__ */
}
/****d* silcutil/SILCTypes/silc_ror
 *
 * SYNOPSIS
 *
 *    static inline SilcUInt32 silc_ror(SilcUInt32 val, int num);
 *
 * DESCRIPTION
 *
 *    Rotate 32-bit integer's bits to right `num' times. Bits pushed to the
 *    right will appear from the left side of the integer, thus rotating.
 *    Returns the rotated value.
 *
 ***/
static inline SilcUInt32 silc_ror(SilcUInt32 val, int num)
{
#if (defined(SILC_I386) || defined(SILC_X86_64)) && defined(__GNUC__)
  /* x86 rotate instruction; the CPU masks the count in cl modulo 32. */
  asm volatile ("rorl %%cl, %0"
		: "=q" (val) : "0" (val), "c" (num));
  return val;
#else
  /* Mask the count: shifting a 32-bit value by 32 is undefined behavior
     in C, so counts that are 0 (mod 32) return the value unchanged. */
  num &= 31;
  if (!num)
    return val;
  return ((val >> (SilcUInt32)num) | (val << (32 - (SilcUInt32)num)));
#endif /* (SILC_I386 || SILC_X86_64) && __GNUC__ */
}
/****d* silcutil/SILCTypes/silc_rol64
 *
 * SYNOPSIS
 *
 *    static inline SilcUInt64 silc_rol64(SilcUInt64 val, int num);
 *
 * DESCRIPTION
 *
 *    Rotate 64-bit integer's bits to left `num' times. Bits pushed to the
 *    left will appear from the right side of the integer, thus rotating.
 *    Returns the rotated value.
 *
 ***/
static inline SilcUInt64 silc_rol64(SilcUInt64 val, int num)
{
#if defined(SILC_X86_64) && defined(__GNUC__)
  /* x86-64 rotate instruction; the CPU masks the count in cl modulo 64. */
  asm volatile ("rolq %%cl, %0"
		: "=q" (val) : "0" (val), "c" (num));
  return val;
#else
  /* Mask the count: shifting a 64-bit value by 64 is undefined behavior
     in C, so counts that are 0 (mod 64) return the value unchanged. */
  num &= 63;
  if (!num)
    return val;
  return ((val << (SilcUInt64)num) | (val >> (64 - (SilcUInt64)num)));
#endif /* SILC_X86_64 && __GNUC__ */
}
/****d* silcutil/SILCTypes/silc_ror64
 *
 * SYNOPSIS
 *
 *    static inline SilcUInt64 silc_ror64(SilcUInt64 val, int num);
 *
 * DESCRIPTION
 *
 *    Rotate 64-bit integer's bits to right `num' times. Bits pushed to the
 *    right will appear from the left side of the integer, thus rotating.
 *    Returns the rotated value.
 *
 ***/
static inline SilcUInt64 silc_ror64(SilcUInt64 val, int num)
{
#if defined(SILC_X86_64) && defined(__GNUC__)
  /* x86-64 rotate instruction; the CPU masks the count in cl modulo 64. */
  asm volatile ("rorq %%cl, %0"
		: "=q" (val) : "0" (val), "c" (num));
  return val;
#else
  /* Mask the count: shifting a 64-bit value by 64 is undefined behavior
     in C, so counts that are 0 (mod 64) return the value unchanged. */
  num &= 63;
  if (!num)
    return val;
  return ((val >> (SilcUInt64)num) | (val << (64 - (SilcUInt64)num)));
#endif /* SILC_X86_64 && __GNUC__ */
}
815 #endif /* SILCTYPES_H */