#ifndef __STDC_VERSION_STDINT_H__ #define __STDC_VERSION_STDINT_H__ 201711L #define __NEED_int8_t #define __NEED_int16_t #define __NEED_int32_t #define __NEED_int64_t #define __NEED_uint8_t #define __NEED_uint16_t #define __NEED_uint32_t #define __NEED_uint64_t #define __NEED_intptr_t #define __NEED_uintptr_t #define __NEED_intmax_t #define __NEED_uintmax_t #include typedef int8_t int_fast8_t; typedef int64_t int_fast64_t; typedef int8_t int_least8_t; typedef int16_t int_least16_t; typedef int32_t int_least32_t; typedef int64_t int_least64_t; typedef uint8_t uint_fast8_t; typedef uint64_t uint_fast64_t; typedef uint8_t uint_least8_t; typedef uint16_t uint_least16_t; typedef uint32_t uint_least32_t; typedef uint64_t uint_least64_t; #define INT8_WIDTH 8 #define INT16_WIDTH 16 #define INT32_WIDTH 32 #define INT64_WIDTH 64 #define INT8_MIN (-1-0x7f) #define INT16_MIN (-1-0x7fff) #define INT32_MIN (-1-0x7fffffff) #define INT64_MIN (-1-0x7fffffffffffffff) #define INT8_MAX (0x7f) #define INT16_MAX (0x7fff) #define INT32_MAX (0x7fffffff) #define INT64_MAX (0x7fffffffffffffff) #define UINT8_WIDTH 8 #define UINT16_WIDTH 16 #define UINT32_WIDTH 32 #define UINT64_WIDTH 64 #define UINT8_MAX (0xff) #define UINT16_MAX (0xffff) #define UINT32_MAX (0xffffffffu) #define UINT64_MAX (0xffffffffffffffffu) #define INT_FAST8_WIDTH 8 #define INT_FAST64_WIDTH 64 #define INT_FAST8_MIN INT8_MIN #define INT_FAST64_MIN INT64_MIN #define INT_LEAST8_WIDTH 8 #define INT_LEAST16_WIDTH 16 #define INT_LEAST32_WIDTH 32 #define INT_LEAST64_WIDTH 64 #define INT_LEAST8_MIN INT8_MIN #define INT_LEAST16_MIN INT16_MIN #define INT_LEAST32_MIN INT32_MIN #define INT_LEAST64_MIN INT64_MIN #define INT_FAST8_MAX INT8_MAX #define INT_FAST64_MAX INT64_MAX #define INT_LEAST8_MAX INT8_MAX #define INT_LEAST16_MAX INT16_MAX #define INT_LEAST32_MAX INT32_MAX #define INT_LEAST64_MAX INT64_MAX #define UINT_FAST8_WIDTH 8 #define UINT_FAST64_WIDTH 64 #define UINT_FAST8_MAX UINT8_MAX #define UINT_FAST64_MAX UINT64_MAX 
#define UINT_LEAST8_WIDTH 8 #define UINT_LEAST16_WIDTH 16 #define UINT_LEAST32_WIDTH 32 #define UINT_LEAST64_WIDTH 64 #define UINT_LEAST8_MAX UINT8_MAX #define UINT_LEAST16_MAX UINT16_MAX #define UINT_LEAST32_MAX UINT32_MAX #define UINT_LEAST64_MAX UINT64_MAX #define INTMAX_WIDTH 64 #define UINTMAX_WIDTH 64 #define INTMAX_MIN INT64_MIN #define INTMAX_MAX INT64_MAX #define UINTMAX_MAX UINT64_MAX #define WINT_WIDTH 32 #define WINT_MIN 0U #define WINT_MAX UINT32_MAX #define WCHAR_WIDTH 32 #if L'\0'-1 > 0 #define WCHAR_MAX (0xffffffffu+L'\0') #define WCHAR_MIN (0+L'\0') #else #define WCHAR_MAX (0x7fffffff+L'\0') #define WCHAR_MIN (-1-0x7fffffff+L'\0') #endif #define SIG_ATOMIC_WIDTH 32 #define SIG_ATOMIC_MIN INT32_MIN #define SIG_ATOMIC_MAX INT32_MAX #include #define INT8_C(c) c #define INT16_C(c) c #define INT32_C(c) c #define UINT8_C(c) c #define UINT16_C(c) c #define UINT32_C(c) c ## U #if UINTPTR_MAX == UINT64_MAX #define INTPTR_WIDTH 64 #define UINTPTR_WIDTH 64 #define INT64_C(c) c ## L #define UINT64_C(c) c ## UL #define INTMAX_C(c) c ## L #define UINTMAX_C(c) c ## UL #else #define INTPTR_WIDTH 32 #define UINTPTR_WIDTH 32 #define INT64_C(c) c ## LL #define UINT64_C(c) c ## ULL #define INTMAX_C(c) c ## LL #define UINTMAX_C(c) c ## ULL #endif #if UINT_FAST16_MAX == UINT32_MAX #define INT_FAST16_WIDTH 32 #define UINT_FAST16_WIDTH 32 #else #define INT_FAST16_WIDTH 16 #define UINT_FAST16_WIDTH 16 #endif #if UINT_FAST32_MAX == UINT64_MAX #define INT_FAST32_WIDTH 64 #define UINT_FAST32_WIDTH 64 #else #define INT_FAST32_WIDTH 32 #define UINT_FAST32_WIDTH 32 #endif #if PTRDIFF_MAX == INT64_MAX #define PTRDIFF_WIDTH 64 #else #define PTRDIFF_WIDTH 32 #endif #if SIZE_MAX == UINT64_MAX #define SIZE_WIDTH 64 #else #define SIZE_WIDTH 32 #endif // Enable support for _BitInt(N) types, if that comes not from the // compiler directly. 
/* Forward the compiler's _BitInt(N) capability as the standard macro.
 * NOTE(review): the 201900L guard accepts pre-ratification C23 compilers
 * (the final standard value is 202311L) — confirm that is intended. */
#if (__STDC_VERSION__ >= 201900L) && !BITINT_MAXWIDTH && __BITINT_MAXWIDTH__
#define BITINT_MAXWIDTH __BITINT_MAXWIDTH__
#endif

/* Optional 128-bit integer support.
 * NOTE(review): __has_int128_t is not a standard feature macro; it is
 * presumably provided by the toolchain or project configuration — verify. */
#if __has_int128_t
typedef __int128 int128_t;
typedef int128_t int_fast128_t;
typedef int128_t int_least128_t;
typedef unsigned __int128 uint128_t;
typedef uint128_t uint_fast128_t;
typedef uint128_t uint_least128_t;
#define INT128_WIDTH 128
#define INT_LEAST128_WIDTH 128
#define INT_FAST128_WIDTH 128
#define UINT128_WIDTH 128
#define UINT_LEAST128_WIDTH 128
#define UINT_FAST128_WIDTH 128
// This uses the new integer constants for _BitInt(N) types for the
// 128 bit type and is the only thing that we need in addition to the
// types themselves to enable 128 bit support officially. Usually this
// will not be fit for preprocessor arithmetic, but we should do an
// effort to make this possible.
/* The unary + forces promotion of the wb/wbu _BitInt literal before the
 * conversion to the __int128-based typedef. */
#define INT128_C(X) ((int128_t)+(X ## wb))
#define UINT128_C(X) ((uint128_t)+(X ## wbu))
/* MIN spelled as (-1-MAX) to avoid an out-of-range positive literal. */
#define INT128_MAX INT128_C(0x7fffffffffffffffffffffffffffffff)
#define INT128_MIN (-1-INT128_MAX)
#define UINT128_MAX UINT128_C(0xffffffffffffffffffffffffffffffff)
#define INT_LEAST128_MAX INT128_MAX
#define INT_LEAST128_MIN INT128_MIN
#define INT_FAST128_MAX INT128_MAX
#define INT_FAST128_MIN INT128_MIN
#define UINT_LEAST128_MAX UINT128_MAX
#define UINT_FAST128_MAX UINT128_MAX
#endif

#endif /* __STDC_VERSION_STDINT_H__ */