ICU 4.8.1.1
umachine.h File Reference
Basic types and constants for UTF. More...
#include "unicode/ptypes.h"
#include <stddef.h>
#include "unicode/urename.h"
Go to the source code of this file.
Defines | |
#define | U_CFUNC extern |
This is used in a declaration of a library private ICU C function. | |
#define | U_CDECL_BEGIN |
This is used to begin a declaration of a library private ICU C API. | |
#define | U_CDECL_END |
This is used to end a declaration of a library private ICU C API. | |
#define | U_ATTRIBUTE_DEPRECATED |
This is used for GCC specific attributes. | |
#define | U_CAPI U_CFUNC U_EXPORT |
This is used to declare a function as a public ICU C API. | |
#define | U_STABLE U_CAPI |
This is used to declare a function as a stable public ICU C API. | |
#define | U_DRAFT U_CAPI |
This is used to declare a function as a draft public ICU C API. | |
#define | U_DEPRECATED U_CAPI U_ATTRIBUTE_DEPRECATED |
This is used to declare a function as a deprecated public ICU C API. | |
#define | U_OBSOLETE U_CAPI |
This is used to declare a function as an obsolete public ICU C API. | |
#define | U_INTERNAL U_CAPI |
This is used to declare a function as an internal ICU C API. | |
#define | INT8_MIN ((int8_t)(-128)) |
The smallest value an 8 bit signed integer can hold. | |
#define | INT16_MIN ((int16_t)(-32767-1)) |
The smallest value a 16 bit signed integer can hold. | |
#define | INT32_MIN ((int32_t)(-2147483647-1)) |
The smallest value a 32 bit signed integer can hold. | |
#define | INT8_MAX ((int8_t)(127)) |
The largest value an 8 bit signed integer can hold. | |
#define | INT16_MAX ((int16_t)(32767)) |
The largest value a 16 bit signed integer can hold. | |
#define | INT32_MAX ((int32_t)(2147483647)) |
The largest value a 32 bit signed integer can hold. | |
#define | UINT8_MAX ((uint8_t)(255U)) |
The largest value an 8 bit unsigned integer can hold. | |
#define | UINT16_MAX ((uint16_t)(65535U)) |
The largest value a 16 bit unsigned integer can hold. | |
#define | UINT32_MAX ((uint32_t)(4294967295U)) |
The largest value a 32 bit unsigned integer can hold. | |
#define | INT64_C(c) c ## LL |
Provides a platform independent way to specify a signed 64-bit integer constant. | |
#define | UINT64_C(c) c ## ULL |
Provides a platform independent way to specify an unsigned 64-bit integer constant. | |
#define | U_INT64_MIN ((int64_t)(INT64_C(-9223372036854775807)-1)) |
The smallest value a 64 bit signed integer can hold. | |
#define | U_INT64_MAX ((int64_t)(INT64_C(9223372036854775807))) |
The largest value a 64 bit signed integer can hold. | |
#define | U_UINT64_MAX ((uint64_t)(UINT64_C(18446744073709551615))) |
The largest value a 64 bit unsigned integer can hold. | |
#define | TRUE 1 |
The TRUE value of a UBool. | |
#define | FALSE 0 |
The FALSE value of a UBool. | |
#define | U_HAVE_WCHAR_H 1 |
Indicates whether <wchar.h> is available (1) or not (0). | |
#define | U_SIZEOF_WCHAR_T 4 |
U_SIZEOF_WCHAR_T==sizeof(wchar_t) (0 means it is not defined or autoconf could not set it) | |
#define | U_SIZEOF_UCHAR 2 |
Number of bytes in a UChar. | |
#define | U_ALIGN_CODE(n) |
This is used to align code fragments to a specific byte boundary. | |
#define | U_INLINE |
This is used to request inlining of a function, on platforms and languages which support it. | |
Typedefs | |
typedef int8_t | UBool |
The ICU boolean type. | |
typedef uint16_t | UChar |
Define UChar to be wchar_t if that is 16 bits wide; always assumed to be unsigned. | |
typedef int32_t | UChar32 |
Define UChar32 as a type for single Unicode code points. |
Basic types and constants for UTF.
This file defines basic types and constants for utf.h to be platform-independent. umachine.h and utf.h are included into utypes.h to provide all the general definitions for ICU. All of these definitions used to be in utypes.h before the UTF-handling macros made this unmaintainable.
Definition in file umachine.h.
#define FALSE 0 |
The FALSE value of a UBool.
Definition at line 236 of file umachine.h.
Referenced by UnicodeString::append(), BytesTrie::hasUniqueValue(), UCharsTrie::hasUniqueValue(), ParsePosition::operator==(), UnicodeString::replace(), and UnicodeString::truncate().
The largest value a 16 bit signed integer can hold.
Definition at line 170 of file umachine.h.
The smallest value a 16 bit signed integer can hold.
Definition at line 157 of file umachine.h.
The largest value a 32 bit signed integer can hold.
Definition at line 174 of file umachine.h.
Referenced by UnicodeString::remove().
The smallest value a 32 bit signed integer can hold.
Definition at line 161 of file umachine.h.
#define INT64_C | ( | c | ) | c ## LL |
Provides a platform independent way to specify a signed 64-bit integer constant.
Note: this may be wrong for some 64-bit platforms — ensure your compiler provides INT64_C.
Definition at line 199 of file umachine.h.
The largest value an 8 bit signed integer can hold.
Definition at line 166 of file umachine.h.
The smallest value an 8 bit signed integer can hold.
Definition at line 153 of file umachine.h.
#define TRUE 1 |
The TRUE value of a UBool.
Definition at line 232 of file umachine.h.
Referenced by Calendar::internalSet(), ParsePosition::operator==(), and UnicodeString::truncate().
#define U_ALIGN_CODE | ( | n | ) |
This is used to align code fragments to a specific byte boundary.
This is useful for getting consistent performance test results.
Definition at line 363 of file umachine.h.
#define U_ATTRIBUTE_DEPRECATED |
This is used for GCC specific attributes.
This API is for internal use only.
Definition at line 130 of file umachine.h.
This is used to declare a function as a public ICU C API.
Definition at line 135 of file umachine.h.
#define U_CDECL_BEGIN |
This is used to begin a declaration of a library private ICU C API.
Definition at line 110 of file umachine.h.
#define U_CDECL_END |
This is used to end a declaration of a library private ICU C API.
Definition at line 111 of file umachine.h.
#define U_CFUNC extern |
This is used in a declaration of a library private ICU C function.
Definition at line 109 of file umachine.h.
#define U_HAVE_WCHAR_H 1 |
Indicates whether <wchar.h> is available (1) or not (0).
Set to 1 by default.
Definition at line 253 of file umachine.h.
#define U_INLINE |
This is used to request inlining of a function, on platforms and languages which support it.
Definition at line 378 of file umachine.h.
#define U_INT64_MAX ((int64_t)(INT64_C(9223372036854775807))) |
The largest value a 64 bit signed integer can hold.
Definition at line 215 of file umachine.h.
#define U_INT64_MIN ((int64_t)(INT64_C(-9223372036854775807)-1)) |
The smallest value a 64 bit signed integer can hold.
Definition at line 211 of file umachine.h.
#define U_SIZEOF_UCHAR 2 |
Number of bytes in a UChar.
#define U_SIZEOF_WCHAR_T 4 |
U_SIZEOF_WCHAR_T==sizeof(wchar_t) (0 means it is not defined or autoconf could not set it)
Definition at line 264 of file umachine.h.
#define U_UINT64_MAX ((uint64_t)(UINT64_C(18446744073709551615))) |
The largest value a 64 bit unsigned integer can hold.
Definition at line 219 of file umachine.h.
#define UINT16_MAX ((uint16_t)(65535U)) |
The largest value a 16 bit unsigned integer can hold.
Definition at line 183 of file umachine.h.
#define UINT32_MAX ((uint32_t)(4294967295U)) |
The largest value a 32 bit unsigned integer can hold.
Definition at line 187 of file umachine.h.
#define UINT64_C | ( | c | ) | c ## ULL |
Provides a platform independent way to specify an unsigned 64-bit integer constant.
Note: this may be wrong for some 64-bit platforms — ensure your compiler provides UINT64_C.
Definition at line 207 of file umachine.h.
The largest value an 8 bit unsigned integer can hold.
Definition at line 179 of file umachine.h.
Define UChar to be wchar_t if that is 16 bits wide; always assumed to be unsigned.
If wchar_t is not 16 bits wide, then define UChar to be uint16_t or char16_t because GCC >=4.4 can handle UTF16 string literals. This makes the definition of UChar platform-dependent but allows direct string type compatibility with platforms with 16-bit wchar_t types.
Definition at line 325 of file umachine.h.
Define UChar32 as a type for single Unicode code points.
UChar32 is a signed 32-bit integer (same as int32_t).
The Unicode code point range is 0..0x10ffff. All other values (negative or >=0x110000) are illegal as Unicode code points. They may be used as sentinel values to indicate "done", "error" or similar non-code point conditions.
Before ICU 2.4 (Jitterbug 2146), UChar32 was defined to be wchar_t if that is 32 bits wide (wchar_t may be signed or unsigned) or else to be uint32_t. That is, the definition of UChar32 was platform-dependent.
Definition at line 345 of file umachine.h.