LLVM: include/llvm/Support/ConvertUTF.h File Reference
#include "[llvm/Support/Compiler.h](Compiler%5F8h%5Fsource.html)"#include <cstddef>#include <string>
| Namespaces | |
|---|---|
| namespace | llvm |
| Macros | |
|---|---|
| #define | UNI_REPLACEMENT_CHAR (UTF32)0x0000FFFD |
| #define | UNI_MAX_BMP (UTF32)0x0000FFFF |
| #define | UNI_MAX_UTF16 (UTF32)0x0010FFFF |
| #define | UNI_MAX_UTF32 (UTF32)0x7FFFFFFF |
| #define | UNI_MAX_LEGAL_UTF32 (UTF32)0x0010FFFF |
| #define | UNI_MAX_UTF8_BYTES_PER_CODE_POINT 4 |
| #define | UNI_UTF16_BYTE_ORDER_MARK_NATIVE 0xFEFF |
| #define | UNI_UTF16_BYTE_ORDER_MARK_SWAPPED 0xFFFE |
| #define | UNI_UTF32_BYTE_ORDER_MARK_NATIVE 0x0000FEFF |
| #define | UNI_UTF32_BYTE_ORDER_MARK_SWAPPED 0xFFFE0000 |
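
The limit macros can be compared directly against scalar values. Below is a minimal sketch, assuming a hypothetical `sanitizeCodePoint` helper that is not part of this header; note that the `UNI_*` macros expand to unqualified `(UTF32)` casts, so the `UTF32` typedef from the `llvm` namespace must be visible at the point of use.

```cpp
#include "llvm/Support/ConvertUTF.h"

using llvm::UTF32; // the UNI_* macros expand to unqualified (UTF32) casts

// Hypothetical helper: map anything that is not a Unicode scalar value to
// U+FFFD, the replacement character used for lossy conversions.
static UTF32 sanitizeCodePoint(UTF32 CP) {
  if (CP > UNI_MAX_LEGAL_UTF32)     // beyond U+10FFFF
    return UNI_REPLACEMENT_CHAR;
  if (CP >= 0xD800 && CP <= 0xDFFF) // UTF-16 surrogate halves are not scalar values
    return UNI_REPLACEMENT_CHAR;
  return CP;
}
```
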
| Functions | |
|---|---|
| LLVM_ABI ConversionResult | llvm::ConvertUTF8toUTF16 (const UTF8 **sourceStart, const UTF8 *sourceEnd, UTF16 **targetStart, UTF16 *targetEnd, ConversionFlags flags) |
| LLVM_ABI ConversionResult | llvm::ConvertUTF8toUTF32Partial (const UTF8 **sourceStart, const UTF8 *sourceEnd, UTF32 **targetStart, UTF32 *targetEnd, ConversionFlags flags) |
| Convert a partial UTF8 sequence to UTF32; an incomplete trailing code unit sequence yields sourceExhausted. | |
| LLVM_ABI ConversionResult | llvm::ConvertUTF8toUTF32 (const UTF8 **sourceStart, const UTF8 *sourceEnd, UTF32 **targetStart, UTF32 *targetEnd, ConversionFlags flags) |
| Convert a partial UTF8 sequence to UTF32; an incomplete trailing code unit sequence is treated as illegal (sourceIllegal). | |
| LLVM_ABI ConversionResult | llvm::ConvertUTF16toUTF8 (const UTF16 **sourceStart, const UTF16 *sourceEnd, UTF8 **targetStart, UTF8 *targetEnd, ConversionFlags flags) |
| LLVM_ABI ConversionResult | llvm::ConvertUTF32toUTF8 (const UTF32 **sourceStart, const UTF32 *sourceEnd, UTF8 **targetStart, UTF8 *targetEnd, ConversionFlags flags) |
| LLVM_ABI ConversionResult | llvm::ConvertUTF16toUTF32 (const UTF16 **sourceStart, const UTF16 *sourceEnd, UTF32 **targetStart, UTF32 *targetEnd, ConversionFlags flags) |
| LLVM_ABI ConversionResult | llvm::ConvertUTF32toUTF16 (const UTF32 **sourceStart, const UTF32 *sourceEnd, UTF16 **targetStart, UTF16 *targetEnd, ConversionFlags flags) |
| LLVM_ABI Boolean | llvm::isLegalUTF8Sequence (const UTF8 *source, const UTF8 *sourceEnd) |
| LLVM_ABI Boolean | llvm::isLegalUTF8String (const UTF8 **source, const UTF8 *sourceEnd) |
| LLVM_ABI unsigned | llvm::getUTF8SequenceSize (const UTF8 *source, const UTF8 *sourceEnd) |
| LLVM_ABI unsigned | llvm::getNumBytesForUTF8 (UTF8 firstByte) |
| LLVM_ABI bool | llvm::ConvertUTF8toWide (unsigned WideCharWidth, llvm::StringRef Source, char *&ResultPtr, const UTF8 *&ErrorPtr) |
| Convert a UTF-8 StringRef to UTF-8, UTF-16, or UTF-32, depending on WideCharWidth. | |
| LLVM_ABI bool | llvm::ConvertUTF8toWide (llvm::StringRef Source, std::wstring &Result) |
| Converts a UTF-8 StringRef to a std::wstring. | |
| LLVM_ABI bool | llvm::ConvertUTF8toWide (const char *Source, std::wstring &Result) |
| Converts a UTF-8 C-string to a std::wstring. | |
| LLVM_ABI bool | llvm::convertWideToUTF8 (const std::wstring &Source, std::string &Result) |
| Converts a std::wstring to a UTF-8 encoded std::string. | |
| LLVM_ABI bool | llvm::ConvertCodePointToUTF8 (unsigned Source, char *&ResultPtr) |
| Convert a Unicode code point to a UTF-8 sequence. | |
| ConversionResult | llvm::convertUTF8Sequence (const UTF8 **source, const UTF8 *sourceEnd, UTF32 *target, ConversionFlags flags) |
| Convert the first UTF8 sequence in the given source buffer to a UTF32 code point. | |
| LLVM_ABI bool | llvm::hasUTF16ByteOrderMark (ArrayRef< char > SrcBytes) |
| Returns true if a blob of text starts with a UTF-16 big or little endian byte order mark. | |
| LLVM_ABI bool | llvm::convertUTF16ToUTF8String (ArrayRef< char > SrcBytes, std::string &Out) |
| Converts a stream of raw bytes assumed to be UTF16 into a UTF8 std::string. | |
| LLVM_ABI bool | llvm::convertUTF16ToUTF8String (ArrayRef< UTF16 > Src, std::string &Out) |
| Converts a UTF16 string into a UTF8 std::string. | |
| LLVM_ABI bool | llvm::convertUTF32ToUTF8String (ArrayRef< char > SrcBytes, std::string &Out) |
| Converts a stream of raw bytes assumed to be UTF32 into a UTF8 std::string. | |
| LLVM_ABI bool | llvm::convertUTF32ToUTF8String (ArrayRef< UTF32 > Src, std::string &Out) |
| Converts a UTF32 string into a UTF8 std::string. | |
| LLVM_ABI bool | llvm::convertUTF8ToUTF16String (StringRef SrcUTF8, SmallVectorImpl< UTF16 > &DstUTF16) |
| Converts a UTF-8 string into a UTF-16 string with native endianness. | |
| LLVM_ABI bool | llvm::IsSingleCodeUnitUTF8Codepoint (unsigned) |
| LLVM_ABI bool | llvm::IsSingleCodeUnitUTF16Codepoint (unsigned) |
| LLVM_ABI bool | llvm::IsSingleCodeUnitUTF32Codepoint (unsigned) |
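
A minimal usage sketch of the low-level pointer-pair interface, here ConvertUTF8toUTF16 with strictConversion. The `toUTF16` wrapper and its one-code-unit-per-input-byte buffer sizing are assumptions for illustration; convertUTF8ToUTF16String (listed above) provides the same conversion ready-made.

```cpp
#include "llvm/Support/ConvertUTF.h"

#include <string>
#include <vector>

// Minimal sketch (the toUTF16 wrapper is illustrative): convert a UTF-8
// std::string to UTF-16 code units with the low-level pointer-pair API.
// The buffer is sized for the worst case of one UTF-16 code unit per input
// byte; the pointers are updated in place as the conversion proceeds.
static bool toUTF16(const std::string &In, std::vector<llvm::UTF16> &Out) {
  Out.resize(In.size());
  const llvm::UTF8 *Src = reinterpret_cast<const llvm::UTF8 *>(In.data());
  const llvm::UTF8 *SrcEnd = Src + In.size();
  llvm::UTF16 *Dst = Out.data();
  llvm::UTF16 *DstEnd = Dst + Out.size();
  llvm::ConversionResult Res = llvm::ConvertUTF8toUTF16(
      &Src, SrcEnd, &Dst, DstEnd, llvm::strictConversion);
  if (Res != llvm::conversionOK)
    return false;
  Out.resize(Dst - Out.data()); // keep only the code units actually written
  return true;
}
```

Because the source and target pointers are advanced in place, a targetExhausted result can be handled by enlarging the destination and calling again from where the conversion stopped.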
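A minimal sketch of the higher-level StringRef/std::wstring helpers, which report failure through their bool return; the `roundTrip` wrapper is hypothetical.

```cpp
#include "llvm/ADT/StringRef.h"
#include "llvm/Support/ConvertUTF.h"

#include <string>

// Minimal sketch (the roundTrip wrapper is hypothetical): UTF-8 -> wide ->
// UTF-8 using the high-level helpers, which return false on malformed input.
static bool roundTrip(llvm::StringRef UTF8In, std::string &UTF8Out) {
  std::wstring Wide;
  if (!llvm::ConvertUTF8toWide(UTF8In, Wide))
    return false; // UTF8In was not well-formed UTF-8
  return llvm::convertWideToUTF8(Wide, UTF8Out);
}
```
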
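A minimal sketch of convertUTF8Sequence, which decodes one code point and advances the source pointer; the `decodeFirst` wrapper is illustrative and also uses getNumBytesForUTF8 as a length guard.

```cpp
#include "llvm/Support/ConvertUTF.h"

#include <cstddef>

// Illustrative wrapper: decode the first code point of a UTF-8 buffer.
// convertUTF8Sequence advances Cursor past the bytes it consumed, so calling
// it in a loop walks the whole buffer one code point at a time.
static bool decodeFirst(const llvm::UTF8 *Buf, size_t Len, llvm::UTF32 &CP) {
  const llvm::UTF8 *Cursor = Buf;
  const llvm::UTF8 *End = Buf + Len;
  if (Cursor == End || llvm::getNumBytesForUTF8(*Cursor) > Len)
    return false; // empty input, or the lead byte promises more bytes than remain
  return llvm::convertUTF8Sequence(&Cursor, End, &CP, llvm::strictConversion) ==
         llvm::conversionOK;
}
```
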