- // Copyright 2015 Cloudera Inc.
- //
- // Licensed under the Apache License, Version 2.0 (the "License");
- // you may not use this file except in compliance with the License.
- // You may obtain a copy of the License at
- //
- // http://www.apache.org/licenses/LICENSE-2.0
- //
- // Unless required by applicable law or agreed to in writing, software
- // distributed under the License is distributed on an "AS IS" BASIS,
- // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- // See the License for the specific language governing permissions and
- // limitations under the License.
- //
- //
- // THIS FILE IS AUTO GENERATED BY generated_error_codes.py DO NOT MODIFY
- // IT BY HAND.
- //
- namespace cpp impala
- namespace java com.cloudera.impala.thrift
- enum TErrorCode {
- OK,
- GENERAL,
- CANCELLED,
- ANALYSIS_ERROR,
- NOT_IMPLEMENTED_ERROR,
- RUNTIME_ERROR,
- MEM_LIMIT_EXCEEDED,
- INTERNAL_ERROR,
- RECOVERABLE_ERROR,
- PARQUET_MULTIPLE_BLOCKS,
- PARQUET_COLUMN_METADATA_INVALID,
- PARQUET_HEADER_PAGE_SIZE_EXCEEDED,
- PARQUET_HEADER_EOF,
- PARQUET_GROUP_ROW_COUNT_ERROR,
- PARQUET_GROUP_ROW_COUNT_OVERFLOW,
- PARQUET_MISSING_PRECISION,
- PARQUET_WRONG_PRECISION,
- PARQUET_BAD_CONVERTED_TYPE,
- PARQUET_INCOMPATIBLE_DECIMAL,
- SEQUENCE_SCANNER_PARSE_ERROR,
- SNAPPY_DECOMPRESS_INVALID_BLOCK_SIZE,
- SNAPPY_DECOMPRESS_INVALID_COMPRESSED_LENGTH,
- SNAPPY_DECOMPRESS_UNCOMPRESSED_LENGTH_FAILED,
- SNAPPY_DECOMPRESS_RAW_UNCOMPRESS_FAILED,
- SNAPPY_DECOMPRESS_DECOMPRESS_SIZE_INCORRECT,
- HDFS_SCAN_NODE_UNKNOWN_DISK,
- FRAGMENT_EXECUTOR,
- PARTITIONED_HASH_JOIN_MAX_PARTITION_DEPTH,
- PARTITIONED_AGG_MAX_PARTITION_DEPTH,
- MISSING_BUILTIN,
- RPC_GENERAL_ERROR,
- RPC_TIMEOUT,
- UDF_VERIFY_FAILED,
- PARQUET_CORRUPT_VALUE,
- AVRO_DECIMAL_RESOLUTION_ERROR,
- AVRO_DECIMAL_METADATA_MISMATCH,
- AVRO_SCHEMA_RESOLUTION_ERROR,
- AVRO_SCHEMA_METADATA_MISMATCH,
- AVRO_UNSUPPORTED_DEFAULT_VALUE,
- AVRO_MISSING_FIELD,
- AVRO_MISSING_DEFAULT,
- AVRO_NULLABILITY_MISMATCH,
- AVRO_NOT_A_RECORD,
- PARQUET_DEF_LEVEL_ERROR,
- PARQUET_NUM_COL_VALS_ERROR,
- PARQUET_DICT_DECODE_FAILURE,
- SSL_PASSWORD_CMD_FAILED,
- SSL_CERTIFICATE_PATH_BLANK,
- SSL_PRIVATE_KEY_PATH_BLANK,
- SSL_CERTIFICATE_NOT_FOUND,
- SSL_PRIVATE_KEY_NOT_FOUND,
- SSL_SOCKET_CREATION_FAILED,
- MEM_ALLOC_FAILED,
- PARQUET_REP_LEVEL_ERROR,
- PARQUET_UNRECOGNIZED_SCHEMA,
- COLLECTION_ALLOC_FAILED,
- TMP_DEVICE_BLACKLISTED,
- TMP_FILE_BLACKLISTED,
- RPC_CLIENT_CONNECT_FAILURE,
- STALE_METADATA_FILE_TOO_SHORT,
- PARQUET_BAD_VERSION_NUMBER,
- SCANNER_INCOMPLETE_READ,
- SCANNER_INVALID_READ,
- AVRO_BAD_VERSION_HEADER,
- IMPALA_2598_KERBEROS_SSL_DISALLOWED
- }
- const list<string> TErrorMessage = [
- // OK
- "",
- // GENERAL
- "$0",
- // CANCELLED
- "$0",
- // ANALYSIS_ERROR
- "$0",
- // NOT_IMPLEMENTED_ERROR
- "$0",
- // RUNTIME_ERROR
- "$0",
- // MEM_LIMIT_EXCEEDED
- "$0",
- // INTERNAL_ERROR
- "$0",
- // RECOVERABLE_ERROR
- "$0",
- // PARQUET_MULTIPLE_BLOCKS
- "Parquet files should not be split into multiple hdfs-blocks. file=$0",
- // PARQUET_COLUMN_METADATA_INVALID
- "Column metadata states there are $0 values, but read $1 values from column $2. file=$3",
- // PARQUET_HEADER_PAGE_SIZE_EXCEEDED
- "(unused)",
- // PARQUET_HEADER_EOF
- "ParquetScanner: reached EOF while deserializing data page header. file=$0",
- // PARQUET_GROUP_ROW_COUNT_ERROR
- "Metadata states that in group $0($1) there are $2 rows, but $3 rows were read.",
- // PARQUET_GROUP_ROW_COUNT_OVERFLOW
- "(unused)",
- // PARQUET_MISSING_PRECISION
- "File '$0' column '$1' does not have the decimal precision set.",
- // PARQUET_WRONG_PRECISION
- "File '$0' column '$1' has a precision that does not match the table metadata precision. File metadata precision: $2, table metadata precision: $3.",
- // PARQUET_BAD_CONVERTED_TYPE
- "File '$0' column '$1' does not have converted type set to DECIMAL",
- // PARQUET_INCOMPATIBLE_DECIMAL
- "File '$0' column '$1' contains decimal data but the table metadata has type $2",
- // SEQUENCE_SCANNER_PARSE_ERROR
- "Problem parsing file $0 at $1$2",
- // SNAPPY_DECOMPRESS_INVALID_BLOCK_SIZE
- "Decompressor: block size is too big. Data is likely corrupt. Size: $0",
- // SNAPPY_DECOMPRESS_INVALID_COMPRESSED_LENGTH
- "Decompressor: invalid compressed length. Data is likely corrupt.",
- // SNAPPY_DECOMPRESS_UNCOMPRESSED_LENGTH_FAILED
- "Snappy: GetUncompressedLength failed",
- // SNAPPY_DECOMPRESS_RAW_UNCOMPRESS_FAILED
- "SnappyBlock: RawUncompress failed",
- // SNAPPY_DECOMPRESS_DECOMPRESS_SIZE_INCORRECT
- "Snappy: Decompressed size is not correct.",
- // HDFS_SCAN_NODE_UNKNOWN_DISK
- "Unknown disk id. This will negatively affect performance. Check your hdfs settings to enable block location metadata.",
- // FRAGMENT_EXECUTOR
- "Reserved resource size ($0) is larger than query mem limit ($1), and will be restricted to $1. Configure the reservation size by setting RM_INITIAL_MEM.",
- // PARTITIONED_HASH_JOIN_MAX_PARTITION_DEPTH
- "Cannot perform join at hash join node with id $0. The input data was partitioned the maximum number of $1 times. This could mean there is significant skew in the data or the memory limit is set too low.",
- // PARTITIONED_AGG_MAX_PARTITION_DEPTH
- "Cannot perform aggregation at hash aggregation node with id $0. The input data was partitioned the maximum number of $1 times. This could mean there is significant skew in the data or the memory limit is set too low.",
- // MISSING_BUILTIN
- "Builtin '$0' with symbol '$1' does not exist. Verify that all your impalads are the same version.",
- // RPC_GENERAL_ERROR
- "RPC Error: $0",
- // RPC_TIMEOUT
- "RPC timed out",
- // UDF_VERIFY_FAILED
- "Failed to verify function $0 from LLVM module $1, see log for more details.",
- // PARQUET_CORRUPT_VALUE
- "File $0 corrupt. RLE level data bytes = $1",
- // AVRO_DECIMAL_RESOLUTION_ERROR
- "Column '$0' has conflicting Avro decimal types. Table schema $1: $2, file schema $1: $3",
- // AVRO_DECIMAL_METADATA_MISMATCH
- "Column '$0' has conflicting Avro decimal types. Declared $1: $2, $1 in table's Avro schema: $3",
- // AVRO_SCHEMA_RESOLUTION_ERROR
- "Unresolvable types for column '$0': table type: $1, file type: $2",
- // AVRO_SCHEMA_METADATA_MISMATCH
- "Unresolvable types for column '$0': declared column type: $1, table's Avro schema type: $2",
- // AVRO_UNSUPPORTED_DEFAULT_VALUE
- "Field $0 is missing from file and default values of type $1 are not yet supported.",
- // AVRO_MISSING_FIELD
- "Inconsistent table metadata. Mismatch between column definition and Avro schema: cannot read field $0 because there are only $1 fields.",
- // AVRO_MISSING_DEFAULT
- "Field $0 is missing from file and does not have a default value.",
- // AVRO_NULLABILITY_MISMATCH
- "Field $0 is nullable in the file schema but not the table schema.",
- // AVRO_NOT_A_RECORD
- "Inconsistent table metadata. Field $0 is not a record in the Avro schema.",
- // PARQUET_DEF_LEVEL_ERROR
- "Could not read definition level, even though metadata states there are $0 values remaining in data page. file=$1",
- // PARQUET_NUM_COL_VALS_ERROR
- "Mismatched number of values in column index $0 ($1 vs. $2). file=$3",
- // PARQUET_DICT_DECODE_FAILURE
- "Failed to decode dictionary-encoded value. file=$0",
- // SSL_PASSWORD_CMD_FAILED
- "SSL private-key password command ('$0') failed with error: $1",
- // SSL_CERTIFICATE_PATH_BLANK
- "The SSL certificate path is blank",
- // SSL_PRIVATE_KEY_PATH_BLANK
- "The SSL private key path is blank",
- // SSL_CERTIFICATE_NOT_FOUND
- "The SSL certificate file does not exist at path $0",
- // SSL_PRIVATE_KEY_NOT_FOUND
- "The SSL private key file does not exist at path $0",
- // SSL_SOCKET_CREATION_FAILED
- "SSL socket creation failed: $0",
- // MEM_ALLOC_FAILED
- "Memory allocation of $0 bytes failed",
- // PARQUET_REP_LEVEL_ERROR
- "Could not read repetition level, even though metadata states there are $0 values remaining in data page. file=$1",
- // PARQUET_UNRECOGNIZED_SCHEMA
- "File '$0' has an incompatible Parquet schema for column '$1'. Column type: $2, Parquet schema:\n$3",
- // COLLECTION_ALLOC_FAILED
- "Failed to allocate buffer for collection '$0'.",
- // TMP_DEVICE_BLACKLISTED
- "Temporary device for directory $0 is blacklisted from a previous error and cannot be used.",
- // TMP_FILE_BLACKLISTED
- "Temporary file $0 is blacklisted from a previous error and cannot be expanded.",
- // RPC_CLIENT_CONNECT_FAILURE
- "RPC client failed to connect: $0",
- // STALE_METADATA_FILE_TOO_SHORT
- "Metadata for file '$0' appears stale. Try running \"refresh $1\" to reload the file metadata.",
- // PARQUET_BAD_VERSION_NUMBER
- "File '$0' has an invalid version number: $1\nThis could be due to stale metadata. Try running \"refresh $2\".",
- // SCANNER_INCOMPLETE_READ
- "Tried to read $0 bytes but could only read $1 bytes. This may indicate data file corruption. (file $2, byte offset: $3)",
- // SCANNER_INVALID_READ
- "Invalid read of $0 bytes. This may indicate data file corruption. (file $1, byte offset: $2)",
- // AVRO_BAD_VERSION_HEADER
- "File '$0' has an invalid version header: $1\nMake sure the file is an Avro data file.",
- // IMPALA_2598_KERBEROS_SSL_DISALLOWED
- "Enabling server-to-server SSL connections in conjunction with Kerberos authentication is not supported at the same time. Disable server-to-server SSL by unsetting --ssl_client_ca_certificate."
- ]
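
The two declarations above are parallel structures: because TErrorCode assigns no explicit values, Thrift numbers its members 0, 1, 2, ..., and the template for an error sits at that same ordinal position in TErrorMessage (the inline comments keep the pairing readable). The $0, $1, ... tokens are positional placeholders that the backend fills with per-error arguments when the message is reported. The C++ sketch below is a minimal stand-alone illustration of that placeholder expansion only; ExpandTemplate and the sample arguments are hypothetical names for this note, not part of Impala's actual implementation.

#include <cctype>
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// Expand positional "$N" placeholders (single digit, which covers every
// template in this file) with the supplied arguments. Placeholders without a
// matching argument are left as-is.
std::string ExpandTemplate(const std::string& tmpl,
                           const std::vector<std::string>& args) {
  std::string out;
  for (std::size_t i = 0; i < tmpl.size(); ++i) {
    if (tmpl[i] == '$' && i + 1 < tmpl.size() &&
        std::isdigit(static_cast<unsigned char>(tmpl[i + 1]))) {
      std::size_t idx = static_cast<std::size_t>(tmpl[i + 1] - '0');
      if (idx < args.size()) {
        out += args[idx];
        ++i;  // skip the digit that was just consumed
        continue;
      }
    }
    out += tmpl[i];
  }
  return out;
}

int main() {
  // Template copied from the PARQUET_MISSING_PRECISION entry above; the file
  // and column names are made-up example arguments.
  const std::string tmpl =
      "File '$0' column '$1' does not have the decimal precision set.";
  std::cout << ExpandTemplate(tmpl, {"/warehouse/t1/part-0.parq", "price"})
            << std::endl;
  return 0;
}

Because the message list is indexed by enum ordinal, the generator must emit both declarations in the same order, which is one reason the header warns against editing this file by hand.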