ErrorCodes.thrift

// Copyright 2015 Cloudera Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//
// THIS FILE IS AUTO GENERATED BY generated_error_codes.py DO NOT MODIFY
// IT BY HAND.
//
namespace cpp impala
namespace java com.cloudera.impala.thrift

enum TErrorCode {
  OK,
  GENERAL,
  CANCELLED,
  ANALYSIS_ERROR,
  NOT_IMPLEMENTED_ERROR,
  RUNTIME_ERROR,
  MEM_LIMIT_EXCEEDED,
  INTERNAL_ERROR,
  RECOVERABLE_ERROR,
  PARQUET_MULTIPLE_BLOCKS,
  PARQUET_COLUMN_METADATA_INVALID,
  PARQUET_HEADER_PAGE_SIZE_EXCEEDED,
  PARQUET_HEADER_EOF,
  PARQUET_GROUP_ROW_COUNT_ERROR,
  PARQUET_GROUP_ROW_COUNT_OVERFLOW,
  PARQUET_MISSING_PRECISION,
  PARQUET_WRONG_PRECISION,
  PARQUET_BAD_CONVERTED_TYPE,
  PARQUET_INCOMPATIBLE_DECIMAL,
  SEQUENCE_SCANNER_PARSE_ERROR,
  SNAPPY_DECOMPRESS_INVALID_BLOCK_SIZE,
  SNAPPY_DECOMPRESS_INVALID_COMPRESSED_LENGTH,
  SNAPPY_DECOMPRESS_UNCOMPRESSED_LENGTH_FAILED,
  SNAPPY_DECOMPRESS_RAW_UNCOMPRESS_FAILED,
  SNAPPY_DECOMPRESS_DECOMPRESS_SIZE_INCORRECT,
  HDFS_SCAN_NODE_UNKNOWN_DISK,
  FRAGMENT_EXECUTOR,
  PARTITIONED_HASH_JOIN_MAX_PARTITION_DEPTH,
  PARTITIONED_AGG_MAX_PARTITION_DEPTH,
  MISSING_BUILTIN,
  RPC_GENERAL_ERROR,
  RPC_TIMEOUT,
  UDF_VERIFY_FAILED,
  PARQUET_CORRUPT_VALUE,
  AVRO_DECIMAL_RESOLUTION_ERROR,
  AVRO_DECIMAL_METADATA_MISMATCH,
  AVRO_SCHEMA_RESOLUTION_ERROR,
  AVRO_SCHEMA_METADATA_MISMATCH,
  AVRO_UNSUPPORTED_DEFAULT_VALUE,
  AVRO_MISSING_FIELD,
  AVRO_MISSING_DEFAULT,
  AVRO_NULLABILITY_MISMATCH,
  AVRO_NOT_A_RECORD,
  PARQUET_DEF_LEVEL_ERROR,
  PARQUET_NUM_COL_VALS_ERROR,
  PARQUET_DICT_DECODE_FAILURE,
  SSL_PASSWORD_CMD_FAILED,
  SSL_CERTIFICATE_PATH_BLANK,
  SSL_PRIVATE_KEY_PATH_BLANK,
  SSL_CERTIFICATE_NOT_FOUND,
  SSL_PRIVATE_KEY_NOT_FOUND,
  SSL_SOCKET_CREATION_FAILED,
  MEM_ALLOC_FAILED,
  PARQUET_REP_LEVEL_ERROR,
  PARQUET_UNRECOGNIZED_SCHEMA,
  COLLECTION_ALLOC_FAILED,
  TMP_DEVICE_BLACKLISTED,
  TMP_FILE_BLACKLISTED,
  RPC_CLIENT_CONNECT_FAILURE,
  STALE_METADATA_FILE_TOO_SHORT,
  PARQUET_BAD_VERSION_NUMBER,
  SCANNER_INCOMPLETE_READ,
  SCANNER_INVALID_READ,
  AVRO_BAD_VERSION_HEADER,
  IMPALA_2598_KERBEROS_SSL_DISALLOWED
}

const list<string> TErrorMessage = [
  // OK
  "",
  // GENERAL
  "$0",
  // CANCELLED
  "$0",
  // ANALYSIS_ERROR
  "$0",
  // NOT_IMPLEMENTED_ERROR
  "$0",
  // RUNTIME_ERROR
  "$0",
  // MEM_LIMIT_EXCEEDED
  "$0",
  // INTERNAL_ERROR
  "$0",
  // RECOVERABLE_ERROR
  "$0",
  // PARQUET_MULTIPLE_BLOCKS
  "Parquet files should not be split into multiple hdfs-blocks. file=$0",
  // PARQUET_COLUMN_METADATA_INVALID
  "Column metadata states there are $0 values, but read $1 values from column $2. file=$3",
  // PARQUET_HEADER_PAGE_SIZE_EXCEEDED
  "(unused)",
  // PARQUET_HEADER_EOF
  "ParquetScanner: reached EOF while deserializing data page header. file=$0",
  // PARQUET_GROUP_ROW_COUNT_ERROR
  "Metadata states that in group $0($1) there are $2 rows, but $3 rows were read.",
  // PARQUET_GROUP_ROW_COUNT_OVERFLOW
  "(unused)",
  // PARQUET_MISSING_PRECISION
  "File '$0' column '$1' does not have the decimal precision set.",
  // PARQUET_WRONG_PRECISION
  "File '$0' column '$1' has a precision that does not match the table metadata precision. File metadata precision: $2, table metadata precision: $3.",
  // PARQUET_BAD_CONVERTED_TYPE
  "File '$0' column '$1' does not have converted type set to DECIMAL",
  // PARQUET_INCOMPATIBLE_DECIMAL
  "File '$0' column '$1' contains decimal data but the table metadata has type $2",
  // SEQUENCE_SCANNER_PARSE_ERROR
  "Problem parsing file $0 at $1$2",
  // SNAPPY_DECOMPRESS_INVALID_BLOCK_SIZE
  "Decompressor: block size is too big. Data is likely corrupt. Size: $0",
  // SNAPPY_DECOMPRESS_INVALID_COMPRESSED_LENGTH
  "Decompressor: invalid compressed length. Data is likely corrupt.",
  // SNAPPY_DECOMPRESS_UNCOMPRESSED_LENGTH_FAILED
  "Snappy: GetUncompressedLength failed",
  // SNAPPY_DECOMPRESS_RAW_UNCOMPRESS_FAILED
  "SnappyBlock: RawUncompress failed",
  // SNAPPY_DECOMPRESS_DECOMPRESS_SIZE_INCORRECT
  "Snappy: Decompressed size is not correct.",
  // HDFS_SCAN_NODE_UNKNOWN_DISK
  "Unknown disk id. This will negatively affect performance. Check your hdfs settings to enable block location metadata.",
  // FRAGMENT_EXECUTOR
  "Reserved resource size ($0) is larger than query mem limit ($1), and will be restricted to $1. Configure the reservation size by setting RM_INITIAL_MEM.",
  // PARTITIONED_HASH_JOIN_MAX_PARTITION_DEPTH
  "Cannot perform join at hash join node with id $0. The input data was partitioned the maximum number of $1 times. This could mean there is significant skew in the data or the memory limit is set too low.",
  // PARTITIONED_AGG_MAX_PARTITION_DEPTH
  "Cannot perform aggregation at hash aggregation node with id $0. The input data was partitioned the maximum number of $1 times. This could mean there is significant skew in the data or the memory limit is set too low.",
  // MISSING_BUILTIN
  "Builtin '$0' with symbol '$1' does not exist. Verify that all your impalads are the same version.",
  // RPC_GENERAL_ERROR
  "RPC Error: $0",
  // RPC_TIMEOUT
  "RPC timed out",
  // UDF_VERIFY_FAILED
  "Failed to verify function $0 from LLVM module $1, see log for more details.",
  // PARQUET_CORRUPT_VALUE
  "File $0 corrupt. RLE level data bytes = $1",
  // AVRO_DECIMAL_RESOLUTION_ERROR
  "Column '$0' has conflicting Avro decimal types. Table schema $1: $2, file schema $1: $3",
  // AVRO_DECIMAL_METADATA_MISMATCH
  "Column '$0' has conflicting Avro decimal types. Declared $1: $2, $1 in table's Avro schema: $3",
  // AVRO_SCHEMA_RESOLUTION_ERROR
  "Unresolvable types for column '$0': table type: $1, file type: $2",
  // AVRO_SCHEMA_METADATA_MISMATCH
  "Unresolvable types for column '$0': declared column type: $1, table's Avro schema type: $2",
  // AVRO_UNSUPPORTED_DEFAULT_VALUE
  "Field $0 is missing from file and default values of type $1 are not yet supported.",
  // AVRO_MISSING_FIELD
  "Inconsistent table metadata. Mismatch between column definition and Avro schema: cannot read field $0 because there are only $1 fields.",
  // AVRO_MISSING_DEFAULT
  "Field $0 is missing from file and does not have a default value.",
  // AVRO_NULLABILITY_MISMATCH
  "Field $0 is nullable in the file schema but not the table schema.",
  // AVRO_NOT_A_RECORD
  "Inconsistent table metadata. Field $0 is not a record in the Avro schema.",
  // PARQUET_DEF_LEVEL_ERROR
  "Could not read definition level, even though metadata states there are $0 values remaining in data page. file=$1",
  // PARQUET_NUM_COL_VALS_ERROR
  "Mismatched number of values in column index $0 ($1 vs. $2). file=$3",
  // PARQUET_DICT_DECODE_FAILURE
  "Failed to decode dictionary-encoded value. file=$0",
  // SSL_PASSWORD_CMD_FAILED
  "SSL private-key password command ('$0') failed with error: $1",
  // SSL_CERTIFICATE_PATH_BLANK
  "The SSL certificate path is blank",
  // SSL_PRIVATE_KEY_PATH_BLANK
  "The SSL private key path is blank",
  // SSL_CERTIFICATE_NOT_FOUND
  "The SSL certificate file does not exist at path $0",
  // SSL_PRIVATE_KEY_NOT_FOUND
  "The SSL private key file does not exist at path $0",
  // SSL_SOCKET_CREATION_FAILED
  "SSL socket creation failed: $0",
  // MEM_ALLOC_FAILED
  "Memory allocation of $0 bytes failed",
  // PARQUET_REP_LEVEL_ERROR
  "Could not read repetition level, even though metadata states there are $0 values remaining in data page. file=$1",
  // PARQUET_UNRECOGNIZED_SCHEMA
  "File '$0' has an incompatible Parquet schema for column '$1'. Column type: $2, Parquet schema:\n$3",
  // COLLECTION_ALLOC_FAILED
  "Failed to allocate buffer for collection '$0'.",
  // TMP_DEVICE_BLACKLISTED
  "Temporary device for directory $0 is blacklisted from a previous error and cannot be used.",
  // TMP_FILE_BLACKLISTED
  "Temporary file $0 is blacklisted from a previous error and cannot be expanded.",
  // RPC_CLIENT_CONNECT_FAILURE
  "RPC client failed to connect: $0",
  // STALE_METADATA_FILE_TOO_SHORT
  "Metadata for file '$0' appears stale. Try running \"refresh $1\" to reload the file metadata.",
  // PARQUET_BAD_VERSION_NUMBER
  "File '$0' has an invalid version number: $1\nThis could be due to stale metadata. Try running \"refresh $2\".",
  // SCANNER_INCOMPLETE_READ
  "Tried to read $0 bytes but could only read $1 bytes. This may indicate data file corruption. (file $2, byte offset: $3)",
  // SCANNER_INVALID_READ
  "Invalid read of $0 bytes. This may indicate data file corruption. (file $1, byte offset: $2)",
  // AVRO_BAD_VERSION_HEADER
  "File '$0' has an invalid version header: $1\nMake sure the file is an Avro data file.",
  // IMPALA_2598_KERBEROS_SSL_DISALLOWED
  "Enabling server-to-server SSL connections in conjunction with Kerberos authentication is not supported. Disable server-to-server SSL by unsetting --ssl_client_ca_certificate."
]
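
Note (not part of the generated file): each entry in TErrorMessage is a template whose $0, $1, ... markers are filled with per-error arguments at runtime, with the enum value serving as the index into the list. The C++ sketch below is a minimal illustration of that substitution only; FormatErrorMsg is a hypothetical helper written for this note, not the routine Impala itself uses.

// Minimal sketch of $N placeholder substitution, assuming single-digit
// indices (the templates above use at most $3). FormatErrorMsg is an
// illustrative, hypothetical helper.
#include <cctype>
#include <iostream>
#include <string>
#include <vector>

std::string FormatErrorMsg(const std::string& tmpl,
                           const std::vector<std::string>& args) {
  std::string out;
  for (size_t i = 0; i < tmpl.size(); ++i) {
    // On "$<digit>", splice in the corresponding argument if one was supplied.
    if (tmpl[i] == '$' && i + 1 < tmpl.size() &&
        std::isdigit(static_cast<unsigned char>(tmpl[i + 1]))) {
      size_t idx = static_cast<size_t>(tmpl[i + 1] - '0');
      if (idx < args.size()) {
        out += args[idx];
        ++i;  // Skip the digit that named the placeholder.
        continue;
      }
    }
    out += tmpl[i];  // Ordinary character, or an unfilled placeholder.
  }
  return out;
}

int main() {
  // Template copied verbatim from the PARQUET_MISSING_PRECISION entry above;
  // the file and column names are made-up example arguments.
  const std::string tmpl =
      "File '$0' column '$1' does not have the decimal precision set.";
  std::cout << FormatErrorMsg(tmpl, {"/data/t1/part-0.parq", "price"}) << std::endl;
  return 0;
}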