{
"AGGREGATE_FUNCTION_WITH_NONDETERMINISTIC_EXPRESSION" : {
"message" : [
"Non-deterministic expression <sqlExpr> should not appear in the arguments of an aggregate function."
],
"sqlState" : "42845"
},
"ALL_PARAMETERS_MUST_BE_NAMED" : {
"message" : [
"Using name parameterized queries requires all parameters to be named. Parameters missing names: <exprs>."
],
"sqlState" : "07001"
},
"ALL_PARTITION_COLUMNS_NOT_ALLOWED" : {
"message" : [
"Cannot use all columns for partition columns."
],
"sqlState" : "KD005"
},
"ALTER_TABLE_COLUMN_DESCRIPTOR_DUPLICATE" : {
"message" : [
"ALTER TABLE <type> column <columnName> specifies descriptor \"<optionName>\" more than once, which is invalid."
],
"sqlState" : "42710"
},
"AMBIGUOUS_ALIAS_IN_NESTED_CTE" : {
"message" : [
"Name <name> is ambiguous in nested CTE.",
"Please set <config> to \"CORRECTED\" so that name defined in inner CTE takes precedence. If set it to \"LEGACY\", outer CTE definitions will take precedence.",
"See '<docroot>/sql-migration-guide.html#query-engine'."
],
"sqlState" : "42KD0"
},
"AMBIGUOUS_COLUMN_OR_FIELD" : {
"message" : [
"Column or field <name> is ambiguous and has <n> matches."
],
"sqlState" : "42702"
},
"AMBIGUOUS_COLUMN_REFERENCE" : {
"message" : [
"Column <name> is ambiguous. It's because you joined several DataFrame together, and some of these DataFrames are the same.",
"This column points to one of the DataFrames but Spark is unable to figure out which one.",
"Please alias the DataFrames with different names via `DataFrame.alias` before joining them,",
"and specify the column using qualified name, e.g. `df.alias(\"a\").join(df.alias(\"b\"), col(\"a.id\") > col(\"b.id\"))`."
],
"sqlState" : "42702"
},
"AMBIGUOUS_LATERAL_COLUMN_ALIAS" : {
"message" : [
"Lateral column alias <name> is ambiguous and has <n> matches."
],
"sqlState" : "42702"
},
"AMBIGUOUS_REFERENCE" : {
"message" : [
"Reference <name> is ambiguous, could be: <referenceNames>."
],
"sqlState" : "42704"
},
"AMBIGUOUS_REFERENCE_TO_FIELDS" : {
"message" : [
"Ambiguous reference to the field <field>. It appears <count> times in the schema."
],
"sqlState" : "42000"
},
"ARITHMETIC_OVERFLOW" : {
"message" : [
"<message>.<alternative> If necessary set <config> to \"false\" to bypass this error."
],
"sqlState" : "22003"
},
"ASSIGNMENT_ARITY_MISMATCH" : {
"message" : [
"The number of columns or variables assigned or aliased: <numTarget> does not match the number of source expressions: <numExpr>."
],
"sqlState" : "42802"
},
"AS_OF_JOIN" : {
"message" : [
"Invalid as-of join."
],
"subClass" : {
"TOLERANCE_IS_NON_NEGATIVE" : {
"message" : [
"The input argument `tolerance` must be non-negative."
]
},
"TOLERANCE_IS_UNFOLDABLE" : {
"message" : [
"The input argument `tolerance` must be a constant."
]
}
},
"sqlState" : "42604"
},
"AVRO_INCOMPATIBLE_READ_TYPE" : {
"message" : [
"Cannot convert Avro <avroPath> to SQL <sqlPath> because the original encoded data type is <avroType>, however you're trying to read the field as <sqlType>, which would lead to an incorrect answer.",
"To allow reading this field, enable the SQL configuration: \"spark.sql.legacy.avro.allowIncompatibleSchema\"."
],
"sqlState" : "22KD3"
},
"BATCH_METADATA_NOT_FOUND" : {
"message" : [
"Unable to find batch <batchMetadataFile>."
],
"sqlState" : "42K03"
},
"BINARY_ARITHMETIC_OVERFLOW" : {
"message" : [
"<value1> <symbol> <value2> caused overflow."
],
"sqlState" : "22003"
},
"CALL_ON_STREAMING_DATASET_UNSUPPORTED" : {
"message" : [
"The method <methodName> can not be called on streaming Dataset/DataFrame."
],
"sqlState" : "42KDE"
},
"CANNOT_ALTER_PARTITION_COLUMN" : {
"message" : [
"ALTER TABLE (ALTER|CHANGE) COLUMN is not supported for partition columns, but found the partition column <columnName> in the table <tableName>."
],
"sqlState" : "428FR"
},
"CANNOT_ASSIGN_EVENT_TIME_COLUMN_WITHOUT_WATERMARK" : {
"message" : [
"Watermark needs to be defined to reassign event time column. Failed to find watermark definition in the streaming query."
],
"sqlState" : "42611"
},
"CANNOT_CAST_DATATYPE" : {
"message" : [
"Cannot cast <sourceType> to <targetType>."
],
"sqlState" : "42846"
},
"CANNOT_CONVERT_PROTOBUF_FIELD_TYPE_TO_SQL_TYPE" : {
"message" : [
"Cannot convert Protobuf <protobufColumn> to SQL <sqlColumn> because schema is incompatible (protobufType = <protobufType>, sqlType = <sqlType>)."
],
"sqlState" : "42846"
},
"CANNOT_CONVERT_PROTOBUF_MESSAGE_TYPE_TO_SQL_TYPE" : {
"message" : [
"Unable to convert <protobufType> of Protobuf to SQL type <toType>."
],
"sqlState" : "42846"
},
"CANNOT_CONVERT_SQL_TYPE_TO_PROTOBUF_FIELD_TYPE" : {
"message" : [
"Cannot convert SQL <sqlColumn> to Protobuf <protobufColumn> because schema is incompatible (protobufType = <protobufType>, sqlType = <sqlType>)."
],
"sqlState" : "42846"
},
"CANNOT_CONVERT_SQL_VALUE_TO_PROTOBUF_ENUM_TYPE" : {
"message" : [
"Cannot convert SQL <sqlColumn> to Protobuf <protobufColumn> because <data> is not in defined values for enum: <enumString>."
],
"sqlState" : "42846"
},
"CANNOT_CREATE_DATA_SOURCE_TABLE" : {
"message" : [
"Failed to create data source table <tableName>:"
],
"subClass" : {
"EXTERNAL_METADATA_UNSUPPORTED" : {
"message" : [
"provider '<provider>' does not support external metadata but a schema is provided. Please remove the schema when creating the table."
]
}
},
"sqlState" : "42KDE"
},
"CANNOT_DECODE_URL" : {
"message" : [
"The provided URL cannot be decoded: <url>. Please ensure that the URL is properly formatted and try again."
],
"sqlState" : "22546"
},
"CANNOT_INVOKE_IN_TRANSFORMATIONS" : {
"message" : [
"Dataset transformations and actions can only be invoked by the driver, not inside of other Dataset transformations; for example, dataset1.map(x => dataset2.values.count() * x) is invalid because the values transformation and count action cannot be performed inside of the dataset1.map transformation. For more information, see SPARK-28702."
],
"sqlState" : "0A000"
},
"CANNOT_LOAD_FUNCTION_CLASS" : {
"message" : [
"Cannot load class <className> when registering the function <functionName>, please make sure it is on the classpath."
],
"sqlState" : "46103"
},
"CANNOT_LOAD_PROTOBUF_CLASS" : {
"message" : [
"Could not load Protobuf class with name <protobufClassName>. <explanation>."
],
"sqlState" : "42K03"
},
"CANNOT_LOAD_STATE_STORE" : {
"message" : [
"An error occurred during loading state."
],
"subClass" : {
"CANNOT_READ_CHECKPOINT" : {
"message" : [
"Cannot read RocksDB checkpoint metadata. Expected <expectedVersion>, but found <actualVersion>."
]
},
"CANNOT_READ_DELTA_FILE_KEY_SIZE" : {
"message" : [
"Error reading delta file <fileToRead> of <clazz>: key size cannot be <keySize>."
]
},
"CANNOT_READ_DELTA_FILE_NOT_EXISTS" : {
"message" : [
"Error reading delta file <fileToRead> of <clazz>: <fileToRead> does not exist."
]
},
"CANNOT_READ_SNAPSHOT_FILE_KEY_SIZE" : {
"message" : [
"Error reading snapshot file <fileToRead> of <clazz>: key size cannot be <keySize>."
]
},
"CANNOT_READ_SNAPSHOT_FILE_VALUE_SIZE" : {
"message" : [
"Error reading snapshot file <fileToRead> of <clazz>: value size cannot be <valueSize>."
]
},
"CANNOT_READ_STREAMING_STATE_FILE" : {
"message" : [
"Error reading streaming state file of <fileToRead> does not exist. If the stream job is restarted with a new or updated state operation, please create a new checkpoint location or clear the existing checkpoint location."
]
},
"UNCATEGORIZED" : {
"message" : [
""
]
},
"UNEXPECTED_FILE_SIZE" : {
"message" : [
"Copied <dfsFile> to <localFile>, expected <expectedSize> bytes, found <localFileSize> bytes."
]
},
"UNEXPECTED_VERSION" : {
"message" : [
"Version cannot be <version> because it is less than 0."
]
},
"UNRELEASED_THREAD_ERROR" : {
"message" : [
"<loggingId>: RocksDB instance could not be acquired by <newAcquiredThreadInfo> for operationType=<operationType> as it was not released by <acquiredThreadInfo> after <timeWaitedMs> ms.",
"Thread holding the lock has trace: <stackTraceOutput>"
]
}
},
"sqlState" : "58030"
},
"CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE" : {
"message" : [
"Failed to merge incompatible data types <left> and <right>. Please check the data types of the columns being merged and ensure that they are compatible. If necessary, consider casting the columns to compatible data types before attempting the merge."
],
"sqlState" : "42825"
},
"CANNOT_MERGE_SCHEMAS" : {
"message" : [
"Failed merging schemas:",
"Initial schema:",
"<left>",
"Schema that cannot be merged with the initial schema:",
"<right>."
],
"sqlState" : "42KD9"
},
"CANNOT_MODIFY_CONFIG" : {
"message" : [
"Cannot modify the value of the Spark config: <key>.",
"See also '<docroot>/sql-migration-guide.html#ddl-statements'."
],
"sqlState" : "46110"
},
"CANNOT_PARSE_DECIMAL" : {
"message" : [
"Cannot parse decimal. Please ensure that the input is a valid number with optional decimal point or comma separators."
],
"sqlState" : "22018"
},
"CANNOT_PARSE_INTERVAL" : {
"message" : [
"Unable to parse <intervalString>. Please ensure that the value provided is in a valid format for defining an interval. You can reference the documentation for the correct format. If the issue persists, please double check that the input value is not null or empty and try again."
],
"sqlState" : "22006"
},
"CANNOT_PARSE_JSON_FIELD" : {
"message" : [
"Cannot parse the field name <fieldName> and the value <fieldValue> of the JSON token type <jsonType> to target Spark data type <dataType>."
],
"sqlState" : "2203G"
},
"CANNOT_PARSE_PROTOBUF_DESCRIPTOR" : {
"message" : [
"Error parsing descriptor bytes into Protobuf FileDescriptorSet."
],
"sqlState" : "22018"
},
"CANNOT_PARSE_TIMESTAMP" : {
"message" : [
"<message>. If necessary set <ansiConfig> to \"false\" to bypass this error."
],
"sqlState" : "22007"
},
"CANNOT_RECOGNIZE_HIVE_TYPE" : {
"message" : [
"Cannot recognize hive type string: <fieldType>, column: <fieldName>. The specified data type for the field cannot be recognized by Spark SQL. Please check the data type of the specified field and ensure that it is a valid Spark SQL data type. Refer to the Spark SQL documentation for a list of valid data types and their format. If the data type is correct, please ensure that you are using a supported version of Spark SQL."
],
"sqlState" : "429BB"
},
"CANNOT_RENAME_ACROSS_SCHEMA" : {
"message" : [
"Renaming a <type> across schemas is not allowed."
],
"sqlState" : "0AKD0"
},
"CANNOT_RESOLVE_DATAFRAME_COLUMN" : {
"message" : [
"Cannot resolve dataframe column <name>. It's probably because of illegal references like `df1.select(df2.col(\"a\"))`."
],
"sqlState" : "42704"
},
"CANNOT_RESOLVE_STAR_EXPAND" : {
"message" : [
"Cannot resolve <targetString>.* given input columns <columns>. Please check that the specified table or struct exists and is accessible in the input columns."
],
"sqlState" : "42704"
},
"CANNOT_RESTORE_PERMISSIONS_FOR_PATH" : {
"message" : [
"Failed to set permissions on created path <path> back to <permission>."
],
"sqlState" : "58030"
},
"CANNOT_SAVE_VARIANT" : {
"message" : [
"Cannot save variant data type into external storage."
],
"sqlState" : "0A000"
},
"CANNOT_UPDATE_FIELD" : {
"message" : [
"Cannot update <table> field <fieldName> type:"
],
"subClass" : {
"ARRAY_TYPE" : {
"message" : [
"Update the element by updating <fieldName>.element."
]
},
"INTERVAL_TYPE" : {
"message" : [
"Update an interval by updating its fields."
]
},
"MAP_TYPE" : {
"message" : [
"Update a map by updating <fieldName>.key or <fieldName>.value."
]
},
"STRUCT_TYPE" : {
"message" : [
"Update a struct by updating its fields."
]
},
"USER_DEFINED_TYPE" : {
"message" : [
"Update a UserDefinedType[<udtSql>] by updating its fields."
]
}
},
"sqlState" : "0A000"
},
"CANNOT_UP_CAST_DATATYPE" : {
"message" : [
"Cannot up cast <expression> from <sourceType> to <targetType>.",
"<details>"
],
"sqlState" : "42846"
},
"CANNOT_WRITE_STATE_STORE" : {
"message" : [
"Error writing state store files for provider <providerClass>."
],
"subClass" : {
"CANNOT_COMMIT" : {
"message" : [
"Cannot perform commit during state checkpoint."
]
}
},
"sqlState" : "58030"
},
"CAST_INVALID_INPUT" : {
"message" : [
"The value <expression> of the type <sourceType> cannot be cast to <targetType> because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set <ansiConfig> to \"false\" to bypass this error."
],
"sqlState" : "22018"
},
"CAST_OVERFLOW" : {
"message" : [
"The value <value> of the type <sourceType> cannot be cast to <targetType> due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set <ansiConfig> to \"false\" to bypass this error."
],
"sqlState" : "22003"
},
"CAST_OVERFLOW_IN_TABLE_INSERT" : {
"message" : [
"Fail to assign a value of <sourceType> type to the <targetType> type column or variable <columnName> due to an overflow. Use `try_cast` on the input value to tolerate overflow and return NULL instead."
],
"sqlState" : "22003"
},
"CATALOG_NOT_FOUND" : {
"message" : [
"The catalog <catalogName> not found. Consider to set the SQL config <config> to a catalog plugin."
],
"sqlState" : "42P08"
},
"CHECKPOINT_RDD_BLOCK_ID_NOT_FOUND" : {
"message" : [
"Checkpoint block <rddBlockId> not found!",
"Either the executor that originally checkpointed this partition is no longer alive, or the original RDD is unpersisted.",
"If this problem persists, you may consider using `rdd.checkpoint()` instead, which is slower than local checkpointing but more fault-tolerant."
],
"sqlState" : "56000"
},
"CLASS_NOT_OVERRIDE_EXPECTED_METHOD" : {
"message" : [
"<className> must override either <method1> or <method2>."
],
"sqlState" : "38000"
},
"CLASS_UNSUPPORTED_BY_MAP_OBJECTS" : {
"message" : [
"`MapObjects` does not support the class <cls> as resulting collection."
],
"sqlState" : "0A000"
},
"CODEC_NOT_AVAILABLE" : {
"message" : [
"The codec <codecName> is not available."
],
"subClass" : {
"WITH_AVAILABLE_CODECS_SUGGESTION" : {
"message" : [
"Available codecs are <availableCodecs>."
]
},
"WITH_CONF_SUGGESTION" : {
"message" : [
"Consider to set the config <configKey> to <configVal>."
]
}
},
"sqlState" : "56038"
},
"CODEC_SHORT_NAME_NOT_FOUND" : {
"message" : [
"Cannot find a short name for the codec <codecName>."
],
"sqlState" : "42704"
},
"COLLATION_INVALID_NAME" : {
"message" : [
"The value <collationName> does not represent a correct collation name. Suggested valid collation name: [<proposal>]."
],
"sqlState" : "42704"
},
"COLLATION_MISMATCH" : {
"message" : [
"Could not determine which collation to use for string functions and operators."
],
"subClass" : {
"EXPLICIT" : {
"message" : [
"Error occurred due to the mismatch between explicit collations: <explicitTypes>. Decide on a single explicit collation and remove others."
]
},
"IMPLICIT" : {
"message" : [
"Error occurred due to the mismatch between multiple implicit non-default collations. Use COLLATE function to set the collation explicitly."
]
}
},
"sqlState" : "42P21"
},
"COLLECTION_SIZE_LIMIT_EXCEEDED" : {
"message" : [
"Can't create array with <numberOfElements> elements which exceeding the array size limit <maxRoundedArrayLength>,"
],
"subClass" : {
"FUNCTION" : {
"message" : [
"unsuccessful try to create arrays in the function <functionName>."
]
},
"INITIALIZE" : {
"message" : [
"cannot initialize an array with specified parameters."
]
},
"PARAMETER" : {
"message" : [
"the value of parameter(s) <parameter> in the function <functionName> is invalid."
]
}
},
"sqlState" : "54000"
},
"COLUMN_ALIASES_NOT_ALLOWED" : {
"message" : [
"Column aliases are not allowed in <op>."
],
"sqlState" : "42601"
},
"COLUMN_ALREADY_EXISTS" : {
"message" : [
"The column <columnName> already exists. Choose another name or rename the existing column."
],
"sqlState" : "42711"
},
"COLUMN_NOT_DEFINED_IN_TABLE" : {
"message" : [
"<colType> column <colName> is not defined in table <tableName>, defined table columns are: <tableCols>."
],
"sqlState" : "42703"
},
"COLUMN_NOT_FOUND" : {
"message" : [
"The column <colName> cannot be found. Verify the spelling and correctness of the column name according to the SQL config <caseSensitiveConfig>."
],
"sqlState" : "42703"
},
"COMPARATOR_RETURNS_NULL" : {
"message" : [
"The comparator has returned a NULL for a comparison between <firstValue> and <secondValue>.",
"It should return a positive integer for \"greater than\", 0 for \"equal\" and a negative integer for \"less than\".",
"To revert to deprecated behavior where NULL is treated as 0 (equal), you must set \"spark.sql.legacy.allowNullComparisonResultInArraySort\" to \"true\"."
],
"sqlState" : "22004"
},
"COMPLEX_EXPRESSION_UNSUPPORTED_INPUT" : {
"message" : [
"Cannot process input data types for the expression: <expression>."
],
"subClass" : {
"MISMATCHED_TYPES" : {
"message" : [
"All input types must be the same except nullable, containsNull, valueContainsNull flags, but found the input types <inputTypes>."
]
},
"NO_INPUTS" : {
"message" : [
"The collection of input data types must not be empty."
]
}
},
"sqlState" : "42K09"
},
"CONCURRENT_QUERY" : {
"message" : [
"Another instance of this query was just started by a concurrent session."
],
"sqlState" : "0A000"
},
"CONCURRENT_STREAM_LOG_UPDATE" : {
"message" : [
"Concurrent update to the log. Multiple streaming jobs detected for <batchId>.",
"Please make sure only one streaming job runs on a specific checkpoint location at a time."
],
"sqlState" : "40000"
},
"CONNECT" : {
"message" : [
"Generic Spark Connect error."
],
"subClass" : {
"INTERCEPTOR_CTOR_MISSING" : {
"message" : [
"Cannot instantiate GRPC interceptor because <cls> is missing a default constructor without arguments."
]
},
"INTERCEPTOR_RUNTIME_ERROR" : {
"message" : [
"Error instantiating GRPC interceptor: <msg>"
]
},
"PLUGIN_CTOR_MISSING" : {
"message" : [
"Cannot instantiate Spark Connect plugin because <cls> is missing a default constructor without arguments."
]
},
"PLUGIN_RUNTIME_ERROR" : {
"message" : [
"Error instantiating Spark Connect plugin: <msg>"
]
},
"SESSION_NOT_SAME" : {
"message" : [
"Both Datasets must belong to the same SparkSession."
]
}
},
"sqlState" : "56K00"
},
"CONVERSION_INVALID_INPUT" : {
"message" : [
"The value <str> (<fmt>) cannot be converted to <targetType> because it is malformed. Correct the value as per the syntax, or change its format. Use <suggestion> to tolerate malformed input and return NULL instead."
],
"sqlState" : "22018"
},
"CREATE_PERMANENT_VIEW_WITHOUT_ALIAS" : {
"message" : [
"Not allowed to create the permanent view <name> without explicitly assigning an alias for the expression <attr>."
],
"sqlState" : "0A000"
},
"CREATE_TABLE_COLUMN_DESCRIPTOR_DUPLICATE" : {
"message" : [
"CREATE TABLE column <columnName> specifies descriptor \"<optionName>\" more than once, which is invalid."
],
"sqlState" : "42710"
},
"CREATE_VIEW_COLUMN_ARITY_MISMATCH" : {
"message" : [
"Cannot create view <viewName>, the reason is"
],
"subClass" : {
"NOT_ENOUGH_DATA_COLUMNS" : {
"message" : [
"not enough data columns:",
"View columns: <viewColumns>.",
"Data columns: <dataColumns>."
]
},
"TOO_MANY_DATA_COLUMNS" : {
"message" : [
"too many data columns:",
"View columns: <viewColumns>.",
"Data columns: <dataColumns>."
]
}
},
"sqlState" : "21S01"
},
"DATATYPE_MISMATCH" : {
"message" : [
"Cannot resolve <sqlExpr> due to data type mismatch:"
],
"subClass" : {
"ARRAY_FUNCTION_DIFF_TYPES" : {
"message" : [
"Input to <functionName> should have been <dataType> followed by a value with same element type, but it's [<leftType>, <rightType>]."
]
},
"BINARY_ARRAY_DIFF_TYPES" : {
"message" : [
"Input to function <functionName> should have been two <arrayType> with same element type, but it's [<leftType>, <rightType>]."
]
},
"BINARY_OP_DIFF_TYPES" : {
"message" : [
"the left and right operands of the binary operator have incompatible types (<left> and <right>)."
]
},
"BINARY_OP_WRONG_TYPE" : {
"message" : [
"the binary operator requires the input type <inputType>, not <actualDataType>."
]
},
"BLOOM_FILTER_BINARY_OP_WRONG_TYPE" : {
"message" : [
"The Bloom filter binary input to <functionName> should be either a constant value or a scalar subquery expression, but it's <actual>."
]
},
"BLOOM_FILTER_WRONG_TYPE" : {
"message" : [
"Input to function <functionName> should have been <expectedLeft> followed by value with <expectedRight>, but it's [<actual>]."
]
},
"CANNOT_CONVERT_TO_JSON" : {
"message" : [
"Unable to convert column <name> of type <type> to JSON."
]
},
"CANNOT_DROP_ALL_FIELDS" : {
"message" : [
"Cannot drop all fields in struct."
]
},
"CAST_WITHOUT_SUGGESTION" : {
"message" : [
"cannot cast <srcType> to <targetType>."
]
},
"CAST_WITH_CONF_SUGGESTION" : {
"message" : [
"cannot cast <srcType> to <targetType> with ANSI mode on.",
"If you have to cast <srcType> to <targetType>, you can set <config> as <configVal>."
]
},
"CAST_WITH_FUNC_SUGGESTION" : {
"message" : [
"cannot cast <srcType> to <targetType>.",
"To convert values from <srcType> to <targetType>, you can use the functions <functionNames> instead."
]
},
"CREATE_MAP_KEY_DIFF_TYPES" : {
"message" : [
"The given keys of function <functionName> should all be the same type, but they are <dataType>."
]
},
"CREATE_MAP_VALUE_DIFF_TYPES" : {
"message" : [
"The given values of function <functionName> should all be the same type, but they are <dataType>."
]
},
"CREATE_NAMED_STRUCT_WITHOUT_FOLDABLE_STRING" : {
"message" : [
"Only foldable `STRING` expressions are allowed to appear at odd position, but they are <inputExprs>."
]
},
"DATA_DIFF_TYPES" : {
"message" : [
"Input to <functionName> should all be the same type, but it's <dataType>."
]
},
"FILTER_NOT_BOOLEAN" : {
"message" : [
"Filter expression <filter> of type <type> is not a boolean."
]
},
"HASH_MAP_TYPE" : {
"message" : [
"Input to the function <functionName> cannot contain elements of the \"MAP\" type. In Spark, same maps may have different hashcode, thus hash expressions are prohibited on \"MAP\" elements. To restore previous behavior set \"spark.sql.legacy.allowHashOnMapType\" to \"true\"."
]
},
"HASH_VARIANT_TYPE" : {
"message" : [
"Input to the function <functionName> cannot contain elements of the \"VARIANT\" type yet."
]
},
"INPUT_SIZE_NOT_ONE" : {
"message" : [
"Length of <exprName> should be 1."
]
},
"INVALID_ARG_VALUE" : {
"message" : [
"The <inputName> value must to be a <requireType> literal of <validValues>, but got <inputValue>."
]
},
"INVALID_JSON_MAP_KEY_TYPE" : {
"message" : [
"Input schema <schema> can only contain STRING as a key type for a MAP."
]
},
"INVALID_JSON_SCHEMA" : {
"message" : [
"Input schema <schema> must be a struct, an array, a map or a variant."
]
},
"INVALID_MAP_KEY_TYPE" : {
"message" : [
"The key of map cannot be/contain <keyType>."
]
},
"INVALID_ORDERING_TYPE" : {
"message" : [
"The <functionName> does not support ordering on type <dataType>."
]
},
"INVALID_ROW_LEVEL_OPERATION_ASSIGNMENTS" : {
"message" : [
"<errors>"
]
},
"INVALID_XML_MAP_KEY_TYPE" : {
"message" : [
"Input schema <schema> can only contain STRING as a key type for a MAP."
]
},
"IN_SUBQUERY_DATA_TYPE_MISMATCH" : {
"message" : [
"The data type of one or more elements in the left hand side of an IN subquery is not compatible with the data type of the output of the subquery. Mismatched columns: [<mismatchedColumns>], left side: [<leftType>], right side: [<rightType>]."
]
},
"IN_SUBQUERY_LENGTH_MISMATCH" : {
"message" : [
"The number of columns in the left hand side of an IN subquery does not match the number of columns in the output of subquery. Left hand side columns(length: <leftLength>): [<leftColumns>], right hand side columns(length: <rightLength>): [<rightColumns>]."
]
},
"MAP_CONCAT_DIFF_TYPES" : {
"message" : [
"The <functionName> should all be of type map, but it's <dataType>."
]
},
"MAP_FUNCTION_DIFF_TYPES" : {
"message" : [
"Input to <functionName> should have been <dataType> followed by a value with same key type, but it's [<leftType>, <rightType>]."
]
},
"MAP_ZIP_WITH_DIFF_TYPES" : {
"message" : [
"Input to the <functionName> should have been two maps with compatible key types, but it's [<leftType>, <rightType>]."
]
},
"NON_FOLDABLE_INPUT" : {
"message" : [
"the input <inputName> should be a foldable <inputType> expression; however, got <inputExpr>."
]
},
"NON_STRING_TYPE" : {
"message" : [
"all arguments must be strings."
]
},
"NULL_TYPE" : {
"message" : [
"Null typed values cannot be used as arguments of <functionName>."
]
},
"PARAMETER_CONSTRAINT_VIOLATION" : {
"message" : [
"The <leftExprName>(<leftExprValue>) must be <constraint> the <rightExprName>(<rightExprValue>)."
]
},
"RANGE_FRAME_INVALID_TYPE" : {
"message" : [
"The data type <orderSpecType> used in the order specification does not match the data type <valueBoundaryType> which is used in the range frame."
]
},
"RANGE_FRAME_MULTI_ORDER" : {
"message" : [
"A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: <orderSpec>."
]
},
"RANGE_FRAME_WITHOUT_ORDER" : {
"message" : [
"A range window frame cannot be used in an unordered window specification."
]
},
"SEQUENCE_WRONG_INPUT_TYPES" : {
"message" : [
"<functionName> uses the wrong parameter type. The parameter type must conform to:",
"1. The start and stop expressions must resolve to the same type.",
"2. If start and stop expressions resolve to the <startType> type, then the step expression must resolve to the <stepType> type.",
"3. Otherwise, if start and stop expressions resolve to the <otherStartType> type, then the step expression must resolve to the same type."
]
},
"SPECIFIED_WINDOW_FRAME_DIFF_TYPES" : {
"message" : [
"Window frame bounds <lower> and <upper> do not have the same type: <lowerType> <> <upperType>."
]
},
"SPECIFIED_WINDOW_FRAME_INVALID_BOUND" : {
"message" : [
"Window frame upper bound <upper> does not follow the lower bound <lower>."
]
},
"SPECIFIED_WINDOW_FRAME_UNACCEPTED_TYPE" : {
"message" : [
"The data type of the <location> bound <exprType> does not match the expected data type <expectedType>."
]
},
"SPECIFIED_WINDOW_FRAME_WITHOUT_FOLDABLE" : {
"message" : [
"Window frame <location> bound <expression> is not a literal."
]
},
"SPECIFIED_WINDOW_FRAME_WRONG_COMPARISON" : {
"message" : [
"The lower bound of a window frame must be <comparison> to the upper bound."
]
},
"STACK_COLUMN_DIFF_TYPES" : {
"message" : [
"The data type of the column (<columnIndex>) do not have the same type: <leftType> (<leftParamIndex>) <> <rightType> (<rightParamIndex>)."
]
},
"TYPE_CHECK_FAILURE_WITH_HINT" : {
"message" : [
"<msg><hint>."
]
},
"UNEXPECTED_CLASS_TYPE" : {
"message" : [
"class <className> not found."
]
},
"UNEXPECTED_INPUT_TYPE" : {
"message" : [
"The <paramIndex> parameter requires the <requiredType> type, however <inputSql> has the type <inputType>."
]
},
"UNEXPECTED_NULL" : {
"message" : [
"The <exprName> must not be null."
]
},
"UNEXPECTED_RETURN_TYPE" : {
"message" : [
"The <functionName> requires return <expectedType> type, but the actual is <actualType> type."
]
},
"UNEXPECTED_STATIC_METHOD" : {
"message" : [
"cannot find a static method <methodName> that matches the argument types in <className>."
]
},
"UNSUPPORTED_INPUT_TYPE" : {
"message" : [
"The input of <functionName> can't be <dataType> type data."
]
},
"VALUE_OUT_OF_RANGE" : {
"message" : [
"The <exprName> must be between <valueRange> (current value = <currentValue>)."
]
},
"WRONG_NUM_ARG_TYPES" : {
"message" : [
"The expression requires <expectedNum> argument types but the actual number is <actualNum>."
]
},
"WRONG_NUM_ENDPOINTS" : {
"message" : [
"The number of endpoints must be >= 2 to construct intervals but the actual number is <actualNumber>."
]
}
},
"sqlState" : "42K09"
},
"DATATYPE_MISSING_SIZE" : {
"message" : [
"DataType <type> requires a length parameter, for example <type>(10). Please specify the length."
],
"sqlState" : "42K01"
},
"DATA_SOURCE_ALREADY_EXISTS" : {
"message" : [
"Data source '<provider>' already exists. Please choose a different name for the new data source."
],
"sqlState" : "42710"
},
"DATA_SOURCE_NOT_EXIST" : {
"message" : [
"Data source '<provider>' not found. Please make sure the data source is registered."
],
"sqlState" : "42704"
},
"DATA_SOURCE_NOT_FOUND" : {
"message" : [
"Failed to find the data source: <provider>. Make sure the provider name is correct and the package is properly registered and compatible with your Spark version."
],
"sqlState" : "42K02"
},
"DATA_SOURCE_TABLE_SCHEMA_MISMATCH" : {
"message" : [
"The schema of the data source table does not match the expected schema. If you are using the DataFrameReader.schema API or creating a table, avoid specifying the schema.",
"Data Source schema: <dsSchema>",
"Expected schema: <expectedSchema>"
],
"sqlState" : "42K03"
},
"DATETIME_OVERFLOW" : {
"message" : [
"Datetime operation overflow: <operation>."
],
"sqlState" : "22008"
},
"DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION" : {
"message" : [
"Decimal precision <precision> exceeds max precision <maxPrecision>."
],
"sqlState" : "22003"
},
"DEFAULT_DATABASE_NOT_EXISTS" : {
"message" : [
"Default database <defaultDatabase> does not exist, please create it first or change default database to `<defaultDatabase>`."
],
"sqlState" : "42704"
},
"DEFAULT_PLACEMENT_INVALID" : {
"message" : [
"A DEFAULT keyword in a MERGE, INSERT, UPDATE, or SET VARIABLE command could not be directly assigned to a target column because it was part of an expression.",
"For example: `UPDATE SET c1 = DEFAULT` is allowed, but `UPDATE T SET c1 = DEFAULT + 1` is not allowed."
],
"sqlState" : "42608"
},
"DISTINCT_WINDOW_FUNCTION_UNSUPPORTED" : {
"message" : [
"Distinct window functions are not supported: <windowExpr>."
],
"sqlState" : "0A000"
},
"DIVIDE_BY_ZERO" : {
"message" : [
"Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set <config> to \"false\" to bypass this error."
],
"sqlState" : "22012"
},
"DUPLICATED_FIELD_NAME_IN_ARROW_STRUCT" : {
"message" : [
"Duplicated field names in Arrow Struct are not allowed, got <fieldNames>."
],
"sqlState" : "42713"
},
"DUPLICATED_MAP_KEY" : {
"message" : [
"Duplicate map key <key> was found, please check the input data.",
"If you want to remove the duplicated keys, you can set <mapKeyDedupPolicy> to \"LAST_WIN\" so that the key inserted at last takes precedence."
],
"sqlState" : "23505"
},
"DUPLICATED_METRICS_NAME" : {
"message" : [
"The metric name is not unique: <metricName>. The same name cannot be used for metrics with different results.",
"However multiple instances of metrics with with same result and name are allowed (e.g. self-joins)."
],
"sqlState" : "42710"
},
"DUPLICATE_ASSIGNMENTS" : {
"message" : [
"The columns or variables <nameList> appear more than once as assignment targets."
],
"sqlState" : "42701"
},
"DUPLICATE_CLAUSES" : {
"message" : [
"Found duplicate clauses: <clauseName>. Please, remove one of them."
],
"sqlState" : "42614"
},
"DUPLICATE_KEY" : {
"message" : [
"Found duplicate keys <keyColumn>."
],
"sqlState" : "23505"
},
"DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT" : {
"message" : [
"Call to routine <functionName> is invalid because it includes multiple argument assignments to the same parameter name <parameterName>."
],
"subClass" : {
"BOTH_POSITIONAL_AND_NAMED" : {
"message" : [
"A positional argument and named argument both referred to the same parameter. Please remove the named argument referring to this parameter."
]
},
"DOUBLE_NAMED_ARGUMENT_REFERENCE" : {
"message" : [
"More than one named argument referred to the same parameter. Please assign a value only once."
]
}
},
"sqlState" : "4274K"
},
"EMITTING_ROWS_OLDER_THAN_WATERMARK_NOT_ALLOWED" : {
"message" : [
"Previous node emitted a row with eventTime=<emittedRowEventTime> which is older than current_watermark_value=<currentWatermark>",
"This can lead to correctness issues in the stateful operators downstream in the execution pipeline.",
"Please correct the operator logic to emit rows after current global watermark value."
],
"sqlState" : "42815"
},
"EMPTY_JSON_FIELD_VALUE" : {
"message" : [
"Failed to parse an empty string for data type <dataType>."
],
"sqlState" : "42604"
},
"ENCODER_NOT_FOUND" : {
"message" : [
"Not found an encoder of the type <typeName> to Spark SQL internal representation.",
"Consider to change the input type to one of supported at '<docroot>/sql-ref-datatypes.html'."
],
"sqlState" : "42704"
},
"ERROR_READING_AVRO_UNKNOWN_FINGERPRINT" : {
"message" : [
"Error reading avro data -- encountered an unknown fingerprint: <fingerprint>, not sure what schema to use.",
"This could happen if you registered additional schemas after starting your spark context."
],
"sqlState" : "KD00B"
},
"EVENT_TIME_IS_NOT_ON_TIMESTAMP_TYPE" : {
"message" : [
"The event time <eventName> has the invalid type <eventType>, but expected \"TIMESTAMP\"."
],
"sqlState" : "42K09"
},
"EXCEED_LIMIT_LENGTH" : {
"message" : [
"Exceeds char/varchar type length limitation: <limit>."
],
"sqlState" : "54006"
},
"EXCEPT_NESTED_COLUMN_INVALID_TYPE" : {
"message" : [
"EXCEPT column <columnName> was resolved and expected to be StructType, but found type <dataType>."
],
"sqlState" : "428H2"
},
"EXCEPT_OVERLAPPING_COLUMNS" : {
"message" : [
"Columns in an EXCEPT list must be distinct and non-overlapping, but got (<columns>)."
],
"sqlState" : "42702"
},
"EXEC_IMMEDIATE_DUPLICATE_ARGUMENT_ALIASES" : {
"message" : [
"The USING clause of this EXECUTE IMMEDIATE command contained multiple arguments with same alias (<aliases>), which is invalid; please update the command to specify unique aliases and then try it again."
],
"sqlState" : "42701"
},
"EXPECT_PERMANENT_VIEW_NOT_TEMP" : {
"message" : [
"'<operation>' expects a permanent view but <viewName> is a temp view."
],
"sqlState" : "42809"
},
"EXPECT_TABLE_NOT_VIEW" : {
"message" : [
"'<operation>' expects a table but <viewName> is a view."
],
"subClass" : {
"NO_ALTERNATIVE" : {
"message" : [
""
]
},
"USE_ALTER_VIEW" : {
"message" : [
"Please use ALTER VIEW instead."
]
}
},
"sqlState" : "42809"
},
"EXPECT_VIEW_NOT_TABLE" : {
"message" : [
"The table <tableName> does not support <operation>."
],
"subClass" : {
"NO_ALTERNATIVE" : {
"message" : [
""
]
},
"USE_ALTER_TABLE" : {
"message" : [
"Please use ALTER TABLE instead."
]
}
},
"sqlState" : "42809"
},
"EXPRESSION_DECODING_FAILED" : {
"message" : [
"Failed to decode a row to a value of the expressions: <expressions>."
],
"sqlState" : "42846"
},
"EXPRESSION_ENCODING_FAILED" : {
"message" : [
"Failed to encode a value of the expressions: <expressions> to a row."
],
"sqlState" : "42846"
},
"EXPRESSION_TYPE_IS_NOT_ORDERABLE" : {
"message" : [
"Column expression <expr> cannot be sorted because its type <exprType> is not orderable."
],
"sqlState" : "42822"
},
"FAILED_EXECUTE_UDF" : {
"message" : [
"User defined function (<functionName>: (<signature>) => <result>) failed due to: <reason>."
],
"sqlState" : "39000"
},
"FAILED_FUNCTION_CALL" : {
"message" : [
"Failed preparing of the function <funcName> for call. Please, double check function's arguments."
],
"sqlState" : "38000"
},
"FAILED_JDBC" : {
"message" : [
"Failed JDBC <url> on the operation:"
],
"subClass" : {
"ALTER_TABLE" : {
"message" : [
"Alter the table <tableName>."
]
},
"CREATE_INDEX" : {
"message" : [
"Create the index <indexName> in the <tableName> table."
]
},
"CREATE_NAMESPACE" : {
"message" : [
"Create the namespace <namespace>."
]
},
"CREATE_NAMESPACE_COMMENT" : {
"message" : [
"Create a comment on the namespace: <namespace>."
]
},
"CREATE_TABLE" : {
"message" : [
"Create the table <tableName>."
]
},
"DROP_INDEX" : {
"message" : [
"Drop the index <indexName> in the <tableName> table."
]
},
"DROP_NAMESPACE" : {
"message" : [
"Drop the namespace <namespace>."
]
},
"GET_TABLES" : {
"message" : [
"Get tables from the namespace: <namespace>."
]
},
"LIST_NAMESPACES" : {
"message" : [
"List namespaces."
]
},
"NAMESPACE_EXISTS" : {
"message" : [
"Check that the namespace <namespace> exists."
]
},
"REMOVE_NAMESPACE_COMMENT" : {
"message" : [
"Remove a comment on the namespace: <namespace>."
]
},
"RENAME_TABLE" : {
"message" : [
"Rename the table <oldName> to <newName>."
]
},
"TABLE_EXISTS" : {
"message" : [
"Check that the table <tableName> exists."
]
},
"UNCLASSIFIED" : {
"message" : [
"<message>"
]
}
},
"sqlState" : "HV000"
},
"FAILED_PARSE_STRUCT_TYPE" : {
"message" : [
"Failed parsing struct: <raw>."
],
"sqlState" : "22018"
},
"FAILED_READ_FILE" : {
"message" : [
"Encountered error while reading file <path>."
],
"subClass" : {
"CANNOT_READ_FILE_FOOTER" : {
"message" : [
"Could not read footer. Please ensure that the file is in either ORC or Parquet format.",
"If not, please convert it to a valid format. If the file is in the valid format, please check if it is corrupt.",
"If it is, you can choose to either ignore it or fix the corruption."
]
},
"FILE_NOT_EXIST" : {
"message" : [
"File does not exist. It is possible the underlying files have been updated.",
"You can explicitly invalidate the cache in Spark by running 'REFRESH TABLE tableName' command in SQL or by recreating the Dataset/DataFrame involved."
]
},
"NO_HINT" : {
"message" : [
""
]
},
"PARQUET_COLUMN_DATA_TYPE_MISMATCH" : {
"message" : [
"Data type mismatches when reading Parquet column <column>. Expected Spark type <expectedType>, actual Parquet type <actualType>."
]
}
},
"sqlState" : "KD001"
},
"FAILED_REGISTER_CLASS_WITH_KRYO" : {
"message" : [
"Failed to register classes with Kryo."
],
"sqlState" : "KD000"
},
"FAILED_RENAME_PATH" : {
"message" : [
"Failed to rename <sourcePath> to <targetPath> as destination already exists."
],
"sqlState" : "42K04"
},
"FAILED_RENAME_TEMP_FILE" : {
"message" : [
"Failed to rename temp file <srcPath> to <dstPath> as FileSystem.rename returned false."
],
"sqlState" : "58030"
},
"FAILED_ROW_TO_JSON" : {
"message" : [
"Failed to convert the row value <value> of the class <class> to the target SQL type <sqlType> in the JSON format."
],
"sqlState" : "2203G"
},
"FAILED_TO_PARSE_TOO_COMPLEX" : {
"message" : [
"The statement, including potential SQL functions and referenced views, was too complex to parse.",
"To mitigate this error divide the statement into multiple, less complex chunks."
],
"sqlState" : "54001"
},
"FIELD_ALREADY_EXISTS" : {
"message" : [
"Cannot <op> column, because <fieldNames> already exists in <struct>."
],
"sqlState" : "42710"
},
"FIELD_NOT_FOUND" : {
"message" : [
"No such struct field <fieldName> in <fields>."
],
"sqlState" : "42704"
},
"FORBIDDEN_OPERATION" : {
"message" : [
"The operation <statement> is not allowed on the <objectType>: <objectName>."
],
"sqlState" : "42809"
},
"FOREACH_BATCH_USER_FUNCTION_ERROR" : {
"message" : [
"An error occurred in the user provided function in foreach batch sink. Reason: <reason>"
],
"sqlState" : "39000"
},
"FOUND_MULTIPLE_DATA_SOURCES" : {
"message" : [
"Detected multiple data sources with the name '<provider>'. Please check the data source isn't simultaneously registered and located in the classpath."
],
"sqlState" : "42710"
},
"GENERATED_COLUMN_WITH_DEFAULT_VALUE" : {
"message" : [
"A column cannot have both a default value and a generation expression but column <colName> has default value: (<defaultValue>) and generation expression: (<genExpr>)."
],
"sqlState" : "42623"
},
"GET_TABLES_BY_TYPE_UNSUPPORTED_BY_HIVE_VERSION" : {
"message" : [
"Hive 2.2 and lower versions don't support getTablesByType. Please use Hive 2.3 or higher version."
],
"sqlState" : "56038"
},
"GRAPHITE_SINK_INVALID_PROTOCOL" : {
"message" : [
"Invalid Graphite protocol: <protocol>."
],
"sqlState" : "KD000"
},
"GRAPHITE_SINK_PROPERTY_MISSING" : {
"message" : [
"Graphite sink requires '<property>' property."
],
"sqlState" : "KD000"
},
"GROUPING_COLUMN_MISMATCH" : {
"message" : [
"Column of grouping (<grouping>) can't be found in grouping columns <groupingColumns>."
],
"sqlState" : "42803"
},
"GROUPING_ID_COLUMN_MISMATCH" : {
"message" : [
"Columns of grouping_id (<groupingIdColumn>) does not match grouping columns (<groupByColumns>)."
],
"sqlState" : "42803"
},
"GROUPING_SIZE_LIMIT_EXCEEDED" : {
"message" : [
"Grouping sets size cannot be greater than <maxSize>."
],
"sqlState" : "54000"
},
"GROUP_BY_AGGREGATE" : {
"message" : [
"Aggregate functions are not allowed in GROUP BY, but found <sqlExpr>."
],
"sqlState" : "42903"
},
"GROUP_BY_POS_AGGREGATE" : {
"message" : [
"GROUP BY <index> refers to an expression <aggExpr> that contains an aggregate function. Aggregate functions are not allowed in GROUP BY."
],
"sqlState" : "42903"
},
"GROUP_BY_POS_OUT_OF_RANGE" : {
"message" : [
"GROUP BY position <index> is not in select list (valid range is [1, <size>])."
],
"sqlState" : "42805"
},
"GROUP_EXPRESSION_TYPE_IS_NOT_ORDERABLE" : {
"message" : [
"The expression <sqlExpr> cannot be used as a grouping expression because its data type <dataType> is not an orderable data type."
],
"sqlState" : "42822"
},
"HLL_INVALID_INPUT_SKETCH_BUFFER" : {
"message" : [
"Invalid call to <function>; only valid HLL sketch buffers are supported as inputs (such as those produced by the `hll_sketch_agg` function)."
],
"sqlState" : "22546"
},
"HLL_INVALID_LG_K" : {
"message" : [
"Invalid call to <function>; the `lgConfigK` value must be between <min> and <max>, inclusive: <value>."
],
"sqlState" : "22546"
},
"HLL_UNION_DIFFERENT_LG_K" : {
"message" : [
"Sketches have different `lgConfigK` values: <left> and <right>. Set the `allowDifferentLgConfigK` parameter to true to call <function> with different `lgConfigK` values."
],
"sqlState" : "22000"
},
"IDENTIFIER_TOO_MANY_NAME_PARTS" : {
"message" : [
"<identifier> is not a valid identifier as it has more than 2 name parts."
],
"sqlState" : "42601"
},
"ILLEGAL_STATE_STORE_VALUE" : {
"message" : [
"Illegal value provided to the State Store"
],
"subClass" : {
"EMPTY_LIST_VALUE" : {
"message" : [
"Cannot write empty list values to State Store for StateName <stateName>."
]
},
"NULL_VALUE" : {
"message" : [
"Cannot write null values to State Store for StateName <stateName>."
]
}
},
"sqlState" : "42601"
},
"INCOMPARABLE_PIVOT_COLUMN" : {
"message" : [
"Invalid pivot column <columnName>. Pivot columns must be comparable."
],
"sqlState" : "42818"
},
"INCOMPATIBLE_COLUMN_TYPE" : {
"message" : [
"<operator> can only be performed on tables with compatible column types. The <columnOrdinalNumber> column of the <tableOrdinalNumber> table is <dataType1> type which is not compatible with <dataType2> at the same column of the first table.<hint>."
],
"sqlState" : "42825"
},
"INCOMPATIBLE_DATASOURCE_REGISTER" : {
"message" : [
"Detected an incompatible DataSourceRegister. Please remove the incompatible library from classpath or upgrade it. Error: <message>"
],
"sqlState" : "56038"
},
"INCOMPATIBLE_DATA_FOR_TABLE" : {
"message" : [
"Cannot write incompatible data for the table <tableName>:"
],
"subClass" : {
"AMBIGUOUS_COLUMN_NAME" : {
"message" : [
"Ambiguous column name in the input data <colName>."
]
},
"CANNOT_FIND_DATA" : {
"message" : [
"Cannot find data for the output column <colName>."
]
},
"CANNOT_SAFELY_CAST" : {
"message" : [
"Cannot safely cast <colName> <srcType> to <targetType>."
]
},
"EXTRA_COLUMNS" : {
"message" : [
"Cannot write extra columns <extraColumns>."
]
},
"EXTRA_STRUCT_FIELDS" : {
"message" : [
"Cannot write extra fields <extraFields> to the struct <colName>."
]
},
"NULLABLE_ARRAY_ELEMENTS" : {
"message" : [
"Cannot write nullable elements to array of non-nulls: <colName>."
]
},
"NULLABLE_COLUMN" : {
"message" : [
"Cannot write nullable values to non-null column <colName>."
]
},
"NULLABLE_MAP_VALUES" : {
"message" : [
"Cannot write nullable values to map of non-nulls: <colName>."
]
},
"STRUCT_MISSING_FIELDS" : {
"message" : [
"Struct <colName> missing fields: <missingFields>."
]
},
"UNEXPECTED_COLUMN_NAME" : {
"message" : [
"Struct <colName> <order>-th field name does not match (may be out of order): expected <expected>, found <found>."
]
}
},
"sqlState" : "KD000"
},
"INCOMPATIBLE_JOIN_TYPES" : {
"message" : [
"The join types <joinType1> and <joinType2> are incompatible."
],
"sqlState" : "42613"
},
"INCOMPATIBLE_VIEW_SCHEMA_CHANGE" : {
"message" : [
"The SQL query of view <viewName> has an incompatible schema change and column <colName> cannot be resolved. Expected <expectedNum> columns named <colName> but got <actualCols>.",
"Please try to re-create the view by running: <suggestion>."
],
"sqlState" : "51024"
},
"INCOMPLETE_TYPE_DEFINITION" : {
"message" : [
"Incomplete complex type:"
],
"subClass" : {
"ARRAY" : {
"message" : [
"The definition of \"ARRAY\" type is incomplete. You must provide an element type. For example: \"ARRAY<elementType>\"."
]
},
"MAP" : {
"message" : [
"The definition of \"MAP\" type is incomplete. You must provide a key type and a value type. For example: \"MAP<TIMESTAMP, INT>\"."
]
},
"STRUCT" : {
"message" : [
"The definition of \"STRUCT\" type is incomplete. You must provide at least one field type. For example: \"STRUCT<name STRING, phone DECIMAL(10, 0)>\"."
]
}
},
"sqlState" : "42K01"
},
"INCONSISTENT_BEHAVIOR_CROSS_VERSION" : {
"message" : [
"You may get a different result due to the upgrading to"
],
"subClass" : {
"DATETIME_PATTERN_RECOGNITION" : {
"message" : [
"Spark >= 3.0:",
"Fail to recognize <pattern> pattern in the DateTimeFormatter.",
"1) You can set <config> to \"LEGACY\" to restore the behavior before Spark 3.0.",
"2) You can form a valid datetime pattern with the guide from '<docroot>/sql-ref-datetime-pattern.html'."
]
},
"DATETIME_WEEK_BASED_PATTERN" : {
"message" : [
"Spark >= 3.0:",
"All week-based patterns are unsupported since Spark 3.0, detected week-based character: <c>.",
"Please use the SQL function EXTRACT instead."
]
},
"PARSE_DATETIME_BY_NEW_PARSER" : {
"message" : [
"Spark >= 3.0:",
"Fail to parse <datetime> in the new parser.",
"You can set <config> to \"LEGACY\" to restore the behavior before Spark 3.0, or set to \"CORRECTED\" and treat it as an invalid datetime string."
]
},
"READ_ANCIENT_DATETIME" : {
"message" : [
"Spark >= 3.0: reading dates before 1582-10-15 or timestamps before 1900-01-01T00:00:00Z from <format> files can be ambiguous, as the files may be written by",
"Spark 2.x or legacy versions of Hive, which uses a legacy hybrid calendar that is different from Spark 3.0+'s Proleptic Gregorian calendar.",
"See more details in SPARK-31404.",
"You can set the SQL config <config> or the datasource option <option> to \"LEGACY\" to rebase the datetime values w.r.t. the calendar difference during reading.",
"To read the datetime values as it is, set the SQL config or the datasource option to \"CORRECTED\"."
]
},
"WRITE_ANCIENT_DATETIME" : {
"message" : [
"Spark >= 3.0:",
"writing dates before 1582-10-15 or timestamps before 1900-01-01T00:00:00Z into <format> files can be dangerous, as the files may be read by Spark 2.x or legacy versions of Hive later, which uses a legacy hybrid calendar that is different from Spark 3.0+'s Proleptic Gregorian calendar.",
"See more details in SPARK-31404.",
"You can set <config> to \"LEGACY\" to rebase the datetime values w.r.t. the calendar difference during writing, to get maximum interoperability.",
"Or set the config to \"CORRECTED\" to write the datetime values as it is, if you are sure that the written files will only be read by Spark 3.0+ or other systems that use Proleptic Gregorian calendar."
]
}
},
"sqlState" : "42K0B"
},
"INCORRECT_RAMP_UP_RATE" : {
"message" : [
"Max offset with <rowsPerSecond> rowsPerSecond is <maxSeconds>, but 'rampUpTimeSeconds' is <rampUpTimeSeconds>."
],
"sqlState" : "22003"
},
"INDETERMINATE_COLLATION" : {
"message" : [
"Function called requires knowledge of the collation it should apply, but indeterminate collation was found. Use COLLATE function to set the collation explicitly."
],
"sqlState" : "42P22"
},
"INDEX_ALREADY_EXISTS" : {
"message" : [
"Cannot create the index <indexName> on table <tableName> because it already exists."
],
"sqlState" : "42710"
},
"INDEX_NOT_FOUND" : {
"message" : [
"Cannot find the index <indexName> on table <tableName>."
],
"sqlState" : "42704"
},
"INSERT_COLUMN_ARITY_MISMATCH" : {
"message" : [
"Cannot write to <tableName>, the reason is"
],
"subClass" : {
"NOT_ENOUGH_DATA_COLUMNS" : {
"message" : [
"not enough data columns:",
"Table columns: <tableColumns>.",
"Data columns: <dataColumns>."
]
},
"TOO_MANY_DATA_COLUMNS" : {
"message" : [
"too many data columns:",
"Table columns: <tableColumns>.",
"Data columns: <dataColumns>."
]
}
},
"sqlState" : "21S01"
},
"INSERT_PARTITION_COLUMN_ARITY_MISMATCH" : {
"message" : [
"Cannot write to '<tableName>', <reason>:",
"Table columns: <tableColumns>.",
"Partition columns with static values: <staticPartCols>.",
"Data columns: <dataColumns>."
],
"sqlState" : "21S01"
},
"INSUFFICIENT_TABLE_PROPERTY" : {
"message" : [
"Can't find table property:"
],
"subClass" : {
"MISSING_KEY" : {
"message" : [
"<key>."
]
},
"MISSING_KEY_PART" : {
"message" : [
"<key>, <totalAmountOfParts> parts are expected."
]
}
},
"sqlState" : "XXKUC"
},
"INTERNAL_ERROR" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_BROADCAST" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_EXECUTOR" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_MEMORY" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_METADATA_CATALOG" : {
"message" : [
"An object in the metadata catalog has been corrupted:"
],
"subClass" : {
"SQL_CONFIG" : {
"message" : [
"Corrupted view SQL configs in catalog."
]
},
"TABLE_NAME_CONTEXT" : {
"message" : [
"Corrupted table name context in catalog: <numParts> parts expected, but part <index> is missing."
]
},
"TEMP_FUNCTION_REFERENCE" : {
"message" : [
"Corrupted view referred temp functions names in catalog."
]
},
"TEMP_VARIABLE_REFERENCE" : {
"message" : [
"Corrupted view referred temp variable names in catalog."
]
},
"TEMP_VIEW_REFERENCE" : {
"message" : [
"Corrupted view referred temp view names in catalog."
]
},
"VIEW_QUERY_COLUMN_ARITY" : {
"message" : [
"Corrupted view query output column names in catalog: <numCols> parts expected, but part <index> is missing."
]
}
},
"sqlState" : "XX000"
},
"INTERNAL_ERROR_NETWORK" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_SHUFFLE" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_STORAGE" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_TWS" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERVAL_ARITHMETIC_OVERFLOW" : {
"message" : [
"<message>.<alternative>"
],
"sqlState" : "22015"
},
"INTERVAL_DIVIDED_BY_ZERO" : {
"message" : [
"Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead."
],
"sqlState" : "22012"
},
"INVALID_AGGREGATE_FILTER" : {
"message" : [
"The FILTER expression <filterExpr> in an aggregate function is invalid."
],
"subClass" : {
"CONTAINS_AGGREGATE" : {
"message" : [
"Expected a FILTER expression without an aggregation, but found <aggExpr>."
]
},
"CONTAINS_WINDOW_FUNCTION" : {
"message" : [
"Expected a FILTER expression without a window function, but found <windowExpr>."
]
},
"NON_DETERMINISTIC" : {
"message" : [
"Expected a deterministic FILTER expression."
]
},
"NOT_BOOLEAN" : {
"message" : [
"Expected a FILTER expression of the BOOLEAN type."
]
}
},
"sqlState" : "42903"
},
"INVALID_ARRAY_INDEX" : {
"message" : [
"The index <indexValue> is out of bounds. The array has <arraySize> elements. Use the SQL function `get()` to tolerate accessing element at invalid index and return NULL instead. If necessary set <ansiConfig> to \"false\" to bypass this error."
],
"sqlState" : "22003"
},
"INVALID_ARRAY_INDEX_IN_ELEMENT_AT" : {
"message" : [
"The index <indexValue> is out of bounds. The array has <arraySize> elements. Use `try_element_at` to tolerate accessing element at invalid index and return NULL instead. If necessary set <ansiConfig> to \"false\" to bypass this error."
],
"sqlState" : "22003"
},
"INVALID_BITMAP_POSITION" : {
"message" : [
"The 0-indexed bitmap position <bitPosition> is out of bounds. The bitmap has <bitmapNumBits> bits (<bitmapNumBytes> bytes)."
],
"sqlState" : "22003"
},
"INVALID_BOUNDARY" : {
"message" : [
"The boundary <boundary> is invalid: <invalidValue>."
],
"subClass" : {
"END" : {
"message" : [
"Expected the value is '0', '<longMaxValue>', '[<intMinValue>, <intMaxValue>]'."
]
},
"START" : {
"message" : [
"Expected the value is '0', '<longMinValue>', '[<intMinValue>, <intMaxValue>]'."
]
}
},
"sqlState" : "22003"
},
"INVALID_BUCKET_COLUMN_DATA_TYPE" : {
"message" : [
"Cannot use <type> for bucket column. Collated data types are not supported for bucketing."
],
"sqlState" : "42601"
},
"INVALID_BUCKET_FILE" : {
"message" : [
"Invalid bucket file: <path>."
],
"sqlState" : "58030"
},
"INVALID_BYTE_STRING" : {
"message" : [
"The expected format is ByteString, but was <unsupported> (<class>)."
],
"sqlState" : "22P03"
},
"INVALID_COLUMN_NAME_AS_PATH" : {
"message" : [
"The datasource <datasource> cannot save the column <columnName> because its name contains some characters that are not allowed in file paths. Please, use an alias to rename it."
],
"sqlState" : "46121"
},
"INVALID_COLUMN_OR_FIELD_DATA_TYPE" : {
"message" : [
"Column or field <name> is of type <type> while it's required to be <expectedType>."
],
"sqlState" : "42000"
},
"INVALID_CONF_VALUE" : {
"message" : [
"The value '<confValue>' in the config \"<confName>\" is invalid."
],
"subClass" : {
"DEFAULT_COLLATION" : {
"message" : [
"Cannot resolve the given default collation. Did you mean '<proposal>'?"
]
},
"TIME_ZONE" : {
"message" : [
"Cannot resolve the given timezone."
]
}
},
"sqlState" : "22022"
},
"INVALID_CURSOR" : {
"message" : [
"The cursor is invalid."
],
"subClass" : {
"DISCONNECTED" : {
"message" : [
"The cursor has been disconnected by the server."
]
},
"NOT_REATTACHABLE" : {
"message" : [
"The cursor is not reattachable."
]
},
"POSITION_NOT_AVAILABLE" : {
"message" : [
"The cursor position id <responseId> is no longer available at index <index>."
]
},
"POSITION_NOT_FOUND" : {
"message" : [
"The cursor position id <responseId> is not found."
]
}
},
"sqlState" : "HY109"
},
"INVALID_DATETIME_PATTERN" : {
"message" : [
"Unrecognized datetime pattern: <pattern>."
],
"subClass" : {
"ILLEGAL_CHARACTER" : {
"message" : [
"Illegal pattern character found in datetime pattern: <c>. Please provide legal character."
]
},
"LENGTH" : {
"message" : [
"Too many letters in datetime pattern: <pattern>. Please reduce pattern length."
]
}
},
"sqlState" : "22007"
},
"INVALID_DEFAULT_VALUE" : {
"message" : [
"Failed to execute <statement> command because the destination column or variable <colName> has a DEFAULT value <defaultValue>,"
],
"subClass" : {
"DATA_TYPE" : {
"message" : [
"which requires <expectedType> type, but the statement provided a value of incompatible <actualType> type."
]
},
"NOT_CONSTANT" : {
"message" : [
"which is not a constant expression whose equivalent value is known at query planning time."
]
},
"SUBQUERY_EXPRESSION" : {
"message" : [
"which contains subquery expressions."
]
},
"UNRESOLVED_EXPRESSION" : {
"message" : [
"which fails to resolve as a valid expression."
]
}
},
"sqlState" : "42623"
},
"INVALID_DELIMITER_VALUE" : {
"message" : [
"Invalid value for delimiter."
],
"subClass" : {
"DELIMITER_LONGER_THAN_EXPECTED" : {
"message" : [
"Delimiter cannot be more than one character: <str>."
]
},
"EMPTY_STRING" : {
"message" : [
"Delimiter cannot be empty string."
]
},
"SINGLE_BACKSLASH" : {
"message" : [
"Single backslash is prohibited. It has special meaning as beginning of an escape sequence. To get the backslash character, pass a string with two backslashes as the delimiter."
]
},
"UNSUPPORTED_SPECIAL_CHARACTER" : {
"message" : [
"Unsupported special character for delimiter: <str>."
]
}
},
"sqlState" : "42602"
},
"INVALID_DRIVER_MEMORY" : {
"message" : [
"System memory <systemMemory> must be at least <minSystemMemory>.",
"Please increase heap size using the --driver-memory option or \"<config>\" in Spark configuration."
],
"sqlState" : "F0000"
},
"INVALID_EMPTY_LOCATION" : {
"message" : [
"The location name cannot be empty string, but `<location>` was given."
],
"sqlState" : "42K05"
},
"INVALID_ESC" : {
"message" : [
"Found an invalid escape string: <invalidEscape>. The escape string must contain only one character."
],
"sqlState" : "42604"
},
"INVALID_ESCAPE_CHAR" : {
"message" : [
"`EscapeChar` should be a string literal of length one, but got <sqlExpr>."
],
"sqlState" : "42604"
},
"INVALID_EXECUTOR_MEMORY" : {
"message" : [
"Executor memory <executorMemory> must be at least <minSystemMemory>.",
"Please increase executor memory using the --executor-memory option or \"<config>\" in Spark configuration."
],
"sqlState" : "F0000"
},
"INVALID_EXPRESSION_ENCODER" : {
"message" : [
"Found an invalid expression encoder. Expects an instance of ExpressionEncoder but got <encoderType>. For more information consult '<docroot>/api/java/index.html?org/apache/spark/sql/Encoder.html'."
],
"sqlState" : "42001"
},
"INVALID_EXTRACT_BASE_FIELD_TYPE" : {
"message" : [
"Can't extract a value from <base>. Need a complex type [STRUCT, ARRAY, MAP] but got <other>."
],
"sqlState" : "42000"
},
"INVALID_EXTRACT_FIELD" : {
"message" : [
"Cannot extract <field> from <expr>."
],
"sqlState" : "42601"
},
"INVALID_EXTRACT_FIELD_TYPE" : {
"message" : [
"Field name should be a non-null string literal, but it's <extraction>."
],
"sqlState" : "42000"
},
"INVALID_FIELD_NAME" : {
"message" : [
"Field name <fieldName> is invalid: <path> is not a struct."
],
"sqlState" : "42000"
},
"INVALID_FORMAT" : {
"message" : [
"The format is invalid: <format>."
],
"subClass" : {
"CONT_THOUSANDS_SEPS" : {
"message" : [
"Thousands separators (, or G) must have digits in between them in the number format."
]
},
"CUR_MUST_BEFORE_DEC" : {
"message" : [
"Currency characters must appear before any decimal point in the number format."
]
},
"CUR_MUST_BEFORE_DIGIT" : {
"message" : [
"Currency characters must appear before digits in the number format."
]
},
"EMPTY" : {
"message" : [
"The number format string cannot be empty."
]
},
"ESC_AT_THE_END" : {
"message" : [
"The escape character is not allowed to end with."
]
},
"ESC_IN_THE_MIDDLE" : {
"message" : [
"The escape character is not allowed to precede <char>."
]
},
"MISMATCH_INPUT" : {
"message" : [
"The input <inputType> <input> does not match the format."
]
},
"THOUSANDS_SEPS_MUST_BEFORE_DEC" : {
"message" : [
"Thousands separators (, or G) may not appear after the decimal point in the number format."
]
},
"UNEXPECTED_TOKEN" : {
"message" : [
"Found the unexpected <token> in the format string; the structure of the format string must match: `[MI|S]` `[$]` `[0|9|G|,]*` `[.|D]` `[0|9]*` `[$]` `[PR|MI|S]`."
]
},
"WRONG_NUM_DIGIT" : {
"message" : [
"The format string requires at least one number digit."
]
},
"WRONG_NUM_TOKEN" : {
"message" : [
"At most one <token> is allowed in the number format."
]
}
},
"sqlState" : "42601"
},
"INVALID_FRACTION_OF_SECOND" : {
"message" : [
"The fraction of sec must be zero. Valid range is [0, 60]. If necessary set <ansiConfig> to \"false\" to bypass this error."
],
"sqlState" : "22023"
},
"INVALID_HANDLE" : {
"message" : [
"The handle <handle> is invalid."
],
"subClass" : {
"FORMAT" : {
"message" : [
"Handle must be an UUID string of the format '00112233-4455-6677-8899-aabbccddeeff'"
]
},
"OPERATION_ABANDONED" : {
"message" : [
"Operation was considered abandoned because of inactivity and removed."
]
},
"OPERATION_ALREADY_EXISTS" : {
"message" : [
"Operation already exists."
]
},
"OPERATION_NOT_FOUND" : {
"message" : [
"Operation not found."
]
},
"SESSION_CHANGED" : {
"message" : [
"The existing Spark server driver instance has restarted. Please reconnect."
]
},
"SESSION_CLOSED" : {
"message" : [
"Session was closed."
]
},
"SESSION_NOT_FOUND" : {
"message" : [
"Session not found."
]
}
},
"sqlState" : "HY000"
},
"INVALID_IDENTIFIER" : {
"message" : [
"The unquoted identifier <ident> is invalid and must be back quoted as: `<ident>`.",
"Unquoted identifiers can only contain ASCII letters ('a' - 'z', 'A' - 'Z'), digits ('0' - '9'), and underbar ('_').",
"Unquoted identifiers must also not start with a digit.",
"Different data sources and meta stores may impose additional restrictions on valid identifiers."
],
"sqlState" : "42602"
},
"INVALID_INDEX_OF_ZERO" : {
"message" : [
"The index 0 is invalid. An index shall be either < 0 or > 0 (the first element has index 1)."
],
"sqlState" : "22003"
},
"INVALID_INLINE_TABLE" : {
"message" : [
"Invalid inline table."
],
"subClass" : {
"CANNOT_EVALUATE_EXPRESSION_IN_INLINE_TABLE" : {
"message" : [
"Cannot evaluate the expression <expr> in inline table definition."
]
},
"FAILED_SQL_EXPRESSION_EVALUATION" : {
"message" : [
"Failed to evaluate the SQL expression <sqlExpr>. Please check your syntax and ensure all required tables and columns are available."
]
},
"INCOMPATIBLE_TYPES_IN_INLINE_TABLE" : {
"message" : [
"Found incompatible types in the column <colName> for inline table."
]
},
"NUM_COLUMNS_MISMATCH" : {
"message" : [
"Inline table expected <expectedNumCols> columns but found <actualNumCols> columns in row <rowIndex>."
]
}
},
"sqlState" : "42000"
},
"INVALID_INTERVAL_FORMAT" : {
"message" : [
"Error parsing '<input>' to interval. Please ensure that the value provided is in a valid format for defining an interval. You can reference the documentation for the correct format."
],
"subClass" : {
"ARITHMETIC_EXCEPTION" : {
"message" : [
"Uncaught arithmetic exception while parsing '<input>'."
]
},
"INPUT_IS_EMPTY" : {
"message" : [
"Interval string cannot be empty."
]
},
"INPUT_IS_NULL" : {
"message" : [
"Interval string cannot be null."
]
},
"INVALID_FRACTION" : {
"message" : [
"<unit> cannot have fractional part."
]
},
"INVALID_PRECISION" : {
"message" : [
"Interval can only support nanosecond precision, <value> is out of range."
]
},
"INVALID_PREFIX" : {
"message" : [
"Invalid interval prefix <prefix>."
]
},
"INVALID_UNIT" : {
"message" : [
"Invalid unit <unit>."
]
},
"INVALID_VALUE" : {
"message" : [
"Invalid value <value>."
]
},
"MISSING_NUMBER" : {
"message" : [
"Expect a number after <word> but hit EOL."
]
},
"MISSING_UNIT" : {
"message" : [
"Expect a unit name after <word> but hit EOL."
]
},
"UNKNOWN_PARSING_ERROR" : {
"message" : [
"Unknown error when parsing <word>."
]
},
"UNRECOGNIZED_NUMBER" : {
"message" : [
"Unrecognized number <number>."
]
}
},
"sqlState" : "22006"
},
"INVALID_INVERSE_DISTRIBUTION_FUNCTION" : {
"message" : [
"Invalid inverse distribution function <funcName>."
],
"subClass" : {
"DISTINCT_UNSUPPORTED" : {
"message" : [
"Cannot use DISTINCT with WITHIN GROUP."
]
},
"WITHIN_GROUP_MISSING" : {
"message" : [
"WITHIN GROUP is required for inverse distribution function."
]
},
"WRONG_NUM_ORDERINGS" : {
"message" : [
"Requires <expectedNum> orderings in WITHIN GROUP but got <actualNum>."
]
}
},
"sqlState" : "42K0K"
},
"INVALID_JSON_DATA_TYPE" : {
"message" : [
"Failed to convert the JSON string '<invalidType>' to a data type. Please enter a valid data type."
],
"sqlState" : "2203G"
},
"INVALID_JSON_ROOT_FIELD" : {
"message" : [
"Cannot convert JSON root field to target Spark type."
],
"sqlState" : "22032"
},
"INVALID_JSON_SCHEMA_MAP_TYPE" : {
"message" : [
"Input schema <jsonSchema> can only contain STRING as a key type for a MAP."
],
"sqlState" : "22032"
},
"INVALID_KRYO_SERIALIZER_BUFFER_SIZE" : {
"message" : [
"The value of the config \"<bufferSizeConfKey>\" must be less than 2048 MiB, but got <bufferSizeConfValue> MiB."
],
"sqlState" : "F0000"
},
"INVALID_LAMBDA_FUNCTION_CALL" : {
"message" : [
"Invalid lambda function call."
],
"subClass" : {
"DUPLICATE_ARG_NAMES" : {
"message" : [
"The lambda function has duplicate arguments <args>. Please, consider to rename the argument names or set <caseSensitiveConfig> to \"true\"."
]
},
"NON_HIGHER_ORDER_FUNCTION" : {
"message" : [
"A lambda function should only be used in a higher order function. However, its class is <class>, which is not a higher order function."
]
},
"NUM_ARGS_MISMATCH" : {
"message" : [
"A higher order function expects <expectedNumArgs> arguments, but got <actualNumArgs>."
]
}
},
"sqlState" : "42K0D"
},
"INVALID_LATERAL_JOIN_TYPE" : {
"message" : [
"The <joinType> JOIN with LATERAL correlation is not allowed because an OUTER subquery cannot correlate to its join partner. Remove the LATERAL correlation or use an INNER JOIN, or LEFT OUTER JOIN instead."
],
"sqlState" : "42613"
},
"INVALID_LIMIT_LIKE_EXPRESSION" : {
"message" : [
"The limit like expression <expr> is invalid."
],
"subClass" : {
"DATA_TYPE" : {
"message" : [
"The <name> expression must be integer type, but got <dataType>."
]
},
"IS_NEGATIVE" : {
"message" : [
"The <name> expression must be equal to or greater than 0, but got <v>."
]
},
"IS_NULL" : {
"message" : [
"The evaluated <name> expression must not be null."
]
},
"IS_UNFOLDABLE" : {
"message" : [
"The <name> expression must evaluate to a constant value."
]
}
},
"sqlState" : "42K0E"
},
"INVALID_NON_DETERMINISTIC_EXPRESSIONS" : {
"message" : [
"The operator expects a deterministic expression, but the actual expression is <sqlExprs>."
],
"sqlState" : "42K0E"
},
"INVALID_NUMERIC_LITERAL_RANGE" : {
"message" : [
"Numeric literal <rawStrippedQualifier> is outside the valid range for <typeName> with minimum value of <minValue> and maximum value of <maxValue>. Please adjust the value accordingly."
],
"sqlState" : "22003"
},
"INVALID_OBSERVED_METRICS" : {
"message" : [
"Invalid observed metrics."
],
"subClass" : {
"AGGREGATE_EXPRESSION_WITH_DISTINCT_UNSUPPORTED" : {
"message" : [
"Aggregate expressions with DISTINCT are not allowed in observed metrics, but found: <expr>."
]
},
"AGGREGATE_EXPRESSION_WITH_FILTER_UNSUPPORTED" : {
"message" : [
"Aggregate expression with FILTER predicate are not allowed in observed metrics, but found: <expr>."
]
},
"MISSING_NAME" : {
"message" : [
"The observed metrics should be named: <operator>."
]
},
"NESTED_AGGREGATES_UNSUPPORTED" : {
"message" : [
"Nested aggregates are not allowed in observed metrics, but found: <expr>."
]
},
"NON_AGGREGATE_FUNC_ARG_IS_ATTRIBUTE" : {
"message" : [
"Attribute <expr> can only be used as an argument to an aggregate function."
]
},
"NON_AGGREGATE_FUNC_ARG_IS_NON_DETERMINISTIC" : {
"message" : [
"Non-deterministic expression <expr> can only be used as an argument to an aggregate function."
]
},
"WINDOW_EXPRESSIONS_UNSUPPORTED" : {
"message" : [
"Window expressions are not allowed in observed metrics, but found: <expr>."
]
}
},
"sqlState" : "42K0E"
},
"INVALID_OPTIONS" : {
"message" : [
"Invalid options:"
],
"subClass" : {
"NON_MAP_FUNCTION" : {
"message" : [
"Must use the `map()` function for options."
]
},
"NON_STRING_TYPE" : {
"message" : [
"A type of keys and values in `map()` must be string, but got <mapType>."
]
}
},
"sqlState" : "42K06"
},
"INVALID_PANDAS_UDF_PLACEMENT" : {
"message" : [
"The group aggregate pandas UDF <functionList> cannot be invoked together with as other, non-pandas aggregate functions."
],
"sqlState" : "0A000"
},
"INVALID_PARAMETER_VALUE" : {
"message" : [
"The value of parameter(s) <parameter> in <functionName> is invalid:"
],
"subClass" : {
"AES_CRYPTO_ERROR" : {
"message" : [
"detail message: <detailMessage>"
]
},
"AES_IV_LENGTH" : {
"message" : [
"supports 16-byte CBC IVs and 12-byte GCM IVs, but got <actualLength> bytes for <mode>."
]
},
"AES_KEY_LENGTH" : {
"message" : [
"expects a binary value with 16, 24 or 32 bytes, but got <actualLength> bytes."
]
},
"BINARY_FORMAT" : {
"message" : [
"expects one of binary formats 'base64', 'hex', 'utf-8', but got <invalidFormat>."
]
},
"BIT_POSITION_RANGE" : {
"message" : [
"expects an integer value in [0, <upper>), but got <invalidValue>."
]
},
"CHARSET" : {
"message" : [
"expects one of the charsets 'US-ASCII', 'ISO-8859-1', 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16', but got <charset>."
]
},
"DATETIME_UNIT" : {
"message" : [
"expects one of the units without quotes YEAR, QUARTER, MONTH, WEEK, DAY, DAYOFYEAR, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, but got the string literal <invalidValue>."
]
},
"LENGTH" : {
"message" : [
"Expects `length` greater than or equal to 0, but got <length>."
]
},
"NULL" : {
"message" : [
"expects a non-NULL value."
]
},
"PATTERN" : {
"message" : [
"<value>."
]
},
"REGEX_GROUP_INDEX" : {
"message" : [
"Expects group index between 0 and <groupCount>, but got <groupIndex>."
]
},
"START" : {
"message" : [
"Expects a positive or a negative value for `start`, but got 0."
]
},
"ZERO_INDEX" : {
"message" : [
"expects %1$, %2$ and so on, but got %0$."
]
}
},
"sqlState" : "22023"
},
"INVALID_PARTITION_COLUMN_DATA_TYPE" : {
"message" : [
"Cannot use <type> for partition column."
],
"sqlState" : "0A000"
},
"INVALID_PARTITION_OPERATION" : {
"message" : [
"The partition command is invalid."
],
"subClass" : {
"PARTITION_MANAGEMENT_IS_UNSUPPORTED" : {
"message" : [
"Table <name> does not support partition management."
]
},
"PARTITION_SCHEMA_IS_EMPTY" : {
"message" : [
"Table <name> is not partitioned."
]
}
},
"sqlState" : "42601"
},
"INVALID_PROPERTY_KEY" : {
"message" : [
"<key> is an invalid property key, please use quotes, e.g. SET <key>=<value>."
],
"sqlState" : "42602"
},
"INVALID_PROPERTY_VALUE" : {
"message" : [
"<value> is an invalid property value, please use quotes, e.g. SET <key>=<value>"
],
"sqlState" : "42602"
},
"INVALID_QUERY_MIXED_QUERY_PARAMETERS" : {
"message" : [
"Parameterized query must either use positional, or named parameters, but not both."
],
"sqlState" : "42613"
},
"INVALID_SAVE_MODE" : {
"message" : [
"The specified save mode <mode> is invalid. Valid save modes include \"append\", \"overwrite\", \"ignore\", \"error\", \"errorifexists\", and \"default\"."
],
"sqlState" : "42000"
},
"INVALID_SCHEMA" : {
"message" : [
"The input schema <inputSchema> is not a valid schema string."
],
"subClass" : {
"NON_STRING_LITERAL" : {
"message" : [
"The input expression must be string literal and not null."
]
},
"NON_STRUCT_TYPE" : {
"message" : [
"The input expression should be evaluated to struct type, but got <dataType>."
]
},
"PARSE_ERROR" : {
"message" : [
"Cannot parse the schema:",
"<reason>"
]
}
},
"sqlState" : "42K07"
},
"INVALID_SCHEMA_OR_RELATION_NAME" : {
"message" : [
"<name> is not a valid name for tables/schemas. Valid names only contain alphabet characters, numbers and _."
],
"sqlState" : "42602"
},
"INVALID_SET_SYNTAX" : {
"message" : [
"Expected format is 'SET', 'SET key', or 'SET key=value'. If you want to include special characters in key, or include semicolon in value, please use backquotes, e.g., SET `key`=`value`."
],
"sqlState" : "42000"
},
"INVALID_SINGLE_VARIANT_COLUMN" : {
"message" : [
"The `singleVariantColumn` option cannot be used if there is also a user specified schema."
],
"sqlState" : "42613"
},
"INVALID_SQL_ARG" : {
"message" : [
"The argument <name> of `sql()` is invalid. Consider to replace it either by a SQL literal or by collection constructor functions such as `map()`, `array()`, `struct()`."
],
"sqlState" : "42K08"
},
"INVALID_SQL_SYNTAX" : {
"message" : [
"Invalid SQL syntax:"
],
"subClass" : {
"ANALYZE_TABLE_UNEXPECTED_NOSCAN" : {
"message" : [
"ANALYZE TABLE(S) ... COMPUTE STATISTICS ... <ctx> must be either NOSCAN or empty."
]
},
"CREATE_FUNC_WITH_IF_NOT_EXISTS_AND_REPLACE" : {
"message" : [
"CREATE FUNCTION with both IF NOT EXISTS and REPLACE is not allowed."
]
},
"CREATE_TEMP_FUNC_WITH_DATABASE" : {
"message" : [
"CREATE TEMPORARY FUNCTION with specifying a database(<database>) is not allowed."
]
},
"CREATE_TEMP_FUNC_WITH_IF_NOT_EXISTS" : {
"message" : [
"CREATE TEMPORARY FUNCTION with IF NOT EXISTS is not allowed."
]
},
"EMPTY_PARTITION_VALUE" : {
"message" : [
"Partition key <partKey> must set value."
]
},
"FUNCTION_WITH_UNSUPPORTED_SYNTAX" : {
"message" : [
"The function <prettyName> does not support <syntax>."
]
},
"INVALID_COLUMN_REFERENCE" : {
"message" : [
"Expected a column reference for transform <transform>: <expr>."
]
},
"INVALID_TABLE_FUNCTION_IDENTIFIER_ARGUMENT_MISSING_PARENTHESES" : {
"message" : [
"Syntax error: call to table-valued function is invalid because parentheses are missing around the provided TABLE argument <argumentName>; please surround this with parentheses and try again."
]
},
"INVALID_TABLE_VALUED_FUNC_NAME" : {
"message" : [
"Table valued function cannot specify database name: <funcName>."
]
},
"INVALID_WINDOW_REFERENCE" : {
"message" : [
"Window reference <windowName> is not a window specification."
]
},
"LATERAL_WITHOUT_SUBQUERY_OR_TABLE_VALUED_FUNC" : {
"message" : [
"LATERAL can only be used with subquery and table-valued functions."
]
},
"MULTI_PART_NAME" : {
"message" : [
"<statement> with multiple part function name(<funcName>) is not allowed."
]
},
"OPTION_IS_INVALID" : {
"message" : [
"option or property key <key> is invalid; only <supported> are supported"
]
},
"REPETITIVE_WINDOW_DEFINITION" : {
"message" : [
"The definition of window <windowName> is repetitive."
]
},
"SHOW_FUNCTIONS_INVALID_PATTERN" : {
"message" : [
"Invalid pattern in SHOW FUNCTIONS: <pattern>. It must be a \"STRING\" literal."
]
},
"SHOW_FUNCTIONS_INVALID_SCOPE" : {
"message" : [
"SHOW <scope> FUNCTIONS not supported."
]
},
"TRANSFORM_WRONG_NUM_ARGS" : {
"message" : [
"The transform<transform> requires <expectedNum> parameters but the actual number is <actualNum>."
]
},
"UNRESOLVED_WINDOW_REFERENCE" : {
"message" : [
"Cannot resolve window reference <windowName>."
]
},
"UNSUPPORTED_FUNC_NAME" : {
"message" : [
"Unsupported function name <funcName>."
]
},
"VARIABLE_TYPE_OR_DEFAULT_REQUIRED" : {
"message" : [
"The definition of a SQL variable requires either a datatype or a DEFAULT clause.",
"For example, use `DECLARE name STRING` or `DECLARE name = 'SQL'` instead of `DECLARE name`."
]
}
},
"sqlState" : "42000"
},
"INVALID_STATEMENT_FOR_EXECUTE_INTO" : {
"message" : [
"The INTO clause of EXECUTE IMMEDIATE is only valid for queries but the given statement is not a query: <sqlString>."
],
"sqlState" : "07501"
},
"INVALID_STATEMENT_OR_CLAUSE" : {
"message" : [
"The statement or clause: <operation> is not valid."
],
"sqlState" : "42601"
},
"INVALID_SUBQUERY_EXPRESSION" : {
"message" : [
"Invalid subquery:"
],
"subClass" : {
"SCALAR_SUBQUERY_RETURN_MORE_THAN_ONE_OUTPUT_COLUMN" : {
"message" : [
"Scalar subquery must return only one column, but got <number>."
]
}
},
"sqlState" : "42823"
},
"INVALID_TEMP_OBJ_REFERENCE" : {
"message" : [
"Cannot create the persistent object <objName> of the type <obj> because it references to the temporary object <tempObjName> of the type <tempObj>. Please make the temporary object <tempObjName> persistent, or make the persistent object <objName> temporary."
],
"sqlState" : "42K0F"
},
"INVALID_TIME_TRAVEL_SPEC" : {
"message" : [
"Cannot specify both version and timestamp when time travelling the table."
],
"sqlState" : "42K0E"
},
"INVALID_TIME_TRAVEL_TIMESTAMP_EXPR" : {
"message" : [
"The time travel timestamp expression <expr> is invalid."
],
"subClass" : {
"INPUT" : {
"message" : [
"Cannot be casted to the \"TIMESTAMP\" type."
]
},
"NON_DETERMINISTIC" : {
"message" : [
"Must be deterministic."
]
},
"OPTION" : {
"message" : [
"Timestamp string in the options must be able to cast to TIMESTAMP type."
]
},
"UNEVALUABLE" : {
"message" : [
"Must be evaluable."
]
}
},
"sqlState" : "42K0E"
},
"INVALID_TYPED_LITERAL" : {
"message" : [
"The value of the typed literal <valueType> is invalid: <value>."
],
"sqlState" : "42604"
},
"INVALID_UDF_IMPLEMENTATION" : {
"message" : [
"Function <funcName> does not implement a ScalarFunction or AggregateFunction."
],
"sqlState" : "38000"
},
"INVALID_URL" : {
"message" : [
"The url is invalid: <url>. If necessary set <ansiConfig> to \"false\" to bypass this error."
],
"sqlState" : "22P02"
},
"INVALID_USAGE_OF_STAR_OR_REGEX" : {
"message" : [
"Invalid usage of <elem> in <prettyName>."
],
"sqlState" : "42000"
},
"INVALID_VARIABLE_TYPE_FOR_QUERY_EXECUTE_IMMEDIATE" : {
"message" : [
"Variable type must be string type but got <varType>."
],
"sqlState" : "42K09"
},
"INVALID_VARIANT_CAST" : {
"message" : [
"The variant value `<value>` cannot be cast into `<dataType>`. Please use `try_variant_get` instead."
],
"sqlState" : "22023"
},
"INVALID_VARIANT_GET_PATH" : {
"message" : [
"The path `<path>` is not a valid variant extraction path in `<functionName>`.",
"A valid path should start with `$` and is followed by zero or more segments like `[123]`, `.name`, `['name']`, or `[\"name\"]`."
],
"sqlState" : "22023"
},
"INVALID_VIEW_TEXT" : {
"message" : [
"The view <viewName> cannot be displayed due to invalid view text: <viewText>. This may be caused by an unauthorized modification of the view or an incorrect query syntax. Please check your query syntax and verify that the view has not been tampered with."
],
"sqlState" : "XX000"
},
"INVALID_WHERE_CONDITION" : {
"message" : [
"The WHERE condition <condition> contains invalid expressions: <expressionList>.",
"Rewrite the query to avoid window functions, aggregate functions, and generator functions in the WHERE clause."
],
"sqlState" : "42903"
},
"INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC" : {
"message" : [
"Cannot specify ORDER BY or a window frame for <aggFunc>."
],
"sqlState" : "42601"
},
"INVALID_WRITER_COMMIT_MESSAGE" : {
"message" : [
"The data source writer has generated an invalid number of commit messages. Expected exactly one writer commit message from each task, but received <detail>."
],
"sqlState" : "42KDE"
},
"INVALID_WRITE_DISTRIBUTION" : {
"message" : [
"The requested write distribution is invalid."
],
"subClass" : {
"PARTITION_NUM_AND_SIZE" : {
"message" : [
"The partition number and advisory partition size can't be specified at the same time."
]
},
"PARTITION_NUM_WITH_UNSPECIFIED_DISTRIBUTION" : {
"message" : [
"The number of partitions can't be specified with unspecified distribution."
]
},
"PARTITION_SIZE_WITH_UNSPECIFIED_DISTRIBUTION" : {
"message" : [
"The advisory partition size can't be specified with unspecified distribution."
]
}
},
"sqlState" : "42000"
},
"JOIN_CONDITION_IS_NOT_BOOLEAN_TYPE" : {
"message" : [
"The join condition <joinCondition> has the invalid type <conditionType>, expected \"BOOLEAN\"."
],
"sqlState" : "42K0E"
},
"KRYO_BUFFER_OVERFLOW" : {
"message" : [
"Kryo serialization failed: <exceptionMsg>. To avoid this, increase \"<bufferSizeConfKey>\" value."
],
"sqlState" : "54006"
},
"LOAD_DATA_PATH_NOT_EXISTS" : {
"message" : [
"LOAD DATA input path does not exist: <path>."
],
"sqlState" : "42K03"
},
"LOCAL_MUST_WITH_SCHEMA_FILE" : {
"message" : [
"LOCAL must be used together with the schema of `file`, but got: `<actualSchema>`."
],
"sqlState" : "42601"
},
"LOCATION_ALREADY_EXISTS" : {
"message" : [
"Cannot name the managed table as <identifier>, as its associated location <location> already exists. Please pick a different table name, or remove the existing location first."
],
"sqlState" : "42710"
},
"MALFORMED_CSV_RECORD" : {
"message" : [
"Malformed CSV record: <badRecord>"
],
"sqlState" : "KD000"
},
"MALFORMED_PROTOBUF_MESSAGE" : {
"message" : [
"Malformed Protobuf messages are detected in message deserialization. Parse Mode: <failFastMode>. To process malformed protobuf message as null result, try setting the option 'mode' as 'PERMISSIVE'."
],
"sqlState" : "XX000"
},
"MALFORMED_RECORD_IN_PARSING" : {
"message" : [
"Malformed records are detected in record parsing: <badRecord>.",
"Parse Mode: <failFastMode>. To process malformed records as null result, try setting the option 'mode' as 'PERMISSIVE'."
],
"subClass" : {
"CANNOT_PARSE_JSON_ARRAYS_AS_STRUCTS" : {
"message" : [
"Parsing JSON arrays as structs is forbidden."
]
},
"CANNOT_PARSE_STRING_AS_DATATYPE" : {
"message" : [
"Cannot parse the value <fieldValue> of the field <fieldName> as target spark data type <targetType> from the input type <inputType>."
]
},
"WITHOUT_SUGGESTION" : {
"message" : [
""
]
}
},
"sqlState" : "22023"
},
"MALFORMED_VARIANT" : {
"message" : [
"Variant binary is malformed. Please check the data source is valid."
],
"sqlState" : "22023"
},
"MERGE_CARDINALITY_VIOLATION" : {
"message" : [
"The ON search condition of the MERGE statement matched a single row from the target table with multiple rows of the source table.",
"This could result in the target row being operated on more than once with an update or delete operation and is not allowed."
],
"sqlState" : "23K01"
},
"MISSING_AGGREGATION" : {
"message" : [
"The non-aggregating expression <expression> is based on columns which are not participating in the GROUP BY clause.",
"Add the columns or the expression to the GROUP BY, aggregate the expression, or use <expressionAnyValue> if you do not care which of the values within a group is returned."
],
"sqlState" : "42803"
},
"MISSING_ATTRIBUTES" : {
"message" : [
"Resolved attribute(s) <missingAttributes> missing from <input> in operator <operator>."
],
"subClass" : {
"RESOLVED_ATTRIBUTE_APPEAR_IN_OPERATION" : {
"message" : [
"Attribute(s) with the same name appear in the operation: <operation>.",
"Please check if the right attribute(s) are used."
]
},
"RESOLVED_ATTRIBUTE_MISSING_FROM_INPUT" : {
"message" : [
""
]
}
},
"sqlState" : "XX000"
},
"MISSING_GROUP_BY" : {
"message" : [
"The query does not include a GROUP BY clause. Add GROUP BY or turn it into the window functions using OVER clauses."
],
"sqlState" : "42803"
},
"MULTIPLE_TIME_TRAVEL_SPEC" : {
"message" : [
"Cannot specify time travel in both the time travel clause and options."
],
"sqlState" : "42K0E"
},
"MULTIPLE_XML_DATA_SOURCE" : {
"message" : [
"Detected multiple data sources with the name <provider> (<sourceNames>). Please specify the fully qualified class name or remove <externalSource> from the classpath."
],
"sqlState" : "42710"
},
"MULTI_SOURCES_UNSUPPORTED_FOR_EXPRESSION" : {
"message" : [
"The expression <expr> does not support more than one source."
],
"sqlState" : "42K0E"
},
"MULTI_UDF_INTERFACE_ERROR" : {
"message" : [
"Not allowed to implement multiple UDF interfaces, UDF class <className>."
],
"sqlState" : "0A000"
},
"NAMED_PARAMETERS_NOT_SUPPORTED" : {
"message" : [
"Named parameters are not supported for function <functionName>; please retry the query with positional arguments to the function call instead."
],
"sqlState" : "4274K"
},
"NAMED_PARAMETER_SUPPORT_DISABLED" : {
"message" : [
"Cannot call function <functionName> because named argument references are not enabled here.",
"In this case, the named argument reference was <argument>.",
"Set \"spark.sql.allowNamedFunctionArguments\" to \"true\" to turn on feature."
],
"sqlState" : "0A000"
},
"NESTED_AGGREGATE_FUNCTION" : {
"message" : [
"It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query."
],
"sqlState" : "42607"
},
"NESTED_EXECUTE_IMMEDIATE" : {
"message" : [
"Nested EXECUTE IMMEDIATE commands are not allowed. Please ensure that the SQL query provided (<sqlString>) does not contain another EXECUTE IMMEDIATE command."
],
"sqlState" : "07501"
},
"NONEXISTENT_FIELD_NAME_IN_LIST" : {
"message" : [
"Field(s) <nonExistFields> do(es) not exist. Available fields: <fieldNames>"
],
"sqlState" : "HV091"
},
"NON_FOLDABLE_ARGUMENT" : {
"message" : [
"The function <funcName> requires the parameter <paramName> to be a foldable expression of the type <paramType>, but the actual argument is a non-foldable."
],
"sqlState" : "42K08"
},
"NON_LAST_MATCHED_CLAUSE_OMIT_CONDITION" : {
"message" : [
"When there are more than one MATCHED clauses in a MERGE statement, only the last MATCHED clause can omit the condition."
],
"sqlState" : "42613"
},
"NON_LAST_NOT_MATCHED_BY_SOURCE_CLAUSE_OMIT_CONDITION" : {
"message" : [
"When there are more than one NOT MATCHED BY SOURCE clauses in a MERGE statement, only the last NOT MATCHED BY SOURCE clause can omit the condition."
],
"sqlState" : "42613"
},
"NON_LAST_NOT_MATCHED_BY_TARGET_CLAUSE_OMIT_CONDITION" : {
"message" : [
"When there are more than one NOT MATCHED [BY TARGET] clauses in a MERGE statement, only the last NOT MATCHED [BY TARGET] clause can omit the condition."
],
"sqlState" : "42613"
},
"NON_LITERAL_PIVOT_VALUES" : {
"message" : [
"Literal expressions required for pivot values, found <expression>."
],
"sqlState" : "42K08"
},
"NON_PARTITION_COLUMN" : {
"message" : [
"PARTITION clause cannot contain the non-partition column: <columnName>."
],
"sqlState" : "42000"
},
"NON_TIME_WINDOW_NOT_SUPPORTED_IN_STREAMING" : {
"message" : [
"Window function is not supported in <windowFunc> (as column <columnName>) on streaming DataFrames/Datasets.",
"Structured Streaming only supports time-window aggregation using the WINDOW function. (window specification: <windowSpec>)"
],
"sqlState" : "42KDE"
},
"NOT_ALLOWED_IN_FROM" : {
"message" : [
"Not allowed in the FROM clause:"
],
"subClass" : {
"LATERAL_WITH_PIVOT" : {
"message" : [
"LATERAL together with PIVOT."
]
},
"LATERAL_WITH_UNPIVOT" : {
"message" : [
"LATERAL together with UNPIVOT."
]
},
"UNPIVOT_WITH_PIVOT" : {
"message" : [
"UNPIVOT together with PIVOT."
]
}
},
"sqlState" : "42601"
},
"NOT_A_CONSTANT_STRING" : {
"message" : [
"The expression <expr> used for the routine or clause <name> must be a constant STRING which is NOT NULL."
],
"subClass" : {
"NOT_CONSTANT" : {
"message" : [
"To be considered constant the expression must not depend on any columns, contain a subquery, or invoke a non deterministic function such as rand()."
]
},
"NULL" : {
"message" : [
"The expression evaluates to NULL."
]
},
"WRONG_TYPE" : {
"message" : [
"The data type of the expression is <dataType>."
]
}
},
"sqlState" : "42601"
},
"NOT_A_PARTITIONED_TABLE" : {
"message" : [
"Operation <operation> is not allowed for <tableIdentWithDB> because it is not a partitioned table."
],
"sqlState" : "42809"
},
"NOT_NULL_CONSTRAINT_VIOLATION" : {
"message" : [
"Assigning a NULL is not allowed here."
],
"subClass" : {
"ARRAY_ELEMENT" : {
"message" : [
"The array <columnPath> is defined to contain only elements that are NOT NULL."
]
},
"MAP_VALUE" : {
"message" : [
"The map <columnPath> is defined to contain only values that are NOT NULL."
]
}
},
"sqlState" : "42000"
},
"NOT_SUPPORTED_CHANGE_COLUMN" : {
"message" : [
"ALTER TABLE ALTER/CHANGE COLUMN is not supported for changing <table>'s column <originName> with type <originType> to <newName> with type <newType>."
],
"sqlState" : "0A000"
},
"NOT_SUPPORTED_COMMAND_FOR_V2_TABLE" : {
"message" : [
"<cmd> is not supported for v2 tables."
],
"sqlState" : "0A000"
},
"NOT_SUPPORTED_COMMAND_WITHOUT_HIVE_SUPPORT" : {
"message" : [
"<cmd> is not supported, if you want to enable it, please set \"spark.sql.catalogImplementation\" to \"hive\"."
],
"sqlState" : "0A000"
},
"NOT_SUPPORTED_IN_JDBC_CATALOG" : {
"message" : [
"Not supported command in JDBC catalog:"
],
"subClass" : {
"COMMAND" : {
"message" : [
"<cmd>"
]
},
"COMMAND_WITH_PROPERTY" : {
"message" : [
"<cmd> with property <property>."
]
}
},
"sqlState" : "0A000"
},
"NOT_UNRESOLVED_ENCODER" : {
"message" : [
"Unresolved encoder expected, but <attr> was found."
],
"sqlState" : "42601"
},
"NO_DEFAULT_COLUMN_VALUE_AVAILABLE" : {
"message" : [
"Can't determine the default value for <colName> since it is not nullable and it has no default value."
],
"sqlState" : "42608"
},
"NO_HANDLER_FOR_UDAF" : {
"message" : [
"No handler for UDAF '<functionName>'. Use sparkSession.udf.register(...) instead."
],
"sqlState" : "42000"
},
"NO_MERGE_ACTION_SPECIFIED" : {
"message" : [
"df.mergeInto needs to be followed by at least one of whenMatched/whenNotMatched/whenNotMatchedBySource."
],
"sqlState" : "42K0E"
},
"NO_SQL_TYPE_IN_PROTOBUF_SCHEMA" : {
"message" : [
"Cannot find <catalystFieldPath> in Protobuf schema."
],
"sqlState" : "42S22"
},
"NO_UDF_INTERFACE" : {
"message" : [
"UDF class <className> doesn't implement any UDF interface."
],
"sqlState" : "38000"
},
"NULLABLE_COLUMN_OR_FIELD" : {
"message" : [
"Column or field <name> is nullable while it's required to be non-nullable."
],
"sqlState" : "42000"
},
"NULLABLE_ROW_ID_ATTRIBUTES" : {
"message" : [
"Row ID attributes cannot be nullable: <nullableRowIdAttrs>."
],
"sqlState" : "42000"
},
"NULL_MAP_KEY" : {
"message" : [
"Cannot use null as map key."
],
"sqlState" : "2200E"
},
"NULL_QUERY_STRING_EXECUTE_IMMEDIATE" : {
"message" : [
"Execute immediate requires a non-null variable as the query string, but the provided variable <varName> is null."
],
"sqlState" : "22004"
},
"NUMERIC_OUT_OF_SUPPORTED_RANGE" : {
"message" : [
"The value <value> cannot be interpreted as a numeric since it has more than 38 digits."
],
"sqlState" : "22003"
},
"NUMERIC_VALUE_OUT_OF_RANGE" : {
"message" : [
""
],
"subClass" : {
"WITHOUT_SUGGESTION" : {
"message" : [
"The <roundedValue> rounded half up from <originalValue> cannot be represented as Decimal(<precision>, <scale>)."
]
},
"WITH_SUGGESTION" : {
"message" : [
"<value> cannot be represented as Decimal(<precision>, <scale>). If necessary set <config> to \"false\" to bypass this error, and return NULL instead."
]
}
},
"sqlState" : "22003"
},
"NUM_COLUMNS_MISMATCH" : {
"message" : [
"<operator> can only be performed on inputs with the same number of columns, but the first input has <firstNumColumns> columns and the <invalidOrdinalNum> input has <invalidNumColumns> columns."
],
"sqlState" : "42826"
},
"NUM_TABLE_VALUE_ALIASES_MISMATCH" : {
"message" : [
"Number of given aliases does not match number of output columns.",
"Function name: <funcName>; number of aliases: <aliasesNum>; number of output columns: <outColsNum>."
],
"sqlState" : "42826"
},
"OPERATION_CANCELED" : {
"message" : [
"Operation has been canceled."
],
"sqlState" : "HY008"
},
"ORDER_BY_POS_OUT_OF_RANGE" : {
"message" : [
"ORDER BY position <index> is not in select list (valid range is [1, <size>])."
],
"sqlState" : "42805"
},
"PARSE_EMPTY_STATEMENT" : {
"message" : [
"Syntax error, unexpected empty statement."
],
"sqlState" : "42617"
},
"PARSE_SYNTAX_ERROR" : {
"message" : [
"Syntax error at or near <error><hint>."
],
"sqlState" : "42601"
},
"PARTITIONS_ALREADY_EXIST" : {
"message" : [
"Cannot ADD or RENAME TO partition(s) <partitionList> in table <tableName> because they already exist.",
"Choose a different name, drop the existing partition, or add the IF NOT EXISTS clause to tolerate a pre-existing partition."
],
"sqlState" : "428FT"
},
"PARTITIONS_NOT_FOUND" : {
"message" : [
"The partition(s) <partitionList> cannot be found in table <tableName>.",
"Verify the partition specification and table name.",
"To tolerate the error on drop use ALTER TABLE … DROP IF EXISTS PARTITION."
],
"sqlState" : "428FT"
},
"PATH_ALREADY_EXISTS" : {
"message" : [
"Path <outputPath> already exists. Set mode as \"overwrite\" to overwrite the existing path."
],
"sqlState" : "42K04"
},
"PATH_NOT_FOUND" : {
"message" : [
"Path does not exist: <path>."
],
"sqlState" : "42K03"
},
"PIVOT_VALUE_DATA_TYPE_MISMATCH" : {
"message" : [
"Invalid pivot value '<value>': value data type <valueType> does not match pivot column data type <pivotType>."
],
"sqlState" : "42K09"
},
"PLAN_VALIDATION_FAILED_RULE_EXECUTOR" : {
"message" : [
"The input plan of <ruleExecutor> is invalid: <reason>"
],
"sqlState" : "XXKD0"
},
"PLAN_VALIDATION_FAILED_RULE_IN_BATCH" : {
"message" : [
"Rule <rule> in batch <batch> generated an invalid plan: <reason>"
],
"sqlState" : "XXKD0"
},
"PROTOBUF_DEPENDENCY_NOT_FOUND" : {
"message" : [
"Could not find dependency: <dependencyName>."
],
"sqlState" : "42K0G"
},
"PROTOBUF_DESCRIPTOR_FILE_NOT_FOUND" : {
"message" : [
"Error reading Protobuf descriptor file at path: <filePath>."
],
"sqlState" : "42K0G"
},
"PROTOBUF_FIELD_MISSING" : {
"message" : [
"Searching for <field> in Protobuf schema at <protobufSchema> gave <matchSize> matches. Candidates: <matches>."
],
"sqlState" : "42K0G"
},
"PROTOBUF_FIELD_MISSING_IN_SQL_SCHEMA" : {
"message" : [
"Found <field> in Protobuf schema but there is no match in the SQL schema."
],
"sqlState" : "42K0G"
},
"PROTOBUF_FIELD_TYPE_MISMATCH" : {
"message" : [
"Type mismatch encountered for field: <field>."
],
"sqlState" : "42K0G"
},
"PROTOBUF_MESSAGE_NOT_FOUND" : {
"message" : [
"Unable to locate Message <messageName> in Descriptor."
],
"sqlState" : "42K0G"
},
"PROTOBUF_TYPE_NOT_SUPPORT" : {
"message" : [
"Protobuf type not yet supported: <protobufType>."
],
"sqlState" : "42K0G"
},
"PYTHON_DATA_SOURCE_ERROR" : {
"message" : [
"Failed to <action> Python data source <type>: <msg>"
],
"sqlState" : "38000"
},
"PYTHON_STREAMING_DATA_SOURCE_RUNTIME_ERROR" : {
"message" : [
"Failed when Python streaming data source perform <action>: <msg>"
],
"sqlState" : "38000"
},
"RECURSIVE_PROTOBUF_SCHEMA" : {
"message" : [
"Found recursive reference in Protobuf schema, which can not be processed by Spark by default: <fieldDescriptor>. try setting the option `recursive.fields.max.depth` 0 to 10. Going beyond 10 levels of recursion is not allowed."
],
"sqlState" : "42K0G"
},
"RECURSIVE_VIEW" : {
"message" : [
"Recursive view <viewIdent> detected (cycle: <newPath>)."
],
"sqlState" : "42K0H"
},
"REF_DEFAULT_VALUE_IS_NOT_ALLOWED_IN_PARTITION" : {
"message" : [
"References to DEFAULT column values are not allowed within the PARTITION clause."
],
"sqlState" : "42601"
},
"RENAME_SRC_PATH_NOT_FOUND" : {
"message" : [
"Failed to rename as <sourcePath> was not found."
],
"sqlState" : "42K03"
},
"REPEATED_CLAUSE" : {
"message" : [
"The <clause> clause may be used at most once per <operation> operation."
],
"sqlState" : "42614"
},
"REQUIRED_PARAMETER_NOT_FOUND" : {
"message" : [
"Cannot invoke function <functionName> because the parameter named <parameterName> is required, but the function call did not supply a value. Please update the function call to supply an argument value (either positionally at index <index> or by name) and retry the query again."
],
"sqlState" : "4274K"
},
"REQUIRES_SINGLE_PART_NAMESPACE" : {
"message" : [
"<sessionCatalog> requires a single-part namespace, but got <namespace>."
],
"sqlState" : "42K05"
},
"ROUTINE_ALREADY_EXISTS" : {
"message" : [
"Cannot create the routine <routineName> because it already exists.",
"Choose a different name, drop or replace the existing routine, or add the IF NOT EXISTS clause to tolerate a pre-existing routine."
],
"sqlState" : "42723"
},
"ROUTINE_NOT_FOUND" : {
"message" : [
"The routine <routineName> cannot be found. Verify the spelling and correctness of the schema and catalog.",
"If you did not qualify the name with a schema and catalog, verify the current_schema() output, or qualify the name with the correct schema and catalog.",
"To tolerate the error on drop use DROP ... IF EXISTS."
],
"sqlState" : "42883"
},
"ROW_SUBQUERY_TOO_MANY_ROWS" : {
"message" : [
"More than one row returned by a subquery used as a row."
],
"sqlState" : "21000"
},
"RULE_ID_NOT_FOUND" : {
"message" : [
"Not found an id for the rule name \"<ruleName>\". Please modify RuleIdCollection.scala if you are adding a new rule."
],
"sqlState" : "22023"
},
"SCALAR_SUBQUERY_IS_IN_GROUP_BY_OR_AGGREGATE_FUNCTION" : {
"message" : [
"The correlated scalar subquery '<sqlExpr>' is neither present in GROUP BY, nor in an aggregate function.",
"Add it to GROUP BY using ordinal position or wrap it in `first()` (or `first_value`) if you don't care which value you get."
],
"sqlState" : "0A000"
},
"SCALAR_SUBQUERY_TOO_MANY_ROWS" : {
"message" : [
"More than one row returned by a subquery used as an expression."
],
"sqlState" : "21000"
},
"SCHEMA_ALREADY_EXISTS" : {
"message" : [
"Cannot create schema <schemaName> because it already exists.",
"Choose a different name, drop the existing schema, or add the IF NOT EXISTS clause to tolerate pre-existing schema."
],
"sqlState" : "42P06"
},
"SCHEMA_NOT_EMPTY" : {
"message" : [
"Cannot drop a schema <schemaName> because it contains objects.",
"Use DROP SCHEMA ... CASCADE to drop the schema and all its objects."
],
"sqlState" : "2BP01"
},
"SCHEMA_NOT_FOUND" : {
"message" : [
"The schema <schemaName> cannot be found. Verify the spelling and correctness of the schema and catalog.",
"If you did not qualify the name with a catalog, verify the current_schema() output, or qualify the name with the correct catalog.",
"To tolerate the error on drop use DROP SCHEMA IF EXISTS."
],
"sqlState" : "42704"
},
"SECOND_FUNCTION_ARGUMENT_NOT_INTEGER" : {
"message" : [
"The second argument of <functionName> function needs to be an integer."
],
"sqlState" : "22023"
},
"SEED_EXPRESSION_IS_UNFOLDABLE" : {
"message" : [
"The seed expression <seedExpr> of the expression <exprWithSeed> must be foldable."
],
"sqlState" : "42K08"
},
"SORT_BY_WITHOUT_BUCKETING" : {
"message" : [
"sortBy must be used together with bucketBy."
],
"sqlState" : "42601"
},
"SPARK_JOB_CANCELLED" : {
"message" : [
"Job <jobId> cancelled <reason>"
],
"sqlState" : "XXKDA"
},
"SPECIFY_BUCKETING_IS_NOT_ALLOWED" : {
"message" : [
"A CREATE TABLE without explicit column list cannot specify bucketing information.",
"Please use the form with explicit column list and specify bucketing information.",
"Alternatively, allow bucketing information to be inferred by omitting the clause."
],
"sqlState" : "42601"
},
"SPECIFY_CLUSTER_BY_WITH_BUCKETING_IS_NOT_ALLOWED" : {
"message" : [
"Cannot specify both CLUSTER BY and CLUSTERED BY INTO BUCKETS."
],
"sqlState" : "42908"
},
"SPECIFY_CLUSTER_BY_WITH_PARTITIONED_BY_IS_NOT_ALLOWED" : {
"message" : [
"Cannot specify both CLUSTER BY and PARTITIONED BY."
],
"sqlState" : "42908"
},
"SPECIFY_PARTITION_IS_NOT_ALLOWED" : {
"message" : [
"A CREATE TABLE without explicit column list cannot specify PARTITIONED BY.",
"Please use the form with explicit column list and specify PARTITIONED BY.",
"Alternatively, allow partitioning to be inferred by omitting the PARTITION BY clause."
],
"sqlState" : "42601"
},
"SQL_CONF_NOT_FOUND" : {
"message" : [
"The SQL config <sqlConf> cannot be found. Please verify that the config exists."
],
"sqlState" : "42K0I"
},
"STAR_GROUP_BY_POS" : {
"message" : [
"Star (*) is not allowed in a select list when GROUP BY an ordinal position is used."
],
"sqlState" : "0A000"
},
"STATEFUL_PROCESSOR_CANNOT_PERFORM_OPERATION_WITH_INVALID_HANDLE_STATE" : {
"message" : [
"Failed to perform stateful processor operation=<operationType> with invalid handle state=<handleState>."
],
"sqlState" : "42802"
},
"STATEFUL_PROCESSOR_CANNOT_PERFORM_OPERATION_WITH_INVALID_TIME_MODE" : {
"message" : [
"Failed to perform stateful processor operation=<operationType> with invalid timeMode=<timeMode>"
],
"sqlState" : "42802"
},
"STATEFUL_PROCESSOR_CANNOT_REINITIALIZE_STATE_ON_KEY" : {
"message" : [
"Cannot re-initialize state on the same grouping key during initial state handling for stateful processor. Invalid grouping key=<groupingKey>."
],
"sqlState" : "42802"
},
"STATEFUL_PROCESSOR_INCORRECT_TIME_MODE_TO_ASSIGN_TTL" : {
"message" : [
"Cannot use TTL for state=<stateName> in timeMode=<timeMode>, use TimeMode.ProcessingTime() instead."
],
"sqlState" : "42802"
},
"STATEFUL_PROCESSOR_TTL_DURATION_MUST_BE_POSITIVE" : {
"message" : [
"TTL duration must be greater than zero for State store operation=<operationType> on state=<stateName>."
],
"sqlState" : "42802"
},
"STATE_STORE_CANNOT_CREATE_COLUMN_FAMILY_WITH_RESERVED_CHARS" : {
"message" : [
"Failed to create column family with unsupported starting character and name=<colFamilyName>."
],
"sqlState" : "42802"
},
"STATE_STORE_CANNOT_USE_COLUMN_FAMILY_WITH_INVALID_NAME" : {
"message" : [
"Failed to perform column family operation=<operationName> with invalid name=<colFamilyName>. Column family name cannot be empty or include leading/trailing spaces or use the reserved keyword=default"
],
"sqlState" : "42802"
},
"STATE_STORE_HANDLE_NOT_INITIALIZED" : {
"message" : [
"The handle has not been initialized for this StatefulProcessor.",
"Please only use the StatefulProcessor within the transformWithState operator."
],
"sqlState" : "42802"
},
"STATE_STORE_INCORRECT_NUM_ORDERING_COLS_FOR_RANGE_SCAN" : {
"message" : [
"Incorrect number of ordering ordinals=<numOrderingCols> for range scan encoder. The number of ordering ordinals cannot be zero or greater than number of schema columns."
],
"sqlState" : "42802"
},
"STATE_STORE_INCORRECT_NUM_PREFIX_COLS_FOR_PREFIX_SCAN" : {
"message" : [
"Incorrect number of prefix columns=<numPrefixCols> for prefix scan encoder. Prefix columns cannot be zero or greater than or equal to num of schema columns."
],
"sqlState" : "42802"
},
"STATE_STORE_NULL_TYPE_ORDERING_COLS_NOT_SUPPORTED" : {
"message" : [
"Null type ordering column with name=<fieldName> at index=<index> is not supported for range scan encoder."
],
"sqlState" : "42802"
},
"STATE_STORE_UNSUPPORTED_OPERATION" : {
"message" : [
"<operationType> operation not supported with <entity>"
],
"sqlState" : "XXKST"
},
"STATE_STORE_UNSUPPORTED_OPERATION_BINARY_INEQUALITY" : {
"message" : [
"Binary inequality column is not supported with state store. Provided schema: <schema>."
],
"sqlState" : "XXKST"
},
"STATE_STORE_UNSUPPORTED_OPERATION_ON_MISSING_COLUMN_FAMILY" : {
"message" : [
"State store operation=<operationType> not supported on missing column family=<colFamilyName>."
],
"sqlState" : "42802"
},
"STATE_STORE_VARIABLE_SIZE_ORDERING_COLS_NOT_SUPPORTED" : {
"message" : [
"Variable size ordering column with name=<fieldName> at index=<index> is not supported for range scan encoder."
],
"sqlState" : "42802"
},
"STATIC_PARTITION_COLUMN_IN_INSERT_COLUMN_LIST" : {
"message" : [
"Static partition column <staticName> is also specified in the column list."
],
"sqlState" : "42713"
},
"STDS_COMMITTED_BATCH_UNAVAILABLE" : {
"message" : [
"No committed batch found, checkpoint location: <checkpointLocation>. Ensure that the query has run and committed any microbatch before stopping."
],
"sqlState" : "KD006"
},
"STDS_CONFLICT_OPTIONS" : {
"message" : [
"The options <options> cannot be specified together. Please specify the one."
],
"sqlState" : "42613"
},
"STDS_FAILED_TO_READ_STATE_SCHEMA" : {
"message" : [
"Failed to read the state schema. Either the file does not exist, or the file is corrupted. options: <sourceOptions>.",
"Rerun the streaming query to construct the state schema, and report to the corresponding communities or vendors if the error persists."
],
"sqlState" : "42K03"
},
"STDS_INTERNAL_ERROR" : {
"message" : [
"Internal error: <message>",
"Please, report this bug to the corresponding communities or vendors, and provide the full stack trace."
],
"sqlState" : "XXKST"
},
"STDS_INVALID_OPTION_VALUE" : {
"message" : [
"Invalid value for source option '<optionName>':"
],
"subClass" : {
"IS_EMPTY" : {
"message" : [
"cannot be empty."
]
},
"IS_NEGATIVE" : {
"message" : [
"cannot be negative."
]
},
"WITH_MESSAGE" : {
"message" : [
"<message>"
]
}
},
"sqlState" : "42616"
},
"STDS_NO_PARTITION_DISCOVERED_IN_STATE_STORE" : {
"message" : [
"The state does not have any partition. Please double check that the query points to the valid state. options: <sourceOptions>"
],
"sqlState" : "KD006"
},
"STDS_OFFSET_LOG_UNAVAILABLE" : {
"message" : [
"The offset log for <batchId> does not exist, checkpoint location: <checkpointLocation>.",
"Please specify the batch ID which is available for querying - you can query the available batch IDs via using state metadata data source."
],
"sqlState" : "KD006"
},
"STDS_OFFSET_METADATA_LOG_UNAVAILABLE" : {
"message" : [
"Metadata is not available for offset log for <batchId>, checkpoint location: <checkpointLocation>.",
"The checkpoint seems to be only run with older Spark version(s). Run the streaming query with the recent Spark version, so that Spark constructs the state metadata."
],
"sqlState" : "KD006"
},
"STDS_REQUIRED_OPTION_UNSPECIFIED" : {
"message" : [
"'<optionName>' must be specified."
],
"sqlState" : "42601"
},
"STREAMING_PYTHON_RUNNER_INITIALIZATION_FAILURE" : {
"message" : [
"Streaming Runner initialization failed, returned <resFromPython>. Cause: <msg>"
],
"sqlState" : "XXKST"
},
"STREAMING_STATEFUL_OPERATOR_NOT_MATCH_IN_STATE_METADATA" : {
"message" : [
"Streaming stateful operator name does not match with the operator in state metadata. This likely to happen when user adds/removes/changes stateful operator of existing streaming query.",
"Stateful operators in the metadata: [<OpsInMetadataSeq>]; Stateful operators in current batch: [<OpsInCurBatchSeq>]."
],
"sqlState" : "42K03"
},
"STREAM_FAILED" : {
"message" : [
"Query [id = <id>, runId = <runId>] terminated with exception: <message>"
],
"sqlState" : "XXKST"
},
"STRUCT_ARRAY_LENGTH_MISMATCH" : {
"message" : [
"Input row doesn't have expected number of values required by the schema. <expected> fields are required while <actual> values are provided."
],
"sqlState" : "2201E"
},
"SUM_OF_LIMIT_AND_OFFSET_EXCEEDS_MAX_INT" : {
"message" : [
"The sum of the LIMIT clause and the OFFSET clause must not be greater than the maximum 32-bit integer value (2,147,483,647) but found limit = <limit>, offset = <offset>."
],
"sqlState" : "22003"
},
"SYNTAX_DISCONTINUED" : {
"message" : [
"Support of the clause or keyword: <clause> has been discontinued in this context."
],
"subClass" : {
"BANG_EQUALS_NOT" : {
"message" : [
"The '!' keyword is only supported as an alias for the prefix operator 'NOT'.",
"Use the 'NOT' keyword instead for infix clauses such as `NOT LIKE`, `NOT IN`, `NOT BETWEEN`, etc.",
"To re-enable the '!' keyword, set \"spark.sql.legacy.bangEqualsNot\" to \"true\"."
]
}
},
"sqlState" : "42601"
},
"TABLE_OR_VIEW_ALREADY_EXISTS" : {
"message" : [
"Cannot create table or view <relationName> because it already exists.",
"Choose a different name, drop or replace the existing object, or add the IF NOT EXISTS clause to tolerate pre-existing objects."
],
"sqlState" : "42P07"
},
"TABLE_OR_VIEW_NOT_FOUND" : {
"message" : [
"The table or view <relationName> cannot be found. Verify the spelling and correctness of the schema and catalog.",
"If you did not qualify the name with a schema, verify the current_schema() output, or qualify the name with the correct schema and catalog.",
"To tolerate the error on drop use DROP VIEW IF EXISTS or DROP TABLE IF EXISTS."
],
"sqlState" : "42P01"
},
"TABLE_VALUED_FUNCTION_FAILED_TO_ANALYZE_IN_PYTHON" : {
"message" : [
"Failed to analyze the Python user defined table function: <msg>"
],
"sqlState" : "38000"
},
"TABLE_VALUED_FUNCTION_REQUIRED_METADATA_INCOMPATIBLE_WITH_CALL" : {
"message" : [
"Failed to evaluate the table function <functionName> because its table metadata <requestedMetadata>, but the function call <invalidFunctionCallProperty>."
],
"sqlState" : "22023"
},
"TABLE_VALUED_FUNCTION_REQUIRED_METADATA_INVALID" : {
"message" : [
"Failed to evaluate the table function <functionName> because its table metadata was invalid; <reason>."
],
"sqlState" : "22023"
},
"TABLE_VALUED_FUNCTION_TOO_MANY_TABLE_ARGUMENTS" : {
"message" : [
"There are too many table arguments for table-valued function.",
"It allows one table argument, but got: <num>.",
"If you want to allow it, please set \"spark.sql.allowMultipleTableArguments.enabled\" to \"true\""
],
"sqlState" : "54023"
},
"TASK_WRITE_FAILED" : {
"message" : [
"Task failed while writing rows to <path>."
],
"sqlState" : "58030"
},
"TEMP_TABLE_OR_VIEW_ALREADY_EXISTS" : {
"message" : [
"Cannot create the temporary view <relationName> because it already exists.",
"Choose a different name, drop or replace the existing view, or add the IF NOT EXISTS clause to tolerate pre-existing views."
],
"sqlState" : "42P07"
},
"TEMP_VIEW_NAME_TOO_MANY_NAME_PARTS" : {
"message" : [
"CREATE TEMPORARY VIEW or the corresponding Dataset APIs only accept single-part view names, but got: <actualName>."
],
"sqlState" : "428EK"
},
"UDTF_ALIAS_NUMBER_MISMATCH" : {
"message" : [
"The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF.",
"Expected <aliasesSize> aliases, but got <aliasesNames>.",
"Please ensure that the number of aliases provided matches the number of columns output by the UDTF."
],
"sqlState" : "42802"
},
"UDTF_INVALID_ALIAS_IN_REQUESTED_ORDERING_STRING_FROM_ANALYZE_METHOD" : {
"message" : [
"Failed to evaluate the user-defined table function because its 'analyze' method returned a requested OrderingColumn whose column name expression included an unnecessary alias <aliasName>; please remove this alias and then try the query again."
],
"sqlState" : "42802"
},
"UDTF_INVALID_REQUESTED_SELECTED_EXPRESSION_FROM_ANALYZE_METHOD_REQUIRES_ALIAS" : {
"message" : [
"Failed to evaluate the user-defined table function because its 'analyze' method returned a requested 'select' expression (<expression>) that does not include a corresponding alias; please update the UDTF to specify an alias there and then try the query again."
],
"sqlState" : "42802"
},
"UNABLE_TO_ACQUIRE_MEMORY" : {
"message" : [
"Unable to acquire <requestedBytes> bytes of memory, got <receivedBytes>."
],
"sqlState" : "53200"
},
"UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE" : {
"message" : [
"Unable to convert SQL type <toType> to Protobuf type <protobufType>."
],
"sqlState" : "42K0G"
},
"UNABLE_TO_FETCH_HIVE_TABLES" : {
"message" : [
"Unable to fetch tables of Hive database: <dbName>."
],
"sqlState" : "58030"
},
"UNABLE_TO_INFER_SCHEMA" : {
"message" : [
"Unable to infer schema for <format>. It must be specified manually."
],
"sqlState" : "42KD9"
},
"UNBOUND_SQL_PARAMETER" : {
"message" : [
"Found the unbound parameter: <name>. Please, fix `args` and provide a mapping of the parameter to either a SQL literal or collection constructor functions such as `map()`, `array()`, `struct()`."
],
"sqlState" : "42P02"
},
"UNCLOSED_BRACKETED_COMMENT" : {
"message" : [
"Found an unclosed bracketed comment. Please, append */ at the end of the comment."
],
"sqlState" : "42601"
},
"UNEXPECTED_INPUT_TYPE" : {
"message" : [
"Parameter <paramIndex> of function <functionName> requires the <requiredType> type, however <inputSql> has the type <inputType>."
],
"sqlState" : "42K09"
},
"UNEXPECTED_POSITIONAL_ARGUMENT" : {
"message" : [
"Cannot invoke function <functionName> because it contains positional argument(s) following the named argument assigned to <parameterName>; please rearrange them so the positional arguments come first and then retry the query again."
],
"sqlState" : "4274K"
},
"UNEXPECTED_SERIALIZER_FOR_CLASS" : {
"message" : [
"The class <className> has an unexpected expression serializer. Expects \"STRUCT\" or \"IF\" which returns \"STRUCT\" but found <expr>."
],
"sqlState" : "42846"
},
"UNKNOWN_PROTOBUF_MESSAGE_TYPE" : {
"message" : [
"Attempting to treat <descriptorName> as a Message, but it was <containingType>."
],
"sqlState" : "42K0G"
},
"UNPIVOT_REQUIRES_ATTRIBUTES" : {
"message" : [
"UNPIVOT requires all given <given> expressions to be columns when no <empty> expressions are given. These are not columns: [<expressions>]."
],
"sqlState" : "42K0A"
},
"UNPIVOT_REQUIRES_VALUE_COLUMNS" : {
"message" : [
"At least one value column needs to be specified for UNPIVOT, all columns specified as ids."
],
"sqlState" : "42K0A"
},
"UNPIVOT_VALUE_DATA_TYPE_MISMATCH" : {
"message" : [
"Unpivot value columns must share a least common type, some types do not: [<types>]."
],
"sqlState" : "42K09"
},
"UNPIVOT_VALUE_SIZE_MISMATCH" : {
"message" : [
"All unpivot value columns must have the same size as there are value column names (<names>)."
],
"sqlState" : "428C4"
},
"UNRECOGNIZED_PARAMETER_NAME" : {
"message" : [
"Cannot invoke function <functionName> because the function call included a named argument reference for the argument named <argumentName>, but this function does not include any signature containing an argument with this name. Did you mean one of the following? [<proposal>]."
],
"sqlState" : "4274K"
},
"UNRECOGNIZED_SQL_TYPE" : {
"message" : [
"Unrecognized SQL type - name: <typeName>, id: <jdbcType>."
],
"sqlState" : "42704"
},
"UNRESOLVABLE_TABLE_VALUED_FUNCTION" : {
"message" : [
"Could not resolve <name> to a table-valued function.",
"Please make sure that <name> is defined as a table-valued function and that all required parameters are provided correctly.",
"If <name> is not defined, please create the table-valued function before using it.",
"For more information about defining table-valued functions, please refer to the Apache Spark documentation."
],
"sqlState" : "42883"
},
"UNRESOLVED_ALL_IN_GROUP_BY" : {
"message" : [
"Cannot infer grouping columns for GROUP BY ALL based on the select clause. Please explicitly specify the grouping columns."
],
"sqlState" : "42803"
},
"UNRESOLVED_COLUMN" : {
"message" : [
"A column, variable, or function parameter with name <objectName> cannot be resolved."
],
"subClass" : {
"WITHOUT_SUGGESTION" : {
"message" : [
""
]
},
"WITH_SUGGESTION" : {
"message" : [
"Did you mean one of the following? [<proposal>]."
]
}
},
"sqlState" : "42703"
},
"UNRESOLVED_FIELD" : {
"message" : [
"A field with name <fieldName> cannot be resolved with the struct-type column <columnPath>."
],
"subClass" : {
"WITHOUT_SUGGESTION" : {
"message" : [
""
]
},
"WITH_SUGGESTION" : {
"message" : [
"Did you mean one of the following? [<proposal>]."
]
}
},
"sqlState" : "42703"
},
"UNRESOLVED_MAP_KEY" : {
"message" : [
"Cannot resolve column <objectName> as a map key. If the key is a string literal, add the single quotes '' around it."
],
"subClass" : {
"WITHOUT_SUGGESTION" : {
"message" : [
""
]
},
"WITH_SUGGESTION" : {
"message" : [
"Otherwise did you mean one of the following column(s)? [<proposal>]."
]
}
},
"sqlState" : "42703"
},
"UNRESOLVED_ROUTINE" : {
"message" : [
"Cannot resolve routine <routineName> on search path <searchPath>."
],
"sqlState" : "42883"
},
"UNRESOLVED_USING_COLUMN_FOR_JOIN" : {
"message" : [
"USING column <colName> cannot be resolved on the <side> side of the join. The <side>-side columns: [<suggestion>]."
],
"sqlState" : "42703"
},
"UNRESOLVED_VARIABLE" : {
"message" : [
"Cannot resolve variable <variableName> on search path <searchPath>."
],
"sqlState" : "42883"
},
"UNSET_NONEXISTENT_PROPERTIES" : {
"message" : [
"Attempted to unset non-existent properties [<properties>] in table <table>."
],
"sqlState" : "42K0J"
},
"UNSUPPORTED_ADD_FILE" : {
"message" : [
"Don't support add file."
],
"subClass" : {
"DIRECTORY" : {
"message" : [
"The file <path> is a directory, consider to set \"spark.sql.legacy.addSingleFileInAddFile\" to \"false\"."
]
},
"LOCAL_DIRECTORY" : {
"message" : [
"The local directory <path> is not supported in a non-local master mode."
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_ARROWTYPE" : {
"message" : [
"Unsupported arrow type <typeName>."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_CALL" : {
"message" : [
"Cannot call the method \"<methodName>\" of the class \"<className>\"."
],
"subClass" : {
"FIELD_INDEX" : {
"message" : [
"The row shall have a schema to get an index of the field <fieldName>."
]
},
"WITHOUT_SUGGESTION" : {
"message" : [
""
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_CHAR_OR_VARCHAR_AS_STRING" : {
"message" : [
"The char/varchar type can't be used in the table schema.",
"If you want Spark treat them as string type as same as Spark 3.0 and earlier, please set \"spark.sql.legacy.charVarcharAsString\" to \"true\"."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_COLLATION" : {
"message" : [
"Collation <collationName> is not supported for:"
],
"subClass" : {
"FOR_FUNCTION" : {
"message" : [
"function <functionName>. Please try to use a different collation."
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY" : {
"message" : [
"Unsupported data source type for direct query on files: <dataSourceType>"
],
"sqlState" : "0A000"
},
"UNSUPPORTED_DATATYPE" : {
"message" : [
"Unsupported data type <typeName>."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_DATA_SOURCE_SAVE_MODE" : {
"message" : [
"The data source \"<source>\" cannot be written in the <createMode> mode. Please use either the \"Append\" or \"Overwrite\" mode instead."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_DATA_TYPE_FOR_DATASOURCE" : {
"message" : [
"The <format> datasource doesn't support the column <columnName> of the type <columnType>."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_DEFAULT_VALUE" : {
"message" : [
"DEFAULT column values is not supported."
],
"subClass" : {
"WITHOUT_SUGGESTION" : {
"message" : [
""
]
},
"WITH_SUGGESTION" : {
"message" : [
"Enable it by setting \"spark.sql.defaultColumn.enabled\" to \"true\"."
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_DESERIALIZER" : {
"message" : [
"The deserializer is not supported:"
],
"subClass" : {
"DATA_TYPE_MISMATCH" : {
"message" : [
"need a(n) <desiredType> field but got <dataType>."
]
},
"FIELD_NUMBER_MISMATCH" : {
"message" : [
"try to map <schema> to Tuple<ordinal>, but failed as the number of fields does not line up."
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_EXPRESSION_GENERATED_COLUMN" : {
"message" : [
"Cannot create generated column <fieldName> with generation expression <expressionStr> because <reason>."
],
"sqlState" : "42621"
},
"UNSUPPORTED_EXPR_FOR_OPERATOR" : {
"message" : [
"A query operator contains one or more unsupported expressions.",
"Consider to rewrite it to avoid window functions, aggregate functions, and generator functions in the WHERE clause.",
"Invalid expressions: [<invalidExprSqls>]"
],
"sqlState" : "42K0E"
},
"UNSUPPORTED_EXPR_FOR_PARAMETER" : {
"message" : [
"A query parameter contains unsupported expression.",
"Parameters can either be variables or literals.",
"Invalid expression: [<invalidExprSql>]"
],
"sqlState" : "42K0E"
},
"UNSUPPORTED_EXPR_FOR_WINDOW" : {
"message" : [
"Expression <sqlExpr> not supported within a window function."
],
"sqlState" : "42P20"
},
"UNSUPPORTED_FEATURE" : {
"message" : [
"The feature is not supported:"
],
"subClass" : {
"AES_MODE" : {
"message" : [
"AES-<mode> with the padding <padding> by the <functionName> function."
]
},
"AES_MODE_AAD" : {
"message" : [
"<functionName> with AES-<mode> does not support additional authenticate data (AAD)."
]
},
"AES_MODE_IV" : {
"message" : [
"<functionName> with AES-<mode> does not support initialization vectors (IVs)."
]
},
"ANALYZE_UNCACHED_TEMP_VIEW" : {
"message" : [
"The ANALYZE TABLE FOR COLUMNS command can operate on temporary views that have been cached already. Consider to cache the view <viewName>."
]
},
"ANALYZE_UNSUPPORTED_COLUMN_TYPE" : {
"message" : [
"The ANALYZE TABLE FOR COLUMNS command does not support the type <columnType> of the column <columnName> in the table <tableName>."
]
},
"ANALYZE_VIEW" : {
"message" : [
"The ANALYZE TABLE command does not support views."
]
},
"CATALOG_OPERATION" : {
"message" : [
"Catalog <catalogName> does not support <operation>."
]
},
"COLLATION" : {
"message" : [
"Collation is not yet supported."
]
},
"COMBINATION_QUERY_RESULT_CLAUSES" : {
"message" : [
"Combination of ORDER BY/SORT BY/DISTRIBUTE BY/CLUSTER BY."
]
},
"COMMENT_NAMESPACE" : {
"message" : [
"Attach a comment to the namespace <namespace>."
]
},
"DESC_TABLE_COLUMN_PARTITION" : {
"message" : [
"DESC TABLE COLUMN for a specific partition."
]
},
"DROP_DATABASE" : {
"message" : [
"Drop the default database <database>."
]
},
"DROP_NAMESPACE" : {
"message" : [
"Drop the namespace <namespace>."
]
},
"HIVE_TABLE_TYPE" : {
"message" : [
"The <tableName> is hive <tableType>."
]
},
"HIVE_WITH_ANSI_INTERVALS" : {
"message" : [
"Hive table <tableName> with ANSI intervals."
]
},
"INSERT_PARTITION_SPEC_IF_NOT_EXISTS" : {
"message" : [
"INSERT INTO <tableName> with IF NOT EXISTS in the PARTITION spec."
]
},
"LATERAL_COLUMN_ALIAS_IN_AGGREGATE_FUNC" : {
"message" : [
"Referencing a lateral column alias <lca> in the aggregate function <aggFunc>."
]
},
"LATERAL_COLUMN_ALIAS_IN_AGGREGATE_WITH_WINDOW_AND_HAVING" : {
"message" : [
"Referencing lateral column alias <lca> in the aggregate query both with window expressions and with having clause. Please rewrite the aggregate query by removing the having clause or removing lateral alias reference in the SELECT list."
]
},
"LATERAL_COLUMN_ALIAS_IN_GROUP_BY" : {
"message" : [
"Referencing a lateral column alias via GROUP BY alias/ALL is not supported yet."
]
},
"LATERAL_COLUMN_ALIAS_IN_WINDOW" : {
"message" : [
"Referencing a lateral column alias <lca> in window expression <windowExpr>."
]
},
"LATERAL_JOIN_USING" : {
"message" : [
"JOIN USING with LATERAL correlation."
]
},
"LITERAL_TYPE" : {
"message" : [
"Literal for '<value>' of <type>."
]
},
"MULTIPLE_BUCKET_TRANSFORMS" : {
"message" : [
"Multiple bucket TRANSFORMs."
]
},
"MULTI_ACTION_ALTER" : {
"message" : [
"The target JDBC server hosting table <tableName> does not support ALTER TABLE with multiple actions. Split the ALTER TABLE up into individual actions to avoid this error."
]
},
"ORC_TYPE_CAST" : {
"message" : [
"Unable to convert <orcType> of Orc to data type <toType>."
]
},
"OVERWRITE_BY_SUBQUERY" : {
"message" : [
"INSERT OVERWRITE with a subquery condition."
]
},
"PANDAS_UDAF_IN_PIVOT" : {
"message" : [
"Pandas user defined aggregate function in the PIVOT clause."
]
},
"PARAMETER_MARKER_IN_UNEXPECTED_STATEMENT" : {
"message" : [
"Parameter markers are not allowed in <statement>."
]
},
"PARTITION_WITH_NESTED_COLUMN_IS_UNSUPPORTED" : {
"message" : [
"Invalid partitioning: <cols> is missing or is in a map or array."
]
},
"PIVOT_AFTER_GROUP_BY" : {
"message" : [
"PIVOT clause following a GROUP BY clause. Consider pushing the GROUP BY into a subquery."
]
},
"PIVOT_TYPE" : {
"message" : [
"Pivoting by the value '<value>' of the column data type <type>."
]
},
"PURGE_PARTITION" : {
"message" : [
"Partition purge."
]
},
"PURGE_TABLE" : {
"message" : [
"Purge table."
]
},
"PYTHON_UDF_IN_ON_CLAUSE" : {
"message" : [
"Python UDF in the ON clause of a <joinType> JOIN. In case of an INNER JOIN consider rewriting to a CROSS JOIN with a WHERE clause."
]
},
"REMOVE_NAMESPACE_COMMENT" : {
"message" : [
"Remove a comment from the namespace <namespace>."
]
},
"REPLACE_NESTED_COLUMN" : {
"message" : [
"The replace function does not support nested column <colName>."
]
},
"SET_NAMESPACE_PROPERTY" : {
"message" : [
"<property> is a reserved namespace property, <msg>."
]
},
"SET_OPERATION_ON_MAP_TYPE" : {
"message" : [
"Cannot have MAP type columns in DataFrame which calls set operations (INTERSECT, EXCEPT, etc.), but the type of column <colName> is <dataType>."
]
},
"SET_PROPERTIES_AND_DBPROPERTIES" : {
"message" : [
"set PROPERTIES and DBPROPERTIES at the same time."
]
},
"SET_TABLE_PROPERTY" : {
"message" : [
"<property> is a reserved table property, <msg>."
]
},
"SET_VARIABLE_USING_SET" : {
"message" : [
"<variableName> is a VARIABLE and cannot be updated using the SET statement. Use SET VARIABLE <variableName> = ... instead."
]
},
"STATE_STORE_MULTIPLE_COLUMN_FAMILIES" : {
"message" : [
"Creating multiple column families with <stateStoreProvider> is not supported."
]
},
"STATE_STORE_REMOVING_COLUMN_FAMILIES" : {
"message" : [
"Removing column families with <stateStoreProvider> is not supported."
]
},
"STATE_STORE_TTL" : {
"message" : [
"State TTL with <stateStoreProvider> is not supported. Please use RocksDBStateStoreProvider."
]
},
"TABLE_OPERATION" : {
"message" : [
"Table <tableName> does not support <operation>. Please check the current catalog and namespace to make sure the qualified table name is expected, and also check the catalog implementation which is configured by \"spark.sql.catalog\"."
]
},
"TIME_TRAVEL" : {
"message" : [
"Time travel on the relation: <relationId>."
]
},
"TOO_MANY_TYPE_ARGUMENTS_FOR_UDF_CLASS" : {
"message" : [
"UDF class with <num> type arguments."
]
},
"TRANSFORM_DISTINCT_ALL" : {
"message" : [
"TRANSFORM with the DISTINCT/ALL clause."
]
},
"TRANSFORM_NON_HIVE" : {
"message" : [
"TRANSFORM with SERDE is only supported in hive mode."
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_GENERATOR" : {
"message" : [
"The generator is not supported:"
],
"subClass" : {
"MULTI_GENERATOR" : {
"message" : [
"only one generator allowed per SELECT clause but found <num>: <generators>."
]
},
"NESTED_IN_EXPRESSIONS" : {
"message" : [
"nested in expressions <expression>."
]
},
"NOT_GENERATOR" : {
"message" : [
"<functionName> is expected to be a generator. However, its class is <classCanonicalName>, which is not a generator."
]
},
"OUTSIDE_SELECT" : {
"message" : [
"outside the SELECT clause, found: <plan>."
]
}
},
"sqlState" : "42K0E"
},
"UNSUPPORTED_GROUPING_EXPRESSION" : {
"message" : [
"grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup."
],
"sqlState" : "42K0E"
},
"UNSUPPORTED_INSERT" : {
"message" : [
"Can't insert into the target."
],
"subClass" : {
"MULTI_PATH" : {
"message" : [
"Can only write data to relations with a single path but given paths are <paths>."
]
},
"NOT_ALLOWED" : {
"message" : [
"The target relation <relationId> does not allow insertion."
]
},
"NOT_PARTITIONED" : {
"message" : [
"The target relation <relationId> is not partitioned."
]
},
"RDD_BASED" : {
"message" : [
"An RDD-based table is not allowed."
]
},
"READ_FROM" : {
"message" : [
"The target relation <relationId> is also being read from."
]
}
},
"sqlState" : "42809"
},
"UNSUPPORTED_MERGE_CONDITION" : {
"message" : [
"MERGE operation contains unsupported <condName> condition."
],
"subClass" : {
"AGGREGATE" : {
"message" : [
"Aggregates are not allowed: <cond>."
]
},
"NON_DETERMINISTIC" : {
"message" : [
"Non-deterministic expressions are not allowed: <cond>."
]
},
"SUBQUERY" : {
"message" : [
"Subqueries are not allowed: <cond>."
]
}
},
"sqlState" : "42K0E"
},
"UNSUPPORTED_OVERWRITE" : {
"message" : [
"Can't overwrite the target that is also being read from."
],
"subClass" : {
"PATH" : {
"message" : [
"The target path is <path>."
]
},
"TABLE" : {
"message" : [
"The target table is <table>."
]
}
},
"sqlState" : "42902"
},
"UNSUPPORTED_SAVE_MODE" : {
"message" : [
"The save mode <saveMode> is not supported for:"
],
"subClass" : {
"EXISTENT_PATH" : {
"message" : [
"an existent path."
]
},
"NON_EXISTENT_PATH" : {
"message" : [
"a non-existent path."
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY" : {
"message" : [
"Unsupported subquery expression:"
],
"subClass" : {
"ACCESSING_OUTER_QUERY_COLUMN_IS_NOT_ALLOWED" : {
"message" : [
"Accessing outer query column is not allowed in this location:",
"<treeNode>"
]
},
"AGGREGATE_FUNCTION_MIXED_OUTER_LOCAL_REFERENCES" : {
"message" : [
"Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: <function>."
]
},
"CORRELATED_COLUMN_IS_NOT_ALLOWED_IN_PREDICATE" : {
"message" : [
"Correlated column is not allowed in predicate:",
"<treeNode>"
]
},
"CORRELATED_COLUMN_NOT_FOUND" : {
"message" : [
"A correlated outer name reference within a subquery expression body was not found in the enclosing query: <value>."
]
},
"CORRELATED_REFERENCE" : {
"message" : [
"Expressions referencing the outer query are not supported outside of WHERE/HAVING clauses: <sqlExprs>."
]
},
"HIGHER_ORDER_FUNCTION" : {
"message" : [
"Subquery expressions are not supported within higher-order functions. Please remove all subquery expressions from higher-order functions and then try the query again."
]
},
"LATERAL_JOIN_CONDITION_NON_DETERMINISTIC" : {
"message" : [
"Lateral join condition cannot be non-deterministic: <condition>."
]
},
"MUST_AGGREGATE_CORRELATED_SCALAR_SUBQUERY" : {
"message" : [
"Correlated scalar subqueries must be aggregated to return at most one row."
]
},
"NON_CORRELATED_COLUMNS_IN_GROUP_BY" : {
"message" : [
"A GROUP BY clause in a scalar correlated subquery cannot contain non-correlated columns: <value>."
]
},
"NON_DETERMINISTIC_LATERAL_SUBQUERIES" : {
"message" : [
"Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row:",
"<treeNode>"
]
},
"UNSUPPORTED_CORRELATED_REFERENCE_DATA_TYPE" : {
"message" : [
"Correlated column reference '<expr>' cannot be <dataType> type."
]
},
"UNSUPPORTED_CORRELATED_SCALAR_SUBQUERY" : {
"message" : [
"Correlated scalar subqueries can only be used in filters, aggregations, projections, and UPDATE/MERGE/DELETE commands:",
"<treeNode>"
]
},
"UNSUPPORTED_IN_EXISTS_SUBQUERY" : {
"message" : [
"IN/EXISTS predicate subqueries can only be used in filters, joins, aggregations, window functions, projections, and UPDATE/MERGE/DELETE commands:",
"<treeNode>"
]
},
"UNSUPPORTED_TABLE_ARGUMENT" : {
"message" : [
"Table arguments are used in a function where they are not supported:",
"<treeNode>"
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_TYPED_LITERAL" : {
"message" : [
"Literals of the type <unsupportedType> are not supported. Supported types are <supportedTypes>."
],
"sqlState" : "0A000"
},
"UNTYPED_SCALA_UDF" : {
"message" : [
"You're using untyped Scala UDF, which does not have the input type information. Spark may blindly pass null to the Scala closure with primitive-type argument, and the closure will see the default value of the Java type for the null argument, e.g. `udf((x: Int) => x, IntegerType)`, the result is 0 for null input. To get rid of this error, you could:",
"1. use typed Scala UDF APIs(without return type parameter), e.g. `udf((x: Int) => x)`.",
"2. use Java UDF APIs, e.g. `udf(new UDF1[String, Integer] { override def call(s: String): Integer = s.length() }, IntegerType)`, if input types are all non primitive.",
"3. set \"spark.sql.legacy.allowUntypedScalaUDF\" to \"true\" and use this API with caution."
],
"sqlState" : "42K0E"
},
"USER_RAISED_EXCEPTION" : {
"message" : [
"<errorMessage>"
],
"sqlState" : "P0001"
},
"USER_RAISED_EXCEPTION_PARAMETER_MISMATCH" : {
"message" : [
"The `raise_error()` function was used to raise error class: <errorClass> which expects parameters: <expectedParms>.",
"The provided parameters <providedParms> do not match the expected parameters.",
"Please make sure to provide all expected parameters."
],
"sqlState" : "P0001"
},
"USER_RAISED_EXCEPTION_UNKNOWN_ERROR_CLASS" : {
"message" : [
"The `raise_error()` function was used to raise an unknown error class: <errorClass>"
],
"sqlState" : "P0001"
},
"VARIABLE_ALREADY_EXISTS" : {
"message" : [
"Cannot create the variable <variableName> because it already exists.",
"Choose a different name, or drop or replace the existing variable."
],
"sqlState" : "42723"
},
"VARIABLE_NOT_FOUND" : {
"message" : [
"The variable <variableName> cannot be found. Verify the spelling and correctness of the schema and catalog.",
"If you did not qualify the name with a schema and catalog, verify the current_schema() output, or qualify the name with the correct schema and catalog.",
"To tolerate the error on drop use DROP VARIABLE IF EXISTS."
],
"sqlState" : "42883"
},
"VARIANT_CONSTRUCTOR_SIZE_LIMIT" : {
"message" : [
"Cannot construct a Variant larger than 16 MiB. The maximum allowed size of a Variant value is 16 MiB."
],
"sqlState" : "22023"
},
"VARIANT_DUPLICATE_KEY" : {
"message" : [
"Failed to build variant because of a duplicate object key `<key>`."
],
"sqlState" : "22023"
},
"VARIANT_SIZE_LIMIT" : {
"message" : [
"Cannot build variant bigger than <sizeLimit> in <functionName>.",
"Please avoid large input strings to this expression (for example, add function calls(s) to check the expression size and convert it to NULL first if it is too big)."
],
"sqlState" : "22023"
},
"VIEW_ALREADY_EXISTS" : {
"message" : [
"Cannot create view <relationName> because it already exists.",
"Choose a different name, drop or replace the existing object, or add the IF NOT EXISTS clause to tolerate pre-existing objects."
],
"sqlState" : "42P07"
},
"VIEW_EXCEED_MAX_NESTED_DEPTH" : {
"message" : [
"The depth of view <viewName> exceeds the maximum view resolution depth (<maxNestedDepth>).",
"Analysis is aborted to avoid errors. If you want to work around this, please try to increase the value of \"spark.sql.view.maxNestedViewDepth\"."
],
"sqlState" : "54K00"
},
"VIEW_NOT_FOUND" : {
"message" : [
"The view <relationName> cannot be found. Verify the spelling and correctness of the schema and catalog.",
"If you did not qualify the name with a schema, verify the current_schema() output, or qualify the name with the correct schema and catalog.",
"To tolerate the error on drop use DROP VIEW IF EXISTS."
],
"sqlState" : "42P01"
},
"WINDOW_FUNCTION_AND_FRAME_MISMATCH" : {
"message" : [
"<funcName> function can only be evaluated in an ordered row-based window frame with a single offset: <windowExpr>."
],
"sqlState" : "42K0E"
},
"WINDOW_FUNCTION_WITHOUT_OVER_CLAUSE" : {
"message" : [
"Window function <funcName> requires an OVER clause."
],
"sqlState" : "42601"
},
"WRITE_STREAM_NOT_ALLOWED" : {
"message" : [
"`writeStream` can be called only on streaming Dataset/DataFrame."
],
"sqlState" : "42601"
},
"WRONG_COMMAND_FOR_OBJECT_TYPE" : {
"message" : [
"The operation <operation> requires a <requiredType>. But <objectName> is a <foundType>. Use <alternative> instead."
],
"sqlState" : "42809"
},
"WRONG_NUM_ARGS" : {
"message" : [
"The <functionName> requires <expectedNum> parameters but the actual number is <actualNum>."
],
"subClass" : {
"WITHOUT_SUGGESTION" : {
"message" : [
"Please, refer to '<docroot>/sql-ref-functions.html' for a fix."
]
},
"WITH_SUGGESTION" : {
"message" : [
"If you have to call this function with <legacyNum> parameters, set the legacy configuration <legacyConfKey> to <legacyConfValue>."
]
}
},
"sqlState" : "42605"
},
"XML_ROW_TAG_MISSING" : {
"message" : [
"<rowTag> option is required for reading files in XML format."
],
"sqlState" : "42KDF"
},
"_LEGACY_ERROR_TEMP_0001" : {
"message" : [
"Invalid InsertIntoContext."
]
},
"_LEGACY_ERROR_TEMP_0004" : {
"message" : [
"Empty source for merge: you should specify a source table/subquery in merge."
]
},
"_LEGACY_ERROR_TEMP_0006" : {
"message" : [
"The number of inserted values cannot match the fields."
]
},
"_LEGACY_ERROR_TEMP_0008" : {
"message" : [
"There must be at least one WHEN clause in a MERGE statement."
]
},
"_LEGACY_ERROR_TEMP_0012" : {
"message" : [
"DISTRIBUTE BY is not supported."
]
},
"_LEGACY_ERROR_TEMP_0014" : {
"message" : [
"TABLESAMPLE does not accept empty inputs."
]
},
"_LEGACY_ERROR_TEMP_0015" : {
"message" : [
"TABLESAMPLE(<msg>) is not supported."
]
},
"_LEGACY_ERROR_TEMP_0016" : {
"message" : [
"<bytesStr> is not a valid byte length literal, expected syntax: DIGIT+ ('B' | 'K' | 'M' | 'G')."
]
},
"_LEGACY_ERROR_TEMP_0018" : {
"message" : [
"Function trim doesn't support with type <trimOption>. Please use BOTH, LEADING or TRAILING as trim type."
]
},
"_LEGACY_ERROR_TEMP_0024" : {
"message" : [
"Can only have a single from-to unit in the interval literal syntax."
]
},
"_LEGACY_ERROR_TEMP_0026" : {
"message" : [
"Can only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: <value>."
]
},
"_LEGACY_ERROR_TEMP_0027" : {
"message" : [
"The value of from-to unit must be a string."
]
},
"_LEGACY_ERROR_TEMP_0028" : {
"message" : [
"Intervals FROM <from> TO <to> are not supported."
]
},
"_LEGACY_ERROR_TEMP_0029" : {
"message" : [
"Cannot mix year-month and day-time fields: <literal>."
]
},
"_LEGACY_ERROR_TEMP_0031" : {
"message" : [
"Invalid number of buckets: <describe>."
]
},
"_LEGACY_ERROR_TEMP_0032" : {
"message" : [
"Duplicated table paths found: '<pathOne>' and '<pathTwo>'. LOCATION and the case insensitive key 'path' in OPTIONS are all used to indicate the custom table path, you can only specify one of them."
]
},
"_LEGACY_ERROR_TEMP_0033" : {
"message" : [
"Expected either STORED AS or STORED BY, not both."
]
},
"_LEGACY_ERROR_TEMP_0034" : {
"message" : [
"<operation> is not supported in Hive-style <command><msg>."
]
},
"_LEGACY_ERROR_TEMP_0035" : {
"message" : [
"Operation not allowed: <message>."
]
},
"_LEGACY_ERROR_TEMP_0037" : {
"message" : [
"It is not allowed to add catalog/namespace prefix <quoted> to the table name in CACHE TABLE AS SELECT."
]
},
"_LEGACY_ERROR_TEMP_0038" : {
"message" : [
"CTE definition can't have duplicate names: <duplicateNames>."
]
},
"_LEGACY_ERROR_TEMP_0039" : {
"message" : [
"Unsupported SQL statement."
]
},
"_LEGACY_ERROR_TEMP_0043" : {
"message" : [
"Expected format is 'RESET' or 'RESET key'. If you want to include special characters in key, please use quotes, e.g., RESET `key`."
]
},
"_LEGACY_ERROR_TEMP_0044" : {
"message" : [
"The interval value must be in the range of [-18, +18] hours with second precision."
]
},
"_LEGACY_ERROR_TEMP_0045" : {
"message" : [
"Invalid time zone displacement value."
]
},
"_LEGACY_ERROR_TEMP_0046" : {
"message" : [
"CREATE TEMPORARY TABLE without a provider is not allowed."
]
},
"_LEGACY_ERROR_TEMP_0047" : {
"message" : [
"'ROW FORMAT' must be used with 'STORED AS'."
]
},
"_LEGACY_ERROR_TEMP_0048" : {
"message" : [
"Unsupported operation: Used defined record reader/writer classes."
]
},
"_LEGACY_ERROR_TEMP_0049" : {
"message" : [
"Directory path and 'path' in OPTIONS should be specified one, but not both."
]
},
"_LEGACY_ERROR_TEMP_0051" : {
"message" : [
"Empty set in <element> grouping sets is not supported."
]
},
"_LEGACY_ERROR_TEMP_0052" : {
"message" : [
"CREATE VIEW with both IF NOT EXISTS and REPLACE is not allowed."
]
},
"_LEGACY_ERROR_TEMP_0053" : {
"message" : [
"It is not allowed to define a TEMPORARY view with IF NOT EXISTS."
]
},
"_LEGACY_ERROR_TEMP_0056" : {
"message" : [
"Invalid time travel spec: <reason>."
]
},
"_LEGACY_ERROR_TEMP_0060" : {
"message" : [
"<msg>."
]
},
"_LEGACY_ERROR_TEMP_0062" : {
"message" : [
"<msg>."
]
},
"_LEGACY_ERROR_TEMP_0063" : {
"message" : [
"<msg>."
]
},
"_LEGACY_ERROR_TEMP_0064" : {
"message" : [
"<msg>."
]
},
"_LEGACY_ERROR_TEMP_1000" : {
"message" : [
"LEGACY store assignment policy is disallowed in Spark data source V2. Please set the configuration <configKey> to other values."
]
},
"_LEGACY_ERROR_TEMP_1002" : {
"message" : [
"Unable to generate an encoder for inner class `<className>` without access to the scope that this class was defined in.",
"Try moving this class out of its parent class."
]
},
"_LEGACY_ERROR_TEMP_1004" : {
"message" : [
"Window specification <windowName> is not defined in the WINDOW clause."
]
},
"_LEGACY_ERROR_TEMP_1005" : {
"message" : [
"<expr> doesn't show up in the GROUP BY list <groupByAliases>."
]
},
"_LEGACY_ERROR_TEMP_1006" : {
"message" : [
"Aggregate expression required for pivot, but '<sql>' did not appear in any aggregate function."
]
},
"_LEGACY_ERROR_TEMP_1007" : {
"message" : [
"Cannot write into temp view <quoted> as it's not a data source v2 relation."
]
},
"_LEGACY_ERROR_TEMP_1008" : {
"message" : [
"<quoted> is not a temp view of streaming logical plan, please use batch API such as `DataFrameReader.table` to read it."
]
},
"_LEGACY_ERROR_TEMP_1011" : {
"message" : [
"Writing into a view is not allowed. View: <identifier>."
]
},
"_LEGACY_ERROR_TEMP_1012" : {
"message" : [
"Cannot write into v1 table: <identifier>."
]
},
"_LEGACY_ERROR_TEMP_1017" : {
"message" : [
"<name> is a built-in/temporary function. '<cmd>' expects a persistent function.<hintStr>."
]
},
"_LEGACY_ERROR_TEMP_1018" : {
"message" : [
"<quoted> is a permanent view, which is not supported by streaming reading API such as `DataStreamReader.table` yet."
]
},
"_LEGACY_ERROR_TEMP_1021" : {
"message" : [
"count(<targetString>.*) is not allowed. Please use count(*) or expand the columns manually, e.g. count(col1, col2)."
]
},
"_LEGACY_ERROR_TEMP_1030" : {
"message" : [
"Window aggregate function with filter predicate is not supported yet."
]
},
"_LEGACY_ERROR_TEMP_1031" : {
"message" : [
"It is not allowed to use a window function inside an aggregate function. Please use the inner window function in a sub-query."
]
},
"_LEGACY_ERROR_TEMP_1032" : {
"message" : [
"<expr> does not have any WindowExpression."
]
},
"_LEGACY_ERROR_TEMP_1033" : {
"message" : [
"<expr> has multiple Window Specifications (<distinctWindowSpec>).",
"Please file a bug report with this error message, stack trace, and the query."
]
},
"_LEGACY_ERROR_TEMP_1034" : {
"message" : [
"It is not allowed to use window functions inside <clauseName> clause."
]
},
"_LEGACY_ERROR_TEMP_1035" : {
"message" : [
"Cannot specify window frame for <prettyName> function."
]
},
"_LEGACY_ERROR_TEMP_1036" : {
"message" : [
"Window Frame <wf> must match the required frame <required>."
]
},
"_LEGACY_ERROR_TEMP_1037" : {
"message" : [
"Window function <wf> requires window to be ordered, please add ORDER BY clause. For example SELECT <wf>(value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table."
]
},
"_LEGACY_ERROR_TEMP_1039" : {
"message" : [
"Multiple time/session window expressions would result in a cartesian product of rows, therefore they are currently not supported."
]
},
"_LEGACY_ERROR_TEMP_1040" : {
"message" : [
"Gap duration expression used in session window must be CalendarIntervalType, but got <dt>."
]
},
"_LEGACY_ERROR_TEMP_1045" : {
"message" : [
"ALTER TABLE SET LOCATION does not support partition for v2 tables."
]
},
"_LEGACY_ERROR_TEMP_1046" : {
"message" : [
"Join strategy hint parameter should be an identifier or string but was <unsupported> (<class>)."
]
},
"_LEGACY_ERROR_TEMP_1047" : {
"message" : [
"<hintName> Hint parameter should include columns, but <invalidParams> found."
]
},
"_LEGACY_ERROR_TEMP_1048" : {
"message" : [
"<hintName> Hint expects a partition number as a parameter."
]
},
"_LEGACY_ERROR_TEMP_1049" : {
"message" : [
"Syntax error in attribute name: <name>."
]
},
"_LEGACY_ERROR_TEMP_1050" : {
"message" : [
"Can only star expand struct data types. Attribute: `<attributes>`."
]
},
"_LEGACY_ERROR_TEMP_1052" : {
"message" : [
"ADD COLUMN with v1 tables cannot specify NOT NULL."
]
},
"_LEGACY_ERROR_TEMP_1053" : {
"message" : [
"ALTER COLUMN with v1 tables cannot specify NOT NULL."
]
},
"_LEGACY_ERROR_TEMP_1054" : {
"message" : [
"ALTER COLUMN cannot find column <colName> in v1 table. Available: <fieldNames>."
]
},
"_LEGACY_ERROR_TEMP_1057" : {
"message" : [
"SHOW COLUMNS with conflicting databases: '<dbA>' != '<dbB>'."
]
},
"_LEGACY_ERROR_TEMP_1058" : {
"message" : [
"Cannot create table with both USING <provider> and <serDeInfo>."
]
},
"_LEGACY_ERROR_TEMP_1059" : {
"message" : [
"STORED AS with file format '<serdeInfo>' is invalid."
]
},
"_LEGACY_ERROR_TEMP_1060" : {
"message" : [
"<command> does not support nested column: <column>."
]
},
"_LEGACY_ERROR_TEMP_1066" : {
"message" : [
"<database> is a system preserved database, you cannot create a database with this name."
]
},
"_LEGACY_ERROR_TEMP_1068" : {
"message" : [
"<database> is a system preserved database, you cannot use it as current database. To access global temporary views, you should use qualified name with the GLOBAL_TEMP_DATABASE, e.g. SELECT * FROM <database>.viewName."
]
},
"_LEGACY_ERROR_TEMP_1069" : {
"message" : [
"CREATE EXTERNAL TABLE must be accompanied by LOCATION."
]
},
"_LEGACY_ERROR_TEMP_1071" : {
"message" : [
"Some existing schema fields (<nonExistentColumnNames>) are not present in the new schema. We don't support dropping columns yet."
]
},
"_LEGACY_ERROR_TEMP_1072" : {
"message" : [
"Only the tables/views belong to the same database can be retrieved. Querying tables/views are <qualifiedTableNames>."
]
},
"_LEGACY_ERROR_TEMP_1073" : {
"message" : [
"RENAME TABLE source and destination databases do not match: '<db>' != '<newDb>'."
]
},
"_LEGACY_ERROR_TEMP_1074" : {
"message" : [
"RENAME TEMPORARY VIEW from '<oldName>' to '<newName>': cannot specify database name '<db>' in the destination table."
]
},
"_LEGACY_ERROR_TEMP_1076" : {
"message" : [
"Partition spec is invalid. <details>."
]
},
"_LEGACY_ERROR_TEMP_1079" : {
"message" : [
"Resource Type '<resourceType>' is not supported."
]
},
"_LEGACY_ERROR_TEMP_1080" : {
"message" : [
"Table <identifier> did not specify database."
]
},
"_LEGACY_ERROR_TEMP_1081" : {
"message" : [
"Table <identifier> did not specify locationUri."
]
},
"_LEGACY_ERROR_TEMP_1082" : {
"message" : [
"Partition [<specString>] did not specify locationUri."
]
},
"_LEGACY_ERROR_TEMP_1083" : {
"message" : [
"Number of buckets should be greater than 0 but less than or equal to bucketing.maxBuckets (`<bucketingMaxBuckets>`). Got `<numBuckets>`."
]
},
"_LEGACY_ERROR_TEMP_1089" : {
"message" : [
"Column statistics deserialization is not supported for column <name> of data type: <dataType>."
]
},
"_LEGACY_ERROR_TEMP_1090" : {
"message" : [
"Column statistics serialization is not supported for column <colName> of data type: <dataType>."
]
},
"_LEGACY_ERROR_TEMP_1097" : {
"message" : [
"The field for corrupt records must be string type and nullable."
]
},
"_LEGACY_ERROR_TEMP_1098" : {
"message" : [
"DataType '<x>' is not supported by <className>."
]
},
"_LEGACY_ERROR_TEMP_1099" : {
"message" : [
"<funcName>() doesn't support the <mode> mode. Acceptable modes are <permissiveMode> and <failFastMode>."
]
},
"_LEGACY_ERROR_TEMP_1103" : {
"message" : [
"Unsupported component type <clz> in arrays."
]
},
"_LEGACY_ERROR_TEMP_1104" : {
"message" : [
"The second argument should be a double literal."
]
},
"_LEGACY_ERROR_TEMP_1107" : {
"message" : [
"Table <table> declares <batchWrite> capability but <v2WriteClassName> is not an instance of <v1WriteClassName>."
]
},
"_LEGACY_ERROR_TEMP_1108" : {
"message" : [
"Delete by condition with subquery is not supported: <condition>."
]
},
"_LEGACY_ERROR_TEMP_1109" : {
"message" : [
"Exec update failed: cannot translate expression to source filter: <f>."
]
},
"_LEGACY_ERROR_TEMP_1110" : {
"message" : [
"Cannot delete from table <table> where <filters>."
]
},
"_LEGACY_ERROR_TEMP_1111" : {
"message" : [
"DESCRIBE does not support partition for v2 tables."
]
},
"_LEGACY_ERROR_TEMP_1114" : {
"message" : [
"The streaming sources in a query do not have a common supported execution mode.",
"Sources support micro-batch: <microBatchSources>.",
"Sources support continuous: <continuousSources>."
]
},
"_LEGACY_ERROR_TEMP_1120" : {
"message" : [
"Unsupported NamespaceChange <changes> in JDBC catalog."
]
},
"_LEGACY_ERROR_TEMP_1121" : {
"message" : [
"Table does not support <cmd>: <table>."
]
},
"_LEGACY_ERROR_TEMP_1122" : {
"message" : [
"Table <table> is not a row-level operation table."
]
},
"_LEGACY_ERROR_TEMP_1123" : {
"message" : [
"Cannot rename a table with ALTER VIEW. Please use ALTER TABLE instead."
]
},
"_LEGACY_ERROR_TEMP_1125" : {
"message" : [
"Database from v1 session catalog is not specified."
]
},
"_LEGACY_ERROR_TEMP_1126" : {
"message" : [
"Nested databases are not supported by v1 session catalog: <catalog>."
]
},
"_LEGACY_ERROR_TEMP_1127" : {
"message" : [
"Invalid partitionExprs specified: <sortOrders> For range partitioning use REPARTITION_BY_RANGE instead."
]
},
"_LEGACY_ERROR_TEMP_1128" : {
"message" : [
"Failed to resolve the schema for <format> for the partition column: <partitionColumn>. It must be specified manually."
]
},
"_LEGACY_ERROR_TEMP_1131" : {
"message" : [
"Data source <className> does not support <outputMode> output mode."
]
},
"_LEGACY_ERROR_TEMP_1132" : {
"message" : [
"A schema needs to be specified when using <className>."
]
},
"_LEGACY_ERROR_TEMP_1133" : {
"message" : [
"The user-specified schema doesn't match the actual schema:",
"user-specified: <schema>, actual: <actualSchema>. If you're using",
"DataFrameReader.schema API or creating a table, please do not specify the schema.",
"Or if you're scanning an existed table, please drop it and re-create it."
]
},
"_LEGACY_ERROR_TEMP_1134" : {
"message" : [
"Unable to infer schema for <format> at <fileCatalog>. It must be specified manually."
]
},
"_LEGACY_ERROR_TEMP_1135" : {
"message" : [
"<className> is not a valid Spark SQL Data Source."
]
},
"_LEGACY_ERROR_TEMP_1136" : {
"message" : [
"Cannot save interval data type into external storage."
]
},
"_LEGACY_ERROR_TEMP_1137" : {
"message" : [
"Unable to resolve <name> given [<outputStr>]."
]
},
"_LEGACY_ERROR_TEMP_1138" : {
"message" : [
"Hive built-in ORC data source must be used with Hive support enabled. Please use the native ORC data source by setting 'spark.sql.orc.impl' to 'native'."
]
},
"_LEGACY_ERROR_TEMP_1139" : {
"message" : [
"Failed to find data source: <provider>. Avro is built-in but external data source module since Spark 2.4. Please deploy the application as per the deployment section of Apache Avro Data Source Guide."
]
},
"_LEGACY_ERROR_TEMP_1140" : {
"message" : [
"Failed to find data source: <provider>. Please deploy the application as per the deployment section of Structured Streaming + Kafka Integration Guide."
]
},
"_LEGACY_ERROR_TEMP_1141" : {
"message" : [
"Multiple sources found for <provider> (<sourceNames>), please specify the fully qualified class name."
]
},
"_LEGACY_ERROR_TEMP_1142" : {
"message" : [
"Datasource does not support writing empty or nested empty schemas. Please make sure the data schema has at least one or more column(s)."
]
},
"_LEGACY_ERROR_TEMP_1143" : {
"message" : [
"The data to be inserted needs to have the same number of columns as the target table: target table has <targetSize> column(s) but the inserted data has <actualSize> column(s), which contain <staticPartitionsSize> partition column(s) having assigned constant values."
]
},
"_LEGACY_ERROR_TEMP_1144" : {
"message" : [
"The data to be inserted needs to have the same number of partition columns as the target table: target table has <targetSize> partition column(s) but the inserted data has <providedPartitionsSize> partition columns specified."
]
},
"_LEGACY_ERROR_TEMP_1145" : {
"message" : [
"<partKey> is not a partition column. Partition columns are <partitionColumns>."
]
},
"_LEGACY_ERROR_TEMP_1146" : {
"message" : [
"Partition column <partColumn> have multiple values specified, <values>. Please only specify a single value."
]
},
"_LEGACY_ERROR_TEMP_1147" : {
"message" : [
"The ordering of partition columns is <partColumns>. All partition columns having constant values need to appear before other partition columns that do not have an assigned constant value."
]
},
"_LEGACY_ERROR_TEMP_1149" : {
"message" : [
"Fail to rebuild expression: missing key <filter> in `translatedFilterToExpr`."
]
},
"_LEGACY_ERROR_TEMP_1151" : {
"message" : [
"Fail to resolve data source for the table <table> since the table serde property has the duplicated key <key> with extra options specified for this scan operation. To fix this, you can rollback to the legacy behavior of ignoring the extra options by setting the config <config> to `false`, or address the conflicts of the same config."
]
},
"_LEGACY_ERROR_TEMP_1155" : {
"message" : [
"Partition column `<col>` not found in schema <schemaCatalog>."
]
},
"_LEGACY_ERROR_TEMP_1156" : {
"message" : [
"Column <colName> not found in schema <tableSchema>."
]
},
"_LEGACY_ERROR_TEMP_1158" : {
"message" : [
"Saving data into a view is not allowed."
]
},
"_LEGACY_ERROR_TEMP_1159" : {
"message" : [
"The format of the existing table <tableName> is `<existingProvider>`. It doesn't match the specified format `<specifiedProvider>`."
]
},
"_LEGACY_ERROR_TEMP_1160" : {
"message" : [
"The location of the existing table <identifier> is `<existingTableLoc>`. It doesn't match the specified location `<tableDescLoc>`."
]
},
"_LEGACY_ERROR_TEMP_1161" : {
"message" : [
"The column number of the existing table <tableName> (<existingTableSchema>) doesn't match the data schema (<querySchema>)."
]
},
"_LEGACY_ERROR_TEMP_1162" : {
"message" : [
"Cannot resolve '<col>' given input columns: [<inputColumns>]."
]
},
"_LEGACY_ERROR_TEMP_1163" : {
"message" : [
"Specified partitioning does not match that of the existing table <tableName>.",
"Specified partition columns: [<specifiedPartCols>].",
"Existing partition columns: [<existingPartCols>]."
]
},
"_LEGACY_ERROR_TEMP_1164" : {
"message" : [
"Specified bucketing does not match that of the existing table <tableName>.",
"Specified bucketing: <specifiedBucketString>.",
"Existing bucketing: <existingBucketString>."
]
},
"_LEGACY_ERROR_TEMP_1165" : {
"message" : [
"It is not allowed to specify partitioning when the table schema is not defined."
]
},
"_LEGACY_ERROR_TEMP_1166" : {
"message" : [
"Bucketing column '<bucketCol>' should not be part of partition columns '<normalizedPartCols>'."
]
},
"_LEGACY_ERROR_TEMP_1167" : {
"message" : [
"Bucket sorting column '<sortCol>' should not be part of partition columns '<normalizedPartCols>'."
]
},
"_LEGACY_ERROR_TEMP_1169" : {
"message" : [
"Requested partitioning does not match the table <tableName>:",
"Requested partitions: <normalizedPartSpec>.",
"Table partitions: <partColNames>."
]
},
"_LEGACY_ERROR_TEMP_1171" : {
"message" : [
"createTableColumnTypes option column <col> not found in schema <schema>."
]
},
"_LEGACY_ERROR_TEMP_1172" : {
"message" : [
"Parquet type not yet supported: <parquetType>."
]
},
"_LEGACY_ERROR_TEMP_1173" : {
"message" : [
"Illegal Parquet type: <parquetType>."
]
},
"_LEGACY_ERROR_TEMP_1174" : {
"message" : [
"Unrecognized Parquet type: <field>."
]
},
"_LEGACY_ERROR_TEMP_1181" : {
"message" : [
"Stream-stream join without equality predicate is not supported."
]
},
"_LEGACY_ERROR_TEMP_1182" : {
"message" : [
"Column <ambiguousAttrs> are ambiguous. It's probably because you joined several Datasets together, and some of these Datasets are the same. This column points to one of the Datasets but Spark is unable to figure out which one. Please alias the Datasets with different names via `Dataset.as` before joining them, and specify the column using qualified name, e.g. `df.as(\"a\").join(df.as(\"b\"), $\"a.id\" > $\"b.id\")`. You can also set <config> to false to disable this check."
]
},
"_LEGACY_ERROR_TEMP_1183" : {
"message" : [
"Cannot use \"INTERVAL\" type in the table schema."
]
},
"_LEGACY_ERROR_TEMP_1184" : {
"message" : [
"Catalog <plugin> does not support <ability>."
]
},
"_LEGACY_ERROR_TEMP_1186" : {
"message" : [
"Multi-part identifier cannot be empty."
]
},
"_LEGACY_ERROR_TEMP_1187" : {
"message" : [
"Hive data source can only be used with tables, you can not <operation> files of Hive data source directly."
]
},
"_LEGACY_ERROR_TEMP_1188" : {
"message" : [
"There is a 'path' option set and <method>() is called with a path parameter. Either remove the path option, or call <method>() without the parameter. To ignore this check, set '<config>' to 'true'."
]
},
"_LEGACY_ERROR_TEMP_1189" : {
"message" : [
"User specified schema not supported with `<operation>`."
]
},
"_LEGACY_ERROR_TEMP_1190" : {
"message" : [
"Temporary view <viewName> doesn't support streaming write."
]
},
"_LEGACY_ERROR_TEMP_1191" : {
"message" : [
"Streaming into views <viewName> is not supported."
]
},
"_LEGACY_ERROR_TEMP_1192" : {
"message" : [
"The input source(<source>) is different from the table <tableName>'s data source provider(<provider>)."
]
},
"_LEGACY_ERROR_TEMP_1193" : {
"message" : [
"Table <tableName> doesn't support streaming write - <t>."
]
},
"_LEGACY_ERROR_TEMP_1194" : {
"message" : [
"queryName must be specified for memory sink."
]
},
"_LEGACY_ERROR_TEMP_1195" : {
"message" : [
"'<source>' is not supported with continuous trigger."
]
},
"_LEGACY_ERROR_TEMP_1196" : {
"message" : [
"<columnType> column <columnName> not found in existing columns (<validColumnNames>)."
]
},
"_LEGACY_ERROR_TEMP_1197" : {
"message" : [
"'<operation>' does not support partitioning."
]
},
"_LEGACY_ERROR_TEMP_1198" : {
"message" : [
"Function '<unbound>' cannot process input: (<arguments>): <unsupported>."
]
},
"_LEGACY_ERROR_TEMP_1199" : {
"message" : [
"Invalid bound function '<bound>: there are <argsLen> arguments but <inputTypesLen> parameters returned from 'inputTypes()'."
]
},
"_LEGACY_ERROR_TEMP_1201" : {
"message" : [
"Cannot resolve column name \"<colName>\" among (<fieldNames>)."
]
},
"_LEGACY_ERROR_TEMP_1205" : {
"message" : [
"Expected only partition pruning predicates: <nonPartitionPruningPredicates>."
]
},
"_LEGACY_ERROR_TEMP_1207" : {
"message" : [
"The duration and time inputs to window must be an integer, long or string literal."
]
},
"_LEGACY_ERROR_TEMP_1210" : {
"message" : [
"The second argument in <funcName> should be a boolean literal."
]
},
"_LEGACY_ERROR_TEMP_1211" : {
"message" : [
"Detected implicit cartesian product for <joinType> join between logical plans",
"<leftPlan>",
"and",
"rightPlan",
"Join condition is missing or trivial.",
"Either: use the CROSS JOIN syntax to allow cartesian products between these relations, or: enable implicit cartesian products by setting the configuration variable spark.sql.crossJoin.enabled=true."
]
},
"_LEGACY_ERROR_TEMP_1212" : {
"message" : [
"Found conflicting attributes <conflictingAttrs> in the condition joining outer plan:",
"<outerPlan>",
"and subplan:",
"<subplan>."
]
},
"_LEGACY_ERROR_TEMP_1213" : {
"message" : [
"Window expression is empty in <expr>."
]
},
"_LEGACY_ERROR_TEMP_1214" : {
"message" : [
"Found different window function type in <windowExpressions>."
]
},
"_LEGACY_ERROR_TEMP_1218" : {
"message" : [
"<tableIdentifier> should be converted to HadoopFsRelation."
]
},
"_LEGACY_ERROR_TEMP_1219" : {
"message" : [
"Hive metastore does not support altering database location."
]
},
"_LEGACY_ERROR_TEMP_1222" : {
"message" : [
"Unknown resource type: <resourceType>."
]
},
"_LEGACY_ERROR_TEMP_1223" : {
"message" : [
"Invalid field id '<field>' in day-time interval. Supported interval fields: <supportedIds>."
]
},
"_LEGACY_ERROR_TEMP_1224" : {
"message" : [
"'interval <startFieldName> to <endFieldName>' is invalid."
]
},
"_LEGACY_ERROR_TEMP_1225" : {
"message" : [
"Invalid field id '<field>' in year-month interval. Supported interval fields: <supportedIds>."
]
},
"_LEGACY_ERROR_TEMP_1226" : {
"message" : [
"The SQL config '<configName>' was removed in the version <version>. <comment>"
]
},
"_LEGACY_ERROR_TEMP_1228" : {
"message" : [
"Decimal scale (<scale>) cannot be greater than precision (<precision>)."
]
},
"_LEGACY_ERROR_TEMP_1231" : {
"message" : [
"<key> is not a valid partition column in table <tblName>."
]
},
"_LEGACY_ERROR_TEMP_1232" : {
"message" : [
"Partition spec is invalid. The spec (<specKeys>) must match the partition spec (<partitionColumnNames>) defined in table '<tableName>'."
]
},
"_LEGACY_ERROR_TEMP_1237" : {
"message" : [
"The list of partition columns with values in partition specification for table '<table>' in database '<database>' is not a prefix of the list of partition columns defined in the table schema. Expected a prefix of [<schemaColumns>], but got [<specColumns>]."
]
},
"_LEGACY_ERROR_TEMP_1239" : {
"message" : [
"Analyzing column statistics is not supported for column <name> of data type: <dataType>."
]
},
"_LEGACY_ERROR_TEMP_1241" : {
"message" : [
"CREATE-TABLE-AS-SELECT cannot create table with location to a non-empty directory <tablePath>. To allow overwriting the existing non-empty directory, set '<config>' to true."
]
},
"_LEGACY_ERROR_TEMP_1246" : {
"message" : [
"Can't find column `<name>` given table data columns <fieldNames>."
]
},
"_LEGACY_ERROR_TEMP_1247" : {
"message" : [
"Operation not allowed: ALTER TABLE SET [SERDE | SERDEPROPERTIES] for a specific partition is not supported for tables created with the datasource API."
]
},
"_LEGACY_ERROR_TEMP_1248" : {
"message" : [
"Operation not allowed: ALTER TABLE SET SERDE is not supported for tables created with the datasource API."
]
},
"_LEGACY_ERROR_TEMP_1250" : {
"message" : [
"<action> is not allowed on <tableName> since filesource partition management is disabled (spark.sql.hive.manageFilesourcePartitions = false)."
]
},
"_LEGACY_ERROR_TEMP_1251" : {
"message" : [
"<action> is not allowed on <tableName> since its partition metadata is not stored in the Hive metastore. To import this information into the metastore, run `msck repair table <tableName>`."
]
},
"_LEGACY_ERROR_TEMP_1252" : {
"message" : [
"Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead."
]
},
"_LEGACY_ERROR_TEMP_1253" : {
"message" : [
"Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead."
]
},
"_LEGACY_ERROR_TEMP_1255" : {
"message" : [
"Cannot drop built-in function '<functionName>'."
]
},
"_LEGACY_ERROR_TEMP_1256" : {
"message" : [
"Cannot refresh built-in function <functionName>."
]
},
"_LEGACY_ERROR_TEMP_1257" : {
"message" : [
"Cannot refresh temporary function <functionName>."
]
},
"_LEGACY_ERROR_TEMP_1259" : {
"message" : [
"ALTER ADD COLUMNS does not support views. You must drop and re-create the views for adding the new columns. Views: <table>."
]
},
"_LEGACY_ERROR_TEMP_1260" : {
"message" : [
"ALTER ADD COLUMNS does not support datasource table with type <tableType>. You must drop and re-create the table for adding the new columns. Tables: <table>."
]
},
"_LEGACY_ERROR_TEMP_1261" : {
"message" : [
"LOAD DATA is not supported for datasource tables: <tableIdentWithDB>."
]
},
"_LEGACY_ERROR_TEMP_1262" : {
"message" : [
"LOAD DATA target table <tableIdentWithDB> is partitioned, but no partition spec is provided."
]
},
"_LEGACY_ERROR_TEMP_1263" : {
"message" : [
"LOAD DATA target table <tableIdentWithDB> is partitioned, but number of columns in provided partition spec (<partitionSize>) do not match number of partitioned columns in table (<targetTableSize>)."
]
},
"_LEGACY_ERROR_TEMP_1264" : {
"message" : [
"LOAD DATA target table <tableIdentWithDB> is not partitioned, but a partition spec was provided."
]
},
"_LEGACY_ERROR_TEMP_1266" : {
"message" : [
"Operation not allowed: TRUNCATE TABLE on external tables: <tableIdentWithDB>."
]
},
"_LEGACY_ERROR_TEMP_1267" : {
"message" : [
"Operation not allowed: TRUNCATE TABLE ... PARTITION is not supported for tables that are not partitioned: <tableIdentWithDB>."
]
},
"_LEGACY_ERROR_TEMP_1268" : {
"message" : [
"Failed to truncate table <tableIdentWithDB> when removing data of the path: <path>."
]
},
"_LEGACY_ERROR_TEMP_1270" : {
"message" : [
"SHOW CREATE TABLE is not supported on a temporary view: <table>."
]
},
"_LEGACY_ERROR_TEMP_1271" : {
"message" : [
"Failed to execute SHOW CREATE TABLE against table <table>, which is created by Hive and uses the following unsupported feature(s)",
"<unsupportedFeatures>",
"Please use `SHOW CREATE TABLE <table> AS SERDE` to show Hive DDL instead."
]
},
"_LEGACY_ERROR_TEMP_1272" : {
"message" : [
"SHOW CREATE TABLE doesn't support transactional Hive table. Please use `SHOW CREATE TABLE <table> AS SERDE` to show Hive DDL instead."
]
},
"_LEGACY_ERROR_TEMP_1273" : {
"message" : [
"Failed to execute SHOW CREATE TABLE against table <table>, which is created by Hive and uses the following unsupported serde configuration",
"<configs>",
"Please use `SHOW CREATE TABLE <table> AS SERDE` to show Hive DDL instead."
]
},
"_LEGACY_ERROR_TEMP_1274" : {
"message" : [
"<table> is a Spark data source table. Use `SHOW CREATE TABLE` without `AS SERDE` instead."
]
},
"_LEGACY_ERROR_TEMP_1275" : {
"message" : [
"Failed to execute SHOW CREATE TABLE against table/view <table>, which is created by Hive and uses the following unsupported feature(s)",
"<features>."
]
},
"_LEGACY_ERROR_TEMP_1276" : {
"message" : [
"The logical plan that represents the view is not analyzed."
]
},
"_LEGACY_ERROR_TEMP_1280" : {
"message" : [
"It is not allowed to create a persisted view from the Dataset API."
]
},
"_LEGACY_ERROR_TEMP_1285" : {
"message" : [
"Since Spark 2.3, the queries from raw JSON/CSV files are disallowed when the",
"referenced columns only include the internal corrupt record column",
"(named _corrupt_record by default). For example:",
"spark.read.schema(schema).csv(file).filter($\"_corrupt_record\".isNotNull).count()",
"and spark.read.schema(schema).csv(file).select(\"_corrupt_record\").show().",
"Instead, you can cache or save the parsed results and then send the same query.",
"For example, val df = spark.read.schema(schema).csv(file).cache() and then",
"df.filter($\"_corrupt_record\".isNotNull).count()."
]
},
"_LEGACY_ERROR_TEMP_1286" : {
"message" : [
"User-defined partition column <columnName> not found in the JDBC relation: <schema>."
]
},
"_LEGACY_ERROR_TEMP_1287" : {
"message" : [
"Partition column type should be <numericType>, <dateType>, or <timestampType>, but <dataType> found."
]
},
"_LEGACY_ERROR_TEMP_1288" : {
"message" : [
"Table or view '<name>' already exists. SaveMode: ErrorIfExists."
]
},
"_LEGACY_ERROR_TEMP_1290" : {
"message" : [
"Text data source supports only a single column, and you have <schemaSize> columns."
]
},
"_LEGACY_ERROR_TEMP_1291" : {
"message" : [
"Can't find required partition column <readField> in partition schema <partitionSchema>."
]
},
"_LEGACY_ERROR_TEMP_1292" : {
"message" : [
"Temporary view '<tableIdent>' should not have specified a database."
]
},
"_LEGACY_ERROR_TEMP_1293" : {
"message" : [
"Hive data source can only be used with tables, you can't use it with CREATE TEMP VIEW USING."
]
},
"_LEGACY_ERROR_TEMP_1294" : {
"message" : [
"The timestamp provided for the '<strategy>' option is invalid. The expected format is 'YYYY-MM-DDTHH:mm:ss', but the provided timestamp: <timeString>."
]
},
"_LEGACY_ERROR_TEMP_1295" : {
"message" : [
"Set a host to read from with option(\"host\", ...)."
]
},
"_LEGACY_ERROR_TEMP_1296" : {
"message" : [
"Set a port to read from with option(\"port\", ...)."
]
},
"_LEGACY_ERROR_TEMP_1297" : {
"message" : [
"IncludeTimestamp must be set to either \"true\" or \"false\"."
]
},
"_LEGACY_ERROR_TEMP_1298" : {
"message" : [
"checkpointLocation must be specified either through option(\"checkpointLocation\", ...) or SparkSession.conf.set(\"<config>\", ...)."
]
},
"_LEGACY_ERROR_TEMP_1299" : {
"message" : [
"This query does not support recovering from checkpoint location. Delete <checkpointPath> to start over."
]
},
"_LEGACY_ERROR_TEMP_1300" : {
"message" : [
"Unable to find the column `<colName>` given [<actualColumns>]."
]
},
"_LEGACY_ERROR_TEMP_1305" : {
"message" : [
"Unsupported TableChange <change> in JDBC catalog."
]
},
"_LEGACY_ERROR_TEMP_1306" : {
"message" : [
"There is a 'path' or 'paths' option set and load() is called with path parameters. Either remove the path option if it's the same as the path parameter, or add it to the load() parameter if you do want to read multiple paths. To ignore this check, set '<config>' to 'true'."
]
},
"_LEGACY_ERROR_TEMP_1307" : {
"message" : [
"There is a 'path' option set and save() is called with a path parameter. Either remove the path option, or call save() without the parameter. To ignore this check, set '<config>' to 'true'."
]
},
"_LEGACY_ERROR_TEMP_1309" : {
"message" : [
"insertInto() can't be used together with partitionBy(). Partition columns have already been defined for the table. It is not necessary to use partitionBy()."
]
},
"_LEGACY_ERROR_TEMP_1310" : {
"message" : [
"Couldn't find a catalog to handle the identifier <quote>."
]
},
"_LEGACY_ERROR_TEMP_1312" : {
"message" : [
"'<operation>' does not support bucketBy right now."
]
},
"_LEGACY_ERROR_TEMP_1313" : {
"message" : [
"'<operation>' does not support bucketBy and sortBy right now."
]
},
"_LEGACY_ERROR_TEMP_1316" : {
"message" : [
"Invalid partition transformation: <expr>."
]
},
"_LEGACY_ERROR_TEMP_1319" : {
"message" : [
"Invalid join type in joinWith: <joinType>."
]
},
"_LEGACY_ERROR_TEMP_1320" : {
"message" : [
"Typed column <typedCol> that needs input type and schema cannot be passed in untyped `select` API. Use the typed `Dataset.select` API instead."
]
},
"_LEGACY_ERROR_TEMP_1321" : {
"message" : [
"Invalid view name: <viewName>."
]
},
"_LEGACY_ERROR_TEMP_1322" : {
"message" : [
"Invalid number of buckets: bucket(<numBuckets>, <e>)."
]
},
"_LEGACY_ERROR_TEMP_1323" : {
"message" : [
"\"<colName>\" is not a numeric column. Aggregation function can only be applied on a numeric column."
]
},
"_LEGACY_ERROR_TEMP_1324" : {
"message" : [
"The pivot column <pivotColumn> has more than <maxValues> distinct values, this could indicate an error. If this was intended, set <config> to at least the number of distinct values of the pivot column."
]
},
"_LEGACY_ERROR_TEMP_1325" : {
"message" : [
"Cannot modify the value of a static config: <key>."
]
},
"_LEGACY_ERROR_TEMP_1327" : {
"message" : [
"Command execution is not supported in runner <runner>."
]
},
"_LEGACY_ERROR_TEMP_1328" : {
"message" : [
"Can not instantiate class <className>, please make sure it has public non argument constructor."
]
},
"_LEGACY_ERROR_TEMP_1329" : {
"message" : [
"Can not load class <className>, please make sure it is on the classpath."
]
},
"_LEGACY_ERROR_TEMP_1330" : {
"message" : [
"Class <className> doesn't implement interface UserDefinedAggregateFunction."
]
},
"_LEGACY_ERROR_TEMP_1331" : {
"message" : [
"Missing field <fieldName> in table <table> with schema:",
"<schema>."
]
},
"_LEGACY_ERROR_TEMP_1332" : {
"message" : [
"<errorMessage>"
]
},
"_LEGACY_ERROR_TEMP_1338" : {
"message" : [
"Sinks cannot request distribution and ordering in continuous execution mode."
]
},
"_LEGACY_ERROR_TEMP_1344" : {
"message" : [
"Invalid DEFAULT value for column <fieldName>: <defaultValue> fails to parse as a valid literal value."
]
},
"_LEGACY_ERROR_TEMP_1345" : {
"message" : [
"Failed to execute <statementType> command because DEFAULT values are not supported for target data source with table provider: \"<dataSource>\"."
]
},
"_LEGACY_ERROR_TEMP_1346" : {
"message" : [
"Failed to execute <statementType> command because DEFAULT values are not supported when adding new columns to previously existing target data source with table provider: \"<dataSource>\"."
]
},
"_LEGACY_ERROR_TEMP_2000" : {
"message" : [
"<message>. If necessary set <ansiConfig> to false to bypass this error."
]
},
"_LEGACY_ERROR_TEMP_2003" : {
"message" : [
"Unsuccessful try to zip maps with <size> unique keys due to exceeding the array size limit <maxRoundedArrayLength>."
]
},
"_LEGACY_ERROR_TEMP_2005" : {
"message" : [
"Type <dataType> does not support ordered operations."
]
},
"_LEGACY_ERROR_TEMP_2011" : {
"message" : [
"Unexpected data type <dataType>."
]
},
"_LEGACY_ERROR_TEMP_2013" : {
"message" : [
"Negative values found in <frequencyExpression>"
]
},
"_LEGACY_ERROR_TEMP_2017" : {
"message" : [
"not resolved."
]
},
"_LEGACY_ERROR_TEMP_2026" : {
"message" : [
"Failed to convert value <value> (class of <cls>) with the type of <dataType> to JSON."
]
},
"_LEGACY_ERROR_TEMP_2027" : {
"message" : [
"Unexpected operator <op> in correlated subquery<pos>."
]
},
"_LEGACY_ERROR_TEMP_2028" : {
"message" : [
"This line should be unreachable<err>."
]
},
"_LEGACY_ERROR_TEMP_2030" : {
"message" : [
"Can not handle nested schema yet... plan <plan>."
]
},
"_LEGACY_ERROR_TEMP_2031" : {
"message" : [
"The input external row cannot be null."
]
},
"_LEGACY_ERROR_TEMP_2032" : {
"message" : [
"<fieldCannotBeNullMsg>"
]
},
"_LEGACY_ERROR_TEMP_2033" : {
"message" : [
"Unable to create database <name> as failed to create its directory <locationUri>."
]
},
"_LEGACY_ERROR_TEMP_2034" : {
"message" : [
"Unable to drop database <name> as failed to delete its directory <locationUri>."
]
},
"_LEGACY_ERROR_TEMP_2035" : {
"message" : [
"Unable to create table <table> as failed to create its directory <defaultTableLocation>."
]
},
"_LEGACY_ERROR_TEMP_2036" : {
"message" : [
"Unable to delete partition path <partitionPath>."
]
},
"_LEGACY_ERROR_TEMP_2037" : {
"message" : [
"Unable to drop table <table> as failed to delete its directory <dir>."
]
},
"_LEGACY_ERROR_TEMP_2038" : {
"message" : [
"Unable to rename table <oldName> to <newName> as failed to rename its directory <oldDir>."
]
},
"_LEGACY_ERROR_TEMP_2039" : {
"message" : [
"Unable to create partition path <partitionPath>."
]
},
"_LEGACY_ERROR_TEMP_2040" : {
"message" : [
"Unable to rename partition path <oldPartPath>."
]
},
"_LEGACY_ERROR_TEMP_2041" : {
"message" : [
"<methodName> is not implemented."
]
},
"_LEGACY_ERROR_TEMP_2042" : {
"message" : [
"<message>. If necessary set <ansiConfig> to false to bypass this error."
]
},
"_LEGACY_ERROR_TEMP_2045" : {
"message" : [
"Unsupported table change: <message>"
]
},
"_LEGACY_ERROR_TEMP_2046" : {
"message" : [
"[BUG] Not a DataSourceRDDPartition: <split>."
]
},
"_LEGACY_ERROR_TEMP_2047" : {
"message" : [
"'path' is not specified."
]
},
"_LEGACY_ERROR_TEMP_2048" : {
"message" : [
"Schema must be specified when creating a streaming source DataFrame. If some files already exist in the directory, then depending on the file format you may be able to create a static DataFrame on that directory with 'spark.read.load(directory)' and infer schema from it."
]
},
"_LEGACY_ERROR_TEMP_2049" : {
"message" : [
"Data source <className> does not support streamed <operator>."
]
},
"_LEGACY_ERROR_TEMP_2050" : {
"message" : [
"Expected exactly one path to be specified, but got: <paths>."
]
},
"_LEGACY_ERROR_TEMP_2052" : {
"message" : [
"<className> was removed in Spark 2.0. Please check if your library is compatible with Spark 2.0."
]
},
"_LEGACY_ERROR_TEMP_2053" : {
"message" : [
"buildReader is not supported for <format>."
]
},
"_LEGACY_ERROR_TEMP_2056" : {
"message" : [
"Unable to clear output directory <staticPrefixPath> prior to writing to it."
]
},
"_LEGACY_ERROR_TEMP_2057" : {
"message" : [
"Unable to clear partition directory <path> prior to writing to it."
]
},
"_LEGACY_ERROR_TEMP_2058" : {
"message" : [
"Failed to cast value `<value>` to `<dataType>` for partition column `<columnName>`."
]
},
"_LEGACY_ERROR_TEMP_2059" : {
"message" : [
"End of stream."
]
},
"_LEGACY_ERROR_TEMP_2060" : {
"message" : [
"The fallback v1 relation reports inconsistent schema:",
"Schema of v2 scan: <v2Schema>.",
"Schema of v1 relation: <v1Schema>."
]
},
"_LEGACY_ERROR_TEMP_2061" : {
"message" : [
"No records should be returned from EmptyDataReader."
]
},
"_LEGACY_ERROR_TEMP_2065" : {
"message" : [
"Cannot create columnar reader."
]
},
"_LEGACY_ERROR_TEMP_2066" : {
"message" : [
"Invalid namespace name: <namespace>."
]
},
"_LEGACY_ERROR_TEMP_2067" : {
"message" : [
"Unsupported partition transform: <transform>."
]
},
"_LEGACY_ERROR_TEMP_2068" : {
"message" : [
"Missing database location."
]
},
"_LEGACY_ERROR_TEMP_2069" : {
"message" : [
"Cannot remove reserved property: <property>."
]
},
"_LEGACY_ERROR_TEMP_2070" : {
"message" : [
"Writing job failed."
]
},
"_LEGACY_ERROR_TEMP_2071" : {
"message" : [
"Commit denied for partition <partId> (task <taskId>, attempt <attemptId>, stage <stageId>.<stageAttempt>)."
]
},
"_LEGACY_ERROR_TEMP_2073" : {
"message" : [
"Cannot create JDBC table with partition."
]
},
"_LEGACY_ERROR_TEMP_2074" : {
"message" : [
"user-specified schema."
]
},
"_LEGACY_ERROR_TEMP_2075" : {
"message" : [
"Write is not supported for binary file data source."
]
},
"_LEGACY_ERROR_TEMP_2076" : {
"message" : [
"The length of <path> is <len>, which exceeds the max length allowed: <maxLength>."
]
},
"_LEGACY_ERROR_TEMP_2077" : {
"message" : [
"Unsupported field name: <fieldName>."
]
},
"_LEGACY_ERROR_TEMP_2078" : {
"message" : [
"Both '<jdbcTableName>' and '<jdbcQueryString>' can not be specified at the same time."
]
},
"_LEGACY_ERROR_TEMP_2079" : {
"message" : [
"Option '<jdbcTableName>' or '<jdbcQueryString>' is required."
]
},
"_LEGACY_ERROR_TEMP_2080" : {
"message" : [
"Option `<optionName>` can not be empty."
]
},
"_LEGACY_ERROR_TEMP_2081" : {
"message" : [
"Invalid value `<value>` for parameter `<jdbcTxnIsolationLevel>`. This can be `NONE`, `READ_UNCOMMITTED`, `READ_COMMITTED`, `REPEATABLE_READ` or `SERIALIZABLE`."
]
},
"_LEGACY_ERROR_TEMP_2082" : {
"message" : [
"Can't get JDBC type for <catalogString>."
]
},
"_LEGACY_ERROR_TEMP_2083" : {
"message" : [
"Unsupported type <content>."
]
},
"_LEGACY_ERROR_TEMP_2084" : {
"message" : [
"Unsupported array element type <catalogString> based on binary."
]
},
"_LEGACY_ERROR_TEMP_2085" : {
"message" : [
"Nested arrays unsupported."
]
},
"_LEGACY_ERROR_TEMP_2086" : {
"message" : [
"Can't translate non-null value for field <pos>."
]
},
"_LEGACY_ERROR_TEMP_2087" : {
"message" : [
"Invalid value `<n>` for parameter `<jdbcNumPartitions>` in table writing via JDBC. The minimum value is 1."
]
},
"_LEGACY_ERROR_TEMP_2088" : {
"message" : [
"<dataType> is not supported yet."
]
},
"_LEGACY_ERROR_TEMP_2089" : {
"message" : [
"DataType: <catalogString>."
]
},
"_LEGACY_ERROR_TEMP_2090" : {
"message" : [
"The input filter of <owner> should be fully convertible."
]
},
"_LEGACY_ERROR_TEMP_2093" : {
"message" : [
"Found duplicate field(s) \"<requiredFieldName>\": <matchedOrcFields> in case-insensitive mode."
]
},
"_LEGACY_ERROR_TEMP_2094" : {
"message" : [
"Found duplicate field(s) \"<requiredId>\": <matchedFields> in id mapping mode."
]
},
"_LEGACY_ERROR_TEMP_2095" : {
"message" : [
"Failed to merge incompatible schemas <left> and <right>."
]
},
"_LEGACY_ERROR_TEMP_2096" : {
"message" : [
"<ddl> is not supported temporarily."
]
},
"_LEGACY_ERROR_TEMP_2097" : {
"message" : [
"Could not execute broadcast in <timeout> secs. You can increase the timeout for broadcasts via <broadcastTimeout> or disable broadcast join by setting <autoBroadcastJoinThreshold> to -1."
]
},
"_LEGACY_ERROR_TEMP_2098" : {
"message" : [
"Could not compare cost with <cost>."
]
},
"_LEGACY_ERROR_TEMP_2100" : {
"message" : [
"not support type: <dataType>."
]
},
"_LEGACY_ERROR_TEMP_2101" : {
"message" : [
"Not support non-primitive type now."
]
},
"_LEGACY_ERROR_TEMP_2103" : {
"message" : [
"Dictionary encoding should not be used because of dictionary overflow."
]
},
"_LEGACY_ERROR_TEMP_2104" : {
"message" : [
"End of the iterator."
]
},
"_LEGACY_ERROR_TEMP_2105" : {
"message" : [
"Could not allocate memory to grow BytesToBytesMap."
]
},
"_LEGACY_ERROR_TEMP_2106" : {
"message" : [
"Can't acquire <size> bytes memory to build hash relation, got <got> bytes."
]
},
"_LEGACY_ERROR_TEMP_2107" : {
"message" : [
"There is not enough memory to build hash map."
]
},
"_LEGACY_ERROR_TEMP_2108" : {
"message" : [
"Does not support row that is larger than 256M."
]
},
"_LEGACY_ERROR_TEMP_2109" : {
"message" : [
"Cannot build HashedRelation with more than 1/3 billion unique keys."
]
},
"_LEGACY_ERROR_TEMP_2110" : {
"message" : [
"Cannot build a HashedRelation that is larger than 8G."
]
},
"_LEGACY_ERROR_TEMP_2111" : {
"message" : [
"Failed to push a row into <rowQueue>."
]
},
"_LEGACY_ERROR_TEMP_2112" : {
"message" : [
"Unexpected window function frame <frame>."
]
},
"_LEGACY_ERROR_TEMP_2113" : {
"message" : [
"Unable to parse <stats> as a percentile."
]
},
"_LEGACY_ERROR_TEMP_2114" : {
"message" : [
"<stats> is not a recognised statistic."
]
},
"_LEGACY_ERROR_TEMP_2115" : {
"message" : [
"Unknown column: <unknownColumn>."
]
},
"_LEGACY_ERROR_TEMP_2116" : {
"message" : [
"Unexpected: <o>."
]
},
"_LEGACY_ERROR_TEMP_2120" : {
"message" : [
"Do not support array of type <clazz>."
]
},
"_LEGACY_ERROR_TEMP_2121" : {
"message" : [
"Do not support type <clazz>."
]
},
"_LEGACY_ERROR_TEMP_2124" : {
"message" : [
"Failed to merge decimal types with incompatible scale <leftScale> and <rightScale>."
]
},
"_LEGACY_ERROR_TEMP_2126" : {
"message" : [
"Unsuccessful attempt to build maps with <size> elements due to exceeding the map size limit <maxRoundedArrayLength>."
]
},
"_LEGACY_ERROR_TEMP_2128" : {
"message" : [
"The key array and value array of MapData must have the same length."
]
},
"_LEGACY_ERROR_TEMP_2129" : {
"message" : [
"Conflict found: Field <field> <actual> differs from <field> <expected> derived from <candidate>."
]
},
"_LEGACY_ERROR_TEMP_2130" : {
"message" : [
"Fail to recognize '<pattern>' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from '<docroot>/sql-ref-datetime-pattern.html'."
]
},
"_LEGACY_ERROR_TEMP_2131" : {
"message" : [
"Exception when registering StreamingQueryListener."
]
},
"_LEGACY_ERROR_TEMP_2138" : {
"message" : [
"Cannot have circular references in bean class, but got the circular reference of class <clazz>."
]
},
"_LEGACY_ERROR_TEMP_2139" : {
"message" : [
"cannot have circular references in class, but got the circular reference of class <t>."
]
},
"_LEGACY_ERROR_TEMP_2140" : {
"message" : [
"`<fieldName>` is not a valid identifier of Java and cannot be used as field name",
"<walkedTypePath>."
]
},
"_LEGACY_ERROR_TEMP_2144" : {
"message" : [
"Unable to find constructor for <tpe>. This could happen if <tpe> is an interface, or a trait without companion object constructor."
]
},
"_LEGACY_ERROR_TEMP_2145" : {
"message" : [
"<paramName> cannot be more than one character."
]
},
"_LEGACY_ERROR_TEMP_2146" : {
"message" : [
"<paramName> should be an integer. Found <value>."
]
},
"_LEGACY_ERROR_TEMP_2147" : {
"message" : [
"<paramName> flag can be true or false."
]
},
"_LEGACY_ERROR_TEMP_2148" : {
"message" : [
"null value found but field <name> is not nullable."
]
},
"_LEGACY_ERROR_TEMP_2150" : {
"message" : [
"Due to Scala's limited support of tuple, tuple with more than 22 elements are not supported."
]
},
"_LEGACY_ERROR_TEMP_2154" : {
"message" : [
"Failed to get outer pointer for <innerCls>."
]
},
"_LEGACY_ERROR_TEMP_2155" : {
"message" : [
"<userClass> is not annotated with SQLUserDefinedType nor registered with UDTRegistration.}"
]
},
"_LEGACY_ERROR_TEMP_2163" : {
"message" : [
"Initial type <dataType> must be a <target>."
]
},
"_LEGACY_ERROR_TEMP_2164" : {
"message" : [
"Initial type <dataType> must be an <arrayType>, a <structType> or a <mapType>."
]
},
"_LEGACY_ERROR_TEMP_2165" : {
"message" : [
"Malformed records are detected in schema inference. Parse Mode: <failFastMode>."
]
},
"_LEGACY_ERROR_TEMP_2166" : {
"message" : [
"Malformed JSON."
]
},
"_LEGACY_ERROR_TEMP_2167" : {
"message" : [
"Malformed records are detected in schema inference. Parse Mode: <failFastMode>. Reasons: Failed to infer a common schema. Struct types are expected, but `<dataType>` was found."
]
},
"_LEGACY_ERROR_TEMP_2168" : {
"message" : [
"Decorrelate inner query through <plan> is not supported."
]
},
"_LEGACY_ERROR_TEMP_2169" : {
"message" : [
"This method should not be called in the analyzer."
]
},
"_LEGACY_ERROR_TEMP_2170" : {
"message" : [
"Cannot safely merge SERDEPROPERTIES:",
"<props1>",
"<props2>",
"The conflict keys: <conflictKeys>."
]
},
"_LEGACY_ERROR_TEMP_2171" : {
"message" : [
"Not supported pair: <r1>, <r2> at <function>()."
]
},
"_LEGACY_ERROR_TEMP_2172" : {
"message" : [
"Once strategy's idempotence is broken for batch <batchName>",
"<plan>."
]
},
"_LEGACY_ERROR_TEMP_2176" : {
"message" : [
"Cannot create array with <numElements> elements of data due to exceeding the limit <maxRoundedArrayLength> elements for ArrayData. <additionalErrorMessage>"
]
},
"_LEGACY_ERROR_TEMP_2179" : {
"message" : [
"HiveServer2 Kerberos principal or keytab is not correctly configured."
]
},
"_LEGACY_ERROR_TEMP_2180" : {
"message" : [
"Parent SparkUI to attach this tab to not found."
]
},
"_LEGACY_ERROR_TEMP_2181" : {
"message" : [
"inferSchema is not supported for hive data source."
]
},
"_LEGACY_ERROR_TEMP_2182" : {
"message" : [
"Requested partitioning does not match the <tableIdentifier> table:",
"Requested partitions: <partitionKeys>.",
"Table partitions: <partitionColumnNames>."
]
},
"_LEGACY_ERROR_TEMP_2183" : {
"message" : [
"Dynamic partition key <key> is not among written partition paths."
]
},
"_LEGACY_ERROR_TEMP_2184" : {
"message" : [
"Cannot remove partition directory '<partitionPath>'."
]
},
"_LEGACY_ERROR_TEMP_2185" : {
"message" : [
"Cannot create staging directory: <message>"
]
},
"_LEGACY_ERROR_TEMP_2186" : {
"message" : [
"The SerDe interface removed since Hive 2.3(HIVE-15167). Please migrate your custom SerDes to Hive 2.3. See HIVE-15167 for more details."
]
},
"_LEGACY_ERROR_TEMP_2187" : {
"message" : [
"<message>, db: <dbName>, table: <tableName>."
]
},
"_LEGACY_ERROR_TEMP_2192" : {
"message" : [
"Partition filter cannot have both `\"` and `'` characters."
]
},
"_LEGACY_ERROR_TEMP_2193" : {
"message" : [
"Caught Hive MetaException attempting to get partition metadata by filter from Hive. You can set the Spark configuration setting <hiveMetastorePartitionPruningFallbackOnException> to true to work around this problem, however this will result in degraded performance. Please report a bug: https://issues.apache.org/jira/browse/SPARK."
]
},
"_LEGACY_ERROR_TEMP_2194" : {
"message" : [
"Unsupported Hive Metastore version <version>. Please set <key> with a valid version."
]
},
"_LEGACY_ERROR_TEMP_2195" : {
"message" : [
"<cnf> when creating Hive client using classpath: <execJars> Please make sure that jars for your version of hive and hadoop are included in the paths passed to <key>."
]
},
"_LEGACY_ERROR_TEMP_2198" : {
"message" : [
"Failed to rename as <dstPath> already exists."
]
},
"_LEGACY_ERROR_TEMP_2200" : {
"message" : [
"Error: we detected a possible problem with the location of your \"_spark_metadata\"",
"directory and you likely need to move it before restarting this query.",
"",
"Earlier version of Spark incorrectly escaped paths when writing out the",
"\"_spark_metadata\" directory for structured streaming. While this was corrected in",
"Spark 3.0, it appears that your query was started using an earlier version that",
"",
"Correct \"_spark_metadata\" Directory: <metadataPath>",
"Incorrect \"_spark_metadata\" Directory: <legacyMetadataPath>",
"",
"Please move the data from the incorrect directory to the correct one, delete the",
"incorrect directory, and then restart this query. If you believe you are receiving",
"this message in error, you can disable it with the SQL conf",
"<StreamingCheckpointEscapedPathCheckEnabled>."
]
},
"_LEGACY_ERROR_TEMP_2201" : {
"message" : [
"Partition column <col> not found in schema <schema>."
]
},
"_LEGACY_ERROR_TEMP_2203" : {
"message" : [
"Cannot set timeout duration without enabling processing time timeout in [map|flatMap]GroupsWithState."
]
},
"_LEGACY_ERROR_TEMP_2204" : {
"message" : [
"Cannot get event time watermark timestamp without setting watermark before [map|flatMap]GroupsWithState."
]
},
"_LEGACY_ERROR_TEMP_2205" : {
"message" : [
"Cannot set timeout timestamp without enabling event time timeout in [map|flatMapGroupsWithState."
]
},
"_LEGACY_ERROR_TEMP_2207" : {
"message" : [
"Multiple streaming queries are concurrently using <path>."
]
},
"_LEGACY_ERROR_TEMP_2208" : {
"message" : [
"<commitProtocol> does not support adding files with an absolute path."
]
},
"_LEGACY_ERROR_TEMP_2209" : {
"message" : [
"Data source <srcName> does not support microbatch processing.",
"",
"Either the data source is disabled at",
"SQLConf.get.DISABLED_V2_STREAMING_MICROBATCH_READERS.key (The disabled sources",
"are [<disabledSources>]) or the table <table> does not have MICRO_BATCH_READ",
"capability. Meanwhile, the fallback, data source v1, is not available.\""
]
},
"_LEGACY_ERROR_TEMP_2210" : {
"message" : [
"StreamingRelationExec cannot be executed."
]
},
"_LEGACY_ERROR_TEMP_2211" : {
"message" : [
"Invalid output mode: <outputMode>."
]
},
"_LEGACY_ERROR_TEMP_2212" : {
"message" : [
"Invalid catalog name: <name>."
]
},
"_LEGACY_ERROR_TEMP_2214" : {
"message" : [
"Plugin class for catalog '<name>' does not implement CatalogPlugin: <pluginClassName>."
]
},
"_LEGACY_ERROR_TEMP_2215" : {
"message" : [
"Cannot find catalog plugin class for catalog '<name>': <pluginClassName>."
]
},
"_LEGACY_ERROR_TEMP_2216" : {
"message" : [
"Failed to find public no-arg constructor for catalog '<name>': <pluginClassName>)."
]
},
"_LEGACY_ERROR_TEMP_2217" : {
"message" : [
"Failed to call public no-arg constructor for catalog '<name>': <pluginClassName>)."
]
},
"_LEGACY_ERROR_TEMP_2218" : {
"message" : [
"Cannot instantiate abstract catalog plugin class for catalog '<name>': <pluginClassName>."
]
},
"_LEGACY_ERROR_TEMP_2219" : {
"message" : [
"Failed during instantiating constructor for catalog '<name>': <pluginClassName>."
]
},
"_LEGACY_ERROR_TEMP_2220" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_2222" : {
"message" : [
"Cannot mutate ReadOnlySQLConf."
]
},
"_LEGACY_ERROR_TEMP_2223" : {
"message" : [
"Cannot clone/copy ReadOnlySQLConf."
]
},
"_LEGACY_ERROR_TEMP_2224" : {
"message" : [
"Cannot get SQLConf inside scheduler event loop thread."
]
},
"_LEGACY_ERROR_TEMP_2225" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_2226" : {
"message" : [
"null literals can't be casted to <name>."
]
},
"_LEGACY_ERROR_TEMP_2227" : {
"message" : [
"<name> is not an UserDefinedType. Please make sure registering an UserDefinedType for <userClass>."
]
},
"_LEGACY_ERROR_TEMP_2228" : {
"message" : [
"Can not load in UserDefinedType <name> for user class <userClass>."
]
},
"_LEGACY_ERROR_TEMP_2229" : {
"message" : [
"<name> is not a public class. Only public classes are supported."
]
},
"_LEGACY_ERROR_TEMP_2230" : {
"message" : [
"Primitive types are not supported."
]
},
"_LEGACY_ERROR_TEMP_2232" : {
"message" : [
"Value at index <index> is null."
]
},
"_LEGACY_ERROR_TEMP_2233" : {
"message" : [
"Only Data Sources providing FileFormat are supported: <providingClass>."
]
},
"_LEGACY_ERROR_TEMP_2234" : {
"message" : [
"Failed to set original ACL <aclEntries> back to the created path: <path>. Exception: <message>"
]
},
"_LEGACY_ERROR_TEMP_2235" : {
"message" : [
"Multiple failures in stage materialization."
]
},
"_LEGACY_ERROR_TEMP_2236" : {
"message" : [
"Unrecognized compression scheme type ID: <typeId>."
]
},
"_LEGACY_ERROR_TEMP_2237" : {
"message" : [
"<className>.getParentLogger is not yet implemented."
]
},
"_LEGACY_ERROR_TEMP_2238" : {
"message" : [
"Unable to create Parquet converter for <typeName> whose Parquet type is <parquetType> without decimal metadata. Please read this column/field as Spark BINARY type."
]
},
"_LEGACY_ERROR_TEMP_2239" : {
"message" : [
"Unable to create Parquet converter for decimal type <t> whose Parquet type is <parquetType>. Parquet DECIMAL type can only be backed by INT32, INT64, FIXED_LEN_BYTE_ARRAY, or BINARY."
]
},
"_LEGACY_ERROR_TEMP_2240" : {
"message" : [
"Unable to create Parquet converter for data type <t> whose Parquet type is <parquetType>."
]
},
"_LEGACY_ERROR_TEMP_2241" : {
"message" : [
"Nonatomic partition table <tableName> can not add multiple partitions."
]
},
"_LEGACY_ERROR_TEMP_2242" : {
"message" : [
"<provider> source does not support user-specified schema."
]
},
"_LEGACY_ERROR_TEMP_2243" : {
"message" : [
"Nonatomic partition table <tableName> can not drop multiple partitions."
]
},
"_LEGACY_ERROR_TEMP_2244" : {
"message" : [
"The table <tableName> does not support truncation of multiple partition."
]
},
"_LEGACY_ERROR_TEMP_2245" : {
"message" : [
"Table does not support overwrite by expression: <table>."
]
},
"_LEGACY_ERROR_TEMP_2246" : {
"message" : [
"Table does not support dynamic partition overwrite: <table>."
]
},
"_LEGACY_ERROR_TEMP_2248" : {
"message" : [
"Cannot broadcast the table over <maxBroadcastTableRows> rows: <numRows> rows."
]
},
"_LEGACY_ERROR_TEMP_2249" : {
"message" : [
"Cannot broadcast the table that is larger than <maxBroadcastTableBytes>: <dataSize>."
]
},
"_LEGACY_ERROR_TEMP_2250" : {
"message" : [
"Not enough memory to build and broadcast the table to all worker nodes. As a workaround, you can either disable broadcast by setting <autoBroadcastJoinThreshold> to -1 or increase the spark driver memory by setting <driverMemory> to a higher value<analyzeTblMsg>"
]
},
"_LEGACY_ERROR_TEMP_2251" : {
"message" : [
"<execName> does not support the execute() code path."
]
},
"_LEGACY_ERROR_TEMP_2252" : {
"message" : [
"Cannot merge <className> with <otherClass>."
]
},
"_LEGACY_ERROR_TEMP_2253" : {
"message" : [
"Data source <sourceName> does not support continuous processing."
]
},
"_LEGACY_ERROR_TEMP_2254" : {
"message" : [
"Data read failed."
]
},
"_LEGACY_ERROR_TEMP_2255" : {
"message" : [
"Epoch marker generation failed."
]
},
"_LEGACY_ERROR_TEMP_2256" : {
"message" : [
"Foreach writer has been aborted due to a task failure."
]
},
"_LEGACY_ERROR_TEMP_2260" : {
"message" : [
"Cannot purge as it might break internal state."
]
},
"_LEGACY_ERROR_TEMP_2261" : {
"message" : [
"Clean up source files is not supported when reading from the output directory of FileStreamSink."
]
},
"_LEGACY_ERROR_TEMP_2262" : {
"message" : [
"latestOffset(Offset, ReadLimit) should be called instead of this method."
]
},
"_LEGACY_ERROR_TEMP_2263" : {
"message" : [
"Error: we detected a possible problem with the location of your checkpoint and you",
"likely need to move it before restarting this query.",
"",
"Earlier version of Spark incorrectly escaped paths when writing out checkpoints for",
"structured streaming. While this was corrected in Spark 3.0, it appears that your",
"query was started using an earlier version that incorrectly handled the checkpoint",
"path.",
"",
"Correct Checkpoint Directory: <checkpointPath>",
"Incorrect Checkpoint Directory: <legacyCheckpointDir>",
"",
"Please move the data from the incorrect directory to the correct one, delete the",
"incorrect directory, and then restart this query. If you believe you are receiving",
"this message in error, you can disable it with the SQL conf",
"<StreamingCheckpointEscapedPathCheckEnabled>."
]
},
"_LEGACY_ERROR_TEMP_2264" : {
"message" : [
"Subprocess exited with status <exitCode>. Error: <stderrBuffer>."
]
},
"_LEGACY_ERROR_TEMP_2265" : {
"message" : [
"<nodeName> without serde does not support <dt> as output data type."
]
},
"_LEGACY_ERROR_TEMP_2266" : {
"message" : [
"Invalid `startIndex` provided for generating iterator over the array. Total elements: <numRows>, requested `startIndex`: <startIndex>."
]
},
"_LEGACY_ERROR_TEMP_2267" : {
"message" : [
"The backing <className> has been modified since the creation of this Iterator."
]
},
"_LEGACY_ERROR_TEMP_2268" : {
"message" : [
"<nodeName> does not implement doExecuteBroadcast."
]
},
"_LEGACY_ERROR_TEMP_2269" : {
"message" : [
"<globalTempDB> is a system preserved database, please rename your existing database to resolve the name conflict, or set a different value for <globalTempDatabase>, and launch your Spark application again."
]
},
"_LEGACY_ERROR_TEMP_2270" : {
"message" : [
"comment on table is not supported."
]
},
"_LEGACY_ERROR_TEMP_2271" : {
"message" : [
"UpdateColumnNullability is not supported."
]
},
"_LEGACY_ERROR_TEMP_2272" : {
"message" : [
"Rename column is only supported for MySQL version 8.0 and above."
]
},
"_LEGACY_ERROR_TEMP_2273" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_2277" : {
"message" : [
"Number of dynamic partitions created is <numWrittenParts>, which is more than <maxDynamicPartitions>. To solve this try to set <maxDynamicPartitionsKey> to at least <numWrittenParts>."
]
},
"_LEGACY_ERROR_TEMP_2330" : {
"message" : [
"Cannot change nullable column to non-nullable: <fieldName>."
]
},
"_LEGACY_ERROR_TEMP_2446" : {
"message" : [
"Operation not allowed: <cmd> only works on table with location provided: <tableIdentWithDB>"
]
},
"_LEGACY_ERROR_TEMP_2450" : {
"message" : [
"No handler for UDF/UDAF/UDTF '<clazz>'"
]
},
"_LEGACY_ERROR_TEMP_3000" : {
"message" : [
"Unexpected Py4J server <class>."
]
},
"_LEGACY_ERROR_TEMP_3001" : {
"message" : [
"EOFException occurred while reading the port number from <daemonModule>'s stdout<additionalMessage>."
]
},
"_LEGACY_ERROR_TEMP_3002" : {
"message" : [
"Data of type <other> is not supported"
]
},
"_LEGACY_ERROR_TEMP_3003" : {
"message" : [
"Could not compute split, block <blockId> of RDD <id> not found"
]
},
"_LEGACY_ERROR_TEMP_3004" : {
"message" : [
"Attempted to use <string> after its blocks have been removed!"
]
},
"_LEGACY_ERROR_TEMP_3005" : {
"message" : [
"Histogram on either an empty RDD or RDD containing +/-infinity or NaN"
]
},
"_LEGACY_ERROR_TEMP_3006" : {
"message" : [
"empty RDD"
]
},
"_LEGACY_ERROR_TEMP_3008" : {
"message" : [
"Cannot use map-side combining with array keys."
]
},
"_LEGACY_ERROR_TEMP_3009" : {
"message" : [
"HashPartitioner cannot partition array keys."
]
},
"_LEGACY_ERROR_TEMP_3010" : {
"message" : [
"reduceByKeyLocally() does not support array keys"
]
},
"_LEGACY_ERROR_TEMP_3011" : {
"message" : [
"This RDD lacks a SparkContext. It could happen in the following cases:",
"(1) RDD transformations and actions are NOT invoked by the driver, but inside of other transformations; for example, rdd1.map(x => rdd2.values.count() * x) is invalid because the values transformation and count action cannot be performed inside of the rdd1.map transformation. For more information, see SPARK-5063.",
"(2) When a Spark Streaming job recovers from checkpoint, this exception will be hit if a reference to an RDD not defined by the streaming job is used in DStream operations. For more information, See SPARK-13758."
]
},
"_LEGACY_ERROR_TEMP_3012" : {
"message" : [
"Cannot change storage level of an RDD after it was already assigned a level"
]
},
"_LEGACY_ERROR_TEMP_3013" : {
"message" : [
"Can only zip RDDs with same number of elements in each partition"
]
},
"_LEGACY_ERROR_TEMP_3014" : {
"message" : [
"empty collection"
]
},
"_LEGACY_ERROR_TEMP_3015" : {
"message" : [
"countByValueApprox() does not support arrays"
]
},
"_LEGACY_ERROR_TEMP_3016" : {
"message" : [
"Checkpoint directory has not been set in the SparkContext"
]
},
"_LEGACY_ERROR_TEMP_3017" : {
"message" : [
"Invalid checkpoint file: <path>"
]
},
"_LEGACY_ERROR_TEMP_3018" : {
"message" : [
"Failed to create checkpoint path <checkpointDirPath>"
]
},
"_LEGACY_ERROR_TEMP_3019" : {
"message" : [
"Checkpoint RDD has a different number of partitions from original RDD. Original",
"RDD [ID: <originalRDDId>, num of partitions: <originalRDDLength>];",
"Checkpoint RDD [ID: <newRDDId>, num of partitions: <newRDDLength>]."
]
},
"_LEGACY_ERROR_TEMP_3020" : {
"message" : [
"Checkpoint dir must be specified."
]
},
"_LEGACY_ERROR_TEMP_3021" : {
"message" : [
"Error asking standalone scheduler to shut down executors"
]
},
"_LEGACY_ERROR_TEMP_3022" : {
"message" : [
"Error stopping standalone scheduler's driver endpoint"
]
},
"_LEGACY_ERROR_TEMP_3023" : {
"message" : [
"Can't run submitMapStage on RDD with 0 partitions"
]
},
"_LEGACY_ERROR_TEMP_3024" : {
"message" : [
"attempted to access non-existent accumulator <id>"
]
},
"_LEGACY_ERROR_TEMP_3025" : {
"message" : [
"TaskSetManagers should only send Resubmitted task statuses for tasks in ShuffleMapStages."
]
},
"_LEGACY_ERROR_TEMP_3026" : {
"message" : [
"duration() called on unfinished task"
]
},
"_LEGACY_ERROR_TEMP_3027" : {
"message" : [
"Unrecognized <schedulerModeProperty>: <schedulingModeConf>"
]
},
"_LEGACY_ERROR_TEMP_3028" : {
"message" : [
"<errorMsg>"
]
},
"_LEGACY_ERROR_TEMP_3029" : {
"message" : [
"Exiting due to error from cluster scheduler: <message>"
]
},
"_LEGACY_ERROR_TEMP_3030" : {
"message" : [
"Task <currentTaskAttemptId> has not locked block <blockId> for writing"
]
},
"_LEGACY_ERROR_TEMP_3031" : {
"message" : [
"Block <blockId> does not exist"
]
},
"_LEGACY_ERROR_TEMP_3032" : {
"message" : [
"Error occurred while waiting for replication to finish"
]
},
"_LEGACY_ERROR_TEMP_3033" : {
"message" : [
"Unable to register with external shuffle server due to : <message>"
]
},
"_LEGACY_ERROR_TEMP_3034" : {
"message" : [
"Error occurred while waiting for async. reregistration"
]
},
"_LEGACY_ERROR_TEMP_3035" : {
"message" : [
"Unexpected shuffle block <blockId> with unsupported shuffle resolver <shuffleBlockResolver>"
]
},
"_LEGACY_ERROR_TEMP_3036" : {
"message" : [
"Failure while trying to store block <blockId> on <blockManagerId>."
]
},
"_LEGACY_ERROR_TEMP_3037" : {
"message" : [
"Block <blockId> was not found even though it's read-locked"
]
},
"_LEGACY_ERROR_TEMP_3038" : {
"message" : [
"get() failed for block <blockId> even though we held a lock"
]
},
"_LEGACY_ERROR_TEMP_3039" : {
"message" : [
"BlockManager returned null for BlockStatus query: <blockId>"
]
},
"_LEGACY_ERROR_TEMP_3040" : {
"message" : [
"BlockManagerMasterEndpoint returned false, expected true."
]
},
"_LEGACY_ERROR_TEMP_3041" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3042" : {
"message" : [
"Failed to get block <blockId>, which is not a shuffle block"
]
},
"_LEGACY_ERROR_TEMP_3050" : {
"message" : [
"Cannot modify the value of a static config: <k>"
]
},
"_LEGACY_ERROR_TEMP_3052" : {
"message" : [
"Unexpected resolved action: <other>"
]
},
"_LEGACY_ERROR_TEMP_3053" : {
"message" : [
"Unexpected WHEN NOT MATCHED action: <other>"
]
},
"_LEGACY_ERROR_TEMP_3054" : {
"message" : [
"<expr> is not currently supported"
]
},
"_LEGACY_ERROR_TEMP_3055" : {
"message" : [
"ScalarFunction '<scalarFunc.name>' neither implement magic method nor override 'produceResult'"
]
},
"_LEGACY_ERROR_TEMP_3056" : {
"message" : [
"Unexpected row-level read relations (allow multiple = <allowMultipleReads>): <other>"
]
},
"_LEGACY_ERROR_TEMP_3057" : {
"message" : [
"Cannot retrieve row-level operation from <table>"
]
},
"_LEGACY_ERROR_TEMP_3058" : {
"message" : [
"Found duplicate column(s) <checkType>: <duplicateColumns>"
]
},
"_LEGACY_ERROR_TEMP_3059" : {
"message" : [
"The positions provided (<pos>) cannot be resolved in",
"<schema>"
]
},
"_LEGACY_ERROR_TEMP_3060" : {
"message" : [
"Couldn't find column <i> in:",
"<schema>"
]
},
"_LEGACY_ERROR_TEMP_3061" : {
"message" : [
"<e>",
"<schema>"
]
},
"_LEGACY_ERROR_TEMP_3062" : {
"message" : [
"Expected <columnPath> to be a nested data type, but found <o>. Was looking for the index of <attr> in a nested field"
]
},
"_LEGACY_ERROR_TEMP_3063" : {
"message" : [
"pivot is not supported on a streaming DataFrames/Datasets"
]
},
"_LEGACY_ERROR_TEMP_3065" : {
"message" : [
"<clazz>: <msg>"
]
},
"_LEGACY_ERROR_TEMP_3067" : {
"message" : [
"Streaming aggregation doesn't support group aggregate pandas UDF"
]
},
"_LEGACY_ERROR_TEMP_3068" : {
"message" : [
"Global aggregation with session window in streaming query is not supported."
]
},
"_LEGACY_ERROR_TEMP_3069" : {
"message" : [
"<internalName> is a reserved column name that cannot be read in combination with <colName> column."
]
},
"_LEGACY_ERROR_TEMP_3070" : {
"message" : [
"<internalName> is a reserved column name that cannot be read in combination with <colName> column."
]
},
"_LEGACY_ERROR_TEMP_3071" : {
"message" : [
"<msg>"
]
},
"_LEGACY_ERROR_TEMP_3072" : {
"message" : [
"<msg>"
]
},
"_LEGACY_ERROR_TEMP_3073" : {
"message" : [
"Unexpected instruction: <other>"
]
},
"_LEGACY_ERROR_TEMP_3074" : {
"message" : [
"field <fieldName> not found from given schema <schema>"
]
},
"_LEGACY_ERROR_TEMP_3075" : {
"message" : [
"Couldn't find scan attribute for <tableAttr> in <scanAttrs>"
]
},
"_LEGACY_ERROR_TEMP_3076" : {
"message" : [
"Redefining watermark is disallowed. You can set the config '<config>' to 'false' to restore the previous behavior. Note that multiple stateful operators will be disallowed."
]
},
"_LEGACY_ERROR_TEMP_3077" : {
"message" : [
"More than one event time columns are available. Please ensure there is at most one event time column per stream. event time columns: <eventTimeCols>"
]
},
"_LEGACY_ERROR_TEMP_3079" : {
"message" : [
"Dynamic partition cannot be the parent of a static partition."
]
},
"_LEGACY_ERROR_TEMP_3080" : {
"message" : [
"<msg>"
]
},
"_LEGACY_ERROR_TEMP_3081" : {
"message" : [
"Save mode <mode> not allowed for Kafka. Allowed save modes are <append> and <errorIfExists> (default)."
]
},
"_LEGACY_ERROR_TEMP_3082" : {
"message" : [
"Creating bucketed Hive serde table is not supported yet."
]
},
"_LEGACY_ERROR_TEMP_3083" : {
"message" : [
"Unable to infer the schema. The schema specification is required to create the table <tableName>."
]
},
"_LEGACY_ERROR_TEMP_3084" : {
"message" : [
"No handler for UDF/UDAF/UDTF '<clazz>': <e>"
]
},
"_LEGACY_ERROR_TEMP_3085" : {
"message" : [
"from_avro() doesn't support the <name> mode. Acceptable modes are <permissiveMode> and <failFastMode>."
]
},
"_LEGACY_ERROR_TEMP_3086" : {
"message" : [
"Cannot persist <tableName> into Hive metastore as table property keys may not start with 'spark.sql.': <invalidKeys>"
]
},
"_LEGACY_ERROR_TEMP_3087" : {
"message" : [
"Cannot set or change the preserved property key: 'EXTERNAL'"
]
},
"_LEGACY_ERROR_TEMP_3088" : {
"message" : [
"The metadata is corrupted. Unable to find the partition column names from the schema. schema: <schema>. Partition columns: <partColumnNames>"
]
},
"_LEGACY_ERROR_TEMP_3089" : {
"message" : [
"Corrupted <typeName> in catalog: <numCols> parts expected, but part <index> is missing."
]
},
"_LEGACY_ERROR_TEMP_3090" : {
"message" : [
"Raw list type in java is unsupported because Spark cannot infer the element type."
]
},
"_LEGACY_ERROR_TEMP_3091" : {
"message" : [
"Raw map type in java is unsupported because Spark cannot infer key and value types."
]
},
"_LEGACY_ERROR_TEMP_3092" : {
"message" : [
"Collection types with wildcards (e.g. List<?> or Map<?, ?>) are unsupported because Spark cannot infer the data type for these type parameters."
]
},
"_LEGACY_ERROR_TEMP_3093" : {
"message" : [
"Unsupported java type <c>"
]
},
"_LEGACY_ERROR_TEMP_3094" : {
"message" : [
"<dt> is not supported."
]
},
"_LEGACY_ERROR_TEMP_3095" : {
"message" : [
"<dt> cannot be converted to Hive TypeInfo"
]
},
"_LEGACY_ERROR_TEMP_3096" : {
"message" : [
"Converted table has <resLen> columns,",
"but source Hive table has <relLen> columns.",
"Set <key> to false,",
"or recreate table <ident> to workaround."
]
},
"_LEGACY_ERROR_TEMP_3097" : {
"message" : [
"Column in converted table has different data type with source Hive table's.",
"Set <key> to false,",
"or recreate table <ident> to workaround."
]
},
"_LEGACY_ERROR_TEMP_3100" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_3101" : {
"message" : [
"The input is not a correct window column: <windowTime>"
]
},
"_LEGACY_ERROR_TEMP_3102" : {
"message" : [
"<msg>"
]
},
"_LEGACY_ERROR_TEMP_3103" : {
"message" : [
"Namespace '<namespace>' is non empty. <details>"
]
},
"_LEGACY_ERROR_TEMP_3104" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_3105" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_3106" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_3107" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_3108" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_3109" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_3110" : {
"message" : [
"Cannot bind a V1 function."
]
},
"_LEGACY_ERROR_TEMP_3113" : {
"message" : [
"UnresolvedTableSpec doesn't have a data type"
]
},
"_LEGACY_ERROR_TEMP_3114" : {
"message" : [
"UnresolvedTableSpec doesn't have a data type"
]
},
"_LEGACY_ERROR_TEMP_3121" : {
"message" : [
"A HllSketch instance cannot be updates with a Spark <dataType> type"
]
},
"_LEGACY_ERROR_TEMP_3129" : {
"message" : [
"Cannot convert this array to unsafe format as it's too big."
]
},
"_LEGACY_ERROR_TEMP_3130" : {
"message" : [
"Cannot create BufferHolder for input UnsafeRow because there are too many fields (number of fields: <numFields>)"
]
},
"_LEGACY_ERROR_TEMP_3131" : {
"message" : [
"Unsupported data type <dataType>"
]
},
"_LEGACY_ERROR_TEMP_3132" : {
"message" : [
"CaseInsensitiveStringMap is read-only."
]
},
"_LEGACY_ERROR_TEMP_3133" : {
"message" : [
"<class> does not implement rowIdSchema"
]
},
"_LEGACY_ERROR_TEMP_3134" : {
"message" : [
"<class> does not implement metadataSchema"
]
},
"_LEGACY_ERROR_TEMP_3135" : {
"message" : [
"<class> does not support batch write"
]
},
"_LEGACY_ERROR_TEMP_3136" : {
"message" : [
"<class> does not support streaming write"
]
},
"_LEGACY_ERROR_TEMP_3137" : {
"message" : [
"<description>: Batch write is not supported"
]
},
"_LEGACY_ERROR_TEMP_3138" : {
"message" : [
"<description>: Streaming write is not supported"
]
},
"_LEGACY_ERROR_TEMP_3139" : {
"message" : [
"<description>: Delta batch write is not supported"
]
},
"_LEGACY_ERROR_TEMP_3140" : {
"message" : [
"<class> does not implement build"
]
},
"_LEGACY_ERROR_TEMP_3141" : {
"message" : [
"<class> does not support user defined function: <funcName>"
]
},
"_LEGACY_ERROR_TEMP_3142" : {
"message" : [
"<class> does not support user defined aggregate function: <funcName>"
]
},
"_LEGACY_ERROR_TEMP_3143" : {
"message" : [
"Partition renaming is not supported"
]
},
"_LEGACY_ERROR_TEMP_3144" : {
"message" : [
"Partition truncate is not supported"
]
},
"_LEGACY_ERROR_TEMP_3145" : {
"message" : [
"Partitions truncate is not supported"
]
},
"_LEGACY_ERROR_TEMP_3146" : {
"message" : [
"Cannot find a compatible ScalarFunction#produceResult"
]
},
"_LEGACY_ERROR_TEMP_3147" : {
"message" : [
"<description>: Batch scan are not supported"
]
},
"_LEGACY_ERROR_TEMP_3148" : {
"message" : [
"<description>: Micro-batch scan are not supported"
]
},
"_LEGACY_ERROR_TEMP_3149" : {
"message" : [
"<description>: Continuous scan are not supported"
]
},
"_LEGACY_ERROR_TEMP_3150" : {
"message" : [
"Cannot create columnar reader."
]
},
"_LEGACY_ERROR_TEMP_3152" : {
"message" : [
"Datatype not supported <dataType>"
]
},
"_LEGACY_ERROR_TEMP_3155" : {
"message" : [
"Datatype not supported <dataType>"
]
},
"_LEGACY_ERROR_TEMP_3160" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3161" : {
"message" : [
"Uploading artifact file to local file system destination path is not supported."
]
},
"_LEGACY_ERROR_TEMP_3162" : {
"message" : [
"Unsupported physical type <type>."
]
},
"_LEGACY_ERROR_TEMP_3163" : {
"message" : [
"Unsupported number of children: <num>."
]
},
"_LEGACY_ERROR_TEMP_3165" : {
"message" : [
"Cannot merge <classA> with <classB>"
]
},
"_LEGACY_ERROR_TEMP_3166" : {
"message" : [
"latestOffset(Offset, ReadLimit) should be called instead of this method"
]
},
"_LEGACY_ERROR_TEMP_3167" : {
"message" : [
"continuous mode is not supported!"
]
},
"_LEGACY_ERROR_TEMP_3168" : {
"message" : [
"hasTimedOut is true however there's no timeout configured"
]
},
"_LEGACY_ERROR_TEMP_3169" : {
"message" : [
"AcceptsLatestSeenOffset is not supported with DSv1 streaming source: <unsupportedSources>"
]
},
"_LEGACY_ERROR_TEMP_3170" : {
"message" : [
"SortAggregate code-gen does not support grouping keys"
]
},
"_LEGACY_ERROR_TEMP_3171" : {
"message" : [
"Number of nulls not set for Parquet file <filePath>. Set SQLConf <config> to false and execute again."
]
},
"_LEGACY_ERROR_TEMP_3172" : {
"message" : [
"No min/max found for Parquet file <filePath>. Set SQLConf <config> to false and execute again."
]
},
"_LEGACY_ERROR_TEMP_3173" : {
"message" : [
"Cannot specify 'USING index_type' in 'CREATE INDEX'"
]
},
"_LEGACY_ERROR_TEMP_3175" : {
"message" : [
"Index Type <v> is not supported. The supported Index Types are: <supportedIndexTypeList>"
]
},
"_LEGACY_ERROR_TEMP_3176" : {
"message" : [
"applyInPandasWithState is unsupported in batch query. Use applyInPandas instead."
]
},
"_LEGACY_ERROR_TEMP_3177" : {
"message" : [
"<class> does not support function: <funcName>"
]
},
"_LEGACY_ERROR_TEMP_3178" : {
"message" : [
"<class> does not support inverse distribution function: <funcName>"
]
},
"_LEGACY_ERROR_TEMP_3179" : {
"message" : [
"createIndex is not supported"
]
},
"_LEGACY_ERROR_TEMP_3180" : {
"message" : [
"indexExists is not supported"
]
},
"_LEGACY_ERROR_TEMP_3181" : {
"message" : [
"dropIndex is not supported"
]
},
"_LEGACY_ERROR_TEMP_3182" : {
"message" : [
"listIndexes is not supported"
]
},
"_LEGACY_ERROR_TEMP_3183" : {
"message" : [
"TableSample is not supported by this data source"
]
},
"_LEGACY_ERROR_TEMP_3184" : {
"message" : [
"<class> does not support aggregate function: <funcName> with DISTINCT"
]
},
"_LEGACY_ERROR_TEMP_3185" : {
"message" : [
"Schema evolution not supported."
]
},
"_LEGACY_ERROR_TEMP_3186" : {
"message" : [
"Boolean is not supported"
]
},
"_LEGACY_ERROR_TEMP_3187" : {
"message" : [
"only readInts is valid."
]
},
"_LEGACY_ERROR_TEMP_3188" : {
"message" : [
"only skipIntegers is valid"
]
},
"_LEGACY_ERROR_TEMP_3189" : {
"message" : [
"Unsupported encoding: <encoding>"
]
},
"_LEGACY_ERROR_TEMP_3190" : {
"message" : [
"RLE encoding is not supported for values of type: <typeName>"
]
},
"_LEGACY_ERROR_TEMP_3191" : {
"message" : [
"Dictionary encoding does not support String"
]
},
"_LEGACY_ERROR_TEMP_3192" : {
"message" : [
"Datatype not supported <dt>"
]
},
"_LEGACY_ERROR_TEMP_3198" : {
"message" : [
"Cannot grow BufferHolder by size <neededSize> because the size is negative"
]
},
"_LEGACY_ERROR_TEMP_3199" : {
"message" : [
"Cannot grow BufferHolder by size <neededSize> because the size after growing exceeds size limitation <arrayMax>"
]
},
"_LEGACY_ERROR_TEMP_3200" : {
"message" : [
"Read-ahead limit < 0"
]
},
"_LEGACY_ERROR_TEMP_3201" : {
"message" : [
"'note' is malformed in the expression [<exprName>]. It should start with a newline and 4 leading spaces; end with a newline and two spaces; however, got [<note>]."
]
},
"_LEGACY_ERROR_TEMP_3202" : {
"message" : [
"'group' is malformed in the expression [<exprName>]. It should be a value in <validGroups>; however, got <group>."
]
},
"_LEGACY_ERROR_TEMP_3203" : {
"message" : [
"'source' is malformed in the expression [<exprName>]. It should be a value in <validSources>; however, got [<source>]."
]
},
"_LEGACY_ERROR_TEMP_3204" : {
"message" : [
"'since' is malformed in the expression [<exprName>]. It should not start with a negative number; however, got [<since>]."
]
},
"_LEGACY_ERROR_TEMP_3205" : {
"message" : [
"'deprecated' is malformed in the expression [<exprName>]. It should start with a newline and 4 leading spaces; end with a newline and two spaces; however, got [<deprecated>]."
]
},
"_LEGACY_ERROR_TEMP_3206" : {
"message" : [
"<value> is not a boolean string."
]
},
"_LEGACY_ERROR_TEMP_3207" : {
"message" : [
"Unexpected V2 expression: <expr>"
]
},
"_LEGACY_ERROR_TEMP_3208" : {
"message" : [
"The number of fields (<numFields>) in the partition identifier is not equal to the partition schema length (<schemaLen>). The identifier might not refer to one partition."
]
},
"_LEGACY_ERROR_TEMP_3209" : {
"message" : [
"Illegal input for day of week: <string>"
]
},
"_LEGACY_ERROR_TEMP_3210" : {
"message" : [
"Interval string does not match second-nano format of ss.nnnnnnnnn"
]
},
"_LEGACY_ERROR_TEMP_3211" : {
"message" : [
"Error parsing interval day-time string: <msg>"
]
},
"_LEGACY_ERROR_TEMP_3212" : {
"message" : [
"Cannot support (interval '<input>' <from> to <to>) expression"
]
},
"_LEGACY_ERROR_TEMP_3213" : {
"message" : [
"Error parsing interval <interval> string: <msg>"
]
},
"_LEGACY_ERROR_TEMP_3214" : {
"message" : [
"Interval string does not match <intervalStr> format of <supportedFormat> when cast to <typeName>: <input><fallBackNotice>"
]
},
"_LEGACY_ERROR_TEMP_3215" : {
"message" : [
"Expected a Boolean type expression in replaceNullWithFalse, but got the type <dataType> in <expr>."
]
},
"_LEGACY_ERROR_TEMP_3216" : {
"message" : [
"Unsupported join type '<typ>'. Supported join types include: <supported>."
]
},
"_LEGACY_ERROR_TEMP_3217" : {
"message" : [
"Unsupported as-of join direction '<direction>'. Supported as-of join direction include: <supported>."
]
},
"_LEGACY_ERROR_TEMP_3218" : {
"message" : [
"Must be 2 children: <others>"
]
},
"_LEGACY_ERROR_TEMP_3219" : {
"message" : [
"The value (<other>) of the type (<otherClass>) cannot be converted to the <dataType> type."
]
},
"_LEGACY_ERROR_TEMP_3220" : {
"message" : [
"The value (<other>) of the type (<otherClass>) cannot be converted to an array of <elementType>"
]
},
"_LEGACY_ERROR_TEMP_3221" : {
"message" : [
"The value (<other>) of the type (<otherClass>) cannot be converted to a map type with key type (<keyType>) and value type (<valueType>)"
]
},
"_LEGACY_ERROR_TEMP_3222" : {
"message" : [
"Only literals are allowed in the partition spec, but got <expr>"
]
},
"_LEGACY_ERROR_TEMP_3223" : {
"message" : [
"Cannot find field: <name> in <dataType>"
]
},
"_LEGACY_ERROR_TEMP_3224" : {
"message" : [
"Cannot delete array element"
]
},
"_LEGACY_ERROR_TEMP_3225" : {
"message" : [
"Cannot delete map value"
]
},
"_LEGACY_ERROR_TEMP_3226" : {
"message" : [
"Cannot delete map key"
]
},
"_LEGACY_ERROR_TEMP_3227" : {
"message" : [
"Cannot find field: <fieldName>"
]
},
"_LEGACY_ERROR_TEMP_3228" : {
"message" : [
"AFTER column not found: <afterCol>"
]
},
"_LEGACY_ERROR_TEMP_3229" : {
"message" : [
"Not a struct: <name>"
]
},
"_LEGACY_ERROR_TEMP_3230" : {
"message" : [
"Field not found: <name>"
]
},
"_LEGACY_ERROR_TEMP_3231" : {
"message" : [
"Intervals greater than a month is not supported (<interval>)."
]
},
"_LEGACY_ERROR_TEMP_3232" : {
"message" : [
"Unknown EvalMode value: <other>"
]
},
"_LEGACY_ERROR_TEMP_3233" : {
"message" : [
"cannot generate code for unsupported type: <dataType>"
]
},
"_LEGACY_ERROR_TEMP_3235" : {
"message" : [
"The numbers of zipped arrays and field names should be the same"
]
},
"_LEGACY_ERROR_TEMP_3238" : {
"message" : [
"Failed to convert value <v> (class of <class>) in type <dt> to XML."
]
},
"_LEGACY_ERROR_TEMP_3239" : {
"message" : [
"Failed to parse data with unexpected event <e>"
]
},
"_LEGACY_ERROR_TEMP_3240" : {
"message" : [
"Failed to parse a value for data type <dt> with event <e>"
]
},
"_LEGACY_ERROR_TEMP_3241" : {
"message" : [
"<msg>"
]
},
"_LEGACY_ERROR_TEMP_3242" : {
"message" : [
"sequence step must be an <intervalType> of day granularity if start and end values are dates"
]
},
"_LEGACY_ERROR_TEMP_3243" : {
"message" : [
"Illegal sequence boundaries: <start> to <stop> by <step>"
]
},
"_LEGACY_ERROR_TEMP_3244" : {
"message" : [
"Unsupported type: <castType>"
]
},
"_LEGACY_ERROR_TEMP_3245" : {
"message" : [
"For input string: <s>"
]
},
"_LEGACY_ERROR_TEMP_3246" : {
"message" : [
"Failed to parse a value for data type <dataType>."
]
},
"_LEGACY_ERROR_TEMP_3250" : {
"message" : [
"Failed to convert the JSON string '<other>' to a field."
]
},
"_LEGACY_ERROR_TEMP_3260" : {
"message" : [
"'<s>' is an invalid timestamp"
]
},
"_LEGACY_ERROR_TEMP_3261" : {
"message" : [
"Unknown output mode <outputMode>. Accepted output modes are 'append', 'complete', 'update'"
]
},
"_LEGACY_ERROR_TEMP_3262" : {
"message" : [
"Doesn't support month or year interval: <interval>"
]
},
"_LEGACY_ERROR_USER_RAISED_EXCEPTION" : {
"message" : [
"<errorMessage>"
],
"sqlState" : "P0001"
}
}