{
"ADD_DEFAULT_UNSUPPORTED" : {
"message" : [
"Failed to execute <statementType> command because DEFAULT values are not supported when adding new columns to previously existing target data source with table provider: \"<dataSource>\"."
],
"sqlState" : "42623"
},
"AGGREGATE_FUNCTION_WITH_NONDETERMINISTIC_EXPRESSION" : {
"message" : [
"Non-deterministic expression <sqlExpr> should not appear in the arguments of an aggregate function."
],
"sqlState" : "42845"
},
"AGGREGATE_OUT_OF_MEMORY" : {
"message" : [
"No enough memory for aggregation"
],
"sqlState" : "82001"
},
"ALL_PARAMETERS_MUST_BE_NAMED" : {
"message" : [
"Using name parameterized queries requires all parameters to be named. Parameters missing names: <exprs>."
],
"sqlState" : "07001"
},
"ALL_PARTITION_COLUMNS_NOT_ALLOWED" : {
"message" : [
"Cannot use all columns for partition columns."
],
"sqlState" : "KD005"
},
"ALTER_TABLE_COLUMN_DESCRIPTOR_DUPLICATE" : {
"message" : [
"ALTER TABLE <type> column <columnName> specifies descriptor \"<optionName>\" more than once, which is invalid."
],
"sqlState" : "42710"
},
"AMBIGUOUS_ALIAS_IN_NESTED_CTE" : {
"message" : [
"Name <name> is ambiguous in nested CTE.",
"Please set <config> to \"CORRECTED\" so that name defined in inner CTE takes precedence. If set it to \"LEGACY\", outer CTE definitions will take precedence.",
"See '<docroot>/sql-migration-guide.html#query-engine'."
],
"sqlState" : "42KD0"
},
"AMBIGUOUS_COLUMN_OR_FIELD" : {
"message" : [
"Column or field <name> is ambiguous and has <n> matches."
],
"sqlState" : "42702"
},
"AMBIGUOUS_COLUMN_REFERENCE" : {
"message" : [
"Column <name> is ambiguous. It's because you joined several DataFrame together, and some of these DataFrames are the same.",
"This column points to one of the DataFrames but Spark is unable to figure out which one.",
"Please alias the DataFrames with different names via `DataFrame.alias` before joining them,",
"and specify the column using qualified name, e.g. `df.alias(\"a\").join(df.alias(\"b\"), col(\"a.id\") > col(\"b.id\"))`."
],
"sqlState" : "42702"
},
"AMBIGUOUS_LATERAL_COLUMN_ALIAS" : {
"message" : [
"Lateral column alias <name> is ambiguous and has <n> matches."
],
"sqlState" : "42702"
},
"AMBIGUOUS_REFERENCE" : {
"message" : [
"Reference <name> is ambiguous, could be: <referenceNames>."
],
"sqlState" : "42704"
},
"AMBIGUOUS_REFERENCE_TO_FIELDS" : {
"message" : [
"Ambiguous reference to the field <field>. It appears <count> times in the schema."
],
"sqlState" : "42000"
},
"AMBIGUOUS_RESOLVER_EXTENSION" : {
"message" : [
"The single-pass analyzer cannot process this query or command because the extension choice for <operator> is ambiguous: <extensions>."
],
"sqlState" : "XX000"
},
"APPEND_ONCE_FROM_BATCH_QUERY" : {
"message" : [
"Creating a streaming table from a batch query prevents incremental loading of new data from source. Offending table: '<table>'.",
"Please use the stream() operator. Example usage:",
"CREATE STREAMING TABLE <target table name> ... AS SELECT ... FROM stream(<source table name>) ..."
],
"sqlState" : "42000"
},
"APPROX_TOP_K_MAX_ITEMS_TRACKED_EXCEEDS_LIMIT" : {
"message" : [
"The max items tracked `maxItemsTracked`(<maxItemsTracked>) of `approx_top_k` should be less than or equal to <limit>."
],
"sqlState" : "22023"
},
"APPROX_TOP_K_MAX_ITEMS_TRACKED_LESS_THAN_K" : {
"message" : [
"The max items tracked `maxItemsTracked`(<maxItemsTracked>) of `approx_top_k` should be greater than or equal to `k`(<k>)."
],
"sqlState" : "22023"
},
"APPROX_TOP_K_NON_POSITIVE_ARG" : {
"message" : [
"The value of <argName> in `approx_top_k` must be a positive integer, but got <argValue>."
],
"sqlState" : "22023"
},
"APPROX_TOP_K_NULL_ARG" : {
"message" : [
"The value of <argName> in `approx_top_k` cannot be NULL."
],
"sqlState" : "22004"
},
"APPROX_TOP_K_SKETCH_SIZE_NOT_MATCH" : {
"message" : [
"Combining approx_top_k sketches of different sizes is not allowed. Found sketches of size <size1> and <size2>."
],
"sqlState" : "42846"
},
"APPROX_TOP_K_SKETCH_TYPE_NOT_MATCH" : {
"message" : [
"Combining approx_top_k sketches of different types is not allowed. Found sketches of type <type1> and <type2>."
],
"sqlState" : "42846"
},
"ARITHMETIC_OVERFLOW" : {
"message" : [
"<message>.<alternative> If necessary set <config> to \"false\" to bypass this error."
],
"sqlState" : "22003"
},
"ARROW_TYPE_MISMATCH" : {
"message" : [
"Invalid schema from <operation>: expected <outputTypes>, got <actualDataTypes>."
],
"sqlState" : "42K0G"
},
"ARTIFACT_ALREADY_EXISTS" : {
"message" : [
"The artifact <normalizedRemoteRelativePath> already exists. Please choose a different name for the new artifact because it cannot be overwritten."
],
"sqlState" : "42713"
},
"ASSIGNMENT_ARITY_MISMATCH" : {
"message" : [
"The number of columns or variables assigned or aliased: <numTarget> does not match the number of source expressions: <numExpr>."
],
"sqlState" : "42802"
},
"AS_OF_JOIN" : {
"message" : [
"Invalid as-of join."
],
"subClass" : {
"TOLERANCE_IS_NON_NEGATIVE" : {
"message" : [
"The input argument `tolerance` must be non-negative."
]
},
"TOLERANCE_IS_UNFOLDABLE" : {
"message" : [
"The input argument `tolerance` must be a constant."
]
},
"UNSUPPORTED_DIRECTION" : {
"message" : [
"Unsupported as-of join direction '<direction>'. Supported as-of join direction include: <supported>."
]
}
},
"sqlState" : "42604"
},
"ATTEMPT_ANALYSIS_IN_PIPELINE_QUERY_FUNCTION" : {
"message" : [
"Operations that trigger DataFrame analysis or execution are not allowed in pipeline query functions. Move code outside of the pipeline query function."
],
"sqlState" : "0A000"
},
"AVRO_CANNOT_WRITE_NULL_FIELD" : {
"message" : [
"Cannot write null value for field <name> defined as non-null Avro data type <dataType>.",
"To allow null value for this field, specify its avro schema as a union type with \"null\" using `avroSchema` option."
],
"sqlState" : "22004"
},
"AVRO_INCOMPATIBLE_READ_TYPE" : {
"message" : [
"Cannot convert Avro <avroPath> to SQL <sqlPath> because the original encoded data type is <avroType>, however you're trying to read the field as <sqlType>, which would lead to an incorrect answer.",
"To allow reading this field, enable the SQL configuration: \"spark.sql.legacy.avro.allowIncompatibleSchema\"."
],
"sqlState" : "22KD3"
},
"AVRO_NOT_LOADED_SQL_FUNCTIONS_UNUSABLE" : {
"message" : [
"Cannot call the <functionName> SQL function because the Avro data source is not loaded.",
"Please restart your job or session with the 'spark-avro' package loaded, such as by using the --packages argument on the command line, and then retry your query or command again."
],
"sqlState" : "22KD3"
},
"BATCH_METADATA_NOT_FOUND" : {
"message" : [
"Unable to find batch <batchMetadataFile>."
],
"sqlState" : "42K03"
},
"BINARY_ARITHMETIC_OVERFLOW" : {
"message" : [
"<value1> <symbol> <value2> caused overflow. Use <functionName> to ignore overflow problem and return NULL."
],
"sqlState" : "22003"
},
"CALL_ON_STREAMING_DATASET_UNSUPPORTED" : {
"message" : [
"The method <methodName> can not be called on streaming Dataset/DataFrame."
],
"sqlState" : "42KDE"
},
"CANNOT_ALTER_COLLATION_BUCKET_COLUMN" : {
"message" : [
"ALTER TABLE (ALTER|CHANGE) COLUMN cannot change collation of type/subtypes of bucket columns, but found the bucket column <columnName> in the table <tableName>."
],
"sqlState" : "428FR"
},
"CANNOT_ALTER_PARTITION_COLUMN" : {
"message" : [
"ALTER TABLE (ALTER|CHANGE) COLUMN is not supported for partition columns, but found the partition column <columnName> in the table <tableName>."
],
"sqlState" : "428FR"
},
"CANNOT_ASSIGN_EVENT_TIME_COLUMN_WITHOUT_WATERMARK" : {
"message" : [
"Watermark needs to be defined to reassign event time column. Failed to find watermark definition in the streaming query."
],
"sqlState" : "42611"
},
"CANNOT_CAST_DATATYPE" : {
"message" : [
"Cannot cast <sourceType> to <targetType>."
],
"sqlState" : "42846"
},
"CANNOT_CONVERT_PROTOBUF_FIELD_TYPE_TO_SQL_TYPE" : {
"message" : [
"Cannot convert Protobuf <protobufColumn> to SQL <sqlColumn> because schema is incompatible (protobufType = <protobufType>, sqlType = <sqlType>)."
],
"sqlState" : "42846"
},
"CANNOT_CONVERT_PROTOBUF_MESSAGE_TYPE_TO_SQL_TYPE" : {
"message" : [
"Unable to convert <protobufType> of Protobuf to SQL type <toType>."
],
"sqlState" : "42846"
},
"CANNOT_CONVERT_SQL_TYPE_TO_PROTOBUF_FIELD_TYPE" : {
"message" : [
"Cannot convert SQL <sqlColumn> to Protobuf <protobufColumn> because schema is incompatible (protobufType = <protobufType>, sqlType = <sqlType>)."
],
"sqlState" : "42846"
},
"CANNOT_CONVERT_SQL_VALUE_TO_PROTOBUF_ENUM_TYPE" : {
"message" : [
"Cannot convert SQL <sqlColumn> to Protobuf <protobufColumn> because <data> is not in defined values for enum: <enumString>."
],
"sqlState" : "42846"
},
"CANNOT_CREATE_DATA_SOURCE_TABLE" : {
"message" : [
"Failed to create data source table <tableName>:"
],
"subClass" : {
"EXTERNAL_METADATA_UNSUPPORTED" : {
"message" : [
"provider '<provider>' does not support external metadata but a schema is provided. Please remove the schema when creating the table."
]
}
},
"sqlState" : "42KDE"
},
"CANNOT_DECODE_URL" : {
"message" : [
"The provided URL cannot be decoded: <url>. Please ensure that the URL is properly formatted and try again."
],
"sqlState" : "22546"
},
"CANNOT_INVOKE_IN_TRANSFORMATIONS" : {
"message" : [
"Dataset transformations and actions can only be invoked by the driver, not inside of other Dataset transformations; for example, dataset1.map(x => dataset2.values.count() * x) is invalid because the values transformation and count action cannot be performed inside of the dataset1.map transformation. For more information, see SPARK-28702."
],
"sqlState" : "0A000"
},
"CANNOT_LOAD_CHECKPOINT_FILE_MANAGER" : {
"message" : [
"Error loading streaming checkpoint file manager for path=<path>."
],
"subClass" : {
"ERROR_LOADING_CLASS" : {
"message" : [
"Error instantiating streaming checkpoint file manager for path=<path> with className=<className>. msg=<msg>."
]
},
"UNCATEGORIZED" : {
"message" : [
""
]
}
},
"sqlState" : "58030"
},
"CANNOT_LOAD_FUNCTION_CLASS" : {
"message" : [
"Cannot load class <className> when registering the function <functionName>, please make sure it is on the classpath."
],
"sqlState" : "46103"
},
"CANNOT_LOAD_PROTOBUF_CLASS" : {
"message" : [
"Could not load Protobuf class with name <protobufClassName>. <explanation>."
],
"sqlState" : "42K03"
},
"CANNOT_LOAD_STATE_STORE" : {
"message" : [
"An error occurred during loading state."
],
"subClass" : {
"AUTO_SNAPSHOT_REPAIR_FAILED" : {
"message" : [
"Failed to load snapshot version <latestSnapshot> for state store <stateStoreId>. An attempt to auto repair using snapshot versions (<selectedSnapshots>) out of available snapshots (<eligibleSnapshots>) also failed."
]
},
"CANNOT_FIND_BASE_SNAPSHOT_CHECKPOINT" : {
"message" : [
"Cannot find a base snapshot checkpoint with lineage: <lineage>."
]
},
"CANNOT_READ_CHECKPOINT" : {
"message" : [
"Cannot read RocksDB checkpoint metadata. Expected <expectedVersion>, but found <actualVersion>."
]
},
"CANNOT_READ_DELTA_FILE_KEY_SIZE" : {
"message" : [
"Error reading delta file <fileToRead> of <clazz>: key size cannot be <keySize>."
]
},
"CANNOT_READ_DELTA_FILE_NOT_EXISTS" : {
"message" : [
"Error reading delta file <fileToRead> of <clazz>: <fileToRead> does not exist."
]
},
"CANNOT_READ_MISSING_SNAPSHOT_FILE" : {
"message" : [
"Error reading snapshot file <fileToRead> of <clazz>: <fileToRead> does not exist."
]
},
"CANNOT_READ_SNAPSHOT_FILE_KEY_SIZE" : {
"message" : [
"Error reading snapshot file <fileToRead> of <clazz>: key size cannot be <keySize>."
]
},
"CANNOT_READ_SNAPSHOT_FILE_VALUE_SIZE" : {
"message" : [
"Error reading snapshot file <fileToRead> of <clazz>: value size cannot be <valueSize>."
]
},
"CANNOT_READ_STREAMING_STATE_FILE" : {
"message" : [
"Error reading streaming state file of <fileToRead> does not exist. If the stream job is restarted with a new or updated state operation, please create a new checkpoint location or clear the existing checkpoint location."
]
},
"FAILED_TO_GET_CHANGELOG_WRITER" : {
"message" : [
"Failed to get the changelog writer for state store at version <version>."
]
},
"HDFS_STORE_PROVIDER_OUT_OF_MEMORY" : {
"message" : [
"Could not load HDFS state store with id <stateStoreId> because of an out of memory exception."
]
},
"INVALID_CHANGE_LOG_READER_VERSION" : {
"message" : [
"The change log reader version cannot be <version>. The checkpoint probably is from a future Spark version, please upgrade your Spark."
]
},
"INVALID_CHANGE_LOG_WRITER_VERSION" : {
"message" : [
"The change log writer version cannot be <version>."
]
},
"INVALID_CHECKPOINT_LINEAGE" : {
"message" : [
"Invalid checkpoint lineage: <lineage>. <message>"
]
},
"KEY_ROW_FORMAT_VALIDATION_FAILURE" : {
"message" : [
"<msg>"
]
},
"ROCKSDB_STORE_PROVIDER_OUT_OF_MEMORY" : {
"message" : [
"Could not load RocksDB state store with id <stateStoreId> because of an out of memory exception."
]
},
"SNAPSHOT_PARTITION_ID_NOT_FOUND" : {
"message" : [
"Partition id <snapshotPartitionId> not found for state of operator <operatorId> at <checkpointLocation>."
]
},
"UNCATEGORIZED" : {
"message" : [
""
]
},
"UNEXPECTED_FILE_SIZE" : {
"message" : [
"Copied <dfsFile> to <localFile>, expected <expectedSize> bytes, found <localFileSize> bytes."
]
},
"UNEXPECTED_VERSION" : {
"message" : [
"Version cannot be <version> because it is less than 0."
]
},
"UNRELEASED_THREAD_ERROR" : {
"message" : [
"<loggingId>: RocksDB instance could not be acquired by <newAcquiredThreadInfo> for operationType=<operationType> as it was not released by <acquiredThreadInfo> after <timeWaitedMs> ms.",
"Thread holding the lock has trace: <stackTraceOutput>"
]
},
"VALUE_ROW_FORMAT_VALIDATION_FAILURE" : {
"message" : [
"<msg>"
]
}
},
"sqlState" : "58030"
},
"CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE" : {
"message" : [
"Failed to merge incompatible data types <left> and <right>. Please check the data types of the columns being merged and ensure that they are compatible. If necessary, consider casting the columns to compatible data types before attempting the merge."
],
"sqlState" : "42825"
},
"CANNOT_MERGE_SCHEMAS" : {
"message" : [
"Failed merging schemas:",
"Initial schema:",
"<left>",
"Schema that cannot be merged with the initial schema:",
"<right>."
],
"sqlState" : "42KD9"
},
"CANNOT_MODIFY_CONFIG" : {
"message" : [
"Cannot modify the value of the Spark config: <key>.",
"See also '<docroot>/sql-migration-guide.html#ddl-statements'."
],
"sqlState" : "46110"
},
"CANNOT_MODIFY_STATIC_CONFIG" : {
"message" : [
"Cannot modify the value of the static Spark config: <key>."
],
"sqlState" : "46110"
},
"CANNOT_PARSE_DECIMAL" : {
"message" : [
"Cannot parse decimal. Please ensure that the input is a valid number with optional decimal point or comma separators."
],
"sqlState" : "22018"
},
"CANNOT_PARSE_INTERVAL" : {
"message" : [
"Unable to parse <intervalString>. Please ensure that the value provided is in a valid format for defining an interval. You can reference the documentation for the correct format. If the issue persists, please double check that the input value is not null or empty and try again."
],
"sqlState" : "22006"
},
"CANNOT_PARSE_JSON_FIELD" : {
"message" : [
"Cannot parse the field name <fieldName> and the value <fieldValue> of the JSON token type <jsonType> to target Spark data type <dataType>."
],
"sqlState" : "2203G"
},
"CANNOT_PARSE_PROTOBUF_DESCRIPTOR" : {
"message" : [
"Error parsing descriptor bytes into Protobuf FileDescriptorSet."
],
"sqlState" : "22018"
},
"CANNOT_PARSE_TIME" : {
"message" : [
"The input string <input> cannot be parsed to a TIME value because it does not match to the datetime format <format>."
],
"sqlState" : "22010"
},
"CANNOT_PARSE_TIMESTAMP" : {
"message" : [
"<message>. Use <func> to tolerate invalid input string and return NULL instead."
],
"sqlState" : "22007"
},
"CANNOT_RECOGNIZE_HIVE_TYPE" : {
"message" : [
"Cannot recognize hive type string: <fieldType>, column: <fieldName>. The specified data type for the field cannot be recognized by Spark SQL. Please check the data type of the specified field and ensure that it is a valid Spark SQL data type. Refer to the Spark SQL documentation for a list of valid data types and their format. If the data type is correct, please ensure that you are using a supported version of Spark SQL."
],
"sqlState" : "429BB"
},
"CANNOT_REMOVE_RESERVED_PROPERTY" : {
"message" : [
"Cannot remove reserved property: <property>."
],
"sqlState" : "42000"
},
"CANNOT_RENAME_ACROSS_SCHEMA" : {
"message" : [
"Renaming a <type> across schemas is not allowed."
],
"sqlState" : "0AKD0"
},
"CANNOT_RESOLVE_DATAFRAME_COLUMN" : {
"message" : [
"Cannot resolve dataframe column <name>. It's probably because of illegal references like `df1.select(df2.col(\"a\"))`."
],
"sqlState" : "42704"
},
"CANNOT_RESOLVE_STAR_EXPAND" : {
"message" : [
"Cannot resolve <targetString>.* given input columns <columns>. Please check that the specified table or struct exists and is accessible in the input columns."
],
"sqlState" : "42704"
},
"CANNOT_RESTORE_PERMISSIONS_FOR_PATH" : {
"message" : [
"Failed to set permissions on created path <path> back to <permission>."
],
"sqlState" : "58030"
},
"CANNOT_UPDATE_FIELD" : {
"message" : [
"Cannot update <table> field <fieldName> type:"
],
"subClass" : {
"ARRAY_TYPE" : {
"message" : [
"Update the element by updating <fieldName>.element."
]
},
"INTERVAL_TYPE" : {
"message" : [
"Update an interval by updating its fields."
]
},
"MAP_TYPE" : {
"message" : [
"Update a map by updating <fieldName>.key or <fieldName>.value."
]
},
"STRUCT_TYPE" : {
"message" : [
"Update a struct by updating its fields."
]
},
"USER_DEFINED_TYPE" : {
"message" : [
"Update a UserDefinedType[<udtSql>] by updating its fields."
]
}
},
"sqlState" : "0A000"
},
"CANNOT_UPDATE_PARTITION_COLUMNS" : {
"message" : [
"Declared partitioning <requestedPartitionColumns> conflicts with existing table partitioning <existingPartitionColumns>.",
"Please delete the table or change the declared partitioning to match its partitions."
],
"sqlState" : "42000"
},
"CANNOT_UP_CAST_DATATYPE" : {
"message" : [
"Cannot up cast <expression> from <sourceType> to <targetType>.",
"<details>"
],
"sqlState" : "42846"
},
"CANNOT_USE_KRYO" : {
"message" : [
"Cannot load Kryo serialization codec. Kryo serialization cannot be used in the Spark Connect client. Use Java serialization, provide a custom Codec, or use Spark Classic instead."
],
"sqlState" : "22KD3"
},
"CANNOT_USE_MULTI_ALIASES_IN_WATERMARK_CLAUSE" : {
"message" : [
"Multiple aliases are not supported in watermark clause."
],
"sqlState" : "42000"
},
"CANNOT_WRITE_STATE_STORE" : {
"message" : [
"Error writing state store files for provider <providerClass>."
],
"subClass" : {
"CANNOT_COMMIT" : {
"message" : [
"Cannot perform commit during state checkpoint."
]
}
},
"sqlState" : "58030"
},
"CAST_INVALID_INPUT" : {
"message" : [
"The value <expression> of the type <sourceType> cannot be cast to <targetType> because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead."
],
"sqlState" : "22018"
},
"CAST_OVERFLOW" : {
"message" : [
"The value <value> of the type <sourceType> cannot be cast to <targetType> due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead."
],
"sqlState" : "22003"
},
"CAST_OVERFLOW_IN_TABLE_INSERT" : {
"message" : [
"Fail to assign a value of <sourceType> type to the <targetType> type column or variable <columnName> due to an overflow. Use `try_cast` on the input value to tolerate overflow and return NULL instead."
],
"sqlState" : "22003"
},
"CATALOG_NOT_FOUND" : {
"message" : [
"The catalog <catalogName> not found. Consider to set the SQL config <config> to a catalog plugin."
],
"sqlState" : "42P08"
},
"CHECKPOINT_FILE_CHECKSUM_VERIFICATION_FAILED" : {
"message" : [
"Checksum verification failed, the file may be corrupted. File: <fileName>",
"Expected (file size: <expectedSize>, checksum: <expectedChecksum>), Computed (file size: <computedSize>, checksum: <computedChecksum>)."
],
"sqlState" : "XX000"
},
"CHECKPOINT_RDD_BLOCK_ID_NOT_FOUND" : {
"message" : [
"Checkpoint block <rddBlockId> not found!",
"Either the executor that originally checkpointed this partition is no longer alive, or the original RDD is unpersisted.",
"If this problem persists, you may consider using `rdd.checkpoint()` instead, which is slower than local checkpointing but more fault-tolerant."
],
"sqlState" : "56000"
},
"CHECK_CONSTRAINT_VIOLATION" : {
"message" : [
"CHECK constraint <constraintName> <expression> violated by row with values:",
"<values>",
""
],
"sqlState" : "23001"
},
"CIRCULAR_CLASS_REFERENCE" : {
"message" : [
"Cannot have circular references in class, but got the circular reference of class <t>."
],
"sqlState" : "42602"
},
"CLASS_NOT_OVERRIDE_EXPECTED_METHOD" : {
"message" : [
"<className> must override either <method1> or <method2>."
],
"sqlState" : "38000"
},
"CLASS_UNSUPPORTED_BY_MAP_OBJECTS" : {
"message" : [
"`MapObjects` does not support the class <cls> as resulting collection."
],
"sqlState" : "0A000"
},
"CLUSTERING_COLUMNS_MISMATCH" : {
"message" : [
"Specified clustering does not match that of the existing table <tableName>.",
"Specified clustering columns: [<specifiedClusteringString>].",
"Existing clustering columns: [<existingClusteringString>]."
],
"sqlState" : "42P10"
},
"CLUSTERING_NOT_SUPPORTED" : {
"message" : [
"'<operation>' does not support clustering."
],
"sqlState" : "42000"
},
"CODEC_NOT_AVAILABLE" : {
"message" : [
"The codec <codecName> is not available."
],
"subClass" : {
"WITH_AVAILABLE_CODECS_SUGGESTION" : {
"message" : [
"Available codecs are <availableCodecs>."
]
},
"WITH_CONF_SUGGESTION" : {
"message" : [
"Consider to set the config <configKey> to <configVal>."
]
}
},
"sqlState" : "56038"
},
"CODEC_SHORT_NAME_NOT_FOUND" : {
"message" : [
"Cannot find a short name for the codec <codecName>."
],
"sqlState" : "42704"
},
"COLLATION_INVALID_NAME" : {
"message" : [
"The value <collationName> does not represent a correct collation name. Suggested valid collation names: [<proposals>]."
],
"sqlState" : "42704"
},
"COLLATION_INVALID_PROVIDER" : {
"message" : [
"The value <provider> does not represent a correct collation provider. Supported providers are: [<supportedProviders>]."
],
"sqlState" : "42704"
},
"COLLATION_MISMATCH" : {
"message" : [
"Could not determine which collation to use for string functions and operators."
],
"subClass" : {
"EXPLICIT" : {
"message" : [
"Error occurred due to the mismatch between explicit collations: [<explicitTypes>]. Decide on a single explicit collation and remove others."
]
},
"IMPLICIT" : {
"message" : [
"Error occurred due to the mismatch between implicit collations: [<implicitTypes>]. Use COLLATE function to set the collation explicitly."
]
}
},
"sqlState" : "42P21"
},
"COLLECTION_SIZE_LIMIT_EXCEEDED" : {
"message" : [
"Can't create array with <numberOfElements> elements which exceeding the array size limit <maxRoundedArrayLength>,"
],
"subClass" : {
"FUNCTION" : {
"message" : [
"unsuccessful try to create arrays in the function <functionName>."
]
},
"INITIALIZE" : {
"message" : [
"cannot initialize an array with specified parameters."
]
},
"PARAMETER" : {
"message" : [
"the value of parameter(s) <parameter> in the function <functionName> is invalid."
]
}
},
"sqlState" : "54000"
},
"COLUMN_ALIASES_NOT_ALLOWED" : {
"message" : [
"Column aliases are not allowed in <op>."
],
"sqlState" : "42601"
},
"COLUMN_ALREADY_EXISTS" : {
"message" : [
"The column <columnName> already exists. Choose another name or rename the existing column."
],
"sqlState" : "42711"
},
"COLUMN_ARRAY_ELEMENT_TYPE_MISMATCH" : {
"message" : [
"Some values in field <pos> are incompatible with the column array type. Expected type <type>."
],
"sqlState" : "0A000"
},
"COLUMN_NOT_DEFINED_IN_TABLE" : {
"message" : [
"<colType> column <colName> is not defined in table <tableName>, defined table columns are: <tableCols>."
],
"sqlState" : "42703"
},
"COLUMN_NOT_FOUND" : {
"message" : [
"The column <colName> cannot be found. Verify the spelling and correctness of the column name according to the SQL config <caseSensitiveConfig>."
],
"sqlState" : "42703"
},
"COLUMN_ORDINAL_OUT_OF_BOUNDS" : {
"message" : [
"Column ordinal out of bounds. The number of columns in the table is <attributesLength>, but the column ordinal is <ordinal>.",
"Attributes are the following: <attributes>."
],
"sqlState" : "22003"
},
"COMPARATOR_RETURNS_NULL" : {
"message" : [
"The comparator has returned a NULL for a comparison between <firstValue> and <secondValue>.",
"It should return a positive integer for \"greater than\", 0 for \"equal\" and a negative integer for \"less than\".",
"To revert to deprecated behavior where NULL is treated as 0 (equal), you must set \"spark.sql.legacy.allowNullComparisonResultInArraySort\" to \"true\"."
],
"sqlState" : "22004"
},
"COMPLEX_EXPRESSION_UNSUPPORTED_INPUT" : {
"message" : [
"Cannot process input data types for the expression: <expression>."
],
"subClass" : {
"BAD_INPUTS" : {
"message" : [
"The input data types to <functionName> must be valid, but found the input types <dataType>."
]
},
"MISMATCHED_TYPES" : {
"message" : [
"All input types must be the same except nullable, containsNull, valueContainsNull flags, but found the input types <inputTypes>."
]
},
"NO_INPUTS" : {
"message" : [
"The collection of input data types must not be empty."
]
}
},
"sqlState" : "42K09"
},
"CONCURRENT_QUERY" : {
"message" : [
"Another instance of this query was just started by a concurrent session."
],
"sqlState" : "0A000"
},
"CONCURRENT_STREAM_LOG_UPDATE" : {
"message" : [
"Concurrent update to the log. Multiple streaming jobs detected for <batchId>.",
"Please make sure only one streaming job runs on a specific checkpoint location at a time."
],
"sqlState" : "40000"
},
"CONFLICTING_DIRECTORY_STRUCTURES" : {
"message" : [
"Conflicting directory structures detected.",
"Suspicious paths:",
"<discoveredBasePaths>",
"If provided paths are partition directories, please set \"basePath\" in the options of the data source to specify the root directory of the table.",
"If there are multiple root directories, please load them separately and then union them."
],
"sqlState" : "KD009"
},
"CONFLICTING_PARTITION_COLUMN_NAMES" : {
"message" : [
"Conflicting partition column names detected:",
"<distinctPartColLists>",
"For partitioned table directories, data files should only live in leaf directories.",
"And directories at the same level should have the same partition column name.",
"Please check the following directories for unexpected files or inconsistent partition column names:",
"<suspiciousPaths>"
],
"sqlState" : "KD009"
},
"CONFLICTING_PARTITION_COLUMN_NAME_WITH_RESERVED" : {
"message" : [
"Partition column name '<partitionColumnName>' conflicts with reserved column name.",
"The schema of <tableName> is Hive-incompatible, Spark automatically generates a reserved column '<partitionColumnName>' to store the table in a specific way.",
"Please use a different name for the partition column."
],
"sqlState" : "KD009"
},
"CONNECT" : {
"message" : [
"Generic Spark Connect error."
],
"subClass" : {
"INTERCEPTOR_CTOR_MISSING" : {
"message" : [
"Cannot instantiate GRPC interceptor because <cls> is missing a default constructor without arguments."
]
},
"INTERCEPTOR_RUNTIME_ERROR" : {
"message" : [
"Error instantiating GRPC interceptor: <msg>"
]
},
"PLUGIN_CTOR_MISSING" : {
"message" : [
"Cannot instantiate Spark Connect plugin because <cls> is missing a default constructor without arguments."
]
},
"PLUGIN_RUNTIME_ERROR" : {
"message" : [
"Error instantiating Spark Connect plugin: <msg>"
]
},
"SESSION_NOT_SAME" : {
"message" : [
"Both Datasets must belong to the same SparkSession."
]
}
},
"sqlState" : "56K00"
},
"CONNECT_CLIENT_UNEXPECTED_MISSING_SQL_STATE" : {
"message" : [
"Unidentified Error: <message>"
],
"sqlState" : "XXKCM"
},
"CONNECT_INVALID_PLAN" : {
"message" : [
"The Spark Connect plan is invalid."
],
"subClass" : {
"CANNOT_PARSE" : {
"message" : [
"Cannot decompress or parse the input plan (<errorMsg>)",
"This may be caused by a corrupted compressed plan.",
"To disable plan compression, set 'spark.connect.session.planCompression.threshold' to -1."
]
},
"PLAN_SIZE_LARGER_THAN_MAX" : {
"message" : [
"The plan size is larger than max (<planSize> vs. <maxPlanSize>)",
"This typically occurs when building very complex queries with many operations, large literals, or deeply nested expressions.",
"Consider splitting the query into smaller parts using temporary views for intermediate results or reducing the number of operations."
]
}
},
"sqlState" : "56K00"
},
"CONNECT_ML" : {
"message" : [
"Generic Spark Connect ML error."
],
"subClass" : {
"ATTRIBUTE_NOT_ALLOWED" : {
"message" : [
"<attribute> in <className> is not allowed to be accessed."
]
},
"CACHE_INVALID" : {
"message" : [
"Cannot retrieve Summary object <objectName> from the ML cache.",
"Because the Summary object is evicted if you don't use it for more than <evictTimeoutInMinutes> minutes.",
"In this case, you can call `model.evaluate(dataset)` to create a new Summary object."
]
},
"ML_CACHE_SIZE_OVERFLOW_EXCEPTION" : {
"message" : [
"The model cache size in current session is about to exceed",
"<mlCacheMaxSize> bytes.",
"Please delete existing cached model by executing 'del model' in python client before fitting new model or loading new model"
]
},
"MODEL_SIZE_OVERFLOW_EXCEPTION" : {
"message" : [
"The fitted or loaded model size is about <modelSize> bytes.",
"Please fit or load a model smaller than <modelMaxSize> bytes."
]
},
"MODEL_SUMMARY_LOST" : {
"message" : [
"The model <objectName> summary is lost because the cached model is offloaded."
]
},
"UNSUPPORTED_EXCEPTION" : {
"message" : [
"<message>"
]
}
},
"sqlState" : "XX000"
},
"CONSTRAINT_ALREADY_EXISTS" : {
"message" : [
"Constraint '<constraintName>' already exists. Please delete the existing constraint first.",
"Existing constraint:",
"<oldConstraint>"
],
"sqlState" : "42710"
},
"CONSTRAINT_DOES_NOT_EXIST" : {
"message" : [
"Cannot drop nonexistent constraint <constraintName> from table <tableName>."
],
"sqlState" : "42704"
},
"CONVERSION_INVALID_INPUT" : {
"message" : [
"The value <str> (<fmt>) cannot be converted to <targetType> because it is malformed. Correct the value as per the syntax, or change its format. Use <suggestion> to tolerate malformed input and return NULL instead."
],
"sqlState" : "22018"
},
"CORRUPTED_CATALOG_FUNCTION" : {
"message" : [
"Cannot convert the catalog function '<identifier>' into a SQL function due to corrupted function information in catalog. If the function is not a SQL function, please make sure the class name '<className>' is loadable."
],
"sqlState" : "0A000"
},
"CREATE_PERMANENT_VIEW_WITHOUT_ALIAS" : {
"message" : [
"Not allowed to create the permanent view <name> without explicitly assigning an alias for the expression <attr>."
],
"sqlState" : "0A000"
},
"CREATE_TABLE_COLUMN_DESCRIPTOR_DUPLICATE" : {
"message" : [
"CREATE TABLE column <columnName> specifies descriptor \"<optionName>\" more than once, which is invalid."
],
"sqlState" : "42710"
},
"CREATE_VIEW_COLUMN_ARITY_MISMATCH" : {
"message" : [
"Cannot create view <viewName>, the reason is"
],
"subClass" : {
"NOT_ENOUGH_DATA_COLUMNS" : {
"message" : [
"not enough data columns:",
"View columns: <viewColumns>.",
"Data columns: <dataColumns>."
]
},
"TOO_MANY_DATA_COLUMNS" : {
"message" : [
"too many data columns:",
"View columns: <viewColumns>.",
"Data columns: <dataColumns>."
]
}
},
"sqlState" : "21S01"
},
"CURSOR_ALREADY_EXISTS" : {
"message" : [
"Cannot declare cursor <cursorName> because it already exists in the current scope."
],
"sqlState" : "42723"
},
"CURSOR_ALREADY_OPEN" : {
"message" : [
"Cannot open cursor <cursorName> because it is already open."
],
"sqlState" : "24502"
},
"CURSOR_NOT_FOUND" : {
"message" : [
"Cursor <cursorName> not found in the current scope."
],
"sqlState" : "42883"
},
"CURSOR_NOT_OPEN" : {
"message" : [
"Cannot fetch from or close cursor <cursorName> because it is not open."
],
"sqlState" : "24501"
},
"CURSOR_NO_MORE_ROWS" : {
"message" : [
"No more rows available to fetch from cursor <cursorName>."
],
"sqlState" : "02000"
},
"CURSOR_OUTSIDE_SCRIPT" : {
"message" : [
"Cursor operations can only be used within SQL scripts."
],
"sqlState" : "0A000"
},
"CURSOR_REFERENCE_INVALID_QUALIFIER" : {
"message" : [
"Cursor reference <cursorName> is invalid. Cursor references can only have at most one qualifier (e.g., label.cursor)."
],
"sqlState" : "42601"
},
"CYCLIC_FUNCTION_REFERENCE" : {
"message" : [
"Cyclic function reference detected: <path>."
],
"sqlState" : "42887"
},
"DATAFLOW_GRAPH_NOT_FOUND" : {
"message" : [
"Dataflow graph with id <graphId> could not be found"
],
"sqlState" : "KD011"
},
"DATATYPE_CANNOT_ORDER" : {
"message" : [
"Type <dataType> does not support ordered operations."
],
"sqlState" : "0A000"
},
"DATATYPE_MISMATCH" : {
"message" : [
"Cannot resolve <sqlExpr> due to data type mismatch:"
],
"subClass" : {
"ARRAY_FUNCTION_DIFF_TYPES" : {
"message" : [
"Input to <functionName> should have been <dataType> followed by a value with same element type, but it's [<leftType>, <rightType>]."
]
},
"BINARY_ARRAY_DIFF_TYPES" : {
"message" : [
"Input to function <functionName> should have been two <arrayType> with same element type, but it's [<leftType>, <rightType>]."
]
},
"BINARY_OP_DIFF_TYPES" : {
"message" : [
"the left and right operands of the binary operator have incompatible types (<left> and <right>)."
]
},
"BINARY_OP_WRONG_TYPE" : {
"message" : [
"the binary operator requires the input type <inputType>, not <actualDataType>."
]
},
"BLOOM_FILTER_BINARY_OP_WRONG_TYPE" : {
"message" : [
"The Bloom filter binary input to <functionName> should be either a constant value or a scalar subquery expression, but it's <actual>."
]
},
"BLOOM_FILTER_WRONG_TYPE" : {
"message" : [
"Input to function <functionName> should have been <expectedLeft> followed by value with <expectedRight>, but it's [<actual>]."
]
},
"CANNOT_CONVERT_TO_JSON" : {
"message" : [
"Unable to convert column <name> of type <type> to JSON."
]
},
"CANNOT_DROP_ALL_FIELDS" : {
"message" : [
"Cannot drop all fields in struct."
]
},
"CAST_WITHOUT_SUGGESTION" : {
"message" : [
"cannot cast <srcType> to <targetType>."
]
},
"CAST_WITH_CONF_SUGGESTION" : {
"message" : [
"cannot cast <srcType> to <targetType> with ANSI mode on.",
"If you have to cast <srcType> to <targetType>, you can set <config> as <configVal>."
]
},
"CAST_WITH_FUNC_SUGGESTION" : {
"message" : [
"cannot cast <srcType> to <targetType>.",
"To convert values from <srcType> to <targetType>, you can use the functions <functionNames> instead."
]
},
"CREATE_MAP_KEY_DIFF_TYPES" : {
"message" : [
"The given keys of function <functionName> should all be the same type, but they are <dataType>."
]
},
"CREATE_MAP_VALUE_DIFF_TYPES" : {
"message" : [
"The given values of function <functionName> should all be the same type, but they are <dataType>."
]
},
"CREATE_NAMED_STRUCT_WITHOUT_FOLDABLE_STRING" : {
"message" : [
"Only foldable `STRING` expressions are allowed to appear at odd position, but they are <inputExprs>."
]
},
"DATA_DIFF_TYPES" : {
"message" : [
"Input to <functionName> should all be the same type, but it's <dataType>."
]
},
"FILTER_NOT_BOOLEAN" : {
"message" : [
"Filter expression <filter> of type <type> is not a boolean."
]
},
"HASH_MAP_TYPE" : {
"message" : [
"Input to the function <functionName> cannot contain elements of the \"MAP\" type. In Spark, same maps may have different hashcode, thus hash expressions are prohibited on \"MAP\" elements. To restore previous behavior set \"spark.sql.legacy.allowHashOnMapType\" to \"true\"."
]
},
"HASH_VARIANT_TYPE" : {
"message" : [
"Input to the function <functionName> cannot contain elements of the \"VARIANT\" type yet."
]
},
"INPUT_SIZE_NOT_ONE" : {
"message" : [
"Length of <exprName> should be 1."
]
},
"INVALID_ARG_VALUE" : {
"message" : [
"The <inputName> value must to be a <requireType> literal of <validValues>, but got <inputValue>."
]
},
"INVALID_JSON_MAP_KEY_TYPE" : {
"message" : [
"Input schema <schema> can only contain STRING as a key type for a MAP."
]
},
"INVALID_JSON_SCHEMA" : {
"message" : [
"Input schema <schema> must be a struct, an array, a map or a variant."
]
},
"INVALID_MAP_KEY_TYPE" : {
"message" : [
"The key of map cannot be/contain <keyType>."
]
},
"INVALID_ORDERING_TYPE" : {
"message" : [
"The <functionName> does not support ordering on type <dataType>."
]
},
"INVALID_ROW_LEVEL_OPERATION_ASSIGNMENTS" : {
"message" : [
"<errors>"
]
},
"INVALID_XML_MAP_KEY_TYPE" : {
"message" : [
"Input schema <schema> can only contain STRING as a key type for a MAP."
]
},
"INVALID_XML_SCHEMA" : {
"message" : [
"Input schema <schema> must be a struct or a variant."
]
},
"IN_SUBQUERY_DATA_TYPE_MISMATCH" : {
"message" : [
"The data type of one or more elements in the left hand side of an IN subquery is not compatible with the data type of the output of the subquery. Mismatched columns: [<mismatchedColumns>], left side: [<leftType>], right side: [<rightType>]."
]
},
"IN_SUBQUERY_LENGTH_MISMATCH" : {
"message" : [
"The number of columns in the left hand side of an IN subquery does not match the number of columns in the output of subquery. Left hand side columns(length: <leftLength>): [<leftColumns>], right hand side columns(length: <rightLength>): [<rightColumns>]."
]
},
"MAP_CONCAT_DIFF_TYPES" : {
"message" : [
"The <functionName> should all be of type map, but it's <dataType>."
]
},
"MAP_FUNCTION_DIFF_TYPES" : {
"message" : [
"Input to <functionName> should have been <dataType> followed by a value with same key type, but it's [<leftType>, <rightType>]."
]
},
"MAP_ZIP_WITH_DIFF_TYPES" : {
"message" : [
"Input to the <functionName> should have been two maps with compatible key types, but it's [<leftType>, <rightType>]."
]
},
"NON_FOLDABLE_INPUT" : {
"message" : [
"the input <inputName> should be a foldable <inputType> expression; however, got <inputExpr>."
]
},
"NON_STRING_TYPE" : {
"message" : [
"all arguments of the function <funcName> must be strings."
]
},
"NON_STRUCT_TYPE" : {
"message" : [
"the input <inputName> should be a struct expression; however, got <inputType>."
]
},
"NULL_TYPE" : {
"message" : [
"Null typed values cannot be used as arguments of <functionName>."
]
},
"PARAMETER_CONSTRAINT_VIOLATION" : {
"message" : [
"The <leftExprName>(<leftExprValue>) must be <constraint> the <rightExprName>(<rightExprValue>)."
]
},
"RANGE_FRAME_INVALID_TYPE" : {
"message" : [
"The data type <orderSpecType> used in the order specification does not support the data type <valueBoundaryType> which is used in the range frame."
]
},
"RANGE_FRAME_MULTI_ORDER" : {
"message" : [
"A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: <orderSpec>."
]
},
"RANGE_FRAME_WITHOUT_ORDER" : {
"message" : [
"A range window frame cannot be used in an unordered window specification."
]
},
"SEQUENCE_WRONG_INPUT_TYPES" : {
"message" : [
"<functionName> uses the wrong parameter type. The parameter type must conform to:",
"1. The start and stop expressions must resolve to the same type.",
"2. If start and stop expressions resolve to the <startType> type, then the step expression must resolve to the <stepType> type.",
"3. Otherwise, if start and stop expressions resolve to the <otherStartType> type, then the step expression must resolve to the same type."
]
},
"SPECIFIED_WINDOW_FRAME_DIFF_TYPES" : {
"message" : [
"Window frame bounds <lower> and <upper> do not have the same type: <lowerType> <> <upperType>."
]
},
"SPECIFIED_WINDOW_FRAME_INVALID_BOUND" : {
"message" : [
"Window frame upper bound <upper> does not follow the lower bound <lower>."
]
},
"SPECIFIED_WINDOW_FRAME_UNACCEPTED_TYPE" : {
"message" : [
"The data type of the <location> bound <exprType> does not match the expected data type <expectedType>."
]
},
"SPECIFIED_WINDOW_FRAME_WITHOUT_FOLDABLE" : {
"message" : [
"Window frame <location> bound <expression> is not a literal."
]
},
"SPECIFIED_WINDOW_FRAME_WRONG_COMPARISON" : {
"message" : [
"The lower bound of a window frame must be <comparison> to the upper bound."
]
},
"STACK_COLUMN_DIFF_TYPES" : {
"message" : [
"The data type of the column (<columnIndex>) do not have the same type: <leftType> (<leftParamIndex>) <> <rightType> (<rightParamIndex>)."
]
},
"TYPE_CHECK_FAILURE_WITH_HINT" : {
"message" : [
"<msg><hint>."
]
},
"UNEXPECTED_CLASS_TYPE" : {
"message" : [
"class <className> not found."
]
},
"UNEXPECTED_INPUT_TYPE" : {
"message" : [
"The <paramIndex> parameter requires the <requiredType> type, however <inputSql> has the type <inputType>."
]
},
"UNEXPECTED_NULL" : {
"message" : [
"The <exprName> must not be null."
]
},
"UNEXPECTED_RETURN_TYPE" : {
"message" : [
"The <functionName> requires return <expectedType> type, but the actual is <actualType> type."
]
},
"UNEXPECTED_STATIC_METHOD" : {
"message" : [
"cannot find a static method <methodName> that matches the argument types in <className>."
]
},
"UNSUPPORTED_INPUT_TYPE" : {
"message" : [
"The input of <functionName> can't be <dataType> type data."
]
},
"VALUE_OUT_OF_RANGE" : {
"message" : [
"The <exprName> must be between <valueRange> (current value = <currentValue>)."
]
},
"WRONG_NUM_ARG_TYPES" : {
"message" : [
"The expression requires <expectedNum> argument types but the actual number is <actualNum>."
]
},
"WRONG_NUM_ENDPOINTS" : {
"message" : [
"The number of endpoints must be >= 2 to construct intervals but the actual number is <actualNumber>."
]
}
},
"sqlState" : "42K09"
},
"DATATYPE_MISSING_SIZE" : {
"message" : [
"DataType <type> requires a length parameter, for example <type>(10). Please specify the length."
],
"sqlState" : "42K01"
},
"DATA_SOURCE_ALREADY_EXISTS" : {
"message" : [
"Data source '<provider>' already exists. Please choose a different name for the new data source."
],
"sqlState" : "42710"
},
"DATA_SOURCE_EXTERNAL_ERROR" : {
"message" : [
"Encountered error when saving to external data source."
],
"sqlState" : "KD010"
},
"DATA_SOURCE_NOT_EXIST" : {
"message" : [
"Data source '<provider>' not found. Please make sure the data source is registered."
],
"sqlState" : "42704"
},
"DATA_SOURCE_NOT_FOUND" : {
"message" : [
"Failed to find the data source: <provider>. Make sure the provider name is correct and the package is properly registered and compatible with your Spark version."
],
"sqlState" : "42K02"
},
"DATA_SOURCE_TABLE_SCHEMA_MISMATCH" : {
"message" : [
"The schema of the data source table does not match the expected schema. If you are using the DataFrameReader.schema API or creating a table, avoid specifying the schema.",
"Data Source schema: <dsSchema>",
"Expected schema: <expectedSchema>"
],
"sqlState" : "42K03"
},
"DATETIME_FIELD_OUT_OF_BOUNDS" : {
"message" : [
"<rangeMessage>."
],
"subClass" : {
"WITHOUT_SUGGESTION" : {
"message" : [
""
]
},
"WITH_SUGGESTION" : {
"message" : [
"If necessary set <ansiConfig> to \"false\" to bypass this error."
]
}
},
"sqlState" : "22023"
},
"DATETIME_OVERFLOW" : {
"message" : [
"Datetime operation overflow: <operation>."
],
"sqlState" : "22008"
},
"DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION" : {
"message" : [
"Decimal precision <precision> exceeds max precision <maxPrecision>."
],
"sqlState" : "22003"
},
"DEFAULT_DATABASE_NOT_EXISTS" : {
"message" : [
"Default database <defaultDatabase> does not exist, please create it first or change default database to `<defaultDatabase>`."
],
"sqlState" : "42704"
},
"DEFAULT_PLACEMENT_INVALID" : {
"message" : [
"A DEFAULT keyword in a MERGE, INSERT, UPDATE, or SET VARIABLE command could not be directly assigned to a target column because it was part of an expression.",
"For example: `UPDATE SET c1 = DEFAULT` is allowed, but `UPDATE T SET c1 = DEFAULT + 1` is not allowed."
],
"sqlState" : "42608"
},
"DEFAULT_UNSUPPORTED" : {
"message" : [
"Failed to execute <statementType> command because DEFAULT values are not supported for target data source with table provider: \"<dataSource>\"."
],
"sqlState" : "42623"
},
"DEFINE_FLOW_ONCE_OPTION_NOT_SUPPORTED" : {
"message" : [
"Defining a one-time flow <flowName> with the 'once' option is not supported."
],
"sqlState" : "0A000"
},
"DESCRIBE_JSON_NOT_EXTENDED" : {
"message" : [
"DESCRIBE TABLE ... AS JSON only supported when [EXTENDED|FORMATTED] is specified.",
"For example: DESCRIBE EXTENDED <tableName> AS JSON is supported but DESCRIBE <tableName> AS JSON is not."
],
"sqlState" : "0A000"
},
"DISTINCT_WINDOW_FUNCTION_UNSUPPORTED" : {
"message" : [
"Distinct window functions are not supported: <windowExpr>."
],
"sqlState" : "0A000"
},
"DIVIDE_BY_ZERO" : {
"message" : [
"Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set <config> to \"false\" to bypass this error."
],
"sqlState" : "22012"
},
"DUPLICATED_CTE_NAMES" : {
"message" : [
"CTE definition can't have duplicate names: <duplicateNames>."
],
"sqlState" : "42602"
},
"DUPLICATED_FIELD_NAME_IN_ARROW_STRUCT" : {
"message" : [
"Duplicated field names in Arrow Struct are not allowed, got <fieldNames>."
],
"sqlState" : "42713"
},
"DUPLICATED_MAP_KEY" : {
"message" : [
"Duplicate map key <key> was found, please check the input data.",
"If you want to remove the duplicated keys, you can set <mapKeyDedupPolicy> to \"LAST_WIN\" so that the key inserted at last takes precedence."
],
"sqlState" : "23505"
},
"DUPLICATED_METRICS_NAME" : {
"message" : [
"The metric name is not unique: <metricName>. The same name cannot be used for metrics with different results.",
"However multiple instances of metrics with with same result and name are allowed (e.g. self-joins)."
],
"sqlState" : "42710"
},
"DUPLICATE_ASSIGNMENTS" : {
"message" : [
"The columns or variables <nameList> appear more than once as assignment targets."
],
"sqlState" : "42701"
},
"DUPLICATE_CLAUSES" : {
"message" : [
"Found duplicate clauses: <clauseName>. Please, remove one of them."
],
"sqlState" : "42614"
},
"DUPLICATE_CONDITION_IN_SCOPE" : {
"message" : [
"Found duplicate condition <condition> in the scope. Please, remove one of them."
],
"sqlState" : "42734"
},
"DUPLICATE_EXCEPTION_HANDLER" : {
"message" : [
"Found duplicate handlers. Please, remove one of them."
],
"subClass" : {
"CONDITION" : {
"message" : [
"Found duplicate handlers for the same condition <condition>."
]
},
"SQLSTATE" : {
"message" : [
"Found duplicate handlers for the same SQLSTATE <sqlState>."
]
}
},
"sqlState" : "42734"
},
"DUPLICATE_FLOW_SQL_CONF" : {
"message" : [
"Found duplicate sql conf for dataset '<datasetName>': '<key>' is defined by both '<flowName1>' and '<flowName2>'"
],
"sqlState" : "42710"
},
"DUPLICATE_KEY" : {
"message" : [
"Found duplicate keys <keyColumn>."
],
"sqlState" : "23505"
},
"DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT" : {
"message" : [
"Call to routine <routineName> is invalid because it includes multiple argument assignments to the same parameter name <parameterName>."
],
"subClass" : {
"BOTH_POSITIONAL_AND_NAMED" : {
"message" : [
"A positional argument and named argument both referred to the same parameter. Please remove the named argument referring to this parameter."
]
},
"DOUBLE_NAMED_ARGUMENT_REFERENCE" : {
"message" : [
"More than one named argument referred to the same parameter. Please assign a value only once."
]
}
},
"sqlState" : "4274K"
},
"DUPLICATE_ROUTINE_PARAMETER_NAMES" : {
"message" : [
"Found duplicate name(s) in the parameter list of the user-defined routine <routineName>: <names>."
],
"sqlState" : "42734"
},
"DUPLICATE_ROUTINE_RETURNS_COLUMNS" : {
"message" : [
"Found duplicate column(s) in the RETURNS clause column list of the user-defined routine <routineName>: <columns>."
],
"sqlState" : "42711"
},
"DUPLICATE_VARIABLE_NAME_INSIDE_DECLARE" : {
"message" : [
"Found duplicate variable <variableName> in the declare variable list. Please, remove one of them."
],
"sqlState" : "42734"
},
"DYNAMIC_PARTITION_WRITE_PARTITION_NUM_LIMIT_EXCEEDED" : {
"message" : [
"Number of dynamic partitions created is <numWrittenParts>, which is more than <maxDynamicPartitions>. To solve this try to set <maxDynamicPartitionsKey> to at least <numWrittenParts>."
],
"sqlState" : "54054"
},
"EMITTING_ROWS_OLDER_THAN_WATERMARK_NOT_ALLOWED" : {
"message" : [
"Previous node emitted a row with eventTime=<emittedRowEventTime> which is older than current_watermark_value=<currentWatermark>",
"This can lead to correctness issues in the stateful operators downstream in the execution pipeline.",
"Please correct the operator logic to emit rows after current global watermark value."
],
"sqlState" : "42815"
},
"EMPTY_JSON_FIELD_VALUE" : {
"message" : [
"Failed to parse an empty string for data type <dataType>."
],
"sqlState" : "42604"
},
"EMPTY_SCHEMA_NOT_SUPPORTED_FOR_DATASOURCE" : {
"message" : [
"The <format> datasource does not support writing empty or nested empty schemas. Please make sure the data schema has at least one or more column(s)."
],
"sqlState" : "0A000"
},
"ENCODER_NOT_FOUND" : {
"message" : [
"Not found an encoder of the type <typeName> to Spark SQL internal representation.",
"Consider to change the input type to one of supported at '<docroot>/sql-ref-datatypes.html'."
],
"sqlState" : "42704"
},
"END_LABEL_WITHOUT_BEGIN_LABEL" : {
"message" : [
"End label <endLabel> can not exist without begin label."
],
"sqlState" : "42K0L"
},
"ERROR_READING_AVRO_UNKNOWN_FINGERPRINT" : {
"message" : [
"Error reading avro data -- encountered an unknown fingerprint: <fingerprint>, not sure what schema to use.",
"This could happen if you registered additional schemas after starting your spark context."
],
"sqlState" : "KD00B"
},
"EVENT_TIME_IS_NOT_ON_TIMESTAMP_TYPE" : {
"message" : [
"The event time <eventName> has the invalid type <eventType>, but expected \"TIMESTAMP\"."
],
"sqlState" : "42K09"
},
"EXCEED_LIMIT_LENGTH" : {
"message" : [
"Exceeds char/varchar type length limitation: <limit>."
],
"sqlState" : "54006"
},
"EXCEPT_NESTED_COLUMN_INVALID_TYPE" : {
"message" : [
"EXCEPT column <columnName> was resolved and expected to be StructType, but found type <dataType>."
],
"sqlState" : "428H2"
},
"EXCEPT_OVERLAPPING_COLUMNS" : {
"message" : [
"Columns in an EXCEPT list must be distinct and non-overlapping, but got (<columns>)."
],
"sqlState" : "42702"
},
"EXEC_IMMEDIATE_DUPLICATE_ARGUMENT_ALIASES" : {
"message" : [
"The USING clause of this EXECUTE IMMEDIATE command contained multiple arguments with same alias (<aliases>), which is invalid; please update the command to specify unique aliases and then try it again."
],
"sqlState" : "42701"
},
"EXPECT_PERMANENT_VIEW_NOT_TEMP" : {
"message" : [
"'<operation>' expects a permanent view but <viewName> is a temp view."
],
"sqlState" : "42809"
},
"EXPECT_TABLE_NOT_VIEW" : {
"message" : [
"'<operation>' expects a table but <viewName> is a view."
],
"subClass" : {
"NO_ALTERNATIVE" : {
"message" : [
""
]
},
"USE_ALTER_VIEW" : {
"message" : [
"Please use ALTER VIEW instead."
]
}
},
"sqlState" : "42809"
},
"EXPECT_VIEW_NOT_TABLE" : {
"message" : [
"The table <tableName> does not support <operation>."
],
"subClass" : {
"NO_ALTERNATIVE" : {
"message" : [
""
]
},
"USE_ALTER_TABLE" : {
"message" : [
"Please use ALTER TABLE instead."
]
}
},
"sqlState" : "42809"
},
"EXPRESSION_DECODING_FAILED" : {
"message" : [
"Failed to decode a row to a value of the expressions: <expressions>."
],
"sqlState" : "42846"
},
"EXPRESSION_ENCODING_FAILED" : {
"message" : [
"Failed to encode a value of the expressions: <expressions> to a row."
],
"sqlState" : "42846"
},
"EXPRESSION_TRANSLATION_TO_V2_IS_NOT_SUPPORTED" : {
"message" : [
"Expression <expr> cannot be translated to v2 expression."
],
"sqlState" : "0A000"
},
"EXPRESSION_TYPE_IS_NOT_ORDERABLE" : {
"message" : [
"Column expression <expr> cannot be sorted because its type <exprType> is not orderable."
],
"sqlState" : "42822"
},
"FAILED_EXECUTE_UDF" : {
"message" : [
"User defined function (<functionName>: (<signature>) => <result>) failed due to: <reason>."
],
"sqlState" : "39000"
},
"FAILED_FUNCTION_CALL" : {
"message" : [
"Failed preparing of the function <funcName> for call. Please, double check function's arguments."
],
"sqlState" : "38000"
},
"FAILED_JDBC" : {
"message" : [
"Failed JDBC <url> on the operation:"
],
"subClass" : {
"ALTER_TABLE" : {
"message" : [
"Alter the table <tableName>."
]
},
"CONNECTION" : {
"message" : [
"Couldn't connect to the database"
]
},
"CREATE_INDEX" : {
"message" : [
"Create the index <indexName> in the <tableName> table."
]
},
"CREATE_NAMESPACE" : {
"message" : [
"Create the namespace <namespace>."
]
},
"CREATE_NAMESPACE_COMMENT" : {
"message" : [
"Create a comment on the namespace: <namespace>."
]
},
"CREATE_TABLE" : {
"message" : [
"Create the table <tableName>."
]
},
"DROP_INDEX" : {
"message" : [
"Drop the index <indexName> in the <tableName> table."
]
},
"DROP_NAMESPACE" : {
"message" : [
"Drop the namespace <namespace>."
]
},
"DROP_TABLE" : {
"message" : [
"Drop the table <tableName>."
]
},
"GET_TABLES" : {
"message" : [
"Get tables from the namespace: <namespace>."
]
},
"LIST_NAMESPACES" : {
"message" : [
"List namespaces."
]
},
"LOAD_TABLE" : {
"message" : [
"Load the table <tableName>."
]
},
"NAMESPACE_EXISTS" : {
"message" : [
"Check that the namespace <namespace> exists."
]
},
"REMOVE_NAMESPACE_COMMENT" : {
"message" : [
"Remove a comment on the namespace: <namespace>."
]
},
"RENAME_TABLE" : {
"message" : [
"Rename the table <oldName> to <newName>."
]
},
"TABLE_EXISTS" : {
"message" : [
"Check that the table <tableName> exists."
]
},
"UNCLASSIFIED" : {
"message" : [
"<message>"
]
}
},
"sqlState" : "HV000"
},
"FAILED_PARSE_STRUCT_TYPE" : {
"message" : [
"Failed parsing struct: <raw>."
],
"sqlState" : "22018"
},
"FAILED_READ_FILE" : {
"message" : [
"Encountered error while reading file <path>."
],
"subClass" : {
"CANNOT_READ_FILE_FOOTER" : {
"message" : [
"Could not read footer. Please ensure that the file is in either ORC or Parquet format.",
"If not, please convert it to a valid format. If the file is in the valid format, please check if it is corrupt.",
"If it is, you can choose to either ignore it or fix the corruption."
]
},
"FILE_NOT_EXIST" : {
"message" : [
"File does not exist. It is possible the underlying files have been updated.",
"You can explicitly invalidate the cache in Spark by running 'REFRESH TABLE tableName' command in SQL or by recreating the Dataset/DataFrame involved."
]
},
"NO_HINT" : {
"message" : [
""
]
},
"PARQUET_COLUMN_DATA_TYPE_MISMATCH" : {
"message" : [
"Data type mismatches when reading Parquet column <column>. Expected Spark type <expectedType>, actual Parquet type <actualType>."
]
},
"UNSUPPORTED_FILE_SYSTEM" : {
"message" : [
"The file system <fileSystemClass> hasn't implemented <method>."
]
}
},
"sqlState" : "KD001"
},
"FAILED_REGISTER_CLASS_WITH_KRYO" : {
"message" : [
"Failed to register classes with Kryo."
],
"sqlState" : "KD000"
},
"FAILED_RENAME_PATH" : {
"message" : [
"Failed to rename <sourcePath> to <targetPath> as destination already exists."
],
"sqlState" : "42K04"
},
"FAILED_RENAME_TEMP_FILE" : {
"message" : [
"Failed to rename temp file <srcPath> to <dstPath> as FileSystem.rename returned false."
],
"sqlState" : "58030"
},
"FAILED_ROW_TO_JSON" : {
"message" : [
"Failed to convert the row value <value> of the class <class> to the target SQL type <sqlType> in the JSON format."
],
"sqlState" : "2203G"
},
"FAILED_TO_CREATE_PLAN_FOR_DIRECT_QUERY" : {
"message" : [
"Failed to create plan for direct query on files: <dataSourceType>"
],
"sqlState" : "58030"
},
"FAILED_TO_LOAD_ROUTINE" : {
"message" : [
"Failed to load routine <routineName>."
],
"sqlState" : "38000"
},
"FAILED_TO_PARSE_TOO_COMPLEX" : {
"message" : [
"The statement, including potential SQL functions and referenced views, was too complex to parse.",
"To mitigate this error divide the statement into multiple, less complex chunks."
],
"sqlState" : "54001"
},
"FEATURE_NOT_ENABLED" : {
"message" : [
"The feature <featureName> is not enabled. Consider setting the config <configKey> to <configValue> to enable this capability."
],
"sqlState" : "56038"
},
"FIELD_ALREADY_EXISTS" : {
"message" : [
"Cannot <op> column, because <fieldNames> already exists in <struct>."
],
"sqlState" : "42710"
},
"FIELD_NOT_FOUND" : {
"message" : [
"No such struct field <fieldName> in <fields>."
],
"sqlState" : "42704"
},
"FLATMAPGROUPSWITHSTATE_USER_FUNCTION_ERROR" : {
"message" : [
"An error occurred in the user provided function in flatMapGroupsWithState. Reason: <reason>"
],
"sqlState" : "39000"
},
"FORBIDDEN_OPERATION" : {
"message" : [
"The operation <statement> is not allowed on the <objectType>: <objectName>."
],
"sqlState" : "42809"
},
"FOREACH_BATCH_USER_FUNCTION_ERROR" : {
"message" : [
"An error occurred in the user provided function in foreach batch sink. Reason: <reason>"
],
"sqlState" : "39000"
},
"FOREACH_USER_FUNCTION_ERROR" : {
"message" : [
"An error occurred in the user provided function in foreach sink. Reason: <reason>"
],
"sqlState" : "39000"
},
"GENERATED_COLUMN_WITH_DEFAULT_VALUE" : {
"message" : [
"A column cannot have both a default value and a generation expression but column <colName> has default value: (<defaultValue>) and generation expression: (<genExpr>)."
],
"sqlState" : "42623"
},
"GEO_ENCODER_SRID_MISMATCH_ERROR" : {
"message" : [
"Failed to encode <type> value because provided SRID <valueSrid> of a value to encode does not match type SRID: <typeSrid>."
],
"sqlState" : "42K09"
},
"GET_TABLES_BY_TYPE_UNSUPPORTED_BY_HIVE_VERSION" : {
"message" : [
"Hive 2.2 and lower versions don't support getTablesByType. Please use Hive 2.3 or higher version."
],
"sqlState" : "56038"
},
"GRAPHITE_SINK_INVALID_PROTOCOL" : {
"message" : [
"Invalid Graphite protocol: <protocol>."
],
"sqlState" : "KD000"
},
"GRAPHITE_SINK_PROPERTY_MISSING" : {
"message" : [
"Graphite sink requires '<property>' property."
],
"sqlState" : "KD000"
},
"GROUPING_COLUMN_MISMATCH" : {
"message" : [
"Column of grouping (<grouping>) can't be found in grouping columns <groupingColumns>."
],
"sqlState" : "42803"
},
"GROUPING_ID_COLUMN_MISMATCH" : {
"message" : [
"Columns of grouping_id (<groupingIdColumn>) does not match grouping columns (<groupByColumns>)."
],
"sqlState" : "42803"
},
"GROUPING_SIZE_LIMIT_EXCEEDED" : {
"message" : [
"Grouping sets size cannot be greater than <maxSize>."
],
"sqlState" : "54000"
},
"GROUP_BY_AGGREGATE" : {
"message" : [
"Aggregate functions are not allowed in GROUP BY, but found <sqlExpr>."
],
"sqlState" : "42903"
},
"GROUP_BY_POS_AGGREGATE" : {
"message" : [
"GROUP BY <index> refers to an expression <aggExpr> that contains an aggregate function. Aggregate functions are not allowed in GROUP BY."
],
"sqlState" : "42903"
},
"GROUP_BY_POS_OUT_OF_RANGE" : {
"message" : [
"GROUP BY position <index> is not in select list (valid range is [1, <size>])."
],
"sqlState" : "42805"
},
"GROUP_EXPRESSION_TYPE_IS_NOT_ORDERABLE" : {
"message" : [
"The expression <sqlExpr> cannot be used as a grouping expression because its data type <dataType> is not an orderable data type."
],
"sqlState" : "42822"
},
"HINT_UNSUPPORTED_FOR_JDBC_DIALECT" : {
"message" : [
"The option `hint` is not supported for <jdbcDialect> in JDBC data source. Supported dialects are `MySQLDialect`, `OracleDialect` and `DatabricksDialect`."
],
"sqlState" : "42822"
},
"HLL_INVALID_INPUT_SKETCH_BUFFER" : {
"message" : [
"Invalid call to <function>; only valid HLL sketch buffers are supported as inputs (such as those produced by the `hll_sketch_agg` function)."
],
"sqlState" : "22546"
},
"HLL_INVALID_LG_K" : {
"message" : [
"Invalid call to <function>; the `lgConfigK` value must be between <min> and <max>, inclusive: <value>."
],
"sqlState" : "22546"
},
"HLL_K_MUST_BE_CONSTANT" : {
"message" : [
"Invalid call to <function>; the `K` value must be a constant value, but got a non-constant expression."
],
"sqlState" : "42K0E"
},
"HLL_UNION_DIFFERENT_LG_K" : {
"message" : [
"Sketches have different `lgConfigK` values: <left> and <right>. Set the `allowDifferentLgConfigK` parameter to true to call <function> with different `lgConfigK` values."
],
"sqlState" : "22000"
},
"HYBRID_ANALYZER_EXCEPTION" : {
"message" : [
"An failure occurred when attempting to resolve a query or command with both the legacy fixed-point analyzer as well as the single-pass resolver."
],
"subClass" : {
"FIXED_POINT_FAILED_SINGLE_PASS_SUCCEEDED" : {
"message" : [
"Fixed-point resolution failed, but single-pass resolution succeeded.",
"Single-pass analyzer output:",
"<singlePassOutput>"
]
},
"LOGICAL_PLAN_COMPARISON_MISMATCH" : {
"message" : [
"Outputs of fixed-point and single-pass analyzers do not match.",
"Fixed-point analyzer output:",
"<fixedPointOutput>",
"Single-pass analyzer output:",
"<singlePassOutput>"
]
},
"OUTPUT_SCHEMA_COMPARISON_MISMATCH" : {
"message" : [
"Output schemas of fixed-point and single-pass analyzers do not match.",
"Fixed-point analyzer output schema:",
"<fixedPointOutputSchema>",
"Single-pass analyzer output schema:",
"<singlePassOutputSchema>"
]
}
},
"sqlState" : "XX000"
},
"IDENTIFIER_TOO_MANY_NAME_PARTS" : {
"message" : [
"<identifier> is not a valid identifier as it has more than <limit> name parts."
],
"sqlState" : "42601"
},
"IDENTITY_COLUMNS_DUPLICATED_SEQUENCE_GENERATOR_OPTION" : {
"message" : [
"Duplicated IDENTITY column sequence generator option: <sequenceGeneratorOption>."
],
"sqlState" : "42601"
},
"IDENTITY_COLUMNS_ILLEGAL_STEP" : {
"message" : [
"IDENTITY column step cannot be 0."
],
"sqlState" : "42611"
},
"IDENTITY_COLUMNS_UNSUPPORTED_DATA_TYPE" : {
"message" : [
"DataType <dataType> is not supported for IDENTITY columns."
],
"sqlState" : "428H2"
},
"IDENTITY_COLUMN_WITH_DEFAULT_VALUE" : {
"message" : [
"A column cannot have both a default value and an identity column specification but column <colName> has default value: (<defaultValue>) and identity column specification: (<identityColumnSpec>)."
],
"sqlState" : "42623"
},
"ILLEGAL_DAY_OF_WEEK" : {
"message" : [
"Illegal input for day of week: <string>."
],
"sqlState" : "22009"
},
"ILLEGAL_STATE_STORE_VALUE" : {
"message" : [
"Illegal value provided to the State Store"
],
"subClass" : {
"EMPTY_LIST_VALUE" : {
"message" : [
"Cannot write empty list values to State Store for StateName <stateName>."
]
},
"NULL_VALUE" : {
"message" : [
"Cannot write null values to State Store for StateName <stateName>."
]
}
},
"sqlState" : "42601"
},
"INCOMPARABLE_PIVOT_COLUMN" : {
"message" : [
"Invalid pivot column <columnName>. Pivot columns must be comparable."
],
"sqlState" : "42818"
},
"INCOMPATIBLE_BATCH_VIEW_READ" : {
"message" : [
"View <datasetIdentifier> is a batch view and must be referenced using SparkSession#read. This check can be disabled by setting Spark conf pipelines.incompatibleViewCheck.enabled = false."
],
"sqlState" : "42000"
},
"INCOMPATIBLE_COLUMN_CHANGES_AFTER_VIEW_WITH_PLAN_CREATION" : {
"message" : [
"View <viewName> plan references table <tableName> whose <colType> columns changed since the view plan was initially captured.",
"Column changes:",
"<errors>",
"This indicates the table has evolved and the view based on the plan must be recreated."
],
"sqlState" : "51024"
},
"INCOMPATIBLE_COLUMN_TYPE" : {
"message" : [
"<operator> can only be performed on tables with compatible column types. The <columnOrdinalNumber> column of the <tableOrdinalNumber> table is <dataType1> type which is not compatible with <dataType2> at the same column of the first table.<hint>."
],
"sqlState" : "42825"
},
"INCOMPATIBLE_DATASOURCE_REGISTER" : {
"message" : [
"Detected an incompatible DataSourceRegister. Please remove the incompatible library from classpath or upgrade it. Error: <message>"
],
"sqlState" : "56038"
},
"INCOMPATIBLE_DATA_FOR_TABLE" : {
"message" : [
"Cannot write incompatible data for the table <tableName>:"
],
"subClass" : {
"AMBIGUOUS_COLUMN_NAME" : {
"message" : [
"Ambiguous column name in the input data <colName>."
]
},
"CANNOT_FIND_DATA" : {
"message" : [
"Cannot find data for the output column <colName>."
]
},
"CANNOT_SAFELY_CAST" : {
"message" : [
"Cannot safely cast <colName> <srcType> to <targetType>."
]
},
"EXTRA_COLUMNS" : {
"message" : [
"Cannot write extra columns <extraColumns>."
]
},
"EXTRA_STRUCT_FIELDS" : {
"message" : [
"Cannot write extra fields <extraFields> to the struct <colName>."
]
},
"NULLABLE_ARRAY_ELEMENTS" : {
"message" : [
"Cannot write nullable elements to array of non-nulls: <colName>."
]
},
"NULLABLE_COLUMN" : {
"message" : [
"Cannot write nullable values to non-null column <colName>."
]
},
"NULLABLE_MAP_VALUES" : {
"message" : [
"Cannot write nullable values to map of non-nulls: <colName>."
]
},
"STRUCT_MISSING_FIELDS" : {
"message" : [
"Struct <colName> missing fields: <missingFields>."
]
},
"UNEXPECTED_COLUMN_NAME" : {
"message" : [
"Struct <colName> <order>-th field name does not match (may be out of order): expected <expected>, found <found>."
]
}
},
"sqlState" : "KD000"
},
"INCOMPATIBLE_JOIN_TYPES" : {
"message" : [
"The join types <joinType1> and <joinType2> are incompatible."
],
"sqlState" : "42613"
},
"INCOMPATIBLE_STREAMING_VIEW_READ" : {
"message" : [
"View <datasetIdentifier> is a streaming view and must be referenced using SparkSession#readStream. This check can be disabled by setting Spark conf pipelines.incompatibleViewCheck.enabled = false."
],
"sqlState" : "42000"
},
"INCOMPATIBLE_TABLE_CHANGE_AFTER_ANALYSIS" : {
"message" : [
"Detected incompatible changes to table <tableName> after DataFrame/Dataset has been resolved and analyzed, meaning the underlying plan is out of sync. Please, re-create DataFrame/Dataset before attempting to execute the query again."
],
"subClass" : {
"COLUMNS_MISMATCH" : {
"message" : [
"Data columns have changed:",
"<errors>"
]
},
"METADATA_COLUMNS_MISMATCH" : {
"message" : [
"Metadata columns have changed:",
"<errors>"
]
},
"TABLE_ID_MISMATCH" : {
"message" : [
"Table ID has changed from <capturedTableId> to <currentTableId>."
]
}
},
"sqlState" : "51024"
},
"INCOMPATIBLE_VIEW_SCHEMA_CHANGE" : {
"message" : [
"The SQL query of view <viewName> has an incompatible schema change and column <colName> cannot be resolved. Expected <expectedNum> columns named <colName> but got <actualCols>.",
"Please try to re-create the view by running: <suggestion>."
],
"sqlState" : "51024"
},
"INCOMPLETE_TYPE_DEFINITION" : {
"message" : [
"Incomplete complex type:"
],
"subClass" : {
"ARRAY" : {
"message" : [
"The definition of \"ARRAY\" type is incomplete. You must provide an element type. For example: \"ARRAY<elementType>\"."
]
},
"MAP" : {
"message" : [
"The definition of \"MAP\" type is incomplete. You must provide a key type and a value type. For example: \"MAP<TIMESTAMP, INT>\"."
]
},
"STRUCT" : {
"message" : [
"The definition of \"STRUCT\" type is incomplete. You must provide at least one field type. For example: \"STRUCT<name STRING, phone DECIMAL(10, 0)>\"."
]
}
},
"sqlState" : "42K01"
},
"INCONSISTENT_BEHAVIOR_CROSS_VERSION" : {
"message" : [
"You may get a different result due to the upgrading to"
],
"subClass" : {
"DATETIME_PATTERN_RECOGNITION" : {
"message" : [
"Spark >= 3.0:",
"Fail to recognize <pattern> pattern in the DateTimeFormatter.",
"1) You can set <config> to \"LEGACY\" to restore the behavior before Spark 3.0.",
"2) You can form a valid datetime pattern with the guide from '<docroot>/sql-ref-datetime-pattern.html'."
]
},
"DATETIME_WEEK_BASED_PATTERN" : {
"message" : [
"Spark >= 3.0:",
"All week-based patterns are unsupported since Spark 3.0, detected week-based character: <c>.",
"Please use the SQL function EXTRACT instead."
]
},
"PARSE_DATETIME_BY_NEW_PARSER" : {
"message" : [
"Spark >= 3.0:",
"Fail to parse <datetime> in the new parser.",
"You can set <config> to \"LEGACY\" to restore the behavior before Spark 3.0, or set to \"CORRECTED\" and treat it as an invalid datetime string."
]
},
"READ_ANCIENT_DATETIME" : {
"message" : [
"Spark >= 3.0: reading dates before 1582-10-15 or timestamps before 1900-01-01T00:00:00Z from <format> files can be ambiguous, as the files may be written by",
"Spark 2.x or legacy versions of Hive, which uses a legacy hybrid calendar that is different from Spark 3.0+'s Proleptic Gregorian calendar.",
"See more details in SPARK-31404.",
"You can set the SQL config <config> or the datasource option <option> to \"LEGACY\" to rebase the datetime values w.r.t. the calendar difference during reading.",
"To read the datetime values as it is, set the SQL config or the datasource option to \"CORRECTED\"."
]
},
"WRITE_ANCIENT_DATETIME" : {
"message" : [
"Spark >= 3.0:",
"writing dates before 1582-10-15 or timestamps before 1900-01-01T00:00:00Z into <format> files can be dangerous, as the files may be read by Spark 2.x or legacy versions of Hive later, which uses a legacy hybrid calendar that is different from Spark 3.0+'s Proleptic Gregorian calendar.",
"See more details in SPARK-31404.",
"You can set <config> to \"LEGACY\" to rebase the datetime values w.r.t. the calendar difference during writing, to get maximum interoperability.",
"Or set the config to \"CORRECTED\" to write the datetime values as it is, if you are sure that the written files will only be read by Spark 3.0+ or other systems that use Proleptic Gregorian calendar."
]
}
},
"sqlState" : "42K0B"
},
"INCORRECT_RAMP_UP_RATE" : {
"message" : [
"Max offset with <rowsPerSecond> rowsPerSecond is <maxSeconds>, but 'rampUpTimeSeconds' is <rampUpTimeSeconds>."
],
"sqlState" : "22003"
},
"INDETERMINATE_COLLATION" : {
"message" : [
"Could not determine which collation to use for string operation. Use COLLATE clause to set the collation explicitly."
],
"sqlState" : "42P22"
},
"INDETERMINATE_COLLATION_IN_EXPRESSION" : {
"message" : [
"Data type of <expr> has indeterminate collation. Use COLLATE clause to set the collation explicitly."
],
"sqlState" : "42P22"
},
"INDETERMINATE_COLLATION_IN_SCHEMA" : {
"message" : [
"Schema contains indeterminate collation at: [<columnPaths>]. Use COLLATE clause to set the collation explicitly."
],
"sqlState" : "42P22"
},
"INDEX_ALREADY_EXISTS" : {
"message" : [
"Cannot create the index <indexName> on table <tableName> because it already exists."
],
"sqlState" : "42710"
},
"INDEX_NOT_FOUND" : {
"message" : [
"Cannot find the index <indexName> on table <tableName>."
],
"sqlState" : "42704"
},
"INSERT_COLUMN_ARITY_MISMATCH" : {
"message" : [
"Cannot write to <tableName>, the reason is"
],
"subClass" : {
"NOT_ENOUGH_DATA_COLUMNS" : {
"message" : [
"not enough data columns:",
"Table columns: <tableColumns>.",
"Data columns: <dataColumns>."
]
},
"TOO_MANY_DATA_COLUMNS" : {
"message" : [
"too many data columns:",
"Table columns: <tableColumns>.",
"Data columns: <dataColumns>."
]
}
},
"sqlState" : "21S01"
},
"INSERT_PARTITION_COLUMN_ARITY_MISMATCH" : {
"message" : [
"Cannot write to '<tableName>', <reason>:",
"Table columns: <tableColumns>.",
"Partition columns with static values: <staticPartCols>.",
"Data columns: <dataColumns>."
],
"sqlState" : "21S01"
},
"INSUFFICIENT_TABLE_PROPERTY" : {
"message" : [
"Can't find table property:"
],
"subClass" : {
"MISSING_KEY" : {
"message" : [
"<key>."
]
},
"MISSING_KEY_PART" : {
"message" : [
"<key>, <totalAmountOfParts> parts are expected."
]
}
},
"sqlState" : "XXKUC"
},
"INTERNAL_ERROR" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_ATTRIBUTE_NOT_FOUND" : {
"message" : [
"Could not find <missingAttr> in <inputAttrs>."
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_BROADCAST" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_EXECUTOR" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_HIVE_METASTORE_PARTITION_FILTER" : {
"message" : [
"Failed to get partition metadata by filter from Hive metastore.",
"To work around this issue, set '<hiveMetastorePartitionPruningFallbackOnException>' to true. Note that this may result in degraded performance as Spark will fetch all partition metadata instead of filtering at the metastore level.",
"To report this issue, visit: https://issues.apache.org/jira/browse/SPARK"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_INVALID_HIVE_COLUMN_TYPE" : {
"message" : [
"Failed to convert Hive table to Spark catalog table.",
"Database: <dbName>",
"Table: <tableName>"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_INVALID_PARTITION_FILTER_VALUE" : {
"message" : [
"Partition filter value cannot contain both double quotes (\") and single quotes (')."
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_MEMORY" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_METADATA_CATALOG" : {
"message" : [
"An object in the metadata catalog has been corrupted:"
],
"subClass" : {
"SQL_CONFIG" : {
"message" : [
"Corrupted view SQL configs in catalog."
]
},
"TABLE_NAME_CONTEXT" : {
"message" : [
"Corrupted table name context in catalog: <numParts> parts expected, but part <index> is missing."
]
},
"TEMP_FUNCTION_REFERENCE" : {
"message" : [
"Corrupted view referred temp functions names in catalog."
]
},
"TEMP_VARIABLE_REFERENCE" : {
"message" : [
"Corrupted view referred temp variable names in catalog."
]
},
"TEMP_VIEW_REFERENCE" : {
"message" : [
"Corrupted view referred temp view names in catalog."
]
},
"VIEW_QUERY_COLUMN_ARITY" : {
"message" : [
"Corrupted view query output column names in catalog: <numCols> parts expected, but part <index> is missing."
]
}
},
"sqlState" : "XX000"
},
"INTERNAL_ERROR_NETWORK" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_SERDE_INTERFACE_NOT_FOUND" : {
"message" : [
"The SerDe interface was removed since Hive 2.3 (HIVE-15167).",
"Please migrate your custom SerDes to Hive 2.3 or later.",
"For more details, see: https://issues.apache.org/jira/browse/HIVE-15167"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_SHUFFLE" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_STORAGE" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_TWS" : {
"message" : [
"<message>"
],
"sqlState" : "XX000"
},
"INTERVAL_ARITHMETIC_OVERFLOW" : {
"message" : [
"Integer overflow while operating with intervals."
],
"subClass" : {
"WITHOUT_SUGGESTION" : {
"message" : [
"Try devising appropriate values for the interval parameters."
]
},
"WITH_SUGGESTION" : {
"message" : [
"Use <functionName> to tolerate overflow and return NULL instead."
]
}
},
"sqlState" : "22015"
},
"INTERVAL_DIVIDED_BY_ZERO" : {
"message" : [
"Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead."
],
"sqlState" : "22012"
},
"INVALID_AGGREGATE_FILTER" : {
"message" : [
"The FILTER expression <filterExpr> in an aggregate function is invalid."
],
"subClass" : {
"CONTAINS_AGGREGATE" : {
"message" : [
"Expected a FILTER expression without an aggregation, but found <aggExpr>."
]
},
"CONTAINS_WINDOW_FUNCTION" : {
"message" : [
"Expected a FILTER expression without a window function, but found <windowExpr>."
]
},
"NON_DETERMINISTIC" : {
"message" : [
"Expected a deterministic FILTER expression."
]
},
"NOT_BOOLEAN" : {
"message" : [
"Expected a FILTER expression of the BOOLEAN type."
]
}
},
"sqlState" : "42903"
},
"INVALID_AGNOSTIC_ENCODER" : {
"message" : [
"Found an invalid agnostic encoder. Expects an instance of AgnosticEncoder but got <encoderType>. For more information consult '<docroot>/api/java/index.html?org/apache/spark/sql/Encoder.html'."
],
"sqlState" : "42001"
},
"INVALID_ARRAY_INDEX" : {
"message" : [
"The index <indexValue> is out of bounds. The array has <arraySize> elements. Use the SQL function `get()` to tolerate accessing element at invalid index and return NULL instead."
],
"sqlState" : "22003"
},
"INVALID_ARRAY_INDEX_IN_ELEMENT_AT" : {
"message" : [
"The index <indexValue> is out of bounds. The array has <arraySize> elements. Use `try_element_at` to tolerate accessing element at invalid index and return NULL instead."
],
"sqlState" : "22003"
},
"INVALID_ARTIFACT_PATH" : {
"message" : [
"Artifact with name <name> is invalid. The name must be a relative path and cannot reference parent/sibling/nephew directories."
],
"sqlState" : "22023"
},
"INVALID_ATTRIBUTE_NAME_SYNTAX" : {
"message" : [
"Syntax error in the attribute name: <name>. Check that backticks appear in pairs, a quoted string is a complete name part and use a backtick only inside quoted name parts."
],
"sqlState" : "42601"
},
"INVALID_BITMAP_POSITION" : {
"message" : [
"The 0-indexed bitmap position <bitPosition> is out of bounds. The bitmap has <bitmapNumBits> bits (<bitmapNumBytes> bytes)."
],
"sqlState" : "22003"
},
"INVALID_BOOLEAN_STATEMENT" : {
"message" : [
"Boolean statement is expected in the condition, but <invalidStatement> was found."
],
"sqlState" : "22546"
},
"INVALID_BOUNDARY" : {
"message" : [
"The boundary <boundary> is invalid: <invalidValue>."
],
"subClass" : {
"END" : {
"message" : [
"Expected the value is '0', '<longMaxValue>', '[<intMinValue>, <intMaxValue>]'."
]
},
"START" : {
"message" : [
"Expected the value is '0', '<longMinValue>', '[<intMinValue>, <intMaxValue>]'."
]
}
},
"sqlState" : "22003"
},
"INVALID_BUCKET_COLUMN_DATA_TYPE" : {
"message" : [
"Cannot use <type> for bucket column. Collated data types are not supported for bucketing."
],
"sqlState" : "42601"
},
"INVALID_BUCKET_COUNT" : {
"message" : [
"Number of buckets should be greater than 0 but less than or equal to bucketing.maxBuckets (`<bucketingMaxBuckets>`). Got `<numBuckets>`."
],
"sqlState" : "22003"
},
"INVALID_BUCKET_FILE" : {
"message" : [
"Invalid bucket file: <path>."
],
"sqlState" : "58030"
},
"INVALID_BYTE_STRING" : {
"message" : [
"The expected format is ByteString, but was <unsupported> (<class>)."
],
"sqlState" : "22P03"
},
"INVALID_CLONE_SESSION_REQUEST" : {
"message" : [
"Invalid session clone request."
],
"subClass" : {
"TARGET_SESSION_ID_ALREADY_CLOSED" : {
"message" : [
"Cannot clone session to target session ID <targetSessionId> because a session with this ID was previously closed."
]
},
"TARGET_SESSION_ID_ALREADY_EXISTS" : {
"message" : [
"Cannot clone session to target session ID <targetSessionId> because a session with this ID already exists."
]
},
"TARGET_SESSION_ID_FORMAT" : {
"message" : [
"Target session ID <targetSessionId> for clone operation must be an UUID string of the format '00112233-4455-6677-8899-aabbccddeeff'."
]
}
},
"sqlState" : "42K04"
},
"INVALID_COLUMN_NAME_AS_PATH" : {
"message" : [
"The datasource <datasource> cannot save the column <columnName> because its name contains some characters that are not allowed in file paths. Please, use an alias to rename it."
],
"sqlState" : "46121"
},
"INVALID_COLUMN_OR_FIELD_DATA_TYPE" : {
"message" : [
"Column or field <name> is of type <type> while it's required to be <expectedType>."
],
"sqlState" : "42000"
},
"INVALID_CONF_VALUE" : {
"message" : [
"The value '<confValue>' in the config \"<confName>\" is invalid."
],
"subClass" : {
"OUT_OF_RANGE_OF_OPTIONS" : {
"message" : [
"It should be one of '<confOptions>'."
]
},
"REQUIREMENT" : {
"message" : [
"<confRequirement>"
]
},
"TIME_ZONE" : {
"message" : [
"Cannot resolve the given timezone."
]
},
"TYPE_MISMATCH" : {
"message" : [
"It should be a/an '<confType>' value."
]
}
},
"sqlState" : "22022"
},
"INVALID_CONSTRAINT_CHARACTERISTICS" : {
"message" : [
"Constraint characteristics [<characteristics>] are duplicated or conflict with each other."
],
"sqlState" : "42613"
},
"INVALID_CORRUPT_RECORD_TYPE" : {
"message" : [
"The column <columnName> for corrupt records must have the nullable STRING type, but got <actualType>."
],
"sqlState" : "42804"
},
"INVALID_CURSOR" : {
"message" : [
"The cursor is invalid."
],
"subClass" : {
"DISCONNECTED" : {
"message" : [
"The cursor has been disconnected by the server."
]
},
"NOT_REATTACHABLE" : {
"message" : [
"The cursor is not reattachable."
]
},
"POSITION_NOT_AVAILABLE" : {
"message" : [
"The cursor position id <responseId> is no longer available at index <index>."
]
},
"POSITION_NOT_FOUND" : {
"message" : [
"The cursor position id <responseId> is not found."
]
}
},
"sqlState" : "HY109"
},
"INVALID_CURSOR_DECLARATION" : {
"message" : [
"Cursors must be declared after variable/condition declarations, and before handlers and other statements."
],
"sqlState" : "42601"
},
"INVALID_DATETIME_PATTERN" : {
"message" : [
"Unrecognized datetime pattern: <pattern>."
],
"subClass" : {
"ILLEGAL_CHARACTER" : {
"message" : [
"Illegal pattern character found in datetime pattern: <c>. Please provide legal character."
]
},
"LENGTH" : {
"message" : [
"Too many letters in datetime pattern: <pattern>. Please reduce pattern length."
]
},
"SECONDS_FRACTION" : {
"message" : [
"Cannot detect a seconds fraction pattern of variable length. Please make sure the pattern contains 'S', and does not contain illegal characters."
]
},
"WITH_SUGGESTION" : {
"message" : [
"You can form a valid datetime pattern with the guide from '<docroot>/sql-ref-datetime-pattern.html'."
]
}
},
"sqlState" : "22007"
},
"INVALID_DEFAULT_VALUE" : {
"message" : [
"Failed to execute <statement> command because the destination column or variable <colName> has a DEFAULT value <defaultValue>,"
],
"subClass" : {
"DATA_TYPE" : {
"message" : [
"which requires <expectedType> type, but the statement provided a value of incompatible <actualType> type."
]
},
"NON_DETERMINISTIC" : {
"message" : [
"which contains a non-deterministic expression."
]
},
"NOT_CONSTANT" : {
"message" : [
"which is not a constant expression whose equivalent value is known at query planning time."
]
},
"SUBQUERY_EXPRESSION" : {
"message" : [
"which contains subquery expressions."
]
},
"UNRESOLVED_EXPRESSION" : {
"message" : [
"which fails to resolve as a valid expression."
]
}
},
"sqlState" : "42623"
},
"INVALID_DELIMITER_VALUE" : {
"message" : [
"Invalid value for delimiter."
],
"subClass" : {
"DELIMITER_LONGER_THAN_EXPECTED" : {
"message" : [
"Delimiter cannot be more than one character: <str>."
]
},
"EMPTY_STRING" : {
"message" : [
"Delimiter cannot be empty string."
]
},
"NULL_VALUE" : {
"message" : [
"Delimiter cannot be null."
]
},
"SINGLE_BACKSLASH" : {
"message" : [
"Single backslash is prohibited. It has special meaning as beginning of an escape sequence. To get the backslash character, pass a string with two backslashes as the delimiter."
]
},
"UNSUPPORTED_SPECIAL_CHARACTER" : {
"message" : [
"Unsupported special character for delimiter: <str>."
]
}
},
"sqlState" : "42602"
},
"INVALID_DRIVER_MEMORY" : {
"message" : [
"System memory <systemMemory> must be at least <minSystemMemory>.",
"Please increase heap size using the --driver-memory option or \"<config>\" in Spark configuration."
],
"sqlState" : "F0000"
},
"INVALID_EMPTY_LOCATION" : {
"message" : [
"The location name cannot be empty string, but `<location>` was given."
],
"sqlState" : "42K05"
},
"INVALID_ERROR_CONDITION_DECLARATION" : {
"message" : [
"Invalid condition declaration."
],
"subClass" : {
"NOT_AT_START_OF_COMPOUND_STATEMENT" : {
"message" : [
"Condition <conditionName> can only be declared at the start of a BEGIN END compound statement."
]
},
"QUALIFIED_CONDITION_NAME" : {
"message" : [
"Condition <conditionName> cannot be qualified."
]
},
"SPECIAL_CHARACTER_FOUND" : {
"message" : [
"Special character found in condition name <conditionName>. Only alphanumeric characters and underscores are allowed."
]
}
},
"sqlState" : "42K0R"
},
"INVALID_ESC" : {
"message" : [
"Found an invalid escape string: <invalidEscape>. The escape string must contain only one character."
],
"sqlState" : "42604"
},
"INVALID_ESCAPE_CHAR" : {
"message" : [
"`EscapeChar` should be a string literal of length one, but got <sqlExpr>."
],
"sqlState" : "42604"
},
"INVALID_EXECUTOR_MEMORY" : {
"message" : [
"Executor memory <executorMemory> must be at least <minSystemMemory>.",
"Please increase executor memory using the --executor-memory option or \"<config>\" in Spark configuration."
],
"sqlState" : "F0000"
},
"INVALID_EXPRESSION_ENCODER" : {
"message" : [
"Found an invalid expression encoder. Expects an instance of ExpressionEncoder but got <encoderType>. For more information consult '<docroot>/api/java/index.html?org/apache/spark/sql/Encoder.html'."
],
"sqlState" : "42001"
},
"INVALID_EXPR_TYPE_FOR_QUERY_EXECUTE_IMMEDIATE" : {
"message" : [
"Expression type must be string type but got <exprType>."
],
"sqlState" : "42K09"
},
"INVALID_EXTERNAL_TYPE" : {
"message" : [
"The external type <externalType> is not valid for the type <type> at the expression <expr>."
],
"sqlState" : "42K0N"
},
"INVALID_EXTRACT_BASE_FIELD_TYPE" : {
"message" : [
"Can't extract a value from <base>. Need a complex type [STRUCT, ARRAY, MAP] but got <other>."
],
"sqlState" : "42000"
},
"INVALID_EXTRACT_FIELD" : {
"message" : [
"Cannot extract <field> from <expr>."
],
"sqlState" : "42601"
},
"INVALID_EXTRACT_FIELD_TYPE" : {
"message" : [
"Field name should be a non-null string literal, but it's <extraction>."
],
"sqlState" : "42000"
},
"INVALID_FIELD_NAME" : {
"message" : [
"Field name <fieldName> is invalid: <path> is not a struct."
],
"sqlState" : "42000"
},
"INVALID_FLOW_QUERY_TYPE" : {
"message" : [
"Flow <flowIdentifier> returns an invalid relation type."
],
"subClass" : {
"BATCH_RELATION_FOR_STREAMING_TABLE" : {
"message" : [
"Streaming tables may only be defined by streaming relations, but the flow <flowIdentifier> attempts to write a batch relation to the streaming table <tableIdentifier>. Consider using the STREAM operator in Spark-SQL to convert the batch relation into a streaming relation, or populating the streaming table with an append once-flow instead."
]
},
"STREAMING_RELATION_FOR_MATERIALIZED_VIEW" : {
"message" : [
"Materialized views may only be defined by a batch relation, but the flow <flowIdentifier> attempts to write a streaming relation to the materialized view <tableIdentifier>."
]
},
"STREAMING_RELATION_FOR_ONCE_FLOW" : {
"message" : [
"<flowIdentifier> is an append once-flow that is defined by a streaming relation. Append once-flows may only be defined by or return a batch relation."
]
},
"STREAMING_RELATION_FOR_PERSISTED_VIEW" : {
"message" : [
"Persisted views may only be defined by a batch relation, but the flow <flowIdentifier> attempts to write a streaming relation to the persisted view <viewIdentifier>."
]
}
},
"sqlState" : "42000"
},
"INVALID_FORMAT" : {
"message" : [
"The format is invalid: <format>."
],
"subClass" : {
"CONT_THOUSANDS_SEPS" : {
"message" : [
"Thousands separators (, or G) must have digits in between them in the number format."
]
},
"CUR_MUST_BEFORE_DEC" : {
"message" : [
"Currency characters must appear before any decimal point in the number format."
]
},
"CUR_MUST_BEFORE_DIGIT" : {
"message" : [
"Currency characters must appear before digits in the number format."
]
},
"EMPTY" : {
"message" : [
"The number format string cannot be empty."
]
},
"ESC_AT_THE_END" : {
"message" : [
"The escape character is not allowed to end with."
]
},
"ESC_IN_THE_MIDDLE" : {
"message" : [
"The escape character is not allowed to precede <char>."
]
},
"MISMATCH_INPUT" : {
"message" : [
"The input <inputType> <input> does not match the format."
]
},
"THOUSANDS_SEPS_MUST_BEFORE_DEC" : {
"message" : [
"Thousands separators (, or G) may not appear after the decimal point in the number format."
]
},
"UNEXPECTED_TOKEN" : {
"message" : [
"Found the unexpected <token> in the format string; the structure of the format string must match: `[MI|S]` `[$]` `[0|9|G|,]*` `[.|D]` `[0|9]*` `[$]` `[PR|MI|S]`."
]
},
"WRONG_NUM_DIGIT" : {
"message" : [
"The format string requires at least one number digit."
]
},
"WRONG_NUM_TOKEN" : {
"message" : [
"At most one <token> is allowed in the number format."
]
}
},
"sqlState" : "42601"
},
"INVALID_FRACTION_OF_SECOND" : {
"message" : [
"Valid range for seconds is [0, 60] (inclusive), but the provided value is <secAndMicros>. To avoid this error, use `try_make_timestamp`, which returns NULL on error.",
"If you do not want to use the session default timestamp version of this function, use `try_make_timestamp_ntz` or `try_make_timestamp_ltz`."
],
"sqlState" : "22023"
},
"INVALID_HANDLE" : {
"message" : [
"The handle <handle> is invalid."
],
"subClass" : {
"FORMAT" : {
"message" : [
"Handle must be an UUID string of the format '00112233-4455-6677-8899-aabbccddeeff'"
]
},
"OPERATION_ABANDONED" : {
"message" : [
"Operation was considered abandoned because of inactivity and removed."
]
},
"OPERATION_ALREADY_EXISTS" : {
"message" : [
"Operation already exists."
]
},
"OPERATION_NOT_FOUND" : {
"message" : [
"Operation not found."
]
},
"SESSION_CHANGED" : {
"message" : [
"The existing Spark server driver instance has restarted. Please reconnect."
]
},
"SESSION_CLOSED" : {
"message" : [
"Session was closed."
]
},
"SESSION_NOT_FOUND" : {
"message" : [
"Session not found."
]
}
},
"sqlState" : "HY000"
},
"INVALID_HANDLER_DECLARATION" : {
"message" : [
"Invalid handler declaration."
],
"subClass" : {
"CONDITION_NOT_FOUND" : {
"message" : [
"Condition <condition> not found."
]
},
"DUPLICATE_CONDITION_IN_HANDLER_DECLARATION" : {
"message" : [
"Found duplicate condition <condition> in the handler declaration. Please, remove one of them."
]
},
"DUPLICATE_SQLSTATE_IN_HANDLER_DECLARATION" : {
"message" : [
"Found duplicate sqlState <sqlState> in the handler declaration. Please, remove one of them."
]
},
"INVALID_CONDITION_COMBINATION" : {
"message" : [
"Invalid combination of conditions in the handler declaration. SQLEXCEPTION and NOT FOUND cannot be used together with other condition/sqlstate values."
]
},
"WRONG_PLACE_OF_DECLARATION" : {
"message" : [
"Handlers must be declared after variable/condition/cursor declarations, and before other statements."
]
}
},
"sqlState" : "42K0Q"
},
"INVALID_IDENTIFIER" : {
"message" : [
"The unquoted identifier <ident> is invalid and must be back quoted as: `<ident>`.",
"Unquoted identifiers can only contain ASCII letters ('a' - 'z', 'A' - 'Z'), digits ('0' - '9'), and underbar ('_').",
"Unquoted identifiers must also not start with a digit.",
"Different data sources and meta stores may impose additional restrictions on valid identifiers."
],
"sqlState" : "42602"
},
"INVALID_INDEX_OF_ZERO" : {
"message" : [
"The index 0 is invalid. An index shall be either < 0 or > 0 (the first element has index 1)."
],
"sqlState" : "22003"
},
"INVALID_INLINE_TABLE" : {
"message" : [
"Invalid inline table."
],
"subClass" : {
"CANNOT_EVALUATE_EXPRESSION_IN_INLINE_TABLE" : {
"message" : [
"Cannot evaluate the expression <expr> in inline table definition."
]
},
"FAILED_SQL_EXPRESSION_EVALUATION" : {
"message" : [
"Failed to evaluate the SQL expression <sqlExpr>. Please check your syntax and ensure all required tables and columns are available."
]
},
"INCOMPATIBLE_TYPES_IN_INLINE_TABLE" : {
"message" : [
"Found incompatible types in the column <colName> for inline table."
]
},
"NUM_COLUMNS_MISMATCH" : {
"message" : [
"Inline table expected <expectedNumCols> columns but found <actualNumCols> columns in row <rowIndex>."
]
}
},
"sqlState" : "42000"
},
"INVALID_INTERVAL_FORMAT" : {
"message" : [
"Error parsing '<input>' to interval. Please ensure that the value provided is in a valid format for defining an interval. You can reference the documentation for the correct format."
],
"subClass" : {
"ARITHMETIC_EXCEPTION" : {
"message" : [
"Uncaught arithmetic exception while parsing '<input>'."
]
},
"DAY_TIME_PARSING" : {
"message" : [
"Error parsing interval day-time string: <msg>."
]
},
"INPUT_IS_EMPTY" : {
"message" : [
"Interval string cannot be empty."
]
},
"INPUT_IS_NULL" : {
"message" : [
"Interval string cannot be null."
]
},
"INTERVAL_PARSING" : {
"message" : [
"Error parsing interval <interval> string."
]
},
"INVALID_FRACTION" : {
"message" : [
"<unit> cannot have fractional part."
]
},
"INVALID_PRECISION" : {
"message" : [
"Interval can only support nanosecond precision, <value> is out of range."
]
},
"INVALID_PREFIX" : {
"message" : [
"Invalid interval prefix <prefix>."
]
},
"INVALID_UNIT" : {
"message" : [
"Invalid unit <unit>."
]
},
"INVALID_VALUE" : {
"message" : [
"Invalid value <value>."
]
},
"MISSING_NUMBER" : {
"message" : [
"Expect a number after <word> but hit EOL."
]
},
"MISSING_UNIT" : {
"message" : [
"Expect a unit name after <word> but hit EOL."
]
},
"SECOND_NANO_FORMAT" : {
"message" : [
"Interval string does not match second-nano format of ss.nnnnnnnnn."
]
},
"TIMEZONE_INTERVAL_OUT_OF_RANGE" : {
"message" : [
"The interval value must be in the range of [-18, +18] hours with second precision."
]
},
"UNKNOWN_PARSING_ERROR" : {
"message" : [
"Unknown error when parsing <word>."
]
},
"UNMATCHED_FORMAT_STRING" : {
"message" : [
"Interval string does not match <intervalStr> format of <supportedFormat> when cast to <typeName>: <input>."
]
},
"UNMATCHED_FORMAT_STRING_WITH_NOTICE" : {
"message" : [
"Interval string does not match <intervalStr> format of <supportedFormat> when cast to <typeName>: <input>. Set \"spark.sql.legacy.fromDayTimeString.enabled\" to \"true\" to restore the behavior before Spark 3.0."
]
},
"UNRECOGNIZED_NUMBER" : {
"message" : [
"Unrecognized number <number>."
]
},
"UNSUPPORTED_FROM_TO_EXPRESSION" : {
"message" : [
"Cannot support (interval '<input>' <from> to <to>) expression."
]
}
},
"sqlState" : "22006"
},
"INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION" : {
"message" : [
"Cannot add an interval to a date because its microseconds part is not 0. To resolve this, cast the input date to a timestamp, which supports the addition of intervals with non-zero microseconds."
],
"sqlState" : "22006"
},
"INVALID_JAVA_IDENTIFIER_AS_FIELD_NAME" : {
"message" : [
"<fieldName> is not a valid identifier of Java and cannot be used as field name",
"<walkedTypePath>."
],
"sqlState" : "46121"
},
"INVALID_JOIN_TYPE_FOR_JOINWITH" : {
"message" : [
"Invalid join type in joinWith: <joinType>."
],
"sqlState" : "42613"
},
"INVALID_JSON_DATA_TYPE" : {
"message" : [
"Failed to convert the JSON string '<invalidType>' to a data type. Please enter a valid data type."
],
"sqlState" : "2203G"
},
"INVALID_JSON_DATA_TYPE_FOR_COLLATIONS" : {
"message" : [
"Collations can only be applied to string types, but the JSON data type is <jsonType>."
],
"sqlState" : "2203G"
},
"INVALID_JSON_RECORD_TYPE" : {
"message" : [
"Detected an invalid type of a JSON record while inferring a common schema in the mode <failFastMode>. Expected a STRUCT type, but found <invalidType>."
],
"sqlState" : "22023"
},
"INVALID_JSON_ROOT_FIELD" : {
"message" : [
"Cannot convert JSON root field to target Spark type."
],
"sqlState" : "22032"
},
"INVALID_JSON_SCHEMA_MAP_TYPE" : {
"message" : [
"Input schema <jsonSchema> can only contain STRING as a key type for a MAP."
],
"sqlState" : "22032"
},
"INVALID_KRYO_SERIALIZER_BUFFER_SIZE" : {
"message" : [
"The value of the config \"<bufferSizeConfKey>\" must be less than 2048 MiB, but got <bufferSizeConfValue> MiB."
],
"sqlState" : "F0000"
},
"INVALID_KRYO_SERIALIZER_NO_DATA" : {
"message" : [
"The object '<obj>' is invalid or malformed to <serdeOp> using <serdeClass>."
],
"sqlState" : "22002"
},
"INVALID_LABEL_USAGE" : {
"message" : [
"The usage of the label <labelName> is invalid."
],
"subClass" : {
"DOES_NOT_EXIST" : {
"message" : [
"Label was used in the <statementType> statement, but the label does not belong to any surrounding block."
]
},
"ITERATE_IN_COMPOUND" : {
"message" : [
"ITERATE statement cannot be used with a label that belongs to a compound (BEGIN...END) body."
]
},
"QUALIFIED_LABEL_NAME" : {
"message" : [
"Label cannot be qualified."
]
}
},
"sqlState" : "42K0L"
},
"INVALID_LAMBDA_FUNCTION_CALL" : {
"message" : [
"Invalid lambda function call."
],
"subClass" : {
"DUPLICATE_ARG_NAMES" : {
"message" : [
"The lambda function has duplicate arguments <args>. Please, consider to rename the argument names or set <caseSensitiveConfig> to \"true\"."
]
},
"NON_HIGHER_ORDER_FUNCTION" : {
"message" : [
"A lambda function should only be used in a higher order function. However, its class is <class>, which is not a higher order function."
]
},
"NUM_ARGS_MISMATCH" : {
"message" : [
"A higher order function expects <expectedNumArgs> arguments, but got <actualNumArgs>."
]
},
"PARAMETER_DOES_NOT_ACCEPT_LAMBDA_FUNCTION" : {
"message" : [
"You passed a lambda function to a parameter that does not accept it. Please check if lambda function argument is in the correct position."
]
}
},
"sqlState" : "42K0D"
},
"INVALID_LATERAL_JOIN_TYPE" : {
"message" : [
"The <joinType> JOIN with LATERAL correlation is not allowed because an OUTER subquery cannot correlate to its join partner. Remove the LATERAL correlation or use an INNER JOIN, or LEFT OUTER JOIN instead."
],
"sqlState" : "42613"
},
"INVALID_LIMIT_LIKE_EXPRESSION" : {
"message" : [
"The limit like expression <expr> is invalid."
],
"subClass" : {
"DATA_TYPE" : {
"message" : [
"The <name> expression must be integer type, but got <dataType>."
]
},
"IS_NEGATIVE" : {
"message" : [
"The <name> expression must be equal to or greater than 0, but got <v>."
]
},
"IS_NULL" : {
"message" : [
"The evaluated <name> expression must not be null."
]
},
"IS_UNFOLDABLE" : {
"message" : [
"The <name> expression must evaluate to a constant value."
]
}
},
"sqlState" : "42K0E"
},
"INVALID_LOG_VERSION" : {
"message" : [
"UnsupportedLogVersion."
],
"subClass" : {
"EXACT_MATCH_VERSION" : {
"message" : [
"The only supported log version is v<matchVersion>, but encountered v<version>."
]
},
"MAX_SUPPORTED_VERSION" : {
"message" : [
"The maximum supported log version is v<maxSupportedVersion>, but encountered v<version>. The log file was produced by a newer version of Spark and cannot be read by this version. You need to upgrade."
]
}
},
"sqlState" : "KD002"
},
"INVALID_NAME_IN_USE_COMMAND" : {
"message" : [
"Invalid name '<name>' in <command> command. Reason: <reason>"
],
"sqlState" : "42000"
},
"INVALID_NON_DETERMINISTIC_EXPRESSIONS" : {
"message" : [
"The operator expects a deterministic expression, but the actual expression is <sqlExprs>."
],
"sqlState" : "42K0E"
},
"INVALID_NUMERIC_LITERAL_RANGE" : {
"message" : [
"Numeric literal <rawStrippedQualifier> is outside the valid range for <typeName> with minimum value of <minValue> and maximum value of <maxValue>. Please adjust the value accordingly."
],
"sqlState" : "22003"
},
"INVALID_OBSERVED_METRICS" : {
"message" : [
"Invalid observed metrics."
],
"subClass" : {
"AGGREGATE_EXPRESSION_WITH_DISTINCT_UNSUPPORTED" : {
"message" : [
"Aggregate expressions with DISTINCT are not allowed in observed metrics, but found: <expr>."
]
},
"AGGREGATE_EXPRESSION_WITH_FILTER_UNSUPPORTED" : {
"message" : [
"Aggregate expression with FILTER predicate are not allowed in observed metrics, but found: <expr>."
]
},
"MISSING_NAME" : {
"message" : [
"The observed metrics should be named: <operator>."
]
},
"NESTED_AGGREGATES_UNSUPPORTED" : {
"message" : [
"Nested aggregates are not allowed in observed metrics, but found: <expr>."
]
},
"NON_AGGREGATE_FUNC_ARG_IS_ATTRIBUTE" : {
"message" : [
"Attribute <expr> can only be used as an argument to an aggregate function."
]
},
"NON_AGGREGATE_FUNC_ARG_IS_NON_DETERMINISTIC" : {
"message" : [
"Non-deterministic expression <expr> can only be used as an argument to an aggregate function."
]
},
"WINDOW_EXPRESSIONS_UNSUPPORTED" : {
"message" : [
"Window expressions are not allowed in observed metrics, but found: <expr>."
]
}
},
"sqlState" : "42K0E"
},
"INVALID_OPTIONS" : {
"message" : [
"Invalid options:"
],
"subClass" : {
"NON_MAP_FUNCTION" : {
"message" : [
"Must use the `map()` function for options."
]
},
"NON_STRING_TYPE" : {
"message" : [
"A type of keys and values in `map()` must be string, but got <mapType>."
]
}
},
"sqlState" : "42K06"
},
"INVALID_PANDAS_UDF_PLACEMENT" : {
"message" : [
"The group aggregate pandas UDF <functionList> cannot be invoked together with as other, non-pandas aggregate functions."
],
"sqlState" : "0A000"
},
"INVALID_PARAMETER_VALUE" : {
"message" : [
"The value of parameter(s) <parameter> in <functionName> is invalid:"
],
"subClass" : {
"AES_CRYPTO_ERROR" : {
"message" : [
"detail message: <detailMessage>"
]
},
"AES_IV_LENGTH" : {
"message" : [
"supports 16-byte CBC IVs and 12-byte GCM IVs, but got <actualLength> bytes for <mode>."
]
},
"AES_KEY_LENGTH" : {
"message" : [
"expects a binary value with 16, 24 or 32 bytes, but got <actualLength> bytes."
]
},
"BINARY_FORMAT" : {
"message" : [
"expects one of binary formats 'base64', 'hex', 'utf-8', but got <invalidFormat>."
]
},
"BIT_POSITION_RANGE" : {
"message" : [
"expects an integer value in [0, <upper>), but got <invalidValue>."
]
},
"BOOLEAN" : {
"message" : [
"expects a boolean literal, but got <invalidValue>."
]
},
"CHARSET" : {
"message" : [
"expects one of the <charsets>, but got <charset>."
]
},
"DATETIME_UNIT" : {
"message" : [
"expects one of the units without quotes YEAR, QUARTER, MONTH, WEEK, DAY, DAYOFYEAR, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, but got the string literal <invalidValue>."
]
},
"DOUBLE" : {
"message" : [
"expects an double literal, but got <invalidValue>."
]
},
"DTYPE" : {
"message" : [
"Unsupported dtype: <invalidValue>. Valid values: float64, float32."
]
},
"EXTENSION" : {
"message" : [
"Invalid extension: <invalidValue>. Extension is limited to exactly 3 letters (e.g. csv, tsv, etc...)"
]
},
"INTEGER" : {
"message" : [
"expects an integer literal, but got <invalidValue>."
]
},
"INTERRUPT_TYPE_OPERATION_ID_REQUIRES_ID" : {
"message" : [
"INTERRUPT_TYPE_OPERATION_ID requested, but no operation_id provided."
]
},
"INTERRUPT_TYPE_TAG_REQUIRES_TAG" : {
"message" : [
"INTERRUPT_TYPE_TAG requested, but no operation_tag provided."
]
},
"LENGTH" : {
"message" : [
"Expects `length` greater than or equal to 0, but got <length>."
]
},
"LONG" : {
"message" : [
"expects a long literal, but got <invalidValue>."
]
},
"NULL" : {
"message" : [
"expects a non-NULL value."
]
},
"PATTERN" : {
"message" : [
"<value>."
]
},
"REGEX_GROUP_INDEX" : {
"message" : [
"Expects group index between 0 and <groupCount>, but got <groupIndex>."
]
},
"START" : {
"message" : [
"Expects a positive or a negative value for `start`, but got 0."
]
},
"STREAMING_LISTENER_COMMAND_MISSING" : {
"message" : [
"Missing command in StreamingQueryListenerBusCommand."
]
},
"STRING" : {
"message" : [
"expects a string literal, but got <invalidValue>."
]
},
"TIME_UNIT" : {
"message" : [
"expects one of the units 'HOUR', 'MINUTE', 'SECOND', 'MILLISECOND', 'MICROSECOND', but got '<invalidValue>'."
]
},
"ZERO_INDEX" : {
"message" : [
"expects %1$, %2$ and so on, but got %0$."
]
}
},
"sqlState" : "22023"
},
"INVALID_PARTITION_COLUMN_DATA_TYPE" : {
"message" : [
"Cannot use <type> for partition column."
],
"sqlState" : "0A000"
},
"INVALID_PARTITION_OPERATION" : {
"message" : [
"The partition command is invalid."
],
"subClass" : {
"PARTITION_MANAGEMENT_IS_UNSUPPORTED" : {
"message" : [
"Table <name> does not support partition management."
]
},
"PARTITION_SCHEMA_IS_EMPTY" : {
"message" : [
"Table <name> is not partitioned."
]
}
},
"sqlState" : "42601"
},
"INVALID_PARTITION_VALUE" : {
"message" : [
"Failed to cast value <value> to data type <dataType> for partition column <columnName>. Ensure the value matches the expected data type for this partition column."
],
"sqlState" : "42846"
},
"INVALID_PROPERTY_KEY" : {
"message" : [
"<key> is an invalid property key, please use quotes, e.g. SET <key>=<value>."
],
"sqlState" : "42602"
},
"INVALID_PROPERTY_VALUE" : {
"message" : [
"<value> is an invalid property value, please use quotes, e.g. SET <key>=<value>"
],
"sqlState" : "42602"
},
"INVALID_QUERY_MIXED_QUERY_PARAMETERS" : {
"message" : [
"Parameterized query must either use positional, or named parameters, but not both."
],
"sqlState" : "42613"
},
"INVALID_RECURSIVE_CTE" : {
"message" : [
"Invalid recursive definition found. Recursive queries must contain an UNION or an UNION ALL statement with 2 children. The first child needs to be the anchor term without any recursive references. Any top level inner CTE must not contain self references."
],
"sqlState" : "42836"
},
"INVALID_RECURSIVE_REFERENCE" : {
"message" : [
"Invalid recursive reference found inside WITH RECURSIVE clause."
],
"subClass" : {
"PLACE" : {
"message" : [
"Recursive references cannot be used on the right side of left outer/semi/anti joins, on the left side of right outer joins, in full outer joins, in aggregates, window functions or sorts"
]
}
},
"sqlState" : "42836"
},
"INVALID_REGEXP_REPLACE" : {
"message" : [
"Could not perform regexp_replace for source = \"<source>\", pattern = \"<pattern>\", replacement = \"<replacement>\" and position = <position>."
],
"sqlState" : "22023"
},
"INVALID_RESETTABLE_DEPENDENCY" : {
"message" : [
"Tables <upstreamResettableTables> are resettable but have a non-resettable downstream dependency '<downstreamTable>'. `reset` will fail as Spark Streaming does not support deleted source data. You can either remove the <resetAllowedKey>=false property from '<downstreamTable>' or add it to its upstream dependencies."
],
"sqlState" : "42000"
},
"INVALID_RESET_COMMAND_FORMAT" : {
"message" : [
"Expected format is 'RESET' or 'RESET key'. If you want to include special characters in key, please use quotes, e.g., RESET `key`."
],
"sqlState" : "42000"
},
"INVALID_SAVE_MODE" : {
"message" : [
"The specified save mode <mode> is invalid. Valid save modes include \"append\", \"overwrite\", \"ignore\", \"error\", \"errorifexists\", and \"default\"."
],
"sqlState" : "42000"
},
"INVALID_SCHEMA" : {
"message" : [
"The input schema <inputSchema> is not a valid schema string."
],
"subClass" : {
"NON_STRING_LITERAL" : {
"message" : [
"The input expression must be string literal and not null."
]
},
"NON_STRUCT_TYPE" : {
"message" : [
"The input expression should be evaluated to struct type, but got <dataType>."
]
},
"PARSE_ERROR" : {
"message" : [
"Cannot parse the schema:",
"<reason>"
]
}
},
"sqlState" : "42K07"
},
"INVALID_SCHEMA_OR_RELATION_NAME" : {
"message" : [
"<name> is not a valid name for tables/schemas. Valid names only contain alphabet characters, numbers and _."
],
"sqlState" : "42602"
},
"INVALID_SCHEMA_TYPE_NON_STRUCT" : {
"message" : [
"Invalid schema type. Expect a struct type, but got <dataType>."
],
"sqlState" : "42K09"
},
"INVALID_SET_SYNTAX" : {
"message" : [
"Expected format is 'SET', 'SET key', or 'SET key=value'. If you want to include special characters in key, or include semicolon in value, please use backquotes, e.g., SET `key`=`value`."
],
"sqlState" : "42000"
},
"INVALID_SINGLE_VARIANT_COLUMN" : {
"message" : [
"User specified schema <schema> is invalid when the `singleVariantColumn` option is enabled. The schema must either be a variant field, or a variant field plus a corrupt column field."
],
"sqlState" : "42613"
},
"INVALID_SPARK_CONFIG" : {
"message" : [
"Invalid Spark config:"
],
"subClass" : {
"INVALID_EXECUTOR_HEARTBEAT_INTERVAL" : {
"message" : [
"The value of <networkTimeoutKey>=<networkTimeoutValue>ms must be greater than the value of <executorHeartbeatIntervalKey>=<executorHeartbeatIntervalValue>ms."
]
},
"INVALID_EXECUTOR_MEMORY_OPTIONS" : {
"message" : [
"<executorOptsKey> is not allowed to specify max heap memory settings (was '<javaOpts>'). Use spark.executor.memory instead."
]
},
"INVALID_EXECUTOR_SPARK_OPTIONS" : {
"message" : [
"<executorOptsKey> is not allowed to set Spark options (was '<javaOpts>'). Set them directly on a SparkConf or in a properties file when using ./bin/spark-submit."
]
},
"INVALID_MEMORY_FRACTION" : {
"message" : [
"<memoryFractionKey> should be between 0 and 1 (was '<memoryFractionValue>')."
]
},
"INVALID_SPARK_SUBMIT_DEPLOY_MODE_KEY" : {
"message" : [
"<sparkSubmitDeployModeKey> can only be \"cluster\" or \"client\"."
]
},
"NETWORK_AUTH_MUST_BE_ENABLED" : {
"message" : [
"<networkAuthEnabledConf> must be enabled when enabling encryption."
]
}
},
"sqlState" : "42616"
},
"INVALID_SQLSTATE" : {
"message" : [
"Invalid SQLSTATE value: '<sqlState>'. SQLSTATE must be exactly 5 characters long and contain only A-Z and 0-9. SQLSTATE must not start with '00', '01', or 'XX'."
],
"sqlState" : "428B3"
},
"INVALID_SQL_ARG" : {
"message" : [
"The argument <name> of `sql()` is invalid. Consider to replace it either by a SQL literal or by collection constructor functions such as `map()`, `array()`, `struct()`."
],
"sqlState" : "42K08"
},
"INVALID_SQL_FUNCTION_DATA_ACCESS" : {
"message" : [
"Cannot create a SQL function with CONTAINS SQL that accesses a table/view or a SQL function that reads SQL data. Please use READS SQL DATA instead."
],
"sqlState" : "42K0E"
},
"INVALID_SQL_FUNCTION_PLAN_STRUCTURE" : {
"message" : [
"Invalid SQL function plan structure",
"<plan>"
],
"sqlState" : "XXKD0"
},
"INVALID_SQL_SYNTAX" : {
"message" : [
"Invalid SQL syntax:"
],
"subClass" : {
"ANALYZE_TABLE_UNEXPECTED_NOSCAN" : {
"message" : [
"ANALYZE TABLE(S) ... COMPUTE STATISTICS ... <ctx> must be either NOSCAN or empty."
]
},
"CREATE_FUNC_WITH_COLUMN_CONSTRAINTS" : {
"message" : [
"CREATE FUNCTION with constraints on parameters is not allowed."
]
},
"CREATE_FUNC_WITH_GENERATED_COLUMNS_AS_PARAMETERS" : {
"message" : [
"CREATE FUNCTION with generated columns as parameters is not allowed."
]
},
"CREATE_ROUTINE_WITH_IF_NOT_EXISTS_AND_REPLACE" : {
"message" : [
"Cannot create a routine with both IF NOT EXISTS and REPLACE specified."
]
},
"CREATE_TEMP_FUNC_WITH_DATABASE" : {
"message" : [
"CREATE TEMPORARY FUNCTION with specifying a database(<database>) is not allowed."
]
},
"CREATE_TEMP_FUNC_WITH_IF_NOT_EXISTS" : {
"message" : [
"CREATE TEMPORARY FUNCTION with IF NOT EXISTS is not allowed."
]
},
"CREATE_TEMP_TABLE_USING_PROVIDER" : {
"message" : [
"CREATE TEMPORARY TABLE ... USING ... is a deprecated syntax. To overcome the issue, please use CREATE TEMPORARY VIEW instead."
]
},
"EMPTY_IN_PREDICATE" : {
"message" : [
"IN predicate requires at least one value. Empty IN clauses like 'IN ()' are not allowed. Consider using 'WHERE FALSE' if you need an always-false condition, or provide at least one value in the IN list."
]
},
"EMPTY_PARTITION_VALUE" : {
"message" : [
"Partition key <partKey> must set value."
]
},
"FUNCTION_WITH_UNSUPPORTED_SYNTAX" : {
"message" : [
"The function <prettyName> does not support <syntax>."
]
},
"INVALID_COLUMN_REFERENCE" : {
"message" : [
"Expected a column reference for transform <transform>: <expr>."
]
},
"INVALID_TABLE_FUNCTION_IDENTIFIER_ARGUMENT_MISSING_PARENTHESES" : {
"message" : [
"Syntax error: call to table-valued function is invalid because parentheses are missing around the provided TABLE argument <argumentName>; please surround this with parentheses and try again."
]
},
"INVALID_TABLE_VALUED_FUNC_NAME" : {
"message" : [
"Table valued function cannot specify database name: <funcName>."
]
},
"INVALID_WINDOW_REFERENCE" : {
"message" : [
"Window reference <windowName> is not a window specification."
]
},
"LATERAL_WITHOUT_SUBQUERY_OR_TABLE_VALUED_FUNC" : {
"message" : [
"LATERAL can only be used with subquery and table-valued functions."
]
},
"MULTI_PART_NAME" : {
"message" : [
"<statement> with multiple part name(<name>) is not allowed."
]
},
"OPTION_IS_INVALID" : {
"message" : [
"option or property key <key> is invalid; only <supported> are supported"
]
},
"REPETITIVE_WINDOW_DEFINITION" : {
"message" : [
"The definition of window <windowName> is repetitive."
]
},
"SHOW_FUNCTIONS_INVALID_PATTERN" : {
"message" : [
"Invalid pattern in SHOW FUNCTIONS: <pattern>. It must be a \"STRING\" literal."
]
},
"SHOW_FUNCTIONS_INVALID_SCOPE" : {
"message" : [
"SHOW <scope> FUNCTIONS not supported."
]
},
"TRANSFORM_WRONG_NUM_ARGS" : {
"message" : [
"The transform<transform> requires <expectedNum> parameters but the actual number is <actualNum>."
]
},
"UNRESOLVED_WINDOW_REFERENCE" : {
"message" : [
"Cannot resolve window reference <windowName>."
]
},
"UNSUPPORTED_FUNC_NAME" : {
"message" : [
"Unsupported function name <funcName>."
]
},
"UNSUPPORTED_SQL_STATEMENT" : {
"message" : [
"Unsupported SQL statement: <sqlText>."
]
},
"VARIABLE_TYPE_OR_DEFAULT_REQUIRED" : {
"message" : [
"The definition of a SQL variable requires either a datatype or a DEFAULT clause.",
"For example, use `DECLARE name STRING` or `DECLARE name = 'SQL'` instead of `DECLARE name`."
]
}
},
"sqlState" : "42000"
},
"INVALID_STATEMENT_FOR_EXECUTE_INTO" : {
"message" : [
"The INTO clause of EXECUTE IMMEDIATE is only valid for queries but the given statement is not a query: <sqlString>."
],
"sqlState" : "07501"
},
"INVALID_STATEMENT_OR_CLAUSE" : {
"message" : [
"The statement or clause: <operation> is not valid."
],
"sqlState" : "42601"
},
"INVALID_STREAMING_REAL_TIME_MODE_TRIGGER_INTERVAL" : {
"message" : [
"The real-time trigger interval is set to <interval> ms. This is less than the <minBatchDuration> ms minimum specified by spark.sql.streaming.realTimeMode.minBatchDuration."
],
"sqlState" : "22023"
},
"INVALID_SUBQUERY_EXPRESSION" : {
"message" : [
"Invalid subquery:"
],
"subClass" : {
"SCALAR_SUBQUERY_RETURN_MORE_THAN_ONE_OUTPUT_COLUMN" : {
"message" : [
"Scalar subquery must return only one column, but got <number>."
]
},
"STREAMING_QUERY" : {
"message" : [
"Streaming query is not allowed in subquery expressions."
]
}
},
"sqlState" : "42823"
},
"INVALID_TEMP_OBJ_REFERENCE" : {
"message" : [
"Cannot create the persistent object <objName> of the type <obj> because it references to the temporary object <tempObjName> of the type <tempObj>. Please make the temporary object <tempObjName> persistent, or make the persistent object <objName> temporary."
],
"sqlState" : "42K0F"
},
"INVALID_TIMEZONE" : {
"message" : [
"The timezone: <timeZone> is invalid. The timezone must be either a region-based zone ID or a zone offset. Region IDs must have the form 'area/city', such as 'America/Los_Angeles'. Zone offsets must be in the format '(+|-)HH', '(+|-)HH:mm’ or '(+|-)HH:mm:ss', e.g '-08' , '+01:00' or '-13:33:33', and must be in the range from -18:00 to +18:00. 'Z' and 'UTC' are accepted as synonyms for '+00:00'."
],
"sqlState" : "22009"
},
"INVALID_TIME_TRAVEL_SPEC" : {
"message" : [
"Cannot specify both version and timestamp when time travelling the table."
],
"sqlState" : "42K0E"
},
"INVALID_TIME_TRAVEL_TIMESTAMP_EXPR" : {
"message" : [
"The time travel timestamp expression <expr> is invalid."
],
"subClass" : {
"INPUT" : {
"message" : [
"Cannot be casted to the \"TIMESTAMP\" type."
]
},
"NON_DETERMINISTIC" : {
"message" : [
"Must be deterministic."
]
},
"OPTION" : {
"message" : [
"Timestamp string in the options must be able to cast to TIMESTAMP type."
]
},
"UNEVALUABLE" : {
"message" : [
"Must be evaluable."
]
}
},
"sqlState" : "42K0E"
},
"INVALID_TYPED_LITERAL" : {
"message" : [
"The value of the typed literal <valueType> is invalid: <value>."
],
"sqlState" : "42604"
},
"INVALID_UDF_IMPLEMENTATION" : {
"message" : [
"Function <funcName> does not implement a ScalarFunction or AggregateFunction."
],
"sqlState" : "38000"
},
"INVALID_URL" : {
"message" : [
"The url is invalid: <url>. Use `try_parse_url` to tolerate invalid URL and return NULL instead."
],
"sqlState" : "22P02"
},
"INVALID_USAGE_OF_STAR_OR_REGEX" : {
"message" : [
"Invalid usage of <elem> in <prettyName>."
],
"sqlState" : "42000"
},
"INVALID_UTF8_STRING" : {
"message" : [
"Invalid UTF8 byte sequence found in string: <str>."
],
"sqlState" : "22029"
},
"INVALID_VARIABLE_DECLARATION" : {
"message" : [
"Invalid variable declaration."
],
"subClass" : {
"NOT_ALLOWED_IN_SCOPE" : {
"message" : [
"Declaration of the variable <varName> is not allowed in this scope."
]
},
"ONLY_AT_BEGINNING" : {
"message" : [
"Variable <varName> can only be declared at the beginning of the compound."
]
},
"QUALIFIED_LOCAL_VARIABLE" : {
"message" : [
"The variable <varName> must be declared without a qualifier, as qualifiers are not allowed for local variable declarations."
]
},
"REPLACE_LOCAL_VARIABLE" : {
"message" : [
"The variable <varName> does not support DECLARE OR REPLACE, as local variables cannot be replaced."
]
}
},
"sqlState" : "42K0M"
},
"INVALID_VARIANT_CAST" : {
"message" : [
"The variant value `<value>` cannot be cast into `<dataType>`. Please use `try_variant_get` instead."
],
"sqlState" : "22023"
},
"INVALID_VARIANT_FROM_PARQUET" : {
"message" : [
"Invalid variant."
],
"subClass" : {
"MISSING_FIELD" : {
"message" : [
"Missing <field> field."
]
},
"NULLABLE_OR_NOT_BINARY_FIELD" : {
"message" : [
"The <field> must be a non-nullable binary."
]
},
"WRONG_NUM_FIELDS" : {
"message" : [
"Variant column must contain exactly two fields."
]
}
},
"sqlState" : "22023"
},
"INVALID_VARIANT_GET_PATH" : {
"message" : [
"The path `<path>` is not a valid variant extraction path in `<functionName>`.",
"A valid path should start with `$` and is followed by zero or more segments like `[123]`, `.name`, `['name']`, or `[\"name\"]`."
],
"sqlState" : "22023"
},
"INVALID_VARIANT_SHREDDING_SCHEMA" : {
"message" : [
"The schema `<schema>` is not a valid variant shredding schema."
],
"sqlState" : "22023"
},
"INVALID_VECTOR_NORM_DEGREE" : {
"message" : [
"For the <functionName> function, degree must be 1.0, 2.0, or float('inf'), but got <degree>."
],
"sqlState" : "22023"
},
"INVALID_WHERE_CONDITION" : {
"message" : [
"The WHERE condition <condition> contains invalid expressions: <expressionList>.",
"Rewrite the query to avoid window functions, aggregate functions, and generator functions in the WHERE clause."
],
"sqlState" : "42903"
},
"INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC" : {
"message" : [
"Cannot specify ORDER BY or a window frame for <aggFunc>."
],
"sqlState" : "42601"
},
"INVALID_WITHIN_GROUP_EXPRESSION" : {
"message" : [
"Invalid function <funcName> with WITHIN GROUP."
],
"subClass" : {
"DISTINCT_UNSUPPORTED" : {
"message" : [
"The function does not support DISTINCT with WITHIN GROUP."
]
},
"MISMATCH_WITH_DISTINCT_INPUT" : {
"message" : [
"The function is invoked with DISTINCT and WITHIN GROUP but expressions <funcArg> and <orderingExpr> do not match. The WITHIN GROUP ordering expression must be picked from the function inputs."
]
},
"WITHIN_GROUP_MISSING" : {
"message" : [
"WITHIN GROUP is required for the function."
]
},
"WRONG_NUM_ORDERINGS" : {
"message" : [
"The function requires <expectedNum> orderings in WITHIN GROUP but got <actualNum>."
]
}
},
"sqlState" : "42K0K"
},
"INVALID_WRITER_COMMIT_MESSAGE" : {
"message" : [
"The data source writer has generated an invalid number of commit messages. Expected exactly one writer commit message from each task, but received <detail>."
],
"sqlState" : "42KDE"
},
"INVALID_WRITE_DISTRIBUTION" : {
"message" : [
"The requested write distribution is invalid."
],
"subClass" : {
"PARTITION_NUM_AND_SIZE" : {
"message" : [
"The partition number and advisory partition size can't be specified at the same time."
]
},
"PARTITION_NUM_WITH_UNSPECIFIED_DISTRIBUTION" : {
"message" : [
"The number of partitions can't be specified with unspecified distribution."
]
},
"PARTITION_SIZE_WITH_UNSPECIFIED_DISTRIBUTION" : {
"message" : [
"The advisory partition size can't be specified with unspecified distribution."
]
}
},
"sqlState" : "42000"
},
"JDBC_EXTERNAL_ENGINE_SYNTAX_ERROR" : {
"message" : [
"JDBC external engine syntax error. The error was caused by the query <jdbcQuery>. <externalEngineError>."
],
"subClass" : {
"DURING_OUTPUT_SCHEMA_RESOLUTION" : {
"message" : [
"The error occurred during output schema resolution."
]
},
"DURING_QUERY_EXECUTION" : {
"message" : [
"The error occurred during query execution."
]
}
},
"sqlState" : "42000"
},
"JOIN_CONDITION_IS_NOT_BOOLEAN_TYPE" : {
"message" : [
"The join condition <joinCondition> has the invalid type <conditionType>, expected \"BOOLEAN\"."
],
"sqlState" : "42K0E"
},
"KLL_INVALID_INPUT_SKETCH_BUFFER" : {
"message" : [
"Invalid call to <function>; only valid KLL sketch buffers are supported as inputs (such as those produced by the `kll_sketch_agg` function)."
],
"sqlState" : "22000"
},
"KLL_SKETCH_INVALID_QUANTILE_RANGE" : {
"message" : [
"For function <functionName>, the quantile value must be between 0.0 and 1.0 (inclusive)."
],
"sqlState" : "22003"
},
"KLL_SKETCH_K_MUST_BE_CONSTANT" : {
"message" : [
"For function <functionName>, the k parameter must be a constant value, but got a non-constant expression."
],
"sqlState" : "42K0E"
},
"KLL_SKETCH_K_OUT_OF_RANGE" : {
"message" : [
"For function <functionName>, the k parameter must be between 8 and 65535 (inclusive), but got <k>."
],
"sqlState" : "22003"
},
"KRYO_BUFFER_OVERFLOW" : {
"message" : [
"Kryo serialization failed: <exceptionMsg>. To avoid this, increase \"<bufferSizeConfKey>\" value."
],
"sqlState" : "54006"
},
"LABELS_MISMATCH" : {
"message" : [
"Begin label <beginLabel> does not match the end label <endLabel>."
],
"sqlState" : "42K0L"
},
"LABEL_OR_FOR_VARIABLE_ALREADY_EXISTS" : {
"message" : [
"The label or FOR variable <label> already exists. Choose another name or rename the existing one."
],
"sqlState" : "42K0L"
},
"LABEL_OR_FOR_VARIABLE_NAME_FORBIDDEN" : {
"message" : [
"The label or FOR variable name <label> is forbidden."
],
"sqlState" : "42K0L"
},
"LATERAL_JOIN_WITH_ARROW_UDTF_UNSUPPORTED" : {
"message" : [
"LATERAL JOIN with Arrow-optimized user-defined table functions (UDTFs) is not supported. Arrow UDTFs cannot be used on the right-hand side of a lateral join.",
"Please use a regular UDTF instead, or restructure your query to avoid the lateral join."
],
"sqlState" : "0A000"
},
"LOAD_DATA_PATH_NOT_EXISTS" : {
"message" : [
"LOAD DATA input path does not exist: <path>."
],
"sqlState" : "42K03"
},
"LOCAL_MUST_WITH_SCHEMA_FILE" : {
"message" : [
"LOCAL must be used together with the schema of `file`, but got: `<actualSchema>`."
],
"sqlState" : "42601"
},
"LOCAL_RELATION_SIZE_LIMIT_EXCEEDED" : {
"message" : [
"Local relation size (<actualSize> bytes) exceeds the limit (<sizeLimit> bytes)."
],
"sqlState" : "54000"
},
"LOCATION_ALREADY_EXISTS" : {
"message" : [
"Cannot name the managed table as <identifier>, as its associated location <location> already exists. Please pick a different table name, or remove the existing location first."
],
"sqlState" : "42710"
},
"MALFORMED_CHARACTER_CODING" : {
"message" : [
"Invalid value found when performing <function> with <charset>"
],
"sqlState" : "22000"
},
"MALFORMED_CSV_RECORD" : {
"message" : [
"Malformed CSV record: <badRecord>"
],
"sqlState" : "KD000"
},
"MALFORMED_LOG_FILE" : {
"message" : [
"Log file was malformed: failed to read correct log version from <text>."
],
"sqlState" : "KD002"
},
"MALFORMED_PROTOBUF_MESSAGE" : {
"message" : [
"Malformed Protobuf messages are detected in message deserialization. Parse Mode: <failFastMode>. To process malformed protobuf message as null result, try setting the option 'mode' as 'PERMISSIVE'."
],
"sqlState" : "XX000"
},
"MALFORMED_RECORD_IN_PARSING" : {
"message" : [
"Malformed records are detected in record parsing: <badRecord>.",
"Parse Mode: <failFastMode>. To process malformed records as null result, try setting the option 'mode' as 'PERMISSIVE'."
],
"subClass" : {
"CANNOT_PARSE_JSON_ARRAYS_AS_STRUCTS" : {
"message" : [
"Parsing JSON arrays as structs is forbidden."
]
},
"CANNOT_PARSE_STRING_AS_DATATYPE" : {
"message" : [
"Cannot parse the value <fieldValue> of the field <fieldName> as target spark data type <targetType> from the input type <inputType>."
]
},
"WITHOUT_SUGGESTION" : {
"message" : [
""
]
}
},
"sqlState" : "22023"
},
"MALFORMED_STATE_IN_RATE_PER_MICRO_BATCH_SOURCE" : {
"message" : [
"Malformed state in RatePerMicroBatch source."
],
"subClass" : {
"INVALID_OFFSET" : {
"message" : [
"The offset value is invalid: startOffset should less than or equal to the endOffset, but startOffset(<startOffset>) > endOffset(<endOffset>)."
]
},
"INVALID_TIMESTAMP" : {
"message" : [
"The timestamp value is invalid: startTimestamp should less than or equal to the endTimestamp, but startTimestamp(<startTimestamp>) > endTimestamp(<endTimestamp>).",
"This could happen when the streaming query is restarted with a newer `startingTimestamp` and reprocess the first batch (i.e. batch 0). Please consider using a new checkpoint location."
]
}
},
"sqlState" : "22000"
},
"MALFORMED_VARIANT" : {
"message" : [
"Variant binary is malformed. Please check the data source is valid."
],
"sqlState" : "22023"
},
"MATERIALIZED_VIEW_WITH_MULTIPLE_QUERIES" : {
"message" : [
"Invalid destination <tableName> with multiple flows: <flows>."
],
"sqlState" : "42000"
},
"MERGE_CARDINALITY_VIOLATION" : {
"message" : [
"The ON search condition of the MERGE statement matched a single row from the target table with multiple rows of the source table.",
"This could result in the target row being operated on more than once with an update or delete operation and is not allowed."
],
"sqlState" : "23K01"
},
"MERGE_WITHOUT_WHEN" : {
"message" : [
"There must be at least one WHEN clause in a MERGE statement."
],
"sqlState" : "42601"
},
"MISSING_AGGREGATION" : {
"message" : [
"The non-aggregating expression <expression> is based on columns which are not participating in the GROUP BY clause.",
"Add the columns or the expression to the GROUP BY, aggregate the expression, or use <expressionAnyValue> if you do not care which of the values within a group is returned."
],
"sqlState" : "42803"
},
"MISSING_ATTRIBUTES" : {
"message" : [
"Resolved attribute(s) <missingAttributes> missing from <input> in operator <operator>."
],
"subClass" : {
"RESOLVED_ATTRIBUTE_APPEAR_IN_OPERATION" : {
"message" : [
"Attribute(s) with the same name appear in the operation: <operation>.",
"Please check if the right attribute(s) are used."
]
},
"RESOLVED_ATTRIBUTE_MISSING_FROM_INPUT" : {
"message" : [
""
]
}
},
"sqlState" : "XX000"
},
"MISSING_CATALOG_ABILITY" : {
"message" : [
"Catalog <plugin> does not support"
],
"subClass" : {
"CREATE_FUNCTION" : {
"message" : [
"CREATE FUNCTION."
]
},
"DROP_FUNCTION" : {
"message" : [
"DROP FUNCTION."
]
},
"FUNCTIONS" : {
"message" : [
"functions."
]
},
"NAMESPACES" : {
"message" : [
"namespaces."
]
},
"PROCEDURES" : {
"message" : [
"procedures."
]
},
"REFRESH_FUNCTION" : {
"message" : [
"REFRESH FUNCTION."
]
},
"TABLES" : {
"message" : [
"tables."
]
},
"TABLE_VALUED_FUNCTIONS" : {
"message" : [
"table-valued functions."
]
},
"VIEWS" : {
"message" : [
"views."
]
}
},
"sqlState" : "0A000"
},
"MISSING_DATABASE_FOR_V1_SESSION_CATALOG" : {
"message" : [
"Database name is not specified in the v1 session catalog. Please ensure to provide a valid database name when interacting with the v1 catalog."
],
"sqlState" : "3F000"
},
"MISSING_GROUP_BY" : {
"message" : [
"The query does not include a GROUP BY clause. Add GROUP BY or turn it into the window functions using OVER clauses."
],
"sqlState" : "42803"
},
"MISSING_TIMEOUT_CONFIGURATION" : {
"message" : [
"The operation has timed out, but no timeout duration is configured. To set a processing time-based timeout, use 'GroupState.setTimeoutDuration()' in your 'mapGroupsWithState' or 'flatMapGroupsWithState' operation. For event-time-based timeout, use 'GroupState.setTimeoutTimestamp()' and define a watermark using 'Dataset.withWatermark()'."
],
"sqlState" : "HY000"
},
"MISSING_WINDOW_SPECIFICATION" : {
"message" : [
"Window specification is not defined in the WINDOW clause for <windowName>. For more information about WINDOW clauses, please refer to '<docroot>/sql-ref-syntax-qry-select-window.html'."
],
"sqlState" : "42P20"
},
"MULTIPART_FLOW_NAME_NOT_SUPPORTED" : {
"message" : [
"Flow with multipart name '<flowName>' is not supported."
],
"sqlState" : "0A000"
},
"MULTIPLE_PRIMARY_KEYS" : {
"message" : [
"Multiple primary keys are defined: <columns>. Please ensure that only one primary key is defined for the table."
],
"sqlState" : "42P16"
},
"MULTIPLE_QUERY_RESULT_CLAUSES_WITH_PIPE_OPERATORS" : {
"message" : [
"<clause1> and <clause2> cannot coexist in the same SQL pipe operator using '|>'. Please separate the multiple result clauses into separate pipe operators and then retry the query again."
],
"sqlState" : "42000"
},
"MULTIPLE_TIME_TRAVEL_SPEC" : {
"message" : [
"Cannot specify time travel in both the time travel clause and options."
],
"sqlState" : "42K0E"
},
"MULTIPLE_XML_DATA_SOURCE" : {
"message" : [
"Detected multiple data sources with the name <provider> (<sourceNames>). Please specify the fully qualified class name or remove <externalSource> from the classpath."
],
"sqlState" : "42710"
},
"MULTI_ALIAS_WITHOUT_GENERATOR" : {
"message" : [
"Multi part aliasing (<names>) is not supported with <expr> as it is not a generator function."
],
"sqlState" : "42K0E"
},
"MULTI_SOURCES_UNSUPPORTED_FOR_EXPRESSION" : {
"message" : [
"The expression <expr> does not support more than one source."
],
"sqlState" : "42K0E"
},
"MULTI_UDF_INTERFACE_ERROR" : {
"message" : [
"Not allowed to implement multiple UDF interfaces, UDF class <className>."
],
"sqlState" : "0A000"
},
"NAMED_PARAMETERS_NOT_SUPPORTED" : {
"message" : [
"Named parameters are not supported for function <functionName>; please retry the query with positional arguments to the function call instead."
],
"sqlState" : "4274K"
},
"NAMED_PARAMETER_SUPPORT_DISABLED" : {
"message" : [
"Cannot call function <functionName> because named argument references are not enabled here.",
"In this case, the named argument reference was <argument>.",
"Set \"spark.sql.allowNamedFunctionArguments\" to \"true\" to turn on feature."
],
"sqlState" : "0A000"
},
"NEGATIVE_SCALE_DISALLOWED" : {
"message" : [
"Negative scale is not allowed: '<scale>'. Set the config <sqlConf> to \"true\" to allow it."
],
"sqlState" : "0A000"
},
"NEGATIVE_VALUES_IN_FREQUENCY_EXPRESSION" : {
"message" : [
"Found the negative value in <frequencyExpression>: <negativeValue>, but expected a positive integral value."
],
"sqlState" : "22003"
},
"NESTED_AGGREGATE_FUNCTION" : {
"message" : [
"It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query."
],
"sqlState" : "42607"
},
"NESTED_EXECUTE_IMMEDIATE" : {
"message" : [
"Nested EXECUTE IMMEDIATE commands are not allowed. Please ensure that the SQL query provided (<sqlString>) does not contain another EXECUTE IMMEDIATE command."
],
"sqlState" : "07501"
},
"NEW_CHECK_CONSTRAINT_VIOLATION" : {
"message" : [
"The new check constraint (<expression>) cannot be added because it would be violated by existing data in table <tableName>. Please ensure all existing rows satisfy the constraint before adding it."
],
"sqlState" : "23512"
},
"NONEXISTENT_FIELD_NAME_IN_LIST" : {
"message" : [
"Field(s) <nonExistFields> do(es) not exist. Available fields: <fieldNames>"
],
"sqlState" : "HV091"
},
"NON_DETERMINISTIC_CHECK_CONSTRAINT" : {
"message" : [
"The check constraint `<checkCondition>` is non-deterministic. Check constraints must only contain deterministic expressions."
],
"sqlState" : "42621"
},
"NON_FOLDABLE_ARGUMENT" : {
"message" : [
"The function <funcName> requires the parameter <paramName> to be a foldable expression of the type <paramType>, but the actual argument is a non-foldable."
],
"sqlState" : "42K08"
},
"NON_LAST_MATCHED_CLAUSE_OMIT_CONDITION" : {
"message" : [
"When there are more than one MATCHED clauses in a MERGE statement, only the last MATCHED clause can omit the condition."
],
"sqlState" : "42613"
},
"NON_LAST_NOT_MATCHED_BY_SOURCE_CLAUSE_OMIT_CONDITION" : {
"message" : [
"When there are more than one NOT MATCHED BY SOURCE clauses in a MERGE statement, only the last NOT MATCHED BY SOURCE clause can omit the condition."
],
"sqlState" : "42613"
},
"NON_LAST_NOT_MATCHED_BY_TARGET_CLAUSE_OMIT_CONDITION" : {
"message" : [
"When there are more than one NOT MATCHED [BY TARGET] clauses in a MERGE statement, only the last NOT MATCHED [BY TARGET] clause can omit the condition."
],
"sqlState" : "42613"
},
"NON_LITERAL_PIVOT_VALUES" : {
"message" : [
"Literal expressions required for pivot values, found <expression>."
],
"sqlState" : "42K08"
},
"NON_PARTITION_COLUMN" : {
"message" : [
"PARTITION clause cannot contain the non-partition column: <columnName>."
],
"sqlState" : "42000"
},
"NON_TIME_WINDOW_NOT_SUPPORTED_IN_STREAMING" : {
"message" : [
"Window function is not supported in <windowFunc> (as column <columnName>) on streaming DataFrames/Datasets.",
"Structured Streaming only supports time-window aggregation using the WINDOW function. (window specification: <windowSpec>)"
],
"sqlState" : "42KDE"
},
"NOT_ALLOWED_IN_FROM" : {
"message" : [
"Not allowed in the FROM clause:"
],
"subClass" : {
"LATERAL_WITH_PIVOT" : {
"message" : [
"LATERAL together with PIVOT."
]
},
"LATERAL_WITH_UNPIVOT" : {
"message" : [
"LATERAL together with UNPIVOT."
]
},
"UNPIVOT_WITH_PIVOT" : {
"message" : [
"UNPIVOT together with PIVOT."
]
}
},
"sqlState" : "42601"
},
"NOT_ALLOWED_IN_PIPE_OPERATOR_WHERE" : {
"message" : [
"Not allowed in the pipe WHERE clause:"
],
"subClass" : {
"WINDOW_CLAUSE" : {
"message" : [
"WINDOW clause."
]
}
},
"sqlState" : "42601"
},
"NOT_A_CONSTANT_STRING" : {
"message" : [
"The expression <expr> used for the routine or clause <name> must be a constant STRING which is NOT NULL."
],
"subClass" : {
"NOT_CONSTANT" : {
"message" : [
"To be considered constant the expression must not depend on any columns, contain a subquery, or invoke a non deterministic function such as rand()."
]
},
"NULL" : {
"message" : [
"The expression evaluates to NULL."
]
},
"WRONG_TYPE" : {
"message" : [
"The data type of the expression is <dataType>."
]
}
},
"sqlState" : "42601"
},
"NOT_A_PARTITIONED_TABLE" : {
"message" : [
"Operation <operation> is not allowed for <tableIdentWithDB> because it is not a partitioned table."
],
"sqlState" : "42809"
},
"NOT_A_SCALAR_FUNCTION" : {
"message" : [
"<functionName> appears as a scalar expression here, but the function was defined as a table function. Please update the query to move the function call into the FROM clause, or redefine <functionName> as a scalar function instead."
],
"sqlState" : "42887"
},
"NOT_A_TABLE_FUNCTION" : {
"message" : [
"<functionName> appears as a table function here, but the function was defined as a scalar function. Please update the query to move the function call outside the FROM clause, or redefine <functionName> as a table function instead."
],
"sqlState" : "42887"
},
"NOT_NULL_ASSERT_VIOLATION" : {
"message" : [
"NULL value appeared in non-nullable field: <walkedTypePath>If the schema is inferred from a Scala tuple/case class, or a Java bean, please try to use scala.Option[_] or other nullable types (such as java.lang.Integer instead of int/scala.Int)."
],
"sqlState" : "42000"
},
"NOT_NULL_CONSTRAINT_VIOLATION" : {
"message" : [
"Assigning a NULL is not allowed here."
],
"subClass" : {
"ARRAY_ELEMENT" : {
"message" : [
"The array <columnPath> is defined to contain only elements that are NOT NULL."
]
},
"MAP_VALUE" : {
"message" : [
"The map <columnPath> is defined to contain only values that are NOT NULL."
]
}
},
"sqlState" : "42000"
},
"NOT_SUPPORTED_CHANGE_COLUMN" : {
"message" : [
"ALTER TABLE ALTER/CHANGE COLUMN is not supported for changing <table>'s column <originName> with type <originType> to <newName> with type <newType>."
],
"sqlState" : "0A000"
},
"NOT_SUPPORTED_CHANGE_SAME_COLUMN" : {
"message" : [
"ALTER TABLE ALTER/CHANGE COLUMN is not supported for changing <table>'s column <fieldName> including its nested fields multiple times in the same command."
],
"sqlState" : "0A000"
},
"NOT_SUPPORTED_COMMAND_FOR_V2_TABLE" : {
"message" : [
"<cmd> is not supported for v2 tables."
],
"sqlState" : "0A000"
},
"NOT_SUPPORTED_COMMAND_WITHOUT_HIVE_SUPPORT" : {
"message" : [
"<cmd> is not supported, if you want to enable it, please set \"spark.sql.catalogImplementation\" to \"hive\"."
],
"sqlState" : "0A000"
},
"NOT_SUPPORTED_IN_JDBC_CATALOG" : {
"message" : [
"Not supported command in JDBC catalog:"
],
"subClass" : {
"COMMAND" : {
"message" : [
"<cmd>"
]
},
"COMMAND_WITH_PROPERTY" : {
"message" : [
"<cmd> with property <property>."
]
}
},
"sqlState" : "0A000"
},
"NOT_UNRESOLVED_ENCODER" : {
"message" : [
"Unresolved encoder expected, but <attr> was found."
],
"sqlState" : "42601"
},
"NO_DEFAULT_COLUMN_VALUE_AVAILABLE" : {
"message" : [
"Can't determine the default value for <colName> since it is not nullable and it has no default value."
],
"sqlState" : "42608"
},
"NO_HANDLER_FOR_UDAF" : {
"message" : [
"No handler for UDAF '<functionName>'. Use sparkSession.udf.register(...) instead."
],
"sqlState" : "42000"
},
"NO_MERGE_ACTION_SPECIFIED" : {
"message" : [
"df.mergeInto needs to be followed by at least one of whenMatched/whenNotMatched/whenNotMatchedBySource."
],
"sqlState" : "42K0E"
},
"NO_SQL_TYPE_IN_PROTOBUF_SCHEMA" : {
"message" : [
"Cannot find <catalystFieldPath> in Protobuf schema."
],
"sqlState" : "42S22"
},
"NO_UDF_INTERFACE" : {
"message" : [
"UDF class <className> doesn't implement any UDF interface."
],
"sqlState" : "38000"
},
"NULLABLE_COLUMN_OR_FIELD" : {
"message" : [
"Column or field <name> is nullable while it's required to be non-nullable."
],
"sqlState" : "42000"
},
"NULLABLE_ROW_ID_ATTRIBUTES" : {
"message" : [
"Row ID attributes cannot be nullable: <nullableRowIdAttrs>."
],
"sqlState" : "42000"
},
"NULL_DATA_SOURCE_OPTION" : {
"message" : [
"Data source read/write option <option> cannot have null value."
],
"sqlState" : "22024"
},
"NULL_MAP_KEY" : {
"message" : [
"Cannot use null as map key."
],
"sqlState" : "2200E"
},
"NULL_QUERY_STRING_EXECUTE_IMMEDIATE" : {
"message" : [
"Execute immediate requires a non-null variable as the query string, but the provided variable <varName> is null."
],
"sqlState" : "22004"
},
"NUMERIC_OUT_OF_SUPPORTED_RANGE" : {
"message" : [
"The value <value> cannot be interpreted as a numeric since it has more than 38 digits."
],
"sqlState" : "22003"
},
"NUMERIC_VALUE_OUT_OF_RANGE" : {
"message" : [
""
],
"subClass" : {
"WITHOUT_SUGGESTION" : {
"message" : [
"The <roundedValue> rounded half up from <originalValue> cannot be represented as Decimal(<precision>, <scale>)."
]
},
"WITH_SUGGESTION" : {
"message" : [
"<value> cannot be represented as Decimal(<precision>, <scale>). If necessary set <config> to \"false\" to bypass this error, and return NULL instead."
]
}
},
"sqlState" : "22003"
},
"NUM_COLUMNS_MISMATCH" : {
"message" : [
"<operator> can only be performed on inputs with the same number of columns, but the first input has <firstNumColumns> columns and the <invalidOrdinalNum> input has <invalidNumColumns> columns."
],
"sqlState" : "42826"
},
"NUM_TABLE_VALUE_ALIASES_MISMATCH" : {
"message" : [
"Number of given aliases does not match number of output columns.",
"Function name: <funcName>; number of aliases: <aliasesNum>; number of output columns: <outColsNum>."
],
"sqlState" : "42826"
},
"OPERATION_CANCELED" : {
"message" : [
"Operation has been canceled."
],
"sqlState" : "HY008"
},
"ORDER_BY_POS_OUT_OF_RANGE" : {
"message" : [
"ORDER BY position <index> is not in select list (valid range is [1, <size>])."
],
"sqlState" : "42805"
},
"PARQUET_CONVERSION_FAILURE" : {
"message" : [
"Unable to create a Parquet converter for the data type <dataType> whose Parquet type is <parquetType>."
],
"subClass" : {
"DECIMAL" : {
"message" : [
"Parquet DECIMAL type can only be backed by INT32, INT64, FIXED_LEN_BYTE_ARRAY, or BINARY."
]
},
"UNSUPPORTED" : {
"message" : [
"Please modify the conversion making sure it is supported."
]
},
"WITHOUT_DECIMAL_METADATA" : {
"message" : [
"Please read this column/field as Spark BINARY type."
]
}
},
"sqlState" : "42846"
},
"PARQUET_TYPE_ILLEGAL" : {
"message" : [
"Illegal Parquet type: <parquetType>."
],
"sqlState" : "42846"
},
"PARQUET_TYPE_NOT_RECOGNIZED" : {
"message" : [
"Unrecognized Parquet type: <field>."
],
"sqlState" : "42846"
},
"PARQUET_TYPE_NOT_SUPPORTED" : {
"message" : [
"Parquet type not yet supported: <parquetType>."
],
"sqlState" : "42846"
},
"PARSE_EMPTY_STATEMENT" : {
"message" : [
"Syntax error, unexpected empty statement."
],
"sqlState" : "42617"
},
"PARSE_MODE_UNSUPPORTED" : {
"message" : [
"The function <funcName> doesn't support the <mode> mode. Acceptable modes are PERMISSIVE and FAILFAST."
],
"sqlState" : "42601"
},
"PARSE_SYNTAX_ERROR" : {
"message" : [
"Syntax error at or near <error><hint>."
],
"sqlState" : "42601"
},
"PARTITIONS_ALREADY_EXIST" : {
"message" : [
"Cannot ADD or RENAME TO partition(s) <partitionList> in table <tableName> because they already exist.",
"Choose a different name, drop the existing partition, or add the IF NOT EXISTS clause to tolerate a pre-existing partition."
],
"sqlState" : "428FT"
},
"PARTITIONS_NOT_FOUND" : {
"message" : [
"The partition(s) <partitionList> cannot be found in table <tableName>.",
"Verify the partition specification and table name.",
"To tolerate the error on drop use ALTER TABLE … DROP IF EXISTS PARTITION."
],
"sqlState" : "428FT"
},
"PARTITION_COLUMN_NOT_FOUND_IN_SCHEMA" : {
"message" : [
"Partition column <column> not found in schema <schema>. Please provide the existing column for partitioning."
],
"sqlState" : "42000"
},
"PARTITION_TRANSFORM_EXPRESSION_NOT_IN_PARTITIONED_BY" : {
"message" : [
"The expression <expression> must be inside 'partitionedBy'."
],
"sqlState" : "42S23"
},
"PATH_ALREADY_EXISTS" : {
"message" : [
"Path <outputPath> already exists. Set mode as \"overwrite\" to overwrite the existing path."
],
"sqlState" : "42K04"
},
"PATH_NOT_FOUND" : {
"message" : [
"Path does not exist: <path>."
],
"sqlState" : "42K03"
},
"PIPELINE_DATASET_WITHOUT_FLOW" : {
"message" : [
"Pipeline dataset <identifier> does not have any defined flows. Please attach a query with the dataset's definition, or explicitly define at least one flow that writes to the dataset."
],
"sqlState" : "0A000"
},
"PIPELINE_DUPLICATE_IDENTIFIERS" : {
"message" : [
"A duplicate identifier was found for elements registered in the pipeline's dataflow graph."
],
"subClass" : {
"FLOW" : {
"message" : [
"Flow <flowName> was found in multiple datasets: <datasetNames>"
]
},
"OUTPUT" : {
"message" : [
"Attempted to register a <outputType1> with identifier <outputName>, but a <outputType2> has already been registered with that identifier. Please ensure all outputs created within this pipeline have unique identifiers."
]
}
},
"sqlState" : "42710"
},
"PIPELINE_GRAPH_NOT_TOPOLOGICALLY_SORTED" : {
"message" : [
"There is a cycle between <flowName> and <inputName>."
],
"sqlState" : "0A000"
},
"PIPELINE_SQL_GRAPH_ELEMENT_REGISTRATION_ERROR" : {
"message" : [
"<message>",
"<offendingQuery>",
"<codeLocation>"
],
"sqlState" : "42000"
},
"PIPELINE_STORAGE_ROOT_INVALID" : {
"message" : [
"Pipeline storage root must be an absolute path with a URI scheme (e.g., file://, s3a://, hdfs://). Got: `<storage_root>`."
],
"sqlState" : "42K03"
},
"PIPE_OPERATOR_AGGREGATE_EXPRESSION_CONTAINS_NO_AGGREGATE_FUNCTION" : {
"message" : [
"Non-grouping expression <expr> is provided as an argument to the |> AGGREGATE pipe operator but does not contain any aggregate function; please update it to include an aggregate function and then retry the query again."
],
"sqlState" : "0A000"
},
"PIPE_OPERATOR_CONTAINS_AGGREGATE_FUNCTION" : {
"message" : [
"Aggregate function <expr> is not allowed when using the pipe operator |> <clause> clause; please use the pipe operator |> AGGREGATE clause instead."
],
"sqlState" : "0A000"
},
"PIVOT_VALUE_DATA_TYPE_MISMATCH" : {
"message" : [
"Invalid pivot value '<value>': value data type <valueType> does not match pivot column data type <pivotType>."
],
"sqlState" : "42K09"
},
"PLAN_VALIDATION_FAILED_RULE_EXECUTOR" : {
"message" : [
"The input plan of <ruleExecutor> is invalid: <reason>"
],
"sqlState" : "XXKD0"
},
"PLAN_VALIDATION_FAILED_RULE_IN_BATCH" : {
"message" : [
"Rule <rule> in batch <batch> generated an invalid plan: <reason>"
],
"sqlState" : "XXKD0"
},
"POINTER_ARRAY_OUT_OF_MEMORY" : {
"message" : [
"Not enough memory to grow pointer array"
],
"sqlState" : "82002"
},
"PROTOBUF_DEPENDENCY_NOT_FOUND" : {
"message" : [
"Could not find dependency: <dependencyName>."
],
"sqlState" : "42K0G"
},
"PROTOBUF_DESCRIPTOR_FILE_NOT_FOUND" : {
"message" : [
"Error reading Protobuf descriptor file at path: <filePath>."
],
"sqlState" : "42K0G"
},
"PROTOBUF_FIELD_MISSING" : {
"message" : [
"Searching for <field> in Protobuf schema at <protobufSchema> gave <matchSize> matches. Candidates: <matches>."
],
"sqlState" : "42K0G"
},
"PROTOBUF_FIELD_MISSING_IN_SQL_SCHEMA" : {
"message" : [
"Found <field> in Protobuf schema but there is no match in the SQL schema."
],
"sqlState" : "42K0G"
},
"PROTOBUF_FIELD_TYPE_MISMATCH" : {
"message" : [
"Type mismatch encountered for field: <field>."
],
"sqlState" : "42K0G"
},
"PROTOBUF_MESSAGE_NOT_FOUND" : {
"message" : [
"Unable to locate Message <messageName> in Descriptor."
],
"sqlState" : "42K0G"
},
"PROTOBUF_NOT_LOADED_SQL_FUNCTIONS_UNUSABLE" : {
"message" : [
"Cannot call the <functionName> SQL function because the Protobuf data source is not loaded.",
"Please restart your job or session with the 'spark-protobuf' package loaded, such as by using the --packages argument on the command line, and then retry your query or command again."
],
"sqlState" : "22KD3"
},
"PROTOBUF_TYPE_NOT_SUPPORT" : {
"message" : [
"Protobuf type not yet supported: <protobufType>."
],
"sqlState" : "42K0G"
},
"PYTHON_DATA_SOURCE_ERROR" : {
"message" : [
"Failed to <action> Python data source <type>: <msg>"
],
"sqlState" : "38000"
},
"PYTHON_STREAMING_DATA_SOURCE_RUNTIME_ERROR" : {
"message" : [
"Failed when Python streaming data source perform <action>: <msg>"
],
"sqlState" : "38000"
},
"RECURSION_LEVEL_LIMIT_EXCEEDED" : {
"message" : [
"Recursion level limit <levelLimit> reached but query has not exhausted, try increasing it like 'WITH RECURSIVE t(col) MAX RECURSION LEVEL 200'."
],
"sqlState" : "42836"
},
"RECURSION_ROW_LIMIT_EXCEEDED" : {
"message" : [
"Recursion row limit <rowLimit> reached but query has not exhausted, try setting a larger LIMIT value when querying the CTE relation."
],
"sqlState" : "42836"
},
"RECURSIVE_CTE_IN_LEGACY_MODE" : {
"message" : [
"Recursive definitions cannot be used in legacy CTE precedence mode (spark.sql.legacy.ctePrecedencePolicy=LEGACY)."
],
"sqlState" : "42836"
},
"RECURSIVE_CTE_WITH_LEGACY_INLINE_FLAG" : {
"message" : [
"Recursive definitions cannot be used when legacy inline flag is set to true (spark.sql.legacy.inlineCTEInCommands=true)."
],
"sqlState" : "42836"
},
"RECURSIVE_PROTOBUF_SCHEMA" : {
"message" : [
"Found recursive reference in Protobuf schema, which can not be processed by Spark by default: <fieldDescriptor>. try setting the option `recursive.fields.max.depth` 1 to 10. Going beyond 10 levels of recursion is not allowed."
],
"sqlState" : "42K0G"
},
"RECURSIVE_VIEW" : {
"message" : [
"Recursive view <viewIdent> detected (cycle: <newPath>)."
],
"sqlState" : "42K0H"
},
"REF_DEFAULT_VALUE_IS_NOT_ALLOWED_IN_PARTITION" : {
"message" : [
"References to DEFAULT column values are not allowed within the PARTITION clause."
],
"sqlState" : "42601"
},
"REMAINDER_BY_ZERO" : {
"message" : [
"Remainder by zero. Use `try_mod` to tolerate divisor being 0 and return NULL instead. If necessary set <config> to \"false\" to bypass this error."
],
"sqlState" : "22012"
},
"RENAME_SRC_PATH_NOT_FOUND" : {
"message" : [
"Failed to rename as <sourcePath> was not found."
],
"sqlState" : "42K03"
},
"REPEATED_CLAUSE" : {
"message" : [
"The <clause> clause may be used at most once per <operation> operation."
],
"sqlState" : "42614"
},
"REQUIRED_PARAMETER_NOT_FOUND" : {
"message" : [
"Cannot invoke routine <routineName> because the parameter named <parameterName> is required, but the routine call did not supply a value. Please update the routine call to supply an argument value (either positionally at index <index> or by name) and retry the query again."
],
"sqlState" : "4274K"
},
"REQUIRES_EXPLICIT_NAME_IN_WATERMARK_CLAUSE" : {
"message" : [
"The watermark clause requires an explicit name if expression is specified, but got <sqlExpr>."
],
"sqlState" : "42000"
},
"REQUIRES_SINGLE_PART_NAMESPACE" : {
"message" : [
"<sessionCatalog> requires a single-part namespace, but got <namespace>."
],
"sqlState" : "42K05"
},
"ROUTINE_ALREADY_EXISTS" : {
"message" : [
"Cannot create the <newRoutineType> <routineName> because a <existingRoutineType> of that name already exists.",
"Choose a different name, drop or replace the existing <existingRoutineType>, or add the IF NOT EXISTS clause to tolerate a pre-existing <newRoutineType>."
],
"sqlState" : "42723"
},
"ROUTINE_NOT_FOUND" : {
"message" : [
"The routine <routineName> cannot be found. Verify the spelling and correctness of the schema and catalog.",
"If you did not qualify the name with a schema and catalog, verify the current_schema() output, or qualify the name with the correct schema and catalog.",
"To tolerate the error on drop use DROP ... IF EXISTS."
],
"sqlState" : "42883"
},
"ROW_SUBQUERY_TOO_MANY_ROWS" : {
"message" : [
"More than one row returned by a subquery used as a row."
],
"sqlState" : "21000"
},
"ROW_VALUE_IS_NULL" : {
"message" : [
"Found NULL in a row at the index <index>, expected a non-NULL value."
],
"sqlState" : "22023"
},
"RULE_ID_NOT_FOUND" : {
"message" : [
"Not found an id for the rule name \"<ruleName>\". Please modify RuleIdCollection.scala if you are adding a new rule."
],
"sqlState" : "22023"
},
"RUN_EMPTY_PIPELINE" : {
"message" : [
"Pipelines are expected to have at least one non-temporary dataset defined (tables, persisted views) but no non-temporary datasets were found in your pipeline.",
"Please verify that you have included the expected source files, and that your source code includes table definitions (e.g., CREATE MATERIALIZED VIEW in SQL code, @dp.table in python code)."
],
"sqlState" : "42617"
},
"SCALAR_FUNCTION_NOT_COMPATIBLE" : {
"message" : [
"ScalarFunction <scalarFunc> not overrides method 'produceResult(InternalRow)' with custom implementation."
],
"sqlState" : "42K0O"
},
"SCALAR_FUNCTION_NOT_FULLY_IMPLEMENTED" : {
"message" : [
"ScalarFunction <scalarFunc> not implements or overrides method 'produceResult(InternalRow)'."
],
"sqlState" : "42K0P"
},
"SCALAR_SUBQUERY_IS_IN_GROUP_BY_OR_AGGREGATE_FUNCTION" : {
"message" : [
"The correlated scalar subquery '<sqlExpr>' is neither present in GROUP BY, nor in an aggregate function.",
"Add it to GROUP BY using ordinal position or wrap it in `first()` (or `first_value`) if you don't care which value you get."
],
"sqlState" : "0A000"
},
"SCALAR_SUBQUERY_TOO_MANY_ROWS" : {
"message" : [
"More than one row returned by a subquery used as an expression."
],
"sqlState" : "21000"
},
"SCHEMA_ALREADY_EXISTS" : {
"message" : [
"Cannot create schema <schemaName> because it already exists.",
"Choose a different name, drop the existing schema, or add the IF NOT EXISTS clause to tolerate pre-existing schema."
],
"sqlState" : "42P06"
},
"SCHEMA_NOT_EMPTY" : {
"message" : [
"Cannot drop a schema <schemaName> because it contains objects.",
"Use DROP SCHEMA ... CASCADE to drop the schema and all its objects."
],
"sqlState" : "2BP01"
},
"SCHEMA_NOT_FOUND" : {
"message" : [
"The schema <schemaName> cannot be found. Verify the spelling and correctness of the schema and catalog.",
"If you did not qualify the name with a catalog, verify the current_schema() output, or qualify the name with the correct catalog.",
"To tolerate the error on drop use DROP SCHEMA IF EXISTS."
],
"sqlState" : "42704"
},
"SECOND_FUNCTION_ARGUMENT_NOT_INTEGER" : {
"message" : [
"The second argument of <functionName> function needs to be an integer."
],
"sqlState" : "22023"
},
"SEED_EXPRESSION_IS_UNFOLDABLE" : {
"message" : [
"The seed expression <seedExpr> of the expression <exprWithSeed> must be foldable."
],
"sqlState" : "42K08"
},
"SHOW_COLUMNS_WITH_CONFLICT_NAMESPACE" : {
"message" : [
"SHOW COLUMNS with conflicting namespaces: <namespaceA> != <namespaceB>."
],
"sqlState" : "42K05"
},
"SKETCH_INVALID_LG_NOM_ENTRIES" : {
"message" : [
"Invalid call to <function>; the `lgNomEntries` value must be between <min> and <max>, inclusive: <value>."
],
"sqlState" : "22003"
},
"SORT_BY_WITHOUT_BUCKETING" : {
"message" : [
"sortBy must be used together with bucketBy."
],
"sqlState" : "42601"
},
"SPARK_JOB_CANCELLED" : {
"message" : [
"Job <jobId> cancelled <reason>"
],
"sqlState" : "XXKDA"
},
"SPECIFY_BUCKETING_IS_NOT_ALLOWED" : {
"message" : [
"A CREATE TABLE without explicit column list cannot specify bucketing information.",
"Please use the form with explicit column list and specify bucketing information.",
"Alternatively, allow bucketing information to be inferred by omitting the clause."
],
"sqlState" : "42601"
},
"SPECIFY_CLUSTER_BY_WITH_BUCKETING_IS_NOT_ALLOWED" : {
"message" : [
"Cannot specify both CLUSTER BY and CLUSTERED BY INTO BUCKETS."
],
"sqlState" : "42908"
},
"SPECIFY_CLUSTER_BY_WITH_PARTITIONED_BY_IS_NOT_ALLOWED" : {
"message" : [
"Cannot specify both CLUSTER BY and PARTITIONED BY."
],
"sqlState" : "42908"
},
"SPECIFY_PARTITION_IS_NOT_ALLOWED" : {
"message" : [
"A CREATE TABLE without explicit column list cannot specify PARTITIONED BY.",
"Please use the form with explicit column list and specify PARTITIONED BY.",
"Alternatively, allow partitioning to be inferred by omitting the PARTITION BY clause."
],
"sqlState" : "42601"
},
"SPILL_OUT_OF_MEMORY" : {
"message" : [
"Error while calling spill() on <consumerToSpill> : <message>"
],
"sqlState" : "82003"
},
"SQL_CONF_NOT_FOUND" : {
"message" : [
"The SQL config <sqlConf> cannot be found. Please verify that the config exists."
],
"sqlState" : "42K0I"
},
"SQL_SCRIPT_IN_EXECUTE_IMMEDIATE" : {
"message" : [
"SQL Scripts in EXECUTE IMMEDIATE commands are not allowed. Please ensure that the SQL query provided (<sqlString>) is not SQL Script. Make sure the sql_string is a well-formed SQL statement and does not contain BEGIN and END."
],
"sqlState" : "07501"
},
"STAGE_MATERIALIZATION_MULTIPLE_FAILURES" : {
"message" : [
"Multiple failures (<failureCount>) in stage materialization: <failureDetails>"
],
"sqlState" : "XX000"
},
"STAR_GROUP_BY_POS" : {
"message" : [
"Star (*) is not allowed in a select list when GROUP BY an ordinal position is used."
],
"sqlState" : "0A000"
},
"STATEFUL_PROCESSOR_CANNOT_PERFORM_OPERATION_WITH_INVALID_HANDLE_STATE" : {
"message" : [
"Failed to perform stateful processor operation=<operationType> with invalid handle state=<handleState>."
],
"sqlState" : "42802"
},
"STATEFUL_PROCESSOR_CANNOT_PERFORM_OPERATION_WITH_INVALID_TIME_MODE" : {
"message" : [
"Failed to perform stateful processor operation=<operationType> with invalid timeMode=<timeMode>"
],
"sqlState" : "42802"
},
"STATEFUL_PROCESSOR_DUPLICATE_STATE_VARIABLE_DEFINED" : {
"message" : [
"State variable with name <stateVarName> has already been defined in the StatefulProcessor."
],
"sqlState" : "42802"
},
"STATEFUL_PROCESSOR_INCORRECT_TIME_MODE_TO_ASSIGN_TTL" : {
"message" : [
"Cannot use TTL for state=<stateName> in timeMode=<timeMode>, use TimeMode.ProcessingTime() instead."
],
"sqlState" : "42802"
},
"STATEFUL_PROCESSOR_TTL_DURATION_MUST_BE_POSITIVE" : {
"message" : [
"TTL duration must be greater than zero for State store operation=<operationType> on state=<stateName>."
],
"sqlState" : "42802"
},
"STATEFUL_PROCESSOR_UNKNOWN_TIME_MODE" : {
"message" : [
"Unknown time mode <timeMode>. Accepted timeMode modes are 'none', 'processingTime', 'eventTime'"
],
"sqlState" : "42802"
},
"STATE_REPARTITION_INVALID_CHECKPOINT" : {
"message" : [
"The provided checkpoint location '<checkpointLocation>' is in an invalid state."
],
"subClass" : {
"LAST_BATCH_ABANDONED_REPARTITION" : {
"message" : [
"The last batch ID <lastBatchId> is a repartition batch with <lastBatchShufflePartitions> shuffle partitions and didn't finish successfully.",
"You're now requesting to repartition to <numPartitions> shuffle partitions.",
"Please retry with the same number of shuffle partitions as the previous attempt.",
"Once that completes successfully, you can repartition to another number of shuffle partitions."
]
},
"LAST_BATCH_FAILED" : {
"message" : [
"The last batch ID <lastBatchId> didn't finish successfully. Please make sure the streaming query finishes successfully, before repartitioning.",
"If using ProcessingTime trigger, you can use AvailableNow trigger instead, which will make sure the query terminates successfully by itself.",
"If you want to skip this check, set enforceExactlyOnceSink parameter in repartition to false.",
"But this can cause duplicate output records from the failed batch when using exactly-once sinks."
]
},
"MISSING_OFFSET_SEQ_METADATA" : {
"message" : [
"The OffsetSeq (v<version>) metadata is missing for batch ID <batchId>. Please make sure the checkpoint is from a supported Spark version (Spark 4.0+)."
]
},
"NO_BATCH_FOUND" : {
"message" : [
"No microbatch has been recorded in the checkpoint location. Make sure the streaming query has successfully completed at least one microbatch before repartitioning."
]
},
"NO_COMMITTED_BATCH" : {
"message" : [
"There is no committed microbatch. Make sure the streaming query has successfully completed at least one microbatch before repartitioning."
]
},
"OFFSET_SEQ_NOT_FOUND" : {
"message" : [
"Offset sequence entry for batch ID <batchId> not found. You might have set a very low value for",
"'spark.sql.streaming.minBatchesToRetain' config during the streaming query execution or you deleted files in the checkpoint location."
]
},
"SHUFFLE_PARTITIONS_ALREADY_MATCH" : {
"message" : [
"The number of shuffle partitions in the last committed batch (id=<batchId>) is the same as the requested <numPartitions> partitions.",
"Hence, already has the requested number of partitions, so no-op."
]
},
"UNSUPPORTED_OFFSET_SEQ_VERSION" : {
"message" : [
"Unsupported offset sequence version <version>. Please make sure the checkpoint is from a supported Spark version (Spark 4.0+)."
]
},
"UNSUPPORTED_PROVIDER" : {
"message" : [
"<provider> is not supported"
]
},
"UNSUPPORTED_STATEFUL_OPERATOR" : {
"message" : [
"Unsupported stateful operator <operatorName>. Please use a checkpoint with supported stateful operators."
]
},
"UNSUPPORTED_TRANSFORM_WITH_STATE_VARIABLE_TYPE" : {
"message" : [
"Unsupported transformWithState variable type <variableType> (TTL_Enabled: <ttlEnabled>, ColFamilyName: <colFamilyName>).",
"Please use a checkpoint with supported transform with state variable types."
]
}
},
"sqlState" : "55019"
},
"STATE_REPARTITION_INVALID_PARAMETER" : {
"message" : [
"The repartition parameter <parameter> is invalid:"
],
"subClass" : {
"IS_EMPTY" : {
"message" : [
"cannot be empty."
]
},
"IS_NOT_GREATER_THAN_ZERO" : {
"message" : [
"must be greater than zero."
]
},
"IS_NULL" : {
"message" : [
"cannot be null."
]
}
},
"sqlState" : "42616"
},
"STATE_REWRITER_INVALID_CHECKPOINT" : {
"message" : [
"The state rewrite checkpoint location '<checkpointLocation>' is in an invalid state."
],
"subClass" : {
"MISSING_KEY_ENCODER_SPEC" : {
"message" : [
"Key state encoder spec is expected for column family '<colFamilyName>' but was not found.",
"This is likely a bug, please report it."
]
},
"MISSING_OPERATOR_METADATA" : {
"message" : [
"No stateful operator metadata was found for batch <batchId>.",
"Ensure that the checkpoint is for a stateful streaming query and the query ran on a Spark version that supports operator metadata (Spark 4.0+)."
]
},
"STATE_CHECKPOINT_FORMAT_VERSION_MISMATCH" : {
"message" : [
"The checkpoint format version in SQLConf does not match the checkpoint version in the commit log.",
"Expected version <expectedVersion>, but found <actualVersion>.",
"Please set '<sqlConfKey>' to <expectedVersion> in your SQLConf before retrying."
]
},
"UNSUPPORTED_STATE_STORE_METADATA_VERSION" : {
"message" : [
"Unsupported state store metadata version encountered.",
"Only StateStoreMetadataV1 and StateStoreMetadataV2 are supported."
]
}
},
"sqlState" : "55019"
},
"STATE_STORE_CANNOT_CREATE_COLUMN_FAMILY_WITH_RESERVED_CHARS" : {
"message" : [
"Failed to create column family with unsupported starting character and name=<colFamilyName>."
],
"sqlState" : "42802"
},
"STATE_STORE_CANNOT_USE_COLUMN_FAMILY_WITH_INVALID_NAME" : {
"message" : [
"Failed to perform column family operation=<operationName> with invalid name=<colFamilyName>. Column family name cannot be empty or include leading/trailing spaces or use the reserved keyword=default"
],
"sqlState" : "42802"
},
"STATE_STORE_CHECKPOINT_IDS_NOT_SUPPORTED" : {
"message" : [
"<msg>"
],
"sqlState" : "KD002"
},
"STATE_STORE_CHECKPOINT_LOCATION_NOT_EMPTY" : {
"message" : [
"The checkpoint location <checkpointLocation> should be empty on batch 0",
"Please either use a new checkpoint location, or delete the existing data in the checkpoint location."
],
"sqlState" : "42K03"
},
"STATE_STORE_COLUMN_FAMILY_SCHEMA_INCOMPATIBLE" : {
"message" : [
"Incompatible schema transformation with column family=<colFamilyName>, oldSchema=<oldSchema>, newSchema=<newSchema>."
],
"sqlState" : "42802"
},
"STATE_STORE_COMMIT_VALIDATION_FAILED" : {
"message" : [
"State store commit validation failed for batch <batchId>.",
"Expected <expectedCommits> commits but got <actualCommits>.",
"Missing commits: <missingCommits>.",
"This error typically occurs when using operations like show() or limit() in foreachBatch that don't process all partitions, or if you are swallowing an exception and returning from the function early.",
"To fix: ensure your foreachBatch function processes the entire DataFrame."
],
"sqlState" : "XXKST"
},
"STATE_STORE_HANDLE_NOT_INITIALIZED" : {
"message" : [
"The handle has not been initialized for this StatefulProcessor.",
"Please only use the StatefulProcessor within the transformWithState operator."
],
"sqlState" : "42802"
},
"STATE_STORE_INCORRECT_NUM_ORDERING_COLS_FOR_RANGE_SCAN" : {
"message" : [
"Incorrect number of ordering ordinals=<numOrderingCols> for range scan encoder. The number of ordering ordinals cannot be zero or greater than number of schema columns."
],
"sqlState" : "42802"
},
"STATE_STORE_INCORRECT_NUM_PREFIX_COLS_FOR_PREFIX_SCAN" : {
"message" : [
"Incorrect number of prefix columns=<numPrefixCols> for prefix scan encoder. Prefix columns cannot be zero or greater than or equal to num of schema columns."
],
"sqlState" : "42802"
},
"STATE_STORE_INVALID_CONFIG_AFTER_RESTART" : {
"message" : [
"Cannot change <configName> from <oldConfig> to <newConfig> between restarts. Please set <configName> to <oldConfig>, or restart with a new checkpoint directory."
],
"sqlState" : "42K06"
},
"STATE_STORE_INVALID_PROVIDER" : {
"message" : [
"The given State Store Provider <inputClass> does not extend org.apache.spark.sql.execution.streaming.state.StateStoreProvider."
],
"sqlState" : "42K06"
},
"STATE_STORE_INVALID_STAMP" : {
"message" : [
"Invalid stamp <providedStamp>, current stamp: <currentStamp>."
],
"sqlState" : "XXKST"
},
"STATE_STORE_INVALID_STATE_MACHINE_TRANSITION" : {
"message" : [
"Invalid state machine transition detected for state store <storeId>. Old state: <oldState>, New state: <newState>, Operation: <operation>."
],
"sqlState" : "XXKST"
},
"STATE_STORE_INVALID_VALUE_SCHEMA_EVOLUTION" : {
"message" : [
"Schema evolution is not possible new value_schema=<newValueSchema> and old value_schema=<oldValueSchema>",
"Please check https://avro.apache.org/docs/1.11.1/specification/_print/#schema-resolution for valid schema evolution."
],
"sqlState" : "XXKST"
},
"STATE_STORE_INVALID_VARIABLE_TYPE_CHANGE" : {
"message" : [
"Cannot change <stateVarName> to <newType> between query restarts. Please set <stateVarName> to <oldType>, or restart with a new checkpoint directory."
],
"sqlState" : "42K06"
},
"STATE_STORE_KEY_ROW_FORMAT_VALIDATION_FAILURE" : {
"message" : [
"The streaming query failed to validate written state for key row for stateStore=<stateStoreID>.",
"The following reasons may cause this:",
"1. An old Spark version wrote the checkpoint that is incompatible with the current one",
"2. Corrupt checkpoint files",
"3. The query changed in an incompatible way between restarts",
"For the first case, use a new checkpoint directory or use the original Spark version",
"to process the streaming state. Retrieved error_message=<errorMsg>"
],
"sqlState" : "XX000"
},
"STATE_STORE_KEY_SCHEMA_NOT_COMPATIBLE" : {
"message" : [
"Provided key schema does not match existing state key schema.",
"Please check number and type of fields.",
"Existing key_schema=<storedKeySchema> and new key_schema=<newKeySchema>.",
"If you want to force running the query without schema validation, please set spark.sql.streaming.stateStore.stateSchemaCheck to false.",
"However, please note that running the query with incompatible schema could cause non-deterministic behavior."
],
"sqlState" : "XXKST"
},
"STATE_STORE_NULL_TYPE_ORDERING_COLS_NOT_SUPPORTED" : {
"message" : [
"Null type ordering column with name=<fieldName> at index=<index> is not supported for range scan encoder."
],
"sqlState" : "42802"
},
"STATE_STORE_OPERATION_OUT_OF_ORDER" : {
"message" : [
"Streaming stateful operator attempted to access state store out of order. This is a bug, please retry. error_msg=<errorMsg>"
],
"sqlState" : "XXKST"
},
"STATE_STORE_PROVIDER_DOES_NOT_SUPPORT_FINE_GRAINED_STATE_REPLAY" : {
"message" : [
"The given State Store Provider <inputClass> does not extend org.apache.spark.sql.execution.streaming.state.SupportsFineGrainedReplay.",
"Therefore, it does not support option snapshotStartBatchId or readChangeFeed in state data source."
],
"sqlState" : "42K06"
},
"STATE_STORE_ROW_CHECKSUM_VERIFICATION_FAILED" : {
"message" : [
"Row checksum verification failed for stateStore=<stateStoreId>. The row may be corrupted.",
"Expected checksum: <expectedChecksum>, Computed checksum: <computedChecksum>."
],
"sqlState" : "XXKST"
},
"STATE_STORE_STATE_SCHEMA_FILES_THRESHOLD_EXCEEDED" : {
"message" : [
"The number of state schema files <numStateSchemaFiles> exceeds the maximum number of state schema files for this query: <maxStateSchemaFiles>.",
"Added: <addedColumnFamilies>, Removed: <removedColumnFamilies>",
"Please set 'spark.sql.streaming.stateStore.stateSchemaFilesThreshold' to a higher number, or revert state schema modifications"
],
"sqlState" : "42K06"
},
"STATE_STORE_UNEXPECTED_EMPTY_FILE_IN_ROCKSDB_ZIP" : {
"message" : [
"Detected an empty file <fileName> when trying to write the RocksDB snapshot zip file <zipFileName>. This is unexpected, please retry."
],
"sqlState" : "XXKST"
},
"STATE_STORE_UNKNOWN_INTERNAL_COLUMN_FAMILY" : {
"message" : [
"Unknown internal column family: <colFamilyName>.",
"This internal column family is not recognized by the StateStoreColumnFamilySchemaUtils."
],
"sqlState" : "XXKST"
},
"STATE_STORE_UNSUPPORTED_OPERATION" : {
"message" : [
"<operationType> operation not supported with <entity>"
],
"sqlState" : "XXKST"
},
"STATE_STORE_UNSUPPORTED_OPERATION_BINARY_INEQUALITY" : {
"message" : [
"Binary inequality column is not supported with state store. Provided schema: <schema>."
],
"sqlState" : "XXKST"
},
"STATE_STORE_UNSUPPORTED_OPERATION_ON_MISSING_COLUMN_FAMILY" : {
"message" : [
"State store operation=<operationType> not supported on missing column family=<colFamilyName>."
],
"sqlState" : "42802"
},
"STATE_STORE_VALUE_ROW_FORMAT_VALIDATION_FAILURE" : {
"message" : [
"The streaming query failed to validate written state for value row for stateStore=<stateStoreID>.",
"The following reasons may cause this:",
"1. An old Spark version wrote the checkpoint that is incompatible with the current one",
"2. Corrupt checkpoint files",
"3. The query changed in an incompatible way between restarts",
"For the first case, use a new checkpoint directory or use the original Spark version",
"to process the streaming state. Retrieved error_message=<errorMsg>"
],
"sqlState" : "XX000"
},
"STATE_STORE_VALUE_SCHEMA_EVOLUTION_THRESHOLD_EXCEEDED" : {
"message" : [
"The number of state schema evolutions <numSchemaEvolutions> exceeds the maximum number of state schema evolutions, <maxSchemaEvolutions>, allowed for this column family.",
"Offending column family: <colFamilyName>",
"Please set 'spark.sql.streaming.stateStore.valueStateSchemaEvolutionThreshold' to a higher number, or revert state schema modifications"
],
"sqlState" : "42K06"
},
"STATE_STORE_VALUE_SCHEMA_NOT_COMPATIBLE" : {
"message" : [
"Provided value schema does not match existing state value schema.",
"Please check number and type of fields.",
"Existing value_schema=<storedValueSchema> and new value_schema=<newValueSchema>.",
"If you want to force running the query without schema validation, please set spark.sql.streaming.stateStore.stateSchemaCheck to false.",
"However, please note that running the query with incompatible schema could cause non-deterministic behavior."
],
"sqlState" : "XXKST"
},
"STATE_STORE_VARIABLE_SIZE_ORDERING_COLS_NOT_SUPPORTED" : {
"message" : [
"Variable size ordering column with name=<fieldName> at index=<index> is not supported for range scan encoder."
],
"sqlState" : "42802"
},
"STATIC_PARTITION_COLUMN_IN_INSERT_COLUMN_LIST" : {
"message" : [
"Static partition column <staticName> is also specified in the column list."
],
"sqlState" : "42713"
},
"STDS_COMMITTED_BATCH_UNAVAILABLE" : {
"message" : [
"No committed batch found, checkpoint location: <checkpointLocation>. Ensure that the query has run and committed any microbatch before stopping."
],
"sqlState" : "KD006"
},
"STDS_CONFLICT_OPTIONS" : {
"message" : [
"The options <options> cannot be specified together. Please specify the one."
],
"sqlState" : "42613"
},
"STDS_FAILED_TO_READ_OPERATOR_METADATA" : {
"message" : [
"Failed to read the operator metadata for checkpointLocation=<checkpointLocation> and batchId=<batchId>.",
"Either the file does not exist, or the file is corrupted.",
"Rerun the streaming query to construct the operator metadata, and report to the corresponding communities or vendors if the error persists."
],
"sqlState" : "42K03"
},
"STDS_FAILED_TO_READ_STATE_SCHEMA" : {
"message" : [
"Failed to read the state schema. Either the file does not exist, or the file is corrupted. options: <sourceOptions>.",
"Rerun the streaming query to construct the state schema, and report to the corresponding communities or vendors if the error persists."
],
"sqlState" : "42K03"
},
"STDS_INTERNAL_ERROR" : {
"message" : [
"Internal error: <message>",
"Please, report this bug to the corresponding communities or vendors, and provide the full stack trace."
],
"sqlState" : "XXKST"
},
"STDS_INVALID_OPTION_VALUE" : {
"message" : [
"Invalid value for source option '<optionName>':"
],
"subClass" : {
"IS_EMPTY" : {
"message" : [
"cannot be empty."
]
},
"IS_NEGATIVE" : {
"message" : [
"cannot be negative."
]
},
"WITH_MESSAGE" : {
"message" : [
"<message>"
]
}
},
"sqlState" : "42616"
},
"STDS_MIXED_CHECKPOINT_FORMAT_VERSIONS_NOT_SUPPORTED" : {
"message" : [
"Reading state across different checkpoint format versions is not supported.",
"startBatchId=<startBatchId>, endBatchId=<endBatchId>.",
"startFormatVersion=<startFormatVersion>, endFormatVersion=<endFormatVersion>."
],
"sqlState" : "KD002"
},
"STDS_NO_PARTITION_DISCOVERED_IN_STATE_STORE" : {
"message" : [
"The state does not have any partition. Please double check that the query points to the valid state. options: <sourceOptions>"
],
"sqlState" : "KD006"
},
"STDS_OFFSET_LOG_UNAVAILABLE" : {
"message" : [
"The offset log for <batchId> does not exist, checkpoint location: <checkpointLocation>.",
"Please specify the batch ID which is available for querying - you can query the available batch IDs via using state metadata data source."
],
"sqlState" : "KD006"
},
"STDS_OFFSET_METADATA_LOG_UNAVAILABLE" : {
"message" : [
"Metadata is not available for offset log for <batchId>, checkpoint location: <checkpointLocation>.",
"The checkpoint seems to be only run with older Spark version(s). Run the streaming query with the recent Spark version, so that Spark constructs the state metadata."
],
"sqlState" : "KD006"
},
"STDS_REQUIRED_OPTION_UNSPECIFIED" : {
"message" : [
"'<optionName>' must be specified."
],
"sqlState" : "42601"
},
"STREAMING_CHECKPOINT_MISSING_METADATA_FILE" : {
"message" : [
"Checkpoint location <checkpointLocation> is in an inconsistent state: the metadata file is missing but offset and/or commit logs contain data. Please restore the metadata file or create a new checkpoint directory."
],
"sqlState" : "42K03"
},
"STREAMING_OUTPUT_MODE" : {
"message" : [
"Invalid streaming output mode: <outputMode>."
],
"subClass" : {
"INVALID" : {
"message" : [
"Accepted output modes are 'Append', 'Complete', 'Update'."
]
},
"UNSUPPORTED_DATASOURCE" : {
"message" : [
"This output mode is not supported in Data Source <className>."
]
},
"UNSUPPORTED_OPERATION" : {
"message" : [
"This output mode is not supported for <operation> on streaming DataFrames/DataSets."
]
}
},
"sqlState" : "42KDE"
},
"STREAMING_PYTHON_RUNNER_INITIALIZATION_COMMUNICATION_FAILURE" : {
"message" : [
"Streaming Runner initialization failed during initial config communication. Cause: <msg>"
],
"sqlState" : "XXKST"
},
"STREAMING_PYTHON_RUNNER_INITIALIZATION_FAILURE" : {
"message" : [
"Streaming Runner initialization failed, returned <resFromPython>. Cause: <msg>"
],
"sqlState" : "XXKST"
},
"STREAMING_PYTHON_RUNNER_INITIALIZATION_TIMEOUT_FAILURE" : {
"message" : [
"Streaming Runner initialization failed. Socket connection timeout. Cause: <msg>"
],
"sqlState" : "XXKST"
},
"STREAMING_QUERY_EVOLUTION_ERROR" : {
"message" : [
"Streaming query evolution error:"
],
"subClass" : {
"DUPLICATE_SOURCE_NAMES" : {
"message" : [
"Duplicate streaming source names detected: <names>. Each streaming source must have a unique name."
]
},
"INVALID_SOURCE_NAME" : {
"message" : [
"Invalid streaming source name '<sourceName>'. Source names must only contain ASCII letters (a-z, A-Z), digits (0-9), and underscores (_)."
]
},
"SOURCE_NAMING_NOT_SUPPORTED" : {
"message" : [
"Streaming source naming is not supported. Source name '<name>' was provided but the feature is disabled. Please enable the feature by setting spark.sql.streaming.queryEvolution.enableSourceEvolution to true."
]
},
"UNNAMED_STREAMING_SOURCES_WITH_ENFORCEMENT" : {
"message" : [
"All streaming sources must be named when spark.sql.streaming.queryEvolution.enableSourceEvolution is enabled. Unnamed sources found: <sourceInfo>. Use the name() method to assign names to all streaming sources."
]
}
},
"sqlState" : "42KDE"
},
"STREAMING_REAL_TIME_MODE" : {
"message" : [
"Streaming real-time mode has the following limitation:"
],
"subClass" : {
"ASYNC_PROGRESS_TRACKING_NOT_SUPPORTED" : {
"message" : [
"Async progress tracking is not supported in real-time mode. Set option asyncProgressTrackingEnabled to false and retry your query."
]
},
"IDENTICAL_SOURCES_IN_UNION_NOT_SUPPORTED" : {
"message" : [
"Real-time mode does not support union on two or more identical streaming data sources in a single query. This includes scenarios such as referencing the same source DataFrame more than once, or using two data sources with identical configurations for some sources. For Kafka, avoid reusing the same DataFrame and create different ones. Sources provided in the query: <sources>"
]
},
"INPUT_STREAM_NOT_SUPPORTED" : {
"message" : [
"The input stream <className> is not supported in Real-time Mode."
]
},
"OPERATOR_OR_SINK_NOT_IN_ALLOWLIST" : {
"message" : [
"The <errorType>(s): <message> not in the <errorType> allowlist for Real-Time Mode. To bypass this check, set spark.sql.streaming.realTimeMode.allowlistCheck to false. By changing this, you agree to run the query at your own risk."
]
},
"OUTPUT_MODE_NOT_SUPPORTED" : {
"message" : [
"The output mode <outputMode> is not supported. To work around this limitation, set the output mode to Update. In the future, <outputMode> may be supported."
]
},
"SINK_NOT_SUPPORTED" : {
"message" : [
"The <className> sink is currently not supported. See the Real-Time Mode User Guide for a list of supported sinks."
]
}
},
"sqlState" : "0A000"
},
"STREAMING_STATEFUL_OPERATOR_MISSING_STATE_DIRECTORY" : {
"message" : [
"Cannot restart streaming query with stateful operators because the state directory is empty or missing.",
"Stateful operators in current batch: [<OpsInCurBatchSeq>].",
"This typically occurs when state files have been deleted or the streaming query was previously run without stateful operators but restarted with stateful operators.",
"Please remove the stateful operators, use a new checkpoint location, or restore the missing state files."
],
"sqlState" : "42K03"
},
"STREAMING_STATEFUL_OPERATOR_NOT_MATCH_IN_STATE_METADATA" : {
"message" : [
"Streaming stateful operator name does not match with the operator in state metadata. This likely to happen when user adds/removes/changes stateful operator of existing streaming query.",
"Stateful operators in the metadata: [<OpsInMetadataSeq>]; Stateful operators in current batch: [<OpsInCurBatchSeq>]."
],
"sqlState" : "42K03"
},
"STREAMING_UNFINISHED_REPARTITION_DETECTED" : {
"message" : [
"The latest batch <batchId> is for an unfinished state repartitioning i.e. last committed batch is: <lastCommittedBatchId>).",
"Please retry the repartitioning operation and let it finish successfully before starting the streaming query."
],
"sqlState" : "55019"
},
"STREAM_FAILED" : {
"message" : [
"Query [id = <id>, runId = <runId>] terminated with exception: <message>"
],
"sqlState" : "XXKST"
},
"STRUCT_ARRAY_LENGTH_MISMATCH" : {
"message" : [
"Input row doesn't have expected number of values required by the schema. <expected> fields are required while <actual> values are provided."
],
"sqlState" : "2201E"
},
"ST_INVALID_ALGORITHM_VALUE" : {
"message" : [
"Invalid or unsupported edge interpolation algorithm value: '<alg>'."
],
"sqlState" : "22023"
},
"ST_INVALID_CRS_VALUE" : {
"message" : [
"Invalid or unsupported CRS (coordinate reference system) value: '<crs>'."
],
"sqlState" : "22023"
},
"ST_INVALID_SRID_VALUE" : {
"message" : [
"Invalid or unsupported SRID (spatial reference identifier) value: <srid>."
],
"sqlState" : "22023"
},
"SUM_OF_LIMIT_AND_OFFSET_EXCEEDS_MAX_INT" : {
"message" : [
"The sum of the LIMIT clause and the OFFSET clause must not be greater than the maximum 32-bit integer value (2,147,483,647) but found limit = <limit>, offset = <offset>."
],
"sqlState" : "22003"
},
"SYNTAX_DISCONTINUED" : {
"message" : [
"Support of the clause or keyword: <clause> has been discontinued in this context."
],
"subClass" : {
"BANG_EQUALS_NOT" : {
"message" : [
"The '!' keyword is only supported as an alias for the prefix operator 'NOT'.",
"Use the 'NOT' keyword instead for infix clauses such as `NOT LIKE`, `NOT IN`, `NOT BETWEEN`, etc.",
"To re-enable the '!' keyword, set \"spark.sql.legacy.bangEqualsNot\" to \"true\"."
]
}
},
"sqlState" : "42601"
},
"TABLE_OR_VIEW_ALREADY_EXISTS" : {
"message" : [
"Cannot create table or view <relationName> because it already exists.",
"Choose a different name, drop or replace the existing object, or add the IF NOT EXISTS clause to tolerate pre-existing objects."
],
"sqlState" : "42P07"
},
"TABLE_OR_VIEW_NOT_FOUND" : {
"message" : [
"The table or view <relationName> cannot be found. Verify the spelling and correctness of the schema and catalog.",
"If you did not qualify the name with a schema, verify the current_schema() output, or qualify the name with the correct schema and catalog.",
"To tolerate the error on drop use DROP VIEW IF EXISTS or DROP TABLE IF EXISTS."
],
"sqlState" : "42P01"
},
"TABLE_VALUED_ARGUMENTS_NOT_YET_IMPLEMENTED_FOR_SQL_FUNCTIONS" : {
"message" : [
"Cannot <action> SQL user-defined function <functionName> with TABLE arguments because this functionality is not yet implemented."
],
"sqlState" : "0A000"
},
"TABLE_VALUED_FUNCTION_FAILED_TO_ANALYZE_IN_PYTHON" : {
"message" : [
"Failed to analyze the Python user defined table function: <msg>"
],
"sqlState" : "38000"
},
"TABLE_VALUED_FUNCTION_REQUIRED_METADATA_INCOMPATIBLE_WITH_CALL" : {
"message" : [
"Failed to evaluate the table function <functionName> because its table metadata <requestedMetadata>, but the function call <invalidFunctionCallProperty>."
],
"sqlState" : "22023"
},
"TABLE_VALUED_FUNCTION_REQUIRED_METADATA_INVALID" : {
"message" : [
"Failed to evaluate the table function <functionName> because its table metadata was invalid; <reason>."
],
"sqlState" : "22023"
},
"TABLE_VALUED_FUNCTION_TOO_MANY_TABLE_ARGUMENTS" : {
"message" : [
"There are too many table arguments for table-valued function.",
"It allows one table argument, but got: <num>.",
"If you want to allow it, please set \"spark.sql.allowMultipleTableArguments.enabled\" to \"true\""
],
"sqlState" : "54023"
},
"TASK_WRITE_FAILED" : {
"message" : [
"Task failed while writing rows to <path>."
],
"sqlState" : "58030"
},
"TEMP_TABLE_OR_VIEW_ALREADY_EXISTS" : {
"message" : [
"Cannot create the temporary view <relationName> because it already exists.",
"Choose a different name, drop or replace the existing view."
],
"sqlState" : "42P07"
},
"TEMP_VIEW_NAME_TOO_MANY_NAME_PARTS" : {
"message" : [
"CREATE TEMPORARY VIEW or the corresponding Dataset APIs only accept single-part view names, but got: <actualName>."
],
"sqlState" : "428EK"
},
"THETA_INVALID_INPUT_SKETCH_BUFFER" : {
"message" : [
"Invalid call to <function>; only valid Theta sketch buffers are supported as inputs (such as those produced by the `theta_sketch_agg` function)."
],
"sqlState" : "22000"
},
"THETA_LG_NOM_ENTRIES_MUST_BE_CONSTANT" : {
"message" : [
"Invalid call to <function>; the `lgNomEntries` value must be a constant value, but got a non-constant expression."
],
"sqlState" : "42K0E"
},
"TRAILING_COMMA_IN_SELECT" : {
"message" : [
"Trailing comma detected in SELECT clause. Remove the trailing comma before the FROM clause."
],
"sqlState" : "42601"
},
"TRANSFORM_WITH_STATE_SCHEMA_MUST_BE_NULLABLE" : {
"message" : [
"If Avro encoding is enabled, all the fields in the schema for column family <columnFamilyName> must be nullable",
"when using the TransformWithState operator.",
"Please make the schema nullable. Current schema: <schema>"
],
"sqlState" : "XXKST"
},
"TRANSFORM_WITH_STATE_USER_FUNCTION_ERROR" : {
"message" : [
"An error occurred in the user-defined function <function> of the StatefulProcessor. Reason: <reason>."
],
"sqlState" : "39000"
},
"TRANSPOSE_EXCEED_ROW_LIMIT" : {
"message" : [
"Number of rows exceeds the allowed limit of <maxValues> for TRANSPOSE. If this was intended, set <config> to at least the current row count."
],
"sqlState" : "54006"
},
"TRANSPOSE_INVALID_INDEX_COLUMN" : {
"message" : [
"Invalid index column for TRANSPOSE because: <reason>"
],
"sqlState" : "42804"
},
"TRANSPOSE_NO_LEAST_COMMON_TYPE" : {
"message" : [
"Transpose requires non-index columns to share a least common type, but <dt1> and <dt2> do not."
],
"sqlState" : "42K09"
},
"TUPLE_INVALID_INPUT_SKETCH_BUFFER" : {
"message" : [
"Invalid call to <function>; only valid Tuple sketch buffers are supported as inputs (such as those produced by the corresponding `tuple_sketch_agg_*` function)."
],
"sqlState" : "22000"
},
"TUPLE_INVALID_SKETCH_MODE" : {
"message" : [
"Invalid call to <function>: mode '<mode>' is not supported. Valid modes are: <validModes>."
],
"sqlState" : "22023"
},
"TUPLE_IS_EMPTY" : {
"message" : [
"Due to Scala's limited support of tuple, empty tuple is not supported."
],
"sqlState" : "22004"
},
"TUPLE_SIZE_EXCEEDS_LIMIT" : {
"message" : [
"Due to Scala's limited support of tuple, tuples with more than 22 elements are not supported."
],
"sqlState" : "54011"
},
"UDTF_ALIAS_NUMBER_MISMATCH" : {
"message" : [
"The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF.",
"Expected <aliasesSize> aliases, but got <aliasesNames>.",
"Please ensure that the number of aliases provided matches the number of columns output by the UDTF."
],
"sqlState" : "42802"
},
"UDTF_INVALID_ALIAS_IN_REQUESTED_ORDERING_STRING_FROM_ANALYZE_METHOD" : {
"message" : [
"Failed to evaluate the user-defined table function because its 'analyze' method returned a requested OrderingColumn whose column name expression included an unnecessary alias <aliasName>; please remove this alias and then try the query again."
],
"sqlState" : "42802"
},
"UDTF_INVALID_REQUESTED_SELECTED_EXPRESSION_FROM_ANALYZE_METHOD_REQUIRES_ALIAS" : {
"message" : [
"Failed to evaluate the user-defined table function because its 'analyze' method returned a requested 'select' expression (<expression>) that does not include a corresponding alias; please update the UDTF to specify an alias there and then try the query again."
],
"sqlState" : "42802"
},
"UNABLE_TO_ACQUIRE_MEMORY" : {
"message" : [
"Unable to acquire <requestedBytes> bytes of memory, got <receivedBytes>."
],
"sqlState" : "53200"
},
"UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE" : {
"message" : [
"Unable to convert SQL type <toType> to Protobuf type <protobufType>."
],
"sqlState" : "42K0G"
},
"UNABLE_TO_FETCH_HIVE_TABLES" : {
"message" : [
"Unable to fetch tables of Hive database: <dbName>."
],
"sqlState" : "58030"
},
"UNABLE_TO_INFER_PIPELINE_TABLE_SCHEMA" : {
"message" : [
"Failed to infer the schema for table <tableName> from its upstream flows.",
"Please modify the flows that write to this table to make their schemas compatible.",
"",
"Inferred schema so far:",
"<inferredDataSchema>",
"",
"Incompatible schema:",
"<incompatibleDataSchema>"
],
"sqlState" : "42KD9"
},
"UNABLE_TO_INFER_SCHEMA" : {
"message" : [
"Unable to infer schema for <format>. It must be specified manually."
],
"sqlState" : "42KD9"
},
"UNBOUND_SQL_PARAMETER" : {
"message" : [
"Found the unbound parameter: <name>. Please, fix `args` and provide a mapping of the parameter to either a SQL literal or collection constructor functions such as `map()`, `array()`, `struct()`."
],
"sqlState" : "42P02"
},
"UNCLOSED_BRACKETED_COMMENT" : {
"message" : [
"Found an unclosed bracketed comment. Please, append */ at the end of the comment."
],
"sqlState" : "42601"
},
"UNEXPECTED_INPUT_TYPE" : {
"message" : [
"Parameter <paramIndex> of function <functionName> requires the <requiredType> type, however <inputSql> has the type <inputType>."
],
"sqlState" : "42K09"
},
"UNEXPECTED_POSITIONAL_ARGUMENT" : {
"message" : [
"Cannot invoke routine <routineName> because it contains positional argument(s) following the named argument assigned to <parameterName>; please rearrange them so the positional arguments come first and then retry the query again."
],
"sqlState" : "4274K"
},
"UNEXPECTED_SERIALIZER_FOR_CLASS" : {
"message" : [
"The class <className> has an unexpected expression serializer. Expects \"STRUCT\" or \"IF\" which returns \"STRUCT\" but found <expr>."
],
"sqlState" : "42846"
},
"UNEXPECTED_USE_OF_PARAMETER_MARKER" : {
"message" : [
"Internal error: Unexpected parameter marker <parameterMarker> found during AST building.",
"This should have been blocked at the grammar level. Report this as a bug."
],
"sqlState" : "XX000"
},
"UNION_NOT_SUPPORTED_IN_RECURSIVE_CTE" : {
"message" : [
"The UNION operator is not yet supported within recursive common table expressions (WITH clauses that refer to themselves, directly or indirectly). Please use UNION ALL instead."
],
"sqlState" : "42836"
},
"UNKNOWN_PRIMITIVE_TYPE_IN_VARIANT" : {
"message" : [
"Unknown primitive type with id <id> was found in a variant value."
],
"sqlState" : "22023"
},
"UNKNOWN_PROTOBUF_MESSAGE_TYPE" : {
"message" : [
"Attempting to treat <descriptorName> as a Message, but it was <containingType>."
],
"sqlState" : "42K0G"
},
"UNPIVOT_REQUIRES_ATTRIBUTES" : {
"message" : [
"UNPIVOT requires all given <given> expressions to be columns when no <empty> expressions are given. These are not columns: [<expressions>]."
],
"sqlState" : "42K0A"
},
"UNPIVOT_REQUIRES_VALUE_COLUMNS" : {
"message" : [
"At least one value column needs to be specified for UNPIVOT, all columns specified as ids."
],
"sqlState" : "42K0A"
},
"UNPIVOT_VALUE_DATA_TYPE_MISMATCH" : {
"message" : [
"Unpivot value columns must share a least common type, some types do not: [<types>]."
],
"sqlState" : "42K09"
},
"UNPIVOT_VALUE_SIZE_MISMATCH" : {
"message" : [
"All unpivot value columns must have the same size as there are value column names (<names>)."
],
"sqlState" : "428C4"
},
"UNRECOGNIZED_PARAMETER_NAME" : {
"message" : [
"Cannot invoke routine <routineName> because the routine call included a named argument reference for the argument named <argumentName>, but this routine does not include any signature containing an argument with this name. Did you mean one of the following? [<proposal>]."
],
"sqlState" : "4274K"
},
"UNRECOGNIZED_SQL_TYPE" : {
"message" : [
"Unrecognized SQL type - name: <typeName>, id: <jdbcType>."
],
"sqlState" : "42704"
},
"UNRECOGNIZED_STATISTIC" : {
"message" : [
"The statistic <stats> is not recognized. Valid statistics include `count`, `count_distinct`, `approx_count_distinct`, `mean`, `stddev`, `min`, `max`, and percentile values. Percentile must be a numeric value followed by '%', within the range 0% to 100%."
],
"sqlState" : "42704"
},
"UNRESOLVABLE_TABLE_VALUED_FUNCTION" : {
"message" : [
"Could not resolve <name> to a table-valued function.",
"Please make sure that <name> is defined as a table-valued function and that all required parameters are provided correctly.",
"If <name> is not defined, please create the table-valued function before using it.",
"For more information about defining table-valued functions, please refer to the Apache Spark documentation."
],
"sqlState" : "42883"
},
"UNRESOLVED_ALL_IN_GROUP_BY" : {
"message" : [
"Cannot infer grouping columns for GROUP BY ALL based on the select clause. Please explicitly specify the grouping columns."
],
"sqlState" : "42803"
},
"UNRESOLVED_COLUMN" : {
"message" : [
"A column, variable, or function parameter with name <objectName> cannot be resolved."
],
"subClass" : {
"WITHOUT_SUGGESTION" : {
"message" : [
""
]
},
"WITH_SUGGESTION" : {
"message" : [
"Did you mean one of the following? [<proposal>]."
]
}
},
"sqlState" : "42703"
},
"UNRESOLVED_COLUMN_AMONG_FIELD_NAMES" : {
"message" : [
"Cannot resolve column name \"<colName>\" among (<fieldNames>)."
],
"sqlState" : "42703"
},
"UNRESOLVED_FIELD" : {
"message" : [
"A field with name <fieldName> cannot be resolved with the struct-type column <columnPath>."
],
"subClass" : {
"WITHOUT_SUGGESTION" : {
"message" : [
""
]
},
"WITH_SUGGESTION" : {
"message" : [
"Did you mean one of the following? [<proposal>]."
]
}
},
"sqlState" : "42703"
},
"UNRESOLVED_MAP_KEY" : {
"message" : [
"Cannot resolve column <objectName> as a map key. If the key is a string literal, add the single quotes '' around it."
],
"subClass" : {
"WITHOUT_SUGGESTION" : {
"message" : [
""
]
},
"WITH_SUGGESTION" : {
"message" : [
"Otherwise did you mean one of the following column(s)? [<proposal>]."
]
}
},
"sqlState" : "42703"
},
"UNRESOLVED_ROUTINE" : {
"message" : [
"Cannot resolve routine <routineName> on search path <searchPath>."
],
"sqlState" : "42883"
},
"UNRESOLVED_TABLE_PATH" : {
"message" : [
"Storage path for table <identifier> cannot be resolved."
],
"sqlState" : "22KD1"
},
"UNRESOLVED_USING_COLUMN_FOR_JOIN" : {
"message" : [
"USING column <colName> cannot be resolved on the <side> side of the join. The <side>-side columns: [<suggestion>]."
],
"sqlState" : "42703"
},
"UNRESOLVED_VARIABLE" : {
"message" : [
"Cannot resolve variable <variableName> on search path <searchPath>."
],
"sqlState" : "42883"
},
"UNSUPPORTED_ADD_FILE" : {
"message" : [
"Don't support add file."
],
"subClass" : {
"DIRECTORY" : {
"message" : [
"The file <path> is a directory, consider to set \"spark.sql.legacy.addSingleFileInAddFile\" to \"false\"."
]
},
"LOCAL_DIRECTORY" : {
"message" : [
"The local directory <path> is not supported in a non-local master mode."
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_ARROWTYPE" : {
"message" : [
"Unsupported arrow type <typeName>."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_CALL" : {
"message" : [
"Cannot call the method \"<methodName>\" of the class \"<className>\"."
],
"subClass" : {
"FIELD_INDEX" : {
"message" : [
"The row shall have a schema to get an index of the field <fieldName>."
]
},
"WITHOUT_SUGGESTION" : {
"message" : [
""
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_CHAR_OR_VARCHAR_AS_STRING" : {
"message" : [
"The char/varchar type can't be used in the table schema.",
"If you want Spark treat them as string type as same as Spark 3.0 and earlier, please set \"spark.sql.legacy.charVarcharAsString\" to \"true\"."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_COLLATION" : {
"message" : [
"Collation <collationName> is not supported for:"
],
"subClass" : {
"FOR_FUNCTION" : {
"message" : [
"function <functionName>. Please try to use a different collation."
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_CONNECT_FEATURE" : {
"message" : [
"Feature is not supported in Spark Connect:"
],
"subClass" : {
"DATASET_QUERY_EXECUTION" : {
"message" : [
"Access to the Dataset Query Execution. This is server side developer API."
]
},
"RDD" : {
"message" : [
"Resilient Distributed Datasets (RDDs)."
]
},
"SESSION_BASE_RELATION_TO_DATAFRAME" : {
"message" : [
"Invoking SparkSession 'baseRelationToDataFrame'. This is server side developer API"
]
},
"SESSION_EXPERIMENTAL_METHODS" : {
"message" : [
"Access to SparkSession Experimental (methods). This is server side developer API"
]
},
"SESSION_LISTENER_MANAGER" : {
"message" : [
"Access to the SparkSession Listener Manager. This is server side developer API"
]
},
"SESSION_SESSION_STATE" : {
"message" : [
"Access to the SparkSession Session State. This is server side developer API."
]
},
"SESSION_SHARED_STATE" : {
"message" : [
"Access to the SparkSession Shared State. This is server side developer API."
]
},
"SESSION_SPARK_CONTEXT" : {
"message" : [
"Access to the SparkContext."
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_CONSTRAINT_CHARACTERISTIC" : {
"message" : [
"Constraint characteristic '<characteristic>' is not supported for constraint type '<constraintType>'."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY" : {
"message" : [
"Unsupported data source type for direct query on files: <dataSourceType>"
],
"sqlState" : "0A000"
},
"UNSUPPORTED_DATATYPE" : {
"message" : [
"Unsupported data type <typeName>."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_DATA_SOURCE_SAVE_MODE" : {
"message" : [
"The data source \"<source>\" cannot be written in the <createMode> mode. Please use either the \"Append\" or \"Overwrite\" mode instead."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_DATA_TYPE_FOR_DATASOURCE" : {
"message" : [
"The <format> datasource doesn't support the column <columnName> of the type <columnType>."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_DATA_TYPE_FOR_ENCODER" : {
"message" : [
"Cannot create encoder for <dataType>. Please use a different output data type for your UDF or DataFrame."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_DEFAULT_VALUE" : {
"message" : [
"DEFAULT column values is not supported."
],
"subClass" : {
"WITHOUT_SUGGESTION" : {
"message" : [
""
]
},
"WITH_SUGGESTION" : {
"message" : [
"Enable it by setting \"spark.sql.defaultColumn.enabled\" to \"true\"."
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_DESERIALIZER" : {
"message" : [
"The deserializer is not supported:"
],
"subClass" : {
"DATA_TYPE_MISMATCH" : {
"message" : [
"need a(n) <desiredType> field but got <dataType>."
]
},
"FIELD_NUMBER_MISMATCH" : {
"message" : [
"try to map <schema> to Tuple<ordinal>, but failed as the number of fields does not line up."
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_EXPRESSION_GENERATED_COLUMN" : {
"message" : [
"Cannot create generated column <fieldName> with generation expression <expressionStr> because <reason>."
],
"sqlState" : "42621"
},
"UNSUPPORTED_EXPR_FOR_OPERATOR" : {
"message" : [
"A query operator contains one or more unsupported expressions.",
"Consider to rewrite it to avoid window functions, aggregate functions, and generator functions in the WHERE clause.",
"Invalid expressions: [<invalidExprSqls>]"
],
"sqlState" : "42K0E"
},
"UNSUPPORTED_EXPR_FOR_PARAMETER" : {
"message" : [
"A query parameter contains unsupported expression.",
"Parameters can either be variables or literals.",
"Invalid expression: [<invalidExprSql>]"
],
"sqlState" : "42K0E"
},
"UNSUPPORTED_EXPR_FOR_WINDOW" : {
"message" : [
"Expression <sqlExpr> not supported within a window function."
],
"sqlState" : "42P20"
},
"UNSUPPORTED_FEATURE" : {
"message" : [
"The feature is not supported:"
],
"subClass" : {
"AES_MODE" : {
"message" : [
"AES-<mode> with the padding <padding> by the <functionName> function."
]
},
"AES_MODE_AAD" : {
"message" : [
"<functionName> with AES-<mode> does not support additional authenticate data (AAD)."
]
},
"AES_MODE_IV" : {
"message" : [
"<functionName> with AES-<mode> does not support initialization vectors (IVs)."
]
},
"ALTER_TABLE_SERDE_FOR_DATASOURCE_TABLE" : {
"message" : [
"ALTER TABLE SET SERDE is not supported for table <tableName> created with the datasource API. Consider using an external Hive table or updating the table properties with compatible options for your table format."
]
},
"ANALYZE_UNCACHED_TEMP_VIEW" : {
"message" : [
"The ANALYZE TABLE FOR COLUMNS command can operate on temporary views that have been cached already. Consider to cache the view <viewName>."
]
},
"ANALYZE_UNSUPPORTED_COLUMN_TYPE" : {
"message" : [
"The ANALYZE TABLE FOR COLUMNS command does not support the type <columnType> of the column <columnName> in the table <tableName>."
]
},
"ANALYZE_VIEW" : {
"message" : [
"The ANALYZE TABLE command does not support views."
]
},
"CATALOG_OPERATION" : {
"message" : [
"Catalog <catalogName> does not support <operation>."
]
},
"CLAUSE_WITH_PIPE_OPERATORS" : {
"message" : [
"The SQL pipe operator syntax using |> does not support <clauses>."
]
},
"COLLATIONS_IN_MAP_KEYS" : {
"message" : [
"Collated strings for keys of maps"
]
},
"COMBINATION_QUERY_RESULT_CLAUSES" : {
"message" : [
"Combination of ORDER BY/SORT BY/DISTRIBUTE BY/CLUSTER BY."
]
},
"COMMENT_NAMESPACE" : {
"message" : [
"Attach a comment to the namespace <namespace>."
]
},
"CONTINUE_EXCEPTION_HANDLER" : {
"message" : [
"CONTINUE exception handler is not supported. Use EXIT handler."
]
},
"CREATE_FLOW_QUERY_EXECUTION" : {
"message" : [
"Direct execution of a CREATE FLOW statement is not supported. To create a flow, create and execute a pipeline with a SQL file containing your statement attached in the pipeline spec."
]
},
"CREATE_PIPELINE_DATASET_QUERY_EXECUTION" : {
"message" : [
"Direct execution of a CREATE ... <pipelineDatasetType> query. To create a pipeline dataset, create and execute a pipeline with a SQL file containing your query attached in the pipeline definition."
]
},
"DESC_TABLE_COLUMN_JSON" : {
"message" : [
"DESC TABLE COLUMN AS JSON not supported for individual columns."
]
},
"DESC_TABLE_COLUMN_PARTITION" : {
"message" : [
"DESC TABLE COLUMN for a specific partition."
]
},
"DROP_DATABASE" : {
"message" : [
"Drop the default database <database>."
]
},
"DROP_NAMESPACE" : {
"message" : [
"Drop the namespace <namespace>."
]
},
"GEOSPATIAL_DISABLED" : {
"message" : [
"Geospatial feature is disabled."
]
},
"HIVE_TABLE_TYPE" : {
"message" : [
"The <tableName> is hive <tableType>."
]
},
"HIVE_WITH_ANSI_INTERVALS" : {
"message" : [
"Hive table <tableName> with ANSI intervals."
]
},
"INSERT_PARTITION_SPEC_IF_NOT_EXISTS" : {
"message" : [
"INSERT INTO <tableName> with IF NOT EXISTS in the PARTITION spec."
]
},
"INTERRUPT_TYPE" : {
"message" : [
"Unsupported interrupt type: <interruptType>."
]
},
"LAMBDA_FUNCTION_WITH_PYTHON_UDF" : {
"message" : [
"Lambda function with Python UDF <funcName> in a higher order function."
]
},
"LAMBDA_FUNCTION_WITH_SQL_UDF" : {
"message" : [
"Lambda function with SQL UDF <funcName> in a higher order function."
]
},
"LATERAL_COLUMN_ALIAS_IN_AGGREGATE_FUNC" : {
"message" : [
"Referencing a lateral column alias <lca> in the aggregate function <aggFunc>."
]
},
"LATERAL_COLUMN_ALIAS_IN_AGGREGATE_WITH_WINDOW_AND_HAVING" : {
"message" : [
"Referencing lateral column alias <lca> in the aggregate query both with window expressions and with having clause. Please rewrite the aggregate query by removing the having clause or removing lateral alias reference in the SELECT list."
]
},
"LATERAL_COLUMN_ALIAS_IN_GENERATOR" : {
"message" : [
"Referencing a lateral column alias <lca> in generator expression <generatorExpr>."
]
},
"LATERAL_COLUMN_ALIAS_IN_GROUP_BY" : {
"message" : [
"Referencing a lateral column alias via GROUP BY alias/ALL is not supported yet."
]
},
"LATERAL_COLUMN_ALIAS_IN_WINDOW" : {
"message" : [
"Referencing a lateral column alias <lca> in window expression <windowExpr>."
]
},
"LATERAL_JOIN_USING" : {
"message" : [
"JOIN USING with LATERAL correlation."
]
},
"LITERAL_TYPE" : {
"message" : [
"Literal for '<value>' of <type>."
]
},
"MULTIPLE_BUCKET_TRANSFORMS" : {
"message" : [
"Multiple bucket TRANSFORMs."
]
},
"MULTI_ACTION_ALTER" : {
"message" : [
"The target JDBC server hosting table <tableName> does not support ALTER TABLE with multiple actions. Split the ALTER TABLE up into individual actions to avoid this error."
]
},
"OBJECT_LEVEL_COLLATIONS" : {
"message" : [
"Default collation for the specified object."
]
},
"ORC_TYPE_CAST" : {
"message" : [
"Unable to convert <orcType> of Orc to data type <toType>."
]
},
"OVERWRITE_BY_SUBQUERY" : {
"message" : [
"INSERT OVERWRITE with a subquery condition."
]
},
"PANDAS_UDAF_IN_PIVOT" : {
"message" : [
"Pandas user defined aggregate function in the PIVOT clause."
]
},
"PARAMETER_MARKER_IN_UNEXPECTED_STATEMENT" : {
"message" : [
"Parameter markers are not allowed in <statement>."
]
},
"PARTITION_BY_VARIANT" : {
"message" : [
"Cannot use VARIANT producing expressions to partition a DataFrame, but the type of expression <expr> is <dataType>."
]
},
"PARTITION_WITH_NESTED_COLUMN_IS_UNSUPPORTED" : {
"message" : [
"Invalid partitioning: <cols> is missing or is in a map or array."
]
},
"PIPE_OPERATOR_AGGREGATE_UNSUPPORTED_CASE" : {
"message" : [
"The SQL pipe operator syntax with aggregation (using |> AGGREGATE) does not support <case>."
]
},
"PIVOT_AFTER_GROUP_BY" : {
"message" : [
"PIVOT clause following a GROUP BY clause. Consider pushing the GROUP BY into a subquery."
]
},
"PIVOT_TYPE" : {
"message" : [
"Pivoting by the value '<value>' of the column data type <type>."
]
},
"PURGE_PARTITION" : {
"message" : [
"Partition purge."
]
},
"PURGE_TABLE" : {
"message" : [
"Purge table."
]
},
"PYTHON_UDF_IN_ON_CLAUSE" : {
"message" : [
"Python UDF in the ON clause of a <joinType> JOIN. In case of an INNER JOIN consider rewriting to a CROSS JOIN with a WHERE clause."
]
},
"QUERY_ONLY_CORRUPT_RECORD_COLUMN" : {
"message" : [
"Queries from raw JSON/CSV/XML files are disallowed when the",
"referenced columns only include the internal corrupt record column",
"(named `_corrupt_record` by default). For example:",
"`spark.read.schema(schema).json(file).filter($\"_corrupt_record\".isNotNull).count()`",
"and `spark.read.schema(schema).json(file).select(\"_corrupt_record\").show()`.",
"Instead, you can cache or save the parsed results and then send the same query.",
"For example, `val df = spark.read.schema(schema).json(file).cache()` and then",
"`df.filter($\"_corrupt_record\".isNotNull).count()`."
]
},
"REMOVE_NAMESPACE_COMMENT" : {
"message" : [
"Remove a comment from the namespace <namespace>."
]
},
"REPLACE_NESTED_COLUMN" : {
"message" : [
"The replace function does not support nested column <colName>."
]
},
"SCHEMA_LEVEL_COLLATIONS" : {
"message" : [
"Default collation for the specified schema."
]
},
"SET_NAMESPACE_PROPERTY" : {
"message" : [
"<property> is a reserved namespace property, <msg>."
]
},
"SET_OPERATION_ON_MAP_TYPE" : {
"message" : [
"Cannot have MAP type columns in DataFrame which calls set operations (INTERSECT, EXCEPT, etc.), but the type of column <colName> is <dataType>."
]
},
"SET_OPERATION_ON_VARIANT_TYPE" : {
"message" : [
"Cannot have VARIANT type columns in DataFrame which calls set operations (INTERSECT, EXCEPT, etc.), but the type of column <colName> is <dataType>."
]
},
"SET_PROPERTIES_AND_DBPROPERTIES" : {
"message" : [
"set PROPERTIES and DBPROPERTIES at the same time."
]
},
"SET_TABLE_PROPERTY" : {
"message" : [
"<property> is a reserved table property, <msg>."
]
},
"SET_VARIABLE_USING_SET" : {
"message" : [
"<variableName> is a VARIABLE and cannot be updated using the SET statement. Use SET VARIABLE <variableName> = ... instead."
]
},
"SQL_CURSOR" : {
"message" : [
"SQL cursor operations (DECLARE CURSOR, OPEN, FETCH, CLOSE) are not supported."
]
},
"SQL_SCRIPTING" : {
"message" : [
"SQL Scripting is under development and not all features are supported. SQL Scripting enables users to write procedural SQL including control flow and error handling. To enable existing features set <sqlScriptingEnabled> to `true`."
]
},
"SQL_SCRIPTING_DROP_TEMPORARY_VARIABLE" : {
"message" : [
"DROP TEMPORARY VARIABLE is not supported within SQL scripts. To bypass this, use `EXECUTE IMMEDIATE 'DROP TEMPORARY VARIABLE ...'` ."
]
},
"SQL_SCRIPTING_WITH_POSITIONAL_PARAMETERS" : {
"message" : [
"Positional parameters are not supported with SQL Scripting."
]
},
"STATE_STORE_MULTIPLE_COLUMN_FAMILIES" : {
"message" : [
"Creating multiple column families with <stateStoreProvider> is not supported."
]
},
"STATE_STORE_REMOVING_COLUMN_FAMILIES" : {
"message" : [
"Removing column families with <stateStoreProvider> is not supported."
]
},
"STATE_STORE_TTL" : {
"message" : [
"State TTL with <stateStoreProvider> is not supported. Please use RocksDBStateStoreProvider."
]
},
"STORE_BACKEND_NOT_SUPPORTED_FOR_TWS" : {
"message" : [
"Store backend <stateStoreProvider> is not supported by TransformWithState operator. Please use RocksDBStateStoreProvider."
]
},
"TABLE_OPERATION" : {
"message" : [
"Table <tableName> does not support <operation>. Please check the current catalog and namespace to make sure the qualified table name is expected, and also check the catalog implementation which is configured by \"spark.sql.catalog\"."
]
},
"TEMPORARY_VIEW_WITH_SCHEMA_BINDING_MODE" : {
"message" : [
"Temporary views cannot be created with the WITH SCHEMA clause. Recreate the temporary view when the underlying schema changes, or use a persisted view."
]
},
"TIME_TRAVEL" : {
"message" : [
"Time travel on the relation: <relationId>."
]
},
"TOO_MANY_TYPE_ARGUMENTS_FOR_UDF_CLASS" : {
"message" : [
"UDF class with <num> type arguments."
]
},
"TRANSFORM_DISTINCT_ALL" : {
"message" : [
"TRANSFORM with the DISTINCT/ALL clause."
]
},
"TRANSFORM_NON_HIVE" : {
"message" : [
"TRANSFORM with SERDE is only supported in hive mode."
]
},
"TRIM_COLLATION" : {
"message" : [
"TRIM specifier in the collation."
]
},
"UPDATE_COLUMN_NULLABILITY" : {
"message" : [
"Update column nullability for MySQL and MS SQL Server."
]
},
"WRITE_FOR_BINARY_SOURCE" : {
"message" : [
"Write for the binary file data source."
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_GENERATOR" : {
"message" : [
"The generator is not supported:"
],
"subClass" : {
"MULTI_GENERATOR" : {
"message" : [
"only one generator allowed per SELECT clause but found <num>: <generators>."
]
},
"NESTED_IN_EXPRESSIONS" : {
"message" : [
"nested in expressions <expression>."
]
},
"NOT_GENERATOR" : {
"message" : [
"<functionName> is expected to be a generator. However, its class is <classCanonicalName>, which is not a generator."
]
},
"OUTSIDE_SELECT" : {
"message" : [
"outside the SELECT clause, found: <plan>."
]
}
},
"sqlState" : "42K0E"
},
"UNSUPPORTED_GROUPING_EXPRESSION" : {
"message" : [
"grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup."
],
"sqlState" : "42K0E"
},
"UNSUPPORTED_INSERT" : {
"message" : [
"Can't insert into the target."
],
"subClass" : {
"MULTI_PATH" : {
"message" : [
"Can only write data to relations with a single path but given paths are <paths>."
]
},
"NOT_ALLOWED" : {
"message" : [
"The target relation <relationId> does not allow insertion."
]
},
"NOT_PARTITIONED" : {
"message" : [
"The target relation <relationId> is not partitioned."
]
},
"RDD_BASED" : {
"message" : [
"An RDD-based table is not allowed."
]
},
"READ_FROM" : {
"message" : [
"The target relation <relationId> is also being read from."
]
}
},
"sqlState" : "42809"
},
"UNSUPPORTED_INSERT_WITH_SCHEMA_EVOLUTION" : {
"message" : [
"INSERT WITH SCHEMA EVOLUTION ... is unsupported for this table format."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_JOIN_TYPE" : {
"message" : [
"Unsupported join type '<typ>'. Supported join types include: <supported>."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_MERGE_CONDITION" : {
"message" : [
"MERGE operation contains unsupported <condName> condition."
],
"subClass" : {
"AGGREGATE" : {
"message" : [
"Aggregates are not allowed: <cond>."
]
},
"NON_DETERMINISTIC" : {
"message" : [
"Non-deterministic expressions are not allowed: <cond>."
]
},
"SUBQUERY" : {
"message" : [
"Subqueries are not allowed: <cond>."
]
}
},
"sqlState" : "42K0E"
},
"UNSUPPORTED_OVERWRITE" : {
"message" : [
"Can't overwrite the target that is also being read from."
],
"subClass" : {
"PATH" : {
"message" : [
"The target path is <path>."
]
},
"TABLE" : {
"message" : [
"The target table is <table>."
]
}
},
"sqlState" : "42902"
},
"UNSUPPORTED_PARTITION_TRANSFORM" : {
"message" : [
"Unsupported partition transform: <transform>. The supported transforms are `identity`, `bucket`, and `clusterBy`. Ensure your transform expression uses one of these."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_PIPELINE_SPARK_SQL_COMMAND" : {
"message" : [
"'<command>' is not supported in spark.sql(\"...\") API in Spark Declarative Pipeline."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_SAVE_MODE" : {
"message" : [
"The save mode <saveMode> is not supported for:"
],
"subClass" : {
"EXISTENT_PATH" : {
"message" : [
"an existent path."
]
},
"NON_EXISTENT_PATH" : {
"message" : [
"a non-existent path."
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_SHOW_CREATE_TABLE" : {
"message" : [
"Unsupported a SHOW CREATE TABLE command."
],
"subClass" : {
"ON_DATA_SOURCE_TABLE_WITH_AS_SERDE" : {
"message" : [
"The table <tableName> is a Spark data source table. Please use SHOW CREATE TABLE without AS SERDE instead."
]
},
"ON_TEMPORARY_VIEW" : {
"message" : [
"The command is not supported on a temporary view <tableName>."
]
},
"ON_TRANSACTIONAL_HIVE_TABLE" : {
"message" : [
"Failed to execute the command against transactional Hive table <tableName>.",
"Please use SHOW CREATE TABLE <tableName> AS SERDE to show Hive DDL instead."
]
},
"WITH_UNSUPPORTED_FEATURE" : {
"message" : [
"Failed to execute the command against table/view <tableName> which is created by Hive and uses the following unsupported features",
"<unsupportedFeatures>"
]
},
"WITH_UNSUPPORTED_SERDE_CONFIGURATION" : {
"message" : [
"Failed to execute the command against the table <tableName> which is created by Hive and uses the following unsupported serde configuration",
"<configs>",
"Please use SHOW CREATE TABLE <tableName> AS SERDE to show Hive DDL instead."
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_SINGLE_PASS_ANALYZER_FEATURE" : {
"message" : [
"The single-pass analyzer cannot process this query or command because it does not yet support <feature>."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_SQL_UDF_USAGE" : {
"message" : [
"Using SQL function <functionName> in <nodeName> is not supported."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_STREAMING_OPERATOR_WITHOUT_WATERMARK" : {
"message" : [
"<outputMode> output mode not supported for <statefulOperator> on streaming DataFrames/DataSets without watermark."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY" : {
"message" : [
"Unsupported subquery expression:"
],
"subClass" : {
"ACCESSING_OUTER_QUERY_COLUMN_IS_NOT_ALLOWED" : {
"message" : [
"Accessing outer query column is not allowed in this location:",
"<treeNode>"
]
},
"AGGREGATE_FUNCTION_MIXED_OUTER_LOCAL_REFERENCES" : {
"message" : [
"Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: <function>."
]
},
"CORRELATED_COLUMN_IS_NOT_ALLOWED_IN_PREDICATE" : {
"message" : [
"Correlated column is not allowed in predicate:",
"<treeNode>"
]
},
"CORRELATED_COLUMN_NOT_FOUND" : {
"message" : [
"A correlated outer name reference within a subquery expression body was not found in the enclosing query: <value>."
]
},
"CORRELATED_REFERENCE" : {
"message" : [
"Expressions referencing the outer query are not supported outside of WHERE/HAVING clauses: <sqlExprs>."
]
},
"HIGHER_ORDER_FUNCTION" : {
"message" : [
"Subquery expressions are not supported within higher-order functions. Please remove all subquery expressions from higher-order functions and then try the query again."
]
},
"LATERAL_JOIN_CONDITION_NON_DETERMINISTIC" : {
"message" : [
"Lateral join condition cannot be non-deterministic: <condition>."
]
},
"MUST_AGGREGATE_CORRELATED_SCALAR_SUBQUERY" : {
"message" : [
"Correlated scalar subqueries must be aggregated to return at most one row."
]
},
"NON_CORRELATED_COLUMNS_IN_GROUP_BY" : {
"message" : [
"A GROUP BY clause in a scalar correlated subquery cannot contain non-correlated columns: <value>."
]
},
"NON_DETERMINISTIC_LATERAL_SUBQUERIES" : {
"message" : [
"Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row:",
"<treeNode>"
]
},
"SCALAR_SUBQUERY_IN_VALUES" : {
"message" : [
"Scalar subqueries in the VALUES clause."
]
},
"UNSUPPORTED_CORRELATED_EXPRESSION_IN_JOIN_CONDITION" : {
"message" : [
"Correlated subqueries in the join predicate cannot reference both join inputs:",
"<subqueryExpression>"
]
},
"UNSUPPORTED_CORRELATED_REFERENCE_DATA_TYPE" : {
"message" : [
"Correlated column reference '<expr>' cannot be <dataType> type."
]
},
"UNSUPPORTED_CORRELATED_SCALAR_SUBQUERY" : {
"message" : [
"Correlated scalar subqueries can only be used in filters, aggregations, projections, and UPDATE/MERGE/DELETE commands:",
"<treeNode>"
]
},
"UNSUPPORTED_IN_EXISTS_SUBQUERY" : {
"message" : [
"IN/EXISTS predicate subqueries can only be used in filters, joins, aggregations, window functions, projections, and UPDATE/MERGE/DELETE commands:",
"<treeNode>"
]
},
"UNSUPPORTED_TABLE_ARGUMENT" : {
"message" : [
"Table arguments are used in a function where they are not supported:",
"<treeNode>"
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_TABLE_CHANGE_IN_AUTO_SCHEMA_EVOLUTION" : {
"message" : [
"The table changes <changes> are not supported by the catalog on table <tableName>."
],
"sqlState" : "42000"
},
"UNSUPPORTED_TABLE_CHANGE_IN_JDBC_CATALOG" : {
"message" : [
"The table change <change> is not supported for the JDBC catalog on table <tableName>. Supported changes include: AddColumn, RenameColumn, DeleteColumn, UpdateColumnType, UpdateColumnNullability."
],
"sqlState" : "42000"
},
"UNSUPPORTED_TIME_PRECISION" : {
"message" : [
"The seconds precision <precision> of the TIME data type is out of the supported range [0, 6]."
],
"sqlState" : "0A001"
},
"UNSUPPORTED_TIME_TYPE" : {
"message" : [
"The data type TIME is not supported."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_TYPED_LITERAL" : {
"message" : [
"Literals of the type <unsupportedType> are not supported. Supported types are <supportedTypes>."
],
"sqlState" : "0A000"
},
"UNTYPED_SCALA_UDF" : {
"message" : [
"You're using untyped Scala UDF, which does not have the input type information. Spark may blindly pass null to the Scala closure with primitive-type argument, and the closure will see the default value of the Java type for the null argument, e.g. `udf((x: Int) => x, IntegerType)`, the result is 0 for null input. To get rid of this error, you could:",
"1. use typed Scala UDF APIs(without return type parameter), e.g. `udf((x: Int) => x)`.",
"2. use Java UDF APIs, e.g. `udf(new UDF1[String, Integer] { override def call(s: String): Integer = s.length() }, IntegerType)`, if input types are all non primitive.",
"3. set \"spark.sql.legacy.allowUntypedScalaUDF\" to \"true\" and use this API with caution."
],
"sqlState" : "42K0E"
},
"USER_DEFINED_FUNCTIONS" : {
"message" : [
"User defined function is invalid:"
],
"subClass" : {
"CANNOT_CONTAIN_COMPLEX_FUNCTIONS" : {
"message" : [
"SQL scalar function cannot contain aggregate/window/generate functions: <queryText>"
]
},
"CANNOT_REPLACE_NON_SQL_UDF_WITH_SQL_UDF" : {
"message" : [
"Cannot replace the non-SQL function <name> with a SQL function."
]
},
"NOT_A_VALID_DEFAULT_EXPRESSION" : {
"message" : [
"The DEFAULT expression of `<functionName>`.`<parameterName>` is not supported because it contains a subquery."
]
},
"NOT_A_VALID_DEFAULT_PARAMETER_POSITION" : {
"message" : [
"In routine `<functionName>` parameter `<parameterName>` with DEFAULT must not be followed by parameter `<nextParameterName>` without DEFAULT."
]
},
"NOT_NULL_ON_FUNCTION_PARAMETERS" : {
"message" : [
"Cannot specify NOT NULL on function parameters: <input>"
]
},
"RETURN_COLUMN_COUNT_MISMATCH" : {
"message" : [
"The number of columns produced by the RETURN clause (num: `<outputSize>`) does not match the number of column names specified by the RETURNS clause (num: `<returnParamSize>`) of <name>."
]
},
"ROUTINE_PROPERTY_TOO_LARGE" : {
"message" : [
"Cannot convert user defined routine <name> to catalog function: routine properties are too large."
]
},
"SQL_TABLE_UDF_BODY_MUST_BE_A_QUERY" : {
"message" : [
"SQL table function <name> body must be a query."
]
},
"SQL_TABLE_UDF_MISSING_COLUMN_NAMES" : {
"message" : [
"The relation returned by the query in the CREATE FUNCTION statement for <functionName> with RETURNS TABLE clause lacks explicit names for one or more output columns; please rewrite the function body to provide explicit column names or add column names to the RETURNS TABLE clause, and re-run the command."
]
}
},
"sqlState" : "42601"
},
"USER_RAISED_EXCEPTION" : {
"message" : [
"<errorMessage>"
],
"sqlState" : "P0001"
},
"USER_RAISED_EXCEPTION_PARAMETER_MISMATCH" : {
"message" : [
"The `raise_error()` function was used to raise error class: <errorClass> which expects parameters: <expectedParms>.",
"The provided parameters <providedParms> do not match the expected parameters.",
"Please make sure to provide all expected parameters."
],
"sqlState" : "P0001"
},
"USER_RAISED_EXCEPTION_UNKNOWN_ERROR_CLASS" : {
"message" : [
"The `raise_error()` function was used to raise an unknown error class: <errorClass>"
],
"sqlState" : "P0001"
},
"USER_SPECIFIED_AND_ACTUAL_SCHEMA_MISMATCH" : {
"message" : [
"The user-specified schema doesn't match the actual schema:",
"user-specified: <schema>, actual: <actualSchema>. If you're using",
"DataFrameReader.schema API or creating a table, please do not specify the schema.",
"Or if you're scanning an existed table, please drop it and re-create it."
],
"sqlState" : "42K03"
},
"USER_SPECIFIED_AND_INFERRED_SCHEMA_NOT_COMPATIBLE" : {
"message" : [
"Table '<tableName>' has a user-specified schema that is incompatible with the schema",
"inferred from its query.",
"<streamingTableHint>",
"",
"Declared schema:",
"<specifiedSchema>",
"",
"Inferred schema:",
"<inferredDataSchema>"
],
"sqlState" : "42000"
},
"VARIABLE_ALREADY_EXISTS" : {
"message" : [
"Cannot create the variable <variableName> because it already exists.",
"Choose a different name, or drop or replace the existing variable."
],
"sqlState" : "42723"
},
"VARIABLE_NOT_FOUND" : {
"message" : [
"The variable <variableName> cannot be found. Verify the spelling and correctness of the schema and catalog.",
"If you did not qualify the name with a schema and catalog, verify the current_schema() output, or qualify the name with the correct schema and catalog.",
"To tolerate the error on drop use DROP VARIABLE IF EXISTS."
],
"sqlState" : "42883"
},
"VARIANT_CONSTRUCTOR_SIZE_LIMIT" : {
"message" : [
"Cannot construct a Variant larger than 16 MiB. The maximum allowed size of a Variant value is 16 MiB."
],
"sqlState" : "22023"
},
"VARIANT_DUPLICATE_KEY" : {
"message" : [
"Failed to build variant because of a duplicate object key `<key>`."
],
"sqlState" : "22023"
},
"VARIANT_SIZE_LIMIT" : {
"message" : [
"Cannot build variant bigger than <sizeLimit> in <functionName>.",
"Please avoid large input strings to this expression (for example, add function calls(s) to check the expression size and convert it to NULL first if it is too big)."
],
"sqlState" : "22023"
},
"VECTOR_DIMENSION_MISMATCH" : {
"message" : [
"Vectors passed to <functionName> must have the same dimension, but got <leftDim> and <rightDim>."
],
"sqlState" : "22000"
},
"VIEW_ALREADY_EXISTS" : {
"message" : [
"Cannot create view <relationName> because it already exists.",
"Choose a different name, drop or replace the existing object, or add the IF NOT EXISTS clause to tolerate pre-existing objects."
],
"sqlState" : "42P07"
},
"VIEW_EXCEED_MAX_NESTED_DEPTH" : {
"message" : [
"The depth of view <viewName> exceeds the maximum view resolution depth (<maxNestedDepth>).",
"Analysis is aborted to avoid errors. If you want to work around this, please try to increase the value of \"spark.sql.view.maxNestedViewDepth\"."
],
"sqlState" : "54K00"
},
"VIEW_NOT_FOUND" : {
"message" : [
"The view <relationName> cannot be found. Verify the spelling and correctness of the schema and catalog.",
"If you did not qualify the name with a schema, verify the current_schema() output, or qualify the name with the correct schema and catalog.",
"To tolerate the error on drop use DROP VIEW IF EXISTS."
],
"sqlState" : "42P01"
},
"VIEW_WRITE_NOT_ALLOWED" : {
"message" : [
"Cannot write into view <name>, please write into a table instead."
],
"sqlState" : "42809"
},
"WINDOW_FUNCTION_AND_FRAME_MISMATCH" : {
"message" : [
"<funcName> function can only be evaluated in an ordered row-based window frame with a single offset: <windowExpr>."
],
"sqlState" : "42K0E"
},
"WINDOW_FUNCTION_WITHOUT_OVER_CLAUSE" : {
"message" : [
"Window function <funcName> requires an OVER clause."
],
"sqlState" : "42601"
},
"WRITE_STREAM_NOT_ALLOWED" : {
"message" : [
"`writeStream` can be called only on streaming Dataset/DataFrame."
],
"sqlState" : "42601"
},
"WRONG_COMMAND_FOR_OBJECT_TYPE" : {
"message" : [
"The operation <operation> requires a <requiredType>. But <objectName> is a <foundType>. Use <alternative> instead."
],
"sqlState" : "42809"
},
"WRONG_NUM_ARGS" : {
"message" : [
"The <functionName> requires <expectedNum> parameters but the actual number is <actualNum>."
],
"subClass" : {
"WITHOUT_SUGGESTION" : {
"message" : [
"Please, refer to '<docroot>/sql-ref-functions.html' for a fix."
]
},
"WITH_SUGGESTION" : {
"message" : [
"If you have to call this function with <legacyNum> parameters, set the legacy configuration <legacyConfKey> to <legacyConfValue>."
]
}
},
"sqlState" : "42605"
},
"XML_ROW_TAG_MISSING" : {
"message" : [
"<rowTag> option is required for reading/writing files in XML format."
],
"sqlState" : "42KDF"
},
"_LEGACY_ERROR_TEMP_0001" : {
"message" : [
"Invalid InsertIntoContext."
]
},
"_LEGACY_ERROR_TEMP_0004" : {
"message" : [
"Empty source for merge: you should specify a source table/subquery in merge."
]
},
"_LEGACY_ERROR_TEMP_0006" : {
"message" : [
"The number of inserted values cannot match the fields."
]
},
"_LEGACY_ERROR_TEMP_0012" : {
"message" : [
"DISTRIBUTE BY is not supported."
]
},
"_LEGACY_ERROR_TEMP_0014" : {
"message" : [
"TABLESAMPLE does not accept empty inputs."
]
},
"_LEGACY_ERROR_TEMP_0015" : {
"message" : [
"TABLESAMPLE(<msg>) is not supported."
]
},
"_LEGACY_ERROR_TEMP_0016" : {
"message" : [
"<bytesStr> is not a valid byte length literal, expected syntax: DIGIT+ ('B' | 'K' | 'M' | 'G')."
]
},
"_LEGACY_ERROR_TEMP_0018" : {
"message" : [
"Function trim doesn't support with type <trimOption>. Please use BOTH, LEADING or TRAILING as trim type."
]
},
"_LEGACY_ERROR_TEMP_0024" : {
"message" : [
"Can only have a single from-to unit in the interval literal syntax."
]
},
"_LEGACY_ERROR_TEMP_0026" : {
"message" : [
"Can only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: <value>."
]
},
"_LEGACY_ERROR_TEMP_0027" : {
"message" : [
"The value of from-to unit must be a string."
]
},
"_LEGACY_ERROR_TEMP_0029" : {
"message" : [
"Cannot mix year-month and day-time fields: <literal>."
]
},
"_LEGACY_ERROR_TEMP_0031" : {
"message" : [
"Invalid number of buckets: <describe>."
]
},
"_LEGACY_ERROR_TEMP_0032" : {
"message" : [
"Duplicated table paths found: '<pathOne>' and '<pathTwo>'. LOCATION and the case insensitive key 'path' in OPTIONS are all used to indicate the custom table path, you can only specify one of them."
]
},
"_LEGACY_ERROR_TEMP_0033" : {
"message" : [
"Expected either STORED AS or STORED BY, not both."
]
},
"_LEGACY_ERROR_TEMP_0034" : {
"message" : [
"<operation> is not supported in Hive-style <command><msg>."
]
},
"_LEGACY_ERROR_TEMP_0035" : {
"message" : [
"Operation not allowed: <message>."
]
},
"_LEGACY_ERROR_TEMP_0037" : {
"message" : [
"It is not allowed to add catalog/namespace prefix <quoted> to the table name in CACHE TABLE AS SELECT."
]
},
"_LEGACY_ERROR_TEMP_0045" : {
"message" : [
"Invalid time zone displacement value."
]
},
"_LEGACY_ERROR_TEMP_0046" : {
"message" : [
"CREATE TEMPORARY TABLE without a provider is not allowed."
]
},
"_LEGACY_ERROR_TEMP_0047" : {
"message" : [
"'ROW FORMAT' must be used with 'STORED AS'."
]
},
"_LEGACY_ERROR_TEMP_0048" : {
"message" : [
"Unsupported operation: Used defined record reader/writer classes."
]
},
"_LEGACY_ERROR_TEMP_0049" : {
"message" : [
"Directory path and 'path' in OPTIONS should be specified one, but not both."
]
},
"_LEGACY_ERROR_TEMP_0051" : {
"message" : [
"Empty set in <element> grouping sets is not supported."
]
},
"_LEGACY_ERROR_TEMP_0052" : {
"message" : [
"CREATE VIEW with both IF NOT EXISTS and REPLACE is not allowed."
]
},
"_LEGACY_ERROR_TEMP_0053" : {
"message" : [
"It is not allowed to define a TEMPORARY view with IF NOT EXISTS."
]
},
"_LEGACY_ERROR_TEMP_0056" : {
"message" : [
"Invalid time travel spec: <reason>."
]
},
"_LEGACY_ERROR_TEMP_0060" : {
"message" : [
"<msg>."
]
},
"_LEGACY_ERROR_TEMP_0062" : {
"message" : [
"<msg>."
]
},
"_LEGACY_ERROR_TEMP_0063" : {
"message" : [
"<msg>."
]
},
"_LEGACY_ERROR_TEMP_0064" : {
"message" : [
"<msg>."
]
},
"_LEGACY_ERROR_TEMP_1000" : {
"message" : [
"LEGACY store assignment policy is disallowed in Spark data source V2. Please set the configuration <configKey> to other values."
]
},
"_LEGACY_ERROR_TEMP_1002" : {
"message" : [
"Unable to generate an encoder for inner class `<className>` without access to the scope that this class was defined in.",
"Try moving this class out of its parent class."
]
},
"_LEGACY_ERROR_TEMP_1005" : {
"message" : [
"<expr> doesn't show up in the GROUP BY list <groupByAliases>."
]
},
"_LEGACY_ERROR_TEMP_1006" : {
"message" : [
"Aggregate expression required for pivot, but '<sql>' did not appear in any aggregate function."
]
},
"_LEGACY_ERROR_TEMP_1008" : {
"message" : [
"<quoted> is not a temp view of streaming logical plan, please use batch API such as `DataFrameReader.table` to read it."
]
},
"_LEGACY_ERROR_TEMP_1012" : {
"message" : [
"Cannot write into v1 table: <identifier>."
]
},
"_LEGACY_ERROR_TEMP_1017" : {
"message" : [
"<name> is a temporary function. '<cmd>' expects a persistent function.<hintStr>"
]
},
"_LEGACY_ERROR_TEMP_1018" : {
"message" : [
"<quoted> is a permanent view, which is not supported by streaming reading API such as `DataStreamReader.table` yet."
]
},
"_LEGACY_ERROR_TEMP_1021" : {
"message" : [
"count(<targetString>.*) is not allowed. Please use count(*) or expand the columns manually, e.g. count(col1, col2)."
]
},
"_LEGACY_ERROR_TEMP_1030" : {
"message" : [
"Window aggregate function with filter predicate is not supported yet."
]
},
"_LEGACY_ERROR_TEMP_1031" : {
"message" : [
"It is not allowed to use a window function inside an aggregate function. Please use the inner window function in a sub-query."
]
},
"_LEGACY_ERROR_TEMP_1032" : {
"message" : [
"<expr> does not have any WindowExpression."
]
},
"_LEGACY_ERROR_TEMP_1033" : {
"message" : [
"<expr> has multiple Window Specifications (<distinctWindowSpec>).",
"Please file a bug report with this error message, stack trace, and the query."
]
},
"_LEGACY_ERROR_TEMP_1034" : {
"message" : [
"It is not allowed to use window functions inside <clauseName> clause."
]
},
"_LEGACY_ERROR_TEMP_1035" : {
"message" : [
"Cannot specify window frame for <prettyName> function."
]
},
"_LEGACY_ERROR_TEMP_1036" : {
"message" : [
"Window Frame <wf> must match the required frame <required>."
]
},
"_LEGACY_ERROR_TEMP_1037" : {
"message" : [
"Window function <wf> requires window to be ordered, please add ORDER BY clause. For example SELECT <wf>(value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table."
]
},
"_LEGACY_ERROR_TEMP_1039" : {
"message" : [
"Multiple time/session window expressions would result in a cartesian product of rows, therefore they are currently not supported."
]
},
"_LEGACY_ERROR_TEMP_1040" : {
"message" : [
"Gap duration expression used in session window must be CalendarIntervalType, but got <dt>."
]
},
"_LEGACY_ERROR_TEMP_1045" : {
"message" : [
"ALTER TABLE SET LOCATION does not support partition for v2 tables."
]
},
"_LEGACY_ERROR_TEMP_1046" : {
"message" : [
"Join strategy hint parameter should be an identifier or string but was <unsupported> (<class>)."
]
},
"_LEGACY_ERROR_TEMP_1047" : {
"message" : [
"<hintName> Hint parameters should include an optional integral partitionNum and/or columns, but <invalidParams> can not be recognized as either partitionNum or columns."
]
},
"_LEGACY_ERROR_TEMP_1048" : {
"message" : [
"<hintName> Hint expects a partition number as a parameter."
]
},
"_LEGACY_ERROR_TEMP_1050" : {
"message" : [
"Can only star expand struct data types. Attribute: `<attributes>`."
]
},
"_LEGACY_ERROR_TEMP_1052" : {
"message" : [
"ADD COLUMN with v1 tables cannot specify NOT NULL."
]
},
"_LEGACY_ERROR_TEMP_1058" : {
"message" : [
"Cannot create table with both USING <provider> and <serDeInfo>."
]
},
"_LEGACY_ERROR_TEMP_1059" : {
"message" : [
"STORED AS with file format '<serdeInfo>' is invalid."
]
},
"_LEGACY_ERROR_TEMP_1060" : {
"message" : [
"<command> does not support nested column: <column>."
]
},
"_LEGACY_ERROR_TEMP_1066" : {
"message" : [
"<database> is a system preserved database, you cannot create a database with this name."
]
},
"_LEGACY_ERROR_TEMP_1068" : {
"message" : [
"<database> is a system preserved database, you cannot use it as current database. To access global temporary views, you should use qualified name with the GLOBAL_TEMP_DATABASE, e.g. SELECT * FROM <database>.viewName."
]
},
"_LEGACY_ERROR_TEMP_1069" : {
"message" : [
"CREATE EXTERNAL TABLE must be accompanied by LOCATION."
]
},
"_LEGACY_ERROR_TEMP_1071" : {
"message" : [
"Some existing schema fields (<nonExistentColumnNames>) are not present in the new schema. We don't support dropping columns yet."
]
},
"_LEGACY_ERROR_TEMP_1072" : {
"message" : [
"Only the tables/views belong to the same database can be retrieved. Querying tables/views are <qualifiedTableNames>."
]
},
"_LEGACY_ERROR_TEMP_1073" : {
"message" : [
"RENAME TABLE source and destination databases do not match: '<db>' != '<newDb>'."
]
},
"_LEGACY_ERROR_TEMP_1074" : {
"message" : [
"RENAME TEMPORARY VIEW from '<oldName>' to '<newName>': cannot specify database name '<db>' in the destination table."
]
},
"_LEGACY_ERROR_TEMP_1076" : {
"message" : [
"Partition spec is invalid. <details>."
]
},
"_LEGACY_ERROR_TEMP_1079" : {
"message" : [
"Resource Type '<resourceType>' is not supported."
]
},
"_LEGACY_ERROR_TEMP_1080" : {
"message" : [
"Table <identifier> did not specify database."
]
},
"_LEGACY_ERROR_TEMP_1081" : {
"message" : [
"Table <identifier> did not specify locationUri."
]
},
"_LEGACY_ERROR_TEMP_1082" : {
"message" : [
"Partition [<specString>] did not specify locationUri."
]
},
"_LEGACY_ERROR_TEMP_1089" : {
"message" : [
"Column statistics deserialization is not supported for column <name> of data type: <dataType>."
]
},
"_LEGACY_ERROR_TEMP_1090" : {
"message" : [
"Column statistics serialization is not supported for column <colName> of data type: <dataType>."
]
},
"_LEGACY_ERROR_TEMP_1098" : {
"message" : [
"DataType '<x>' is not supported by <className>."
]
},
"_LEGACY_ERROR_TEMP_1103" : {
"message" : [
"Unsupported component type <clz> in arrays."
]
},
"_LEGACY_ERROR_TEMP_1104" : {
"message" : [
"The second argument should be a double literal."
]
},
"_LEGACY_ERROR_TEMP_1107" : {
"message" : [
"Table <table> declares <batchWrite> capability but <v2WriteClassName> is not an instance of <v1WriteClassName>."
]
},
"_LEGACY_ERROR_TEMP_1108" : {
"message" : [
"Delete by condition with subquery is not supported: <condition>."
]
},
"_LEGACY_ERROR_TEMP_1109" : {
"message" : [
"Exec update failed: cannot translate expression to source filter: <f>."
]
},
"_LEGACY_ERROR_TEMP_1110" : {
"message" : [
"Cannot delete from table <table> where <filters>."
]
},
"_LEGACY_ERROR_TEMP_1111" : {
"message" : [
"DESCRIBE does not support partition for v2 tables."
]
},
"_LEGACY_ERROR_TEMP_1114" : {
"message" : [
"The streaming sources in a query do not have a common supported execution mode.",
"Sources support micro-batch: <microBatchSources>.",
"Sources support continuous: <continuousSources>."
]
},
"_LEGACY_ERROR_TEMP_1120" : {
"message" : [
"Unsupported NamespaceChange <changes> in JDBC catalog."
]
},
"_LEGACY_ERROR_TEMP_1121" : {
"message" : [
"Table does not support <cmd>: <table>."
]
},
"_LEGACY_ERROR_TEMP_1122" : {
"message" : [
"Table <table> is not a row-level operation table."
]
},
"_LEGACY_ERROR_TEMP_1123" : {
"message" : [
"Cannot rename a table with ALTER VIEW. Please use ALTER TABLE instead."
]
},
"_LEGACY_ERROR_TEMP_1126" : {
"message" : [
"Nested databases are not supported by v1 session catalog: <catalog>."
]
},
"_LEGACY_ERROR_TEMP_1127" : {
"message" : [
"Invalid partitionExprs specified: <sortOrders> For range partitioning use REPARTITION_BY_RANGE instead."
]
},
"_LEGACY_ERROR_TEMP_1128" : {
"message" : [
"Failed to resolve the schema for <format> for the partition column: <partitionColumn>. It must be specified manually."
]
},
"_LEGACY_ERROR_TEMP_1132" : {
"message" : [
"A schema needs to be specified when using <className>."
]
},
"_LEGACY_ERROR_TEMP_1134" : {
"message" : [
"Unable to infer schema for <format> at <fileCatalog>. It must be specified manually."
]
},
"_LEGACY_ERROR_TEMP_1135" : {
"message" : [
"<className> is not a valid Spark SQL Data Source."
]
},
"_LEGACY_ERROR_TEMP_1137" : {
"message" : [
"Unable to resolve <name> given [<outputStr>]."
]
},
"_LEGACY_ERROR_TEMP_1138" : {
"message" : [
"Hive built-in ORC data source must be used with Hive support enabled. Please use the native ORC data source by setting 'spark.sql.orc.impl' to 'native'."
]
},
"_LEGACY_ERROR_TEMP_1139" : {
"message" : [
"Failed to find data source: <provider>. Avro is built-in but external data source module since Spark 2.4. Please deploy the application as per the deployment section of Apache Avro Data Source Guide."
]
},
"_LEGACY_ERROR_TEMP_1140" : {
"message" : [
"Failed to find data source: <provider>. Please deploy the application as per the deployment section of Structured Streaming + Kafka Integration Guide."
]
},
"_LEGACY_ERROR_TEMP_1141" : {
"message" : [
"Multiple sources found for <provider> (<sourceNames>), please specify the fully qualified class name."
]
},
"_LEGACY_ERROR_TEMP_1143" : {
"message" : [
"The data to be inserted needs to have the same number of columns as the target table: target table has <targetSize> column(s) but the inserted data has <actualSize> column(s), which contain <staticPartitionsSize> partition column(s) having assigned constant values."
]
},
"_LEGACY_ERROR_TEMP_1144" : {
"message" : [
"The data to be inserted needs to have the same number of partition columns as the target table: target table has <targetSize> partition column(s) but the inserted data has <providedPartitionsSize> partition columns specified."
]
},
"_LEGACY_ERROR_TEMP_1145" : {
"message" : [
"<partKey> is not a partition column. Partition columns are <partitionColumns>."
]
},
"_LEGACY_ERROR_TEMP_1146" : {
"message" : [
"Partition column <partColumn> have multiple values specified, <values>. Please only specify a single value."
]
},
"_LEGACY_ERROR_TEMP_1147" : {
"message" : [
"The ordering of partition columns is <partColumns>. All partition columns having constant values need to appear before other partition columns that do not have an assigned constant value."
]
},
"_LEGACY_ERROR_TEMP_1149" : {
"message" : [
"Fail to rebuild expression: missing key <filter> in `translatedFilterToExpr`."
]
},
"_LEGACY_ERROR_TEMP_1151" : {
"message" : [
"Fail to resolve data source for the table <table> since the table serde property has the duplicated key <key> with extra options specified for this scan operation. To fix this, you can rollback to the legacy behavior of ignoring the extra options by setting the config <config> to `false`, or address the conflicts of the same config."
]
},
"_LEGACY_ERROR_TEMP_1155" : {
"message" : [
"Partition column `<col>` not found in schema <schemaCatalog>."
]
},
"_LEGACY_ERROR_TEMP_1158" : {
"message" : [
"Saving data into a view is not allowed."
]
},
"_LEGACY_ERROR_TEMP_1159" : {
"message" : [
"The format of the existing table <tableName> is `<existingProvider>`. It doesn't match the specified format `<specifiedProvider>`."
]
},
"_LEGACY_ERROR_TEMP_1160" : {
"message" : [
"The location of the existing table <identifier> is `<existingTableLoc>`. It doesn't match the specified location `<tableDescLoc>`."
]
},
"_LEGACY_ERROR_TEMP_1161" : {
"message" : [
"The column number of the existing table <tableName> (<existingTableSchema>) doesn't match the data schema (<querySchema>)."
]
},
"_LEGACY_ERROR_TEMP_1162" : {
"message" : [
"Cannot resolve '<col>' given input columns: [<inputColumns>]."
]
},
"_LEGACY_ERROR_TEMP_1163" : {
"message" : [
"Specified partitioning does not match that of the existing table <tableName>.",
"Specified partition columns: [<specifiedPartCols>].",
"Existing partition columns: [<existingPartCols>]."
]
},
"_LEGACY_ERROR_TEMP_1164" : {
"message" : [
"Specified bucketing does not match that of the existing table <tableName>.",
"Specified bucketing: <specifiedBucketString>.",
"Existing bucketing: <existingBucketString>."
]
},
"_LEGACY_ERROR_TEMP_1165" : {
"message" : [
"It is not allowed to specify partitioning when the table schema is not defined."
]
},
"_LEGACY_ERROR_TEMP_1166" : {
"message" : [
"Bucketing column '<bucketCol>' should not be part of partition columns '<normalizedPartCols>'."
]
},
"_LEGACY_ERROR_TEMP_1167" : {
"message" : [
"Bucket sorting column '<sortCol>' should not be part of partition columns '<normalizedPartCols>'."
]
},
"_LEGACY_ERROR_TEMP_1169" : {
"message" : [
"Requested partitioning does not match the table <tableName>:",
"Requested partitions: <normalizedPartSpec>.",
"Table partitions: <partColNames>."
]
},
"_LEGACY_ERROR_TEMP_1171" : {
"message" : [
"createTableColumnTypes option column <col> not found in schema <schema>."
]
},
"_LEGACY_ERROR_TEMP_1181" : {
"message" : [
"Stream-stream join without equality predicate is not supported."
]
},
"_LEGACY_ERROR_TEMP_1182" : {
"message" : [
"Column <ambiguousAttrs> are ambiguous. It's probably because you joined several Datasets together, and some of these Datasets are the same. This column points to one of the Datasets but Spark is unable to figure out which one. Please alias the Datasets with different names via `Dataset.as` before joining them, and specify the column using qualified name, e.g. `df.as(\"a\").join(df.as(\"b\"), $\"a.id\" > $\"b.id\")`. You can also set <config> to false to disable this check."
]
},
"_LEGACY_ERROR_TEMP_1183" : {
"message" : [
"Cannot use \"INTERVAL\" type in the table schema."
]
},
"_LEGACY_ERROR_TEMP_1186" : {
"message" : [
"Multi-part identifier cannot be empty."
]
},
"_LEGACY_ERROR_TEMP_1187" : {
"message" : [
"Hive data source can only be used with tables, you can not <operation> files of Hive data source directly."
]
},
"_LEGACY_ERROR_TEMP_1188" : {
"message" : [
"There is a 'path' option set and <method>() is called with a path parameter. Either remove the path option, or call <method>() without the parameter. To ignore this check, set '<config>' to 'true'."
]
},
"_LEGACY_ERROR_TEMP_1189" : {
"message" : [
"User specified schema not supported with `<operation>`."
]
},
"_LEGACY_ERROR_TEMP_1190" : {
"message" : [
"Temporary view <viewName> doesn't support streaming write."
]
},
"_LEGACY_ERROR_TEMP_1191" : {
"message" : [
"Streaming into views <viewName> is not supported."
]
},
"_LEGACY_ERROR_TEMP_1192" : {
"message" : [
"The input source(<source>) is different from the table <tableName>'s data source provider(<provider>)."
]
},
"_LEGACY_ERROR_TEMP_1193" : {
"message" : [
"Table <tableName> doesn't support streaming write - <t>."
]
},
"_LEGACY_ERROR_TEMP_1194" : {
"message" : [
"queryName must be specified for memory sink."
]
},
"_LEGACY_ERROR_TEMP_1195" : {
"message" : [
"'<source>' is not supported with continuous trigger."
]
},
"_LEGACY_ERROR_TEMP_1196" : {
"message" : [
"<columnType> column <columnName> not found in existing columns (<validColumnNames>)."
]
},
"_LEGACY_ERROR_TEMP_1197" : {
"message" : [
"'<operation>' does not support partitioning."
]
},
"_LEGACY_ERROR_TEMP_1198" : {
"message" : [
"Function '<unbound>' cannot process input: (<arguments>): <unsupported>."
]
},
"_LEGACY_ERROR_TEMP_1199" : {
"message" : [
"Invalid bound function '<bound>: there are <argsLen> arguments but <inputTypesLen> parameters returned from 'inputTypes()'."
]
},
"_LEGACY_ERROR_TEMP_1205" : {
"message" : [
"Expected only partition pruning predicates: <nonPartitionPruningPredicates>."
]
},
"_LEGACY_ERROR_TEMP_1207" : {
"message" : [
"The duration and time inputs to window must be an integer, long or string literal."
]
},
"_LEGACY_ERROR_TEMP_1210" : {
"message" : [
"The second argument in <funcName> should be a boolean literal."
]
},
"_LEGACY_ERROR_TEMP_1211" : {
"message" : [
"Detected implicit cartesian product for <joinType> join between logical plans",
"<leftPlan>",
"and",
"<rightPlan>",
"Join condition is missing or trivial.",
"Either: use the CROSS JOIN syntax to allow cartesian products between these relations, or: enable implicit cartesian products by setting the configuration variable spark.sql.crossJoin.enabled=true."
]
},
"_LEGACY_ERROR_TEMP_1212" : {
"message" : [
"Found conflicting attributes <conflictingAttrs> in the condition joining outer plan:",
"<outerPlan>",
"and subplan:",
"<subplan>."
]
},
"_LEGACY_ERROR_TEMP_1213" : {
"message" : [
"Window expression is empty in <expr>."
]
},
"_LEGACY_ERROR_TEMP_1214" : {
"message" : [
"Found different window function type in <windowExpressions>."
]
},
"_LEGACY_ERROR_TEMP_1218" : {
"message" : [
"<tableIdentifier> should be converted to HadoopFsRelation."
]
},
"_LEGACY_ERROR_TEMP_1219" : {
"message" : [
"Hive metastore does not support altering database location."
]
},
"_LEGACY_ERROR_TEMP_1222" : {
"message" : [
"Unknown resource type: <resourceType>."
]
},
"_LEGACY_ERROR_TEMP_1223" : {
"message" : [
"Invalid field id '<field>' in day-time interval. Supported interval fields: <supportedIds>."
]
},
"_LEGACY_ERROR_TEMP_1224" : {
"message" : [
"'interval <startFieldName> to <endFieldName>' is invalid."
]
},
"_LEGACY_ERROR_TEMP_1225" : {
"message" : [
"Invalid field id '<field>' in year-month interval. Supported interval fields: <supportedIds>."
]
},
"_LEGACY_ERROR_TEMP_1226" : {
"message" : [
"The SQL config '<configName>' was removed in the version <version>. <comment>"
]
},
"_LEGACY_ERROR_TEMP_1228" : {
"message" : [
"Decimal scale (<scale>) cannot be greater than precision (<precision>)."
]
},
"_LEGACY_ERROR_TEMP_1232" : {
"message" : [
"Partition spec is invalid. The spec (<specKeys>) must match the partition spec (<partitionColumnNames>) defined in table '<tableName>'."
]
},
"_LEGACY_ERROR_TEMP_1237" : {
"message" : [
"The list of partition columns with values in partition specification for table '<table>' in database '<database>' is not a prefix of the list of partition columns defined in the table schema. Expected a prefix of [<schemaColumns>], but got [<specColumns>]."
]
},
"_LEGACY_ERROR_TEMP_1239" : {
"message" : [
"Analyzing column statistics is not supported for column <name> of data type: <dataType>."
]
},
"_LEGACY_ERROR_TEMP_1241" : {
"message" : [
"CREATE-TABLE-AS-SELECT cannot create table with location to a non-empty directory <tablePath>. To allow overwriting the existing non-empty directory, set '<config>' to true."
]
},
"_LEGACY_ERROR_TEMP_1246" : {
"message" : [
"Can't find column `<name>` given table data columns <fieldNames>."
]
},
"_LEGACY_ERROR_TEMP_1247" : {
"message" : [
"Operation not allowed: ALTER TABLE SET [SERDE | SERDEPROPERTIES] for a specific partition is not supported for tables created with the datasource API."
]
},
"_LEGACY_ERROR_TEMP_1250" : {
"message" : [
"<action> is not allowed on <tableName> since filesource partition management is disabled (spark.sql.hive.manageFilesourcePartitions = false)."
]
},
"_LEGACY_ERROR_TEMP_1251" : {
"message" : [
"<action> is not allowed on <tableName> since its partition metadata is not stored in the Hive metastore. To import this information into the metastore, run `msck repair table <tableName>`."
]
},
"_LEGACY_ERROR_TEMP_1255" : {
"message" : [
"Cannot drop built-in function '<functionName>'."
]
},
"_LEGACY_ERROR_TEMP_1256" : {
"message" : [
"Cannot refresh built-in function <functionName>."
]
},
"_LEGACY_ERROR_TEMP_1257" : {
"message" : [
"Cannot refresh temporary function <functionName>."
]
},
"_LEGACY_ERROR_TEMP_1259" : {
"message" : [
"ALTER ADD COLUMNS does not support views. You must drop and re-create the views for adding the new columns. Views: <table>."
]
},
"_LEGACY_ERROR_TEMP_1260" : {
"message" : [
"ALTER ADD COLUMNS does not support datasource table with type <tableType>. You must drop and re-create the table for adding the new columns. Tables: <table>."
]
},
"_LEGACY_ERROR_TEMP_1261" : {
"message" : [
"LOAD DATA is not supported for datasource tables: <tableIdentWithDB>."
]
},
"_LEGACY_ERROR_TEMP_1262" : {
"message" : [
"LOAD DATA target table <tableIdentWithDB> is partitioned, but no partition spec is provided."
]
},
"_LEGACY_ERROR_TEMP_1263" : {
"message" : [
"LOAD DATA target table <tableIdentWithDB> is partitioned, but number of columns in provided partition spec (<partitionSize>) do not match number of partitioned columns in table (<targetTableSize>)."
]
},
"_LEGACY_ERROR_TEMP_1264" : {
"message" : [
"LOAD DATA target table <tableIdentWithDB> is not partitioned, but a partition spec was provided."
]
},
"_LEGACY_ERROR_TEMP_1266" : {
"message" : [
"Operation not allowed: TRUNCATE TABLE on external tables: <tableIdentWithDB>."
]
},
"_LEGACY_ERROR_TEMP_1267" : {
"message" : [
"Operation not allowed: TRUNCATE TABLE ... PARTITION is not supported for tables that are not partitioned: <tableIdentWithDB>."
]
},
"_LEGACY_ERROR_TEMP_1268" : {
"message" : [
"Failed to truncate table <tableIdentWithDB> when removing data of the path: <path>."
]
},
"_LEGACY_ERROR_TEMP_1276" : {
"message" : [
"The logical plan that represents the view is not analyzed."
]
},
"_LEGACY_ERROR_TEMP_1280" : {
"message" : [
"It is not allowed to create a persisted view from the Dataset API."
]
},
"_LEGACY_ERROR_TEMP_1286" : {
"message" : [
"User-defined partition column <columnName> not found in the JDBC relation: <schema>."
]
},
"_LEGACY_ERROR_TEMP_1287" : {
"message" : [
"Partition column type should be <numericType>, <dateType>, or <timestampType>, but <dataType> found."
]
},
"_LEGACY_ERROR_TEMP_1288" : {
"message" : [
"Table or view '<name>' already exists. SaveMode: ErrorIfExists."
]
},
"_LEGACY_ERROR_TEMP_1290" : {
"message" : [
"Text data source supports only a single column, and you have <schemaSize> columns."
]
},
"_LEGACY_ERROR_TEMP_1291" : {
"message" : [
"Can't find required partition column <readField> in partition schema <partitionSchema>."
]
},
"_LEGACY_ERROR_TEMP_1292" : {
"message" : [
"Temporary view '<tableIdent>' should not have specified a database."
]
},
"_LEGACY_ERROR_TEMP_1293" : {
"message" : [
"Hive data source can only be used with tables, you can't use it with CREATE TEMP VIEW USING."
]
},
"_LEGACY_ERROR_TEMP_1294" : {
"message" : [
"The timestamp provided for the '<strategy>' option is invalid. The expected format is 'YYYY-MM-DDTHH:mm:ss', but the provided timestamp: <timeString>."
]
},
"_LEGACY_ERROR_TEMP_1295" : {
"message" : [
"Set a host to read from with option(\"host\", ...)."
]
},
"_LEGACY_ERROR_TEMP_1296" : {
"message" : [
"Set a port to read from with option(\"port\", ...)."
]
},
"_LEGACY_ERROR_TEMP_1297" : {
"message" : [
"IncludeTimestamp must be set to either \"true\" or \"false\"."
]
},
"_LEGACY_ERROR_TEMP_1298" : {
"message" : [
"checkpointLocation must be specified either through option(\"checkpointLocation\", ...) or SparkSession.conf.set(\"<config>\", ...)."
]
},
"_LEGACY_ERROR_TEMP_1299" : {
"message" : [
"This query does not support recovering from checkpoint location. Delete <checkpointPath> to start over."
]
},
"_LEGACY_ERROR_TEMP_1300" : {
"message" : [
"Unable to find the column `<colName>` given [<actualColumns>]."
]
},
"_LEGACY_ERROR_TEMP_1306" : {
"message" : [
"There is a 'path' or 'paths' option set and load() is called with path parameters. Either remove the path option if it's the same as the path parameter, or add it to the load() parameter if you do want to read multiple paths. To ignore this check, set '<config>' to 'true'."
]
},
"_LEGACY_ERROR_TEMP_1307" : {
"message" : [
"There is a 'path' option set and save() is called with a path parameter. Either remove the path option, or call save() without the parameter. To ignore this check, set '<config>' to 'true'."
]
},
"_LEGACY_ERROR_TEMP_1309" : {
"message" : [
"insertInto() can't be used together with partitionBy(). Partition columns have already been defined for the table. It is not necessary to use partitionBy()."
]
},
"_LEGACY_ERROR_TEMP_1310" : {
"message" : [
"Couldn't find a catalog to handle the identifier <quote>."
]
},
"_LEGACY_ERROR_TEMP_1312" : {
"message" : [
"'<operation>' does not support bucketBy right now."
]
},
"_LEGACY_ERROR_TEMP_1313" : {
"message" : [
"'<operation>' does not support bucketBy and sortBy right now."
]
},
"_LEGACY_ERROR_TEMP_1316" : {
"message" : [
"Invalid partition transformation: <expr>."
]
},
"_LEGACY_ERROR_TEMP_1320" : {
"message" : [
"Typed column <typedCol> that needs input type and schema cannot be passed in untyped `select` API. Use the typed `Dataset.select` API instead."
]
},
"_LEGACY_ERROR_TEMP_1321" : {
"message" : [
"Invalid view name: <viewName>."
]
},
"_LEGACY_ERROR_TEMP_1322" : {
"message" : [
"Invalid number of buckets: bucket(<numBuckets>, <e>)."
]
},
"_LEGACY_ERROR_TEMP_1323" : {
"message" : [
"\"<colName>\" is not a numeric column. Aggregation function can only be applied on a numeric column."
]
},
"_LEGACY_ERROR_TEMP_1324" : {
"message" : [
"The pivot column <pivotColumn> has more than <maxValues> distinct values, this could indicate an error. If this was intended, set <config> to at least the number of distinct values of the pivot column."
]
},
"_LEGACY_ERROR_TEMP_1327" : {
"message" : [
"Command execution is not supported in runner <runner>."
]
},
"_LEGACY_ERROR_TEMP_1328" : {
"message" : [
"Can not instantiate class <className>, please make sure it has public non argument constructor."
]
},
"_LEGACY_ERROR_TEMP_1329" : {
"message" : [
"Can not load class <className>, please make sure it is on the classpath."
]
},
"_LEGACY_ERROR_TEMP_1330" : {
"message" : [
"Class <className> doesn't implement interface UserDefinedAggregateFunction."
]
},
"_LEGACY_ERROR_TEMP_1332" : {
"message" : [
"<errorMessage>"
]
},
"_LEGACY_ERROR_TEMP_1338" : {
"message" : [
"Sinks cannot request distribution and ordering in continuous execution mode."
]
},
"_LEGACY_ERROR_TEMP_2003" : {
"message" : [
"Unsuccessful try to zip maps with <size> unique keys due to exceeding the array size limit <maxRoundedArrayLength>."
]
},
"_LEGACY_ERROR_TEMP_2017" : {
"message" : [
"not resolved."
]
},
"_LEGACY_ERROR_TEMP_2026" : {
"message" : [
"Failed to convert value <value> (class of <cls>) with the type of <dataType> to JSON."
]
},
"_LEGACY_ERROR_TEMP_2027" : {
"message" : [
"Unexpected operator <op> in correlated subquery<pos>."
]
},
"_LEGACY_ERROR_TEMP_2028" : {
"message" : [
"This line should be unreachable<err>."
]
},
"_LEGACY_ERROR_TEMP_2030" : {
"message" : [
"Can not handle nested schema yet... plan <plan>."
]
},
"_LEGACY_ERROR_TEMP_2031" : {
"message" : [
"The input external row cannot be null."
]
},
"_LEGACY_ERROR_TEMP_2032" : {
"message" : [
"<fieldCannotBeNullMsg>"
]
},
"_LEGACY_ERROR_TEMP_2033" : {
"message" : [
"Unable to create database <name> as failed to create its directory <locationUri>."
]
},
"_LEGACY_ERROR_TEMP_2034" : {
"message" : [
"Unable to drop database <name> as failed to delete its directory <locationUri>."
]
},
"_LEGACY_ERROR_TEMP_2035" : {
"message" : [
"Unable to create table <table> as failed to create its directory <defaultTableLocation>."
]
},
"_LEGACY_ERROR_TEMP_2036" : {
"message" : [
"Unable to delete partition path <partitionPath>."
]
},
"_LEGACY_ERROR_TEMP_2037" : {
"message" : [
"Unable to drop table <table> as failed to delete its directory <dir>."
]
},
"_LEGACY_ERROR_TEMP_2038" : {
"message" : [
"Unable to rename table <oldName> to <newName> as failed to rename its directory <oldDir>."
]
},
"_LEGACY_ERROR_TEMP_2039" : {
"message" : [
"Unable to create partition path <partitionPath>."
]
},
"_LEGACY_ERROR_TEMP_2040" : {
"message" : [
"Unable to rename partition path <oldPartPath>."
]
},
"_LEGACY_ERROR_TEMP_2041" : {
"message" : [
"<methodName> is not implemented."
]
},
"_LEGACY_ERROR_TEMP_2045" : {
"message" : [
"Unsupported table change: <message>"
]
},
"_LEGACY_ERROR_TEMP_2046" : {
"message" : [
"[BUG] Not a DataSourceRDDPartition: <split>."
]
},
"_LEGACY_ERROR_TEMP_2047" : {
"message" : [
"'path' is not specified."
]
},
"_LEGACY_ERROR_TEMP_2048" : {
"message" : [
"Schema must be specified when creating a streaming source DataFrame. If some files already exist in the directory, then depending on the file format you may be able to create a static DataFrame on that directory with 'spark.read.load(directory)' and infer schema from it."
]
},
"_LEGACY_ERROR_TEMP_2049" : {
"message" : [
"Data source <className> does not support streamed <operator>."
]
},
"_LEGACY_ERROR_TEMP_2050" : {
"message" : [
"Expected exactly one path to be specified, but got: <paths>."
]
},
"_LEGACY_ERROR_TEMP_2052" : {
"message" : [
"<className> was removed in Spark 2.0. Please check if your library is compatible with Spark 2.0."
]
},
"_LEGACY_ERROR_TEMP_2053" : {
"message" : [
"buildReader is not supported for <format>."
]
},
"_LEGACY_ERROR_TEMP_2056" : {
"message" : [
"Unable to clear output directory <staticPrefixPath> prior to writing to it."
]
},
"_LEGACY_ERROR_TEMP_2057" : {
"message" : [
"Unable to clear partition directory <path> prior to writing to it."
]
},
"_LEGACY_ERROR_TEMP_2059" : {
"message" : [
"End of stream."
]
},
"_LEGACY_ERROR_TEMP_2060" : {
"message" : [
"The fallback v1 relation reports inconsistent schema:",
"Schema of v2 scan: <v2Schema>.",
"Schema of v1 relation: <v1Schema>."
]
},
"_LEGACY_ERROR_TEMP_2061" : {
"message" : [
"No records should be returned from EmptyDataReader."
]
},
"_LEGACY_ERROR_TEMP_2065" : {
"message" : [
"Cannot create columnar reader."
]
},
"_LEGACY_ERROR_TEMP_2066" : {
"message" : [
"Invalid namespace name: <namespace>."
]
},
"_LEGACY_ERROR_TEMP_2068" : {
"message" : [
"Missing database location."
]
},
"_LEGACY_ERROR_TEMP_2070" : {
"message" : [
"Writing job failed."
]
},
"_LEGACY_ERROR_TEMP_2071" : {
"message" : [
"Commit denied for partition <partId> (task <taskId>, attempt <attemptId>, stage <stageId>.<stageAttempt>)."
]
},
"_LEGACY_ERROR_TEMP_2073" : {
"message" : [
"Cannot create JDBC table with partition."
]
},
"_LEGACY_ERROR_TEMP_2074" : {
"message" : [
"user-specified schema."
]
},
"_LEGACY_ERROR_TEMP_2076" : {
"message" : [
"The length of <path> is <len>, which exceeds the max length allowed: <maxLength>."
]
},
"_LEGACY_ERROR_TEMP_2077" : {
"message" : [
"Unsupported field name: <fieldName>."
]
},
"_LEGACY_ERROR_TEMP_2078" : {
"message" : [
"Both '<jdbcTableName>' and '<jdbcQueryString>' can not be specified at the same time."
]
},
"_LEGACY_ERROR_TEMP_2079" : {
"message" : [
"Option '<jdbcTableName>' or '<jdbcQueryString>' is required."
]
},
"_LEGACY_ERROR_TEMP_2080" : {
"message" : [
"Option `<optionName>` can not be empty."
]
},
"_LEGACY_ERROR_TEMP_2081" : {
"message" : [
"Invalid value `<value>` for parameter `<jdbcTxnIsolationLevel>`. This can be `NONE`, `READ_UNCOMMITTED`, `READ_COMMITTED`, `REPEATABLE_READ` or `SERIALIZABLE`."
]
},
"_LEGACY_ERROR_TEMP_2082" : {
"message" : [
"Can't get JDBC type for <catalogString>."
]
},
"_LEGACY_ERROR_TEMP_2083" : {
"message" : [
"Unsupported type <content>."
]
},
"_LEGACY_ERROR_TEMP_2084" : {
"message" : [
"Unsupported array element type <catalogString> based on binary."
]
},
"_LEGACY_ERROR_TEMP_2085" : {
"message" : [
"Nested arrays unsupported."
]
},
"_LEGACY_ERROR_TEMP_2086" : {
"message" : [
"Can't translate non-null value for field <pos>."
]
},
"_LEGACY_ERROR_TEMP_2087" : {
"message" : [
"Invalid value `<n>` for parameter `<jdbcNumPartitions>` in table writing via JDBC. The minimum value is 1."
]
},
"_LEGACY_ERROR_TEMP_2089" : {
"message" : [
"DataType: <catalogString>."
]
},
"_LEGACY_ERROR_TEMP_2090" : {
"message" : [
"The input filter of <owner> should be fully convertible."
]
},
"_LEGACY_ERROR_TEMP_2093" : {
"message" : [
"Found duplicate field(s) \"<requiredFieldName>\": <matchedOrcFields> in case-insensitive mode."
]
},
"_LEGACY_ERROR_TEMP_2094" : {
"message" : [
"Found duplicate field(s) \"<requiredId>\": <matchedFields> in id mapping mode."
]
},
"_LEGACY_ERROR_TEMP_2095" : {
"message" : [
"Failed to merge incompatible schemas <left> and <right>."
]
},
"_LEGACY_ERROR_TEMP_2097" : {
"message" : [
"Could not execute broadcast in <timeout> secs. You can increase the timeout for broadcasts via <broadcastTimeout> or disable broadcast join by setting <autoBroadcastJoinThreshold> to -1 or remove the broadcast hint if it exists in your code."
]
},
"_LEGACY_ERROR_TEMP_2098" : {
"message" : [
"Could not compare cost with <cost>."
]
},
"_LEGACY_ERROR_TEMP_2100" : {
"message" : [
"not support type: <dataType>."
]
},
"_LEGACY_ERROR_TEMP_2101" : {
"message" : [
"Not support non-primitive type now."
]
},
"_LEGACY_ERROR_TEMP_2103" : {
"message" : [
"Dictionary encoding should not be used because of dictionary overflow."
]
},
"_LEGACY_ERROR_TEMP_2105" : {
"message" : [
"Could not allocate memory to grow BytesToBytesMap."
]
},
"_LEGACY_ERROR_TEMP_2106" : {
"message" : [
"Can't acquire <size> bytes memory to build hash relation, got <got> bytes."
]
},
"_LEGACY_ERROR_TEMP_2107" : {
"message" : [
"There is not enough memory to build hash map."
]
},
"_LEGACY_ERROR_TEMP_2108" : {
"message" : [
"Does not support row that is larger than 256M."
]
},
"_LEGACY_ERROR_TEMP_2109" : {
"message" : [
"Cannot build HashedRelation with more than 1/3 billion unique keys."
]
},
"_LEGACY_ERROR_TEMP_2110" : {
"message" : [
"Cannot build a HashedRelation that is larger than 8G."
]
},
"_LEGACY_ERROR_TEMP_2111" : {
"message" : [
"Failed to push a row into <rowQueue>."
]
},
"_LEGACY_ERROR_TEMP_2112" : {
"message" : [
"Unexpected window function frame <frame>."
]
},
"_LEGACY_ERROR_TEMP_2115" : {
"message" : [
"Unknown column: <unknownColumn>."
]
},
"_LEGACY_ERROR_TEMP_2116" : {
"message" : [
"Unexpected: <o>."
]
},
"_LEGACY_ERROR_TEMP_2120" : {
"message" : [
"Do not support array of type <clazz>."
]
},
"_LEGACY_ERROR_TEMP_2121" : {
"message" : [
"Do not support type <clazz>."
]
},
"_LEGACY_ERROR_TEMP_2124" : {
"message" : [
"Failed to merge decimal types with incompatible scale <leftScale> and <rightScale>."
]
},
"_LEGACY_ERROR_TEMP_2126" : {
"message" : [
"Unsuccessful attempt to build maps with <size> elements due to exceeding the map size limit <maxRoundedArrayLength>."
]
},
"_LEGACY_ERROR_TEMP_2128" : {
"message" : [
"The key array and value array of MapData must have the same length."
]
},
"_LEGACY_ERROR_TEMP_2129" : {
"message" : [
"Conflict found: Field <field> <actual> differs from <field> <expected> derived from <candidate>."
]
},
"_LEGACY_ERROR_TEMP_2131" : {
"message" : [
"Exception when registering StreamingQueryListener."
]
},
"_LEGACY_ERROR_TEMP_2144" : {
"message" : [
"Unable to find constructor for <tpe>. This could happen if <tpe> is an interface, or a trait without companion object constructor."
]
},
"_LEGACY_ERROR_TEMP_2145" : {
"message" : [
"<paramName> cannot be more than one character."
]
},
"_LEGACY_ERROR_TEMP_2146" : {
"message" : [
"<paramName> should be an integer. Found <value>."
]
},
"_LEGACY_ERROR_TEMP_2147" : {
"message" : [
"<paramName> flag can be true or false."
]
},
"_LEGACY_ERROR_TEMP_2148" : {
"message" : [
"null value found but field <name> is not nullable."
]
},
"_LEGACY_ERROR_TEMP_2154" : {
"message" : [
"Failed to get outer pointer for <innerCls>."
]
},
"_LEGACY_ERROR_TEMP_2163" : {
"message" : [
"Initial type <dataType> must be a <target>."
]
},
"_LEGACY_ERROR_TEMP_2164" : {
"message" : [
"Initial type <dataType> must be an <arrayType>, a <structType> or a <mapType>."
]
},
"_LEGACY_ERROR_TEMP_2166" : {
"message" : [
"Malformed JSON."
]
},
"_LEGACY_ERROR_TEMP_2168" : {
"message" : [
"Decorrelate inner query through <plan> is not supported."
]
},
"_LEGACY_ERROR_TEMP_2169" : {
"message" : [
"This method should not be called in the analyzer."
]
},
"_LEGACY_ERROR_TEMP_2170" : {
"message" : [
"Cannot safely merge SERDEPROPERTIES:",
"<props1>",
"<props2>",
"The conflict keys: <conflictKeys>."
]
},
"_LEGACY_ERROR_TEMP_2171" : {
"message" : [
"Not supported pair: <r1>, <r2> at <function>()."
]
},
"_LEGACY_ERROR_TEMP_2172" : {
"message" : [
"Once strategy's idempotence is broken for batch <batchName>",
"<plan>."
]
},
"_LEGACY_ERROR_TEMP_2176" : {
"message" : [
"Cannot create array with <numElements> elements of data due to exceeding the limit <maxRoundedArrayLength> elements for ArrayData. <additionalErrorMessage>"
]
},
"_LEGACY_ERROR_TEMP_2179" : {
"message" : [
"HiveServer2 Kerberos principal or keytab is not correctly configured."
]
},
"_LEGACY_ERROR_TEMP_2180" : {
"message" : [
"Parent SparkUI to attach this tab to not found."
]
},
"_LEGACY_ERROR_TEMP_2181" : {
"message" : [
"inferSchema is not supported for hive data source."
]
},
"_LEGACY_ERROR_TEMP_2182" : {
"message" : [
"Requested partitioning does not match the <tableIdentifier> table:",
"Requested partitions: <partitionKeys>.",
"Table partitions: <partitionColumnNames>."
]
},
"_LEGACY_ERROR_TEMP_2183" : {
"message" : [
"Dynamic partition key <key> is not among written partition paths."
]
},
"_LEGACY_ERROR_TEMP_2184" : {
"message" : [
"Cannot remove partition directory '<partitionPath>'."
]
},
"_LEGACY_ERROR_TEMP_2185" : {
"message" : [
"Cannot create staging directory: <message>"
]
},
"_LEGACY_ERROR_TEMP_2194" : {
"message" : [
"Unsupported Hive Metastore version <version>. Please set <key> with a valid version."
]
},
"_LEGACY_ERROR_TEMP_2195" : {
"message" : [
"<cnf> when creating Hive client using classpath: <execJars> Please make sure that jars for your version of hive and hadoop are included in the paths passed to <key>."
]
},
"_LEGACY_ERROR_TEMP_2198" : {
"message" : [
"Failed to rename as <dstPath> already exists."
]
},
"_LEGACY_ERROR_TEMP_2200" : {
"message" : [
"Error: we detected a possible problem with the location of your \"_spark_metadata\"",
"directory and you likely need to move it before restarting this query.",
"",
"Earlier version of Spark incorrectly escaped paths when writing out the",
"\"_spark_metadata\" directory for structured streaming. While this was corrected in",
"Spark 3.0, it appears that your query was started using an earlier version that",
"",
"Correct \"_spark_metadata\" Directory: <metadataPath>",
"Incorrect \"_spark_metadata\" Directory: <legacyMetadataPath>",
"",
"Please move the data from the incorrect directory to the correct one, delete the",
"incorrect directory, and then restart this query. If you believe you are receiving",
"this message in error, you can disable it with the SQL conf",
"<StreamingCheckpointEscapedPathCheckEnabled>."
]
},
"_LEGACY_ERROR_TEMP_2201" : {
"message" : [
"Partition column <col> not found in schema <schema>."
]
},
"_LEGACY_ERROR_TEMP_2203" : {
"message" : [
"Cannot set timeout duration without enabling processing time timeout in [map|flatMap]GroupsWithState."
]
},
"_LEGACY_ERROR_TEMP_2204" : {
"message" : [
"Cannot get event time watermark timestamp without setting watermark before [map|flatMap]GroupsWithState."
]
},
"_LEGACY_ERROR_TEMP_2205" : {
"message" : [
"Cannot set timeout timestamp without enabling event time timeout in [map|flatMapGroupsWithState."
]
},
"_LEGACY_ERROR_TEMP_2207" : {
"message" : [
"Multiple streaming queries are concurrently using <path>."
]
},
"_LEGACY_ERROR_TEMP_2208" : {
"message" : [
"<commitProtocol> does not support adding files with an absolute path."
]
},
"_LEGACY_ERROR_TEMP_2209" : {
"message" : [
"Data source <srcName> does not support microbatch processing.",
"",
"Either the data source is disabled at",
"SQLConf.get.DISABLED_V2_STREAMING_MICROBATCH_READERS.key (The disabled sources",
"are [<disabledSources>]) or the table <table> does not have MICRO_BATCH_READ",
"capability. Meanwhile, the fallback, data source v1, is not available.\""
]
},
"_LEGACY_ERROR_TEMP_2210" : {
"message" : [
"StreamingRelationExec cannot be executed."
]
},
"_LEGACY_ERROR_TEMP_2212" : {
"message" : [
"Invalid catalog name: <name>."
]
},
"_LEGACY_ERROR_TEMP_2214" : {
"message" : [
"Plugin class for catalog '<name>' does not implement CatalogPlugin: <pluginClassName>."
]
},
"_LEGACY_ERROR_TEMP_2215" : {
"message" : [
"Cannot find catalog plugin class for catalog '<name>': <pluginClassName>."
]
},
"_LEGACY_ERROR_TEMP_2216" : {
"message" : [
"Failed to find public no-arg constructor for catalog '<name>': <pluginClassName>)."
]
},
"_LEGACY_ERROR_TEMP_2217" : {
"message" : [
"Failed to call public no-arg constructor for catalog '<name>': <pluginClassName>)."
]
},
"_LEGACY_ERROR_TEMP_2218" : {
"message" : [
"Cannot instantiate abstract catalog plugin class for catalog '<name>': <pluginClassName>."
]
},
"_LEGACY_ERROR_TEMP_2219" : {
"message" : [
"Failed during instantiating constructor for catalog '<name>': <pluginClassName>."
]
},
"_LEGACY_ERROR_TEMP_2220" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_2222" : {
"message" : [
"Cannot mutate ReadOnlySQLConf."
]
},
"_LEGACY_ERROR_TEMP_2223" : {
"message" : [
"Cannot clone/copy ReadOnlySQLConf."
]
},
"_LEGACY_ERROR_TEMP_2224" : {
"message" : [
"Cannot get SQLConf inside scheduler event loop thread."
]
},
"_LEGACY_ERROR_TEMP_2225" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_2226" : {
"message" : [
"null literals can't be casted to <name>."
]
},
"_LEGACY_ERROR_TEMP_2227" : {
"message" : [
"<name> is not an UserDefinedType. Please make sure registering an UserDefinedType for <userClass>."
]
},
"_LEGACY_ERROR_TEMP_2228" : {
"message" : [
"Can not load in UserDefinedType <name> for user class <userClass>."
]
},
"_LEGACY_ERROR_TEMP_2229" : {
"message" : [
"<name> is not a public class. Only public classes are supported."
]
},
"_LEGACY_ERROR_TEMP_2230" : {
"message" : [
"Primitive types are not supported."
]
},
"_LEGACY_ERROR_TEMP_2233" : {
"message" : [
"Only Data Sources providing FileFormat are supported: <providingClass>."
]
},
"_LEGACY_ERROR_TEMP_2234" : {
"message" : [
"Failed to set original ACL <aclEntries> back to the created path: <path>. Exception: <message>"
]
},
"_LEGACY_ERROR_TEMP_2236" : {
"message" : [
"Unrecognized compression scheme type ID: <typeId>."
]
},
"_LEGACY_ERROR_TEMP_2237" : {
"message" : [
"<className>.getParentLogger is not yet implemented."
]
},
"_LEGACY_ERROR_TEMP_2241" : {
"message" : [
"Nonatomic partition table <tableName> can not add multiple partitions."
]
},
"_LEGACY_ERROR_TEMP_2242" : {
"message" : [
"<provider> source does not support user-specified schema."
]
},
"_LEGACY_ERROR_TEMP_2243" : {
"message" : [
"Nonatomic partition table <tableName> can not drop multiple partitions."
]
},
"_LEGACY_ERROR_TEMP_2244" : {
"message" : [
"The table <tableName> does not support truncation of multiple partition."
]
},
"_LEGACY_ERROR_TEMP_2245" : {
"message" : [
"Table does not support overwrite by expression: <table>."
]
},
"_LEGACY_ERROR_TEMP_2246" : {
"message" : [
"Table does not support dynamic partition overwrite: <table>."
]
},
"_LEGACY_ERROR_TEMP_2248" : {
"message" : [
"Cannot broadcast the table over <maxBroadcastTableRows> rows: <numRows> rows."
]
},
"_LEGACY_ERROR_TEMP_2249" : {
"message" : [
"Cannot broadcast the table that is larger than <maxBroadcastTableBytes>: <dataSize>."
]
},
"_LEGACY_ERROR_TEMP_2250" : {
"message" : [
"Not enough memory to build and broadcast the table to all worker nodes. As a workaround, you can either disable broadcast by setting <autoBroadcastJoinThreshold> to -1 or increase the spark driver memory by setting <driverMemory> to a higher value<analyzeTblMsg> or apply the shuffle sort merge join hint as described in the Spark documentation: https://spark.apache.org/docs/latest/sql-ref-syntax-qry-select-hints.html#join-hints."
]
},
"_LEGACY_ERROR_TEMP_2251" : {
"message" : [
"<execName> does not support the execute() code path."
]
},
"_LEGACY_ERROR_TEMP_2252" : {
"message" : [
"Cannot merge <className> with <otherClass>."
]
},
"_LEGACY_ERROR_TEMP_2253" : {
"message" : [
"Data source <sourceName> does not support continuous processing."
]
},
"_LEGACY_ERROR_TEMP_2254" : {
"message" : [
"Data read failed."
]
},
"_LEGACY_ERROR_TEMP_2255" : {
"message" : [
"Epoch marker generation failed."
]
},
"_LEGACY_ERROR_TEMP_2256" : {
"message" : [
"Foreach writer has been aborted due to a task failure."
]
},
"_LEGACY_ERROR_TEMP_2260" : {
"message" : [
"Cannot purge as it might break internal state."
]
},
"_LEGACY_ERROR_TEMP_2261" : {
"message" : [
"Clean up source files is not supported when reading from the output directory of FileStreamSink."
]
},
"_LEGACY_ERROR_TEMP_2262" : {
"message" : [
"latestOffset(Offset, ReadLimit) should be called instead of this method."
]
},
"_LEGACY_ERROR_TEMP_2263" : {
"message" : [
"Error: we detected a possible problem with the location of your checkpoint and you",
"likely need to move it before restarting this query.",
"",
"Earlier version of Spark incorrectly escaped paths when writing out checkpoints for",
"structured streaming. While this was corrected in Spark 3.0, it appears that your",
"query was started using an earlier version that incorrectly handled the checkpoint",
"path.",
"",
"Correct Checkpoint Directory: <checkpointPath>",
"Incorrect Checkpoint Directory: <legacyCheckpointDir>",
"",
"Please move the data from the incorrect directory to the correct one, delete the",
"incorrect directory, and then restart this query. If you believe you are receiving",
"this message in error, you can disable it with the SQL conf",
"<StreamingCheckpointEscapedPathCheckEnabled>."
]
},
"_LEGACY_ERROR_TEMP_2264" : {
"message" : [
"Subprocess exited with status <exitCode>. Error: <stderrBuffer>."
]
},
"_LEGACY_ERROR_TEMP_2265" : {
"message" : [
"<nodeName> without serde does not support <dt> as output data type."
]
},
"_LEGACY_ERROR_TEMP_2266" : {
"message" : [
"Invalid `startIndex` provided for generating iterator over the array. Total elements: <numRows>, requested `startIndex`: <startIndex>."
]
},
"_LEGACY_ERROR_TEMP_2267" : {
"message" : [
"The backing <className> has been modified since the creation of this Iterator."
]
},
"_LEGACY_ERROR_TEMP_2268" : {
"message" : [
"<nodeName> does not implement doExecuteBroadcast."
]
},
"_LEGACY_ERROR_TEMP_2269" : {
"message" : [
"<globalTempDB> is a system preserved database, please rename your existing database to resolve the name conflict, or set a different value for <globalTempDatabase>, and launch your Spark application again."
]
},
"_LEGACY_ERROR_TEMP_2270" : {
"message" : [
"comment on table is not supported."
]
},
"_LEGACY_ERROR_TEMP_2272" : {
"message" : [
"Rename column is only supported for MySQL version 8.0 and above."
]
},
"_LEGACY_ERROR_TEMP_2273" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_2330" : {
"message" : [
"Cannot change nullable column to non-nullable: <fieldName>."
]
},
"_LEGACY_ERROR_TEMP_2446" : {
"message" : [
"Operation not allowed: <cmd> only works on table with location provided: <tableIdentWithDB>"
]
},
"_LEGACY_ERROR_TEMP_2450" : {
"message" : [
"No handler for UDF/UDAF/UDTF '<clazz>'"
]
},
"_LEGACY_ERROR_TEMP_3000" : {
"message" : [
"Unexpected Py4J server <class>."
]
},
"_LEGACY_ERROR_TEMP_3001" : {
"message" : [
"EOFException occurred while reading the port number from <daemonModule>'s stdout<additionalMessage>."
]
},
"_LEGACY_ERROR_TEMP_3002" : {
"message" : [
"Data of type <other> is not supported"
]
},
"_LEGACY_ERROR_TEMP_3003" : {
"message" : [
"Could not compute split, block <blockId> of RDD <id> not found"
]
},
"_LEGACY_ERROR_TEMP_3004" : {
"message" : [
"Attempted to use <string> after its blocks have been removed!"
]
},
"_LEGACY_ERROR_TEMP_3005" : {
"message" : [
"Histogram on either an empty RDD or RDD containing +/-infinity or NaN"
]
},
"_LEGACY_ERROR_TEMP_3006" : {
"message" : [
"empty RDD"
]
},
"_LEGACY_ERROR_TEMP_3008" : {
"message" : [
"Cannot use map-side combining with array keys."
]
},
"_LEGACY_ERROR_TEMP_3009" : {
"message" : [
"HashPartitioner cannot partition array keys."
]
},
"_LEGACY_ERROR_TEMP_3010" : {
"message" : [
"reduceByKeyLocally() does not support array keys"
]
},
"_LEGACY_ERROR_TEMP_3011" : {
"message" : [
"This RDD lacks a SparkContext. It could happen in the following cases:",
"(1) RDD transformations and actions are NOT invoked by the driver, but inside of other transformations; for example, rdd1.map(x => rdd2.values.count() * x) is invalid because the values transformation and count action cannot be performed inside of the rdd1.map transformation. For more information, see SPARK-5063.",
"(2) When a Spark Streaming job recovers from checkpoint, this exception will be hit if a reference to an RDD not defined by the streaming job is used in DStream operations. For more information, See SPARK-13758."
]
},
"_LEGACY_ERROR_TEMP_3012" : {
"message" : [
"Cannot change storage level of an RDD after it was already assigned a level"
]
},
"_LEGACY_ERROR_TEMP_3013" : {
"message" : [
"Can only zip RDDs with same number of elements in each partition"
]
},
"_LEGACY_ERROR_TEMP_3014" : {
"message" : [
"empty collection"
]
},
"_LEGACY_ERROR_TEMP_3015" : {
"message" : [
"countByValueApprox() does not support arrays"
]
},
"_LEGACY_ERROR_TEMP_3016" : {
"message" : [
"Checkpoint directory has not been set in the SparkContext"
]
},
"_LEGACY_ERROR_TEMP_3017" : {
"message" : [
"Invalid checkpoint file: <path>"
]
},
"_LEGACY_ERROR_TEMP_3018" : {
"message" : [
"Failed to create checkpoint path <checkpointDirPath>"
]
},
"_LEGACY_ERROR_TEMP_3019" : {
"message" : [
"Checkpoint RDD has a different number of partitions from original RDD. Original",
"RDD [ID: <originalRDDId>, num of partitions: <originalRDDLength>];",
"Checkpoint RDD [ID: <newRDDId>, num of partitions: <newRDDLength>]."
]
},
"_LEGACY_ERROR_TEMP_3020" : {
"message" : [
"Checkpoint dir must be specified."
]
},
"_LEGACY_ERROR_TEMP_3021" : {
"message" : [
"Error asking standalone scheduler to shut down executors"
]
},
"_LEGACY_ERROR_TEMP_3022" : {
"message" : [
"Error stopping standalone scheduler's driver endpoint"
]
},
"_LEGACY_ERROR_TEMP_3023" : {
"message" : [
"Can't run submitMapStage on RDD with 0 partitions"
]
},
"_LEGACY_ERROR_TEMP_3024" : {
"message" : [
"attempted to access non-existent accumulator <id>"
]
},
"_LEGACY_ERROR_TEMP_3025" : {
"message" : [
"TaskSetManagers should only send Resubmitted task statuses for tasks in ShuffleMapStages."
]
},
"_LEGACY_ERROR_TEMP_3026" : {
"message" : [
"duration() called on unfinished task"
]
},
"_LEGACY_ERROR_TEMP_3028" : {
"message" : [
"<errorMsg>"
]
},
"_LEGACY_ERROR_TEMP_3029" : {
"message" : [
"Exiting due to error from cluster scheduler: <message>"
]
},
"_LEGACY_ERROR_TEMP_3030" : {
"message" : [
"Task <currentTaskAttemptId> has not locked block <blockId> for writing"
]
},
"_LEGACY_ERROR_TEMP_3031" : {
"message" : [
"Block <blockId> does not exist"
]
},
"_LEGACY_ERROR_TEMP_3032" : {
"message" : [
"Error occurred while waiting for replication to finish"
]
},
"_LEGACY_ERROR_TEMP_3033" : {
"message" : [
"Unable to register with external shuffle server due to : <message>"
]
},
"_LEGACY_ERROR_TEMP_3034" : {
"message" : [
"Error occurred while waiting for async. reregistration"
]
},
"_LEGACY_ERROR_TEMP_3035" : {
"message" : [
"Unexpected shuffle block <blockId> with unsupported shuffle resolver <shuffleBlockResolver>"
]
},
"_LEGACY_ERROR_TEMP_3036" : {
"message" : [
"Failure while trying to store block <blockId> on <blockManagerId>."
]
},
"_LEGACY_ERROR_TEMP_3037" : {
"message" : [
"Block <blockId> was not found even though it's read-locked"
]
},
"_LEGACY_ERROR_TEMP_3038" : {
"message" : [
"get() failed for block <blockId> even though we held a lock"
]
},
"_LEGACY_ERROR_TEMP_3039" : {
"message" : [
"BlockManager returned null for BlockStatus query: <blockId>"
]
},
"_LEGACY_ERROR_TEMP_3040" : {
"message" : [
"BlockManagerMasterEndpoint returned false, expected true."
]
},
"_LEGACY_ERROR_TEMP_3041" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3042" : {
"message" : [
"Failed to get block <blockId>, which is not a shuffle block"
]
},
"_LEGACY_ERROR_TEMP_3052" : {
"message" : [
"Unexpected resolved action: <other>"
]
},
"_LEGACY_ERROR_TEMP_3053" : {
"message" : [
"Unexpected WHEN NOT MATCHED action: <other>"
]
},
"_LEGACY_ERROR_TEMP_3054" : {
"message" : [
"<expr> is not currently supported"
]
},
"_LEGACY_ERROR_TEMP_3056" : {
"message" : [
"Unexpected row-level read relations (allow multiple = <allowMultipleReads>): <other>"
]
},
"_LEGACY_ERROR_TEMP_3057" : {
"message" : [
"Cannot retrieve row-level operation from <table>"
]
},
"_LEGACY_ERROR_TEMP_3058" : {
"message" : [
"Found duplicate column(s) <checkType>: <duplicateColumns>"
]
},
"_LEGACY_ERROR_TEMP_3059" : {
"message" : [
"The positions provided (<pos>) cannot be resolved in",
"<schema>"
]
},
"_LEGACY_ERROR_TEMP_3060" : {
"message" : [
"Couldn't find column <i> in:",
"<schema>"
]
},
"_LEGACY_ERROR_TEMP_3061" : {
"message" : [
"<e>",
"<schema>"
]
},
"_LEGACY_ERROR_TEMP_3062" : {
"message" : [
"Expected <columnPath> to be a nested data type, but found <o>. Was looking for the index of <attr> in a nested field"
]
},
"_LEGACY_ERROR_TEMP_3063" : {
"message" : [
"pivot is not supported on a streaming DataFrames/Datasets"
]
},
"_LEGACY_ERROR_TEMP_3065" : {
"message" : [
"<clazz>: <msg>"
]
},
"_LEGACY_ERROR_TEMP_3067" : {
"message" : [
"Streaming aggregation doesn't support group aggregate pandas UDF"
]
},
"_LEGACY_ERROR_TEMP_3068" : {
"message" : [
"Global aggregation with session window in streaming query is not supported."
]
},
"_LEGACY_ERROR_TEMP_3069" : {
"message" : [
"<internalName> is a reserved column name that cannot be read in combination with <colName> column."
]
},
"_LEGACY_ERROR_TEMP_3070" : {
"message" : [
"<internalName> is a reserved column name that cannot be read in combination with <colName> column."
]
},
"_LEGACY_ERROR_TEMP_3071" : {
"message" : [
"<msg>"
]
},
"_LEGACY_ERROR_TEMP_3072" : {
"message" : [
"<msg>"
]
},
"_LEGACY_ERROR_TEMP_3073" : {
"message" : [
"Unexpected instruction: <other>"
]
},
"_LEGACY_ERROR_TEMP_3074" : {
"message" : [
"field <fieldName> not found from given schema <schema>"
]
},
"_LEGACY_ERROR_TEMP_3075" : {
"message" : [
"Couldn't find scan attribute for <tableAttr> in <scanAttrs>"
]
},
"_LEGACY_ERROR_TEMP_3076" : {
"message" : [
"Redefining watermark is disallowed. You can set the config '<config>' to 'false' to restore the previous behavior. Note that multiple stateful operators will be disallowed."
]
},
"_LEGACY_ERROR_TEMP_3077" : {
"message" : [
"More than one event time columns are available. Please ensure there is at most one event time column per stream. event time columns: <eventTimeCols>"
]
},
"_LEGACY_ERROR_TEMP_3079" : {
"message" : [
"Dynamic partition cannot be the parent of a static partition."
]
},
"_LEGACY_ERROR_TEMP_3080" : {
"message" : [
"<msg>"
]
},
"_LEGACY_ERROR_TEMP_3081" : {
"message" : [
"Save mode <mode> not allowed for Kafka. Allowed save modes are <append> and <errorIfExists> (default)."
]
},
"_LEGACY_ERROR_TEMP_3082" : {
"message" : [
"Creating bucketed Hive serde table is not supported yet."
]
},
"_LEGACY_ERROR_TEMP_3083" : {
"message" : [
"Unable to infer the schema. The schema specification is required to create the table <tableName>."
]
},
"_LEGACY_ERROR_TEMP_3084" : {
"message" : [
"No handler for UDF/UDAF/UDTF '<clazz>': <e>"
]
},
"_LEGACY_ERROR_TEMP_3086" : {
"message" : [
"Cannot persist <tableName> into Hive metastore as table property keys may not start with 'spark.sql.': <invalidKeys>"
]
},
"_LEGACY_ERROR_TEMP_3087" : {
"message" : [
"Cannot set or change the preserved property key: 'EXTERNAL'"
]
},
"_LEGACY_ERROR_TEMP_3088" : {
"message" : [
"The metadata is corrupted. Unable to find the partition column names from the schema. schema: <schema>. Partition columns: <partColumnNames>"
]
},
"_LEGACY_ERROR_TEMP_3089" : {
"message" : [
"Corrupted <typeName> in catalog: <numCols> parts expected, but part <index> is missing."
]
},
"_LEGACY_ERROR_TEMP_3090" : {
"message" : [
"Raw list type in java is unsupported because Spark cannot infer the element type."
]
},
"_LEGACY_ERROR_TEMP_3091" : {
"message" : [
"Raw map type in java is unsupported because Spark cannot infer key and value types."
]
},
"_LEGACY_ERROR_TEMP_3092" : {
"message" : [
"Collection types with wildcards (e.g. List<?> or Map<?, ?>) are unsupported because Spark cannot infer the data type for these type parameters."
]
},
"_LEGACY_ERROR_TEMP_3093" : {
"message" : [
"Unsupported java type <c>"
]
},
"_LEGACY_ERROR_TEMP_3094" : {
"message" : [
"<dt> is not supported."
]
},
"_LEGACY_ERROR_TEMP_3095" : {
"message" : [
"<dt> cannot be converted to Hive TypeInfo"
]
},
"_LEGACY_ERROR_TEMP_3096" : {
"message" : [
"Converted table has <resLen> columns,",
"but source Hive table has <relLen> columns.",
"Set <key> to false,",
"or recreate table <ident> to workaround."
]
},
"_LEGACY_ERROR_TEMP_3097" : {
"message" : [
"Column in converted table has different data type with source Hive table's.",
"Set <key> to false,",
"or recreate table <ident> to workaround."
]
},
"_LEGACY_ERROR_TEMP_3100" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_3101" : {
"message" : [
"The input is not a correct window column: <windowTime>"
]
},
"_LEGACY_ERROR_TEMP_3102" : {
"message" : [
"<msg>"
]
},
"_LEGACY_ERROR_TEMP_3103" : {
"message" : [
"Namespace '<namespace>' is non empty. <details>"
]
},
"_LEGACY_ERROR_TEMP_3104" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_3105" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_3106" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_3107" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_3108" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_3109" : {
"message" : [
"<message>"
]
},
"_LEGACY_ERROR_TEMP_3110" : {
"message" : [
"Cannot bind a V1 function."
]
},
"_LEGACY_ERROR_TEMP_3113" : {
"message" : [
"UnresolvedTableSpec doesn't have a data type"
]
},
"_LEGACY_ERROR_TEMP_3114" : {
"message" : [
"UnresolvedTableSpec doesn't have a data type"
]
},
"_LEGACY_ERROR_TEMP_3121" : {
"message" : [
"A HllSketch instance cannot be updates with a Spark <dataType> type"
]
},
"_LEGACY_ERROR_TEMP_3129" : {
"message" : [
"Cannot convert this array to unsafe format as it's too big."
]
},
"_LEGACY_ERROR_TEMP_3130" : {
"message" : [
"Cannot create BufferHolder for input UnsafeRow because there are too many fields (number of fields: <numFields>)"
]
},
"_LEGACY_ERROR_TEMP_3131" : {
"message" : [
"Unsupported data type <dataType>"
]
},
"_LEGACY_ERROR_TEMP_3132" : {
"message" : [
"CaseInsensitiveStringMap is read-only."
]
},
"_LEGACY_ERROR_TEMP_3133" : {
"message" : [
"<class> does not implement rowIdSchema"
]
},
"_LEGACY_ERROR_TEMP_3134" : {
"message" : [
"<class> does not implement metadataSchema"
]
},
"_LEGACY_ERROR_TEMP_3135" : {
"message" : [
"<class> does not support batch write"
]
},
"_LEGACY_ERROR_TEMP_3136" : {
"message" : [
"<class> does not support streaming write"
]
},
"_LEGACY_ERROR_TEMP_3137" : {
"message" : [
"<description>: Batch write is not supported"
]
},
"_LEGACY_ERROR_TEMP_3138" : {
"message" : [
"<description>: Streaming write is not supported"
]
},
"_LEGACY_ERROR_TEMP_3139" : {
"message" : [
"<description>: Delta batch write is not supported"
]
},
"_LEGACY_ERROR_TEMP_3140" : {
"message" : [
"<class> does not implement build"
]
},
"_LEGACY_ERROR_TEMP_3141" : {
"message" : [
"<class> does not support user defined function: <funcName>"
]
},
"_LEGACY_ERROR_TEMP_3142" : {
"message" : [
"<class> does not support user defined aggregate function: <funcName>"
]
},
"_LEGACY_ERROR_TEMP_3143" : {
"message" : [
"Partition renaming is not supported"
]
},
"_LEGACY_ERROR_TEMP_3144" : {
"message" : [
"Partition truncate is not supported"
]
},
"_LEGACY_ERROR_TEMP_3145" : {
"message" : [
"Partitions truncate is not supported"
]
},
"_LEGACY_ERROR_TEMP_3147" : {
"message" : [
"<description>: Batch scan are not supported"
]
},
"_LEGACY_ERROR_TEMP_3148" : {
"message" : [
"<description>: Micro-batch scan are not supported"
]
},
"_LEGACY_ERROR_TEMP_3149" : {
"message" : [
"<description>: Continuous scan are not supported"
]
},
"_LEGACY_ERROR_TEMP_3150" : {
"message" : [
"Cannot create columnar reader."
]
},
"_LEGACY_ERROR_TEMP_3152" : {
"message" : [
"Datatype not supported <dataType>"
]
},
"_LEGACY_ERROR_TEMP_3155" : {
"message" : [
"Datatype not supported <dataType>"
]
},
"_LEGACY_ERROR_TEMP_3160" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3161" : {
"message" : [
"Uploading artifact file to local file system destination path is not supported."
]
},
"_LEGACY_ERROR_TEMP_3162" : {
"message" : [
"Unsupported physical type <type>."
]
},
"_LEGACY_ERROR_TEMP_3163" : {
"message" : [
"Unsupported number of children: <num>."
]
},
"_LEGACY_ERROR_TEMP_3165" : {
"message" : [
"Cannot merge <classA> with <classB>"
]
},
"_LEGACY_ERROR_TEMP_3166" : {
"message" : [
"latestOffset(Offset, ReadLimit) should be called instead of this method"
]
},
"_LEGACY_ERROR_TEMP_3167" : {
"message" : [
"continuous mode is not supported!"
]
},
"_LEGACY_ERROR_TEMP_3169" : {
"message" : [
"AcceptsLatestSeenOffset is not supported with DSv1 streaming source: <unsupportedSources>"
]
},
"_LEGACY_ERROR_TEMP_3170" : {
"message" : [
"SortAggregate code-gen does not support grouping keys"
]
},
"_LEGACY_ERROR_TEMP_3171" : {
"message" : [
"Number of nulls not set for Parquet file <filePath>. Set SQLConf <config> to false and execute again."
]
},
"_LEGACY_ERROR_TEMP_3172" : {
"message" : [
"No min/max found for Parquet file <filePath>. Set SQLConf <config> to false and execute again."
]
},
"_LEGACY_ERROR_TEMP_3173" : {
"message" : [
"Cannot specify 'USING index_type' in 'CREATE INDEX'"
]
},
"_LEGACY_ERROR_TEMP_3175" : {
"message" : [
"Index Type <v> is not supported. The supported Index Types are: <supportedIndexTypeList>"
]
},
"_LEGACY_ERROR_TEMP_3176" : {
"message" : [
"applyInPandasWithState is unsupported in batch query. Use applyInPandas instead."
]
},
"_LEGACY_ERROR_TEMP_3177" : {
"message" : [
"<class> does not support function: <funcName>"
]
},
"_LEGACY_ERROR_TEMP_3178" : {
"message" : [
"<class> does not support inverse distribution function: <funcName>"
]
},
"_LEGACY_ERROR_TEMP_3179" : {
"message" : [
"createIndex is not supported"
]
},
"_LEGACY_ERROR_TEMP_3180" : {
"message" : [
"indexExists is not supported"
]
},
"_LEGACY_ERROR_TEMP_3181" : {
"message" : [
"dropIndex is not supported"
]
},
"_LEGACY_ERROR_TEMP_3182" : {
"message" : [
"listIndexes is not supported"
]
},
"_LEGACY_ERROR_TEMP_3183" : {
"message" : [
"TableSample is not supported by this data source"
]
},
"_LEGACY_ERROR_TEMP_3184" : {
"message" : [
"<class> does not support aggregate function: <funcName> with DISTINCT"
]
},
"_LEGACY_ERROR_TEMP_3185" : {
"message" : [
"Schema evolution not supported."
]
},
"_LEGACY_ERROR_TEMP_3186" : {
"message" : [
"Boolean is not supported"
]
},
"_LEGACY_ERROR_TEMP_3187" : {
"message" : [
"only readInts is valid."
]
},
"_LEGACY_ERROR_TEMP_3188" : {
"message" : [
"only skipIntegers is valid"
]
},
"_LEGACY_ERROR_TEMP_3189" : {
"message" : [
"Unsupported encoding: <encoding>"
]
},
"_LEGACY_ERROR_TEMP_3190" : {
"message" : [
"RLE encoding is not supported for values of type: <typeName>"
]
},
"_LEGACY_ERROR_TEMP_3191" : {
"message" : [
"Dictionary encoding does not support String"
]
},
"_LEGACY_ERROR_TEMP_3192" : {
"message" : [
"Datatype not supported <dt>"
]
},
"_LEGACY_ERROR_TEMP_3198" : {
"message" : [
"Cannot grow BufferHolder by size <neededSize> because the size is negative"
]
},
"_LEGACY_ERROR_TEMP_3199" : {
"message" : [
"Cannot grow BufferHolder by size <neededSize> because the size after growing exceeds size limitation <arrayMax>"
]
},
"_LEGACY_ERROR_TEMP_3200" : {
"message" : [
"Read-ahead limit < 0"
]
},
"_LEGACY_ERROR_TEMP_3201" : {
"message" : [
"'note' is malformed in the expression [<exprName>]. It should start with a newline and 4 leading spaces; end with a newline and two spaces; however, got [<note>]."
]
},
"_LEGACY_ERROR_TEMP_3202" : {
"message" : [
"'group' is malformed in the expression [<exprName>]. It should be a value in <validGroups>; however, got <group>."
]
},
"_LEGACY_ERROR_TEMP_3203" : {
"message" : [
"'source' is malformed in the expression [<exprName>]. It should be a value in <validSources>; however, got [<source>]."
]
},
"_LEGACY_ERROR_TEMP_3204" : {
"message" : [
"'since' is malformed in the expression [<exprName>]. It should not start with a negative number; however, got [<since>]."
]
},
"_LEGACY_ERROR_TEMP_3205" : {
"message" : [
"'deprecated' is malformed in the expression [<exprName>]. It should start with a newline and 4 leading spaces; end with a newline and two spaces; however, got [<deprecated>]."
]
},
"_LEGACY_ERROR_TEMP_3206" : {
"message" : [
"<value> is not a boolean string."
]
},
"_LEGACY_ERROR_TEMP_3207" : {
"message" : [
"Unexpected V2 expression: <expr>"
]
},
"_LEGACY_ERROR_TEMP_3208" : {
"message" : [
"The number of fields (<numFields>) in the partition identifier is not equal to the partition schema length (<schemaLen>). The identifier might not refer to one partition."
]
},
"_LEGACY_ERROR_TEMP_3215" : {
"message" : [
"Expected a Boolean type expression in replaceNullWithFalse, but got the type <dataType> in <expr>."
]
},
"_LEGACY_ERROR_TEMP_3218" : {
"message" : [
"Must be 2 children: <others>"
]
},
"_LEGACY_ERROR_TEMP_3219" : {
"message" : [
"The value (<other>) of the type (<otherClass>) cannot be converted to the <dataType> type."
]
},
"_LEGACY_ERROR_TEMP_3220" : {
"message" : [
"The value (<other>) of the type (<otherClass>) cannot be converted to an array of <elementType>"
]
},
"_LEGACY_ERROR_TEMP_3221" : {
"message" : [
"The value (<other>) of the type (<otherClass>) cannot be converted to a map type with key type (<keyType>) and value type (<valueType>)"
]
},
"_LEGACY_ERROR_TEMP_3222" : {
"message" : [
"Only literals are allowed in the partition spec, but got <expr>"
]
},
"_LEGACY_ERROR_TEMP_3223" : {
"message" : [
"Cannot find field: <name> in <dataType>"
]
},
"_LEGACY_ERROR_TEMP_3224" : {
"message" : [
"Cannot delete array element"
]
},
"_LEGACY_ERROR_TEMP_3225" : {
"message" : [
"Cannot delete map value"
]
},
"_LEGACY_ERROR_TEMP_3226" : {
"message" : [
"Cannot delete map key"
]
},
"_LEGACY_ERROR_TEMP_3228" : {
"message" : [
"AFTER column not found: <afterCol>"
]
},
"_LEGACY_ERROR_TEMP_3229" : {
"message" : [
"Not a struct: <name>"
]
},
"_LEGACY_ERROR_TEMP_3230" : {
"message" : [
"Field not found: <name>"
]
},
"_LEGACY_ERROR_TEMP_3231" : {
"message" : [
"Intervals greater than a month is not supported (<interval>)."
]
},
"_LEGACY_ERROR_TEMP_3232" : {
"message" : [
"Unknown EvalMode value: <other>"
]
},
"_LEGACY_ERROR_TEMP_3233" : {
"message" : [
"cannot generate code for unsupported type: <dataType>"
]
},
"_LEGACY_ERROR_TEMP_3235" : {
"message" : [
"The numbers of zipped arrays and field names should be the same"
]
},
"_LEGACY_ERROR_TEMP_3238" : {
"message" : [
"Failed to convert value <v> (class of <class>) in type <dt> to XML."
]
},
"_LEGACY_ERROR_TEMP_3239" : {
"message" : [
"Failed to parse data with unexpected event <e>"
]
},
"_LEGACY_ERROR_TEMP_3240" : {
"message" : [
"Failed to parse a value for data type <dt> with event <e>"
]
},
"_LEGACY_ERROR_TEMP_3241" : {
"message" : [
"<msg>"
]
},
"_LEGACY_ERROR_TEMP_3242" : {
"message" : [
"sequence step must be an <intervalType> of day granularity if start and end values are dates"
]
},
"_LEGACY_ERROR_TEMP_3243" : {
"message" : [
"Illegal sequence boundaries: <start> to <stop> by <step>"
]
},
"_LEGACY_ERROR_TEMP_3244" : {
"message" : [
"Unsupported type: <castType>"
]
},
"_LEGACY_ERROR_TEMP_3245" : {
"message" : [
"For input string: <s>"
]
},
"_LEGACY_ERROR_TEMP_3246" : {
"message" : [
"Failed to parse a value for data type <dataType>."
]
},
"_LEGACY_ERROR_TEMP_3260" : {
"message" : [
"'<s>' is an invalid timestamp"
]
},
"_LEGACY_ERROR_TEMP_3262" : {
"message" : [
"Doesn't support month or year interval: <interval>"
]
},
"_LEGACY_ERROR_USER_RAISED_EXCEPTION" : {
"message" : [
"<errorMessage>"
],
"sqlState" : "P0001"
}
}