====
---- QUERY
CREATE TABLE iceberg_test
STORED AS ICEBERG;
---- CATCH
AnalysisException: Table requires at least 1 column for managed iceberg table.
====
---- QUERY
CREATE TABLE iceberg_test(
level STRING
)
PARTITION BY SPEC
(
level IDENTITY,
event_time HOUR
)
STORED AS ICEBERG;
---- CATCH
AnalysisException: Cannot find source column: event_time
====
---- QUERY
CREATE TABLE non_iceberg_table_with_spec (i INT)
PARTITION BY SPEC (i identity);
---- CATCH
AnalysisException: PARTITION BY SPEC is only valid for Iceberg tables.
====
---- QUERY
CREATE TABLE iceberg_table_hadoop_tables(
level STRING
)
STORED AS ICEBERG
TBLPROPERTIES('iceberg.catalog'='hadoop.tables');
====
---- QUERY
# iceberg_non_partitioned is not partitioned
SHOW PARTITIONS functional_parquet.iceberg_non_partitioned
---- CATCH
AnalysisException: Table is not partitioned: functional_parquet.iceberg_non_partitioned
====
---- QUERY
CREATE TABLE iceberg_table_hadoop_tables_cat_loc(i INT)
STORED AS ICEBERG
TBLPROPERTIES('iceberg.catalog'='hadoop.tables',
'iceberg.catalog_location'='/test-warehouse/cat_loc')
---- CATCH
iceberg.catalog_location cannot be set for Iceberg table stored in hadoop.tables
====
---- QUERY
CREATE TABLE iceberg_table_hadoop_catalog(
level STRING
)
STORED AS ICEBERG
LOCATION '/test-warehouse/$DATABASE/hadoop_catalog_test/iceberg_test'
TBLPROPERTIES('iceberg.catalog'='hadoop.catalog');
---- CATCH
AnalysisException: Location cannot be set for Iceberg table with 'hadoop.catalog'.
====
---- QUERY
CREATE TABLE iceberg_table_hadoop_catalog(
level STRING
)
STORED AS ICEBERG
TBLPROPERTIES('iceberg.catalog'='hadoop.catalog');
---- CATCH
AnalysisException: Table property 'iceberg.catalog_location' is necessary for Iceberg table with 'hadoop.catalog'.
====
---- QUERY
CREATE EXTERNAL TABLE iceberg_external_table_hadoop_catalog
STORED AS ICEBERG
LOCATION '/test-warehouse/$DATABASE/hadoop_catalog_test/iceberg_test'
TBLPROPERTIES('iceberg.catalog'='hadoop.catalog',
'iceberg.catalog_location'='/test-warehouse/fake_table',
'iceberg.table_identifier'='fake_db.fake_table');
---- CATCH
AnalysisException: Location cannot be set for Iceberg table with 'hadoop.catalog'.
====
---- QUERY
CREATE EXTERNAL TABLE iceberg_table_hadoop_catalog
STORED AS ICEBERG
TBLPROPERTIES('iceberg.catalog'='hadoop.catalog',
'iceberg.table_identifier'='fake_db.fake_table');
---- CATCH
AnalysisException: Table property 'iceberg.catalog_location' is necessary for Iceberg table with 'hadoop.catalog'.
====
---- QUERY
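# The referenced Iceberg table does not exist, so loading fails with TableLoadingException.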
CREATE EXTERNAL TABLE fake_iceberg_table_hadoop_catalog
STORED AS ICEBERG
TBLPROPERTIES('iceberg.catalog'='hadoop.catalog',
'iceberg.catalog_location'='/test-warehouse/fake_table',
'iceberg.table_identifier'='fake_db.fake_table');
SHOW CREATE TABLE fake_iceberg_table_hadoop_catalog;
---- CATCH
row_regex:.*CAUSED BY: TableLoadingException: Table does not exist: fake_db.fake_table*
====
---- QUERY
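# INSERT OVERWRITE is not allowed on Iceberg tables that use BUCKET partitioning.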
CREATE TABLE iceberg_overwrite_bucket (i int)
PARTITION BY SPEC (i bucket 3)
STORED AS ICEBERG
TBLPROPERTIES('iceberg.catalog'='hadoop.tables');
INSERT OVERWRITE iceberg_overwrite_bucket VALUES (1), (2), (3);
---- CATCH
AnalysisException: The Iceberg table has BUCKET partitioning.
====
---- QUERY
CREATE TABLE iceberg_hive_cat_with_cat_location (i int)
STORED AS ICEBERG
TBLPROPERTIES('iceberg.catalog'='hive.catalog',
'iceberg.catalog_location'='/test-warehouse/catalog_loc')
---- CATCH
iceberg.catalog_location cannot be set for Iceberg table stored in hive.catalog
====
---- QUERY
CREATE TABLE iceberg_hadoop_tables_with_metadata_location (i int)
STORED AS ICEBERG
TBLPROPERTIES('iceberg.catalog'='hadoop.tables',
'metadata_location'='/test-warehouse/catalog/metadata_loc')
---- CATCH
metadata_location cannot be set for Iceberg tables
====
---- QUERY
CREATE TABLE iceberg_hadoop_cat_with_metadata_location (i int)
STORED AS ICEBERG
TBLPROPERTIES('iceberg.catalog'='hadoop.catalog',
'iceberg.catalog_location'='/test-warehouse/catalog',
'metadata_location'='/test-warehouse/catalog/metadata_loc')
---- CATCH
metadata_location cannot be set for Iceberg tables
====
---- QUERY
CREATE TABLE iceberg_hive_cat_with_metadata_location (i int)
STORED AS ICEBERG
TBLPROPERTIES('iceberg.catalog'='hive.catalog',
'metadata_location'='/test-warehouse/catalog/metadata_loc')
---- CATCH
metadata_location cannot be set for Iceberg tables
====
---- QUERY
CREATE TABLE iceberg_partitioned_insert(
level STRING,
event_time TIMESTAMP
)
PARTITION BY SPEC
(
level IDENTITY
)
STORED AS ICEBERG
TBLPROPERTIES('iceberg.catalog'='hadoop.tables');
---- RESULTS
'Table has been created.'
====
---- QUERY
INSERT INTO iceberg_partitioned_insert PARTITION(level='level') VALUES(now());
---- CATCH
Static partitioning is not supported for Iceberg tables.
====
---- QUERY
CREATE TABLE all_cols_needed_for_insert (i int, j int, k int)
partition by spec (j identity, k identity)
stored as iceberg;
---- RESULTS
'Table has been created.'
====
---- QUERY
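# The VALUES clause must supply a value for every column, including the partition columns.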
insert into all_cols_needed_for_insert values (1);
---- CATCH
has more columns (3) than the SELECT / VALUES clause returns
====
---- QUERY
ALTER TABLE iceberg_table_hadoop_tables RENAME TO iceberg_table_hadoop_tables_new;
---- CATCH
UnsupportedOperationException: Cannot rename Iceberg tables that use 'hadoop.tables' as catalog.
====
---- QUERY
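# This table is reused by the 'metadata_location' ALTER TABLE test below.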
CREATE TABLE iceberg_table_hive_catalog (level STRING)
STORED AS ICEBERG
TBLPROPERTIES('iceberg.catalog'='hive.catalog')
====
---- QUERY
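# Renaming is not supported for tables in 'hadoop.catalog'. The table created here is
# reused by the ALTER TABLE tests below.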
CREATE TABLE iceberg_table_hadoop_catalog(
level STRING,
event_time TIMESTAMP
)
STORED AS ICEBERG
TBLPROPERTIES('iceberg.catalog'='hadoop.catalog',
'iceberg.catalog_location'='/$DATABASE/hadoop_catalog_test');
ALTER TABLE iceberg_table_hadoop_catalog RENAME TO iceberg_table_hadoop_catalog_new;
---- CATCH
UnsupportedOperationException: Cannot rename Iceberg tables that use 'hadoop.catalog' as catalog.
====
---- QUERY
ALTER TABLE iceberg_table_hadoop_catalog set TBLPROPERTIES('iceberg.catalog'='hadoop.tables');
---- CATCH
AnalysisException: Changing the 'iceberg.catalog' table property is not supported for Iceberg table.
====
---- QUERY
ALTER TABLE iceberg_table_hadoop_catalog set TBLPROPERTIES('iceberg.catalog_location'='/fake_location');
---- CATCH
AnalysisException: Changing the 'iceberg.catalog_location' table property is not supported for Iceberg table.
====
---- QUERY
ALTER TABLE iceberg_table_hadoop_catalog set TBLPROPERTIES('iceberg.table_identifier'='fake_db.fake_table');
---- CATCH
AnalysisException: Changing the 'iceberg.table_identifier' table property is not supported for Iceberg table.
====
---- QUERY
ALTER TABLE iceberg_table_hadoop_catalog set TBLPROPERTIES('metadata_location'='/test-warehouse/metadata_loc');
---- CATCH
AnalysisException: Changing the 'metadata_location' table property is not supported for Iceberg table.
====
---- QUERY
ALTER TABLE iceberg_table_hive_catalog set TBLPROPERTIES('metadata_location'='/test-warehouse/metadata_loc');
---- CATCH
AnalysisException: Changing the 'metadata_location' table property is not supported for Iceberg table.
====
---- QUERY
ALTER TABLE iceberg_table_hadoop_catalog set FILEFORMAT PARQUET;
---- CATCH
AnalysisException: ALTER TABLE SET FILEFORMAT is not supported on Iceberg tables: $DATABASE.iceberg_table_hadoop_catalog
====
---- QUERY
ALTER TABLE iceberg_table_hadoop_catalog SET ROW FORMAT DELIMITED FIELDS TERMINATED BY ',';
---- CATCH
AnalysisException: ALTER TABLE SET ROWFORMAT is not supported on Iceberg tables: $DATABASE.iceberg_table_hadoop_catalog
====
---- QUERY
ALTER TABLE iceberg_table_hadoop_catalog SET LOCATION '/fake_location';
---- CATCH
AnalysisException: ALTER TABLE SET LOCATION is not supported on Iceberg tables: $DATABASE.iceberg_table_hadoop_catalog
====
---- QUERY
ALTER TABLE iceberg_table_hadoop_catalog ADD PARTITION(fake_col='fake_value');
---- CATCH
AnalysisException: ALTER TABLE ADD PARTITION is not supported for Iceberg tables: $DATABASE.iceberg_table_hadoop_catalog
====
---- QUERY
ALTER TABLE iceberg_table_hadoop_catalog DROP PARTITION(fake_col='fake_value');
---- CATCH
AnalysisException: ALTER TABLE DROP PARTITION is not supported for Iceberg tables: $DATABASE.iceberg_table_hadoop_catalog
====
---- QUERY
ALTER TABLE iceberg_table_hadoop_catalog RECOVER PARTITIONS;
---- CATCH
AnalysisException: ALTER TABLE RECOVER PARTITIONS is not supported on Iceberg tables: $DATABASE.iceberg_table_hadoop_catalog
====
---- QUERY
ALTER TABLE iceberg_table_hadoop_catalog DROP COLUMN level;
---- CATCH
UnsupportedOperationException: Unsupported ALTER TABLE operation for Iceberg tables: DROP_COLUMN
====
---- QUERY
ALTER TABLE iceberg_table_hadoop_catalog CHANGE COLUMN level level1 STRING;
---- CATCH
UnsupportedOperationException: Unsupported ALTER TABLE operation for Iceberg tables: ALTER_COLUMN
====
---- QUERY
ALTER TABLE iceberg_table_hadoop_catalog REPLACE COLUMNS(level INT, register_time DATE);
---- CATCH
UnsupportedOperationException: Unsupported ALTER TABLE operation for Iceberg tables: REPLACE_COLUMNS
====
---- QUERY
CREATE TABLE iceberg_transactional(i int)
STORED AS ICEBERG
tblproperties('iceberg.catalog'='hadoop.tables', 'transactional'='true');
---- CATCH
Iceberg tables cannot have Hive ACID table properties.
====
---- QUERY
CREATE TABLE iceberg_insert_only(i int)
STORED AS ICEBERG
tblproperties('iceberg.catalog'='hadoop.catalog',
'iceberg.catalog_location'='/$EXTERNAL_WAREHOUSE_DIR/specified_location',
'transactional'='true', 'transactional_properties'='insert_only');
---- CATCH
Iceberg tables cannot have Hive ACID table properties.
====
---- QUERY
DESCRIBE HISTORY functional_parquet.emptytable;
---- CATCH
DESCRIBE HISTORY must specify an Iceberg table: functional_parquet.emptytable
====
---- QUERY
# CHAR is not supported.
CREATE TABLE chars_formats (cs CHAR(5))
STORED AS iceberg;
---- CATCH
Type CHAR(5) is not supported in Iceberg
====
---- QUERY
# VARCHAR is not supported.
CREATE TABLE chars_formats (vc VARCHAR(100))
STORED AS iceberg;
---- CATCH
Type VARCHAR(100) is not supported in Iceberg
====
---- QUERY
# Impala cannot write UTC normalized timestamps.
INSERT INTO functional_parquet.iceberg_non_partitioned
VALUES (1, "user", "action", now());
---- CATCH
AnalysisException: The Iceberg table has a TIMESTAMPTZ column that Impala cannot write.
====
---- QUERY
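# Kudu-style column options such as PRIMARY KEY are not allowed for Iceberg tables.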
CREATE TABLE iceberg_kudu_option(id int primary key)
STORED AS ICEBERG
---- CATCH
AnalysisException: Unsupported column options for file format 'ICEBERG': 'id INT PRIMARY KEY'
====
---- QUERY
# PARTITIONED BY and PARTITION BY SPEC are not allowed in the same statement.
CREATE TABLE iceberg_part_spec_part (i INT)
PARTITIONED BY (p INT)
PARTITION BY SPEC (i TRUNCATE 10)
STORED AS ICEBERG;
---- CATCH
Syntax error in line
====
---- QUERY
# PARTITION BY SPEC and PARTITIONED BY are not allowed in the same statement.
CREATE TABLE iceberg_part_part_spec (i INT)
PARTITION BY SPEC (i TRUNCATE 10)
PARTITIONED BY (p INT)
STORED AS ICEBERG;
---- CATCH
Syntax error in line
====
---- QUERY
CREATE TABLE iceberg_wrong_fileformat (i int)
STORED AS ICEBERG
TBLPROPERTIES('iceberg.file_format'='or');
---- CATCH
Invalid fileformat for Iceberg table: or
====
---- QUERY
CREATE TABLE iceberg_set_wrong_fileformat (i int)
STORED AS ICEBERG;
ALTER TABLE iceberg_set_wrong_fileformat SET TBLPROPERTIES ('iceberg.file_format'='parq');
---- CATCH
Invalid fileformat for Iceberg table: parq
====