/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.iceberg.spark.extensions;

import java.util.Map;
import org.apache.iceberg.hive.HiveCatalog;
import org.apache.iceberg.hive.TestHiveMetastore;
import org.apache.iceberg.spark.SparkCatalogTestBase;
import org.apache.iceberg.spark.SparkTestBase;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.internal.SQLConf;
import org.junit.BeforeClass;

import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.METASTOREURIS;
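
/**
 * Base class for Iceberg SQL extension tests. It reuses the parameterized catalog setup from
 * {@link SparkCatalogTestBase} and, once per test class, starts an embedded Hive metastore and a
 * local SparkSession with {@link IcebergSparkSessionExtensions} installed.
 */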
public abstract class SparkExtensionsTestBase extends SparkCatalogTestBase {

  public SparkExtensionsTestBase(String catalogName, String implementation, Map<String, String> config) {
    super(catalogName, implementation, config);
  }

  @BeforeClass
  public static void startMetastoreAndSpark() {
    // Start an embedded Hive metastore and keep a handle to its configuration so the
    // SparkSession below can be pointed at the same metastore URIs.
    SparkTestBase.metastore = new TestHiveMetastore();
    metastore.start();
    SparkTestBase.hiveConf = metastore.hiveConf();

    SparkTestBase.spark = SparkSession.builder()
        .master("local[2]")
        .config("spark.testing", "true")
        // use dynamic partition overwrite so INSERT OVERWRITE replaces only written partitions
        .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic")
        // register the Iceberg SQL extensions under test
        .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName())
        // point Spark's Hive support at the embedded metastore started above
        .config("spark.hadoop." + METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname))
        // keep shuffles small for local test runs
        .config("spark.sql.shuffle.partitions", "4")
        .enableHiveSupport()
        .getOrCreate();

    SparkTestBase.catalog = new HiveCatalog(spark.sessionState().newHadoopConf());
  }
}
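
// A minimal sketch of how a concrete test class would build on this base. The class name and
// test body are hypothetical; the constructor parameters are supplied per catalog configuration
// by the parameterized setup inherited from SparkCatalogTestBase:
//
//   public class TestExampleExtension extends SparkExtensionsTestBase {
//     public TestExampleExtension(String catalogName, String implementation, Map<String, String> config) {
//       super(catalogName, implementation, config);
//     }
//
//     @Test
//     public void testExtensionSql() {
//       // exercise extension-only SQL against the SparkSession configured above
//     }
//   }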