HCATALOG-583 e2e tests do not build
git-svn-id: https://svn.apache.org/repos/asf/incubator/hcatalog/trunk@1427292 13f79535-47bb-0310-9956-ffa450edef68
diff --git a/CHANGES.txt b/CHANGES.txt
index 17147e3..ff8a9c2 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -168,6 +168,8 @@
OPTIMIZATIONS
BUG FIXES
+ HCAT-583 e2e tests do not build (gates)
+
HCAT-577 HCatContext causes persistance of undesired jobConf parameters (khorgath via gates)
HCAT-584 Changes in HCAT-538 break Pig stores into non-partitioned tables (gates)
diff --git a/src/test/e2e/hcatalog/conf/default.conf b/src/test/e2e/hcatalog/conf/default.conf
index a2f32b6..703063f 100644
--- a/src/test/e2e/hcatalog/conf/default.conf
+++ b/src/test/e2e/hcatalog/conf/default.conf
@@ -56,6 +56,7 @@
, 'hcatlib' => "$ENV{'HCAT_HOME'}/share/hcatalog/lib"
, 'hcatconf' => "$ENV{'HCAT_HOME'}/etc/hcatalog"
, 'hcatbin' => "$ENV{'HCAT_HOME'}/bin/hcat"
+ , 'hcatcoredevlib' => "../../../../../core/build/lib/test"
#PIG
, 'pighome' => $ENV{'PIG_HOME'}
diff --git a/src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm b/src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm
index 55686fc..ac9b8ec 100644
--- a/src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm
+++ b/src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm
@@ -254,8 +254,8 @@
# Get all of the additional jars we'll need.
my $additionalJars = Util::getHBaseLibs($testCmd, $log); #hbase before hive for precedence over bundled hbase
- $additionalJars .= Util::getHiveLibs($testCmd, $log);
$additionalJars .= Util::getHCatLibs($testCmd, $log);
+ $additionalJars .= Util::getHiveLibs($testCmd, $log);
$testCmd->{'libjars'} = $additionalJars;
$testCmd->{'libjars'} =~ s/:/,/g;
my $hadoopcmd = Util::replaceParameters( $testCmd->{'hadoop'}, $outfile, $testCmd, $log );
diff --git a/src/test/e2e/hcatalog/drivers/Util.pm b/src/test/e2e/hcatalog/drivers/Util.pm
index fa4a662..40af6e6 100644
--- a/src/test/e2e/hcatalog/drivers/Util.pm
+++ b/src/test/e2e/hcatalog/drivers/Util.pm
@@ -412,10 +412,10 @@
/hive-.*\.jar$/ && do {
$cp .= $cfg->{'hivelib'} . '/' . $_ . ':';
};
- /libfb303.jar/ && do {
+ /libfb303-.*\.jar/ && do {
$cp .= $cfg->{'hivelib'} . '/' . $_ . ':';
};
- /libthrift.jar/ && do {
+ /libthrift-.*\.jar/ && do {
$cp .= $cfg->{'hivelib'} . '/' . $_ . ':';
};
/datanucleus-.*\.jar$/ && do {
@@ -474,7 +474,7 @@
opendir(LIB, $cfg->{'hcatshare'}) or die "Cannot open $cfg->{'hcatshare'}, $!\n";
my @jars = readdir(LIB);
foreach (@jars) {
- /hcatalog-[0-9].*\.jar$/ && do {
+ (/hcatalog-core-[0-9].*\.jar$/ || /hcatalog-pig-adapter-[0-9].*\.jar$/) && do {
$cp .= $cfg->{'hcatshare'} . '/' . $_ . ':';
};
}
@@ -487,6 +487,18 @@
};
}
closedir(LIB);
+
+ # Get required non-hcat jars that are not distributed with Hadoop or Hive
+ opendir(LIB, $cfg->{'hcatcoredevlib'}) or die "Cannot open $cfg->{'hcatcoredevlib'}, $!\n";
+ my @jars = readdir(LIB);
+ foreach (@jars) {
+ /guava.*\.jar$/ && do {
+ $cp .= $cfg->{'hcatcoredevlib'} . '/' . $_ . ':';
+ };
+ }
+ closedir(LIB);
+
+
return $cp;
}
diff --git a/src/test/e2e/hcatalog/tests/hive.conf b/src/test/e2e/hcatalog/tests/hive.conf
index eee33d1..2adee23 100644
--- a/src/test/e2e/hcatalog/tests/hive.conf
+++ b/src/test/e2e/hcatalog/tests/hive.conf
@@ -137,7 +137,7 @@
'num' => 4,
'sql' => q?
drop table if exists hive_read_4;
-create table hive_read_4 (name string, age int, gpa double) partitioned by (b string) row format delimited fields terminated by '\t' stored as textfile;
+create external table hive_read_4 (name string, age int, gpa double) partitioned by (b string) row format delimited fields terminated by '\t' stored as textfile;
alter table hive_read_4 add partition (b='1') location '/user/hcat/tests/data/studenttab10k';
alter table hive_read_4 set fileformat rcfile;
alter table hive_read_4 add partition (b='2') location '/user/hcat/tests/data/all100krc';
diff --git a/src/test/e2e/hcatalog/tests/pig.conf b/src/test/e2e/hcatalog/tests/pig.conf
index 89b0f1d..5b0ad9b 100644
--- a/src/test/e2e/hcatalog/tests/pig.conf
+++ b/src/test/e2e/hcatalog/tests/pig.conf
@@ -161,7 +161,7 @@
# A table with one partition in text and one in rc
'num' => 4
,'hcat_prep'=>q?drop table if exists pig_read_4;
-create table pig_read_4 (name string, age int, gpa double) partitioned by (b string) row format delimited fields terminated by '\t' stored as textfile;
+create external table pig_read_4 (name string, age int, gpa double) partitioned by (b string) row format delimited fields terminated by '\t' stored as textfile;
alter table pig_read_4 add partition (b='1') location '/user/hcat/tests/data/studenttab10k';
alter table pig_read_4 set fileformat rcfile;
alter table pig_read_4 add partition (b='2') location '/user/hcat/tests/data/all100krc';?
@@ -177,7 +177,7 @@
'num' => 5
,'hcat_prep'=>q?create database if not exists pig_db_1;
drop table if exists pig_db_1.pig_read_5;
-create table pig_db_1.pig_read_5 (name string, age int, gpa double) partitioned by (b string) row format delimited fields terminated by '\t' stored as textfile;
+create external table pig_db_1.pig_read_5 (name string, age int, gpa double) partitioned by (b string) row format delimited fields terminated by '\t' stored as textfile;
use pig_db_1;
alter table pig_read_5 add partition (b='1') location '/user/hcat/tests/data/studenttab10k';?
,'pig' => q\a = load 'pig_db_1.pig_read_5' using org.apache.hcatalog.pig.HCatLoader();
diff --git a/src/test/e2e/hcatalog/tools/generate/generate_data.pl b/src/test/e2e/hcatalog/tools/generate/generate_data.pl
index 7afc76d..e246340 100644
--- a/src/test/e2e/hcatalog/tools/generate/generate_data.pl
+++ b/src/test/e2e/hcatalog/tools/generate/generate_data.pl
@@ -330,7 +330,7 @@
sub findAllJars()
{
- my @files = <../../../../../build/ivy/lib/default/*.jar>;
+ my @files = <../../../../../core/build/lib/test/*.jar>;
my $classpath = "";
my $file = undef;
foreach $file (@files) {
diff --git a/src/test/e2e/hcatalog/udfs/java/build.xml b/src/test/e2e/hcatalog/udfs/java/build.xml
index d1348ed..50b9e4e 100644
--- a/src/test/e2e/hcatalog/udfs/java/build.xml
+++ b/src/test/e2e/hcatalog/udfs/java/build.xml
@@ -22,10 +22,15 @@
<property name="udf.jarfile" value="testudf.jar"/>
<property name="udfs.build.dir" value="${basedir}/build"/>
<property name="udfs.src.dir" value="${basedir}/org/"/>
+ <property name="hcatalog.base" value="../../../../../.."/>
+ <property name="hcatalog.core.build" value="${hcatalog.base}/core/build"/>
+ <property name="hcatalog.pig.build" value="${hcatalog.base}/hcatalog-pig-adapter/build"/>
<path id="udf-classpath">
- <fileset file="../../../../../../build/hcatalog/*.jar"/>
- <fileset file="../../../../../../build/ivy/lib/default/*.jar"/>
+ <fileset file="${hcatalog.core.build}/hcatalog-core-*.jar"/>
+ <fileset file="${hcatalog.core.build}/lib/compile/*.jar"/>
+ <fileset file="${hcatalog.pig.build}/hcatalog-pig-adapter-*.jar"/>
+ <fileset file="${hcatalog.pig.build}/lib/compile/*.jar"/>
</path>
<target name="init">