# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Name=Hive
name=hive
version=0.10.0-SNAPSHOT
year=2011
javac.debug=on
javac.version=1.6
javac.optimize=on
javac.deprecation=off
javac.args=
javac.args.warnings=
hadoop-0.20.version=0.20.2
hadoop-0.20S.version=1.0.0
hadoop-0.23.version=2.0.0-alpha
hadoop.version=${hadoop-0.20.version}
hadoop.security.version=${hadoop-0.20S.version}
# Used to determine which set of Hadoop artifacts we depend on.
# - 20: hadoop-core, hadoop-test
# - 23: hadoop-common, hadoop-mapreduce-*, etc.
hadoop.mr.rev=20
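# Illustrative only (assumes standard Ant -D property overrides): to build
# against the 0.23-line artifacts, override this on the command line, e.g.
#   ant package -Dhadoop.mr.rev=23
# The hadoop-*.version properties above may also need overriding to match a
# specific Hadoop release.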
build.dir.hive=${hive.root}/build
build.dir.hadoop=${build.dir.hive}/hadoopcore
hadoop.version.ant-internal=${hadoop.version}
hadoop.root.default=${build.dir.hadoop}/hadoop-${hadoop.version.ant-internal}
hadoop.root=${hadoop.root.default}
test.hadoop.bin.path=${hive.root}/testutils/hadoop
java.net.preferIPv4Stack=true
# Newer versions of Hadoop name the jars hadoop-{core,test}-VERSION instead of hadoop-VERSION-{core,test}.
# We add both styles to the classpath; whichever is present will be picked up.
hadoop.oldstyle-name.jar=${hadoop.root}/hadoop-${hadoop.version.ant-internal}-core.jar
hadoop.oldstyle-name.tools.jar=${hadoop.root}/hadoop-${hadoop.version.ant-internal}-tools.jar
hadoop.oldstyle-name.test.jar=${hadoop.root}/hadoop-${hadoop.version.ant-internal}-test.jar
hadoop.newstyle-name.jar=${hadoop.root}/hadoop-core-${hadoop.version.ant-internal}.jar
hadoop.newstyle-name.test.jar=${hadoop.root}/hadoop-test-${hadoop.version.ant-internal}.jar
hadoop.newstyle-name.tools.jar=${hadoop.root}/hadoop-tools-${hadoop.version.ant-internal}.jar
# The following are used for versions of Hadoop that ship as separate jars.
# They are ignored if not present.
hadoop.common.jar=${hadoop.root}/share/hadoop/common/hadoop-common-${hadoop.version.ant-internal}.jar
hadoop.common.test.jar=${hadoop.root}/share/hadoop/common/hadoop-common-${hadoop.version.ant-internal}-tests.jar
hadoop.hdfs.jar=${hadoop.root}/share/hadoop/hdfs/hadoop-hdfs-${hadoop.version.ant-internal}.jar
hadoop.hdfs.test.jar=${hadoop.root}/share/hadoop/hdfs/hadoop-hdfs-${hadoop.version.ant-internal}-tests.jar
hadoop.mapreduce.jar=${hadoop.root}/modules/hadoop-mapreduce-client-core-${hadoop.version.ant-internal}.jar
hadoop.mapreduce.test.jar=${hadoop.root}/hadoop-mapreduce-test-${hadoop.version.ant-internal}.jar
hadoop.mapreduce.tools.jar=${hadoop.root}/hadoop-mapreduce-tools-${hadoop.version.ant-internal}.jar
jetty.test.jar=${hadoop.root}/lib/jetty-5.1.4.jar
servlet.test.jar=${hadoop.root}/lib/servlet-api.jar
jasper.test.jar=${hadoop.root}/lib/jetty-ext/jasper-runtime.jar
jasperc.test.jar=${hadoop.root}/lib/jetty-ext/jasper-compiler.jar
jsp.test.jar=${hadoop.root}/lib/jetty-ext/jsp-api.jar
common.jar=${hadoop.root}/lib/commons-httpclient-3.0.1.jar
#
# Test Properties
#
# Cancel individual tests if they don't finish within the given time
# (in milliseconds). Ignored if fork is disabled. When multiple tests run
# inside the same JVM (see forkMode), the timeout applies to the total time
# used by all tests, not to each individual test.
test.junit.timeout=43200000
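# To override the timeout for a single run (illustrative; standard Ant -D
# property override, value in milliseconds):
#   ant test -Dtest.junit.timeout=7200000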
# Use this property to selectively disable tests from the command line:
# ant test -Dtest.junit.exclude="**/TestCliDriver.class"
# ant test -Dtest.junit.exclude="**/Test*CliDriver.class,**/TestPartitions.class"
test.junit.exclude=
#
# Ivy Properties
#
build.ivy.dir=${build.dir.hive}/ivy
build.ivy.lib.dir=${build.ivy.dir}/lib
build.ivy.report.dir=${build.ivy.dir}/report
build.ivy.maven.dir=${build.ivy.dir}/maven
ivy.conf.dir=${hive.root}/ivy
ivy.version=2.1.0
ivy.jar=${build.ivy.lib.dir}/ivy-${ivy.version}.jar
ivy.changingPattern=.*SNAPSHOT
ivy.publish.pattern=[artifact]-[revision].[ext]
ivy.artifact.retrieve.pattern=[conf]/[artifact]-[revision](-[classifier]).[ext]
ivysettings.xml=${ivy.conf.dir}/ivysettings.xml
ivyresolvelog=download-only
ivy.mvn.repo=http://repo2.maven.org/maven2
ivy_repo_url=${ivy.mvn.repo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar
hive.ivy.org=org.apache.hive
mvn.publish.repo=snapshots
mvn.jar.dir=${build.dir.hive}/maven/jars
mvn.pom.dir=${build.dir.hive}/maven/poms
mvn.license.dir=${build.dir.hive}/maven/licenses
mvn.deploy.id=apache.snapshots.https
mvn.deploy.url=https://repository.apache.org/content/repositories/snapshots
#
# unit test Properties
#
failonerror=false
#
# DataNucleus repository - needed to download jdo2-api-2.3-ec.jar
#
datanucleus.repo=http://www.datanucleus.org/downloads/maven2
#
# Eclipse Properties
#
# JVM arguments for Eclipse launch configurations
eclipse.launch.jvm.args=-Xms256m -Xmx1024m
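#
# Example override (illustrative; assumes the Eclipse config files are
# generated via the build's eclipse-files target):
#   ant eclipse-files -Declipse.launch.jvm.args="-Xms512m -Xmx2048m"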