blob: 8c918014bc4090b68ba2e833d6be7b9216c5c8a0 [file] [log] [blame]
# Pig configuration file. All values can be overwritten by command line arguments.
# see bin/pig -help
# log4jconf log4j configuration file
# log4jconf=./conf/log4j.properties
# brief logging (no timestamps)
brief=false
# cluster name, the name of the Hadoop jobtracker. If no port is defined, port 50020 will be used.
#cluster
#debug level, INFO is default
debug=INFO
# a file that contains a Pig script
#file=
# load jarfile, colon separated
#jar=
#verbose: print all log messages to screen (default is to print only INFO and above to screen)
verbose=false
#exectype local|mapreduce, mapreduce is default
#exectype=mapreduce
# HOD related properties
#ssh.gateway
#hod.expect.root
#hod.expect.uselatest
#hod.command
#hod.config.dir
#hod.param
#Do not spill temp files smaller than this size (bytes)
pig.spill.size.threshold=5000000
#EXPERIMENT: Activate garbage collection when spilling a file bigger than this size (bytes)
#This should help reduce the number of files being spilled.
pig.spill.gc.activation.size=40000000
######################
# Everything below this line is Yahoo specific. Note that I've made
# (almost) no changes to the lines above to make merging in from Apache
# easier. Any values I don't want from above I override below.
#
# This file is configured for use with HOD on the production clusters. If you
# want to run pig with a static cluster you will need to remove everything
# below this line and set the cluster value (above) to the
# hostname and port of your job tracker.
exectype=mapreduce
hod.config.dir=/export/crawlspace/kryptonite/hod/current/conf
hod.server=local
cluster.domain=inktomisearch.com
log.file=
yinst.cluster=kryptonite