#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
spring.datasource.url = jdbc:postgresql://10.148.215.23:35432/quartz?autoReconnect=true&useSSL=false
spring.datasource.username = griffin
spring.datasource.password = 123456
spring.jpa.generate-ddl=true
spring.datasource.driver-class-name = org.postgresql.Driver
spring.jpa.show-sql = true
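# Note: the datasource above can point at any JDBC-compatible database reachable from the
# service; the commented MySQL lines below are an illustrative (assumed) alternative, not
# part of the shipped defaults.
# spring.datasource.url = jdbc:mysql://localhost:3306/quartz?autoReconnect=true&useSSL=false
# spring.datasource.driver-class-name = com.mysql.jdbc.Driver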
# Hive metastore
hive.metastore.uris = thrift://10.148.215.23:39083
hive.metastore.dbname = default
hive.hmshandler.retry.attempts = 15
hive.hmshandler.retry.interval = 2000ms
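# Note: the metastore is reached over Thrift; the stock Hive metastore port is 9083, so a
# non-default port is configured here.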
# Hive metadata cache eviction interval (900000 ms = 15 minutes)
cache.evict.hive.fixedRate.in.milliseconds = 900000
# Kafka schema registry
kafka.schema.registry.url = http://localhost:8081
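# Note: 8081 is the default listener port of the Confluent Schema Registry.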
# Update job instance state at regular intervals (60000 ms = 1 minute)
jobInstance.fixedDelay.in.milliseconds = 60000
# Job instance expiration time: 7 days (604800000 milliseconds). The time unit only supports milliseconds.
jobInstance.expired.milliseconds = 604800000
# Schedule the predicate job every 5 minutes and repeat at most 12 times
# Interval time units: s (second), m (minute), h (hour), d (day); only these four units are supported
predicate.job.interval = 5m
predicate.job.repeat.count = 12
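# With the values above, predicate checks are retried for at most 5m x 12 = 1 hour per triggered job.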
# external properties directory location
external.config.location =
# external BATCH or STREAMING env location
external.env.location =
# login strategy ("default" or "ldap")
login.strategy = default
# ldap
ldap.url = ldap://hostname:port
ldap.email = @example.com
ldap.searchBase = DC=org,DC=example
ldap.searchPattern = (sAMAccountName={0})
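# Note: standard LDAP ports are 389 (ldap) and 636 (ldaps), e.g. ldap://ldap.example.com:389
# (illustrative host); sAMAccountName in the search pattern is an Active Directory attribute.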
# hdfs default name
fs.defaultFS =
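# Illustrative (assumed) example: fs.defaultFS = hdfs://namenode-host:8020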
# elasticsearch
elasticsearch.host = 10.148.215.23
elasticsearch.port = 39200
elasticsearch.scheme = http
# elasticsearch.user = user
# elasticsearch.password = password
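# The host, port, and scheme above form the base URL http://10.148.215.23:39200; uncomment
# elasticsearch.user / elasticsearch.password if the cluster requires basic authentication
# (assumed usage of the commented keys above).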
# livy
livy.uri=http://10.148.215.23:38998/batches
livy.need.queue=false
livy.task.max.concurrent.count=20
livy.task.submit.interval.second=3
livy.task.appId.retry.count=3
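# livy.uri targets Livy's batch-session REST endpoint (POST /batches), used here to submit
# the Spark measure jobs.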
# yarn url
yarn.uri=http://10.148.215.23:38088
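# Note: yarn.uri typically points at the YARN ResourceManager web address (stock default port 8088).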
logging.file=logs/griffin-service.log