Docker container build-and-run setup, with foundations for adding Docker integration tests. Docker images are built with Hadoop 2.8.4, Hive 2.3.3, and Spark 2.3.1, and published to Docker Hub.
See the quickstart document for how to set up Docker and run the demo.
This commit is contained in:
committed by
vinoth chandar
parent
9710b5a3a6
commit
f3418e4718
73
docker/hoodie/hadoop/hive_base/conf/hive-log4j2.properties
Normal file
73
docker/hoodie/hadoop/hive_base/conf/hive-log4j2.properties
Normal file
@@ -0,0 +1,73 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

status = INFO
name = HiveLog4j2
packages = org.apache.hadoop.hive.ql.log

# list of properties
property.hive.log.level = INFO
property.hive.root.logger = DRFA
property.hive.log.dir = ${sys:java.io.tmpdir}/${sys:user.name}
property.hive.log.file = hive.log

# list of all appenders
appenders = console, DRFA

# console appender
appender.console.type = Console
appender.console.name = console
appender.console.target = SYSTEM_ERR
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n

# daily rolling file appender
appender.DRFA.type = RollingFile
appender.DRFA.name = DRFA
appender.DRFA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}
# Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI session
appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}
appender.DRFA.layout.type = PatternLayout
appender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
appender.DRFA.policies.type = Policies
appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy
appender.DRFA.policies.time.interval = 1
appender.DRFA.policies.time.modulate = true
appender.DRFA.strategy.type = DefaultRolloverStrategy
appender.DRFA.strategy.max = 30

# list of all loggers
loggers = NIOServerCnxn, ClientCnxnSocketNIO, DataNucleus, Datastore, JPOX

logger.NIOServerCnxn.name = org.apache.zookeeper.server.NIOServerCnxn
logger.NIOServerCnxn.level = WARN

logger.ClientCnxnSocketNIO.name = org.apache.zookeeper.ClientCnxnSocketNIO
logger.ClientCnxnSocketNIO.level = WARN

logger.DataNucleus.name = DataNucleus
logger.DataNucleus.level = ERROR

logger.Datastore.name = Datastore
logger.Datastore.level = ERROR

logger.JPOX.name = JPOX
logger.JPOX.level = ERROR

# root logger
rootLogger.level = ${sys:hive.log.level}
rootLogger.appenderRefs = root
rootLogger.appenderRef.root.ref = ${sys:hive.root.logger}
Reference in New Issue
Block a user