  • Hive log configuration: hive-log4j2.properties

    Route each log level into its own file: the root logger below attaches three rolling file appenders, and each appenderRef carries its own threshold, so hive.log collects INFO and above, hive-warn.log collects WARN and above, and hive-error.log collects ERROR and above. A minimal verification sketch follows the config listing.

    # Licensed to the Apache Software Foundation (ASF) under one
    # or more contributor license agreements.  See the NOTICE file
    # distributed with this work for additional information
    # regarding copyright ownership.  The ASF licenses this file
    # to you under the Apache License, Version 2.0 (the
    # "License"); you may not use this file except in compliance
    # with the License.  You may obtain a copy of the License at
    #
    #     http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.
    
    status = INFO
    name = HiveLog4j2
    packages = org.apache.hadoop.hive.ql.log
    
    # list of properties
    property.hive.log.level = INFO
    property.hive.root.logger = DRFA
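    # note: ${sys:hive.root.logger} is only consumed by the commented-out rootLogger lines at the bottom of this file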
    property.hive.log.dir = /home/hive/logs
    property.hive.log.file = hive.log
    property.hive.perflogger.log.level = INFO
    
    
    # list of all appenders
    appenders = console, DRFA, DRFB, warn
    
    # console appender
    appender.console.type = Console
    appender.console.name = console
    appender.console.target = SYSTEM_ERR
    appender.console.layout.type = PatternLayout
    appender.console.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
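    # For reference, that pattern renders lines like the following (illustrative example, not real output):
    #   2020-02-04T10:15:30,123  INFO [main] ql.Driver: <message text>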
    
    # daily rolling file appender
    appender.DRFA.type = RollingRandomAccessFile
    appender.DRFA.name = DRFA
    appender.DRFA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}
    # Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI sessions
    appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}
    appender.DRFA.layout.type = PatternLayout
    appender.DRFA.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
    appender.DRFA.policies.type = Policies
    appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy
    appender.DRFA.policies.time.interval = 1
    appender.DRFA.policies.time.modulate = true
    appender.DRFA.strategy.type = DefaultRolloverStrategy
    appender.DRFA.strategy.max = 30
    
    # error-level rolling file appender
    appender.DRFB.type = RollingRandomAccessFile
    appender.DRFB.name = RollingFileError
    appender.DRFB.fileName = ${sys:hive.log.dir}/hive-error.log
    # Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI sessions
    appender.DRFB.filePattern = ${sys:hive.log.dir}/hive-error.%d{yyyy-MM-dd}.log
    appender.DRFB.layout.type = PatternLayout
    appender.DRFB.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
    appender.DRFB.policies.type = Policies
    appender.DRFB.policies.time.type = TimeBasedTriggeringPolicy
    appender.DRFB.policies.time.interval = 1
    appender.DRFB.policies.time.modulate = true
    appender.DRFB.strategy.type = DefaultRolloverStrategy
    appender.DRFB.strategy.max = 30
    
    
    # warn-level rolling file appender
    appender.warn.type = RollingRandomAccessFile
    appender.warn.name = RollingFileWARN
    appender.warn.fileName = ${sys:hive.log.dir}/hive-warn.log
    # Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI sessions
    appender.warn.filePattern = ${sys:hive.log.dir}/hive-warn.%d{yyyy-MM-dd}.log
    appender.warn.layout.type = PatternLayout
    appender.warn.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
    appender.warn.policies.type = Policies
    appender.warn.policies.time.type = TimeBasedTriggeringPolicy
    appender.warn.policies.time.interval = 1
    appender.warn.policies.time.modulate = true
    appender.warn.strategy.type = DefaultRolloverStrategy
    appender.warn.strategy.max = 30
    
    
    # list of all loggers
    loggers = NIOServerCnxn, ClientCnxnSocketNIO, DataNucleus, Datastore, JPOX, PerfLogger, AmazonAws, ApacheHttp
    
    logger.NIOServerCnxn.name = org.apache.zookeeper.server.NIOServerCnxn
    logger.NIOServerCnxn.level = WARN
    
    logger.ClientCnxnSocketNIO.name = org.apache.zookeeper.ClientCnxnSocketNIO
    logger.ClientCnxnSocketNIO.level = WARN
    
    logger.DataNucleus.name = DataNucleus
    logger.DataNucleus.level = ERROR
    
    logger.Datastore.name = Datastore
    logger.Datastore.level = ERROR
    
    logger.JPOX.name = JPOX
    logger.JPOX.level = ERROR
    
    logger.AmazonAws.name = com.amazonaws
    logger.AmazonAws.level = INFO
    
    logger.ApacheHttp.name = org.apache.http
    logger.ApacheHttp.level = INFO
    
    logger.PerfLogger.name = org.apache.hadoop.hive.ql.log.PerfLogger
    logger.PerfLogger.level = ${sys:hive.perflogger.log.level}
    
    # root logger
    rootLogger.level = ${sys:hive.log.level}
    rootLogger.appenderRef.DRFA.ref = DRFA
    rootLogger.appenderRef.DRFA.level = INFO
    
    rootLogger.appenderRef.DRFB.ref = RollingFileError
    rootLogger.appenderRef.DRFB.level = ERROR
    
    rootLogger.appenderRef.warn.ref = RollingFileWARN
    rootLogger.appenderRef.warn.level = WARN
    #rootLogger.appenderRefs = root,warn
    #rootLogger.appenderRef.root.ref = ${sys:hive.root.logger}
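
    The key point of this layout is that every appenderRef on the root logger has its own threshold, so a single log event is delivered to each appender whose threshold it meets; the console appender is declared but not attached to the root logger, so nothing is written to it. The sketch below is a minimal standalone check of that routing, not part of the original config: it assumes log4j-api and log4j-core are on the classpath, that the file above is saved as hive-log4j2.properties in the working directory, and the class name and the /tmp/hive-logs directory are made up for the test.

    // LogRoutingCheck.java -- hypothetical test class, assumes log4j-api + log4j-core on the classpath
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class LogRoutingCheck {
        public static void main(String[] args) {
            // The ${sys:...} lookups in the config are resolved when Log4j2 loads it,
            // so all of these must be set before the first LogManager call.
            System.setProperty("log4j.configurationFile", "hive-log4j2.properties");
            System.setProperty("hive.log.dir", "/tmp/hive-logs");   // hypothetical test directory
            System.setProperty("hive.log.file", "hive.log");
            System.setProperty("hive.log.level", "INFO");
            System.setProperty("hive.perflogger.log.level", "INFO");

            Logger log = LogManager.getLogger(LogRoutingCheck.class);
            log.info("INFO event  -> hive.log only");
            log.warn("WARN event  -> hive.log and hive-warn.log");
            log.error("ERROR event -> hive.log, hive-warn.log and hive-error.log");
        }
    }

    After running it, hive-error.log under /tmp/hive-logs should contain only the ERROR line, hive-warn.log the WARN and ERROR lines, and hive.log all three, which is exactly the split the three appenderRef levels describe.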
  • Original post: https://www.cnblogs.com/cangshublogs/p/12263705.html