Created
November 1, 2018 09:43
-
-
Save oivoodoo/d34b245d02e98592eff6a83cfbc401e3 to your computer and use it in GitHub Desktop.
log4j.properties
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Spark Streaming Logging Configuration
# See also: http://spark.apache.org/docs/2.0.2/running-on-yarn.html#debugging-your-application
# Root logger: INFO and above is routed to both file appenders defined below.
log4j.rootLogger=INFO, stdout, stderr
# Write all ERROR-and-above logs to the standard Spark container stderr file.
# NOTE: org.apache.log4j.RollingFileAppender rolls by size via maxFileSize /
# maxBackupIndex only. The RollingPolicy/TriggeringPolicy keys previously set
# here belong to the log4j-extras org.apache.log4j.rolling.RollingFileAppender
# and were silently ignored by this appender class, so they are removed.
log4j.appender.stderr=org.apache.log4j.RollingFileAppender
log4j.appender.stderr.append=true
log4j.appender.stderr.file=${spark.yarn.app.container.log.dir}/stderr
log4j.appender.stderr.threshold=ERROR
log4j.appender.stderr.layout=org.apache.log4j.PatternLayout
log4j.appender.stderr.layout.ConversionPattern=%d %p %c %m %n
# 50 MiB per file, keep up to 10 rolled backups
log4j.appender.stderr.maxFileSize=52428800
log4j.appender.stderr.maxBackupIndex=10
log4j.appender.stderr.encoding=UTF-8
# Write application logs (INFO and above) to the container stdout file.
# NOTE: RollingPolicy/TriggeringPolicy keys were removed — they are only
# understood by the log4j-extras rolling appender, not by
# org.apache.log4j.RollingFileAppender, which rolls by size via maxFileSize.
log4j.appender.stdout=org.apache.log4j.RollingFileAppender
log4j.appender.stdout.append=true
log4j.appender.stdout.file=${spark.yarn.app.container.log.dir}/stdout
log4j.appender.stdout.threshold=INFO
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p %c %m %n
# 50 MiB per file, keep up to 10 rolled backups
log4j.appender.stdout.maxFileSize=52428800
log4j.appender.stdout.maxBackupIndex=10
log4j.appender.stdout.encoding=UTF-8
# Dedicated INFO-and-above file for the datalake ETL namespace (see the
# log4j.logger.com.blastworksinc.spark.etl.datalake entry below).
# NOTE: RollingPolicy/TriggeringPolicy keys were removed — they are only
# understood by the log4j-extras rolling appender; this class rolls by size.
log4j.appender.datalakestdout=org.apache.log4j.RollingFileAppender
log4j.appender.datalakestdout.append=true
log4j.appender.datalakestdout.file=${spark.yarn.app.container.log.dir}/datalake.stdout
log4j.appender.datalakestdout.threshold=INFO
log4j.appender.datalakestdout.layout=org.apache.log4j.PatternLayout
log4j.appender.datalakestdout.layout.ConversionPattern=%d %p %c %m %n
# 50 MiB per file, keep up to 10 rolled backups
log4j.appender.datalakestdout.maxFileSize=52428800
log4j.appender.datalakestdout.maxBackupIndex=10
log4j.appender.datalakestdout.encoding=UTF-8
# Dedicated ERROR-and-above file for the datalake ETL namespace.
# NOTE: RollingPolicy/TriggeringPolicy keys were removed — they are only
# understood by the log4j-extras rolling appender; this class rolls by size.
log4j.appender.datalakestderr=org.apache.log4j.RollingFileAppender
log4j.appender.datalakestderr.append=true
log4j.appender.datalakestderr.file=${spark.yarn.app.container.log.dir}/datalake.stderr
log4j.appender.datalakestderr.threshold=ERROR
log4j.appender.datalakestderr.layout=org.apache.log4j.PatternLayout
log4j.appender.datalakestderr.layout.ConversionPattern=%d %p %c %m %n
# 50 MiB per file, keep up to 10 rolled backups
log4j.appender.datalakestderr.maxFileSize=52428800
log4j.appender.datalakestderr.maxBackupIndex=10
log4j.appender.datalakestderr.encoding=UTF-8
# application namespace configuration
# additivity=false stops datalake events from also propagating to the root
# logger's stdout/stderr appenders, which would write every record twice.
log4j.logger.com.blastworksinc.spark.etl.datalake=INFO, datalakestdout, datalakestderr
log4j.additivity.com.blastworksinc.spark.etl.datalake=false
# Set the default spark-shell log level to WARN. When running the spark-shell, the
# log level for this class is used to overwrite the root logger's log level, so that
# the user can have different defaults for the shell and regular Spark apps.
log4j.logger.org.apache.spark.repl.Main=WARN
# Settings to quiet third party logs that are too verbose
log4j.logger.org.spark_project.jetty=WARN
log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR
log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
log4j.logger.org.apache.parquet=ERROR
log4j.logger.parquet=ERROR
# SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment