#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Set everything to be logged to the console
rootLogger.level = debug
rootLogger.appenderRef.stdout.ref = console

# In the pattern layout configuration below, we specify an explicit `%ex` conversion
# pattern for logging Throwables. If this was omitted, then (by default) Log4J would
# implicitly add an `%xEx` conversion pattern which logs stacktraces with additional
# class packaging information. That extra information can sometimes add a substantial
# performance overhead, so we disable it in our default logging config.
# For more information, see SPARK-39361.
appender.console.type = Console
appender.console.name = console
appender.console.target = SYSTEM_ERR
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n%ex

# Set the default spark-shell/spark-sql log level to WARN. When running the
# spark-shell/spark-sql, the log level for these classes is used to overwrite
# the root logger's log level, so that the user can have different defaults
# for the shell and regular Spark apps.
logger.repl.name = org.apache.spark.repl.Main
logger.repl.level = warn

logger.thriftserver.name = org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver
logger.thriftserver.level = warn

# Settings to quiet third party logs that are too verbose
logger.jetty1.name = org.sparkproject.jetty
logger.jetty1.level = warn
logger.jetty2.name = org.sparkproject.jetty.util.component.AbstractLifeCycle
logger.jetty2.level = error
logger.replexprTyper.name = org.apache.spark.repl.SparkIMain$exprTyper
logger.replexprTyper.level = info
logger.replSparkILoopInterpreter.name = org.apache.spark.repl.SparkILoop$SparkILoopInterpreter
logger.replSparkILoopInterpreter.level = info
logger.parquet1.name = org.apache.parquet
logger.parquet1.level = error
logger.parquet2.name = parquet
logger.parquet2.level = error

# SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
logger.RetryingHMSHandler.name = org.apache.hadoop.hive.metastore.RetryingHMSHandler
logger.RetryingHMSHandler.level = fatal
logger.FunctionRegistry.name = org.apache.hadoop.hive.ql.exec.FunctionRegistry
logger.FunctionRegistry.level = error

# For deploying Spark ThriftServer
# SPARK-34128: Suppress undesirable TTransportException warnings involved in THRIFT-4805
appender.console.filter.1.type = RegexFilter
appender.console.filter.1.regex = .*Thrift error occurred during processing of message.*
appender.console.filter.1.onMatch = deny
appender.console.filter.1.onMismatch = neutral