From cdd6d29c6706ba453515c14adfc0a851cade9f77 Mon Sep 17 00:00:00 2001
From: Sumedh Wale
Date: Wed, 9 Feb 2022 23:53:28 +0530
Subject: [PATCH] [SNAPPYDATA] remaining changes for log4j 2.x transition

- update remaining log4j 1.x usages
- change gradle builds to log4j 2.x
- update gradle builds to incorporate the pom.xml changes
- refactor gradle builds to move common sections into the base build.gradle
- upgrade to log4j 2.17.2
- change jetty version to 9.4.43.v20210629
- port remaining changes for the kafka 2.2.2 upgrade
- increase timeout in JavaTestUtils for parallel test runs
- fix YarnClusterSuite failures by forcing the old jetty version for tests
---
 build.gradle                                  | 118 +++++++++++++++++-
 common/network-common/build.gradle            |   2 +-
 common/network-yarn/build.gradle              |  29 +----
 common/unsafe/pom.xml                         |   4 -
 .../java/org/apache/spark/unsafe/Native.java  |  15 +--
 core/build.gradle                             |  48 ++-----
 .../org/apache/spark/internal/Logging.scala   |  16 ++-
 .../apache/spark/internal/LoggingSuite.scala  |   2 +-
 .../examples/mllib/LinearRegression.scala     |   3 +-
 .../spark/sql/kafka010/KafkaSourceSuite.scala |   4 +
 .../spark/sql/kafka010/KafkaTestUtils.scala   |  12 +-
 .../streaming/kafka010/KafkaTestUtils.scala   |  48 +++++--
 external/kafka-0-8-assembly/pom.xml           |  12 +-
 launcher/build.gradle                         |  19 +--
 pom.xml                                       |  20 +--
 .../spark/sql/catalyst/util/package.scala     |   7 --
 sql/hive/build.gradle                         |  60 +--------
 .../spark/streaming/JavaTestUtils.scala       |   3 +-
 yarn/build.gradle                             |  64 ++++------
 .../org/apache/spark/deploy/yarn/Client.scala |   4 +-
 .../spark/deploy/yarn/YarnAllocator.scala     |   1 -
 .../scheduler/cluster/YarnScheduler.scala     |   8 +-
 22 files changed, 241 insertions(+), 258 deletions(-)

diff --git a/build.gradle b/build.gradle
index fa2a9439afd69..b538774caed7a 100644
--- a/build.gradle
+++ b/build.gradle
@@ -63,9 +63,10 @@ allprojects {
     protobufVersion = '3.6.1'
     jerseyVersion = '2.22.2'
     sunJerseyVersion = '1.19.4'
-    jettyVersion = '9.4.44.v20210927'
+    jettyVersion = '9.4.43.v20210629'
+    yarnTestJettyVersion = '9.3.24.v20180605' // only used by yarn tests
     jettyOldVersion = '6.1.26'
-    log4jVersion = '1.2.17'
+    log4j2Version = '2.17.2'
     slf4jVersion = '1.7.32'
     junitVersion = '4.12'
     mockitoVersion = '1.10.19'
@@ -148,6 +149,110 @@ allprojects {
     pegdownVersion = '1.6.0'
 
     shadePackageName = 'org.spark_project'
+
+    libraries = [
+      hadoop_client: dependencies.create("org.apache.hadoop:hadoop-client:${hadoopVersion}") {
+        exclude(group: 'asm', module: 'asm')
+        exclude(group: 'org.codehaus.jackson', module: 'jackson-core-asl')
+        exclude(group: 'org.codehaus.jackson', module: 'jackson-mapper-asl')
+        exclude(group: 'org.ow2.asm', module: 'asm')
+        exclude(group: 'org.jboss.netty', module: 'netty')
+        exclude(group: 'commons-logging', module: 'commons-logging')
+        exclude(group: 'org.mockito', module: 'mockito-all')
+        exclude(group: 'org.mortbay.jetty', module: 'servlet-api-2.5')
+        exclude(group: 'javax.servlet', module: 'servlet-api')
+        exclude(group: 'junit', module: 'junit')
+        exclude(group: 'com.google.guava', module: 'guava')
+        exclude(group: 'com.sun.jersey')
+        exclude(group: 'com.sun.jersey.jersey-test-framework')
+        exclude(group: 'com.sun.jersey.contribs')
+        exclude(group: 'io.netty', module: 'netty')
+        exclude(group: 'io.netty', module: 'netty-all')
+        exclude(group: 'org.apache.directory.server', module: 'apacheds-kerberos-codec')
+        exclude(group: 'log4j', module: 'log4j')
+        exclude(group: 'org.slf4j', module: 'slf4j-log4j12')
+      },
+      hive_exec: dependencies.create("org.spark-project.hive:hive-exec:${hiveVersion}") {
+        exclude(group: 'org.datanucleus', module: 'datanucleus-core')
+        exclude(group: 'org.spark-project.hive', module: 'hive-metastore')
+        exclude(group: 'org.spark-project.hive', module: 'hive-shims')
+        exclude(group: 'org.spark-project.hive', module: 'hive-ant')
+        exclude(group: 'org.spark-project.hive', module: 'spark-client')
+        exclude(group: 'org.apache.ant', module: 'ant')
+        exclude(group: 'com.esotericsoftware.kryo', module: 'kryo')
+        exclude(group: 'commons-codec', module: 'commons-codec')
+        exclude(group: 'commons-httpclient', module: 'commons-httpclient')
+        exclude(group: 'org.apache.avro', module: 'avro-mapred')
+        exclude(group: 'org.apache.calcite', module: 'calcite-core')
+        exclude(group: 'org.apache.curator', module: 'apache-curator')
+        exclude(group: 'org.apache.curator', module: 'curator-client')
+        exclude(group: 'org.apache.curator', module: 'curator-framework')
+        exclude(group: 'org.apache.thrift', module: 'libthrift')
+        exclude(group: 'org.apache.thrift', module: 'libfb303')
+        exclude(group: 'org.apache.zookeeper', module: 'zookeeper')
+        exclude(group: 'org.slf4j', module: 'slf4j-api')
+        exclude(group: 'org.slf4j', module: 'slf4j-log4j12')
+        exclude(group: 'log4j', module: 'log4j')
+        exclude(group: 'log4j', module: 'apache-log4j-extras')
+        exclude(group: 'commons-logging', module: 'commons-logging')
+        exclude(group: 'org.codehaus.groovy', module: 'groovy-all')
+        exclude(group: 'jline', module: 'jline')
+        exclude(group: 'org.json', module: 'json')
+      },
+      hive_meta: dependencies.create("org.spark-project.hive:hive-metastore:${hiveVersion}") {
+        exclude(group: 'org.datanucleus', module: 'datanucleus-core')
+        exclude(group: 'org.datanucleus', module: 'datanucleus-api-jdo')
+        exclude(group: 'org.datanucleus', module: 'datanucleus-rdbms')
+        exclude(group: 'org.spark-project.hive', module: 'hive-serde')
+        exclude(group: 'org.spark-project.hive', module: 'hive-shims')
+        exclude(group: 'org.apache.thrift', module: 'libfb303')
+        exclude(group: 'org.apache.thrift', module: 'libthrift')
+        exclude(group: 'javax.servlet', module: 'servlet-api')
+        exclude(group: 'com.google.guava', module: 'guava')
+        exclude(group: 'org.slf4j', module: 'slf4j-api')
+        exclude(group: 'org.slf4j', module: 'slf4j-log4j12')
+        exclude(group: 'log4j', module: 'log4j')
+        exclude(group: 'log4j', module: 'apache-log4j-extras')
+        exclude(group: 'org.apache.derby', module: 'derby')
+      },
+      avro_ipc: dependencies.create("org.apache.avro:avro-ipc:${avroVersion}") {
+        exclude(group: 'io.netty', module: 'netty')
+        exclude(group: 'org.mortbay.jetty', module: 'jetty')
+        exclude(group: 'org.mortbay.jetty', module: 'jetty-util')
+        exclude(group: 'org.mortbay.jetty', module: 'servlet-api')
+        exclude(group: 'org.apache.velocity', module: 'velocity')
+        exclude(group: 'log4j', module: 'log4j')
+        exclude(group: 'org.slf4j', module: 'slf4j-log4j12')
+      },
+      avro_mapred: dependencies.create("org.apache.avro:avro-mapred:${avroVersion}:hadoop2") {
+        exclude(group: 'io.netty', module: 'netty')
+        exclude(group: 'org.mortbay.jetty', module: 'jetty')
+        exclude(group: 'org.mortbay.jetty', module: 'jetty-util')
+        exclude(group: 'org.mortbay.jetty', module: 'servlet-api')
+        exclude(group: 'org.apache.velocity', module: 'velocity')
+        exclude(group: 'org.apache.avro', module: 'avro-ipc')
+      },
+      netty_all: dependencies.create("io.netty:netty-all:${nettyAllVersion}") {
+        exclude(group: 'io.netty', module: 'netty-codec-dns')
+        exclude(group: 'io.netty', module: 'netty-codec-haproxy')
+        exclude(group: 'io.netty', module: 'netty-codec-http')
+        exclude(group: 'io.netty', module: 'netty-codec-http2')
+        exclude(group: 'io.netty', module: 'netty-codec-memcache')
+        exclude(group: 'io.netty', module: 'netty-codec-mqtt')
+        exclude(group: 'io.netty', module: 'netty-codec-redis')
+        exclude(group: 'io.netty', module: 'netty-codec-smtp')
+        exclude(group: 'io.netty', module: 'netty-codec-socks')
+        exclude(group: 'io.netty', module: 'netty-codec-stomp')
+        exclude(group: 'io.netty', module: 'netty-codec-xml')
+        exclude(group: 'io.netty', module: 'netty-handler-proxy')
+        exclude(group: 'io.netty', module: 'netty-resolver-dns')
+        exclude(group: 'io.netty', module: 'netty-resolver-dns-classes-macos')
+        exclude(group: 'io.netty', module: 'netty-resolver-dns-native-macos')
+        exclude(group: 'io.netty', module: 'netty-transport-rxtx')
+        exclude(group: 'io.netty', module: 'netty-transport-sctp')
+        exclude(group: 'io.netty', module: 'netty-transport-udt')
+      }
+    ]
 }
 
 // default output directory like in sbt/maven
@@ -285,9 +390,11 @@ subprojects {
     compile 'org.scala-lang:scala-library:' + scalaVersion
     compile 'org.scala-lang:scala-reflect:' + scalaVersion
 
-    compile group: 'log4j', name:'log4j', version: log4jVersion
+    compile 'org.apache.logging.log4j:log4j-api:' + log4j2Version
+    compile 'org.apache.logging.log4j:log4j-core:' + log4j2Version
+    compile 'org.apache.logging.log4j:log4j-1.2-api:' + log4j2Version
     compile 'org.slf4j:slf4j-api:' + slf4jVersion
-    compile 'org.slf4j:slf4j-log4j12:' + slf4jVersion
+    compile 'org.apache.logging.log4j:log4j-slf4j-impl:' + log4j2Version
 
     testCompile "junit:junit:${junitVersion}"
     testCompile "org.scalatest:scalatest_${scalaBinaryVersion}:${scalatestVersion}"
@@ -389,10 +496,11 @@ gradle.taskGraph.whenReady { graph ->
         'SPARK_PREPEND_CLASSES': '1',
         'SPARK_SCALA_VERSION': scalaBinaryVersion,
         'SPARK_TESTING': '1',
+        'SPARK_LOCAL_IP': 'localhost',
         'PYSPARK_PYTHON': sparkPython,
         'PYSPARK_DRIVER_PYTHON': sparkPython,
         'JAVA_HOME': System.getProperty('java.home')
-    systemProperties 'log4j.configuration': "file:${test.project.projectDir}/src/test/resources/log4j.properties",
+    systemProperties 'log4j.configurationFile': "file:${test.project.projectDir}/src/test/resources/log4j2.properties",
         'derby.system.durability': 'test',
         'java.awt.headless': 'true',
         'java.io.tmpdir': "${rootProject.buildDir}/tmp",

diff --git a/common/network-common/build.gradle b/common/network-common/build.gradle
index 89f3fc35d5025..105f3637d64b5 100644
--- a/common/network-common/build.gradle
+++ b/common/network-common/build.gradle
@@ -20,7 +20,7 @@ description = 'Spark Project Networking'
 dependencies {
   compile project(subprojectBase + 'snappy-spark-tags_' + scalaBinaryVersion)
 
-  compile group: 'io.netty', name: 'netty-all', version: nettyAllVersion
+  compile libraries.netty_all
   compile group: 'com.google.code.findbugs', name: 'jsr305', version: jsr305Version
   compile group: 'com.google.guava', name: 'guava', version: guavaVersion
   compile group: 'org.fusesource.leveldbjni', name: 'leveldbjni-all', version: levelDbJniVersion

diff --git a/common/network-yarn/build.gradle b/common/network-yarn/build.gradle
index 80cb3a9f9eb35..af72b61c55c9e 100644
--- a/common/network-yarn/build.gradle
+++ b/common/network-yarn/build.gradle
@@ -21,35 +21,10 @@ dependencies {
   compile project(subprojectBase + 'snappy-spark-network-shuffle_' + scalaBinaryVersion)
   compile project(subprojectBase + 'snappy-spark-tags_' + scalaBinaryVersion)
 
-  compile group: 'io.netty', name: 'netty-all', version: nettyAllVersion
-  compileOnly (group: 'org.apache.hadoop', name: 'hadoop-client', version: hadoopVersion) {
-    exclude(group: 'asm', module: 'asm')
-    exclude(group: 'org.codehaus.jackson', module: 'jackson-core-asl')
-    exclude(group: 'org.codehaus.jackson', module: 'jackson-mapper-asl')
-    exclude(group: 'org.ow2.asm', module: 'asm')
-    exclude(group: 'org.jboss.netty', module: 'netty')
-    exclude(group: 'commons-logging', module: 'commons-logging')
-    exclude(group: 'org.mockito', module: 'mockito-all')
-    exclude(group: 'org.mortbay.jetty', module: 'servlet-api-2.5')
-    exclude(group: 'javax.servlet', module: 'servlet-api')
-    exclude(group: 'junit', module: 'junit')
-    exclude(group: 'com.google.guava', module: 'guava')
-    exclude(group: 'com.sun.jersey')
-    exclude(group: 'com.sun.jersey.jersey-test-framework')
-    exclude(group: 'com.sun.jersey.contribs')
-    exclude(group: 'io.netty', module: 'netty')
-    exclude(group: 'io.netty', module: 'netty-all')
-    exclude(group: 'org.apache.directory.server', module: 'apacheds-kerberos-codec')
-  }
+  compile libraries.netty_all
+  compileOnly libraries.hadoop_client
 
   testCompile project(path: subprojectBase + 'snappy-spark-tags_' + scalaBinaryVersion, configuration: 'testOutput')
-  /*
-  runtimeJar project(subprojectBase + 'snappy-spark-network-common_' + scalaBinaryVersion)
-  runtimeJar project(subprojectBase + 'snappy-spark-network-shuffle_' + scalaBinaryVersion)
-  runtimeJar group: 'io.netty', name: 'netty-all', version: nettyAllVersion
-  runtimeJar group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonVersion
-  runtimeJar group: 'com.fasterxml.jackson.core', name: 'jackson-annotations', version: jacksonVersion
-  */
 }
 
 shadowJar {

diff --git a/common/unsafe/pom.xml b/common/unsafe/pom.xml
index ac846786c2156..0b276b517e5a9 100644
--- a/common/unsafe/pom.xml
+++ b/common/unsafe/pom.xml
@@ -65,10 +65,6 @@
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
     </dependency>
-    <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-    </dependency>
     <dependency>

diff --git a/common/unsafe/src/main/java/org/apache/spark/unsafe/Native.java b/common/unsafe/src/main/java/org/apache/spark/unsafe/Native.java
index cfaa3bec46ea9..c3f0105561ee2 100644
--- a/common/unsafe/src/main/java/org/apache/spark/unsafe/Native.java
+++ b/common/unsafe/src/main/java/org/apache/spark/unsafe/Native.java
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved.
+ * Copyright (c) 2017-2022 TIBCO Software Inc. All rights reserved.
  *
  * Licensed under the Apache License, Version 2.0 (the "License"); you
  * may not use this file except in compliance with the License. You
@@ -23,7 +23,8 @@
 import java.security.CodeSource;
 import java.util.Locale;
 
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Optimized JNI calls.
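A sketch for illustration only, not part of this patch: the shared `libraries` map above repeats the log4j 1.x excludes on every dependency entry. Under the same Gradle DSL, a hypothetical build-wide guard in the root build.gradle would keep any transitive log4j 1.x artifact off the classpath, while log4j-1.2-api (added in the subprojects block above) keeps serving code that still imports the 1.x API:

    // hypothetical build-wide guard against log4j 1.x sneaking back in
    // transitively; per-dependency excludes then become belt-and-braces
    allprojects {
      configurations.all {
        exclude group: 'log4j', module: 'log4j'
        exclude group: 'org.slf4j', module: 'slf4j-log4j12'
      }
    }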
@@ -64,7 +65,7 @@ private Native() {
     String arch = System.getProperty("os.arch");
     is64Bit = arch.contains("64") || arch.contains("s390x");
 
-    logger = Logger.getLogger(Native.class);
+    logger = LoggerFactory.getLogger(Native.class);
 
     String library = "native" + suffix;
     if (is64Bit()) {
@@ -104,13 +105,9 @@ private Native() {
       loaded = true;
     } catch (IOException ioe) {
-      if (logger.isInfoEnabled()) {
-        logger.info("library " + library + " could not be loaded due to " + ioe);
-      }
+      logger.info("library {} could not be loaded due to {}", library, ioe.toString());
     } catch (UnsatisfiedLinkError ule) {
-      if (logger.isInfoEnabled()) {
-        logger.info("library " + library + " could not be loaded");
-      }
+      logger.info("library {} could not be loaded", library);
     }
     nativeLoaded = loaded;
   }

diff --git a/core/build.gradle b/core/build.gradle
index 26de5d43b2823..a7e2f46866acf 100644
--- a/core/build.gradle
+++ b/core/build.gradle
@@ -24,21 +24,8 @@ dependencies {
   compile project(subprojectBase + 'snappy-spark-unsafe_' + scalaBinaryVersion)
   compile project(subprojectBase + 'snappy-spark-tags_' + scalaBinaryVersion)
 
-  compile(group: 'org.apache.avro', name: 'avro-ipc', version: avroVersion) {
-    exclude(group: 'io.netty', module: 'netty')
-    exclude(group: 'org.mortbay.jetty', module: 'jetty')
-    exclude(group: 'org.mortbay.jetty', module: 'jetty-util')
-    exclude(group: 'org.mortbay.jetty', module: 'servlet-api')
-    exclude(group: 'org.apache.velocity', module: 'velocity')
-  }
-  compile(group: 'org.apache.avro', name: 'avro-mapred', version: avroVersion, classifier: 'hadoop2') {
-    exclude(group: 'io.netty', module: 'netty')
-    exclude(group: 'org.mortbay.jetty', module: 'jetty')
-    exclude(group: 'org.mortbay.jetty', module: 'jetty-util')
-    exclude(group: 'org.mortbay.jetty', module: 'servlet-api')
-    exclude(group: 'org.apache.velocity', module: 'velocity')
-    exclude(group: 'org.apache.avro', module: 'avro-ipc')
-  }
+  compile libraries.avro_ipc
+  compile libraries.avro_mapred
   compile group: 'com.google.guava', name: 'guava', version: guavaVersion
   compile group: 'com.esotericsoftware', name: 'kryo-shaded', version: kryoVersion
   compile(group: 'com.twitter', name: 'chill_' + scalaBinaryVersion, version: chillVersion) {
@@ -49,7 +36,9 @@ dependencies {
   }
   compile group: 'org.apache.xbean', name: 'xbean-asm5-shaded', version: xbeanAsm5Version
   // explicitly include netty from akka-remote to not let zookeeper override it
-  compile group: 'io.netty', name: 'netty', version: nettyVersion
+  compile(group: 'io.netty', name: 'netty', version: nettyVersion) {
+    exclude(group: 'log4j', module: 'log4j')
+  }
   // explicitly exclude old netty from zookeeper
   compile(group: 'org.apache.zookeeper', name: 'zookeeper', version: zookeeperVersion) {
     exclude(group: 'org.jboss.netty', module: 'netty')
@@ -62,25 +51,7 @@ dependencies {
     exclude(group: 'log4j', module: 'log4j')
   }
   compile group: 'com.google.protobuf', name: 'protobuf-java', version: protobufVersion
-  compile(group: 'org.apache.hadoop', name: 'hadoop-client', version: hadoopVersion) {
-    exclude(group: 'asm', module: 'asm')
-    exclude(group: 'org.codehaus.jackson', module: 'jackson-mapper-asl')
-    exclude(group: 'org.ow2.asm', module: 'asm')
-    exclude(group: 'org.apache.zookeeper', module: 'zookeeper')
-    exclude(group: 'org.jboss.netty', module: 'netty')
-    exclude(group: 'jline', module: 'jline')
-    exclude(group: 'commons-logging', module: 'commons-logging')
-    exclude(group: 'org.mockito', module: 'mockito-all')
-    exclude(group: 'org.mortbay.jetty', module: 'servlet-api-2.5')
-    exclude(group: 'javax.servlet', module: 'servlet-api')
-    exclude(group: 'junit', module: 'junit')
-    exclude(group: 'com.google.guava', module: 'guava')
-    exclude(group: 'com.sun.jersey')
-    exclude(group: 'com.sun.jersey.jersey-test-framework')
-    exclude(group: 'com.sun.jersey.contribs')
-    exclude(group: 'com.google.protobuf', module: 'protobuf-java')
-    exclude(group: 'org.apache.directory.server', module: 'apacheds-kerberos-codec')
-  }
+  compile libraries.hadoop_client
   compile group: 'org.codehaus.jackson', name: 'jackson-mapper-asl', version: jackson1Version
   compile(group: 'net.java.dev.jets3t', name: 'jets3t', version: jets3tVersion) {
     exclude(group: 'commons-codec', module: 'commons-codec')
@@ -91,6 +62,9 @@ dependencies {
     exclude(group: 'org.jboss.netty', module: 'netty')
     exclude(group: 'jline', module: 'jline')
     exclude(group: 'com.google.guava', module: 'guava')
+    exclude(group: 'org.slf4j', module: 'slf4j-api')
+    exclude(group: 'org.slf4j', module: 'slf4j-log4j12')
+    exclude(group: 'log4j', module: 'log4j')
   }
   compile 'org.scala-lang:scalap:' + scalaVersion
 
@@ -113,8 +87,7 @@ dependencies {
   compile(group: 'org.apache.commons', name: 'commons-crypto', version: commonsCryptoVersion) {
     exclude(group: 'net.java.dev.jna', module: 'jna')
   }
-  compile group: 'io.netty', name: 'netty', version: nettyVersion
-  compile group: 'io.netty', name: 'netty-all', version: nettyAllVersion
+  compile libraries.netty_all
   compile group: 'org.slf4j', name: 'jul-to-slf4j', version: slf4jVersion
   compile group: 'org.slf4j', name: 'jcl-over-slf4j', version: slf4jVersion
   compile group: 'org.xerial.snappy', name: 'snappy-java', version: snappyJavaVersion
@@ -127,7 +100,6 @@ dependencies {
   compile group: 'org.glassfish.jersey.core', name: 'jersey-server', version: jerseyVersion
   compile group: 'org.glassfish.jersey.containers', name: 'jersey-container-servlet', version: jerseyVersion
   compile group: 'org.glassfish.jersey.containers', name: 'jersey-container-servlet-core', version: jerseyVersion
-  compile group: 'io.netty', name: 'netty-all', version: nettyAllVersion
   compile(group: 'com.clearspring.analytics', name: 'stream', version: streamVersion) {
     exclude(group: 'it.unimi.dsi', module: 'fastutil')
   }

diff --git a/core/src/main/scala/org/apache/spark/internal/Logging.scala b/core/src/main/scala/org/apache/spark/internal/Logging.scala
index 4629613db3931..56f12ea12c5f2 100644
--- a/core/src/main/scala/org/apache/spark/internal/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/internal/Logging.scala
@@ -103,13 +103,21 @@ private[spark] trait Logging {
   }
 
   protected def initializeLogIfNecessary(isInterpreter: Boolean): Unit = {
+    initializeLogIfNecessary(isInterpreter, silent = false)
+  }
+
+  protected def initializeLogIfNecessary(
+      isInterpreter: Boolean,
+      silent: Boolean = false): Boolean = {
     if (!Logging.initialized) {
       Logging.initLock.synchronized {
         if (!Logging.initialized) {
-          initializeLogging(isInterpreter)
+          initializeLogging(isInterpreter, silent)
+          return true
         }
       }
     }
+    false
   }
 
   private def initializeLogging(isInterpreter: Boolean, silent: Boolean): Unit = {
@@ -172,8 +180,12 @@ private[spark] trait Logging {
   }
 }
 
-private object Logging {
+private[spark] object Logging {
   @volatile private var initialized = false
+  @volatile private var defaultRootLevel: Level = null
+  @volatile private var defaultSparkLog4jConfig = false
+  @volatile private[spark] var sparkShellThresholdLevel: Level = null
+
   val initLock = new Object()
   try {
     // We use reflection here to handle the case where users remove the

diff --git a/core/src/test/scala/org/apache/spark/internal/LoggingSuite.scala b/core/src/test/scala/org/apache/spark/internal/LoggingSuite.scala
index 6213936130e96..772cc3265428e 100644
--- a/core/src/test/scala/org/apache/spark/internal/LoggingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/internal/LoggingSuite.scala
@@ -52,7 +52,7 @@ class LoggingSuite extends SparkFunSuite {
     assert(ssf.filter(logEvent2) == Filter.Result.NEUTRAL)
 
     // custom log level configured (by log4j2.properties)
-    val jettyLogger = LogManager.getLogger("org.sparkproject.jetty")
+    val jettyLogger = LogManager.getLogger("org.spark_project.jetty")
       .asInstanceOf[Logger]
     val logEvent3 = new Builder().setLevel(Level.INFO)
       .setLoggerName(jettyLogger.getName()).setMessage(new SimpleMessage("Test")).build()

diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala
index 86aec363ea421..35091735836ab 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegression.scala
@@ -18,7 +18,6 @@
 // scalastyle:off println
 package org.apache.spark.examples.mllib
 
-import org.apache.log4j.{Level, Logger}
 import scopt.OptionParser
 
 import org.apache.spark.{SparkConf, SparkContext}
@@ -92,7 +91,7 @@ object LinearRegression {
     val conf = new SparkConf().setAppName(s"LinearRegression with $params")
     val sc = new SparkContext(conf)
 
-    Logger.getRootLogger.setLevel(Level.WARN)
+    sc.setLogLevel("WARN")
 
     val examples = MLUtils.loadLibSVMFile(sc, params.input).cache()

diff --git a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSourceSuite.scala b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSourceSuite.scala
index 4d53a6f91b631..fcc1f6a8c722c 100644
--- a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSourceSuite.scala
+++ b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSourceSuite.scala
@@ -432,6 +432,7 @@ class KafkaSourceSuite extends KafkaSourceTest {
       .format("kafka")
       .option("kafka.bootstrap.servers", testUtils.brokerAddress)
       .option("kafka.metadata.max.age.ms", "1")
+      .option("kafka.default.api.timeout.ms", "3000")
       .option("subscribePattern", s"$topicPrefix-.*")
       .option("failOnDataLoss", "false")
@@ -576,6 +577,7 @@ class KafkaSourceSuite extends KafkaSourceTest {
       .readStream
       .format("kafka")
       .option("kafka.bootstrap.servers", testUtils.brokerAddress)
+      .option("kafka.default.api.timeout.ms", "3000")
       .option("kafka.metadata.max.age.ms", "1")
       .option("subscribe", topic)
       // If a topic is deleted and we try to poll data starting from offset 0,
@@ -886,6 +888,7 @@ class KafkaSourceStressSuite extends KafkaSourceTest {
       .option("kafka.metadata.max.age.ms", "1")
       .option("subscribePattern", "stress.*")
       .option("failOnDataLoss", "false")
+      .option("kafka.default.api.timeout.ms", "3000")
       .load()
       .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
       .as[(String, String)]
@@ -982,6 +985,7 @@ class KafkaSourceStressForDontFailOnDataLossSuite extends StreamTest with Shared
       .format("kafka")
       .option("kafka.bootstrap.servers", testUtils.brokerAddress)
       .option("kafka.metadata.max.age.ms", "1")
+      .option("kafka.default.api.timeout.ms", "3000")
       .option("subscribePattern", "failOnDataLoss.*")
       .option("startingOffsets", "earliest")
       .option("failOnDataLoss", "false")

diff --git a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala
index 996b90016a2ed..b5364ef4cb8ba 100644
--- a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala
+++ b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala
@@ -39,6 +39,7 @@ import org.apache.kafka.clients.producer._
 import org.apache.kafka.common.TopicPartition
 import org.apache.kafka.common.network.ListenerName
 import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
+import org.apache.kafka.common.utils.Exit
 import org.apache.zookeeper.server.{NIOServerCnxnFactory, ZooKeeperServer}
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.time.SpanSugar._
@@ -56,7 +57,7 @@ class KafkaTestUtils(withBrokerProps: Map[String, Object] = Map.empty) extends L
 
   // Zookeeper related configurations
-  private val zkHost = "localhost"
+  private val zkHost = "127.0.0.1"
   private var zkPort: Int = 0
   private val zkConnectionTimeout = 60000
   private val zkSessionTimeout = 6000
@@ -67,7 +68,7 @@ class KafkaTestUtils(withBrokerProps: Map[String, Object] = Map.empty) extends L
   private var adminClient: AdminClient = null
 
   // Kafka broker related configurations
-  private val brokerHost = "localhost"
+  private val brokerHost = "127.0.0.1"
   private var brokerPort = 0
   private var brokerConf: KafkaConfig = _
@@ -131,6 +132,9 @@ class KafkaTestUtils(withBrokerProps: Map[String, Object] = Map.empty) extends L
   def setup(): Unit = {
     setupEmbeddedZookeeper()
     setupEmbeddedKafkaServer()
+    eventually(timeout(60.seconds)) {
+      assert(zkUtils.getAllBrokersInCluster().nonEmpty, "Broker was not up in 60 seconds")
+    }
   }
 
   /** Teardown the whole servers, including Kafka broker and Zookeeper */
@@ -290,8 +294,8 @@ class KafkaTestUtils(withBrokerProps: Map[String, Object] = Map.empty) extends L
   protected def brokerConfiguration: Properties = {
     val props = new Properties()
     props.put("broker.id", "0")
-    props.put("host.name", "localhost")
-    props.put("advertised.host.name", "localhost")
+    props.put("host.name", "127.0.0.1")
+    props.put("advertised.host.name", "127.0.0.1")
     props.put("port", brokerPort.toString)
     props.put("log.dir", Utils.createTempDir().getAbsolutePath)
     props.put("zookeeper.connect", zkAddress)

diff --git a/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaTestUtils.scala b/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaTestUtils.scala
index 21dbcdfb4563c..f9286ae6d77d8 100644
--- a/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaTestUtils.scala
+++ b/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaTestUtils.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.streaming.kafka010
 
-import java.io.File
+import java.io.{File, IOException}
 import java.lang.{Integer => JInt}
 import java.net.InetSocketAddress
 import java.util.concurrent.TimeoutException
@@ -50,17 +50,17 @@ private[kafka010] class KafkaTestUtils extends Logging {
 
   // Zookeeper related configurations
-  private val zkHost = "localhost"
+  private val zkHost = "127.0.0.1"
   private var zkPort: Int = 0
   private val zkConnectionTimeout = 60000
-  private val zkSessionTimeout = 6000
+  private val zkSessionTimeout = 10000
 
   private var zookeeper: EmbeddedZookeeper = _
 
   private var zkUtils: ZkUtils = _
 
   // Kafka broker related configurations
-  private val brokerHost = "localhost"
+  private val brokerHost = "127.0.0.1"
   private var brokerPort = 0
   private var brokerConf: KafkaConfig = _
@@ -135,10 +135,21 @@ private[kafka010] class KafkaTestUtils extends Logging {
     if (server != null) {
       server.shutdown()
+      server.awaitShutdown()
       server = null
     }
 
-    brokerConf.logDirs.foreach { f => Utils.deleteRecursively(new File(f)) }
+    // On Windows, `logDirs` is left open even after Kafka server above is completely shut down
+    // in some cases. It leads to test failures on Windows if the directory deletion failure
+    // throws an exception.
+    brokerConf.logDirs.foreach { f =>
+      try {
+        Utils.deleteRecursively(new File(f))
+      } catch {
+        case e: IOException if Utils.isWindows =>
+          logWarning(e.getMessage)
+      }
+    }
 
     if (zkUtils != null) {
       zkUtils.close()
@@ -191,8 +202,8 @@ private[kafka010] class KafkaTestUtils extends Logging {
   private def brokerConfiguration: Properties = {
     val props = new Properties()
     props.put("broker.id", "0")
-    props.put("host.name", "localhost")
-    props.put("advertised.host.name", "localhost")
+    props.put("host.name", "127.0.0.1")
+    props.put("advertised.host.name", "127.0.0.1")
     props.put("port", brokerPort.toString)
     props.put("log.dir", brokerLogDir)
     props.put("zookeeper.connect", zkAddress)
@@ -252,10 +263,10 @@ private[kafka010] class KafkaTestUtils extends Logging {
     def isPropagated = server.dataPlaneRequestProcessor.metadataCache
       .getPartitionInfo(topic, partition) match {
       case Some(partitionState) =>
+        val leader = partitionState.basePartitionState.leader
+        val isr = partitionState.basePartitionState.isr
         zkUtils.getLeaderForPartition(topic, partition).isDefined &&
-          Request.isValidBrokerId(partitionState.basePartitionState.leader) &&
-          !partitionState.basePartitionState.replicas.isEmpty
-
+          Request.isValidBrokerId(leader) && !isr.isEmpty
       case _ =>
         false
     }
@@ -281,8 +292,21 @@ private[kafka010] class KafkaTestUtils extends Logging {
     def shutdown() {
       factory.shutdown()
-      Utils.deleteRecursively(snapshotDir)
-      Utils.deleteRecursively(logDir)
+      // The directories are not closed even if the ZooKeeper server is shut down.
+      // Please see ZOOKEEPER-1844, which is fixed in 3.4.6+. It leads to test failures
+      // on Windows if the directory deletion failure throws an exception.
+      try {
+        Utils.deleteRecursively(snapshotDir)
+      } catch {
+        case e: IOException if Utils.isWindows =>
+          logWarning(e.getMessage)
+      }
+      try {
+        Utils.deleteRecursively(logDir)
+      } catch {
+        case e: IOException if Utils.isWindows =>
+          logWarning(e.getMessage)
+      }
     }
   }
 }

diff --git a/external/kafka-0-8-assembly/pom.xml b/external/kafka-0-8-assembly/pom.xml
index 1f8ca7a17c720..20782a65c59dd 100644
--- a/external/kafka-0-8-assembly/pom.xml
+++ b/external/kafka-0-8-assembly/pom.xml
@@ -90,11 +90,6 @@
       <artifactId>zookeeper</artifactId>
      <scope>provided</scope>
     </dependency>
-    <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-      <scope>provided</scope>
-    </dependency>
     <dependency>
       <groupId>net.java.dev.jets3t</groupId>
       <artifactId>jets3t</artifactId>
@@ -110,11 +105,6 @@
       <artifactId>slf4j-api</artifactId>
       <scope>provided</scope>
     </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
-      <scope>provided</scope>
-    </dependency>
     <dependency>
       <groupId>org.xerial.snappy</groupId>
       <artifactId>snappy-java</artifactId>
@@ -160,7 +150,7 @@
                 <resource>reference.conf</resource>
-                <resource>log4j.properties</resource>
+                <resource>log4j2.properties</resource>

diff --git a/launcher/build.gradle b/launcher/build.gradle
index 64232c4637553..af74bf952f94d 100644
--- a/launcher/build.gradle
+++ b/launcher/build.gradle
@@ -20,24 +20,7 @@ description = 'Spark Project Launcher'
 dependencies {
   compile project(subprojectBase + 'snappy-spark-tags_' + scalaBinaryVersion)
 
-  testCompile(group: 'org.apache.hadoop', name: 'hadoop-client', version: hadoopVersion) {
-    exclude(group: 'asm', module: 'asm')
-    exclude(group: 'org.codehaus.jackson', module: 'jackson-mapper-asl')
-    exclude(group: 'org.ow2.asm', module: 'asm')
-    exclude(group: 'org.jboss.netty', module: 'netty')
-    exclude(group: 'commons-logging', module: 'commons-logging')
-    exclude(group: 'org.mockito', module: 'mockito-all')
-    exclude(group: 'org.mortbay.jetty', module: 'servlet-api-2.5')
-    exclude(group: 'javax.servlet', module: 'servlet-api')
-    exclude(group: 'junit', module: 'junit')
-    exclude(group: 'com.google.guava', module: 'guava')
-    exclude(group: 'com.sun.jersey')
-    exclude(group: 'com.sun.jersey.jersey-test-framework')
-    exclude(group: 'com.sun.jersey.contribs')
-    exclude(group: 'io.netty', module: 'netty')
-    exclude(group: 'io.netty', module: 'netty-all')
-    exclude(group: 'org.apache.directory.server', module: 'apacheds-kerberos-codec')
-  }
+  testCompile libraries.hadoop_client
   testCompile group: 'org.slf4j', name: 'jul-to-slf4j', version: slf4jVersion
 
   testCompile project(path: subprojectBase + 'snappy-spark-tags_' + scalaBinaryVersion, configuration: 'testOutput')

diff --git a/pom.xml b/pom.xml
index ee37de2c70759..11824919a3237 100644
--- a/pom.xml
+++ b/pom.xml
@@ -991,22 +991,6 @@
           <groupId>org.mortbay.jetty</groupId>
          <artifactId>servlet-api</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>log4j</groupId>
-          <artifactId>log4j</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.slf4j</groupId>
-          <artifactId>slf4j-log4j12</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-minikdc</artifactId>
-      <version>${hadoop.version}</version>
-      <scope>test</scope>
-    </dependency>
         <exclusion>
           <groupId>org.apache.velocity</groupId>
           <artifactId>velocity</artifactId>
@@ -1015,6 +999,10 @@
           <groupId>log4j</groupId>
           <artifactId>log4j</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-log4j12</artifactId>
+        </exclusion>

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/package.scala
index 4005087dad05a..23680172fa56e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/package.scala
@@ -154,11 +154,4 @@ package object util {
   }
 
   def toPrettySQL(e: Expression): String = usePrettyExpression(e).sql
-
-  /* FIX ME
-  implicit class debugLogging(a: Any) {
-    def debugLogging() {
-      org.apache.log4j.Logger.getLogger(a.getClass.getName).setLevel(org.apache.log4j.Level.DEBUG)
-    }
-  } */
 }

diff --git a/sql/hive/build.gradle b/sql/hive/build.gradle
index 6998118a3b9f4..70ff42df7388c 100644
--- a/sql/hive/build.gradle
+++ b/sql/hive/build.gradle
@@ -26,64 +26,12 @@ dependencies {
   compile group: 'org.datanucleus', name: 'datanucleus-core', version: datanucleusCoreVersion
   compile group: 'org.datanucleus', name: 'datanucleus-api-jdo', version: datanucleusJdoVersion
   compile group: 'org.datanucleus', name: 'datanucleus-rdbms', version: datanucleusRdbmsVersion
-  compile(group: 'org.spark-project.hive', name: 'hive-exec', version: hiveVersion) {
-    exclude(group: 'org.datanucleus', module: 'datanucleus-core')
-    exclude(group: 'org.spark-project.hive', module: 'hive-metastore')
-    exclude(group: 'org.spark-project.hive', module: 'hive-shims')
-    exclude(group: 'org.spark-project.hive', module: 'hive-ant')
-    exclude(group: 'org.spark-project.hive', module: 'spark-client')
-    exclude(group: 'org.apache.ant', module: 'ant')
-    exclude(group: 'com.esotericsoftware.kryo', module: 'kryo')
-    exclude(group: 'commons-codec', module: 'commons-codec')
-    exclude(group: 'commons-httpclient', module: 'commons-httpclient')
-    exclude(group: 'org.apache.avro', module: 'avro-mapred')
-    exclude(group: 'org.apache.calcite', module: 'calcite-core')
-    exclude(group: 'org.apache.curator', module: 'apache-curator')
-    exclude(group: 'org.apache.curator', module: 'curator-client')
-    exclude(group: 'org.apache.curator', module: 'curator-framework')
-    exclude(group: 'org.apache.thrift', module: 'libthrift')
-    exclude(group: 'org.apache.thrift', module: 'libfb303')
-    exclude(group: 'org.apache.zookeeper', module: 'zookeeper')
-    exclude(group: 'org.slf4j', module: 'slf4j-api')
-    exclude(group: 'org.slf4j', module: 'slf4j-log4j12')
-    exclude(group: 'log4j', module: 'log4j')
-    exclude(group: 'commons-logging', module: 'commons-logging')
-    exclude(group: 'org.codehaus.groovy', module: 'groovy-all')
-    exclude(group: 'jline', module: 'jline')
-    exclude(group: 'org.json', module: 'json')
-  }
-  compile(group: 'org.spark-project.hive', name: 'hive-metastore', version: hiveVersion) {
-    exclude(group: 'org.datanucleus', module: 'datanucleus-core')
-    exclude(group: 'org.datanucleus', module: 'datanucleus-api-jdo')
-    exclude(group: 'org.datanucleus', module: 'datanucleus-rdbms')
-    exclude(group: 'org.spark-project.hive', module: 'hive-serde')
-    exclude(group: 'org.spark-project.hive', module: 'hive-shims')
-    exclude(group: 'org.apache.thrift', module: 'libfb303')
-    exclude(group: 'org.apache.thrift', module: 'libthrift')
-    exclude(group: 'javax.servlet', module: 'servlet-api')
-    exclude(group: 'com.google.guava', module: 'guava')
-    exclude(group: 'org.slf4j', module: 'slf4j-api')
-    exclude(group: 'org.slf4j', module: 'slf4j-log4j12')
-    exclude(group: 'log4j', module: 'log4j')
-    exclude(group: 'log4j', module: 'apache-log4j-extras')
-    exclude(group: 'org.apache.derby', module: 'derby')
-  }
+  compile libraries.hive_exec
+  compile libraries.hive_meta
   compile group: 'org.apache.avro', name: 'avro', version: avroVersion
-  compile(group: 'org.apache.avro', name: 'avro-ipc', version: avroVersion) {
-    exclude(group: 'org.jboss.netty', module: 'netty')
-    exclude(group: 'org.mortbay.jetty', module: 'jetty')
-    exclude(group: 'org.mortbay.jetty', module: 'jetty-util')
-    exclude(group: 'org.mortbay.jetty', module: 'servlet-api')
-    exclude(group: 'org.apache.velocity', module: 'velocity')
-  }
-  compile(group: 'org.apache.avro', name: 'avro-mapred', version: avroVersion, classifier: 'hadoop2') {
-    exclude(group: 'org.jboss.netty', module: 'netty')
-    exclude(group: 'org.mortbay.jetty', module: 'jetty')
-    exclude(group: 'org.mortbay.jetty', module: 'jetty-util')
-    exclude(group: 'org.mortbay.jetty', module: 'servlet-api')
-    exclude(group: 'org.apache.velocity', module: 'velocity')
-    exclude(group: 'org.apache.avro', module: 'avro-ipc')
-  }
+  compile libraries.avro_ipc
+  compile libraries.avro_mapred
   compile(group: 'org.apache.calcite', name: 'calcite-core', version: calciteVersion) {
     exclude(group: 'com.fasterxml.jackson.core', module: 'jackson-annotations')
     exclude(group: 'com.fasterxml.jackson.core', module: 'jackson-core')

diff --git a/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala b/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala
index ae44fd07ac558..a361027a1caf9 100644
--- a/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala
+++ b/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala
@@ -90,8 +90,7 @@ trait JavaTestBase extends TestSuiteBase {
 }
 
 object JavaTestUtils extends JavaTestBase {
-  override def maxWaitTimeMillis = 20000
-
+  override def maxWaitTimeMillis = 30000 // keep as the max across JavaTestBase implementations for parallel test runs
 }
 
 object JavaCheckpointTestUtils extends JavaTestBase {

diff --git a/yarn/build.gradle b/yarn/build.gradle
index 8cc516bec327d..21c4c612149c5 100644
--- a/yarn/build.gradle
+++ b/yarn/build.gradle
@@ -39,6 +39,8 @@ dependencies {
     exclude(group: 'com.sun.jersey')
     exclude(group: 'com.sun.jersey.jersey-test-framework')
     exclude(group: 'com.sun.jersey.contribs')
+    exclude(group: 'log4j', module: 'log4j')
+    exclude(group: 'org.slf4j', module: 'slf4j-log4j12')
   }
   compile(group: 'org.apache.hadoop', name: 'hadoop-yarn-server-web-proxy', version: hadoopVersion) {
     exclude(group: 'asm', module: 'asm')
@@ -59,6 +61,8 @@ dependencies {
     exclude(group: 'com.sun.jersey')
     exclude(group: 'com.sun.jersey.jersey-test-framework')
     exclude(group: 'com.sun.jersey.contribs')
+    exclude(group: 'log4j', module: 'log4j')
+    exclude(group: 'org.slf4j', module: 'slf4j-log4j12')
   }
   compile group: 'com.fasterxml.jackson.jaxrs', name: 'jackson-jaxrs-json-provider', version: jacksonVersion
   compile group: 'com.google.guava', name: 'guava', version: guavaVersion
@@ -69,43 +73,8 @@ dependencies {
   compile group: 'org.eclipse.jetty', name: 'jetty-servlet', version: jettyVersion
   compile group: 'org.eclipse.jetty', name: 'jetty-servlets', version: jettyVersion
   compile group: 'org.apache.derby', name: 'derby', version: derbyVersion
-  compile(group: 'org.spark-project.hive', name: 'hive-exec', version: hiveVersion) {
-    exclude(group: 'org.spark-project.hive', module: 'hive-metastore')
-    exclude(group: 'org.spark-project.hive', module: 'hive-shims')
-    exclude(group: 'org.spark-project.hive', module: 'hive-ant')
-    exclude(group: 'org.spark-project.hive', module: 'spark-client')
-    exclude(group: 'org.apache.ant', module: 'ant')
-    exclude(group: 'com.esotericsoftware.kryo', module: 'kryo')
-    exclude(group: 'commons-codec', module: 'commons-codec')
-    exclude(group: 'commons-httpclient', module: 'commons-httpclient')
-    exclude(group: 'org.apache.avro', module: 'avro-mapred')
-    exclude(group: 'org.apache.calcite', module: 'calcite-core')
-    exclude(group: 'org.apache.curator', module: 'apache-curator')
-    exclude(group: 'org.apache.curator', module: 'curator-client')
-    exclude(group: 'org.apache.curator', module: 'curator-framework')
-    exclude(group: 'org.apache.thrift', module: 'libthrift')
-    exclude(group: 'org.apache.thrift', module: 'libfb303')
-    exclude(group: 'org.apache.zookeeper', module: 'zookeeper')
-    exclude(group: 'org.slf4j', module: 'slf4j-api')
-    exclude(group: 'org.slf4j', module: 'slf4j-log4j12')
-    exclude(group: 'log4j', module: 'log4j')
-    exclude(group: 'commons-logging', module: 'commons-logging')
-    exclude(group: 'org.codehaus.groovy', module: 'groovy-all')
-    exclude(group: 'jline', module: 'jline')
-    exclude(group: 'org.json', module: 'json')
-  }
-  compile(group: 'org.spark-project.hive', name: 'hive-metastore', version: hiveVersion) {
-    exclude(group: 'org.spark-project.hive', module: 'hive-serde')
-    exclude(group: 'org.spark-project.hive', module: 'hive-shims')
-    exclude(group: 'org.apache.thrift', module: 'libfb303')
-    exclude(group: 'org.apache.thrift', module: 'libthrift')
-    exclude(group: 'javax.servlet', module: 'servlet-api')
-    exclude(group: 'com.google.guava', module: 'guava')
-    exclude(group: 'org.slf4j', module: 'slf4j-api')
-    exclude(group: 'org.slf4j', module: 'slf4j-log4j12')
-    exclude(group: 'log4j', module: 'log4j')
-    exclude(group: 'org.apache.derby', module: 'derby')
-  }
+  compile libraries.hive_exec
+  compile libraries.hive_meta
   compile(group: 'org.apache.thrift', name: 'libthrift', version: thriftVersion) {
     exclude(group: 'org.slf4j', module: 'slf4j-api')
   }
@@ -140,3 +109,24 @@ dependencies {
   }
   testCompile group: 'com.sun.jersey.contribs', name: 'jersey-guice', version: sunJerseyVersion
 }
+
+configurations.all {
+  // tests require the older jetty version
+  if (it.name.startsWith('testRuntime')) {
+    resolutionStrategy.force "org.eclipse.jetty:jetty-client:${yarnTestJettyVersion}",
+        "org.eclipse.jetty:jetty-continuation:${yarnTestJettyVersion}",
+        "org.eclipse.jetty:jetty-http:${yarnTestJettyVersion}",
+        "org.eclipse.jetty:jetty-io:${yarnTestJettyVersion}",
+        "org.eclipse.jetty:jetty-jndi:${yarnTestJettyVersion}",
+        "org.eclipse.jetty:jetty-plus:${yarnTestJettyVersion}",
+        "org.eclipse.jetty:jetty-proxy:${yarnTestJettyVersion}",
+        "org.eclipse.jetty:jetty-security:${yarnTestJettyVersion}",
+        "org.eclipse.jetty:jetty-server:${yarnTestJettyVersion}",
+        "org.eclipse.jetty:jetty-servlet:${yarnTestJettyVersion}",
+        "org.eclipse.jetty:jetty-servlets:${yarnTestJettyVersion}",
+        "org.eclipse.jetty:jetty-util:${yarnTestJettyVersion}",
+        "org.eclipse.jetty:jetty-util-ajax:${yarnTestJettyVersion}",
+        "org.eclipse.jetty:jetty-webapp:${yarnTestJettyVersion}",
+        "org.eclipse.jetty:jetty-xml:${yarnTestJettyVersion}"
+  }
+}

diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index 01ab313a6bd83..6ebf3ec81686c 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -694,8 +694,8 @@ private[spark] class Client(
   private def createConfArchive(): File = {
     val hadoopConfFiles = new HashMap[String, File]()
 
-    // Uploading $SPARK_CONF_DIR/log4j2 configuration file to the distributed cache to make sure that
-    // the executors will use the latest configurations instead of the default values. This is
+    // Uploading $SPARK_CONF_DIR/log4j2 configuration file to the distributed cache to make sure
+    // that the executors will use the latest configurations instead of the default values. This is
     // required when user changes log4j2 configuration directly to set the log configurations. If
     // configuration file is provided through --files then executors will be taking configurations
     // from --files instead of $SPARK_CONF_DIR/log4j2 configuration file.
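The test tasks above now pass -Dlog4j.configurationFile pointing at src/test/resources/log4j2.properties, and the Client.scala comment above refers to the same log4j2 configuration format. For orientation only, a minimal sketch of such a file in log4j 2.x properties syntax (the actual files in the repository may differ):

    # minimal log4j2.properties sketch: route everything at INFO to the console
    rootLogger.level = info
    rootLogger.appenderRef.stdout.ref = console

    appender.console.type = Console
    appender.console.name = console
    appender.console.target = SYSTEM_ERR
    appender.console.layout.type = PatternLayout
    appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n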
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala
index 639e564d4684b..a04213773b6b3 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala
@@ -30,7 +30,6 @@ import org.apache.hadoop.yarn.api.records._
 import org.apache.hadoop.yarn.client.api.AMRMClient
 import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest
 import org.apache.hadoop.yarn.conf.YarnConfiguration
-import org.apache.log4j.{Level, Logger}
 
 import org.apache.spark.{SecurityManager, SparkConf, SparkException}
 import org.apache.spark.deploy.yarn.YarnSparkHadoopUtil._

diff --git a/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnScheduler.scala b/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnScheduler.scala
index 029382133ddf2..d3d5d2bbc82d6 100644
--- a/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnScheduler.scala
+++ b/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnScheduler.scala
@@ -18,7 +18,8 @@
 package org.apache.spark.scheduler.cluster
 
 import org.apache.hadoop.yarn.util.RackResolver
-import org.apache.log4j.{Level, Logger}
+import org.apache.logging.log4j.{Level, LogManager}
+import org.apache.logging.log4j.core.Logger
 
 import org.apache.spark._
 import org.apache.spark.scheduler.TaskSchedulerImpl
@@ -27,8 +28,9 @@ import org.apache.spark.util.Utils
 private[spark] class YarnScheduler(sc: SparkContext) extends TaskSchedulerImpl(sc) {
 
   // RackResolver logs an INFO message whenever it resolves a rack, which is way too often.
-  if (Logger.getLogger(classOf[RackResolver]).getLevel == null) {
-    Logger.getLogger(classOf[RackResolver]).setLevel(Level.WARN)
+  private val logger = LogManager.getLogger(classOf[RackResolver])
+  if (logger.getLevel != Level.WARN) {
+    logger.asInstanceOf[Logger].setLevel(Level.WARN)
   }
 
   // By default, rack is unknown
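The YarnScheduler hunk above lowers the RackResolver log level by casting the log4j 2.x logger to its core implementation class. An equivalent sketch using the public Configurator API from log4j-core (shown for illustration only, not part of the patch):

    import org.apache.hadoop.yarn.util.RackResolver
    import org.apache.logging.log4j.Level
    import org.apache.logging.log4j.core.config.Configurator

    // set the RackResolver logger to WARN via log4j 2.x's configuration API
    Configurator.setLevel(classOf[RackResolver].getName, Level.WARN)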