Skip to content

Commit 7c4dc66

Browse files
holdenk authored and jeanlyn committed
[SPARK-3444] Provide an easy way to change log level
Add support for changing the log level at run time through the SparkContext. Based on an earlier PR, apache#2433 includes CR feedback from pwendel & davies Author: Holden Karau <[email protected]> Closes apache#5791 from holdenk/SPARK-3444-provide-an-easy-way-to-change-log-level-r2 and squashes the following commits: 3bf3be9 [Holden Karau] fix exception 42ba873 [Holden Karau] fix exception 9117244 [Holden Karau] Only allow valid log levels, throw exception if invalid log level. 338d7bf [Holden Karau] rename setLoggingLevel to setLogLevel fac14a0 [Holden Karau] Fix style errors d9d03f3 [Holden Karau] Add support for changing the log level at run time through the SparkContext. Based on an earlier PR, apache#2433 includes CR feedback from @pwendel & @davies
1 parent 183a3cd commit 7c4dc66

File tree

6 files changed

+61
-16
lines changed

6 files changed

+61
-16
lines changed

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -347,6 +347,19 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
347347
value
348348
}
349349

350+
/** Control our logLevel. This overrides any user-defined log settings.
351+
* @param logLevel The desired log level as a string.
352+
* Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN
353+
*/
354+
def setLogLevel(logLevel: String) {
355+
val validLevels = Seq("ALL", "DEBUG", "ERROR", "FATAL", "INFO", "OFF", "TRACE", "WARN")
356+
if (!validLevels.contains(logLevel)) {
357+
throw new IllegalArgumentException(
358+
s"Supplied level $logLevel did not match one of: ${validLevels.mkString(",")}")
359+
}
360+
Utils.setLogLevel(org.apache.log4j.Level.toLevel(logLevel))
361+
}
362+
350363
try {
351364
_conf = config.clone()
352365
_conf.validateSettings()

core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -755,6 +755,14 @@ class JavaSparkContext(val sc: SparkContext)
755755
*/
756756
def getLocalProperty(key: String): String = sc.getLocalProperty(key)
757757

758+
/** Control our logLevel. This overrides any user-defined log settings.
759+
* @param logLevel The desired log level as a string.
760+
* Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN
761+
*/
762+
def setLogLevel(logLevel: String) {
763+
sc.setLogLevel(logLevel)
764+
}
765+
758766
/**
759767
* Assigns a group ID to all the jobs started by this thread until the group ID is set to a
760768
* different value or cleared.

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2022,6 +2022,13 @@ private[spark] object Utils extends Logging {
20222022
}
20232023
}
20242024

2025+
/**
2026+
* configure a new log4j level
2027+
*/
2028+
def setLogLevel(l: org.apache.log4j.Level) {
2029+
org.apache.log4j.Logger.getRootLogger().setLevel(l)
2030+
}
2031+
20252032
/**
20262033
* config a log4j properties used for testsuite
20272034
*/

core/src/test/scala/org/apache/spark/util/UtilsSuite.scala

Lines changed: 25 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -35,9 +35,10 @@ import org.apache.hadoop.conf.Configuration
3535
import org.apache.hadoop.fs.Path
3636

3737
import org.apache.spark.network.util.ByteUnit
38+
import org.apache.spark.Logging
3839
import org.apache.spark.SparkConf
3940

40-
class UtilsSuite extends FunSuite with ResetSystemProperties {
41+
class UtilsSuite extends FunSuite with ResetSystemProperties with Logging {
4142

4243
test("timeConversion") {
4344
// Test -1
@@ -68,7 +69,7 @@ class UtilsSuite extends FunSuite with ResetSystemProperties {
6869
intercept[NumberFormatException] {
6970
Utils.timeStringAsMs("600l")
7071
}
71-
72+
7273
intercept[NumberFormatException] {
7374
Utils.timeStringAsMs("This breaks 600s")
7475
}
@@ -99,7 +100,7 @@ class UtilsSuite extends FunSuite with ResetSystemProperties {
99100
assert(Utils.byteStringAsGb("1k") === 0)
100101
assert(Utils.byteStringAsGb("1t") === ByteUnit.TiB.toGiB(1))
101102
assert(Utils.byteStringAsGb("1p") === ByteUnit.PiB.toGiB(1))
102-
103+
103104
assert(Utils.byteStringAsMb("1") === 1)
104105
assert(Utils.byteStringAsMb("1m") === 1)
105106
assert(Utils.byteStringAsMb("1048575b") === 0)
@@ -118,7 +119,7 @@ class UtilsSuite extends FunSuite with ResetSystemProperties {
118119
assert(Utils.byteStringAsKb("1g") === ByteUnit.GiB.toKiB(1))
119120
assert(Utils.byteStringAsKb("1t") === ByteUnit.TiB.toKiB(1))
120121
assert(Utils.byteStringAsKb("1p") === ByteUnit.PiB.toKiB(1))
121-
122+
122123
assert(Utils.byteStringAsBytes("1") === 1)
123124
assert(Utils.byteStringAsBytes("1k") === ByteUnit.KiB.toBytes(1))
124125
assert(Utils.byteStringAsBytes("1m") === ByteUnit.MiB.toBytes(1))
@@ -127,17 +128,17 @@ class UtilsSuite extends FunSuite with ResetSystemProperties {
127128
assert(Utils.byteStringAsBytes("1p") === ByteUnit.PiB.toBytes(1))
128129

129130
// Overflow handling, 1073741824p exceeds Long.MAX_VALUE if converted straight to Bytes
130-
// This demonstrates that we can have e.g 1024^3 PB without overflowing.
131+
// This demonstrates that we can have e.g 1024^3 PB without overflowing.
131132
assert(Utils.byteStringAsGb("1073741824p") === ByteUnit.PiB.toGiB(1073741824))
132133
assert(Utils.byteStringAsMb("1073741824p") === ByteUnit.PiB.toMiB(1073741824))
133-
134+
134135
// Run this to confirm it doesn't throw an exception
135-
assert(Utils.byteStringAsBytes("9223372036854775807") === 9223372036854775807L)
136+
assert(Utils.byteStringAsBytes("9223372036854775807") === 9223372036854775807L)
136137
assert(ByteUnit.PiB.toPiB(9223372036854775807L) === 9223372036854775807L)
137-
138+
138139
// Test overflow exception
139140
intercept[IllegalArgumentException] {
140-
// This value exceeds Long.MAX when converted to bytes
141+
// This value exceeds Long.MAX when converted to bytes
141142
Utils.byteStringAsBytes("9223372036854775808")
142143
}
143144

@@ -146,22 +147,22 @@ class UtilsSuite extends FunSuite with ResetSystemProperties {
146147
// This value exceeds Long.MAX when converted to TB
147148
ByteUnit.PiB.toTiB(9223372036854775807L)
148149
}
149-
150+
150151
// Test fractional string
151152
intercept[NumberFormatException] {
152153
Utils.byteStringAsMb("0.064")
153154
}
154-
155+
155156
// Test fractional string
156157
intercept[NumberFormatException] {
157158
Utils.byteStringAsMb("0.064m")
158159
}
159-
160+
160161
// Test invalid strings
161162
intercept[NumberFormatException] {
162163
Utils.byteStringAsBytes("500ub")
163164
}
164-
165+
165166
// Test invalid strings
166167
intercept[NumberFormatException] {
167168
Utils.byteStringAsBytes("This breaks 600b")
@@ -174,12 +175,12 @@ class UtilsSuite extends FunSuite with ResetSystemProperties {
174175
intercept[NumberFormatException] {
175176
Utils.byteStringAsBytes("600gb This breaks")
176177
}
177-
178+
178179
intercept[NumberFormatException] {
179180
Utils.byteStringAsBytes("This 123mb breaks")
180181
}
181182
}
182-
183+
183184
test("bytesToString") {
184185
assert(Utils.bytesToString(10) === "10.0 B")
185186
assert(Utils.bytesToString(1500) === "1500.0 B")
@@ -475,6 +476,15 @@ class UtilsSuite extends FunSuite with ResetSystemProperties {
475476
}
476477
}
477478

479+
// Test for using the util function to change our log levels.
480+
test("log4j log level change") {
481+
Utils.setLogLevel(org.apache.log4j.Level.ALL)
482+
assert(log.isInfoEnabled())
483+
Utils.setLogLevel(org.apache.log4j.Level.ERROR)
484+
assert(!log.isInfoEnabled())
485+
assert(log.isErrorEnabled())
486+
}
487+
478488
test("deleteRecursively") {
479489
val tempDir1 = Utils.createTempDir()
480490
assert(tempDir1.exists())

python/pyspark/context.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -267,6 +267,13 @@ def __exit__(self, type, value, trace):
267267
"""
268268
self.stop()
269269

270+
def setLogLevel(self, logLevel):
271+
"""
272+
Control our logLevel. This overrides any user-defined log settings.
273+
Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN
274+
"""
275+
self._jsc.setLogLevel(logLevel)
276+
270277
@classmethod
271278
def setSystemProperty(cls, key, value):
272279
"""

python/pyspark/sql/dataframe.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
#
22
# Licensed to the Apache Software Foundation (ASF) under one or more
3-
# contributor license agreements. See the NOTICE file distributed with
3+
# contir[butor license agreements. See the NOTICE file distributed with
44
# this work for additional information regarding copyright ownership.
55
# The ASF licenses this file to You under the Apache License, Version 2.0
66
# (the "License"); you may not use this file except in compliance with

0 commit comments

Comments
 (0)