Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Sign up
Appearance settings

Commit c7ab0cd

Browse files
No extraneous errors when running the listener (#251)
In build.sbt, we don't set `-Dlog4j.configuration` so the launcher uses the default in src/main/resources. We explicitly override the java options for both `test` and `run` forks. In IntpHandler.scala, we bind the Spark global variables to `intp`, so `intp` inherits logging and other spark configs from the main java process. In log4j.properties, we set `log4j.appender.console.target` to print to stdout instead of stderr. We can't get rid of the `WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable` warning without introducing a Hadoop dependency: https://stackoverflow.com/questions/40015416/spark-unable-to-load-native-hadoop-library-for-your-platform. It's fine to use the Java classes, since we don't need HDFS for a local listener anyway.
1 parent eb1707f commit c7ab0cd

File tree

4 files changed

+23
-20
lines changed

4 files changed

+23
-20
lines changed

‎build.sbt

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -91,9 +91,9 @@ assemblyShadeRules in (Test, assembly) := commonShadeRules
9191
testOptions in Test += Tests.Argument("-oF")
9292

9393
lazy val commonJavaOptions =
94-
Seq("-Xmx2048m", "-XX:ReservedCodeCacheSize=384m", "-Dlog4j.configuration=log4j.properties")
95-
javaOptions in Test ++= commonJavaOptions
96-
javaOptions in run ++= commonJavaOptions ++ Seq("-Dspark.master=local[*]")
94+
Seq("-Xmx4096m", "-XX:ReservedCodeCacheSize=384m")
95+
Test / javaOptions := commonJavaOptions
96+
run / javaOptions := commonJavaOptions ++ Seq("-Dspark.master=local[1]")
9797

9898
scalacOptions ++= Seq(
9999
"-deprecation",

‎src/main/resources/log4j.properties

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
# Set everything to be logged to the console
22
log4j.rootCategory=INFO, console
33
log4j.appender.console=org.apache.log4j.ConsoleAppender
4-
log4j.appender.console.target=System.err
4+
log4j.appender.console.target=System.out
55
log4j.appender.console.layout=org.apache.log4j.PatternLayout
66
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
77

‎src/main/scala/edu/berkeley/cs/rise/opaque/rpc/IntpHandler.scala

Lines changed: 18 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
package edu.berkeley.cs.rise.opaque.rpc
1919

2020
import org.apache.spark.SparkConf
21+
import org.apache.spark.sql.SparkSession
2122

2223
import scala.tools.nsc.interpreter.IMain
2324
import scala.tools.nsc.interpreter.IR.Result
@@ -29,27 +30,32 @@ import java.io._
2930
/* Handler to simplify writing to an instance of OpaqueILoop. */
3031
object IntpHandler {
3132

32-
/* We need to include the jars provided to Spark in the new IMain's classpath. */
33-
val sparkJars = new SparkConf().get("spark.jars", "").replace(",", ":")
33+
val intp = {
34+
/* We need to include the jars provided to Spark in the new IMain's classpath. */
35+
val sparkJars = new SparkConf().get("spark.jars", "").replace(",", ":")
3436

35-
val settings = new GenericRunnerSettings(msg => Console.err.println(msg))
36-
settings.classpath.value = sys.props("java.class.path").concat(":").concat(sparkJars)
37+
val settings = new GenericRunnerSettings(System.err.println(_))
38+
settings.classpath.value = sys.props("java.class.path").concat(":").concat(sparkJars)
39+
settings.usejavacp.value = true
3740

38-
/* Construct an interpreter that routes all output to stdout. */
39-
val intp = IMain(settings)
41+
new IMain(settings)
42+
}
4043

4144
/* Initial commands for the interpreter to execute.
4245
* This has to be called before any call to IntpHandler.run
4346
*/
4447
def initializeIntp() = {
4548
intp.initializeSynchronous()
49+
50+
println(
51+
"############################# Commands from Spark startup #############################"
52+
)
53+
val spark = SparkSession.builder.getOrCreate
54+
intp.bind("spark", spark)
55+
val sc = spark.sparkContext
56+
intp.bind("sc", sc)
4657
val initializationCommands = Seq(
4758
"""
48-
@transient val spark = if (org.apache.spark.repl.Main.sparkSession != null) {
49-
org.apache.spark.repl.Main.sparkSession
50-
} else {
51-
org.apache.spark.repl.Main.createSparkSession()
52-
}
5359
@transient val sc = {
5460
val _sc = spark.sparkContext
5561
if (_sc.getConf.getBoolean("spark.ui.reverseProxy", false)) {
@@ -82,10 +88,7 @@ object IntpHandler {
8288
edu.berkeley.cs.rise.opaque.Utils.initOpaqueSQL(spark)
8389
"""
8490
)
85-
val cap, _ = run(initializationCommands)
86-
println(
87-
"############################# Commands from Spark startup #############################"
88-
)
91+
val (cap, _) = run(initializationCommands)
8992
println(cap)
9093
println(
9194
"#######################################################################################"

‎src/test/resources/log4j.properties

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
# Set everything to be logged to the console
22
log4j.rootCategory=WARN, console
33
log4j.appender.console=org.apache.log4j.ConsoleAppender
4-
log4j.appender.console.target=System.err
4+
log4j.appender.console.target=System.out
55
log4j.appender.console.layout=org.apache.log4j.PatternLayout
66
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
77

0 commit comments

Comments
(0)

AltStyle によって変換されたページ (->オリジナル) /