· 6 years ago · Mar 28, 2020, 05:16 PM
diff ./SparkEnv.scala /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/SparkEnv.scala
24a25
> import scala.collection.concurrent
197a199
> bindAddress: String,
205c207
< hostname,
---
> bindAddress,
215a218,228
> private[spark] def createExecutorEnv(
> conf: SparkConf,
> executorId: String,
> hostname: String,
> numCores: Int,
> ioEncryptionKey: Option[Array[Byte]],
> isLocal: Boolean): SparkEnv = {
> createExecutorEnv(conf, executorId, hostname,
> hostname, numCores, ioEncryptionKey, isLocal)
> }
>
342,354c355,374
< val blockManagerMaster = new BlockManagerMaster(registerOrLookupEndpoint(
< BlockManagerMaster.DRIVER_ENDPOINT_NAME,
< new BlockManagerMasterEndpoint(
< rpcEnv,
< isLocal,
< conf,
< listenerBus,
< if (conf.get(config.SHUFFLE_SERVICE_FETCH_RDD_ENABLED)) {
< externalShuffleClient
< } else {
< None
< })),
< conf, isDriver)
---
> // Mapping from block manager id to the block manager's information.
> val blockManagerInfo = new concurrent.TrieMap[BlockManagerId, BlockManagerInfo]()
> val blockManagerMaster = new BlockManagerMaster(
> registerOrLookupEndpoint(
> BlockManagerMaster.DRIVER_ENDPOINT_NAME,
> new BlockManagerMasterEndpoint(
> rpcEnv,
> isLocal,
> conf,
> listenerBus,
> if (conf.get(config.SHUFFLE_SERVICE_FETCH_RDD_ENABLED)) {
> externalShuffleClient
> } else {
> None
> }, blockManagerInfo)),
> registerOrLookupEndpoint(
> BlockManagerMaster.DRIVER_HEARTBEAT_ENDPOINT_NAME,
> new BlockManagerMasterHeartbeatEndpoint(rpcEnv, isLocal, blockManagerInfo)),
> conf,
> isDriver)
386c406
< ms.start()
---
> ms.start(conf.get(METRICS_STATIC_SOURCES_ENABLED))
diff ./TaskContext.scala /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/TaskContext.scala
187a188,195
> /**
> * (java-specific) Resources allocated to the task. The key is the resource name and the value
> * is information about the resource. Please refer to
> * [[org.apache.spark.resource.ResourceInformation]] for specifics.
> */
> @Evolving
> def resourcesJMap(): java.util.Map[String, ResourceInformation]
>
diff ./TaskContextImpl.scala /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/TaskContextImpl.scala
22a23
> import scala.collection.JavaConverters._
103a105,108
> override def resourcesJMap(): java.util.Map[String, ResourceInformation] = {
> resources.asJava
> }
>
Only in /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark: TaskOutputFileAlreadyExistException.scala
diff ./TestUtils.scala /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/TestUtils.scala
27c27
< import java.util.{Arrays, EnumSet, Properties}
---
> import java.util.{Arrays, EnumSet, Locale, Properties}
29c29
< import java.util.jar.{JarEntry, JarOutputStream}
---
> import java.util.jar.{JarEntry, JarOutputStream, Manifest}
100c100,114
< def createJar(files: Seq[File], jarFile: File, directoryPrefix: Option[String] = None): URL = {
---
> def createJar(
> files: Seq[File],
> jarFile: File,
> directoryPrefix: Option[String] = None,
> mainClass: Option[String] = None): URL = {
> val manifest = mainClass match {
> case Some(mc) =>
> val m = new Manifest()
> m.getMainAttributes.putValue("Manifest-Version", "1.0")
> m.getMainAttributes.putValue("Main-Class", mc)
> m
> case None =>
> new Manifest()
> }
>
102c116
< val jarStream = new JarOutputStream(jarFileStream, new java.util.jar.Manifest())
---
> val jarStream = new JarOutputStream(jarFileStream, manifest)
203c217,225
< def assertExceptionMsg(exception: Throwable, msg: String): Unit = {
---
> def assertExceptionMsg(exception: Throwable, msg: String, ignoreCase: Boolean = false): Unit = {
> def contain(msg1: String, msg2: String): Boolean = {
> if (ignoreCase) {
> msg1.toLowerCase(Locale.ROOT).contains(msg2.toLowerCase(Locale.ROOT))
> } else {
> msg1.contains(msg2)
> }
> }
>
205c227
< var contains = e.getMessage.contains(msg)
---
> var contains = contain(e.getMessage, msg)
208c230
< contains = e.getMessage.contains(msg)
---
> contains = contain(e.getMessage, msg)
Only in .: annotation
Common subdirectories: ./api and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/api
Common subdirectories: ./broadcast and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/broadcast
Common subdirectories: ./deploy and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/deploy
Common subdirectories: ./executor and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/executor
Common subdirectories: ./input and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/input
Common subdirectories: ./internal and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/internal
Common subdirectories: ./io and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/io
Common subdirectories: ./launcher and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/launcher
Common subdirectories: ./mapred and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/mapred
Common subdirectories: ./memory and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/memory
Common subdirectories: ./metrics and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/metrics
Common subdirectories: ./network and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/network
Common subdirectories: ./partial and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/partial
Common subdirectories: ./rdd and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/rdd
Common subdirectories: ./resource and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/resource
Common subdirectories: ./rpc and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/rpc
Common subdirectories: ./scheduler and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/scheduler
Common subdirectories: ./security and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/security
Common subdirectories: ./serializer and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/serializer
Common subdirectories: ./shuffle and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/shuffle
Common subdirectories: ./status and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/status
Common subdirectories: ./storage and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/storage
Common subdirectories: ./ui and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/ui
Common subdirectories: ./util and /Users/bimde/Downloads/spark-f280c6aa54d80251da66ab370d32a7d93b01f225/core/src/main/scala/org/apache/spark/util