
Commit f04b745

Fix Implicit definition should have explicit type
1 parent 51e249e commit f04b745
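
This commit addresses the scalac warning quoted in the title, "Implicit definition should have explicit type": implicit definitions in the touched files get an explicitly written result type instead of an inferred one (or, where unused, are removed), and the Scala 2.12 artifact suffixes in the build files and an R test move to 2.13. A minimal sketch of the rule (illustrative names; the -Xsource:3 flag is an assumption about how the warning is typically surfaced, not something stated by this commit):

object ImplicitTypeExample {
  final case class MaxRetries(value: Int)

  // Before (warned by newer compilers, e.g. Scala 2.13 with -Xsource:3):
  //   implicit val maxRetries = MaxRetries(3)      // result type only inferred
  // After, the pattern applied throughout this commit:
  implicit val maxRetries: MaxRetries = MaxRetries(3)

  def shouldRetry(attempt: Int)(implicit max: MaxRetries): Boolean = attempt <= max.value

  def main(args: Array[String]): Unit = println(shouldRetry(2)) // true
}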

File tree

72 files changed, +140 -149 lines


R/pkg/tests/fulltests/test_client.R (+2 -2)

@@ -37,7 +37,7 @@ test_that("multiple packages don't produce a warning", {
 
 test_that("sparkJars sparkPackages as character vectors", {
   args <- generateSparkSubmitArgs("", "", c("one.jar", "two.jar", "three.jar"), "",
-                                  c("com.databricks:spark-avro_2.12:2.0.1"))
+                                  c("com.databricks:spark-avro_2.13:2.0.1"))
   expect_match(args, "--jars one.jar,two.jar,three.jar")
-  expect_match(args, "--packages com.databricks:spark-avro_2.12:2.0.1")
+  expect_match(args, "--packages com.databricks:spark-avro_2.13:2.0.1")
 })

common/kvstore/pom.xml (+2 -2)

@@ -21,12 +21,12 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.12</artifactId>
+    <artifactId>spark-parent_2.13</artifactId>
     <version>3.5.1</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 
-  <artifactId>spark-kvstore_2.12</artifactId>
+  <artifactId>spark-kvstore_2.13</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Local DB</name>
   <url>https://spark.apache.org/</url>
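
The remaining pom.xml hunks below are the same mechanical rename: Spark artifact IDs carry the Scala binary version as a suffix, so a build against Scala 2.13 uses spark-parent_2.13 and *_2.13 module artifacts. For comparison, a small illustrative build.sbt fragment (an assumption for illustration, not part of this commit) showing how that suffix is normally produced on the consumer side:

// build.sbt sketch (illustrative): %% appends the Scala binary version, so with
// scalaVersion 2.13.x this resolves org.apache.spark:spark-sql_2.13:3.5.1.
scalaVersion := "2.13.8"
libraryDependencies += "org.apache.spark" %% "spark-sql" % "3.5.1"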

common/network-common/pom.xml (+2 -2)

@@ -21,12 +21,12 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.12</artifactId>
+    <artifactId>spark-parent_2.13</artifactId>
     <version>3.5.1</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 
-  <artifactId>spark-network-common_2.12</artifactId>
+  <artifactId>spark-network-common_2.13</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Networking</name>
   <url>https://spark.apache.org/</url>

common/network-shuffle/pom.xml (+2 -2)

@@ -21,12 +21,12 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.12</artifactId>
+    <artifactId>spark-parent_2.13</artifactId>
     <version>3.5.1</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 
-  <artifactId>spark-network-shuffle_2.12</artifactId>
+  <artifactId>spark-network-shuffle_2.13</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Shuffle Streaming Service</name>
   <url>https://spark.apache.org/</url>

common/unsafe/pom.xml (+2 -2)

@@ -21,12 +21,12 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.12</artifactId>
+    <artifactId>spark-parent_2.13</artifactId>
     <version>3.5.1</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 
-  <artifactId>spark-unsafe_2.12</artifactId>
+  <artifactId>spark-unsafe_2.13</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Unsafe</name>
   <url>https://spark.apache.org/</url>

connector/docker-integration-tests/pom.xml (+2 -2)

@@ -21,12 +21,12 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.12</artifactId>
+    <artifactId>spark-parent_2.13</artifactId>
     <version>3.5.1</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 
-  <artifactId>spark-docker-integration-tests_2.12</artifactId>
+  <artifactId>spark-docker-integration-tests_2.13</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Docker Integration Tests</name>
   <url>https://spark.apache.org/</url>

connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/JsonUtils.scala (+3 -5)

@@ -21,14 +21,14 @@ import scala.collection.mutable.HashMap
 import scala.util.control.NonFatal
 
 import org.apache.kafka.common.TopicPartition
-import org.json4s.NoTypeHints
+import org.json4s.{Formats, NoTypeHints}
 import org.json4s.jackson.Serialization
 
 /**
  * Utilities for converting Kafka related objects to and from json.
  */
 private object JsonUtils {
-  private implicit val formats = Serialization.formats(NoTypeHints)
+  private implicit val formats: Formats = Serialization.formats(NoTypeHints)
 
   /**
    * Read TopicPartitions from json string
@@ -96,10 +96,8 @@ private object JsonUtils {
    */
   def partitionOffsets(partitionOffsets: Map[TopicPartition, Long]): String = {
     val result = new HashMap[String, HashMap[Int, Long]]()
-    implicit val order = new Ordering[TopicPartition] {
-      override def compare(x: TopicPartition, y: TopicPartition): Int = {
+    implicit val order: Ordering[TopicPartition] = (x: TopicPartition, y: TopicPartition) => {
        Ordering.Tuple2[String, Int].compare((x.topic, x.partition), (y.topic, y.partition))
-      }
     }
     val partitions = partitionOffsets.keySet.toSeq.sorted // sort for more determinism
     partitions.foreach { tp =>
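
The second hunk above does slightly more than annotate: once the implicit is typed as Ordering[TopicPartition], the anonymous subclass can collapse into a function literal, because Ordering has a single abstract method (compare) and Scala 2.12+ applies SAM conversion to the lambda. A self-contained sketch of the same pattern with made-up types (not from the patch):

// Illustrative types, not Spark code: the explicit Ordering[Coord] annotation lets a
// two-argument lambda be SAM-converted, replacing `new Ordering { override def compare ... }`.
object SamOrderingExample {
  final case class Coord(row: Int, col: Int)

  implicit val coordOrdering: Ordering[Coord] = (a: Coord, b: Coord) =>
    Ordering.Tuple2[Int, Int].compare((a.row, a.col), (b.row, b.col))

  def main(args: Array[String]): Unit =
    // .sorted resolves coordOrdering implicitly, as partitionOffsets does for its keys
    println(List(Coord(2, 1), Coord(1, 3), Coord(1, 2)).sorted)
}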

core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala (+3 -2)

@@ -30,6 +30,7 @@ import scala.concurrent.ExecutionContext.Implicits.global
 import scala.concurrent.duration._
 import scala.sys.process._
 
+import org.json4s.Formats
 import org.json4s.jackson.JsonMethods
 
 import org.apache.spark.{SparkConf, SparkContext}
@@ -340,7 +341,7 @@ private object FaultToleranceTest extends App with Logging {
 private class TestMasterInfo(val ip: String, val dockerId: DockerId, val logFile: File)
   extends Logging {
 
-  implicit val formats = org.json4s.DefaultFormats
+  implicit val formats: Formats = org.json4s.DefaultFormats
   var state: RecoveryState.Value = _
   var liveWorkerIPs: List[String] = _
   var numLiveApps = 0
@@ -383,7 +384,7 @@ private class TestMasterInfo(val ip: String, val dockerId: DockerId, val logFile: File)
 private class TestWorkerInfo(val ip: String, val dockerId: DockerId, val logFile: File)
   extends Logging {
 
-  implicit val formats = org.json4s.DefaultFormats
+  implicit val formats: Formats = org.json4s.DefaultFormats
 
   logDebug("Created worker: " + this)
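
Most of the remaining Scala hunks repeat this json4s pattern. DefaultFormats is an object, so `implicit val formats = org.json4s.DefaultFormats` infers the narrow singleton type DefaultFormats.type; annotating the val as the Formats interface keeps the definition explicitly typed while still satisfying the implicit parameter the extraction methods take. A minimal sketch of that consumer side (illustrative JSON and names, not from the patch):

// json4s sketch (illustrative): extract[T] needs an implicit org.json4s.Formats in scope.
import org.json4s._
import org.json4s.jackson.JsonMethods.parse

object FormatsExample {
  implicit val formats: Formats = DefaultFormats

  def main(args: Array[String]): Unit = {
    val port = (parse("""{"host": "master", "port": 7077}""") \ "port").extract[Int]
    println(port) // 7077
  }
}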

core/src/main/scala/org/apache/spark/deploy/StandaloneResourceUtils.scala (+2 -2)

@@ -23,7 +23,7 @@ import java.nio.file.Files
 import scala.collection.mutable
 import scala.util.control.NonFatal
 
-import org.json4s.{DefaultFormats, Extraction}
+import org.json4s.{DefaultFormats, Extraction, Formats}
 import org.json4s.jackson.JsonMethods.{compact, render}
 
 import org.apache.spark.SparkException
@@ -114,7 +114,7 @@ private[spark] object StandaloneResourceUtils extends Logging {
   private def writeResourceAllocationJson[T](
       allocations: Seq[T],
       jsonFile: File): Unit = {
-    implicit val formats = DefaultFormats
+    implicit val formats: Formats = DefaultFormats
     val allocationJson = Extraction.decompose(allocations)
     Files.write(jsonFile.toPath, compact(render(allocationJson)).getBytes())
   }

core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala (+0 -2)

@@ -60,8 +60,6 @@ private[spark] class CoarseGrainedExecutorBackend(
 
   import CoarseGrainedExecutorBackend._
 
-  private implicit val formats = DefaultFormats
-
   private[spark] val stopping = new AtomicBoolean(false)
   var executor: Executor = null
   @volatile var driver: Option[RpcEndpointRef] = None

core/src/main/scala/org/apache/spark/resource/ResourceInformation.scala (+3 -3)

@@ -19,7 +19,7 @@ package org.apache.spark.resource
 
 import scala.util.control.NonFatal
 
-import org.json4s.{DefaultFormats, Extraction, JValue}
+import org.json4s.{DefaultFormats, Extraction, JValue, Formats}
 import org.json4s.jackson.JsonMethods._
 
 import org.apache.spark.SparkException
@@ -69,7 +69,7 @@ private[spark] object ResourceInformation {
    * Parses a JSON string into a [[ResourceInformation]] instance.
    */
   def parseJson(json: String): ResourceInformation = {
-    implicit val formats = DefaultFormats
+    implicit val formats: Formats = DefaultFormats
     try {
       parse(json).extract[ResourceInformationJson].toResourceInformation
     } catch {
@@ -80,7 +80,7 @@ private[spark] object ResourceInformation {
   }
 
   def parseJson(json: JValue): ResourceInformation = {
-    implicit val formats = DefaultFormats
+    implicit val formats: Formats = DefaultFormats
     try {
       json.extract[ResourceInformationJson].toResourceInformation
     } catch {

core/src/main/scala/org/apache/spark/resource/ResourceUtils.scala (+2 -2)

@@ -22,7 +22,7 @@ import java.util.Optional
 
 import scala.util.control.NonFatal
 
-import org.json4s.DefaultFormats
+import org.json4s.{DefaultFormats, Formats}
 import org.json4s.jackson.JsonMethods._
 
 import org.apache.spark.{SparkConf, SparkException}
@@ -252,7 +252,7 @@ private[spark] object ResourceUtils extends Logging {
 
   def parseAllocatedFromJsonFile(resourcesFile: String): Seq[ResourceAllocation] = {
     withResourcesJson[ResourceAllocation](resourcesFile) { json =>
-      implicit val formats = DefaultFormats
+      implicit val formats: Formats = DefaultFormats
       parse(json).extract[Seq[ResourceAllocation]]
     }
   }

core/src/main/scala/org/apache/spark/status/AppStatusSource.scala (+1 -1)

@@ -31,7 +31,7 @@ private [spark] class JobDuration(val value: AtomicLong) extends Gauge[Long] {
 
 private[spark] class AppStatusSource extends Source {
 
-  override implicit val metricRegistry = new MetricRegistry()
+  override implicit val metricRegistry: MetricRegistry = new MetricRegistry()
 
   override val sourceName = "appStatus"

core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala (+2 -2)

@@ -23,7 +23,7 @@ import java.util.concurrent.TimeUnit
 
 import scala.collection.JavaConverters._
 import scala.collection.mutable
-import scala.concurrent.{ExecutionContext, Future, TimeoutException}
+import scala.concurrent.{ExecutionContext, ExecutionContextExecutorService, Future, TimeoutException}
 import scala.util.Random
 import scala.util.control.NonFatal
 
@@ -94,7 +94,7 @@ class BlockManagerMasterEndpoint(
 
   private val askThreadPool =
     ThreadUtils.newDaemonCachedThreadPool("block-manager-ask-thread-pool", 100)
-  private implicit val askExecutionContext = ExecutionContext.fromExecutorService(askThreadPool)
+  private implicit val askExecutionContext: ExecutionContextExecutorService = ExecutionContext.fromExecutorService(askThreadPool)
 
   private val topologyMapper = {
     val topologyMapperClassName = conf.get(
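
ExecutionContext.fromExecutorService already returns an ExecutionContextExecutorService, so the annotation only writes down what was previously inferred; the import is widened so the type name is in scope. A small standalone sketch of the same pattern (pool and usage are illustrative, not Spark's):

// Illustrative, not Spark code: an explicitly typed implicit execution context
// built from a Java ExecutorService, picked up by Future.apply.
import java.util.concurrent.Executors
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext, ExecutionContextExecutorService, Future}

object AskPoolExample {
  private val pool = Executors.newCachedThreadPool()
  private implicit val askExecutionContext: ExecutionContextExecutorService =
    ExecutionContext.fromExecutorService(pool)

  def main(args: Array[String]): Unit = {
    val answer = Future(21 * 2)                    // uses askExecutionContext implicitly
    println(Await.result(answer, Duration(1, "s")))
    askExecutionContext.shutdown()                 // ExecutorService methods remain available
  }
}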

core/src/main/scala/org/apache/spark/storage/BlockManagerStorageEndpoint.scala (+2 -2)

@@ -17,7 +17,7 @@
 
 package org.apache.spark.storage
 
-import scala.concurrent.{ExecutionContext, Future}
+import scala.concurrent.{ExecutionContext, ExecutionContextExecutorService, Future}
 
 import org.apache.spark.{MapOutputTracker, SparkEnv}
 import org.apache.spark.internal.Logging
@@ -38,7 +38,7 @@ class BlockManagerStorageEndpoint(
 
   private val asyncThreadPool =
     ThreadUtils.newDaemonCachedThreadPool("block-manager-storage-async-thread-pool", 100)
-  private implicit val asyncExecutionContext = ExecutionContext.fromExecutorService(asyncThreadPool)
+  private implicit val asyncExecutionContext: ExecutionContextExecutorService = ExecutionContext.fromExecutorService(asyncThreadPool)
 
   // Operations that involve removing blocks may be slow and should be done asynchronously
   override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {

core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala (+1 -1)

@@ -42,7 +42,7 @@ import org.apache.spark.storage._
 abstract class ContextCleanerSuiteBase(val shuffleManager: Class[_] = classOf[SortShuffleManager])
   extends SparkFunSuite with BeforeAndAfter with LocalSparkContext
 {
-  implicit val defaultTimeout = timeout(10.seconds)
+  implicit val defaultTimeout: PatienceConfiguration.Timeout = timeout(10.seconds)
   val conf = new SparkConf()
     .setMaster("local[2]")
     .setAppName("ContextCleanerSuite")

core/src/test/scala/org/apache/spark/SparkContextSuite.scala (+2 -2)

@@ -32,7 +32,7 @@ import org.apache.hadoop.io.{BytesWritable, LongWritable, Text}
 import org.apache.hadoop.mapred.TextInputFormat
 import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
 import org.apache.logging.log4j.{Level, LogManager}
-import org.json4s.{DefaultFormats, Extraction}
+import org.json4s.{DefaultFormats, Extraction, Formats}
 import org.scalatest.concurrent.Eventually
 import org.scalatest.matchers.must.Matchers._
 
@@ -923,7 +923,7 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventually
     val scriptPath = createTempScriptWithExpectedOutput(dir, "gpuDiscoveryScript",
       """{"name": "gpu","addresses":["5", "6"]}""")
 
-    implicit val formats = DefaultFormats
+    implicit val formats: Formats = DefaultFormats
     val gpusAllocated =
       ResourceAllocation(DRIVER_GPU_ID, Seq("0", "1", "8"))
     val ja = Extraction.decompose(Seq(gpusAllocated))
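
Extraction.decompose is the other json4s entry point used in these tests that needs an implicit Formats, hence the same annotation here. A standalone sketch (the case class is a made-up stand-in, not Spark's ResourceAllocation):

// Illustrative stand-in types, not from the patch: decompose serializes a value
// to a JValue using the implicit Formats, then render/compact print it.
import org.json4s.{DefaultFormats, Extraction, Formats}
import org.json4s.jackson.JsonMethods.{compact, render}

object DecomposeExample {
  final case class Allocation(id: String, addresses: Seq[String])

  implicit val formats: Formats = DefaultFormats

  def main(args: Array[String]): Unit = {
    val json = Extraction.decompose(Seq(Allocation("gpu", Seq("0", "1", "8"))))
    println(compact(render(json))) // [{"id":"gpu","addresses":["0","1","8"]}]
  }
}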

core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala (+2 -1)

@@ -29,6 +29,7 @@ import scala.concurrent.duration._
 import com.google.common.io.{ByteStreams, Files}
 import org.apache.commons.io.{FileUtils, IOUtils}
 import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
+import org.json4s.Formats
 import org.json4s.JsonAST._
 import org.json4s.jackson.JsonMethods
 import org.json4s.jackson.JsonMethods._
@@ -380,7 +381,7 @@ abstract class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with
 
   test("incomplete apps get refreshed") {
     implicit val webDriver: WebDriver = new HtmlUnitDriver
-    implicit val formats = org.json4s.DefaultFormats
+    implicit val formats: Formats = org.json4s.DefaultFormats
 
     // this test dir is explicitly deleted on successful runs; retained for diagnostics when
     // not

core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala (+4 -4)

@@ -327,7 +327,7 @@ class MasterSuite extends SparkFunSuite
   }
 
   test("SPARK-46888: master should reject worker kill request if decommision is disabled") {
-    implicit val formats = org.json4s.DefaultFormats
+    implicit val formats: Formats = org.json4s.DefaultFormats
     val conf = new SparkConf()
       .set(DECOMMISSION_ENABLED, false)
       .set(MASTER_UI_DECOMMISSION_ALLOW_MODE, "ALLOW")
@@ -347,7 +347,7 @@ class MasterSuite extends SparkFunSuite
   }
 
   test("master/worker web ui available") {
-    implicit val formats = org.json4s.DefaultFormats
+    implicit val formats: Formats = org.json4s.DefaultFormats
     val conf = new SparkConf()
     val localCluster = LocalSparkCluster(2, 2, 512, conf)
     localCluster.start()
@@ -383,7 +383,7 @@ class MasterSuite extends SparkFunSuite
   }
 
   test("master/worker web ui available with reverseProxy") {
-    implicit val formats = org.json4s.DefaultFormats
+    implicit val formats: Formats = org.json4s.DefaultFormats
     val conf = new SparkConf()
     conf.set(UI_REVERSE_PROXY, true)
     val localCluster = LocalSparkCluster(2, 2, 512, conf)
@@ -419,7 +419,7 @@ class MasterSuite extends SparkFunSuite
   }
 
   test("master/worker web ui available behind front-end reverseProxy") {
-    implicit val formats = org.json4s.DefaultFormats
+    implicit val formats: Formats = org.json4s.DefaultFormats
     val reverseProxyUrl = "http://proxyhost:8080/path/to/spark"
     val conf = new SparkConf()
     conf.set(UI_REVERSE_PROXY, true)

core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala (+2 -2)

@@ -23,7 +23,7 @@ import java.util.function.Supplier
 
 import scala.concurrent.duration._
 
-import org.json4s.{DefaultFormats, Extraction}
+import org.json4s.{DefaultFormats, Extraction, Formats}
 import org.mockito.{Mock, MockitoAnnotations}
 import org.mockito.Answers.RETURNS_SMART_NULLS
 import org.mockito.ArgumentMatchers.any
@@ -60,7 +60,7 @@ class WorkerSuite extends SparkFunSuite with Matchers with BeforeAndAfter {
   }
   def conf(opts: (String, String)*): SparkConf = new SparkConf(loadDefaults = false).setAll(opts)
 
-  implicit val formats = DefaultFormats
+  implicit val formats: Formats = DefaultFormats
 
   private var _worker: Worker = _

core/src/test/scala/org/apache/spark/executor/CoarseGrainedExecutorBackendSuite.scala (+2 -2)

@@ -26,7 +26,7 @@ import java.util.concurrent.atomic.AtomicInteger
 import scala.collection.concurrent.TrieMap
 import scala.concurrent.duration._
 
-import org.json4s.{DefaultFormats, Extraction}
+import org.json4s.{DefaultFormats, Extraction, Formats}
 import org.json4s.JsonAST.{JArray, JObject}
 import org.json4s.JsonDSL._
 import org.mockito.ArgumentMatchers.any
@@ -50,7 +50,7 @@ import org.apache.spark.util.{SerializableBuffer, ThreadUtils, Utils}
 class CoarseGrainedExecutorBackendSuite extends SparkFunSuite
     with LocalSparkContext with MockitoSugar {
 
-  implicit val formats = DefaultFormats
+  implicit val formats: Formats = DefaultFormats
 
   test("parsing no resources") {
     val conf = new SparkConf

core/src/test/scala/org/apache/spark/resource/ResourceUtilsSuite.scala (+3 -3)

@@ -21,7 +21,7 @@ import java.io.File
 import java.nio.file.{Files => JavaFiles}
 import java.util.Optional
 
-import org.json4s.{DefaultFormats, Extraction}
+import org.json4s.{DefaultFormats, Extraction, Formats}
 
 import org.apache.spark.{LocalSparkContext, SparkConf, SparkException, SparkFunSuite}
 import org.apache.spark.TestUtils._
@@ -117,7 +117,7 @@ class ResourceUtilsSuite extends SparkFunSuite
     val conf = new SparkConf
     assume(!(Utils.isWindows))
     withTempDir { dir =>
-      implicit val formats = DefaultFormats
+      implicit val formats: Formats = DefaultFormats
      val fpgaAddrs = Seq("f1", "f2", "f3")
      val fpgaAllocation = ResourceAllocation(EXECUTOR_FPGA_ID, fpgaAddrs)
      val resourcesFile = createTempJsonFile(
@@ -146,7 +146,7 @@ class ResourceUtilsSuite extends SparkFunSuite
     val rpId = 1
     assume(!(Utils.isWindows))
     withTempDir { dir =>
-      implicit val formats = DefaultFormats
+      implicit val formats: Formats = DefaultFormats
      val fpgaAddrs = Seq("f1", "f2", "f3")
      val fpgaAllocation = ResourceAllocation(EXECUTOR_FPGA_ID, fpgaAddrs)
      val resourcesFile = createTempJsonFile(
