Skip to content

Commit 9d8c3fd

Browse files
fix implicit types
1 parent 323098d commit 9d8c3fd

File tree

7 files changed

+12
-11
lines changed

7 files changed

+12
-11
lines changed

sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -1589,7 +1589,7 @@ class Dataset[T] private[sql](
15891589
* @since 1.6.0
15901590
*/
15911591
def select[U1](c1: TypedColumn[T, U1]): Dataset[U1] = {
1592-
implicit val encoder: ExpressionEncoder[T] = c1.encoder
1592+
implicit val encoder: ExpressionEncoder[U1] = c1.encoder
15931593
val project = Project(c1.withInputType(exprEnc, logicalPlan.output).named :: Nil, logicalPlan)
15941594

15951595
if (!encoder.isSerializedAsStructForTopLevel) {

sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/CommitLog.scala

+2-2
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ import java.nio.charset.StandardCharsets._
2222

2323
import scala.io.{Source => IOSource}
2424

25-
import org.json4s.NoTypeHints
25+
import org.json4s.{Formats, NoTypeHints}
2626
import org.json4s.jackson.Serialization
2727

2828
import org.apache.spark.sql.SparkSession
@@ -82,7 +82,7 @@ case class CommitMetadata(nextBatchWatermarkMs: Long = 0) {
8282
}
8383

8484
object CommitMetadata {
85-
implicit val format = Serialization.formats(NoTypeHints)
85+
implicit val format: Formats = Serialization.formats(NoTypeHints)
8686

8787
def apply(json: String): CommitMetadata = Serialization.read[CommitMetadata](json)
8888
}

sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/FileStreamSourceOffset.scala

+2-2
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.streaming
1919

2020
import scala.util.control.Exception._
2121

22-
import org.json4s.NoTypeHints
22+
import org.json4s.{Formats, NoTypeHints}
2323
import org.json4s.jackson.Serialization
2424

2525
/**
@@ -34,7 +34,7 @@ case class FileStreamSourceOffset(logOffset: Long) extends Offset {
3434
}
3535

3636
object FileStreamSourceOffset {
37-
implicit val format = Serialization.formats(NoTypeHints)
37+
implicit val format: Formats = Serialization.formats(NoTypeHints)
3838

3939
def apply(offset: Offset): FileStreamSourceOffset = {
4040
offset match {

sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/HDFSMetadataLog.scala

+1
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,7 @@ class HDFSMetadataLog[T <: AnyRef : ClassTag](sparkSession: SparkSession, path:
5252
private implicit val formats: Formats = Serialization.formats(NoTypeHints)
5353

5454
/** Needed to serialize type T into JSON when using Jackson */
55+
@scala.annotation.nowarn
5556
private implicit val manifest = Manifest.classType[T](implicitly[ClassTag[T]].runtimeClass)
5657

5758
// Avoid serializing generic sequences, see SPARK-17372

sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/OffsetSeq.scala

+2-2
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717

1818
package org.apache.spark.sql.execution.streaming
1919

20-
import org.json4s.NoTypeHints
20+
import org.json4s.{Formats, NoTypeHints}
2121
import org.json4s.jackson.Serialization
2222

2323
import org.apache.spark.internal.Logging
@@ -88,7 +88,7 @@ case class OffsetSeqMetadata(
8888
}
8989

9090
object OffsetSeqMetadata extends Logging {
91-
private implicit val format = Serialization.formats(NoTypeHints)
91+
private implicit val format: Formats = Serialization.formats(NoTypeHints)
9292
/**
9393
* These configs are related to streaming query execution and should not be changed across
9494
* batches of a streaming query. The values of these configs are persisted into the offset

sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StreamMetadata.scala

+2-2
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ import scala.util.control.NonFatal
2525
import org.apache.commons.io.IOUtils
2626
import org.apache.hadoop.conf.Configuration
2727
import org.apache.hadoop.fs.{FileAlreadyExistsException, FSDataInputStream, Path}
28-
import org.json4s.NoTypeHints
28+
import org.json4s.{Formats, NoTypeHints}
2929
import org.json4s.jackson.Serialization
3030

3131
import org.apache.spark.internal.Logging
@@ -45,7 +45,7 @@ case class StreamMetadata(id: String) {
4545
}
4646

4747
object StreamMetadata extends Logging {
48-
implicit val format = Serialization.formats(NoTypeHints)
48+
implicit val format: Formats = Serialization.formats(NoTypeHints)
4949

5050
/** Read the metadata from file if it exists */
5151
def read(metadataFile: Path, hadoopConf: Configuration): Option[StreamMetadata] = {

sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/RocksDBFileManager.scala

+2-2
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ import com.fasterxml.jackson.module.scala.{ClassTagExtensions, DefaultScalaModul
3434
import org.apache.commons.io.{FilenameUtils, IOUtils}
3535
import org.apache.hadoop.conf.Configuration
3636
import org.apache.hadoop.fs.{FileStatus, Path, PathFilter}
37-
import org.json4s.NoTypeHints
37+
import org.json4s.{Formats, NoTypeHints}
3838
import org.json4s.jackson.Serialization
3939

4040
import org.apache.spark.{SparkConf, SparkEnv}
@@ -737,7 +737,7 @@ case class RocksDBCheckpointMetadata(
737737
object RocksDBCheckpointMetadata {
738738
val VERSION = 1
739739

740-
implicit val format = Serialization.formats(NoTypeHints)
740+
implicit val format: Formats = Serialization.formats(NoTypeHints)
741741

742742
/** Used to convert between classes and JSON. */
743743
lazy val mapper = {

0 commit comments

Comments (0)