Commit 527ab20

Fix compilation warnings in Scala 2.13 (#59)

1 parent: 50d81a9

File tree: 7 files changed (+117, −44 lines)


build.sbt (1 addition, 0 deletions)

@@ -126,6 +126,7 @@ def generateSparkModule(sparkVersion: String): Project = {
     Compile / resourceDirectory := baseDirectory.value / pathFromModule("src/main/resources"),
     Test / scalaSource := baseDirectory.value / pathFromModule("src/test/scala"),
     Test / resourceDirectory := baseDirectory.value / pathFromModule("src/test/resources"),
+    Test / parallelExecution := false,
     crossScalaVersions := sparkScalaVersions,
     enablingPublishingSettings,
     coverageConfig,
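Why this setting: later in this commit the Spark test suites switch to a single shared SparkSession (see SparkSessionBeforeAfterAll below), and suites running in parallel could presumably create and close that session concurrently. A minimal sbt sketch of the same idea, assuming nothing beyond stock sbt:

// build.sbt sketch: run this module's test suites one at a time, so a
// suite-level shared resource (here, a local SparkSession) is never torn
// down while another suite is still using it.
Test / parallelExecution := false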

core/src/main/scala/com/acervera/osm4scala/DenseNodesIterator.scala (4 additions, 4 deletions)

@@ -56,10 +56,10 @@ class DenseNodesIterator(osmosisStringTable: StringTable,
     throw new Exception("Only visible nodes are implemented.")
   }

-  private val idIterator = osmosisDenseNode.id.toIterator
-  private val lonIterator = osmosisDenseNode.lon.toIterator
-  private val latIterator = osmosisDenseNode.lat.toIterator
-  private val tagsIterator = osmosisDenseNode.keysVals.toIterator
+  private val idIterator = osmosisDenseNode.id.iterator
+  private val lonIterator = osmosisDenseNode.lon.iterator
+  private val latIterator = osmosisDenseNode.lat.iterator
+  private val tagsIterator = osmosisDenseNode.keysVals.iterator

   private var lastNode: NodeEntity = NodeEntity(0, 0, 0, Map())
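For context: Scala 2.13 deprecates `.toIterator` on collections in favour of the equivalent `.iterator`, so this is a drop-in rename. A minimal sketch of the warning being silenced, using only the standard library:

// Scala 2.13 warns: "method toIterator ... is deprecated: Use .iterator
// instead". Both expressions yield the same Iterator over the elements.
val ids = Seq(1L, 2L, 3L)
val it: Iterator[Long] = ids.iterator // was: ids.toIterator
println(it.mkString(","))             // prints 1,2,3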

core/src/main/scala/com/acervera/osm4scala/FromPbfFileEntitiesIterator.scala (2 additions, 2 deletions)

@@ -48,7 +48,7 @@ class FromPbfFileEntitiesIterator(pbfInputStream: InputStreamSentinel) extends E
     osmEntitiesIterator.isDefined && (osmEntitiesIterator.get.hasNext || blobIterator.hasNext)

   override def next(): OSMEntity = {
-    val nextEntity = osmEntitiesIterator.get.next
+    val nextEntity = osmEntitiesIterator.get.next()

     if (!osmEntitiesIterator.get.hasNext) {
       osmEntitiesIterator = readNextBlock()
@@ -62,7 +62,7 @@ class FromPbfFileEntitiesIterator(pbfInputStream: InputStreamSentinel) extends E
     */
   private def readNextBlock() =
     if (blobIterator.hasNext) {
-      Some(EntityIterator.fromBlob(blobIterator.next._2))
+      Some(EntityIterator.fromBlob(blobIterator.next()._2))
     } else {
       None
     }
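These two changes fix the 2.13 "auto-application" deprecation: a method declared with an empty parameter list, such as `Iterator#next()`, should now be called with its parentheses. A minimal sketch, standard library only:

// Scala 2.13 deprecates auto-application: calling a ()-declared method
// without its parentheses still compiles, but emits a warning.
val letters = Iterator("a", "b", "c")
val first = letters.next() // was: letters.next (warns under 2.13)
println(first)             // prints a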

examples/counter-akka/src/main/scala/com/acervera/osm4scala/examples/counterakka/ControllerActor.scala (2 additions, 4 deletions)

@@ -50,9 +50,7 @@ class ControllerActor(pbfFile: File, filterByOsmType: Option[OSMTypes.Value]) ex

   val startTime = System.currentTimeMillis()

-  override def postStop() {
-    pbfIS.close()
-  }
+  override def postStop(): Unit = pbfIS.close()

   override def receive = {

@@ -74,7 +72,7 @@ class ControllerActor(pbfFile: File, filterByOsmType: Option[OSMTypes.Value]) ex
     // Handle the count when the counter finishes, and process the next blob if one is available.
     case CounterResponse(count) => {
       counter += count
-      nextBlob(sender)
+      nextBlob(sender())
     }

   }
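Two 2.13 cleanups meet in this file: procedure syntax (`def f() { ... }`) is deprecated in favour of an explicit `: Unit =` result type, and Akka's `sender()` is declared with empty parens, so it too should be applied explicitly. A hypothetical minimal actor (not from the repo) showing both fixes together:

import akka.actor.Actor

// Sketch of both 2.13 fixes in one classic Akka actor.
class EchoActor extends Actor {

  // was: override def postStop() { ... }  (procedure syntax, deprecated)
  override def postStop(): Unit =
    println("echo actor stopped")

  override def receive: Receive = {
    // was: sender ! msg  (auto-application of Actor#sender(), deprecated)
    case msg => sender() ! msg
  }
}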

examples/counter-akka/src/main/scala/com/acervera/osm4scala/examples/counterakka/CounterActor.scala (1 addition, 1 deletion)

@@ -54,7 +54,7 @@ class CounterActor(filterType: Option[OSMTypes.Value]) extends Actor {

   override def receive = {
     case BlobTupleMsg(header, blob) =>
-      sender ! CounterResponse(count(blob))
+      sender() ! CounterResponse(count(blob))
   }

 }

spark/src/test/scala/com/acervera/osm4scala/spark/OsmPbfFormatSpec.scala (25 additions, 33 deletions)

@@ -29,13 +29,12 @@ import java.io.File

 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.{DataFrame, Row, SaveMode, SparkSession, functions => fn}
-import org.scalatest.BeforeAndAfterAll
 import org.scalatest.matchers.should.Matchers
 import org.scalatest.wordspec.AnyWordSpec

 import scala.util.Random

-class OsmPbfFormatSpec extends AnyWordSpec with Matchers with BeforeAndAfterAll {
+class OsmPbfFormatSpec extends AnyWordSpec with Matchers with SparkSessionBeforeAfterAll {

   def withTemporalFolder(testCode: File => Any): Unit =
     testCode(
@@ -44,42 +43,28 @@ class OsmPbfFormatSpec extends AnyWordSpec with Matchers with BeforeAndAfterAll
       )
     )

-  val cores = 4
   val madridPath = "core/src/test/resources/com/acervera/osm4scala/Madrid.bbbike.osm.pbf"
   val monacoPath = "core/src/test/resources/com/acervera/osm4scala/monaco-latest.osm.pbf"

-  val sparkSession = SparkSession
-    .builder()
-    .master(s"local[$cores]")
-    .getOrCreate()
-
-  import sparkSession.implicits._
-
-  val sqlContext = sparkSession.sqlContext
-
-  def loadOsmPbf(path: String, tableName: Option[String] = None): DataFrame = {
-    val df = sqlContext.read
+  def loadOsmPbf(spark: SparkSession, path: String, tableName: Option[String] = None): DataFrame = {
+    val df = spark.sqlContext.read
       .format("osm.pbf")
       .load(path)
       .repartition(cores * 2)
-    tableName.foreach(df.createTempView)
+    tableName.foreach(df.createOrReplaceTempView)
     df
   }

-  override protected def afterAll(): Unit = {
-    sparkSession.close()
-  }
-
   "OsmPbfFormat" should {

     "parsing all only one time" in {
-      val entitiesCount = loadOsmPbf(madridPath).count()
+      val entitiesCount = loadOsmPbf(spark, madridPath).count()
       entitiesCount shouldBe 2677227
     }

     "parser correctly" when {
       "is parsing nodes" in {
-        val node171946 = loadOsmPbf(madridPath).filter("id == 171946").collect()(0)
+        val node171946 = loadOsmPbf(spark, madridPath).filter("id == 171946").collect()(0)
         node171946.getAs[Long]("id") shouldBe 171946L
         node171946.getAs[Byte]("type") shouldBe 0
         node171946.getAs[Double]("latitude") shouldBe (40.42125 +- 0.001)
@@ -92,7 +77,7 @@ class OsmPbfFormatSpec extends AnyWordSpec with Matchers with BeforeAndAfterAll
       }

       "is parsing ways" in {
-        val way3996192 = loadOsmPbf(madridPath).filter("id == 3996192").collect()(0)
+        val way3996192 = loadOsmPbf(spark, madridPath).filter("id == 3996192").collect()(0)
         way3996192.getAs[Long]("id") shouldBe 3996192L
         way3996192.getAs[Byte]("type") shouldBe 1
         way3996192.getAs[AnyRef]("latitude") should be(null)
@@ -109,7 +94,7 @@ class OsmPbfFormatSpec extends AnyWordSpec with Matchers with BeforeAndAfterAll
       }

       "is parsing relations" in {
-        val relation55799 = loadOsmPbf(madridPath).filter("id == 55799").collect()(0)
+        val relation55799 = loadOsmPbf(spark, madridPath).filter("id == 55799").collect()(0)
         relation55799.getAs[Long]("id") shouldBe 55799
         relation55799.getAs[Byte]("type") shouldBe 2
         relation55799.getAs[AnyRef]("latitude") should be(null)
@@ -127,7 +112,7 @@ class OsmPbfFormatSpec extends AnyWordSpec with Matchers with BeforeAndAfterAll
     }

     "export to other formats" in withTemporalFolder { tmpFolder =>
-      val threeExamples = loadOsmPbf(madridPath)
+      val threeExamples = loadOsmPbf(spark, madridPath)
         .filter("id == 55799 || id == 3996192 || id == 171946")
         .orderBy("id")

@@ -136,7 +121,7 @@ class OsmPbfFormatSpec extends AnyWordSpec with Matchers with BeforeAndAfterAll
         .format("orc")
         .save(s"${tmpFolder}/madrid/three")

-      val readFromOrc = sqlContext.read
+      val readFromOrc = spark.sqlContext.read
         .format("orc")
         .load(s"${tmpFolder}/madrid/three")
         .orderBy("id")
@@ -151,7 +136,10 @@ class OsmPbfFormatSpec extends AnyWordSpec with Matchers with BeforeAndAfterAll
     "execute complex queries" when {
       "using dsl" should {
         "count arrays and filter" in {
-          loadOsmPbf(madridPath)
+          val sparkStable = spark
+          import sparkStable.implicits._
+
+          loadOsmPbf(spark, madridPath)
             .withColumn("no_of_nodes", fn.size($"nodes"))
             .withColumn("no_of_relations", fn.size($"relations"))
             .withColumn("no_of_tags", fn.size($"tags"))
@@ -161,33 +149,37 @@ class OsmPbfFormatSpec extends AnyWordSpec with Matchers with BeforeAndAfterAll
         }
       }
       "using SQL" should {
-        loadOsmPbf(madridPath, Some("madrid_shows"))
-        loadOsmPbf(monacoPath, Some("monaco_shows"))
+
         "count all zebras" in {
-          sqlContext
+          loadOsmPbf(spark, madridPath, Some("madrid_shows"))
+          spark.sqlContext
             .sql("select count(*) from madrid_shows where array_contains(map_values(tags), 'zebra')")
             .show()
         }
         "extract all keys used in tags" in {
-          sqlContext
+          loadOsmPbf(spark, madridPath, Some("madrid_shows"))
+          spark.sqlContext
             .sql("select distinct explode(map_keys(tags)) as tag from madrid_shows where size(tags) > 0 order by tag")
             .show()
         }

         "extract unique list of types" in {
-          sqlContext
+          loadOsmPbf(spark, monacoPath, Some("monaco_shows"))
+          spark.sqlContext
             .sql("select distinct(type) as unique_types from monaco_shows order by unique_types")
             .show()
         }

         "extract ways with more nodes" in {
-          sqlContext
+          loadOsmPbf(spark, monacoPath, Some("monaco_shows"))
+          spark.sqlContext
             .sql("select id, size(nodes) as size_nodes from monaco_shows where type == 1 order by size_nodes desc")
             .show()
         }

         "extract relations" in {
-          sqlContext
+          loadOsmPbf(spark, monacoPath, Some("monaco_shows"))
+          spark.sqlContext
             .sql("select id, relations from monaco_shows where type == 2")
             .show()
         }
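Two details of this refactor are worth calling out. First, `import spark.implicits._` requires a stable identifier, and the shared `spark` handle is a `var` on the new trait, hence the `sparkStable` val in the DSL test. Second, because each SQL test now registers its own temp view, `createTempView` (which fails if the view name already exists) is swapped for `createOrReplaceTempView`. A self-contained sketch of the stable-identifier pattern (names are mine, not from the repo):

import org.apache.spark.sql.SparkSession

object StableImportSketch {
  var spark: SparkSession = SparkSession.builder().master("local[2]").getOrCreate()

  def demo(): Unit = {
    // import spark.implicits._        // rejected: `spark` is a var (not stable)
    val sparkStable = spark            // pin the var to a val first
    import sparkStable.implicits._     // now the import compiles
    println(Seq(1, 2, 3).toDS().count()) // toDS comes from the implicits
  }
}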
spark/src/test/scala/com/acervera/osm4scala/spark/SparkSessionFixture.scala (new file: 82 additions, 0 deletions)
@@ -0,0 +1,82 @@
+/*
+ * The MIT License (MIT)
+ *
+ * Copyright (c) 2020 Ángel Cervera Claudio
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ *
+ */
+
+package com.acervera.osm4scala.spark
+
+import org.apache.spark.sql.{SQLContext, SparkSession}
+import org.scalatest.{BeforeAndAfterAll, Suite}
+
+object SparkSessionFixture {
+
+  /**
+    * Create a new Spark session.
+    * Warning 1: If a session is already open, it will be reused and closed at the end of the test.
+    * Warning 2: Use it only inside `in` clauses, never at the `should` / `when` level, because the
+    * `finally` block runs before nested tests, which may execute concurrently.
+    *
+    * @param cores Number of cores to use.
+    * @param appName Application name to use.
+    * @param testCode Test to execute.
+    */
+  def withSparkSession(cores: Int, appName: String)(testCode: (SparkSession, SQLContext) => Any): Unit = {
+    val sparkSession = SparkSession
+      .builder()
+      .appName(appName)
+      .master(s"local[$cores]")
+      .getOrCreate()
+    try {
+      testCode(sparkSession, sparkSession.sqlContext)
+    } finally {
+      sparkSession.close()
+    }
+  }
+
+}
+
+trait SparkSessionBeforeAfterAll extends BeforeAndAfterAll { this: Suite =>
+
+  val cores: Int = 4
+  val appName: String = this.getClass().getCanonicalName()
+
+  var spark: SparkSession = _
+
+  override def beforeAll(): Unit = {
+    spark =
+      SparkSession
+        .builder()
+        .appName(appName)
+        .master(s"local[$cores]")
+        .getOrCreate()
+
+    super.beforeAll()
+  }
+
+  override def afterAll(): Unit = {
+    try super.afterAll()
+    finally {
+      spark.close()
+    }
+  }
+}
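A hypothetical usage sketch (the spec name and test body are mine, not part of the commit): mixing in SparkSessionBeforeAfterAll gives every test a shared `spark`, created once in beforeAll and closed in afterAll, which is exactly why `Test / parallelExecution := false` was added to build.sbt above.

package com.acervera.osm4scala.spark

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

// Hypothetical spec showing the intended use of the new trait.
class FixtureUsageSpec extends AnyWordSpec with Matchers with SparkSessionBeforeAfterAll {
  "a suite mixing in SparkSessionBeforeAfterAll" should {
    "see a ready-to-use shared session" in {
      spark.range(10).count() shouldBe 10L // `spark` was created in beforeAll
    }
  }
}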
