Skip to content

Commit e15a319

Browse files
committed
[SPARK-26536][BUILD][TEST] Upgrade Mockito to 2.23.4
## What changes were proposed in this pull request? This PR upgrades Mockito from 1.10.19 to 2.23.4. The following changes are required. - Replace `org.mockito.Matchers` with `org.mockito.ArgumentMatchers` - Replace `anyObject` with `any` - Replace `getArgumentAt` with `getArgument` and add a type annotation. - Use the `isNull` matcher when `null` is passed. ```scala saslHandler.channelInactive(null); - verify(handler).channelInactive(any(TransportClient.class)); + verify(handler).channelInactive(isNull()); ``` - Make and use a `doReturn` wrapper to avoid [SI-4775](https://issues.scala-lang.org/browse/SI-4775) ```scala private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*) ``` ## How was this patch tested? Passes Jenkins with the existing tests. Closes apache#23452 from dongjoon-hyun/SPARK-26536. Authored-by: Dongjoon Hyun <[email protected]> Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent bccb860 commit e15a319

File tree

56 files changed

+131
-111
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

56 files changed

+131
-111
lines changed

common/network-common/src/test/java/org/apache/spark/network/sasl/SparkSaslSuite.java

+2-2
Original file line numberDiff line numberDiff line change
@@ -347,10 +347,10 @@ public void testRpcHandlerDelegate() throws Exception {
347347
verify(handler).getStreamManager();
348348

349349
saslHandler.channelInactive(null);
350-
verify(handler).channelInactive(any(TransportClient.class));
350+
verify(handler).channelInactive(isNull());
351351

352352
saslHandler.exceptionCaught(null, null);
353-
verify(handler).exceptionCaught(any(Throwable.class), any(TransportClient.class));
353+
verify(handler).exceptionCaught(isNull(), isNull());
354354
}
355355

356356
@Test

common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleBlockHandlerSuite.java

+3-1
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@
2727
import org.mockito.ArgumentCaptor;
2828

2929
import static org.junit.Assert.*;
30-
import static org.mockito.Matchers.any;
30+
import static org.mockito.ArgumentMatchers.any;
3131
import static org.mockito.Mockito.*;
3232

3333
import org.apache.spark.network.buffer.ManagedBuffer;
@@ -79,6 +79,8 @@ public void testRegisterExecutor() {
7979
@SuppressWarnings("unchecked")
8080
@Test
8181
public void testOpenShuffleBlocks() {
82+
when(client.getClientId()).thenReturn("app0");
83+
8284
RpcResponseCallback callback = mock(RpcResponseCallback.class);
8385

8486
ManagedBuffer block0Marker = new NioManagedBuffer(ByteBuffer.wrap(new byte[3]));

common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/OneForOneBlockFetcherSuite.java

+4-4
Original file line numberDiff line numberDiff line change
@@ -28,10 +28,10 @@
2828

2929
import static org.junit.Assert.assertEquals;
3030
import static org.junit.Assert.fail;
31-
import static org.mockito.Matchers.any;
32-
import static org.mockito.Matchers.anyInt;
33-
import static org.mockito.Matchers.anyLong;
34-
import static org.mockito.Matchers.eq;
31+
import static org.mockito.ArgumentMatchers.any;
32+
import static org.mockito.ArgumentMatchers.anyInt;
33+
import static org.mockito.ArgumentMatchers.anyLong;
34+
import static org.mockito.ArgumentMatchers.eq;
3535
import static org.mockito.Mockito.doAnswer;
3636
import static org.mockito.Mockito.mock;
3737
import static org.mockito.Mockito.times;

core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java

+2-2
Original file line numberDiff line numberDiff line change
@@ -50,8 +50,8 @@
5050
import static org.junit.Assert.assertEquals;
5151
import static org.junit.Assert.assertFalse;
5252
import static org.mockito.Answers.RETURNS_SMART_NULLS;
53-
import static org.mockito.Matchers.any;
54-
import static org.mockito.Matchers.anyInt;
53+
import static org.mockito.ArgumentMatchers.any;
54+
import static org.mockito.ArgumentMatchers.anyInt;
5555
import static org.mockito.Mockito.when;
5656

5757

core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ package org.apache.spark
1919

2020
import scala.collection.mutable
2121

22-
import org.mockito.Matchers.{any, eq => meq}
22+
import org.mockito.ArgumentMatchers.{any, eq => meq}
2323
import org.mockito.Mockito.{mock, never, verify, when}
2424
import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
2525

core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala

+6-7
Original file line numberDiff line numberDiff line change
@@ -23,8 +23,7 @@ import scala.collection.mutable
2323
import scala.concurrent.Future
2424
import scala.concurrent.duration._
2525

26-
import org.mockito.Matchers
27-
import org.mockito.Matchers._
26+
import org.mockito.ArgumentMatchers.{any, eq => meq}
2827
import org.mockito.Mockito.{mock, spy, verify, when}
2928
import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
3029

@@ -151,7 +150,7 @@ class HeartbeatReceiverSuite
151150
heartbeatReceiverClock.advance(executorTimeout)
152151
heartbeatReceiverRef.askSync[Boolean](ExpireDeadHosts)
153152
// Only the second executor should be expired as a dead host
154-
verify(scheduler).executorLost(Matchers.eq(executorId2), any())
153+
verify(scheduler).executorLost(meq(executorId2), any())
155154
val trackedExecutors = getTrackedExecutors
156155
assert(trackedExecutors.size === 1)
157156
assert(trackedExecutors.contains(executorId1))
@@ -223,10 +222,10 @@ class HeartbeatReceiverSuite
223222
assert(!response.reregisterBlockManager)
224223
// Additionally verify that the scheduler callback is called with the correct parameters
225224
verify(scheduler).executorHeartbeatReceived(
226-
Matchers.eq(executorId),
227-
Matchers.eq(Array(1L -> metrics.accumulators())),
228-
Matchers.eq(blockManagerId),
229-
Matchers.eq(executorUpdates))
225+
meq(executorId),
226+
meq(Array(1L -> metrics.accumulators())),
227+
meq(blockManagerId),
228+
meq(executorUpdates))
230229
}
231230
}
232231

core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ package org.apache.spark
1919

2020
import scala.collection.mutable.ArrayBuffer
2121

22-
import org.mockito.Matchers.any
22+
import org.mockito.ArgumentMatchers.any
2323
import org.mockito.Mockito._
2424

2525
import org.apache.spark.LocalSparkContext._

core/src/test/scala/org/apache/spark/deploy/StandaloneDynamicAllocationSuite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ package org.apache.spark.deploy
2020
import scala.collection.mutable
2121
import scala.concurrent.duration._
2222

23-
import org.mockito.Matchers.any
23+
import org.mockito.ArgumentMatchers.any
2424
import org.mockito.Mockito.{mock, verify, when}
2525
import org.scalatest.{BeforeAndAfterAll, PrivateMethodTester}
2626
import org.scalatest.concurrent.Eventually._

core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ import scala.collection.mutable
2424

2525
import com.codahale.metrics.Counter
2626
import org.eclipse.jetty.servlet.ServletContextHandler
27-
import org.mockito.Matchers._
27+
import org.mockito.ArgumentMatchers.any
2828
import org.mockito.Mockito._
2929
import org.mockito.invocation.InvocationOnMock
3030
import org.mockito.stubbing.Answer

core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala

+2-2
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ import org.apache.hadoop.hdfs.{DFSInputStream, DistributedFileSystem}
3434
import org.apache.hadoop.security.AccessControlException
3535
import org.json4s.jackson.JsonMethods._
3636
import org.mockito.ArgumentMatcher
37-
import org.mockito.Matchers.{any, argThat}
37+
import org.mockito.ArgumentMatchers.{any, argThat}
3838
import org.mockito.Mockito.{doThrow, mock, spy, verify, when}
3939
import org.scalatest.BeforeAndAfter
4040
import org.scalatest.Matchers
@@ -933,7 +933,7 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
933933
val mockedFs = spy(provider.fs)
934934
doThrow(new AccessControlException("Cannot read accessDenied file")).when(mockedFs).open(
935935
argThat(new ArgumentMatcher[Path]() {
936-
override def matches(path: Any): Boolean = {
936+
override def matches(path: Path): Boolean = {
937937
path.asInstanceOf[Path].getName.toLowerCase(Locale.ROOT) == "accessdenied"
938938
}
939939
}))

core/src/test/scala/org/apache/spark/deploy/history/HistoryServerDiskManagerSuite.scala

+4-2
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,8 @@ package org.apache.spark.deploy.history
2020
import java.io.File
2121

2222
import org.mockito.AdditionalAnswers
23-
import org.mockito.Matchers.{any, anyBoolean, anyLong, eq => meq}
24-
import org.mockito.Mockito._
23+
import org.mockito.ArgumentMatchers.{anyBoolean, anyLong, eq => meq}
24+
import org.mockito.Mockito.{doAnswer, spy}
2525
import org.scalatest.BeforeAndAfter
2626

2727
import org.apache.spark.{SparkConf, SparkFunSuite}
@@ -32,6 +32,8 @@ import org.apache.spark.util.kvstore.KVStore
3232

3333
class HistoryServerDiskManagerSuite extends SparkFunSuite with BeforeAndAfter {
3434

35+
private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
36+
3537
private val MAX_USAGE = 3L
3638

3739
private var testDir: File = _

core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ import java.io.File
2121

2222
import scala.concurrent.duration._
2323

24-
import org.mockito.Matchers._
24+
import org.mockito.ArgumentMatchers.{any, anyInt}
2525
import org.mockito.Mockito._
2626
import org.mockito.invocation.InvocationOnMock
2727
import org.mockito.stubbing.Answer

core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ import java.util.function.Supplier
2222

2323
import org.mockito.{Mock, MockitoAnnotations}
2424
import org.mockito.Answers.RETURNS_SMART_NULLS
25-
import org.mockito.Matchers._
25+
import org.mockito.ArgumentMatchers.any
2626
import org.mockito.Mockito._
2727
import org.mockito.invocation.InvocationOnMock
2828
import org.mockito.stubbing.Answer

core/src/test/scala/org/apache/spark/executor/ExecutorSuite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ import scala.concurrent.duration._
3030
import scala.language.postfixOps
3131

3232
import org.mockito.ArgumentCaptor
33-
import org.mockito.Matchers.{any, eq => meq}
33+
import org.mockito.ArgumentMatchers.{any, eq => meq}
3434
import org.mockito.Mockito.{inOrder, verify, when}
3535
import org.mockito.invocation.InvocationOnMock
3636
import org.mockito.stubbing.Answer

core/src/test/scala/org/apache/spark/memory/MemoryManagerSuite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ import scala.collection.mutable
2323
import scala.concurrent.{ExecutionContext, Future}
2424
import scala.concurrent.duration.Duration
2525

26-
import org.mockito.Matchers.{any, anyLong}
26+
import org.mockito.ArgumentMatchers.{any, anyLong}
2727
import org.mockito.Mockito.{mock, when, RETURNS_SMART_NULLS}
2828
import org.mockito.invocation.InvocationOnMock
2929
import org.mockito.stubbing.Answer

core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ import scala.concurrent.duration._
2929
import scala.language.postfixOps
3030

3131
import com.google.common.io.Files
32-
import org.mockito.Matchers.any
32+
import org.mockito.ArgumentMatchers.any
3333
import org.mockito.Mockito.{mock, never, verify, when}
3434
import org.scalatest.BeforeAndAfterAll
3535
import org.scalatest.concurrent.Eventually._

core/src/test/scala/org/apache/spark/rpc/netty/NettyRpcHandlerSuite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ import java.net.InetSocketAddress
2121
import java.nio.ByteBuffer
2222

2323
import io.netty.channel.Channel
24-
import org.mockito.Matchers._
24+
import org.mockito.ArgumentMatchers.any
2525
import org.mockito.Mockito._
2626

2727
import org.apache.spark.SparkFunSuite

core/src/test/scala/org/apache/spark/scheduler/BlacklistTrackerSuite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717

1818
package org.apache.spark.scheduler
1919

20-
import org.mockito.Matchers.any
20+
import org.mockito.ArgumentMatchers.any
2121
import org.mockito.Mockito.{never, verify, when}
2222
import org.mockito.invocation.InvocationOnMock
2323
import org.mockito.stubbing.Answer

core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala

+2-1
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream,
2121

2222
import scala.util.Random
2323

24-
import org.mockito.Mockito._
24+
import org.mockito.Mockito.mock
2525
import org.roaringbitmap.RoaringBitmap
2626

2727
import org.apache.spark.{SparkConf, SparkContext, SparkEnv, SparkFunSuite}
@@ -31,6 +31,7 @@ import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
3131
import org.apache.spark.storage.BlockManagerId
3232

3333
class MapStatusSuite extends SparkFunSuite {
34+
private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
3435

3536
test("compressSize") {
3637
assert(MapStatus.compressSize(0L) === 0)

core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorSuite.scala

+9-7
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,8 @@ import scala.language.postfixOps
2626

2727
import org.apache.hadoop.mapred._
2828
import org.apache.hadoop.mapreduce.TaskType
29-
import org.mockito.Matchers
30-
import org.mockito.Mockito._
29+
import org.mockito.ArgumentMatchers.{any, eq => meq}
30+
import org.mockito.Mockito.{doAnswer, spy, times, verify}
3131
import org.mockito.invocation.InvocationOnMock
3232
import org.mockito.stubbing.Answer
3333
import org.scalatest.BeforeAndAfter
@@ -71,6 +71,8 @@ import org.apache.spark.util.{ThreadUtils, Utils}
7171
*/
7272
class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
7373

74+
private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
75+
7476
var outputCommitCoordinator: OutputCommitCoordinator = null
7577
var tempDir: File = null
7678
var sc: SparkContext = null
@@ -103,7 +105,7 @@ class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
103105
invoke.callRealMethod()
104106
mockTaskScheduler.backend.reviveOffers()
105107
}
106-
}).when(mockTaskScheduler).submitTasks(Matchers.any())
108+
}).when(mockTaskScheduler).submitTasks(any())
107109

108110
doAnswer(new Answer[TaskSetManager]() {
109111
override def answer(invoke: InvocationOnMock): TaskSetManager = {
@@ -123,7 +125,7 @@ class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
123125
}
124126
}
125127
}
126-
}).when(mockTaskScheduler).createTaskSetManager(Matchers.any(), Matchers.any())
128+
}).when(mockTaskScheduler).createTaskSetManager(any(), any())
127129

128130
sc.taskScheduler = mockTaskScheduler
129131
val dagSchedulerWithMockTaskScheduler = new DAGScheduler(sc, mockTaskScheduler)
@@ -154,7 +156,7 @@ class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
154156
test("Job should not complete if all commits are denied") {
155157
// Create a mock OutputCommitCoordinator that denies all attempts to commit
156158
doReturn(false).when(outputCommitCoordinator).handleAskPermissionToCommit(
157-
Matchers.any(), Matchers.any(), Matchers.any(), Matchers.any())
159+
any(), any(), any(), any())
158160
val rdd: RDD[Int] = sc.parallelize(Seq(1), 1)
159161
def resultHandler(x: Int, y: Unit): Unit = {}
160162
val futureAction: SimpleFutureAction[Unit] = sc.submitJob[Int, Unit, Unit](rdd,
@@ -268,8 +270,8 @@ class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
268270
assert(retriedStage.size === 1)
269271
assert(sc.dagScheduler.outputCommitCoordinator.isEmpty)
270272
verify(sc.env.outputCommitCoordinator, times(2))
271-
.stageStart(Matchers.eq(retriedStage.head), Matchers.any())
272-
verify(sc.env.outputCommitCoordinator).stageEnd(Matchers.eq(retriedStage.head))
273+
.stageStart(meq(retriedStage.head), any())
274+
verify(sc.env.outputCommitCoordinator).stageEnd(meq(retriedStage.head))
273275
}
274276
}
275277

core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ package org.apache.spark.scheduler
1919

2020
import java.util.Properties
2121

22-
import org.mockito.Matchers.any
22+
import org.mockito.ArgumentMatchers.any
2323
import org.mockito.Mockito._
2424
import org.scalatest.BeforeAndAfter
2525

core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ import scala.util.control.NonFatal
2828

2929
import com.google.common.util.concurrent.MoreExecutors
3030
import org.mockito.ArgumentCaptor
31-
import org.mockito.Matchers.{any, anyLong}
31+
import org.mockito.ArgumentMatchers.{any, anyLong}
3232
import org.mockito.Mockito.{spy, times, verify}
3333
import org.scalatest.BeforeAndAfter
3434
import org.scalatest.concurrent.Eventually._

core/src/test/scala/org/apache/spark/scheduler/TaskSchedulerImplSuite.scala

+5-5
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ import java.nio.ByteBuffer
2222
import scala.collection.mutable.HashMap
2323
import scala.concurrent.duration._
2424

25-
import org.mockito.Matchers.{anyInt, anyObject, anyString, eq => meq}
25+
import org.mockito.ArgumentMatchers.{any, anyInt, anyString, eq => meq}
2626
import org.mockito.Mockito.{atLeast, atMost, never, spy, times, verify, when}
2727
import org.scalatest.BeforeAndAfterEach
2828
import org.scalatest.concurrent.Eventually
@@ -430,7 +430,7 @@ class TaskSchedulerImplSuite extends SparkFunSuite with LocalSparkContext with B
430430
verify(blacklist, never).updateBlacklistForSuccessfulTaskSet(
431431
stageId = meq(2),
432432
stageAttemptId = anyInt(),
433-
failuresByExec = anyObject())
433+
failuresByExec = any())
434434
}
435435

436436
test("scheduled tasks obey node and executor blacklists") {
@@ -504,7 +504,7 @@ class TaskSchedulerImplSuite extends SparkFunSuite with LocalSparkContext with B
504504
WorkerOffer("executor3", "host1", 2)
505505
)).flatten.size === 0)
506506
assert(tsm.isZombie)
507-
verify(tsm).abort(anyString(), anyObject())
507+
verify(tsm).abort(anyString(), any())
508508
}
509509

510510
test("SPARK-22148 abort timer should kick in when task is completely blacklisted & no new " +
@@ -1184,7 +1184,7 @@ class TaskSchedulerImplSuite extends SparkFunSuite with LocalSparkContext with B
11841184
assert(finalTsm.isZombie)
11851185

11861186
// no taskset has completed all of its tasks, so no updates to the blacklist tracker yet
1187-
verify(blacklist, never).updateBlacklistForSuccessfulTaskSet(anyInt(), anyInt(), anyObject())
1187+
verify(blacklist, never).updateBlacklistForSuccessfulTaskSet(anyInt(), anyInt(), any())
11881188

11891189
// finally, lets complete all the tasks. We simulate failures in attempt 1, but everything
11901190
// else succeeds, to make sure we get the right updates to the blacklist in all cases.
@@ -1202,7 +1202,7 @@ class TaskSchedulerImplSuite extends SparkFunSuite with LocalSparkContext with B
12021202
// we update the blacklist for the stage attempts with all successful tasks. Even though
12031203
// some tasksets had failures, we still consider them all successful from a blacklisting
12041204
// perspective, as the failures weren't from a problem w/ the tasks themselves.
1205-
verify(blacklist).updateBlacklistForSuccessfulTaskSet(meq(0), meq(stageAttempt), anyObject())
1205+
verify(blacklist).updateBlacklistForSuccessfulTaskSet(meq(0), meq(stageAttempt), any())
12061206
}
12071207
}
12081208

core/src/test/scala/org/apache/spark/scheduler/TaskSetBlacklistSuite.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
*/
1717
package org.apache.spark.scheduler
1818

19-
import org.mockito.Matchers.isA
19+
import org.mockito.ArgumentMatchers.isA
2020
import org.mockito.Mockito.{never, verify}
2121
import org.scalatest.BeforeAndAfterEach
2222
import org.scalatest.mockito.MockitoSugar

core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala

+2-2
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ import java.util.{Properties, Random}
2222
import scala.collection.mutable
2323
import scala.collection.mutable.ArrayBuffer
2424

25-
import org.mockito.Matchers.{any, anyInt, anyString}
25+
import org.mockito.ArgumentMatchers.{any, anyInt, anyString}
2626
import org.mockito.Mockito.{mock, never, spy, times, verify, when}
2727
import org.mockito.invocation.InvocationOnMock
2828
import org.mockito.stubbing.Answer
@@ -1319,7 +1319,7 @@ class TaskSetManagerSuite extends SparkFunSuite with LocalSparkContext with Logg
13191319
when(taskSetManagerSpy.addPendingTask(anyInt())).thenAnswer(
13201320
new Answer[Unit] {
13211321
override def answer(invocationOnMock: InvocationOnMock): Unit = {
1322-
val task = invocationOnMock.getArgumentAt(0, classOf[Int])
1322+
val task: Int = invocationOnMock.getArgument(0)
13231323
assert(taskSetManager.taskSetBlacklistHelperOpt.get.
13241324
isExecutorBlacklistedForTask(exec, task))
13251325
}

0 commit comments

Comments
 (0)