
Commit 1affcc5

georgeajit authored and committed

No Task - Using delete listener instead of clearDB method. Prevents side effects.
1 parent fcd60de commit 1affcc5
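In short, the commit drops the shared clearDB(port) call from the test's cleanup (which wipes the whole test database and can affect other tests running against it) and instead deletes only the documents this test wrote, by running a QueryBatcher over the collected URIs with a DeleteListener. A minimal sketch of that cleanup pattern, assuming an already-connected DatabaseClient named dbClient and the test's urisList; the class and method names here are hypothetical and only mirror the diff below:

import java.util.ArrayList;
import java.util.concurrent.TimeUnit;

import com.marklogic.client.DatabaseClient;
import com.marklogic.client.datamovement.DataMovementManager;
import com.marklogic.client.datamovement.DeleteListener;
import com.marklogic.client.datamovement.QueryBatcher;

public class DeleteListenerCleanupSketch {
    // Sketch only: dbClient and urisList are assumed to exist already
    // (names chosen to mirror the test), not part of any published helper.
    static void deleteTestDocs(DatabaseClient dbClient, ArrayList<String> urisList) {
        DataMovementManager dmManager = dbClient.newDataMovementManager();
        // Iterate only the URIs this test wrote and delete them batch by batch,
        // so other documents in the shared test database are left untouched.
        QueryBatcher deleteBatcher = dmManager.newQueryBatcher(urisList.iterator())
                .onUrisReady(new DeleteListener())
                .onQueryFailure(throwable -> throwable.printStackTrace())
                .withBatchSize(5000)
                .withThreadCount(10);
        dmManager.startJob(deleteBatcher);
        // Block until the delete job drains (bounded by a timeout), then stop it.
        deleteBatcher.awaitCompletion(2, TimeUnit.MINUTES);
        dmManager.stopJob(deleteBatcher);
    }
}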

File tree

1 file changed (+52 -36 lines)


marklogic-client-api-functionaltests/src/test/java/com/marklogic/client/datamovement/functionaltests/QueryBatcherJobReportTest.java

Lines changed: 52 additions & 36 deletions
@@ -39,10 +39,8 @@
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.io.FileUtils;
-import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
-import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
@@ -53,6 +51,7 @@
 import com.marklogic.client.datamovement.ApplyTransformListener;
 import com.marklogic.client.datamovement.ApplyTransformListener.ApplyResult;
 import com.marklogic.client.datamovement.DataMovementManager;
+import com.marklogic.client.datamovement.DeleteListener;
 import com.marklogic.client.datamovement.JobTicket;
 import com.marklogic.client.datamovement.QueryBatch;
 import com.marklogic.client.datamovement.QueryBatcher;
@@ -190,23 +189,12 @@ public static void tearDownAfterClass() throws Exception {
 			detachForest(dbName, dbName + "-" + (i + 1));
 			deleteForest(dbName + "-" + (i + 1));
 		}
-
 		deleteDB(dbName);
 	}
 
-	@Before
-	public void setUp() throws Exception {
-
-	}
-
-	@After
-	public void tearDown() throws Exception {
-
-	}
-
 	@Test
 	public void jobReport() throws Exception {
-
+		System.out.println("In jobReport method");
 		AtomicInteger batchCount = new AtomicInteger(0);
 		AtomicInteger successCount = new AtomicInteger(0);
 		AtomicLong count1 = new AtomicLong(0);
@@ -267,11 +255,11 @@ public void jobReport() throws Exception {
 		Assert.assertEquals(dmManager.getJobReport(queryTicket).getSuccessBatchesCount(), count1.get());
 		Assert.assertEquals(dmManager.getJobReport(queryTicket).getSuccessBatchesCount(), count2.get());
 		Assert.assertEquals(dmManager.getJobReport(queryTicket).getSuccessBatchesCount(), count3.get());
-
 	}
 
 	@Test
 	public void testNullQdef() throws IOException, InterruptedException {
+		System.out.println("In testNullQdef method");
 		JsonNode node = null;
 		JacksonHandle jacksonHandle = null;
 
@@ -305,7 +293,7 @@ public void testNullQdef() throws IOException, InterruptedException {
 
 	@Test
 	public void queryFailures() throws Exception {
-
+		System.out.println("In queryFailures method");
 		Thread t1 = new Thread(new DisabledDBRunnable());
 		t1.setName("Status Check -1");
 
@@ -399,12 +387,11 @@ public void run() {
 			properties.put("enabled", "true");
 			changeProperty(properties, "/manage/v2/databases/" + dbName + "/properties");
 		}
-
 	}
 
 	@Test
 	public void jobReportStopJob() throws Exception {
-
+		System.out.println("In jobReportStopJob method");
 		QueryBatcher batcher = dmManager.newQueryBatcher(new StructuredQueryBuilder().collection("XmlTransform"))
 				.withBatchSize(20).withThreadCount(20);
 		AtomicInteger batchCount = new AtomicInteger(0);
@@ -438,7 +425,7 @@ public void jobReportStopJob() throws Exception {
 	// Making sure we can stop jobs based on the JobId.
 	@Test
 	public void stopJobUsingJobId() throws Exception {
-
+		System.out.println("In stopJobUsingJobId method");
 		String jobId = UUID.randomUUID().toString();
 
 		QueryBatcher batcher = dmManager.newQueryBatcher(new StructuredQueryBuilder().collection("XmlTransform"))
@@ -477,7 +464,7 @@ public void stopJobUsingJobId() throws Exception {
 
 	@Test
 	public void jsMasstransformReplace() throws Exception {
-
+		System.out.println("In jsMasstransformReplace method");
 		ServerTransform transform = new ServerTransform("jsTransform");
 		transform.put("newValue", "new Value");
 
@@ -532,10 +519,9 @@ public void jsMasstransformReplace() throws Exception {
 
 	}
 
-	// ISSUE # 106
 	@Test
 	public void stopTransformJobTest() throws Exception {
-
+		System.out.println("In stopTransformJobTest method");
 		ServerTransform transform = new ServerTransform("add-attr-xquery-transform");
 		transform.put("name", "Lang");
 		transform.put("value", "French");
@@ -606,19 +592,28 @@ public void stopTransformJobTest() throws Exception {
 	 */
 	@Test
 	public void testStopBeforeListenerisComplete() throws Exception {
+		ArrayList<String> urisList = new ArrayList<String>();
+		final String qMaxBatches = "fn:count(cts:uri-match('/setMaxBatches*'))";
 		try {
-			clearDB(port);
+
 			System.out.println("In testStopBeforeListenerisComplete method");
-
-			final String query1 = "fn:count(fn:doc())";
+
 			final AtomicInteger count = new AtomicInteger(0);
 			final AtomicInteger failedBatch = new AtomicInteger(0);
 			final AtomicInteger successBatch = new AtomicInteger(0);
 
 			final AtomicInteger failedBatch2 = new AtomicInteger(0);
 			final AtomicInteger successBatch2 = new AtomicInteger(0);
-
-			ArrayList<String> urisList = new ArrayList<String>();
+
+			String jsonDoc = "{" +
+					"\"employees\": [" +
+					"{ \"firstName\":\"John\" , \"lastName\":\"Doe\" }," +
+					"{ \"firstName\":\"Ann\" , \"lastName\":\"Smith\" }," +
+					"{ \"firstName\":\"Bob\" , \"lastName\":\"Foo\" }]" +
+					"}";
+			StringHandle handle = new StringHandle();
+			handle.setFormat(Format.JSON);
+			handle.set(jsonDoc);
 
 			WriteBatcher batcher = dmManager.newWriteBatcher();
 			batcher.withBatchSize(99);
@@ -638,10 +633,10 @@ class writeDocsThread implements Runnable {
 				public void run() {
 
 					for (int j = 0; j < 50000; j++) {
-						String uri = "/local/json-" + j + "-" + Thread.currentThread().getId();
-						System.out.println("Thread name: " + Thread.currentThread().getName() + " URI:" + uri);
+						String uri = "/setMaxBatches-" + j + "-" + Thread.currentThread().getId();
+						//System.out.println("Thread name: " + Thread.currentThread().getName() + " URI:" + uri);
 						urisList.add(uri);
-						batcher.add(uri, fileHandle);
+						batcher.add(uri, handle);
 					}
 					batcher.flushAndWait();
 				}
@@ -678,9 +673,10 @@ public void run() {
 			countT.join();
 
 			t1.join();
-			int docCnt = dbClient.newServerEval().xquery(query1).eval().next().getNumber().intValue();
+
+			int docCnt = dbClient.newServerEval().xquery(qMaxBatches).eval().next().getNumber().intValue();
 			System.out.println("Doc count is " + docCnt);
-			Assert.assertTrue( docCnt == 50000);
+			Assert.assertTrue(docCnt == 50000);
 
 			Collection<String> batchResults = new LinkedHashSet<String>();
 			QueryBatcher qb = dmManager.newQueryBatcher(urisList.iterator())
@@ -729,7 +725,6 @@ public void run() {
 			assertTrue("Stop QueryBatcher with setMaxBatches set to 2035 is incorrect", batchResults.size() == 24420);
 
 			/* Test 2 setMaxBatches()
-
 			 */
 			Collection<String> batchResults2 = new LinkedHashSet<String>();
 			QueryBatcher qb2 = dmManager.newQueryBatcher(urisList.iterator())
@@ -747,6 +742,7 @@ public void run() {
 					failedBatch2.addAndGet(1);
 				});
 			qb2.setMaxBatches(203);
+
 			class BatchesSoFarThread implements Runnable {
 
 				@Override
@@ -760,14 +756,14 @@ public void run() {
 						qb2.setMaxBatches();
 					}
 				}
-
+
 			Thread tMBStop2 = new Thread(new BatchesSoFarThread());
+			// Wait for the stop thread to initialize before starting DMSDK Job.
 			try {
 				Thread.sleep(3000);
 			} catch (InterruptedException e) {
 				e.printStackTrace();
 			}
-
 			dmManager.startJob(qb2);
 
 			int initialUrisSize = batchResults2.size();
@@ -776,14 +772,34 @@ public void run() {
 			qb2.awaitCompletion();
 			dmManager.stopJob(qb2);
 
+			System.out.println("Doc count in initialUrisSize " + initialUrisSize);
+			System.out.println("Doc count after setMaxBatches() is called " + batchResults2.size());
+
 			assertTrue("Batches of URIs collected so far", batchResults2.size() > 0);
 			assertTrue("Number of Uris collected does not fall in the range", (batchResults2.size()>initialUrisSize && batchResults2.size()< 2436));
 		}
 		catch (Exception ex) {
 			ex.printStackTrace();
 		}
 		finally {
-			clearDB(port);
+			// Delete all uris.
+			QueryBatcher deleteBatcher = dmManager.newQueryBatcher(urisList.iterator())
+					.onUrisReady(new DeleteListener())
+					.onUrisReady(batch -> {
+						//System.out.println("Items in batch " + batch.getItems().length);
+					}
+					)
+					.onQueryFailure(throwable -> {
+						System.out.println("Query Failed");
+						throwable.printStackTrace();
+					})
+					.withBatchSize(5000)
+					.withThreadCount(10);
			dmManager.startJob(deleteBatcher);
+			deleteBatcher.awaitCompletion(2, TimeUnit.MINUTES);
+			int docCnt = dbClient.newServerEval().xquery(qMaxBatches).eval().next().getNumber().intValue();
+			System.out.println("All setMaxBatches docs should have been deleted. Count after DeleteListener job is " + docCnt);
+			Assert.assertTrue(docCnt == 0);
 		}
 	}
 }
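The new finally block also verifies the cleanup server-side by counting URIs under the test's prefix after the DeleteListener job finishes. A minimal sketch of that check, assuming the same dbClient connection; the class and method names are hypothetical, and the '/setMaxBatches*' prefix comes from the URIs written earlier in the test:

import com.marklogic.client.DatabaseClient;
import org.junit.Assert;

public class CleanupVerificationSketch {
    // Sketch only: dbClient is an assumed, already-connected client; the URI prefix
    // matches the '/setMaxBatches-...' documents written by the test above.
    static void assertAllTestDocsDeleted(DatabaseClient dbClient) {
        String qMaxBatches = "fn:count(cts:uri-match('/setMaxBatches*'))";
        // Evaluate the count on the server so the check sees exactly what is persisted.
        int docCnt = dbClient.newServerEval()
                .xquery(qMaxBatches)
                .eval().next().getNumber().intValue();
        Assert.assertTrue("Expected no setMaxBatches docs after cleanup, found " + docCnt, docCnt == 0);
    }
}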
