Skip to content

Commit 7209c80

Browse files
committed
Added a few tests and some Sonar fixes
1 parent 8812a1f commit 7209c80

File tree

2 files changed

+21
-4
lines changed

2 files changed

+21
-4
lines changed

src/main/java/com/marklogic/spark/writer/WriteContext.java

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,6 @@
2525
import com.marklogic.spark.Util;
2626
import org.apache.spark.sql.types.StructType;
2727

28-
import java.util.ArrayList;
2928
import java.util.Arrays;
3029
import java.util.List;
3130
import java.util.Map;
@@ -69,9 +68,9 @@ WriteBatcher newWriteBatcher(DataMovementManager dataMovementManager) {
6968
// WriteBatcherImpl has its own warn-level logging which is a bit verbose, including more than just the
7069
// message from the server. This is intended to always show up and be associated with our Spark connector
7170
// and also to be more brief, just capturing the main message from the server.
72-
.onBatchFailure(((batch, failure) -> {
73-
Util.MAIN_LOGGER.error("Failed to write documents: {}", failure.getMessage());
74-
}));
71+
.onBatchFailure((batch, failure) ->
72+
Util.MAIN_LOGGER.error("Failed to write documents: {}", failure.getMessage())
73+
);
7574

7675
if (logger.isDebugEnabled()) {
7776
writeBatcher.onBatchSuccess(this::logBatchOnSuccess);

src/test/java/com/marklogic/spark/reader/document/ReadDocumentRowsWithPartitionCountsTest.java

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,24 @@ void invalidValue() {
5050
assertEquals("Value of 'spark.marklogic.read.documents.partitionsPerForest' option must be numeric.", ex.getMessage());
5151
}
5252

53+
@ParameterizedTest
54+
@ValueSource(strings = {
55+
"{\"ctsquery\": {\"collectionQuery\": {\"uris\": [\"author\"]}}}",
56+
"{\"query\": {\"collection-query\": {\"uri\": [\"author\"]}}}",
57+
"{\"search\": {\"query\": {\"collection-query\": {\"uri\": [\"author\"]}}}}"
58+
})
59+
void complexQuery(String query) {
60+
long count = newSparkSession().read()
61+
.format(CONNECTOR_IDENTIFIER)
62+
.option(Options.CLIENT_URI, makeClientUri())
63+
.option(Options.READ_DOCUMENTS_QUERY, query)
64+
.option(Options.READ_DOCUMENTS_PARTITIONS_PER_FOREST, 3)
65+
.load()
66+
.count();
67+
68+
assertEquals(15, count, "Unexpected count for query: " + query);
69+
}
70+
5371
private Dataset<Row> readAuthors(int partitionsPerForest) {
5472
return newSparkSession().read()
5573
.format(CONNECTOR_IDENTIFIER)

0 commit comments

Comments (0)