Skip to content

Commit d3affdc

Browse files
committed
Addressing all Sonar warnings
Ran an "optimize imports" on src/main and src/test, so some files had imports optimized even though they didn't have unused imports.
1 parent 8d45182 commit d3affdc

21 files changed: +27 −54 lines changed

src/main/java/com/marklogic/spark/MarkLogicTable.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,9 @@
33
*/
44
package com.marklogic.spark;
55

6-
import com.marklogic.spark.reader.optic.OpticScanBuilder;
7-
import com.marklogic.spark.reader.optic.OpticReadContext;
86
import com.marklogic.spark.reader.customcode.CustomCodeScanBuilder;
7+
import com.marklogic.spark.reader.optic.OpticReadContext;
8+
import com.marklogic.spark.reader.optic.OpticScanBuilder;
99
import com.marklogic.spark.writer.MarkLogicWriteBuilder;
1010
import com.marklogic.spark.writer.WriteContext;
1111
import org.apache.spark.sql.SparkSession;

src/main/java/com/marklogic/spark/reader/optic/PlanUtil.java

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,9 @@
1717
import org.slf4j.Logger;
1818
import org.slf4j.LoggerFactory;
1919

20-
import java.util.*;
20+
import java.util.HashMap;
21+
import java.util.Map;
22+
import java.util.Set;
2123
import java.util.function.Consumer;
2224
import java.util.function.Function;
2325

src/test/java/com/marklogic/spark/AbstractIntegrationTest.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,7 @@ protected final boolean isMarkLogic10() {
120120
String version = getDatabaseClient().newServerEval().javascript("xdmp.version()").evalAs(String.class);
121121
markLogicVersion = new MarkLogicVersion(version);
122122
}
123-
return markLogicVersion.getMajor() == 10;
123+
return markLogicVersion.getMajorNumber() == 10;
124124
}
125125

126126
protected final boolean isSpark340OrHigher() {

src/test/java/com/marklogic/spark/MarkLogicVersion.java

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,8 @@
1414
*/
1515
class MarkLogicVersion {
1616

17-
private int major;
18-
private Integer minor;
17+
private int majorNumber;
18+
private Integer minorNumber;
1919
private boolean nightly;
2020

2121
private static final String VERSION_WITH_PATCH_PATTERN = "^.*-(.+)\\..*";
@@ -27,28 +27,28 @@ public MarkLogicVersion(String version) {
2727
if (version.matches(nightlyPattern)) {
2828
this.nightly = true;
2929
} else if (version.matches(majorWithMinorPattern)) {
30-
this.minor = version.matches(VERSION_WITH_PATCH_PATTERN) ?
30+
this.minorNumber = version.matches(VERSION_WITH_PATCH_PATTERN) ?
3131
parseMinorWithPatch(version) :
3232
Integer.parseInt(version.replaceAll(majorWithMinorPattern, "$1") + "00");
3333
}
34-
this.major = major;
34+
this.majorNumber = major;
3535
}
3636

3737
private int parseMinorWithPatch(String version) {
38-
final int minorNumber = Integer.parseInt(version.replaceAll(VERSION_WITH_PATCH_PATTERN, "$1"));
38+
final int minorValue = Integer.parseInt(version.replaceAll(VERSION_WITH_PATCH_PATTERN, "$1"));
3939
final int patch = Integer.parseInt(version.replaceAll("^.*-(.+)\\.(.*)", "$2"));
4040
final String leftPaddedPatchNumber = patch < 10 ?
4141
StringUtils.leftPad(String.valueOf(patch), 2, "0") :
4242
String.valueOf(patch);
43-
return Integer.parseInt(minorNumber + leftPaddedPatchNumber);
43+
return Integer.parseInt(minorValue + leftPaddedPatchNumber);
4444
}
4545

46-
public int getMajor() {
47-
return major;
46+
public int getMajorNumber() {
47+
return majorNumber;
4848
}
4949

50-
public Integer getMinor() {
51-
return minor;
50+
public Integer getMinorNumber() {
51+
return minorNumber;
5252
}
5353

5454
public boolean isNightly() {

src/test/java/com/marklogic/spark/reader/document/ReadDocumentRowsTest.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@
66
import com.fasterxml.jackson.databind.JsonNode;
77
import com.fasterxml.jackson.databind.node.ObjectNode;
88
import com.marklogic.client.MarkLogicIOException;
9-
import com.marklogic.spark.AbstractIntegrationTest;
109
import com.marklogic.spark.ConnectorException;
1110
import com.marklogic.spark.Options;
1211
import com.marklogic.spark.writer.AbstractWriteTest;

src/test/java/com/marklogic/spark/reader/document/ReadDocumentRowsWithPartitionCountsTest.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ void wayTooManyPartitions() {
3737
@Test
3838
void zeroPartitions() {
3939
Dataset<Row> dataset = readAuthors(0);
40-
ConnectorException ex = assertThrows(ConnectorException.class, () -> dataset.count());
40+
assertThrows(ConnectorException.class, () -> dataset.count());
4141
}
4242

4343
@Test

src/test/java/com/marklogic/spark/reader/optic/AnalyzePlanTest.java

Lines changed: 1 addition & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,6 @@
1010
import com.marklogic.client.row.RawQueryDSLPlan;
1111
import com.marklogic.client.row.RowManager;
1212
import com.marklogic.spark.AbstractIntegrationTest;
13-
import com.marklogic.spark.reader.optic.PlanAnalysis;
14-
import com.marklogic.spark.reader.optic.PlanAnalyzer;
1513
import org.junit.jupiter.api.BeforeEach;
1614
import org.junit.jupiter.params.ParameterizedTest;
1715
import org.junit.jupiter.params.provider.CsvSource;
@@ -47,7 +45,7 @@ void setup() {
4745
"5,15"
4846
})
4947
void partitionCountAndBatchSize(long partitionCount, long batchSize) {
50-
logger.info(partitionCount + ":" + batchSize);
48+
logger.info("{}:{}", partitionCount, batchSize);
5149

5250
PlanAnalysis planAnalysis = analyzePlan(partitionCount, batchSize);
5351
verifyBucketsCoverAllUnsignedLongs(planAnalysis);
@@ -58,8 +56,6 @@ private PlanAnalysis analyzePlan(long partitionCount, long batchSize) {
5856
RawQueryDSLPlan userPlan = rowManager.newRawQueryDSLPlan(new StringHandle("op.fromView('Medical', 'Authors').select(['LastName', 'rowID'])"));
5957
PlanAnalyzer partitioner = new PlanAnalyzer((DatabaseClientImpl) getDatabaseClient());
6058
PlanAnalysis planAnalysis = partitioner.analyzePlan(userPlan.getHandle(), partitionCount, batchSize);
61-
// System.out.println("BUCKET COUNT: " + planAnalysis.getAllBuckets().size());
62-
// System.out.println(planAnalysis.boundedPlan.toPrettyString());
6359
assertEquals(partitionCount, planAnalysis.getPartitions().size());
6460
return planAnalysis;
6561
}
@@ -96,7 +92,6 @@ private void verifyAllFifteenAuthorsAreReturned(PlanAnalysis planAnalysis) {
9692
JacksonHandle initialHandle = new JacksonHandle();
9793
runPlan(planAnalysis, planAnalysis.getPartitions().get(0).getBuckets().get(0), initialHandle);
9894
final long serverTimestamp = initialHandle.getServerTimestamp();
99-
// System.out.println("ST: " + serverTimestamp);
10095
// Now run the plan on each bucket and keep track of the total number of rows returned.
10196
// This uses a thread pool solely to improve the performance of the test.
10297
ExecutorService executor = Executors.newFixedThreadPool(planAnalysis.getAllBuckets().size());
@@ -114,15 +109,12 @@ private void verifyAllFifteenAuthorsAreReturned(PlanAnalysis planAnalysis) {
114109
if (result != null) {
115110
JsonNode rows = result.get("rows");
116111
for (int i = 0; i < rows.size(); i++) {
117-
// System.out.println(rows.get(i).toPrettyString());
118112
String name = rows.get(i).get("Medical.Authors.LastName").get("value").asText();
119113
names.add(name);
120114
bucketNames.add(name + ":" + rows.get(i).get("Medical.Authors.rowid").get("value").asText());
121115
}
122-
// Scarsbrick:14992830574179162536:4435912200092073691
123116
returnedRowCount.addAndGet(rows.size());
124117
}
125-
// System.out.println(bucket + ": " + bucketNames);
126118
}));
127119
}
128120
}
@@ -136,8 +128,6 @@ private void verifyAllFifteenAuthorsAreReturned(PlanAnalysis planAnalysis) {
136128
}
137129
});
138130

139-
// System.out.println("NAMES: " + names);
140-
// System.out.println("NAME COUNT: " + names.size());
141131
assertEquals(15, returnedRowCount.get(),
142132
"All 15 author rows should have been returned; we can't assume how many will be in a bucket since the " +
143133
"row ID of each row is random, we just know we should get 15 back.");

src/test/java/com/marklogic/spark/reader/optic/PerformanceTester.java

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,6 @@ class PerformanceTester {
2222
public static void main(String[] args) {
2323
final int sparkConcurrentTaskCount = 16;
2424
final String query = "op.fromView('demo','employee')";
25-
// final String query = "op.fromView('demo','employee').where(op.eq(op.col('job_description'), 'Technician'))";
26-
// final String query = "op.fromView('demo', 'employee').where(op.le(op.col('person_id'), 8))";
2725
final long partitionCount = 8;
2826
final long batchSize = 100000;
2927

@@ -48,6 +46,5 @@ public static void main(String[] args) {
4846
long duration = System.currentTimeMillis() - now;
4947
logger.info("Duration: {}; row count: {}; rows per second: {}", duration, count,
5048
(double) count / ((double) duration / 1000));
51-
// rows.forEach(row -> logger.info(row.prettyJson()));
5249
}
5350
}

src/test/java/com/marklogic/spark/reader/optic/PushDownGroupByCountTest.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44
package com.marklogic.spark.reader.optic;
55

66
import com.marklogic.spark.Options;
7-
import org.apache.spark.sql.Column;
87
import org.apache.spark.sql.Row;
98
import org.junit.jupiter.api.Test;
109

src/test/java/com/marklogic/spark/reader/optic/PushDownGroupByMaxTest.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,6 @@
1212

1313
import static org.apache.spark.sql.functions.max;
1414
import static org.junit.jupiter.api.Assertions.assertEquals;
15-
import static org.junit.jupiter.api.Assertions.assertTrue;
1615

1716
class PushDownGroupByMaxTest extends AbstractPushDownTest {
1817

0 commit comments

Comments (0)