Skip to content

Commit 532ba7d

Browse files
authored
Merge pull request #382 from marklogic/feature/upgrade-sonar
Upgraded Sonar
2 parents a447e96 + 53df807 commit 532ba7d

File tree

8 files changed

+21
-32
lines changed

8 files changed

+21
-32
lines changed

CONTRIBUTING.md

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -56,18 +56,18 @@ To configure the SonarQube service, perform the following steps:
5656
7. Click on "Use the global setting" and then "Create project".
5757
8. On the "Analysis Method" page, click on "Locally".
5858
9. In the "Provide a token" panel, click on "Generate". Copy the token.
59-
10. Add `systemProp.sonar.token=your token pasted here` to `gradle-local.properties` in the root of your project, creating
59+
10. Add `systemProp.sonar.login=your token pasted here` to `gradle-local.properties` in the root of your project, creating
6060
that file if it does not exist yet.
6161

6262
To run SonarQube, run the following Gradle tasks using Java 17, which will run all the tests with code coverage and
6363
then generate a quality report with SonarQube:
6464

6565
./gradlew test sonar
6666

67-
If you do not add `systemProp.sonar.token` to your `gradle-local.properties` file, you can specify the token via the
67+
If you do not add `systemProp.sonar.login` to your `gradle-local.properties` file, you can specify the token via the
6868
following:
6969

70-
./gradlew test sonar -Dsonar.token=paste your token here
70+
./gradlew test sonar -Dsonar.login=paste your token here
7171

7272
When that completes, you will see a line like this near the end of the logging:
7373

@@ -193,12 +193,16 @@ You will need the connector jar available, so run `./gradlew clean shadowJar` if
193193
You can then run a test Python program in this repository via the following (again, change the master address as
194194
needed); note that you run this outside of PySpark, and `spark-submit` is available after having installed PySpark:
195195

196-
spark-submit --master spark://NYWHYC3G0W:7077 --jars marklogic-spark-connector/build/libs/marklogic-spark-connector-2.5-SNAPSHOT.jar src/test/python/test_program.py
196+
spark-submit --master spark://NYWHYC3G0W:7077 --jars marklogic-spark-connector/build/libs/marklogic-spark-connector-2.5-SNAPSHOT.jar marklogic-spark-connector/src/test/python/test_program.py
197197

198198
You can also test a Java program. To do so, first move the `com.marklogic.spark.TestProgram` class from `src/test/java`
199-
to `src/main/java`. Then run `./gradlew clean shadowJar` to rebuild the connector jar. Then run the following:
199+
to `src/main/java`. Then run the following:
200200

201-
spark-submit --master spark://NYWHYC3G0W:7077 --class com.marklogic.spark.TestProgram marklogic-spark-connector/build/libs/marklogic-spark-connector-2.5-SNAPSHOT.jar
201+
```
202+
./gradlew clean shadowJar
203+
cd marklogic-spark-connector
204+
spark-submit --master spark://NYWHYC3G0W:7077 --class com.marklogic.spark.TestProgram build/libs/marklogic-spark-connector-2.5-SNAPSHOT.jar
205+
```
202206

203207
Be sure to move `TestProgram` back to `src/test/java` when you are done.
204208

build.gradle

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
plugins {
22
id "java-library"
3-
id "org.sonarqube" version "5.1.0.4882"
3+
id "org.sonarqube" version "6.0.1.5171"
44
}
55

66
sonar {

docker-compose.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ services:
3030

3131
# Copied from https://docs.sonarsource.com/sonarqube/latest/setup-and-upgrade/install-the-server/#example-docker-compose-configuration .
3232
sonarqube:
33-
image: sonarqube:10.6.0-community
33+
image: sonarqube:lts-community
3434
depends_on:
3535
- postgres
3636
environment:

marklogic-spark-connector/src/main/java/com/marklogic/spark/writer/file/FileUtil.java

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
import java.net.URI;
99
import java.net.URISyntaxException;
1010

11-
abstract class FileUtil {
11+
public interface FileUtil {
1212

1313
static String makePathFromDocumentURI(String documentURI) {
1414
// Mostly copied from MLCP.
@@ -25,7 +25,4 @@ static String makePathFromDocumentURI(String documentURI) {
2525
return documentURI;
2626
}
2727
}
28-
29-
private FileUtil() {
30-
}
3128
}

marklogic-spark-langchain4j/src/main/java/com/marklogic/spark/langchain4j/DocumentSplitterFactory.java

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -14,9 +14,9 @@
1414
import java.util.HashMap;
1515
import java.util.Map;
1616

17-
public abstract class DocumentSplitterFactory {
17+
public interface DocumentSplitterFactory {
1818

19-
public static DocumentSplitter makeDocumentSplitter(Context context) {
19+
static DocumentSplitter makeDocumentSplitter(Context context) {
2020
if (context.hasOption(Options.WRITE_SPLITTER_CUSTOM_CLASS)) {
2121
return makeCustomSplitter(context);
2222
}
@@ -99,7 +99,4 @@ private static String massageLangchain4jError(Context context, String message) {
9999
}
100100
return message;
101101
}
102-
103-
private DocumentSplitterFactory() {
104-
}
105102
}

marklogic-spark-langchain4j/src/main/java/com/marklogic/spark/langchain4j/DocumentTextSplitterFactory.java

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -13,9 +13,9 @@
1313
import java.util.Arrays;
1414
import java.util.Optional;
1515

16-
public abstract class DocumentTextSplitterFactory {
16+
public interface DocumentTextSplitterFactory {
1717

18-
public static Optional<DocumentTextSplitter> makeSplitter(Context context) {
18+
static Optional<DocumentTextSplitter> makeSplitter(Context context) {
1919
if (context.hasOption(Options.WRITE_SPLITTER_XPATH)) {
2020
return Optional.of(makeXmlSplitter(context));
2121
} else if (context.getProperties().containsKey(Options.WRITE_SPLITTER_JSON_POINTERS)) {
@@ -95,7 +95,4 @@ private static ChunkAssembler makeChunkAssembler(Context context) {
9595
.build()
9696
);
9797
}
98-
99-
private DocumentTextSplitterFactory() {
100-
}
10198
}

marklogic-spark-langchain4j/src/main/java/com/marklogic/spark/langchain4j/EmbeddingAdderFactory.java

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,9 +16,9 @@
1616
import java.util.Optional;
1717
import java.util.function.Function;
1818

19-
public abstract class EmbeddingAdderFactory {
19+
public interface EmbeddingAdderFactory {
2020

21-
public static Optional<EmbeddingAdder> makeEmbedder(Context context, DocumentTextSplitter splitter) {
21+
static Optional<EmbeddingAdder> makeEmbedder(Context context, DocumentTextSplitter splitter) {
2222
Optional<EmbeddingModel> embeddingModel = makeEmbeddingModel(context);
2323
if (embeddingModel.isPresent()) {
2424
EmbeddingGenerator embeddingGenerator = makeEmbeddingGenerator(context, embeddingModel.get());
@@ -119,7 +119,4 @@ private static Map<String, String> makeEmbedderOptions(Context context) {
119119
.forEach(key -> options.put(key.substring(Options.WRITE_EMBEDDER_MODEL_FUNCTION_OPTION_PREFIX.length()), context.getProperties().get(key)));
120120
return options;
121121
}
122-
123-
private EmbeddingAdderFactory() {
124-
}
125122
}

marklogic-spark-langchain4j/src/main/java/com/marklogic/spark/langchain4j/NamespaceContextFactory.java

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -10,9 +10,9 @@
1010
import java.util.HashMap;
1111
import java.util.Map;
1212

13-
public abstract class NamespaceContextFactory {
13+
public interface NamespaceContextFactory {
1414

15-
public static NamespaceContext makeNamespaceContext(Map<String, String> properties) {
15+
static NamespaceContext makeNamespaceContext(Map<String, String> properties) {
1616
Map<String, String> prefixesToNamespaces = new HashMap<>();
1717
properties.keySet().stream()
1818
.filter(key -> key.startsWith(Options.XPATH_NAMESPACE_PREFIX))
@@ -23,7 +23,4 @@ public static NamespaceContext makeNamespaceContext(Map<String, String> properti
2323
});
2424
return new XPathNamespaceContext(prefixesToNamespaces);
2525
}
26-
27-
private NamespaceContextFactory() {
28-
}
2926
}

0 commit comments

Comments (0)