
Commit 5ac9b04

Merge pull request #64 from marklogic/feature/opticDsl-rename
Renaming "opticDsl" to "opticQuery"
2 parents 13b7c17 + 96db701 commit 5ac9b04

Showing 25 changed files with 57 additions and 57 deletions.
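The change is purely a rename of the read option key; the query syntax and every other option are untouched. As a hedged sketch of the migration (connection values below are placeholders, not taken from this commit), user code changes like this:

```
# Assumes an active SparkSession named `spark` with the connector on the classpath.

# Before this commit (no longer recognized):
#   .option("spark.marklogic.read.opticDsl", "op.fromView('example', 'employee')")

# After this commit:
df = spark.read.format("com.marklogic.spark") \
    .option("spark.marklogic.client.uri", "spark-example-user:password@localhost:8020") \
    .option("spark.marklogic.read.opticQuery", "op.fromView('example', 'employee')") \
    .load()
```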

CONTRIBUTING.md

Lines changed: 1 addition & 1 deletion
@@ -97,7 +97,7 @@ df = spark.read.format("com.marklogic.spark")\
    .option("spark.marklogic.client.username", "admin")\
    .option("spark.marklogic.client.password", "admin")\
    .option("spark.marklogic.client.authType", "digest")\
-   .option("spark.marklogic.read.opticDsl", "op.fromView('Medical', 'Authors')")\
+   .option("spark.marklogic.read.opticQuery", "op.fromView('Medical', 'Authors')")\
    .load()
```

docs/configuration.md

Lines changed: 2 additions & 2 deletions
@@ -51,7 +51,7 @@ Using this convenience can provide a much more succinct set of options - for exa
```
df = spark.read.format("com.marklogic.spark")\
    .option("spark.marklogic.client.uri", "spark-example-user:password@localhost:8020")\
-   .option("spark.marklogic.read.opticDsl", "op.fromView('example', 'employee')")\
+   .option("spark.marklogic.read.opticQuery", "op.fromView('example', 'employee')")\
    .load()
```

@@ -66,7 +66,7 @@ information on how data is read from MarkLogic.

| Option | Description |
| --- |---------------------------------------------------------------------------------------------------|
-| spark.marklogic.read.opticDsl | Required; the Optic DSL query to run for retrieving rows; must use `op.fromView` as the accessor. |
+| spark.marklogic.read.opticQuery | Required; the Optic DSL query to run for retrieving rows; must use `op.fromView` as the accessor. |
| spark.marklogic.read.numPartitions | The number of Spark partitions to create; defaults to `spark.default.parallelism`. |
| spark.marklogic.read.batchSize | Approximate number of rows to retrieve in each call to MarkLogic; defaults to 10000. |
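The three read options in the table above compose in a single reader. A sketch combining them (the partition and batch values are illustrative, not recommendations from this commit):

```
# Assumes an active SparkSession named `spark` with the connector on the classpath.
df = spark.read.format("com.marklogic.spark")\
    .option("spark.marklogic.client.uri", "spark-example-user:password@localhost:8020")\
    .option("spark.marklogic.read.opticQuery", "op.fromView('example', 'employee')")\
    .option("spark.marklogic.read.numPartitions", 4)\
    .option("spark.marklogic.read.batchSize", 5000)\
    .load()
```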

docs/getting-started/pyspark.md

Lines changed: 2 additions & 2 deletions
@@ -57,7 +57,7 @@ df = spark.read.format("com.marklogic.spark") \
    .option("spark.marklogic.client.port", "8020") \
    .option("spark.marklogic.client.username", "spark-example-user") \
    .option("spark.marklogic.client.password", "password") \
-   .option("spark.marklogic.read.opticDsl", "op.fromView('example', 'employee')") \
+   .option("spark.marklogic.read.opticQuery", "op.fromView('example', 'employee')") \
    .load()
```

@@ -67,7 +67,7 @@ client options in one option:
```
df = spark.read.format("com.marklogic.spark") \
    .option("spark.marklogic.client.uri", "spark-example-user:password@localhost:8020") \
-   .option("spark.marklogic.read.opticDsl", "op.fromView('example', 'employee')") \
+   .option("spark.marklogic.read.opticQuery", "op.fromView('example', 'employee')") \
    .load()
```

docs/reading.md

Lines changed: 3 additions & 3 deletions
@@ -31,7 +31,7 @@ from pyspark.sql.types import StructField, StructType, StringType
df = spark.read.format("com.marklogic.spark") \
    .schema(StructType([StructField("example.employee.GivenName", StringType()), StructField("example.employee.Surname", StringType())])) \
    .option("spark.marklogic.client.uri", "spark-example-user:password@localhost:8020") \
-   .option("spark.marklogic.read.opticDsl", "op.fromView('example', 'employee')") \
+   .option("spark.marklogic.read.opticQuery", "op.fromView('example', 'employee')") \
    .load()
```

@@ -51,7 +51,7 @@ op.fromView('example', 'employee', '', joinCol) \

df = spark.read.format("com.marklogic.spark") \
    .option("spark.marklogic.client.uri", "spark-example-user:password@localhost:8020") \
-   .option("spark.marklogic.read.opticDsl", query) \
+   .option("spark.marklogic.read.opticQuery", query) \
    .load()
```

@@ -75,7 +75,7 @@ stream = spark.readStream \
    .format("com.marklogic.spark") \
    .option("spark.marklogic.client.uri", "spark-example-user:password@localhost:8020") \
    .option("spark.marklogic.read.numPartitions", 2) \
-   .option("spark.marklogic.read.opticDsl", "op.fromView('example', 'employee')") \
+   .option("spark.marklogic.read.opticQuery", "op.fromView('example', 'employee')") \
    .load() \
    .writeStream \
    .format("console") \

examples/java-dependency/src/main/java/org/example/App.java

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@ public static void main(String[] args) {
        .read()
        .format("com.marklogic.spark")
        .option("spark.marklogic.client.uri", "spark-example-user:password@localhost:8020")
-       .option("spark.marklogic.read.opticDsl", "op.fromView('example', 'employee', '')")
+       .option("spark.marklogic.read.opticQuery", "op.fromView('example', 'employee', '')")
        .load()
        .filter("City == 'San Diego'")
        .collectAsList();

src/main/java/com/marklogic/spark/DefaultSource.java

Lines changed: 3 additions & 3 deletions
@@ -47,9 +47,9 @@ public class DefaultSource implements TableProvider {
    @Override
    public StructType inferSchema(CaseInsensitiveStringMap options) {
        final Map<String, String> caseSensitiveOptions = options.asCaseSensitiveMap();
-       final String query = caseSensitiveOptions.get(Options.READ_OPTIC_DSL);
+       final String query = caseSensitiveOptions.get(Options.READ_OPTIC_QUERY);
        if (query == null || query.trim().length() < 1) {
-           throw new IllegalArgumentException(String.format("No Optic query found; must define %s", Options.READ_OPTIC_DSL));
+           throw new IllegalArgumentException(String.format("No Optic query found; must define %s", Options.READ_OPTIC_QUERY));
        }
        RowManager rowManager = new ContextSupport(caseSensitiveOptions).connectToMarkLogic().newRowManager();
        RawQueryDSLPlan dslPlan = rowManager.newRawQueryDSLPlan(new StringHandle(query));

@@ -88,6 +88,6 @@ public boolean supportsExternalMetadata() {
    }

    private boolean isReadOperation(Map<String, String> properties) {
-       return properties.containsKey(Options.READ_OPTIC_DSL);
+       return properties.containsKey(Options.READ_OPTIC_QUERY);
    }
}
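Given the validation in `inferSchema` above, a read that omits the renamed option now fails with a message pointing at the new key. A sketch of what a caller would observe (hypothetical session; the error text comes from the code above):

```
# Assumes an active SparkSession named `spark` with the connector on the classpath.
df = spark.read.format("com.marklogic.spark") \
    .option("spark.marklogic.client.uri", "spark-example-user:password@localhost:8020") \
    .load()
# Raises IllegalArgumentException:
#   No Optic query found; must define spark.marklogic.read.opticQuery
```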

src/main/java/com/marklogic/spark/Options.java

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ public interface Options {

    String CLIENT_URI = "spark.marklogic.client.uri";

-   String READ_OPTIC_DSL = "spark.marklogic.read.opticDsl";
+   String READ_OPTIC_QUERY = "spark.marklogic.read.opticQuery";
    String READ_NUM_PARTITIONS = "spark.marklogic.read.numPartitions";
    String READ_BATCH_SIZE = "spark.marklogic.read.batchSize";

src/main/java/com/marklogic/spark/reader/ReadContext.java

Lines changed: 2 additions & 2 deletions
@@ -67,9 +67,9 @@ public ReadContext(Map<String, String> properties, StructType schema) {
    final long partitionCount = getNumericOption(Options.READ_NUM_PARTITIONS,
        SparkSession.active().sparkContext().defaultMinPartitions(), 1);
    final long batchSize = getNumericOption(Options.READ_BATCH_SIZE, DEFAULT_BATCH_SIZE, 0);
-   final String dslQuery = properties.get(Options.READ_OPTIC_DSL);
+   final String dslQuery = properties.get(Options.READ_OPTIC_QUERY);
    if (dslQuery == null || dslQuery.trim().length() < 1) {
-       throw new IllegalArgumentException(String.format("No Optic query found; must define %s", Options.READ_OPTIC_DSL));
+       throw new IllegalArgumentException(String.format("No Optic query found; must define %s", Options.READ_OPTIC_QUERY));
    }
    DatabaseClient client = connectToMarkLogic();
    RawQueryDSLPlan dslPlan = client.newRowManager().newRawQueryDSLPlan(new StringHandle(dslQuery));

src/test/java/com/marklogic/spark/AbstractIntegrationTest.java

Lines changed: 1 addition & 1 deletion
@@ -97,7 +97,7 @@ protected DataFrameReader newDefaultReader(SparkSession session) {
        .option("spark.marklogic.client.port", testConfig.getRestPort())
        .option("spark.marklogic.client.username", TEST_USERNAME)
        .option("spark.marklogic.client.password", TEST_PASSWORD)
-       .option(Options.READ_OPTIC_DSL, "op.fromView('Medical','Authors')");
+       .option(Options.READ_OPTIC_QUERY, "op.fromView('Medical','Authors')");
    }

    protected String readClasspathFile(String path) {

src/test/java/com/marklogic/spark/reader/AbstractPushDownTest.java

Lines changed: 1 addition & 1 deletion
@@ -43,7 +43,7 @@ private synchronized void addToRowCount(long totalRowCount) {

    protected Dataset<Row> newDatasetOrderedByCitationIDWithOneBucket() {
        return newDefaultReader()
-           .option(Options.READ_OPTIC_DSL, QUERY_ORDERED_BY_CITATION_ID)
+           .option(Options.READ_OPTIC_QUERY, QUERY_ORDERED_BY_CITATION_ID)
            .option(Options.READ_NUM_PARTITIONS, 1)
            .option(Options.READ_BATCH_SIZE, 0)
            .load();
