diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f629e60..e40eb8b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -4,8 +4,8 @@ distribution. ### Requirements: * MarkLogic Server 11+ -* Java (either version 8, 11, or 17). It is recommended to use 11 or 17, as Confluent has deprecated Java 8 support in - Confluent 7.x and is removing it in Confluent 8.x. Additionally, Sonar requires the use of Java 11 or 17. +* Java, either version 11 or 17, is required to run the Gradle tasks. + Additionally, SonarQube requires Java 17. See [the Confluent compatibility matrix](https://docs.confluent.io/platform/current/installation/versions-interoperability.html#java) for more information. After installing your desired version of Java, ensure that the `JAVA_HOME` environment variable @@ -76,7 +76,7 @@ application must be deployed. From the "test-app" directory, follow these steps: ## Automated Testing -Now that your MarkLogic server is configured and the test-app is deployed, you can run the tests via from the root -directory: +Now that your MarkLogic server is configured and the test-app is deployed, you can run the tests from the root +directory. Note that this command requires Java 11 or Java 17, due to the version of Gradle used by this project. ``` ./gradlew test ``` @@ -318,3 +318,7 @@ project. You must have Ruby installed. Additionally, there seems to be a bug wit The server needs to be run with Ruby 3.2.3, so you will need to run `chruby ruby-3.2.3` before starting the jekyll server. To start the jekyll server, cd into the /docs directory and run the command `bundle exec jekyll server`. This will start the server and the user documents will be available at http://127.0.0.1:4000/. + +## Publishing the Connector to Confluent + +Please refer to the internal wiki page for details on the process for releasing the connector to Confluent Hub. diff --git a/build.gradle b/build.gradle index b22e687..8545481 100644 --- a/build.gradle +++ b/build.gradle @@ -1,13 +1,12 @@ plugins { id 'java' id 'net.saliman.properties' version '1.5.2' - id 'com.github.johnrengelman.shadow' version '8.1.1' - id "com.github.jk1.dependency-license-report" version "1.19" + id 'com.gradleup.shadow' version '8.3.4' // Only used for testing - id 'com.marklogic.ml-gradle' version '4.8.0' + id 'com.marklogic.ml-gradle' version '5.0.0' id 'jacoco' - id "org.sonarqube" version "4.4.1.3373" + id "org.sonarqube" version "5.1.0.4882" // Used to generate Avro classes. This will write classes to build/generated-test-avro-java and also add that folder // as a source root. Since this is commented out by default, the generated Avro test class has been added to @@ -33,24 +32,21 @@ configurations { ext { - // Even though Kafka Connect 3.7.0 is out, we're staying with 3.6.1 in order to continue - // using the third-party Kafka JUnit tool. 
See https://github.com/mguenther/kafka-junit?tab=readme-ov-file - kafkaVersion = "3.6.1" + // kafka-junit is not compatible with Kafka versions after 3.6, so the tests that depend on it are + // disabled until it can be replaced. See https://github.com/mguenther/kafka-junit?tab=readme-ov-file + kafkaVersion = "3.8.1" } dependencies { - compileOnly "org.apache.kafka:connect-api:${kafkaVersion}" - compileOnly "org.apache.kafka:connect-json:${kafkaVersion}" compileOnly "org.apache.kafka:connect-runtime:${kafkaVersion}" - compileOnly "org.slf4j:slf4j-api:2.0.13" + compileOnly "org.slf4j:slf4j-api:1.7.36" - implementation 'com.marklogic:ml-javaclient-util:4.8.0' // Force DHF to use the latest version of ml-app-deployer, which minimizes security vulnerabilities - implementation "com.marklogic:ml-app-deployer:4.8.0" + implementation "com.marklogic:ml-app-deployer:5.0.0" - implementation "com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.15.3" + implementation "com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.17.2" // Note that in general, the version of the DHF jar must match that of the deployed DHF instance. Different versions // may work together, but that behavior is not guaranteed. - implementation("com.marklogic:marklogic-data-hub:6.0.0") { + implementation("com.marklogic:marklogic-data-hub:6.1.1") { exclude module: "marklogic-client-api" exclude module: "ml-javaclient-util" exclude module: "ml-app-deployer" @@ -61,17 +57,18 @@ dependencies { exclude module: "logback-classic" } - testImplementation 'com.marklogic:marklogic-junit5:1.4.0' + testImplementation 'com.marklogic:marklogic-junit5:1.5.0' - testImplementation "org.apache.kafka:connect-api:${kafkaVersion}" testImplementation "org.apache.kafka:connect-json:${kafkaVersion}" + + // This dependency can be removed once the disabled kafka-junit tests are deleted. testImplementation 'net.mguenther.kafka:kafka-junit:3.6.0' - testImplementation "org.apache.avro:avro-compiler:1.11.3" + testImplementation "org.apache.avro:avro-compiler:1.12.0" // Forcing logback to be used for test logging testImplementation "ch.qos.logback:logback-classic:1.3.14" - testImplementation "org.slf4j:jcl-over-slf4j:2.0.13" + testImplementation "org.slf4j:jcl-over-slf4j:2.0.16" documentation files('LICENSE.txt') documentation files('NOTICE.txt') diff --git a/docs/writing-data.md b/docs/writing-data.md index fb6b69e..fe22cbc 100644 --- a/docs/writing-data.md +++ b/docs/writing-data.md @@ -298,13 +298,16 @@ sent to the DLQ. MarkLogic then each of the records in the batch will be sent to the DLQ. The entire batch must be sent to the DLQ since the connector is unable to determine the cause of the failure. -When a record is sent to the DLQ, the connector first adds headers to the record providing information about the cause +When a record is sent to the DLQ, the connector first adds headers to the record, providing information about the cause of the failure in order to assist with troubleshooting and potential routing. - "marklogic-failure-type" : Either "Write failure" or "Record conversion" - "marklogic-exception-message" : Information from MarkLogic when there is a write failure - "marklogic-original-topic" : The name of the topic that this record came from - "marklogic-target-uri" : For write failures, this contains the target URI for the document +For these headers to be populated properly, the version of this connector must be compatible with the version of Kafka +that is being used. Connector versions 1.8.0 and 1.9.0 work with Kafka versions before 3.8; starting with +connector version 1.10.0, Kafka 3.8 or later is required. 
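+
+As an illustration, a downstream consumer can read these headers from DLQ records to triage failures. The sketch
+below is not part of the connector; the topic name ("marklogic-dlq"), connection settings, and string deserializers
+are placeholders that depend on how your connector and DLQ are configured.
+
+```java
+import java.nio.charset.StandardCharsets;
+import java.time.Duration;
+import java.util.List;
+import java.util.Properties;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.header.Header;
+
+public class DlqHeaderInspector {
+
+    public static void main(String[] args) {
+        Properties props = new Properties();
+        props.put("bootstrap.servers", "localhost:9092");
+        props.put("group.id", "dlq-inspector");
+        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
+            consumer.subscribe(List.of("marklogic-dlq"));
+            for (ConsumerRecord<String, String> dlqRecord : consumer.poll(Duration.ofSeconds(5))) {
+                String failureType = headerValue(dlqRecord, "marklogic-failure-type");
+                String originalTopic = headerValue(dlqRecord, "marklogic-original-topic");
+                if ("Write failure".equals(failureType)) {
+                    // Write failures also carry the MarkLogic exception message and the target URI.
+                    System.out.println("Record from topic " + originalTopic + " failed to be written to "
+                        + headerValue(dlqRecord, "marklogic-target-uri") + ": "
+                        + headerValue(dlqRecord, "marklogic-exception-message"));
+                }
+            }
+        }
+    }
+
+    private static String headerValue(ConsumerRecord<String, String> dlqRecord, String name) {
+        // Header values are exposed as raw bytes; decode them as UTF-8.
+        Header header = dlqRecord.headers().lastHeader(name);
+        return header == null ? null : new String(header.value(), StandardCharsets.UTF_8);
+    }
+}
+```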
## Sink connector error handling diff --git a/gradle.properties b/gradle.properties index 1242396..05461f1 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,5 +1,5 @@ group=com.marklogic -version=1.9.0 +version=1.10.0 # For the Confluent Connector Archive componentOwner=marklogic diff --git a/src/main/java/com/marklogic/kafka/connect/sink/WriteBatcherSinkTask.java b/src/main/java/com/marklogic/kafka/connect/sink/WriteBatcherSinkTask.java index 5c1e935..3fdd839 100644 --- a/src/main/java/com/marklogic/kafka/connect/sink/WriteBatcherSinkTask.java +++ b/src/main/java/com/marklogic/kafka/connect/sink/WriteBatcherSinkTask.java @@ -35,6 +35,8 @@ import org.apache.kafka.connect.runtime.InternalSinkRecord; import org.apache.kafka.connect.sink.ErrantRecordReporter; import org.apache.kafka.connect.sink.SinkRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.util.StringUtils; import java.io.IOException; @@ -53,6 +55,8 @@ */ public class WriteBatcherSinkTask extends AbstractSinkTask { + protected static final Logger classLogger = LoggerFactory.getLogger(WriteBatcherSinkTask.class); + private DatabaseClient databaseClient; private DataMovementManager dataMovementManager; private WriteBatcher writeBatcher; @@ -101,20 +105,35 @@ protected void writeSinkRecord(SinkRecord sinkRecord) { static void addFailureHeaders(SinkRecord sinkRecord, Throwable e, String failureHeaderValue, WriteEvent writeEvent) { if (sinkRecord instanceof InternalSinkRecord) { - ConsumerRecord originalRecord = ((InternalSinkRecord) sinkRecord).originalRecord(); - originalRecord.headers().add(MARKLOGIC_MESSAGE_FAILURE_HEADER, getBytesHandleNull(failureHeaderValue)); - originalRecord.headers().add(MARKLOGIC_MESSAGE_EXCEPTION_MESSAGE, getBytesHandleNull(e.getMessage())); - originalRecord.headers().add(MARKLOGIC_ORIGINAL_TOPIC, getBytesHandleNull(sinkRecord.topic())); - if (writeEvent != null) { - originalRecord.headers().add(MARKLOGIC_TARGET_URI, writeEvent.getTargetUri().getBytes(StandardCharsets.UTF_8)); + try { + ConsumerRecord originalRecord = ((InternalSinkRecord) sinkRecord).context().original(); + addFailureHeadersToOriginalSinkRecord(originalRecord, e, failureHeaderValue, writeEvent); + } catch (NoSuchMethodError methodException) { + classLogger.warn("This version of the MarkLogic Kafka Connector requires Kafka version 3.8.0 or" + + " higher in order to store failure information on the original sink record. 
Instead, the failure" + " information will be on the wrapper sink record."); + addFailureHeadersToNonInternalSinkRecord(sinkRecord, e, failureHeaderValue, writeEvent); } } else { - sinkRecord.headers().addString(MARKLOGIC_MESSAGE_FAILURE_HEADER, failureHeaderValue); - sinkRecord.headers().addString(MARKLOGIC_MESSAGE_EXCEPTION_MESSAGE, e.getMessage()); - sinkRecord.headers().addString(MARKLOGIC_ORIGINAL_TOPIC, sinkRecord.topic()); - if (writeEvent != null) { - sinkRecord.headers().addString(MARKLOGIC_TARGET_URI, writeEvent.getTargetUri()); - } + addFailureHeadersToNonInternalSinkRecord(sinkRecord, e, failureHeaderValue, writeEvent); + } + } + + static void addFailureHeadersToNonInternalSinkRecord(SinkRecord sinkRecord, Throwable e, String failureHeaderValue, WriteEvent writeEvent) { + sinkRecord.headers().addString(MARKLOGIC_MESSAGE_FAILURE_HEADER, failureHeaderValue); + sinkRecord.headers().addString(MARKLOGIC_MESSAGE_EXCEPTION_MESSAGE, e.getMessage()); + sinkRecord.headers().addString(MARKLOGIC_ORIGINAL_TOPIC, sinkRecord.topic()); + if (writeEvent != null) { + sinkRecord.headers().addString(MARKLOGIC_TARGET_URI, writeEvent.getTargetUri()); + } + } + + static void addFailureHeadersToOriginalSinkRecord(ConsumerRecord originalRecord, Throwable e, String failureHeaderValue, WriteEvent writeEvent) { + originalRecord.headers().add(MARKLOGIC_MESSAGE_FAILURE_HEADER, getBytesHandleNull(failureHeaderValue)); + originalRecord.headers().add(MARKLOGIC_MESSAGE_EXCEPTION_MESSAGE, getBytesHandleNull(e.getMessage())); + originalRecord.headers().add(MARKLOGIC_ORIGINAL_TOPIC, getBytesHandleNull(originalRecord.topic())); + if (writeEvent != null) { + originalRecord.headers().add(MARKLOGIC_TARGET_URI, writeEvent.getTargetUri().getBytes(StandardCharsets.UTF_8)); } } diff --git a/src/test/java/com/marklogic/kafka/connect/sink/SendWriteFailureRecordsToDlqKafkaTest.java b/src/test/java/com/marklogic/kafka/connect/sink/SendWriteFailureRecordsToDlqKafkaTest.java index 1827782..3bc8624 100644 --- a/src/test/java/com/marklogic/kafka/connect/sink/SendWriteFailureRecordsToDlqKafkaTest.java +++ b/src/test/java/com/marklogic/kafka/connect/sink/SendWriteFailureRecordsToDlqKafkaTest.java @@ -22,10 +22,11 @@ import net.mguenther.kafka.junit.ReadKeyValues; import net.mguenther.kafka.junit.SendKeyValues; import org.apache.kafka.common.header.Headers; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import java.util.ArrayList; @@ -68,6 +69,7 @@ void tearDownKafka() { } @Test + @Disabled("This test is disabled because kafka-junit is not compatible with kafka > 3.6.0") void failedBatchesShouldGoToTheDlq() throws InterruptedException { sendSomeJsonMessages(NUM_RECORDS); diff --git a/src/test/java/com/marklogic/kafka/connect/sink/WriteAvroDataTest.java b/src/test/java/com/marklogic/kafka/connect/sink/WriteAvroDataTest.java index eb9e083..2149896 100644 --- a/src/test/java/com/marklogic/kafka/connect/sink/WriteAvroDataTest.java +++ b/src/test/java/com/marklogic/kafka/connect/sink/WriteAvroDataTest.java @@ -2,6 +2,7 @@ import com.fasterxml.jackson.databind.JsonNode; +import org.apache.avro.CanonicalSchemaFormatterFactory; import org.apache.avro.Schema; import org.apache.avro.SchemaBuilder; import org.apache.avro.io.DatumWriter; import org.apache.avro.io.Encoder; @@ -83,7 +84,7 @@ void writeSchema() throws IOException { 
.endRecord(); FileCopyUtils.copy( - mySchema.toString(true).getBytes(), + new CanonicalSchemaFormatterFactory().getDefaultFormatter().format(mySchema).getBytes(), new File(Paths.get("src", "test", "avro").toFile(), "avroTestClass-schema.avsc") ); } diff --git a/src/test/java/com/marklogic/kafka/connect/sink/WriteFromKafkaTest.java b/src/test/java/com/marklogic/kafka/connect/sink/WriteFromKafkaTest.java index 0f8b256..e595741 100644 --- a/src/test/java/com/marklogic/kafka/connect/sink/WriteFromKafkaTest.java +++ b/src/test/java/com/marklogic/kafka/connect/sink/WriteFromKafkaTest.java @@ -24,6 +24,7 @@ import net.mguenther.kafka.junit.SendKeyValues; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import java.util.ArrayList; @@ -56,6 +57,7 @@ void tearDownKafka() { } @Test + @Disabled("This test is disabled because kafka-junit is not compatible with kafka > 3.6.0") void shouldWaitForKeyedRecordsToBePublished() throws InterruptedException { Integer NUM_RECORDS = 2; sendSomeJsonMessages(NUM_RECORDS); diff --git a/src/test/java/com/marklogic/kafka/connect/sink/WriteTransformDocumentTest.java b/src/test/java/com/marklogic/kafka/connect/sink/WriteTransformDocumentTest.java index 0ae1e6f..88cdc8f 100644 --- a/src/test/java/com/marklogic/kafka/connect/sink/WriteTransformDocumentTest.java +++ b/src/test/java/com/marklogic/kafka/connect/sink/WriteTransformDocumentTest.java @@ -24,6 +24,7 @@ import net.mguenther.kafka.junit.SendKeyValues; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import java.util.ArrayList; @@ -56,6 +57,7 @@ void tearDownKafka() { } @Test + @Disabled("This test is disabled because kafka-junit is not compatible with kafka > 3.6.0") void shouldWaitForKeyedRecordsToBePublished() throws InterruptedException { Integer NUM_RECORDS = 2; sendSomeJsonMessages(NUM_RECORDS); diff --git a/src/test/java/com/marklogic/kafka/connect/source/ReadRowsViaOpticDslKafkaTest.java b/src/test/java/com/marklogic/kafka/connect/source/ReadRowsViaOpticDslKafkaTest.java index b4f3957..7ac3da8 100644 --- a/src/test/java/com/marklogic/kafka/connect/source/ReadRowsViaOpticDslKafkaTest.java +++ b/src/test/java/com/marklogic/kafka/connect/source/ReadRowsViaOpticDslKafkaTest.java @@ -20,6 +20,7 @@ import net.mguenther.kafka.junit.EmbeddedKafkaCluster; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import java.util.Properties; @@ -48,6 +49,7 @@ void tearDownKafka() { @SuppressWarnings("java:S2699") // The assertion happens via kafka.observe @Test + @Disabled("This test is disabled because kafka-junit is not compatible with kafka > 3.6.0") void shouldWaitForKeyedRecordsToBePublished() throws InterruptedException { kafka.observe(on(AUTHORS_TOPIC, 15)); } diff --git a/test-app/gradlew b/test-app/gradlew index cccdd3d..a69d9cb 100755 --- a/test-app/gradlew +++ b/test-app/gradlew @@ -1,78 +1,129 @@ -#!/usr/bin/env sh +#!/bin/sh + +# +# Copyright © 2015-2021 the original authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ############################################################################## -## -## Gradle start up script for UN*X -## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# ############################################################################## # Attempt to set APP_HOME + # Resolve links: $0 may be a link -PRG="$0" -# Need this for relative symlinks. -while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "$PRG"`"/$link" - fi +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac done -SAVED="`pwd`" -cd "`dirname \"$PRG\"`/" >/dev/null -APP_HOME="`pwd -P`" -cd "$SAVED" >/dev/null + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit APP_NAME="Gradle" -APP_BASE_NAME=`basename "$0"` +APP_BASE_NAME=${0##*/} # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS="" +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' # Use the maximum available, or set MAX_FD != -1 to use that value. 
-MAX_FD="maximum" +MAX_FD=maximum warn () { echo "$*" -} +} >&2 die () { echo echo "$*" echo exit 1 -} +} >&2 # OS specific support (must be 'true' or 'false'). cygwin=false msys=false darwin=false nonstop=false -case "`uname`" in - CYGWIN* ) - cygwin=true - ;; - Darwin* ) - darwin=true - ;; - MINGW* ) - msys=true - ;; - NONSTOP* ) - nonstop=true - ;; +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + # Determine the Java command to use to start the JVM. if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" + JAVACMD=$JAVA_HOME/jre/sh/java else - JAVACMD="$JAVA_HOME/bin/java" + JAVACMD=$JAVA_HOME/bin/java fi if [ ! -x "$JAVACMD" ] ; then die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME @@ -81,7 +132,7 @@ Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi else - JAVACMD="java" + JAVACMD=java which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the @@ -89,84 +140,101 @@ location of your Java installation." fi # Increase the maximum file descriptors if we can. -if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then - MAX_FD_LIMIT=`ulimit -H -n` - if [ $? -eq 0 ] ; then - if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then - MAX_FD="$MAX_FD_LIMIT" - fi - ulimit -n $MAX_FD - if [ $? -ne 0 ] ; then - warn "Could not set maximum file descriptor limit: $MAX_FD" - fi - else - warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" - fi +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac fi -# For Darwin, add options to specify how the application appears in the dock -if $darwin; then - GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" -fi +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 
+ +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) -# For Cygwin, switch paths to Windows format before running java -if $cygwin ; then - APP_HOME=`cygpath --path --mixed "$APP_HOME"` - CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - JAVACMD=`cygpath --unix "$JAVACMD"` - - # We build the pattern for arguments to be converted via cygpath - ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` - SEP="" - for dir in $ROOTDIRSRAW ; do - ROOTDIRS="$ROOTDIRS$SEP$dir" - SEP="|" - done - OURCYGPATTERN="(^($ROOTDIRS))" - # Add a user-defined pattern to the cygpath arguments - if [ "$GRADLE_CYGPATTERN" != "" ] ; then - OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" - fi # Now convert the arguments - kludge to limit ourselves to /bin/sh - i=0 - for arg in "$@" ; do - CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` - CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option - - if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition - eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` - else - eval `echo args$i`="\"$arg\"" + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) fi - i=$((i+1)) + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg done - case $i in - (0) set -- ;; - (1) set -- "$args0" ;; - (2) set -- "$args0" "$args1" ;; - (3) set -- "$args0" "$args1" "$args2" ;; - (4) set -- "$args0" "$args1" "$args2" "$args3" ;; - (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; - esac fi -# Escape application args -save () { - for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done - echo " " -} -APP_ARGS=$(save "$@") - -# Collect all arguments for the java command, following the shell quoting and substitution rules -eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" - -# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong -if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then - cd "$(dirname "$0")" +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. 
+ +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" fi +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + exec "$JAVACMD" "$@" diff --git a/test-app/gradlew.bat b/test-app/gradlew.bat index f955316..f127cfd 100755 --- a/test-app/gradlew.bat +++ b/test-app/gradlew.bat @@ -1,4 +1,20 @@ -@if "%DEBUG%" == "" @echo off +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + +@if "%DEBUG%"=="" @echo off @rem ########################################################################## @rem @rem Gradle startup script for Windows @@ -9,19 +25,22 @@ if "%OS%"=="Windows_NT" setlocal set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. +if "%DIRNAME%"=="" set DIRNAME=. set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS= +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" @rem Find java.exe if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto init +if %ERRORLEVEL% equ 0 goto execute echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. @@ -35,7 +54,7 @@ goto fail set JAVA_HOME=%JAVA_HOME:"=% set JAVA_EXE=%JAVA_HOME%/bin/java.exe -if exist "%JAVA_EXE%" goto init +if exist "%JAVA_EXE%" goto execute echo. echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% @@ -45,38 +64,26 @@ echo location of your Java installation. goto fail -:init -@rem Get command-line arguments, handling Windows variants - -if not "%OS%" == "Windows_NT" goto win9xME_args - -:win9xME_args -@rem Slurp the command line arguments. 
-set CMD_LINE_ARGS= -set _SKIP=2 - -:win9xME_args_slurp -if "x%~1" == "x" goto execute - -set CMD_LINE_ARGS=%* - :execute @rem Setup the command line set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + @rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* :end @rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd +if %ERRORLEVEL% equ 0 goto mainEnd :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem the _cmd.exe /c_ return code! -if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% :mainEnd if "%OS%"=="Windows_NT" endlocal