diff --git a/.semaphore/live-site-deploy.yml b/.semaphore/live-site-deploy.yml index 3a1953af22..f5e64198b6 100644 --- a/.semaphore/live-site-deploy.yml +++ b/.semaphore/live-site-deploy.yml @@ -2,8 +2,7 @@ version: v1.0 name: Kafka Tutorials live site deployment agent: machine: - type: e1-standard-2 - os_image: ubuntu1804 + type: s1-prod-ubuntu20-04-amd64-1 blocks: - task: diff --git a/.semaphore/pr-staging-deploy.yml b/.semaphore/pr-staging-deploy.yml index 0a84472b50..09a3823b70 100644 --- a/.semaphore/pr-staging-deploy.yml +++ b/.semaphore/pr-staging-deploy.yml @@ -2,8 +2,7 @@ version: v1.0 name: Kafka Tutorials staging site deployment agent: machine: - type: e1-standard-2 - os_image: ubuntu1804 + type: s1-prod-ubuntu20-04-amd64-1 blocks: - task: diff --git a/.semaphore/semaphore.yml b/.semaphore/semaphore.yml index 7903257e56..8a1451764f 100644 --- a/.semaphore/semaphore.yml +++ b/.semaphore/semaphore.yml @@ -1,362 +1,101 @@ -version: v1.0 -name: Kafka Tutorials pipeline agent: machine: - type: e1-standard-2 - os_image: ubuntu1804 + type: s1-prod-ubuntu20-04-amd64-1 global_job_config: secrets: - - name: vault_sem2_approle_prod + - name: vault_sem2_approle prologue: commands: + - echo $SEMAPHORE_WORKFLOW_ID + - sudo add-apt-repository ppa:cwchien/gradle -y - checkout - make install-vault - . vault-bin/vault-setup - . vault-sem-get-secret aws_credentials - . vault-sem-get-secret dockerhub-semaphore-cred + - . vault-sem-get-secret artifactory-docker-helm - > aws ecr get-login-password --region us-west-2 | docker login --username AWS --password-stdin "$(aws sts get-caller-identity | jq -r .Account).dkr.ecr.us-west-2.amazonaws.com" - docker login --username $DOCKERHUB_USER --password $DOCKERHUB_APIKEY - - sudo pip3 install -e harness_runner/ + - docker login --username $DOCKER_USER --password $DOCKER_APIKEY confluent-docker.jfrog.io + - sem-version python 3.8 + - pip3 install -e harness_runner/ - > find _includes/tutorials/**/ksql -name docker-compose.yml | xargs -I {} sed -i -E "s/(\s+)(KSQL_CONFIG_DIR.*)/\1\2\\n\1KSQL_CONFLUENT_SUPPORT_METRICS_ENABLE: \"false\"/g" {} - -promotions: - - name: Deploy to live site - pipeline_file: live-site-deploy.yml - auto_promote_on: - - result: passed - branch: - - ^release$ - - - name: Deploy to staging site - pipeline_file: staging-site-deploy.yml - auto_promote_on: - - result: passed - branch: - - ^master$ - - - name: Stage PRs for review - pipeline_file: pr-staging-deploy.yml - auto_promote: - when: "branch != 'master' and branch != 'release' and branch !~ '^dependabot\.'" - blocks: - - name: Build the website - task: - prologue: - commands: - - checkout - - cache restore - - npm install - - gem install bundler - - bundle install - - cache store - jobs: - - name: Compile with Jekyll - commands: - - bundle exec jekyll build --baseurl "/$SEMAPHORE_GIT_BRANCH" - - cache store site-$SEMAPHORE_GIT_SHA _site - - - name: "☕️ Build and Test Java (Kafka and Kafka Streams) Only" - task: - prologue: - commands: - - checkout - - cache restore - jobs: - - name: Tests - matrix: - - env_var: JAVA_VERSION - values: ["8", "11"] - commands: - - sem-version java $JAVA_VERSION - - ./gradlew clean test - - - name: Run first block of tests - execution_time_limit: - minutes: 10 - task: - jobs: - - name: KSQL transforming tests - commands: - - make -C _includes/tutorials/transforming/ksql/code tutorial - - - name: KStreams transforming tests - commands: - - make -C _includes/tutorials/transforming/kstreams/code tutorial - - - name: Kafka transforming tests - commands: - - 
make -C _includes/tutorials/transforming/kafka/code tutorial - - - name: KSQL filtering tests - commands: - - make -C _includes/tutorials/filtering/ksql/code tutorial - - - name: KStreams filtering tests - commands: - - make -C _includes/tutorials/filtering/kstreams/code tutorial - - - name: KSQL splitting tests - commands: - - make -C _includes/tutorials/splitting/ksql/code tutorial - - - name: KStreams splitting tests - commands: - - make -C _includes/tutorials/splitting/kstreams/code tutorial - - - name: KSQL merging tests - commands: - - make -C _includes/tutorials/merging/ksql/code tutorial - - - name: KStreams merging tests - commands: - - make -C _includes/tutorials/merging/kstreams/code tutorial - - - name: KSQL join stream to stream tests - commands: - - make -C _includes/tutorials/joining-stream-stream/ksql/code tutorial - - - name: KSQL join stream to table tests - commands: - - make -C _includes/tutorials/joining-stream-table/ksql/code tutorial - - - name: KStreams join stream to table tests - commands: - - make -C _includes/tutorials/joining-stream-table/kstreams/code tutorial - - - name: KStreams table to table foreign key join tests - commands: - - make -C _includes/tutorials/fk-joins/kstreams/code tutorial - - - name: KSQL join table to table tests - commands: - - make -C _includes/tutorials/joining-table-table/ksql/code tutorial - - - name: KStreams tumbling windows tests - commands: - - make -C _includes/tutorials/tumbling-windows/kstreams/code tutorial - - - name: KSQL tumbling windows tests - commands: - - make -C _includes/tutorials/tumbling-windows/ksql/code tutorial - - - name: KSQL session windows tests - commands: - - make -C _includes/tutorials/session-windows/ksql/code tutorial - - - name: KSQL aggregation count tests - commands: - - make -C _includes/tutorials/aggregating-count/ksql/code tutorial - - - name: KSQL aggregation MIN/MAX tests - commands: - - make -C _includes/tutorials/aggregating-minmax/ksql/code tutorial - - - name: KSQL aggregation sum tests - commands: - - make -C _includes/tutorials/aggregating-sum/ksql/code tutorial - - - name: KStreams aggregation sum tests - commands: - - make -C _includes/tutorials/aggregating-sum/kstreams/code tutorial - - - name: KSQL serialization tests - commands: - - make -C _includes/tutorials/serialization/ksql/code tutorial - - - name: KStreams Serialization tests - commands: - - sem-version java 11 - - make -C _includes/tutorials/serialization/kstreams/code tutorial - - - name: KSQL rekey stream tests - commands: - - make -C _includes/tutorials/rekeying/ksql/code tutorial - - - name: KSQL rekey stream with function tests - commands: - - make -C _includes/tutorials/rekeying-function/ksql/code tutorial - - - name: Kafka produce and consume in Scala - commands: - - make -C _includes/tutorials/produce-consume-lang/scala/code tutorial - - - name: KSQL anomaly detection tests - commands: - - make -C _includes/tutorials/anomaly-detection/ksql/code tutorial - - - name: KStreams Test Kafka Streams session windows tests - commands: - - make -C _includes/tutorials/session-windows/kstreams/code tutorial - - - name: Run second block of tests - execution_time_limit: - minutes: 10 - task: - jobs: - - name: KStreams finding distinct events tests - commands: - - make -C _includes/tutorials/finding-distinct/kstreams/code tutorial - - - name: KStreams Connect key tests - commands: - - make -C _includes/tutorials/connect-add-key-to-source/kstreams/code tutorial - - - name: Kafka Connect SMT key tests - commands: - - make -C 
_includes/tutorials/connect-add-key-to-source/kafka/code tutorial - - - name: ksqlDB Connect SMT key tests - commands: - - make -C _includes/tutorials/connect-add-key-to-source/ksql/code tutorial - - - name: KStreams emit a final result from a time window - commands: - - make -C _includes/tutorials/window-final-result/kstreams/code tutorial - - - name: KSQL UDF tests - commands: - - make -C _includes/tutorials/udf/ksql/code tutorial - - - name: KSQL hopping windows tests - commands: - - make -C _includes/tutorials/hopping-windows/ksql/code tutorial - - - name: KSQL finding distinct events tests - commands: - - make -C _includes/tutorials/finding-distinct/ksql/code tutorial - - - name: KSQL flatten nested data - commands: - - make -C _includes/tutorials/flatten-nested-data/ksql/code tutorial - - - name: KSQL deserialization errors tests - commands: - - make -C _includes/tutorials/deserialization-errors/ksql/code tutorial - - - name: KStreams aggregation MIN/MAX tests - commands: - - make -C _includes/tutorials/aggregating-minmax/kstreams/code tutorial - - - name: KSQL Generate streams of test data tests - commands: - - make -C _includes/tutorials/generate-test-data-streams/ksql/code tutorial - - - name: KStreams Running Average tests - commands: - - make -C _includes/tutorials/aggregating-average/kstreams/code tutorial - - - name: KStreams Test Streams choosing output topic dynamically tests - commands: - - make -C _includes/tutorials/dynamic-output-topic/kstreams/code tutorial - - - name: KStreams Test Streams naming changelog and repartition topics tests - commands: - - make -C _includes/tutorials/naming-changelog-repartition-topics/kstreams/code tutorial - - - name: KStreams Test Cogrouping Streams tests - commands: - - make -C _includes/tutorials/cogrouping-streams/kstreams/code tutorial - - - name: Kafka Console Consumer Producer Basics tests - commands: - - make -C _includes/tutorials/console-consumer-producer-basic/kafka/code tutorial - - - name: Kafka Test Kafka Console Consumer with Primitive Keys and Values tests - commands: - - make -C _includes/tutorials/console-consumer-primitive-keys-values/kafka/code tutorial - - - name: Kafka Test Kafka Console Consumer Read Specific Offsets and Partitions tests - commands: - - make -C _includes/tutorials/console-consumer-read-specific-offsets-partition/kafka/code tutorial - - - name: Kafka Test Connect Datagen Local - commands: - - make -C _includes/tutorials/kafka-connect-datagen-local/kafka/code tutorial - - - name: Kafka Test Connect Datagen CCloud - commands: - - make -C _includes/tutorials/kafka-connect-datagen-ccloud/kafka/code tutorial - - - name: ksqlDB Test Change number of partitions and replicas - commands: - - make -C _includes/tutorials/change-topic-partitions-replicas/ksql/code tutorial - - - name: Kafka Consumer Application tests - commands: - - make -C _includes/tutorials/kafka-consumer-application/kafka/code tutorial - - - name: KStreams Test Converting a KStream to a KTable tests - commands: - - make -C _includes/tutorials/streams-to-table/kstreams/code tutorial - - - name: Kafka Producer Application tests - commands: - - make -C _includes/tutorials/kafka-producer-application/kafka/code tutorial - - - name: Kafka Test Kafka Producer with Callbacks tests - commands: - - make -C _includes/tutorials/kafka-producer-application-callback/kafka/code tutorial - - - name: KSQL Count messages on a topic tutorial tests - commands: - - make -C _includes/tutorials/count-messages/ksql/code tutorial - - - name: Kafka Count messages on a 
topic tutorial tests - commands: - - make -C _includes/tutorials/count-messages/kafka/code tutorial - - - name: Kafka Test ccloud produce and consume - commands: - - make -C _includes/tutorials/ccloud-produce-consume/kafka/code tutorial - - - name: KStreams Test Streams schedule operations tests - commands: - - make -C _includes/tutorials/kafka-streams-schedule-operations/kstreams/code tutorial - - - name: Kafka Message Ordering tests - commands: - - make -C _includes/tutorials/message-ordering/kafka/code tutorial - - - name: KSQL masking data tests - commands: - - make -C _includes/tutorials/masking-data/ksql/code tutorial - - - name: Kafka Consuming and Producing AVRO messages with console tools tests - commands: - - make -C _includes/tutorials/console-consumer-producer-avro/kafka/code tutorial - - - name: KSQL Test ksqlDB nested JSON tests - commands: - - make -C _includes/tutorials/ksql-nested-json/ksql/code tutorial - - - name: KSQL Test ksqlDB heterogeneous JSON tests - commands: - - make -C _includes/tutorials/ksql-heterogeneous-json/ksql/code tutorial - - - name: KSQL Time Concepts Tutorial tests - commands: - - make -C _includes/tutorials/time-concepts/ksql/code tutorial - - - name: KSQL Multi-joins Test tests - commands: - - make -C _includes/tutorials/multi-joins/ksql/code tutorial - - - name: KSQL Test ksqlDB concatenate columns tests - commands: - - make -C _includes/tutorials/concatenation/ksql/code tutorial - - - name: KSQL Test ksqlDB column difference tests - commands: - - make -C _includes/tutorials/column-difference/ksql/code tutorial - - - name: KSQL geo-distance tests - commands: - - make -C _includes/tutorials/geo-distance/ksql/code tutorial - - - name: KStreams Test Kafka Streams sliding windows tests - commands: - - make -C _includes/tutorials/sliding-windows/kstreams/code tutorial - +- execution_time_limit: + minutes: 10 + name: Run the tests part 1 + task: + jobs: + - commands: + - make -C _includes/tutorials/transforming/ksql/code tutorial + name: KSQL transforming tests + - commands: + - make -C _includes/tutorials/filtering/ksql/code tutorial + name: KSQL filtering tests + - commands: + - make -C _includes/tutorials/splitting/ksql/code tutorial + name: KSQL splitting tests + - commands: + - make -C _includes/tutorials/merging/ksql/code tutorial + name: KSQL merging tests + - commands: + - make -C _includes/tutorials/joining-stream-stream/ksql/code tutorial + name: KSQL join stream to stream tests + - commands: + - make -C _includes/tutorials/joining-stream-table/ksql/code tutorial + name: KSQL join stream to table tests + - commands: + - make -C _includes/tutorials/joining-table-table/ksql/code tutorial + name: KSQL join table to table tests + - commands: + - make -C _includes/tutorials/tumbling-windows/ksql/code tutorial + name: KSQL tumbling windows tests + - commands: + - make -C _includes/tutorials/session-windows/ksql/code tutorial + name: KSQL session windows tests + - commands: + - make -C _includes/tutorials/aggregating-count/ksql/code tutorial + name: KSQL aggregation count tests + - commands: + - make -C _includes/tutorials/aggregating-minmax/ksql/code tutorial + name: KSQL aggregation MIN/MAX tests + +- execution_time_limit: + minutes: 10 + name: Run the tests part 2 + task: + jobs: + - commands: + - make -C _includes/tutorials/aggregating-sum/ksql/code tutorial + name: KSQL aggregation sum tests + - commands: + - make -C _includes/tutorials/serialization/ksql/code tutorial + name: KSQL serialization tests + - commands: + - make -C 
_includes/tutorials/rekeying/ksql/code tutorial
+ name: KSQL rekey stream tests
+ - commands:
+ - make -C _includes/tutorials/rekeying-function/ksql/code tutorial
+ name: KSQL rekey stream with function tests
+ - commands:
+ - make -C _includes/tutorials/connect-add-key-to-source/ksql/code tutorial
+ name: ksqlDB Connect SMT key tests
+ - commands:
+ - make -C _includes/tutorials/udf/ksql/code tutorial
+ name: KSQL UDF tests
+ - commands:
+ - make -C _includes/tutorials/hopping-windows/ksql/code tutorial
+ name: KSQL hopping windows tests
+ - commands:
+ - make -C _includes/tutorials/finding-distinct/ksql/code tutorial
+ name: KSQL finding distinct events tests
+ - commands:
+ - make -C _includes/tutorials/flatten-nested-data/ksql/code tutorial
+ name: KSQL flatten nested data
+ - commands:
+ - make -C _includes/tutorials/deserialization-errors/ksql/code tutorial
+ name: KSQL deserialization errors tests
+name: Kafka Tutorials pipeline
+version: v1.0
diff --git a/.semaphore/staging-site-deploy.yml b/.semaphore/staging-site-deploy.yml
index 0a84472b50..09a3823b70 100644
--- a/.semaphore/staging-site-deploy.yml
+++ b/.semaphore/staging-site-deploy.yml
@@ -2,8 +2,7 @@ version: v1.0
name: Kafka Tutorials staging site deployment
agent:
machine:
- type: e1-standard-2
- os_image: ubuntu1804
+ type: s1-prod-ubuntu20-04-amd64-1
blocks:
- task:
diff --git a/_includes/tutorials/aggregating-count/ksql/code/docker-compose.yml b/_includes/tutorials/aggregating-count/ksql/code/docker-compose.yml
index 543cca0e84..591234fb1e 100644
--- a/_includes/tutorials/aggregating-count/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/aggregating-count/ksql/code/docker-compose.yml
@@ -35,15 +35,16 @@ services:
hostname: schema-registry
container_name: schema-registry
depends_on:
+ - zookeeper
- broker
ports:
- "8081:8081"
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
+ SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
ksqldb-server:
- image: confluentinc/ksqldb-server:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
hostname: ksqldb-server
container_name: ksqldb-server
depends_on:
@@ -61,7 +62,7 @@ services:
KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
ksqldb-cli:
- image: confluentinc/ksqldb-cli:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-cli:0.705000235.1-rc2
container_name: ksqldb-cli
depends_on:
- broker
diff --git a/_includes/tutorials/aggregating-count/ksql/code/tutorial-steps/dev/expected-print-output-topic.log b/_includes/tutorials/aggregating-count/ksql/code/tutorial-steps/dev/expected-print-output-topic.log
index a6477315d4..4c14500a84 100644
--- a/_includes/tutorials/aggregating-count/ksql/code/tutorial-steps/dev/expected-print-output-topic.log
+++ b/_includes/tutorials/aggregating-count/ksql/code/tutorial-steps/dev/expected-print-output-topic.log
@@ -1,6 +1,6 @@
Key format: KAFKA_STRING
Value format: AVRO or KAFKA_STRING
-rowtime: 2020/05/04 21:19:17.935 Z, key: Aliens, value: {"TICKETS_SOLD": 1}
-rowtime: 2020/05/04 21:19:18.365 Z, key: Die Hard, value: {"TICKETS_SOLD": 3}
-rowtime: 2020/05/04 21:19:18.586 Z, key: The Big Lebowski, value: {"TICKETS_SOLD": 2}
+rowtime: 2020/05/04 21:19:17.935 Z, key: Aliens, value: {"TICKETS_SOLD": 1}, partition: 0
+rowtime: 2020/05/04 21:19:18.365 Z, key: Die Hard, value: {"TICKETS_SOLD": 3}, partition: 0
+rowtime: 2020/05/04 21:19:18.586 Z, key: The Big Lebowski, value: {"TICKETS_SOLD": 2}, partition: 0
Topic printing ceased
diff --git a/_includes/tutorials/aggregating-minmax/ksql/code/docker-compose.yml b/_includes/tutorials/aggregating-minmax/ksql/code/docker-compose.yml
index 543cca0e84..591234fb1e 100644
--- a/_includes/tutorials/aggregating-minmax/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/aggregating-minmax/ksql/code/docker-compose.yml
@@ -35,15 +35,16 @@ services:
hostname: schema-registry
container_name: schema-registry
depends_on:
+ - zookeeper
- broker
ports:
- "8081:8081"
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
+ SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
ksqldb-server:
- image: confluentinc/ksqldb-server:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
hostname: ksqldb-server
container_name: ksqldb-server
depends_on:
@@ -61,7 +62,7 @@ services:
KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
ksqldb-cli:
- image: confluentinc/ksqldb-cli:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-cli:0.705000235.1-rc2
container_name: ksqldb-cli
depends_on:
- broker
diff --git a/_includes/tutorials/aggregating-minmax/ksql/code/tutorial-steps/dev/expected-print-output-topic.log b/_includes/tutorials/aggregating-minmax/ksql/code/tutorial-steps/dev/expected-print-output-topic.log
index c6b0d9d38f..355e786cb3 100644
--- a/_includes/tutorials/aggregating-minmax/ksql/code/tutorial-steps/dev/expected-print-output-topic.log
+++ b/_includes/tutorials/aggregating-minmax/ksql/code/tutorial-steps/dev/expected-print-output-topic.log
@@ -1,5 +1,5 @@
Key format: KAFKA_INT
Value format: AVRO
-rowtime: 2020/05/04 21:27:50.630 Z, key: 2019, value: {"MIN__TOTAL_SALES": 385082142, "MAX__TOTAL_SALES": 856980506}
-rowtime: 2020/05/04 21:27:50.946 Z, key: 2018, value: {"MIN__TOTAL_SALES": 324512774, "MAX__TOTAL_SALES": 700059566}
+rowtime: 2020/05/04 21:27:50.630 Z, key: 2019, value: {"MIN__TOTAL_SALES": 385082142, "MAX__TOTAL_SALES": 856980506}, partition: 0
+rowtime: 2020/05/04 21:27:50.946 Z, key: 2018, value: {"MIN__TOTAL_SALES": 324512774, "MAX__TOTAL_SALES": 700059566}, partition: 0
Topic printing ceased
diff --git a/_includes/tutorials/aggregating-minmax/ksql/markup/dev/transient-query.adoc b/_includes/tutorials/aggregating-minmax/ksql/markup/dev/transient-query.adoc
index 0a45d86c8c..1a5ca33966 100644
--- a/_includes/tutorials/aggregating-minmax/ksql/markup/dev/transient-query.adoc
+++ b/_includes/tutorials/aggregating-minmax/ksql/markup/dev/transient-query.adoc
@@ -1,6 +1,6 @@
With our test data in place, let's try a query to compute the min and max. A `SELECT` statement with an `EMIT CHANGES` in ksqlDB is called a _transient_ push query, meaning that after we stop it, it is gone and will not keep processing the input stream. We'll create a _persistent_ query, the contrast to a transient push query, a few steps from now.
-If you're familiar with SQL, the text of the query itself is fairly self-explanatory. We are calculating the highest and lowest grossing movie figures by year using `MIN` and `MAX` aggregations on the `TOTAL_SALES` column. This query will keep running, continuing to return results until you hit `CTRL+C`. Most ksqlDB queries are _continuous queries_ that run forever in this way; there is always potentially more input available in the source stream, so the query never finishes on its own. 
+If you're familiar with SQL, the text of the query itself is fairly self-explanatory. We are calculating the highest and lowest grossing movie figures by year using `MIN` and `MAX` aggregations on the `TOTAL_SALES` column. This query will keep running, continuing to return results until you hit CTRL-C. Most ksqlDB queries are _continuous queries_ that run forever in this way; there is always potentially more input available in the source stream, so the query never finishes on its own.
+++++
{% include_raw tutorials/aggregating-minmax/ksql/code/tutorial-steps/dev/transient-query.sql %}
+++++
diff --git a/_includes/tutorials/aggregating-sum/ksql/code/docker-compose.yml b/_includes/tutorials/aggregating-sum/ksql/code/docker-compose.yml
index 543cca0e84..591234fb1e 100644
--- a/_includes/tutorials/aggregating-sum/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/aggregating-sum/ksql/code/docker-compose.yml
@@ -35,15 +35,16 @@ services:
hostname: schema-registry
container_name: schema-registry
depends_on:
+ - zookeeper
- broker
ports:
- "8081:8081"
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
+ SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
ksqldb-server:
- image: confluentinc/ksqldb-server:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
hostname: ksqldb-server
container_name: ksqldb-server
depends_on:
@@ -61,7 +62,7 @@ services:
KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
ksqldb-cli:
- image: confluentinc/ksqldb-cli:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-cli:0.705000235.1-rc2
container_name: ksqldb-cli
depends_on:
- broker
diff --git a/_includes/tutorials/aggregating-sum/ksql/code/tutorial-steps/dev/expected-print.log b/_includes/tutorials/aggregating-sum/ksql/code/tutorial-steps/dev/expected-print.log
index 2fa5d0e91c..a6540512da 100644
--- a/_includes/tutorials/aggregating-sum/ksql/code/tutorial-steps/dev/expected-print.log
+++ b/_includes/tutorials/aggregating-sum/ksql/code/tutorial-steps/dev/expected-print.log
@@ -1,6 +1,6 @@
Key format: KAFKA_STRING
Value format: AVRO
-rowtime: 2019/07/18 10:00:00.000 Z, key: Aliens, value: {"TOTAL_VALUE": 10}
-rowtime: 2019/07/18 10:01:36.000 Z, key: Die Hard, value: {"TOTAL_VALUE": 48}
-rowtime: 2019/07/18 11:03:50.000 Z, key: The Big Lebowski, value: {"TOTAL_VALUE": 24}
+rowtime: 2019/07/18 10:00:00.000 Z, key: Aliens, value: {"TOTAL_VALUE": 10}, partition: 0
+rowtime: 2019/07/18 10:01:36.000 Z, key: Die Hard, value: {"TOTAL_VALUE": 48}, partition: 0
+rowtime: 2019/07/18 11:03:50.000 Z, key: The Big Lebowski, value: {"TOTAL_VALUE": 24}, partition: 0
Topic printing ceased
diff --git a/_includes/tutorials/anomaly-detection/ksql/code/docker-compose.yml b/_includes/tutorials/anomaly-detection/ksql/code/docker-compose.yml
index f8f3abff9d..897a81754d 100644
--- a/_includes/tutorials/anomaly-detection/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/anomaly-detection/ksql/code/docker-compose.yml
@@ -43,7 +43,7 @@ services:
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
ksqldb-server:
- image: confluentinc/ksqldb-server:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
hostname: ksqldb-server
container_name: ksqldb-server
depends_on:
@@ -62,7 +62,7 @@ services:
KSQL_KSQL_STREAMS_AUTO_OFFSET_RESET: "earliest"
ksqldb-cli:
- image: confluentinc/ksqldb-cli:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-cli:0.705000235.1-rc2
container_name: ksqldb-cli
depends_on:
- broker
diff --git a/_includes/tutorials/change-topic-partitions-replicas/ksql/code/docker-compose.yml b/_includes/tutorials/change-topic-partitions-replicas/ksql/code/docker-compose.yml
index fd75417212..7fa3925782 100644
--- a/_includes/tutorials/change-topic-partitions-replicas/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/change-topic-partitions-replicas/ksql/code/docker-compose.yml
@@ -61,7 +61,7 @@ services:
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
ksqldb-server:
- image: confluentinc/ksqldb-server:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
hostname: ksqldb-server
container_name: ksqldb-server
depends_on:
@@ -79,7 +79,7 @@ services:
KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
ksqldb-cli:
- image: confluentinc/ksqldb-cli:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-cli:0.705000235.1-rc2
container_name: ksqldb-cli
depends_on:
- broker
diff --git a/_includes/tutorials/column-difference/ksql/code/docker-compose.yml b/_includes/tutorials/column-difference/ksql/code/docker-compose.yml
index f8f3abff9d..897a81754d 100644
--- a/_includes/tutorials/column-difference/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/column-difference/ksql/code/docker-compose.yml
@@ -43,7 +43,7 @@ services:
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
ksqldb-server:
- image: confluentinc/ksqldb-server:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
hostname: ksqldb-server
container_name: ksqldb-server
depends_on:
@@ -62,7 +62,7 @@ services:
KSQL_KSQL_STREAMS_AUTO_OFFSET_RESET: "earliest"
ksqldb-cli:
- image: confluentinc/ksqldb-cli:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-cli:0.705000235.1-rc2
container_name: ksqldb-cli
depends_on:
- broker
diff --git a/_includes/tutorials/concatenation/ksql/code/docker-compose.yml b/_includes/tutorials/concatenation/ksql/code/docker-compose.yml
index f8f3abff9d..897a81754d 100644
--- a/_includes/tutorials/concatenation/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/concatenation/ksql/code/docker-compose.yml
@@ -43,7 +43,7 @@ services:
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
ksqldb-server:
- image: confluentinc/ksqldb-server:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
hostname: ksqldb-server
container_name: ksqldb-server
depends_on:
@@ -62,7 +62,7 @@ services:
KSQL_KSQL_STREAMS_AUTO_OFFSET_RESET: "earliest"
ksqldb-cli:
- image: confluentinc/ksqldb-cli:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-cli:0.705000235.1-rc2
container_name: ksqldb-cli
depends_on:
- broker
diff --git a/_includes/tutorials/connect-add-key-to-source/ksql/code/docker-compose.yml b/_includes/tutorials/connect-add-key-to-source/ksql/code/docker-compose.yml
index ed41ed5f44..9be1c243ef 100644
--- a/_includes/tutorials/connect-add-key-to-source/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/connect-add-key-to-source/ksql/code/docker-compose.yml
@@ -35,15 +35,16 @@ services:
hostname: schema-registry
container_name: schema-registry
depends_on:
+ - zookeeper
- broker
ports:
- "8081:8081"
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
+ SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
ksqldb-server:
- image: confluentinc/ksqldb-server:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
hostname: ksqldb
container_name: ksqldb
depends_on:
@@ -92,7 +93,7 @@ services:
sleep infinity
ksqldb-cli:
- image: confluentinc/ksqldb-cli:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-cli:0.705000235.1-rc2
container_name: ksqldb-cli
depends_on:
- broker
diff --git a/_includes/tutorials/connect-add-key-to-source/ksql/code/tutorial-steps/dev/consume-topic_expected.log b/_includes/tutorials/connect-add-key-to-source/ksql/code/tutorial-steps/dev/consume-topic_expected.log
index 7712401aaf..b770760fd2 100644
--- a/_includes/tutorials/connect-add-key-to-source/ksql/code/tutorial-steps/dev/consume-topic_expected.log
+++ b/_includes/tutorials/connect-add-key-to-source/ksql/code/tutorial-steps/dev/consume-topic_expected.log
@@ -1,9 +1,9 @@
Key format: KAFKA_INT or KAFKA_STRING
Value format: AVRO or KAFKA_STRING
-rowtime: 3/25/20 11:53:36 AM UTC, key: 1, value: {"name": "Raleigh", "state": "NC"}
-rowtime: 3/25/20 11:53:36 AM UTC, key: 2, value: {"name": "Mountain View", "state": "CA"}
-rowtime: 3/25/20 11:53:36 AM UTC, key: 3, value: {"name": "Knoxville", "state": "TN"}
-rowtime: 3/25/20 11:53:36 AM UTC, key: 4, value: {"name": "Houston", "state": "TX"}
-rowtime: 3/25/20 11:53:36 AM UTC, key: 5, value: {"name": "Olympia", "state": "WA"}
-rowtime: 3/25/20 11:53:36 AM UTC, key: 6, value: {"name": "Bismarck", "state": "ND"}
+rowtime: 3/25/20 11:53:36 AM UTC, key: 1, value: {"name": "Raleigh", "state": "NC"}, partition: 0
+rowtime: 3/25/20 11:53:36 AM UTC, key: 2, value: {"name": "Mountain View", "state": "CA"}, partition: 0
+rowtime: 3/25/20 11:53:36 AM UTC, key: 3, value: {"name": "Knoxville", "state": "TN"}, partition: 0
+rowtime: 3/25/20 11:53:36 AM UTC, key: 4, value: {"name": "Houston", "state": "TX"}, partition: 0
+rowtime: 3/25/20 11:53:36 AM UTC, key: 5, value: {"name": "Olympia", "state": "WA"}, partition: 0
+rowtime: 3/25/20 11:53:36 AM UTC, key: 6, value: {"name": "Bismarck", "state": "ND"}, partition: 0
Topic printing ceased
diff --git a/_includes/tutorials/count-messages/ksql/code/docker-compose.yml b/_includes/tutorials/count-messages/ksql/code/docker-compose.yml
index a438d638db..9a48e68ef9 100644
--- a/_includes/tutorials/count-messages/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/count-messages/ksql/code/docker-compose.yml
@@ -49,7 +49,7 @@ services:
# To connect to ksqlDB CLI
# docker exec --interactive --tty ksqldb ksql http://localhost:8088
# *-----------------------------*
- image: confluentinc/ksqldb-server:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
container_name: ksqldb
depends_on:
- broker
diff --git a/_includes/tutorials/deserialization-errors/ksql/code/docker-compose.yml b/_includes/tutorials/deserialization-errors/ksql/code/docker-compose.yml
index a1489d8dac..55bd779003 100644
--- a/_includes/tutorials/deserialization-errors/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/deserialization-errors/ksql/code/docker-compose.yml
@@ -35,15 +35,16 @@
hostname: schema-registry
container_name: schema-registry
depends_on:
+ - zookeeper
- broker
ports:
- "8081:8081"
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
+ SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
ksqldb-server:
- image: confluentinc/ksqldb-server:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
hostname: ksqldb-server
container_name: ksqldb-server
depends_on:
@@ -67,7 +68,7 @@
- ./log4j:/opt/app/log4j
ksqldb-cli:
- image: confluentinc/ksqldb-cli:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-cli:0.705000235.1-rc2
container_name: ksqldb-cli
depends_on:
- broker
diff --git a/_includes/tutorials/deserialization-errors/ksql/code/tutorial-steps/dev/expected-check-errors-query.log b/_includes/tutorials/deserialization-errors/ksql/code/tutorial-steps/dev/expected-check-errors-query.log
index 15df624f7a..fc688b4a23 100644
--- a/_includes/tutorials/deserialization-errors/ksql/code/tutorial-steps/dev/expected-check-errors-query.log
+++ b/_includes/tutorials/deserialization-errors/ksql/code/tutorial-steps/dev/expected-check-errors-query.log
@@ -1,10 +1,10 @@
+-------------------------------------------+-------------------------------------------+-------------------------------------------+
|ERRORMESSAGE |MSG |CAUSE |
+-------------------------------------------+-------------------------------------------+-------------------------------------------+
-|mvn value from topic: SENSORS_RAW |{"id": "1a076a64-4a84-40cb-a2e8-2190f3b3746|[Can't convert type. sourceType: TextNode, |
-| |5", "timestamp": "2020-01-15 02:30:30", "en|requiredType: BOOLEAN, path: $.ENABLED, Can|
-| |abled": "true"} |'t convert type. sourceType: TextNode, requ|
-| | |iredType: BOOLEAN, path: .ENABLED, Can't co|
+|Failed to deserialize value from topic: SEN|{"id": "1a076a64-4a84-40cb-a2e8-2190f3b3746|[Can't convert type. sourceType: TextNode, |
+|SORS_RAW. Can't convert type. sourceType: T|5", "timestamp": "2020-01-15 02:30:30", "en|requiredType: BOOLEAN, path: $.ENABLED, Can|
+|extNode, requiredType: BOOLEAN, path: $.ENA|abled": "true"} |'t convert type. sourceType: TextNode, requ|
+|BLED | |iredType: BOOLEAN, path: .ENABLED, Can't co|
| | |nvert type. sourceType: TextNode, requiredT|
| | |ype: BOOLEAN] |
Limit Reached
diff --git a/_includes/tutorials/deserialization-errors/ksql/code/tutorial-steps/dev/expected-print.log b/_includes/tutorials/deserialization-errors/ksql/code/tutorial-steps/dev/expected-print.log
index dd02fe46d8..e080d36cd0 100644
--- a/_includes/tutorials/deserialization-errors/ksql/code/tutorial-steps/dev/expected-print.log
+++ b/_includes/tutorials/deserialization-errors/ksql/code/tutorial-steps/dev/expected-print.log
@@ -1,4 +1,4 @@
Key format: ¯\_(ツ)_/¯ - no data processed
Value format: JSON or KAFKA_STRING
-rowtime: 2020/06/05 11:25:21.181 Z, key: {% include_raw tutorials/deserialization-errors/ksql/code/tutorial-steps/dev/check-errors-query.sql %}
+++++
diff --git a/_includes/tutorials/filtering/ksql/code/docker-compose.yml b/_includes/tutorials/filtering/ksql/code/docker-compose.yml
index 543cca0e84..591234fb1e 100644
--- a/_includes/tutorials/filtering/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/filtering/ksql/code/docker-compose.yml
@@ -35,15 +35,16 @@ services:
hostname: schema-registry
container_name: schema-registry
depends_on:
+ - zookeeper
- broker
ports:
- "8081:8081"
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
+ SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
ksqldb-server:
- image: confluentinc/ksqldb-server:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
hostname: ksqldb-server
container_name: ksqldb-server
depends_on:
@@ -61,7 +62,7 @@ services:
KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
ksqldb-cli:
- image: confluentinc/ksqldb-cli:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-cli:0.705000235.1-rc2
container_name: ksqldb-cli
depends_on:
- broker
diff --git a/_includes/tutorials/filtering/ksql/code/tutorial-steps/dev/expected-print.log b/_includes/tutorials/filtering/ksql/code/tutorial-steps/dev/expected-print.log
index 645cf30f32..17f4725931 100644
--- a/_includes/tutorials/filtering/ksql/code/tutorial-steps/dev/expected-print.log
+++ b/_includes/tutorials/filtering/ksql/code/tutorial-steps/dev/expected-print.log
@@ -1,7 +1,7 @@
Key format: KAFKA_BIGINT or KAFKA_DOUBLE or KAFKA_STRING
Value format: AVRO or KAFKA_STRING
-rowtime: 2020/06/02 14:36:36.846 Z, key: 2, value: {"AUTHOR": "George R. R. Martin", "TITLE": "A Song of Ice and Fire"}
-rowtime: 2020/06/02 14:36:37.057 Z, key: 4, value: {"AUTHOR": "George R. R. Martin", "TITLE": "Fire & Blood"}
-rowtime: 2020/06/02 14:36:37.350 Z, key: 7, value: {"AUTHOR": "George R. R. Martin", "TITLE": "A Dream of Spring"}
-rowtime: 2020/06/02 14:36:37.541 Z, key: 9, value: {"AUTHOR": "George R. R. Martin", "TITLE": "The Ice Dragon"}
+rowtime: 2020/06/02 14:36:36.846 Z, key: 2, value: {"AUTHOR": "George R. R. Martin", "TITLE": "A Song of Ice and Fire"}, partition: 0
+rowtime: 2020/06/02 14:36:37.057 Z, key: 4, value: {"AUTHOR": "George R. R. Martin", "TITLE": "Fire & Blood"}, partition: 0
+rowtime: 2020/06/02 14:36:37.350 Z, key: 7, value: {"AUTHOR": "George R. R. Martin", "TITLE": "A Dream of Spring"}, partition: 0
+rowtime: 2020/06/02 14:36:37.541 Z, key: 9, value: {"AUTHOR": "George R. R. Martin", "TITLE": "The Ice Dragon"}, partition: 0
Topic printing ceased
diff --git a/_includes/tutorials/finding-distinct/ksql/code/docker-compose.yml b/_includes/tutorials/finding-distinct/ksql/code/docker-compose.yml
index ad73bda721..1044e61041 100644
--- a/_includes/tutorials/finding-distinct/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/finding-distinct/ksql/code/docker-compose.yml
@@ -28,6 +28,7 @@ services:
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_LOG_RETENTION_MS: -1
KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
schema-registry:
@@ -35,15 +36,16 @@ services:
hostname: schema-registry
container_name: schema-registry
depends_on:
+ - zookeeper
- broker
ports:
- "8081:8081"
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
+ SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
ksql-server:
- image: confluentinc/ksqldb-server:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
hostname: ksql-server
container_name: ksql-server
depends_on:
@@ -57,11 +59,12 @@ services:
KSQL_BOOTSTRAP_SERVERS: "broker:9092"
KSQL_HOST_NAME: ksql-server
KSQL_LISTENERS: "http://0.0.0.0:8088"
- KSQL_CACHE_MAX_BYTES_BUFFERING: 0
KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
+ KSQL_KSQL_STREAMS_AUTO_OFFSET_RESET: "earliest"
+ KSQL_KSQL_STREAMS_CACHE_MAX_BYTES_BUFFERING: 0
ksql-cli:
- image: confluentinc/ksqldb-cli:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-cli:0.705000235.1-rc2
container_name: ksql-cli
depends_on:
- broker
diff --git a/_includes/tutorials/finding-distinct/ksql/code/src/statements.sql b/_includes/tutorials/finding-distinct/ksql/code/src/statements.sql
index 563a0ffce8..02e222215f 100644
--- a/_includes/tutorials/finding-distinct/ksql/code/src/statements.sql
+++ b/_includes/tutorials/finding-distinct/ksql/code/src/statements.sql
@@ -1,6 +1,6 @@
-CREATE STREAM CLICKS (IP_ADDRESS VARCHAR, URL VARCHAR, TIMESTAMP VARCHAR)
+CREATE STREAM CLICKS (IP_ADDRESS STRING, URL STRING, TIMESTAMP STRING)
WITH (KAFKA_TOPIC = 'CLICKS',
- VALUE_FORMAT = 'JSON',
+ FORMAT = 'JSON',
TIMESTAMP = 'TIMESTAMP',
TIMESTAMP_FORMAT = 'yyyy-MM-dd''T''HH:mm:ssXXX',
PARTITIONS = 1);
@@ -13,13 +13,14 @@ CREATE TABLE DETECTED_CLICKS AS
AS_VALUE(IP_ADDRESS) AS IP_ADDRESS,
AS_VALUE(URL) AS URL,
AS_VALUE(TIMESTAMP) AS TIMESTAMP
- FROM CLICKS WINDOW TUMBLING (SIZE 2 MINUTES)
+ FROM CLICKS WINDOW TUMBLING (SIZE 2 MINUTES, RETENTION 1000 DAYS)
GROUP BY IP_ADDRESS, URL, TIMESTAMP
HAVING COUNT(IP_ADDRESS) = 1;
-CREATE STREAM RAW_DISTINCT_CLICKS (IP_ADDRESS VARCHAR, URL VARCHAR, TIMESTAMP VARCHAR)
+CREATE STREAM RAW_DISTINCT_CLICKS (IP_ADDRESS STRING, URL STRING, TIMESTAMP STRING)
WITH (KAFKA_TOPIC = 'DETECTED_CLICKS',
- VALUE_FORMAT = 'JSON');
+ PARTITIONS = 1,
+ FORMAT = 'JSON');
CREATE STREAM DISTINCT_CLICKS AS
SELECT
diff --git a/_includes/tutorials/finding-distinct/ksql/code/test/input.json b/_includes/tutorials/finding-distinct/ksql/code/test/input.json
index 588a78aced..bf9a7eb436 100644
--- a/_includes/tutorials/finding-distinct/ksql/code/test/input.json
+++ b/_includes/tutorials/finding-distinct/ksql/code/test/input.json
@@ -2,56 +2,50 @@
"inputs": [
{
"topic": "CLICKS",
- "key": "10.0.0.1",
"value": {
"IP_ADDRESS": "10.0.0.1",
"URL": "https://docs.confluent.io/current/tutorials/examples/kubernetes/gke-base/docs/index.html",
- "TIMESTAMP": "2020-01-17T14:50:43+00:00"
+ "TIMESTAMP": "2021-01-17T14:50:43+00:00"
}
},
{
"topic": "CLICKS",
- "key": "10.0.0.2",
"value": {
"IP_ADDRESS": "10.0.0.12",
"URL": "https://www.confluent.io/hub/confluentinc/kafka-connect-datagen",
- "TIMESTAMP": "2020-01-17T14:53:44+00:01"
+ "TIMESTAMP": "2021-01-17T14:53:44+00:01"
}
},
{
"topic": "CLICKS",
- "key": "10.0.0.3",
"value": {
"IP_ADDRESS": "10.0.0.13",
"URL": "https://www.confluent.io/hub/confluentinc/kafka-connect-datagen",
- "TIMESTAMP": "2020-01-17T14:56:45+00:03"
+ "TIMESTAMP": "2021-01-17T14:56:45+00:03"
}
},
{
"topic": "CLICKS",
- "key": "10.0.0.1",
"value": {
"IP_ADDRESS": "10.0.0.1",
"URL": "https://docs.confluent.io/current/tutorials/examples/kubernetes/gke-base/docs/index.html",
- "TIMESTAMP": "2020-01-17T14:50:43+00:00"
+ "TIMESTAMP": "2021-01-17T14:50:43+00:00"
}
},
{
"topic": "CLICKS",
- "key": "10.0.0.2",
"value": {
"IP_ADDRESS": "10.0.0.12",
"URL": "https://www.confluent.io/hub/confluentinc/kafka-connect-datagen",
- "TIMESTAMP": "2020-01-17T14:53:44+00:01"
+ "TIMESTAMP": "2021-01-17T14:53:44+00:01"
}
},
{
"topic": "CLICKS",
- "key": "10.0.0.3",
"value": {
"IP_ADDRESS": "10.0.0.13",
"URL": "https://www.confluent.io/hub/confluentinc/kafka-connect-datagen",
- "TIMESTAMP": "2020-01-17T14:56:45+00:03"
+ "TIMESTAMP": "2021-01-17T14:56:45+00:03"
}
}
]
diff --git a/_includes/tutorials/finding-distinct/ksql/code/test/output.json b/_includes/tutorials/finding-distinct/ksql/code/test/output.json
index 8fcdf451df..2a651c99f3 100644
--- a/_includes/tutorials/finding-distinct/ksql/code/test/output.json
+++ b/_includes/tutorials/finding-distinct/ksql/code/test/output.json
@@ -5,27 +5,27 @@
"key": "10.0.0.1",
"value": {
"URL": "https://docs.confluent.io/current/tutorials/examples/kubernetes/gke-base/docs/index.html",
- "TIMESTAMP": "2020-01-17T14:50:43+00:00"
+ "TIMESTAMP": "2021-01-17T14:50:43+00:00"
},
- "timestamp": 1579272643000
+ "timestamp": 1610895043000
},
{
"topic": "DISTINCT_CLICKS",
"key": "10.0.0.12",
"value": {
"URL": "https://www.confluent.io/hub/confluentinc/kafka-connect-datagen",
- "TIMESTAMP": "2020-01-17T14:53:44+00:01"
+ "TIMESTAMP": "2021-01-17T14:53:44+00:01"
},
- "timestamp": 1579272764000
+ "timestamp": 1610895164000
},
{
"topic": "DISTINCT_CLICKS",
"key": "10.0.0.13",
"value": {
"URL": "https://www.confluent.io/hub/confluentinc/kafka-connect-datagen",
- "TIMESTAMP": "2020-01-17T14:56:45+00:03"
+ "TIMESTAMP": "2021-01-17T14:56:45+00:03"
},
- "timestamp": 1579272825000
+ "timestamp": 1610895225000
}
]
}
diff --git a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/create-inputs.sql b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/create-inputs.sql
index 30759092ba..c23283928e 100644
--- a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/create-inputs.sql
+++ b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/create-inputs.sql
@@ -1,6 +1,6 @@
CREATE STREAM CLICKS (IP_ADDRESS VARCHAR, URL VARCHAR, TIMESTAMP VARCHAR)
WITH (KAFKA_TOPIC = 'CLICKS',
- VALUE_FORMAT = 'JSON',
+ FORMAT = 'JSON',
TIMESTAMP = 'TIMESTAMP',
TIMESTAMP_FORMAT = 'yyyy-MM-dd''T''HH:mm:ssXXX',
PARTITIONS = 1);
diff --git a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/create-outputs.sql b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/create-outputs.sql
index 389b944857..4a03610088 100644
--- a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/create-outputs.sql
+++ b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/create-outputs.sql
@@ -6,13 +6,14 @@ CREATE TABLE DETECTED_CLICKS AS
AS_VALUE(IP_ADDRESS) AS IP_ADDRESS,
AS_VALUE(URL) AS URL,
AS_VALUE(TIMESTAMP) AS TIMESTAMP
- FROM CLICKS WINDOW TUMBLING (SIZE 2 MINUTES)
+ FROM CLICKS WINDOW TUMBLING (SIZE 2 MINUTES, RETENTION 1000 DAYS)
GROUP BY IP_ADDRESS, URL, TIMESTAMP
HAVING COUNT(IP_ADDRESS) = 1;
CREATE STREAM RAW_DISTINCT_CLICKS (IP_ADDRESS VARCHAR, URL VARCHAR, TIMESTAMP VARCHAR)
WITH (KAFKA_TOPIC = 'DETECTED_CLICKS',
- VALUE_FORMAT = 'JSON');
+ PARTITIONS = 1,
+ FORMAT = 'JSON');
CREATE STREAM DISTINCT_CLICKS AS
SELECT
diff --git a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/expected-print-topic.log b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/expected-print-topic.log
index 84fb0fc690..fd2dbc329f 100644
--- a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/expected-print-topic.log
+++ b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/expected-print-topic.log
@@ -1,6 +1,6 @@
-Key format: KAFKA_STRING
+Key format: JSON or HOPPING(KAFKA_STRING) or TUMBLING(KAFKA_STRING) or KAFKA_STRING
Value format: JSON or KAFKA_STRING
-rowtime: 2020/01/17 14:50:43.000 Z, key: 10.0.0.1, value: {"URL":"https://docs.confluent.io/current/tutorials/examples/kubernetes/gke-base/docs/index.html","TIMESTAMP":"2020-01-17T14:50:43+00:00"}
-rowtime: 2020/01/17 14:52:44.000 Z, key: 10.0.0.12, value: {"URL":"https://www.confluent.io/hub/confluentinc/kafka-connect-datagen","TIMESTAMP":"2020-01-17T14:53:44+00:01"}
-rowtime: 2020/01/17 14:53:45.000 Z, key: 10.0.0.13, value: {"URL":"https://www.confluent.io/hub/confluentinc/kafka-connect-datagen","TIMESTAMP":"2020-01-17T14:56:45+00:03"}
+rowtime: 2021/01/17 14:50:43.000 Z, key: "10.0.0.1", value: {"URL":"https://docs.confluent.io/current/tutorials/examples/kubernetes/gke-base/docs/index.html","TIMESTAMP":"2021-01-17T14:50:43+00:00"}, partition: 0
+rowtime: 2021/01/17 14:52:44.000 Z, key: "10.0.0.12", value: {"URL":"https://www.confluent.io/hub/confluentinc/kafka-connect-datagen","TIMESTAMP":"2021-01-17T14:53:44+00:01"}, partition: 0
+rowtime: 2021/01/17 14:53:45.000 Z, key: "10.0.0.13", value: {"URL":"https://www.confluent.io/hub/confluentinc/kafka-connect-datagen","TIMESTAMP":"2021-01-17T14:56:45+00:03"}, partition: 0
Topic printing ceased
diff --git a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/expected-transient-query.log b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/expected-transient-query.log
index cc041db8e2..8363dcfc4d 100644
--- a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/expected-transient-query.log
+++ b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/expected-transient-query.log
@@ -1,9 +1,9 @@
+-------------------------------------------------------------------------+-------------------------------------------------------------------------+-------------------------------------------------------------------------+
|IP_ADDRESS |URL |TIMESTAMP |
+-------------------------------------------------------------------------+-------------------------------------------------------------------------+-------------------------------------------------------------------------+
-|10.0.0.1 |https://docs.confluent.io/current/tutorials/examples/kubernetes/gke-base/|2020-01-17T14:50:43+00:00 |
+|10.0.0.1 |https://docs.confluent.io/current/tutorials/examples/kubernetes/gke-base/|2021-01-17T14:50:43+00:00 |
| |docs/index.html | |
-|10.0.0.12 |https://www.confluent.io/hub/confluentinc/kafka-connect-datagen |2020-01-17T14:53:44+00:01 |
-|10.0.0.13 |https://www.confluent.io/hub/confluentinc/kafka-connect-datagen |2020-01-17T14:56:45+00:03 |
+|10.0.0.12 |https://www.confluent.io/hub/confluentinc/kafka-connect-datagen |2021-01-17T14:53:44+00:01 |
+|10.0.0.13 |https://www.confluent.io/hub/confluentinc/kafka-connect-datagen |2021-01-17T14:56:45+00:03 |
Limit Reached
Query terminated
diff --git a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/expected-transient-window.log b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/expected-transient-window.log
index cc041db8e2..8363dcfc4d 100644
--- a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/expected-transient-window.log
+++ b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/expected-transient-window.log
@@ -1,9 +1,9 @@
+-------------------------------------------------------------------------+-------------------------------------------------------------------------+-------------------------------------------------------------------------+
|IP_ADDRESS |URL |TIMESTAMP |
+-------------------------------------------------------------------------+-------------------------------------------------------------------------+-------------------------------------------------------------------------+
-|10.0.0.1 |https://docs.confluent.io/current/tutorials/examples/kubernetes/gke-base/|2020-01-17T14:50:43+00:00 |
+|10.0.0.1 |https://docs.confluent.io/current/tutorials/examples/kubernetes/gke-base/|2021-01-17T14:50:43+00:00 |
| |docs/index.html | |
-|10.0.0.12 |https://www.confluent.io/hub/confluentinc/kafka-connect-datagen |2020-01-17T14:53:44+00:01 |
-|10.0.0.13 |https://www.confluent.io/hub/confluentinc/kafka-connect-datagen |2020-01-17T14:56:45+00:03 |
+|10.0.0.12 |https://www.confluent.io/hub/confluentinc/kafka-connect-datagen |2021-01-17T14:53:44+00:01 |
+|10.0.0.13 |https://www.confluent.io/hub/confluentinc/kafka-connect-datagen |2021-01-17T14:56:45+00:03 |
Limit Reached
Query terminated
diff --git a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/populate-stream.sql b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/populate-stream.sql
index fb66f4ff30..3d58dcbcfc 100644
--- a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/populate-stream.sql
+++ b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/populate-stream.sql
@@ -1,7 +1,7 @@
-INSERT INTO CLICKS (IP_ADDRESS, URL, TIMESTAMP) VALUES ('10.0.0.1', 'https://docs.confluent.io/current/tutorials/examples/kubernetes/gke-base/docs/index.html', '2020-01-17T14:50:43+00:00');
-INSERT INTO CLICKS (IP_ADDRESS, URL, TIMESTAMP) VALUES ('10.0.0.12', 'https://www.confluent.io/hub/confluentinc/kafka-connect-datagen', '2020-01-17T14:53:44+00:01');
-INSERT INTO CLICKS (IP_ADDRESS, URL, TIMESTAMP) VALUES ('10.0.0.13', 'https://www.confluent.io/hub/confluentinc/kafka-connect-datagen', '2020-01-17T14:56:45+00:03');
+INSERT INTO CLICKS (IP_ADDRESS, URL, TIMESTAMP) VALUES ('10.0.0.1', 'https://docs.confluent.io/current/tutorials/examples/kubernetes/gke-base/docs/index.html', '2021-01-17T14:50:43+00:00');
+INSERT INTO CLICKS (IP_ADDRESS, URL, TIMESTAMP) VALUES ('10.0.0.12', 'https://www.confluent.io/hub/confluentinc/kafka-connect-datagen', '2021-01-17T14:53:44+00:01');
+INSERT INTO CLICKS (IP_ADDRESS, URL, TIMESTAMP) VALUES ('10.0.0.13', 'https://www.confluent.io/hub/confluentinc/kafka-connect-datagen', '2021-01-17T14:56:45+00:03');
-INSERT INTO CLICKS (IP_ADDRESS, URL, TIMESTAMP) VALUES ('10.0.0.1', 'https://docs.confluent.io/current/tutorials/examples/kubernetes/gke-base/docs/index.html', '2020-01-17T14:50:43+00:00');
-INSERT INTO CLICKS (IP_ADDRESS, URL, TIMESTAMP) VALUES ('10.0.0.12', 'https://www.confluent.io/hub/confluentinc/kafka-connect-datagen', '2020-01-17T14:53:44+00:01');
-INSERT INTO CLICKS (IP_ADDRESS, URL, TIMESTAMP) VALUES ('10.0.0.13', 'https://www.confluent.io/hub/confluentinc/kafka-connect-datagen', '2020-01-17T14:56:45+00:03');
+INSERT INTO CLICKS (IP_ADDRESS, URL, TIMESTAMP) VALUES ('10.0.0.1', 'https://docs.confluent.io/current/tutorials/examples/kubernetes/gke-base/docs/index.html', '2021-01-17T14:50:43+00:00');
+INSERT INTO CLICKS (IP_ADDRESS, URL, TIMESTAMP) VALUES ('10.0.0.12', 'https://www.confluent.io/hub/confluentinc/kafka-connect-datagen', '2021-01-17T14:53:44+00:01');
+INSERT INTO CLICKS (IP_ADDRESS, URL, TIMESTAMP) VALUES ('10.0.0.13', 'https://www.confluent.io/hub/confluentinc/kafka-connect-datagen', '2021-01-17T14:56:45+00:03');
diff --git a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/set-properties.sql b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/set-properties.sql
index d1ba61d77d..8110d2531f 100644
--- a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/set-properties.sql
+++ b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/set-properties.sql
@@ -1 +1,2 @@
-SET 'auto.offset.reset' = 'earliest';
\ No newline at end of file
+SET 'auto.offset.reset' = 'earliest';
+SET 'cache.max.bytes.buffering' = '0';
\ No newline at end of file
diff --git a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/transient-window.sql b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/transient-window.sql
index 3cfff01773..865c9f4666 100644
--- a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/transient-window.sql
+++ b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/dev/transient-window.sql
@@ -4,6 +4,6 @@ SELECT
TIMESTAMP
FROM CLICKS WINDOW TUMBLING (SIZE 2 MINUTES)
GROUP BY IP_ADDRESS, URL, TIMESTAMP
-HAVING COUNT(IP_ADDRESS) = 1
+HAVING COUNT(IP_ADDRESS) = 1
EMIT CHANGES
LIMIT 3;
diff --git a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/test/expected-results.log b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/test/expected-results.log
index f0337ba701..97aafb27f0 100644
--- a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/test/expected-results.log
+++ b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/test/expected-results.log
@@ -1 +1 @@
- >>> Test passed!
+ >>> Test passed!
\ No newline at end of file
diff --git a/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/test/run-tests.sh b/_includes/tutorials/finding-distinct/ksql/code/tutorial-steps/test/run-tests.sh
old mode 100644
new mode 100755
diff --git a/_includes/tutorials/flatten-nested-data/ksql/code/docker-compose.yml b/_includes/tutorials/flatten-nested-data/ksql/code/docker-compose.yml
index cecce84934..736bdf2878 100644
--- a/_includes/tutorials/flatten-nested-data/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/flatten-nested-data/ksql/code/docker-compose.yml
@@ -35,15 +35,16 @@ services:
hostname: schema-registry
container_name: schema-registry
depends_on:
+ - zookeeper
- broker
ports:
- "8081:8081"
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
+ SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
ksqldb-server:
- image: confluentinc/ksqldb-server:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
hostname: ksqldb-server
container_name: ksqldb-server
depends_on:
@@ -61,7 +62,7 @@ services:
KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
ksqldb-cli:
- image: confluentinc/ksqldb-cli:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-cli:0.705000235.1-rc2
container_name: ksqldb-cli
depends_on:
- broker
diff --git a/_includes/tutorials/generate-test-data-streams/ksql/code/docker-compose.yml b/_includes/tutorials/generate-test-data-streams/ksql/code/docker-compose.yml
index ee51523f56..243c9eb5f8 100644
--- a/_includes/tutorials/generate-test-data-streams/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/generate-test-data-streams/ksql/code/docker-compose.yml
@@ -43,7 +43,7 @@ services:
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
ksqldb-server:
- image: confluentinc/ksqldb-server:0.8.1
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
hostname: ksqldb
container_name: ksqldb
depends_on:
@@ -93,7 +93,7 @@ services:
sleep infinity
ksqldb-cli:
- image: confluentinc/ksqldb-cli:0.8.1
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-cli:0.705000235.1-rc2
container_name: ksqldb-cli
depends_on:
- broker
diff --git a/_includes/tutorials/generate-test-data-streams/ksql/code/tutorial-steps/dev/describe-stream-01_expected.log b/_includes/tutorials/generate-test-data-streams/ksql/code/tutorial-steps/dev/describe-stream-01_expected.log
index 9ca16ecbd4..b6abb9ace4 100644
--- a/_includes/tutorials/generate-test-data-streams/ksql/code/tutorial-steps/dev/describe-stream-01_expected.log
+++ b/_includes/tutorials/generate-test-data-streams/ksql/code/tutorial-steps/dev/describe-stream-01_expected.log
@@ -8,4 +8,4 @@ Name : CLICKS
PATH | VARCHAR(STRING)
HOST | VARCHAR(STRING)
----------------------------------------
-For runtime statistics and query details run: DESCRIBE EXTENDED {% include_raw tutorials/rekeying/ksql/code/tutorial-steps/dev/expected-print-input.log %}
+++++
-Note that the key is `null` for every message. This means that ratings data for the same movie could be spread across multiple partitions. This is generally not good for scalability when you care about having the same "kind" of data in a single partition.
\ No newline at end of file
+Note that the messages have keys ranging from `1` to `9`, but more importantly, the messages are not keyed by the movies' `ID`. This means that ratings data for the same movie could be spread across multiple partitions. This is generally not good for scalability when you care about having the same "kind" of data in a single partition.
\ No newline at end of file
diff --git a/_includes/tutorials/serialization/ksql/code/docker-compose.yml b/_includes/tutorials/serialization/ksql/code/docker-compose.yml
index 543cca0e84..4636a06e4c 100644
--- a/_includes/tutorials/serialization/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/serialization/ksql/code/docker-compose.yml
@@ -43,7 +43,7 @@ services:
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
ksqldb-server:
- image: confluentinc/ksqldb-server:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
hostname: ksqldb-server
container_name: ksqldb-server
depends_on:
@@ -61,7 +61,7 @@ services:
KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
ksqldb-cli:
- image: confluentinc/ksqldb-cli:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-cli:0.705000235.1-rc2
container_name: ksqldb-cli
depends_on:
- broker
diff --git a/_includes/tutorials/serialization/ksql/code/tutorial-steps/dev/expected-print.log b/_includes/tutorials/serialization/ksql/code/tutorial-steps/dev/expected-print.log
index 36709ce762..fb62c559af 100644
--- a/_includes/tutorials/serialization/ksql/code/tutorial-steps/dev/expected-print.log
+++ b/_includes/tutorials/serialization/ksql/code/tutorial-steps/dev/expected-print.log
@@ -1,6 +1,6 @@
Key format: KAFKA_BIGINT or KAFKA_DOUBLE or KAFKA_STRING
Value format: PROTOBUF
-rowtime: 2020/05/29 22:21:08.375 Z, key: 1, value: TITLE: "Lethal Weapon" RELEASE_YEAR: 1992
-rowtime: 2020/05/29 22:21:08.569 Z, key: 2, value: TITLE: "Die Hard" RELEASE_YEAR: 1988
-rowtime: 2020/05/29 22:21:08.709 Z, key: 3, value: TITLE: "Predator" RELEASE_YEAR: 1997
+rowtime: 4/30/20 4:34:10 PM UTC, key: 1, value: TITLE: "Lethal Weapon" RELEASE_YEAR: 1992, partition: 0
+rowtime: 4/30/20 4:34:10 PM UTC, key: 2, value: TITLE: "Die Hard" RELEASE_YEAR: 1988, partition: 0
+rowtime: 4/30/20 4:34:11 PM UTC, key: 3, value: TITLE: "Predator" RELEASE_YEAR: 1997, partition: 0
Topic printing ceased
diff --git a/_includes/tutorials/session-windows/ksql/code/docker-compose.yml b/_includes/tutorials/session-windows/ksql/code/docker-compose.yml
index c84222130d..7c9e30c110 100644
--- a/_includes/tutorials/session-windows/ksql/code/docker-compose.yml
+++ b/_includes/tutorials/session-windows/ksql/code/docker-compose.yml
@@ -35,15 +35,16 @@ services:
hostname: schema-registry
container_name: schema-registry
depends_on:
+ - zookeeper
- broker
ports:
- "8081:8081"
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
+ SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
ksqldb:
- image: confluentinc/ksqldb-server:0.11.0
+ image: 519856050701.dkr.ecr.us-west-2.amazonaws.com/docker/prod/confluentinc/ksqldb-server:0.705000235.1-rc2
hostname: ksqldb
container_name: ksqldb
depends_on:
diff --git a/_includes/tutorials/session-windows/ksql/code/tutorial-steps/dev/expected-print-topic.log b/_includes/tutorials/session-windows/ksql/code/tutorial-steps/dev/expected-print-topic.log
index 955ac080ff..2c1dcf2d61 100644
--- a/_includes/tutorials/session-windows/ksql/code/tutorial-steps/dev/expected-print-topic.log
+++ b/_includes/tutorials/session-windows/ksql/code/tutorial-steps/dev/expected-print-topic.log
@@ -1,8 +1,8 @@
Key format: SESSION(KAFKA_STRING)
Value format: AVRO
-rowtime: 7/18/19 10:00:00 AM UTC, key: [51.56.119.117@1563444000000/1563444000000], value: {"SESSION_START_TS": "2019-07-18 10:00:00", "SESSION_END_TS": "2019-07-18 10:00:00", "CLICK_COUNT": 1, "SESSION_LENGTH_MS": 0}
-rowtime: 7/18/19 10:00:00 AM UTC, key: [51.56.119.117@1563444000000/1563444000000], value: