diff --git a/.gitpod.yml b/.gitpod.yml new file mode 100644 index 0000000000..de42c718dc --- /dev/null +++ b/.gitpod.yml @@ -0,0 +1,41 @@ +github: + prebuilds: + master: true + branches: true + +tasks: + - name: Open tutorial in browser + command: gp preview https://developer.confluent.io/tutorials/creating-first-apache-kafka-producer-application/kafka.html + - name: Docker + init: gp sync-await code-copy + command: docker compose -f tutorial-workspace/docker-compose.yml up -d ; exit ; clear + - name: Terminal + before: gp preview https://developer.confluent.io/tutorials/creating-first-apache-kafka-producer-application/kafka.html + init: | + curl -L --http1.1 https://cnfl.io/cli | sudo sh -s -- -b /usr/local/bin + mkdir tutorial-workspace + mv _includes/tutorials/kafka-producer-application/kafka/code/* tutorial-workspace + rm -rf -- !(tutorial-workspace) + rm -rf tutorial-workspace/tutorial-steps .git* .semaphore/ + sdk default java 17.0.7.fx-zulu + command: gp sync-done code-copy ; cd tutorial-workspace ; clear + +vscode: + extensions: + - github.github-vscode-theme + - vscjava.vscode-java-pack + - vscjava.vscode-java-debug + +ports: + # zookeeper + - port: 2181 + onOpen: ignore + visibility: private + # broker + - port: 29092 + onOpen: ignore + visibility: private + # SR + - port: 8081 + onOpen: ignore + visibility: private \ No newline at end of file diff --git a/_data/harnesses/kafka-producer-application/gitpod.yml b/_data/harnesses/kafka-producer-application/gitpod.yml new file mode 100644 index 0000000000..b69d2ef894 --- /dev/null +++ b/_data/harnesses/kafka-producer-application/gitpod.yml @@ -0,0 +1,48 @@ +dev: + steps: + - title: Background + content: + - action: skip + render: + file: tutorials/kafka-producer-application/kafka/markup/gitpod/background.adoc + + - title: Application tour + content: + - action: skip + render: + file: tutorials/kafka-producer-application/kafka/markup/gitpod/app-tour.adoc + + - title: Compile the KafkaProducer application + 
content: + - action: execute + file: tutorial-steps/gitpod/build-uberjar.sh + render: + file: tutorials/kafka-producer-application/kafka/markup/gitpod/build-uberjar.adoc + + - title: Create a topic + content: + - action: execute + file: tutorial-steps/gitpod/harness-create-topic.sh + render: + file: tutorials/kafka-producer-application/kafka/markup/gitpod/create-topic.adoc + + - title: Run the KafkaProducer application + content: + - action: execute + file: tutorial-steps/gitpod/run-dev-app.sh + render: + file: tutorials/kafka-producer-application/kafka/markup/gitpod/run-dev-app.adoc + + - title: Confirm records sent by consuming from topic + content: + - action: execute_async + file: tutorial-steps/gitpod/harness-console-consumer.sh + stdout: tutorial-steps/gitpod/outputs/actual-output.txt + render: + file: tutorials/kafka-producer-application/kafka/markup/gitpod/run-consumer.adoc + + - title: Next steps + content: + - action: skip + render: + file: tutorials/kafka-producer-application/kafka/markup/gitpod/next-steps.adoc diff --git a/_data/tutorials.yaml b/_data/tutorials.yaml index f89f04c097..5deef41513 100644 --- a/_data/tutorials.yaml +++ b/_data/tutorials.yaml @@ -604,12 +604,12 @@ streams-to-table: kafka: disabled confluent: enabled kafka-producer-application: - title: How to build your first Apache KafkaProducer application + title: How to build your first Apache Kafka® producer application meta-description: build your first Kafka producer application canonical: confluent slug: /creating-first-apache-kafka-producer-application question: How do you get started building your first Kafka producer application? - introduction: You'd like to integrate a KafkaProducer into your event-driven application, + introduction: You'd like to integrate a Kafka producer into your event-driven application, but you're not sure where to start. In this tutorial, you'll build a small application that uses a KafkaProducer to write records to Kafka. 
status: diff --git a/_includes/gitpod-content.html b/_includes/gitpod-content.html new file mode 100644 index 0000000000..7541747a78 --- /dev/null +++ b/_includes/gitpod-content.html @@ -0,0 +1,48 @@ +
{% include_raw tutorials/kafka-producer-application/kafka/code/tutorial-steps/gitpod/build-uberjar.sh %}
++++++
diff --git a/_includes/tutorials/kafka-producer-application/kafka/markup/gitpod/create-topic.adoc b/_includes/tutorials/kafka-producer-application/kafka/markup/gitpod/create-topic.adoc
new file mode 100644
index 0000000000..a95f7d521e
--- /dev/null
+++ b/_includes/tutorials/kafka-producer-application/kafka/markup/gitpod/create-topic.adoc
@@ -0,0 +1,20 @@
+
+In this step we're going to create a topic for use during this tutorial. Before proceeding, ensure that Kafka is up and running by listing topics:
++++++
+{% include_raw tutorials/kafka-producer-application/kafka/code/tutorial-steps/gitpod/list-topics.sh %}
++++++
+
+You should see the `__consumer_offsets` and `_schemas` topics listed. If you see an error like `No such container: broker`, the images may still be downloading in the terminal named `Docker`.
+
+When Kafka is up and running, run this command to open a shell on the broker Docker container
++++++
+{% include_raw tutorials/kafka-producer-application/kafka/code/tutorial-steps/gitpod/open-docker-shell.sh %}
++++++
+
+Next, create the topic that the producer can write to
+
++++++
+{% include_raw tutorials/kafka-producer-application/kafka/code/tutorial-steps/gitpod/create-topic.sh %}
++++++
+
+Enter `CTRL+D` to exit the broker shell.
diff --git a/_includes/tutorials/kafka-producer-application/kafka/markup/gitpod/next-steps.adoc b/_includes/tutorials/kafka-producer-application/kafka/markup/gitpod/next-steps.adoc
new file mode 100644
index 0000000000..d0dda32693
--- /dev/null
+++ b/_includes/tutorials/kafka-producer-application/kafka/markup/gitpod/next-steps.adoc
@@ -0,0 +1,5 @@
+Now that you have a working producer application and a way to test with `kafka-console-consumer`, you may want to modify the application to do something more interesting. For example, the program ends after printing each line from the file. How might you modify it to print a record every second? How would you change it to emit results of an API call every second and never end? Can you figure out how to emit weather data from https://open-meteo.com/[Open-Meteo] every second for your current location and store the results in a `temperature` topic?
+
+When you are done tinkering, ensure that you shut down your Gitpod workspace so that you don't waste credits or incur cost unnecessarily. You can do this by navigating to https://gitpod.io/workspaces[https://gitpod.io/workspaces], clicking the three dots on the right of the running workspace, and clicking `Delete` followed by `Delete Workspace`.
+
+Once everything is cleaned up, head on back to Confluent Developer to try link:{{ site.url }}[another tutorial]!
diff --git a/_includes/tutorials/kafka-producer-application/kafka/markup/gitpod/run-consumer.adoc b/_includes/tutorials/kafka-producer-application/kafka/markup/gitpod/run-consumer.adoc
new file mode 100644
index 0000000000..7fd138eb7e
--- /dev/null
+++ b/_includes/tutorials/kafka-producer-application/kafka/markup/gitpod/run-consumer.adoc
@@ -0,0 +1,21 @@
+Now we'll run a console consumer that will read records from the output topic to confirm your application published the expected records.
+
+First, open a shell on the broker Docker container:
+
++++++
+{% include_raw tutorials/kafka-producer-application/kafka/code/tutorial-steps/gitpod/open-docker-shell.sh %}
++++++
+
+Now run the console consumer:
+
++++++
+{% include_raw tutorials/kafka-producer-application/kafka/code/tutorial-steps/gitpod/console-consumer.sh %}
++++++
+
+The output from the consumer can vary if you added any of your own records, but it should look something like this:
+
+++++
+{% include_raw tutorials/kafka-producer-application/kafka/code/tutorial-steps/gitpod/expected-output.txt %}
+++++
+
+Now close the consumer with `CTRL+C`, then exit the broker shell with `CTRL+D`.
diff --git a/_includes/tutorials/kafka-producer-application/kafka/markup/gitpod/run-dev-app.adoc b/_includes/tutorials/kafka-producer-application/kafka/markup/gitpod/run-dev-app.adoc
new file mode 100644
index 0000000000..893e8fed2d
--- /dev/null
+++ b/_includes/tutorials/kafka-producer-application/kafka/markup/gitpod/run-dev-app.adoc
@@ -0,0 +1,27 @@
+Now that you have an uberjar for the KafkaProducerApplication, you can launch it in the workspace.
++++++
+{% include_raw tutorials/kafka-producer-application/kafka/code/tutorial-steps/gitpod/run-dev-app.sh %}
++++++
+
+After you run the previous command, the application will process the file and you should see something like this on the console:
+
+[source, text]
+----
+Offsets and timestamps committed in batch from input.txt
+Record written to offset 0 timestamp 1597352120029
+Record written to offset 1 timestamp 1597352120037
+Record written to offset 2 timestamp 1597352120037
+Record written to offset 3 timestamp 1597352120037
+Record written to offset 4 timestamp 1597352120037
+Record written to offset 5 timestamp 1597352120037
+Record written to offset 6 timestamp 1597352120037
+Record written to offset 7 timestamp 1597352120037
+Record written to offset 8 timestamp 1597352120037
+Record written to offset 9 timestamp 1597352120037
+Record written to offset 10 timestamp 1597352120038
+----
+
+Now you can experiment by creating your own file in the base directory, re-running the above command, and substituting your file name for `input.txt`.
+
+Remember, any data before the `-` is the key and data after is the value.
+
diff --git a/_layouts/gitpod.html b/_layouts/gitpod.html
new file mode 100644
index 0000000000..267479fea3
--- /dev/null
+++ b/_layouts/gitpod.html
@@ -0,0 +1,73 @@
+---
+---
+
+
+
+
+ {% include vendors.html %}
+
+
+
+
+
+
+
+ {% if site.data.tutorials[page.static_data].canonical %}
+ {% unless page.stack == site.data.tutorials[page.static_data].canonical or page.stack == 'ksql' %}
+
+ {% endunless %}
+ {% endif %}
+
+