
build(deps): bump the maven-dependencies group with 5 updates #552


Merged: 4 commits, Feb 24, 2025
4 changes: 2 additions & 2 deletions kafka-spring-boot-3-integrationtests/pom.xml
@@ -82,7 +82,7 @@
         </dependency>
         <dependency>
             <groupId>org.testcontainers</groupId>
-            <artifactId>redpanda</artifactId>
+            <artifactId>kafka</artifactId>
             <scope>test</scope>
         </dependency>
         <dependency>
@@ -102,7 +102,7 @@
         <dependency>
             <groupId>org.testcontainers</groupId>
             <artifactId>testcontainers-bom</artifactId>
-            <version>1.20.4</version>
+            <version>1.20.5</version>
             <type>pom</type>
             <scope>import</scope>
         </dependency>
StreamableKafkaSourceIntegrationTest.java
@@ -29,7 +29,7 @@
 import org.springframework.test.context.ContextConfiguration;
 import org.testcontainers.junit.jupiter.Container;
 import org.testcontainers.junit.jupiter.Testcontainers;
-import org.testcontainers.redpanda.RedpandaContainer;
+import org.testcontainers.kafka.KafkaContainer;

 import java.time.Duration;

@@ -41,8 +41,9 @@
 class StreamableKafkaSourceIntegrationTest {

     @Container
-    private static final RedpandaContainer REDPANDA_CONTAINER = new RedpandaContainer(
-            "docker.redpanda.com/vectorized/redpanda:v22.2.1");
+    private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer("apache/kafka-native")
+            .withEnv("KAFKA_LISTENERS", "PLAINTEXT://:9092,BROKER://:9093,CONTROLLER://:9094");

     private ApplicationContextRunner testApplicationContext;

     @BeforeEach
@@ -56,15 +57,18 @@ void messageSendViaKafkaShouldBeReceived() {
                 .withPropertyValues("axon.axonserver.enabled=false")
                 .withPropertyValues("axon.kafka.fetcher.enabled=true")
                 .withPropertyValues("axon.kafka.consumer.event-processor-mode=tracking")
-                .withPropertyValues("axon.kafka.producer.bootstrap-servers=" + REDPANDA_CONTAINER.getBootstrapServers())
-                .withPropertyValues("axon.kafka.consumer.bootstrap-servers=" + REDPANDA_CONTAINER.getBootstrapServers())
+                .withPropertyValues("axon.kafka.producer.bootstrap-servers=" + KAFKA_CONTAINER.getBootstrapServers())
+                .withPropertyValues("axon.kafka.consumer.bootstrap-servers=" + KAFKA_CONTAINER.getBootstrapServers())
                 .withUserConfiguration(DefaultContext.class)
                 .run(context -> {
                     EventGateway eventGateway = context.getBean(EventGateway.class);
                     assertNotNull(eventGateway);
                     publishEvent(eventGateway);
-                    StreamableKafkaMessageSource<String, byte[]> messageSource = context.getBean(
-                            StreamableKafkaMessageSource.class);
+
+                    //noinspection unchecked
+                    StreamableKafkaMessageSource<String, byte[]> messageSource =
+                            context.getBean(StreamableKafkaMessageSource.class);
+
                     assertNotNull(messageSource);
                     receiveMessage(messageSource);
                 });
@@ -75,8 +79,10 @@ private void publishEvent(EventGateway eventGateway) {
         eventGateway.publish(event);
     }

-    private void receiveMessage(StreamableKafkaMessageSource<String, byte[]> messageSource)
-            throws InterruptedException {
+    private void receiveMessage(
+            StreamableKafkaMessageSource<String, byte[]> messageSource
+    ) throws InterruptedException {
+        //noinspection resource
         BlockingStream<TrackedEventMessage<?>> stream = messageSource.openStream(null);
         await().atMost(Duration.ofSeconds(5L)).until(stream::hasNextAvailable);
         TrackedEventMessage<?> message = stream.nextAvailable();
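Taken together, the hunks above replace the Redpanda container with the generic Testcontainers Kafka module. The condensed sketch below is for orientation only; the class and method names are illustrative, while the container image, listener configuration, and API calls are the ones that appear in the diff.

```java
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import org.testcontainers.kafka.KafkaContainer;

@Testcontainers
class KafkaContainerSetupSketch {

    // The org.testcontainers:kafka module replaces RedpandaContainer; the extra
    // listeners mirror the KAFKA_LISTENERS value configured in the tests above.
    @Container
    private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer("apache/kafka-native")
            .withEnv("KAFKA_LISTENERS", "PLAINTEXT://:9092,BROKER://:9093,CONTROLLER://:9094");

    void exampleWiring() {
        // Bootstrap servers are resolved from the running container and handed to the
        // Axon Kafka extension through Spring properties, as in the tests above.
        String bootstrapServers = KAFKA_CONTAINER.getBootstrapServers();
        new ApplicationContextRunner()
                .withPropertyValues("axon.kafka.producer.bootstrap-servers=" + bootstrapServers)
                .withPropertyValues("axon.kafka.consumer.bootstrap-servers=" + bootstrapServers);
    }
}
```
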
TokenReplayIntegrationTest.java
@@ -39,7 +39,7 @@
 import org.springframework.test.context.ContextConfiguration;
 import org.testcontainers.junit.jupiter.Container;
 import org.testcontainers.junit.jupiter.Testcontainers;
-import org.testcontainers.redpanda.RedpandaContainer;
+import org.testcontainers.kafka.KafkaContainer;

 import java.net.URI;
 import java.time.Duration;
@@ -56,10 +56,10 @@
 class TokenReplayIntegrationTest {

     @Container
-    private static final RedpandaContainer REDPANDA_CONTAINER = new RedpandaContainer(
-            "docker.redpanda.com/vectorized/redpanda:v22.2.1");
-    private ApplicationContextRunner testApplicationContext;
+    private static final KafkaContainer KAFKA_CONTAINER = new KafkaContainer("apache/kafka-native")
+            .withEnv("KAFKA_LISTENERS", "PLAINTEXT://:9092,BROKER://:9093,CONTROLLER://:9094");

+    private ApplicationContextRunner testApplicationContext;

     @BeforeEach
     void setUp() {
@@ -69,27 +69,27 @@ void setUp() {
                 .withPropertyValues("axon.kafka.publisher.enabled=false")
                 .withPropertyValues("axon.kafka.message-converter-mode=cloud_event")
                 .withPropertyValues("axon.kafka.consumer.event-processor-mode=tracking")
-                .withPropertyValues("axon.kafka.consumer.bootstrap-servers=" + REDPANDA_CONTAINER.getBootstrapServers())
+                .withPropertyValues("axon.kafka.consumer.bootstrap-servers=" + KAFKA_CONTAINER.getBootstrapServers())
                 .withUserConfiguration(DefaultContext.class);
     }

     @Test
     void afterResetShouldOnlyProcessTenEventsIfTimeSetMidway() {
         testApplicationContext
-                .withPropertyValues("axon.kafka.default-topic=counterfeed-1")
+                .withPropertyValues("axon.kafka.default-topic=counter-feed-1")
                 .run(context -> {
                     Counter counter = context.getBean(Counter.class);
                     assertNotNull(counter);
                     assertEquals(0, counter.getCount());
-                    Instant between = addRecords("counterfeed-1");
+                    Instant between = addRecords("counter-feed-1");
                     await().atMost(Duration.ofSeconds(5L)).untilAsserted(
                             () -> assertEquals(20, counter.getCount())
                     );
                     EventProcessingConfiguration processingConfiguration = context.getBean(EventProcessingConfiguration.class);
                     assertNotNull(processingConfiguration);
                     processingConfiguration
                             .eventProcessorByProcessingGroup(
-                                    "counterfeedprocessor",
+                                    "counter-feed-processor",
                                     TrackingEventProcessor.class
                             )
                             .ifPresent(tep -> {
@@ -107,20 +107,20 @@
     @Test
     void afterResetShouldOnlyProcessNewMessages() {
         testApplicationContext
-                .withPropertyValues("axon.kafka.default-topic=counterfeed-2")
+                .withPropertyValues("axon.kafka.default-topic=counter-feed-2")
                 .run(context -> {
                     Counter counter = context.getBean(Counter.class);
                     assertNotNull(counter);
                     assertEquals(0, counter.getCount());
-                    addRecords("counterfeed-2");
+                    addRecords("counter-feed-2");
                     await().atMost(Duration.ofSeconds(5L)).untilAsserted(
                             () -> assertEquals(20, counter.getCount())
                     );
                     EventProcessingConfiguration processingConfiguration = context.getBean(EventProcessingConfiguration.class);
                     assertNotNull(processingConfiguration);
                     processingConfiguration
                             .eventProcessorByProcessingGroup(
-                                    "counterfeedprocessor",
+                                    "counter-feed-processor",
                                     TrackingEventProcessor.class
                             )
                             .ifPresent(tep -> {
@@ -129,15 +129,15 @@ void afterResetShouldOnlyProcessNewMessages() {
                                 assertEquals(0, counter.getCount());
                                 tep.start();
                             });
-                    addRecords("counterfeed-2");
+                    addRecords("counter-feed-2");
                     await().atMost(Duration.ofSeconds(5L)).untilAsserted(
                             () -> assertEquals(20, counter.getCount())
                     );
                 });
     }

     private Instant addRecords(String topic) {
-        Producer<String, CloudEvent> producer = newProducer(REDPANDA_CONTAINER.getBootstrapServers());
+        Producer<String, CloudEvent> producer = newProducer(KAFKA_CONTAINER.getBootstrapServers());
         sendTenMessages(producer, topic);
         Instant now = Instant.now();
         sendTenMessages(producer, topic);
@@ -146,12 +146,11 @@ private Instant addRecords(String topic) {
     }

     private void sendMessage(Producer<String, CloudEvent> producer, String topic) {
-        CloudEvent event = new CloudEventBuilder()
-                .withId(UUID.randomUUID().toString())
-                .withSource(URI.create("source"))
-                .withData("Payload".getBytes())
-                .withType("java.util.String")
-                .build();
+        CloudEvent event = new CloudEventBuilder().withId(UUID.randomUUID().toString())
+                                                  .withSource(URI.create("source"))
+                                                  .withData("Payload".getBytes())
+                                                  .withType("java.util.String")
+                                                  .build();
         ProducerRecord<String, CloudEvent> record = new ProducerRecord<>(topic, 0, null, null, event);
         producer.send(record);
     }
@@ -182,7 +181,7 @@ public void registerProcessor(
                 StreamableKafkaMessageSource<?, ?> streamableKafkaMessageSource
         ) {
             configurer.eventProcessing()
-                      .registerTrackingEventProcessor("counterfeedprocessor", c -> streamableKafkaMessageSource);
+                      .registerTrackingEventProcessor("counter-feed-processor", c -> streamableKafkaMessageSource);
         }
     }

@@ -205,7 +204,7 @@ void reset() {

     @SuppressWarnings("unused")
     @Component
-    @ProcessingGroup("counterfeedprocessor")
+    @ProcessingGroup("counter-feed-processor")
     private static class KafkaEventHandler {

         private final Counter counter;
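The processing-group rename has to be applied consistently in two places, as the hunks above do: the name passed to the tracking-processor registration and the name on the handler's @ProcessingGroup annotation. A condensed sketch of that relationship, using the same API calls as the diff (the surrounding class names here are illustrative):

```java
import org.axonframework.config.Configurer;
import org.axonframework.config.ProcessingGroup;
import org.axonframework.eventhandling.EventHandler;
import org.axonframework.extensions.kafka.eventhandling.consumer.streamable.StreamableKafkaMessageSource;

class ProcessingGroupWiringSketch {

    // The processor name registered here...
    static void register(Configurer configurer, StreamableKafkaMessageSource<?, ?> source) {
        configurer.eventProcessing()
                  .registerTrackingEventProcessor("counter-feed-processor", c -> source);
    }

    // ...must match the processing group declared on the handler; otherwise the
    // handler is assigned to a different processor and never sees the Kafka stream.
    @ProcessingGroup("counter-feed-processor")
    static class KafkaEventHandler {

        @EventHandler
        void on(Object event) {
            // handle the event, e.g. increment a counter as in the test above
        }
    }
}
```
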
2 changes: 1 addition & 1 deletion kafka/pom.xml
@@ -32,7 +32,7 @@

     <properties>
         <jmh-core.version>1.37</jmh-core.version>
-        <test-containers.version>1.20.4</test-containers.version>
+        <test-containers.version>1.20.5</test-containers.version>
         <maven-enforcer-plugin.version>3.5.0</maven-enforcer-plugin.version>
         <findbugs-jsr305.version>3.0.2</findbugs-jsr305.version>
         <cloudevents.version>2.5.0</cloudevents.version>
4 changes: 2 additions & 2 deletions pom.xml
@@ -71,8 +71,8 @@
         <!-- Plugins -->
         <jacoco-maven.version>0.8.11</jacoco-maven.version>
         <maven-assembly.version>3.7.1</maven-assembly.version>
-        <maven-clean-plugin.version>3.4.0</maven-clean-plugin.version>
-        <maven-compiler.version>3.13.0</maven-compiler.version>
+        <maven-clean-plugin.version>3.4.1</maven-clean-plugin.version>
+        <maven-compiler.version>3.14.0</maven-compiler.version>
         <maven-deploy.version>3.1.2</maven-deploy.version>
         <maven-enforcer.version>3.5.0</maven-enforcer.version>
         <maven-failsafe.version>3.5.2</maven-failsafe.version>