这是indexloc提供的服务,不要输入任何密码
Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
62 changes: 62 additions & 0 deletions spring-kafka-4/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,16 @@
<artifactId>spring-kafka</artifactId>
</dependency>

<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
<version>${apache.avro.version}</version>
</dependency>
<dependency>
<groupId>io.confluent</groupId>
<artifactId>kafka-avro-serializer</artifactId>
<version>${kafka-avro-serializer.version}</version>
</dependency>
<!-- test dependencies -->
<dependency>
<groupId>org.springframework.boot</groupId>
Expand Down Expand Up @@ -58,6 +68,48 @@

<build>
<plugins>
<plugin>
<groupId>org.apache.avro</groupId>
<artifactId>avro-maven-plugin</artifactId>
<version>${apache.avro.version}</version>
<configuration>
<stringType>String</stringType>
</configuration>
<executions>
<execution>
<phase>generate-sources</phase>
<goals>
<goal>schema</goal>
</goals>
<configuration>
<sourceDirectory>${project.basedir}/src/main/resources/avro</sourceDirectory>
<includes>
<include>*.avsc</include>
</includes>
<outputDirectory>${project.build.directory}/generated-sources/avro</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>${build-helper-maven-plugin.version}</version>
<executions>
<execution>
<id>add-source</id>
<phase>generate-sources</phase>
<goals>
<goal>add-source</goal>
</goals>
<configuration>
<sources>
<source>${project.build.directory}/generated-sources/avro</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
Expand All @@ -71,6 +123,16 @@
<properties>
<java.version>21</java.version>
<spring-boot.version>3.4.4</spring-boot.version>
<apache.avro.version>1.12.0</apache.avro.version>
<kafka-avro-serializer.version>7.9.1</kafka-avro-serializer.version>
<build-helper-maven-plugin.version>3.2.0</build-helper-maven-plugin.version>
</properties>

<repositories>
<repository>
<id>confluent</id>
<url>https://packages.confluent.io/maven/</url>
</repository>
</repositories>

</project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
package com.baeldung.avro.deserialization.exception;

import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.annotation.KafkaListener;

import com.baeldung.avro.deserialization.exception.avro.Article;

@SpringBootApplication
class AvroMagicByteApp {

    private static final Logger LOG = LoggerFactory.getLogger(AvroMagicByteApp.class);

    // Appended from the Kafka listener-container thread and read concurrently by
    // the test thread through getBlog(); a plain ArrayList here is a data race,
    // so use a thread-safe list. (Fully qualified to avoid touching imports.)
    private final List<String> blog = new java.util.concurrent.CopyOnWriteArrayList<>();

    public static void main(String[] args) {
        new SpringApplicationBuilder().sources(AvroMagicByteApp.class)
            .profiles("avro-magic-byte")
            .run(args);
    }

    /**
     * Consumes successfully deserialized {@link Article} records from the
     * "baeldung.article.published" topic and records their titles.
     */
    @KafkaListener(topics = "baeldung.article.published")
    public void listen(Article article) {
        LOG.info("a new article was published: {}", article);
        blog.add(article.getTitle());
    }

    /**
     * Returns the titles received so far. Note: this exposes the live internal
     * list; callers are expected to only read it.
     */
    public List<String> getBlog() {
        return blog;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
package com.baeldung.avro.deserialization.exception;

import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.listener.DeadLetterPublishingRecoverer;
import org.springframework.kafka.listener.DefaultErrorHandler;

/**
 * Dead-letter-queue wiring: records that fail deserialization (e.g. a payload
 * without the Avro magic byte) or processing are forwarded to a "-dlt" topic
 * instead of being retried forever.
 */
@Configuration
class DlqConfig {

    // Container error handler that delegates failed records to the DLQ recoverer.
    @Bean
    DefaultErrorHandler errorHandler(DeadLetterPublishingRecoverer dlqPublishingRecoverer) {
        return new DefaultErrorHandler(dlqPublishingRecoverer);
    }

    // Publishes the failed record to the dead-letter topic using the byte[]
    // template below, so the original (possibly corrupt) payload is forwarded
    // verbatim. Injected by parameter name, matching the bean name
    // "bytesKafkaTemplate".
    @Bean
    DeadLetterPublishingRecoverer dlqPublishingRecoverer(KafkaTemplate<byte[], byte[]> bytesKafkaTemplate) {
        return new DeadLetterPublishingRecoverer(bytesKafkaTemplate);
    }

    // KafkaTemplate whose value serializer is overridden to ByteArraySerializer
    // so raw bytes can be re-published without re-serialization. The explicit
    // bean name is what the recoverer bean above resolves against.
    @Bean("bytesKafkaTemplate")
    KafkaTemplate<?, ?> bytesTemplate(ProducerFactory<?, ?> kafkaProducerFactory) {
        return new KafkaTemplate<>(kafkaProducerFactory, Map.of(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName()));
    }

}
13 changes: 13 additions & 0 deletions spring-kafka-4/src/main/resources/application-avro-magic-byte.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@

spring:
kafka:
# bootstrap-servers <-- it'll be injected in test via Testcontainers and @ServiceConnection
consumer:
group-id: test-group
auto-offset-reset: earliest
key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
value-deserializer: org.springframework.kafka.support.serializer.ErrorHandlingDeserializer
properties:
spring.deserializer.value.delegate.class: io.confluent.kafka.serializers.KafkaAvroDeserializer
schema.registry.url: mock://test
specific.avro.reader: true
10 changes: 10 additions & 0 deletions spring-kafka-4/src/main/resources/avro/Article.avsc
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
{
"type": "record",
"name": "Article",
"namespace": "com.baeldung.avro.deserialization.exception.avro",
"fields": [
{ "name": "title", "type": "string" },
{ "name": "author", "type": "string" },
{ "name": "tags", "type": { "type": "array", "items": "string" } }
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
package com.baeldung.avro.deserialization.exception;

import static java.time.Duration.ofSeconds;
import static org.apache.kafka.clients.producer.ProducerConfig.BOOTSTRAP_SERVERS_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG;
import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG;
import static org.assertj.core.api.Assertions.assertThat;
import static org.testcontainers.shaded.org.awaitility.Awaitility.await;

import java.time.Duration;
import java.util.List;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.testcontainers.service.connection.ServiceConnection;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.test.context.ActiveProfiles;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import org.testcontainers.kafka.KafkaContainer;
import org.testcontainers.utility.DockerImageName;

import com.baeldung.avro.deserialization.exception.avro.Article;

import io.confluent.kafka.serializers.KafkaAvroSerializer;

// @Testcontainers is required for the JUnit 5 extension to start/stop the
// @Container field below; without it the Kafka container is never started and
// every test fails to connect.
@Testcontainers
@SpringBootTest
@ActiveProfiles("avro-magic-byte")
class AvroMagicByteLiveTest {

    @Container
    @ServiceConnection // injects spring.kafka.bootstrap-servers into the context
    static KafkaContainer kafka = new KafkaContainer(DockerImageName.parse("apache/kafka:4.0.0"));

    @Autowired
    private AvroMagicByteApp listener;

    @Test
    void whenSendingCorrectArticle_thenItsAddedToTheBlog() throws Exception {
        avroKafkaTemplate().send("baeldung.article.published", aTestArticle("Avro Magic Byte"))
            .get();

        // NOTE(review): await() comes from the Testcontainers-shaded Awaitility;
        // prefer a direct org.awaitility dependency — confirm before changing.
        await().untilAsserted(() -> assertThat(listener.getBlog()).containsExactly("Avro Magic Byte"));
    }

    @Test
    void whenSendingMalformedMessage_thenSendToDLQ() throws Exception {
        // A plain String payload has no Avro magic byte, so deserialization fails
        // and the error handler routes the raw record to the "-dlt" topic.
        stringKafkaTemplate().send("baeldung.article.published", "not a valid avro message!")
            .get();

        var dlqRecord = listenForOneMessage("baeldung.article.published-dlt", ofSeconds(5L));

        assertThat(dlqRecord.value()).isEqualTo("not a valid avro message!");
    }

    // Template that Avro-serializes values, using the mock schema registry.
    private static KafkaTemplate<Object, Object> avroKafkaTemplate() {
        return new KafkaTemplate<>(kafkaProducerFactory());
    }

    // Template with the value serializer overridden to String, used to publish a
    // deliberately malformed (non-Avro) payload.
    private static KafkaTemplate<Object, Object> stringKafkaTemplate() {
        return new KafkaTemplate<>(kafkaProducerFactory(), Map.of(VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()));
    }

    private static DefaultKafkaProducerFactory<Object, Object> kafkaProducerFactory() {
        return new DefaultKafkaProducerFactory<>(
            Map.of(BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers(), KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName(),
                VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName(), "schema.registry.url", "mock://test"));
    }

    // Polls the given topic (partition 0, distinct consumer group) for a single
    // record within the timeout.
    private static ConsumerRecord<?, ?> listenForOneMessage(String topic, Duration timeout) {
        return KafkaTestUtils.getOneRecord(kafka.getBootstrapServers(), "test-group-id", topic, 0, false, true, timeout);
    }

    private static Article aTestArticle(String title) {
        return new Article(title, "John Doe", List.of("avro", "kafka", "spring"));
    }

}