Skip to content

Commit d2297dc

Browse files
authored
Merge pull request #6 from oyvindstegard/master
Lift project to Kafka clients 3.6, Spring Kafka 3.1 and Spring Boot 3.2
2 parents ce746f1 + 9b4e162 commit d2297dc

File tree

20 files changed

+415
-410
lines changed

20 files changed

+415
-410
lines changed

.gitignore

+2
Original file line numberDiff line numberDiff line change
@@ -2,3 +2,5 @@ target/
22
/.idea/
33
*.iml
44
sequence-producer.state
5+
dependency-reduced-pom.xml
6+

README.md

+163-152
Large diffs are not rendered by default.

boot-app

+2-2
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,10 @@ set -e
44
cd "$(dirname "$0")"
55

66
if ! test -f messages/target/messages-*.jar -a\
7-
-f clients-spring/target/clients-spring-*-exec.jar; then
7+
-f clients-spring/target/clients-spring-*.jar; then
88
mvn -B install
99
fi
1010

1111
cd clients-spring
12-
exec java -jar target/clients-spring-*-exec.jar "$@"
12+
exec java -jar target/clients-spring-*.jar "$@"
1313

clients-spring/pom.xml

-36
Original file line numberDiff line numberDiff line change
@@ -8,27 +8,9 @@
88
<groupId>no.nav.kafka</groupId>
99
<artifactId>kafka-sandbox</artifactId>
1010
<version>1.0-SNAPSHOT</version>
11-
<relativePath>../pom.xml</relativePath>
1211
</parent>
1312

1413
<artifactId>clients-spring</artifactId>
15-
<packaging>jar</packaging>
16-
17-
<properties>
18-
<spring-boot.version>2.6.1</spring-boot.version>
19-
</properties>
20-
21-
<dependencyManagement>
22-
<dependencies>
23-
<dependency>
24-
<groupId>org.springframework.boot</groupId>
25-
<artifactId>spring-boot-starter-parent</artifactId>
26-
<version>${spring-boot.version}</version>
27-
<type>pom</type>
28-
<scope>import</scope>
29-
</dependency>
30-
</dependencies>
31-
</dependencyManagement>
3214

3315
<dependencies>
3416
<dependency>
@@ -56,38 +38,20 @@
5638
<groupId>org.springframework.boot</groupId>
5739
<artifactId>spring-boot-starter-test</artifactId>
5840
<scope>test</scope>
59-
<exclusions>
60-
<exclusion>
61-
<groupId>org.junit.vintage</groupId>
62-
<artifactId>junit-vintage-engine</artifactId>
63-
</exclusion>
64-
</exclusions>
6541
</dependency>
6642
</dependencies>
6743

6844
<build>
6945
<plugins>
70-
<plugin>
71-
<groupId>org.apache.maven.plugins</groupId>
72-
<artifactId>maven-compiler-plugin</artifactId>
73-
</plugin>
74-
<plugin>
75-
<groupId>org.apache.maven.plugins</groupId>
76-
<artifactId>maven-surefire-plugin</artifactId>
77-
</plugin>
7846
<plugin>
7947
<groupId>org.springframework.boot</groupId>
8048
<artifactId>spring-boot-maven-plugin</artifactId>
81-
<version>${spring-boot.version}</version>
8249
<executions>
8350
<execution>
8451
<id>repackage</id>
8552
<goals>
8653
<goal>repackage</goal>
8754
</goals>
88-
<configuration>
89-
<classifier>exec</classifier>
90-
</configuration>
9155
</execution>
9256
</executions>
9357
</plugin>

clients-spring/src/main/java/no/nav/kafka/sandbox/data/EventStoreWithFailureRate.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@ public static class AverageRatioRandom<T> implements FailureRateStrategy<T> {
9494

9595
public AverageRatioRandom(float failureRate) {
9696
if (failureRate < 0 || failureRate > 1.0) {
97-
throw new IllegalArgumentException("failure rate must be a decmial number between 0.0 and 1.0");
97+
throw new IllegalArgumentException("failure rate must be a decimal number between 0.0 and 1.0");
9898
}
9999
this.failureRate = failureRate;
100100
}

clients-spring/src/main/java/no/nav/kafka/sandbox/measurements/MeasurementsConfig.java

+11-31
Original file line numberDiff line numberDiff line change
@@ -81,10 +81,9 @@ public EventStore<SensorEvent> sensorEventStore(@Value("${measurements.event-sto
8181
public ConcurrentKafkaListenerContainerFactory<String, SensorEvent> measurementsListenerContainer(
8282
ConsumerFactory<String, SensorEvent> consumerFactory,
8383
Optional<CommonErrorHandler> errorHandler,
84-
Optional<BatchErrorHandler> legacyErrorHandler, // just temporary, since we still have legacy error handlers in config
8584
@Value("${measurements.consumer.handle-deserialization-error:true}") boolean handleDeserializationError) {
8685

87-
// Consumer configuration from application.yml, where we will override some properties:
86+
// Consumer configuration from application.yml, where we will override some properties here:
8887
Map<String, Object> externalConfigConsumerProps = new HashMap<>(consumerFactory.getConfigurationProperties());
8988

9089
ConcurrentKafkaListenerContainerFactory<String, SensorEvent> factory = new ConcurrentKafkaListenerContainerFactory<>();
@@ -99,11 +98,8 @@ public ConcurrentKafkaListenerContainerFactory<String, SensorEvent> measurements
9998
factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.BATCH);
10099

101100
if (errorHandler.isPresent()) {
102-
LOG.info("Using error handler: {}", errorHandler.get().getClass().getSimpleName());
101+
LOG.info("Using error handler: {}", errorHandler.map(h -> h.getClass().getSimpleName()).orElse("none"));
103102
factory.setCommonErrorHandler(errorHandler.get());
104-
} else if (legacyErrorHandler.isPresent()) {
105-
LOG.info("Using legacy error handler: {}", legacyErrorHandler.get().getClass().getSimpleName());
106-
factory.setBatchErrorHandler(legacyErrorHandler.get());
107103
} else {
108104
LOG.info("Using Spring Kafka default error handler");
109105
}
@@ -122,8 +118,8 @@ private DefaultKafkaConsumerFactory<String, SensorEvent> consumerFactory(
122118
boolean handleDeserializationError) {
123119
// override some consumer props from external config
124120
Map<String, Object> consumerProps = new HashMap<>(externalConfigConsumerProps);
125-
consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, "spring-web-measurement");
126-
consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "spring-web-measurement");
121+
consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, "boot-app-measurement");
122+
consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "boot-app-measurement");
127123

128124
// Deserialization config
129125
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
@@ -139,7 +135,7 @@ private DefaultKafkaConsumerFactory<String, SensorEvent> consumerFactory(
139135
}
140136

141137
@Bean
142-
@ConditionalOnProperty(value = "measurements.consumer.error-handler", havingValue = "ignore")
138+
@ConditionalOnProperty(value = "measurements.consumer.error-handler", havingValue = "log-and-ignore")
143139
public CommonErrorHandler ignoreHandler() {
144140
return new CommonLoggingErrorHandler();
145141
}
@@ -150,44 +146,28 @@ public CommonErrorHandler infiniteRetryHandler() {
150146
return new DefaultErrorHandler(new FixedBackOff(FixedBackOff.DEFAULT_INTERVAL, FixedBackOff.UNLIMITED_ATTEMPTS));
151147
}
152148

153-
// TODO: upgrade to non-deprecated common class of error handlers with similar behaviour:
154-
@Bean
155-
@ConditionalOnProperty(value = "measurements.consumer.error-handler", havingValue = "seek-to-current")
156-
public BatchErrorHandler seekToCurrentHandler() {
157-
return new SeekToCurrentBatchErrorHandler();
158-
}
159-
160-
@Bean
161-
@ConditionalOnProperty(value = "measurements.consumer.error-handler", havingValue = "seek-to-current-with-backoff")
162-
public BatchErrorHandler seekToCurrentWithBackoffHandler() {
163-
SeekToCurrentBatchErrorHandler handler = new SeekToCurrentBatchErrorHandler();
164-
// For this error handler, max attempts actually does not matter
165-
handler.setBackOff(new FixedBackOff(2000L, 2));
166-
return handler;
167-
}
168-
169149
@Bean
170150
@ConditionalOnProperty(value = "measurements.consumer.error-handler", havingValue = "retry-with-backoff")
171-
public BatchErrorHandler retryWithBackoffHandler() {
172-
return new RetryingBatchErrorHandler(new FixedBackOff(2000L, 2), null);
151+
public CommonErrorHandler retryWithBackoffHandler() {
152+
return new DefaultErrorHandler(new FixedBackOff(2000L, 2));
173153
}
174154

175155
@Bean
176156
@ConditionalOnProperty(value = "measurements.consumer.error-handler", havingValue = "retry-with-backoff-recovery")
177-
public BatchErrorHandler retryWithBackoffRecoveryHandler(EventStore<SensorEvent> eventStore) {
157+
public CommonErrorHandler retryWithBackoffRecoveryHandler(EventStore<SensorEvent> eventStore) {
178158
return new RetryingErrorHandler(eventStore);
179159
}
180160

181161
@Bean
182162
@ConditionalOnProperty(value = "measurements.consumer.error-handler", havingValue = "recovering")
183-
public BatchErrorHandler recoveringHandler() {
163+
public CommonErrorHandler recoveringHandler() {
184164
return new RecoveringErrorHandler();
185165
}
186166

187167
@Bean
188168
@ConditionalOnProperty(value = "measurements.consumer.error-handler", havingValue = "stop-container")
189-
public BatchErrorHandler containerStoppingHandler() {
190-
return new ContainerStoppingBatchErrorHandler();
169+
public CommonErrorHandler containerStoppingHandler() {
170+
return new CommonContainerStoppingErrorHandler();
191171
}
192172

193173
}

clients-spring/src/main/java/no/nav/kafka/sandbox/measurements/MeasurementsConsumer.java

+9-10
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,16 @@
11
package no.nav.kafka.sandbox.measurements;
22

33
import no.nav.kafka.sandbox.data.EventStore;
4-
import no.nav.kafka.sandbox.measurements.errorhandlers.RecoveringErrorHandler;
54
import no.nav.kafka.sandbox.messages.Measurements;
65
import org.apache.kafka.clients.consumer.ConsumerRecord;
76
import org.apache.kafka.common.header.Header;
87
import org.slf4j.Logger;
98
import org.slf4j.LoggerFactory;
109
import org.springframework.beans.factory.annotation.Value;
1110
import org.springframework.kafka.annotation.KafkaListener;
12-
import org.springframework.kafka.listener.BatchErrorHandler;
1311
import org.springframework.kafka.listener.BatchListenerFailedException;
1412
import org.springframework.kafka.support.serializer.DeserializationException;
15-
import org.springframework.kafka.support.serializer.ErrorHandlingDeserializer;
13+
import org.springframework.kafka.support.serializer.SerializationUtils;
1614
import org.springframework.stereotype.Component;
1715

1816
import java.io.ByteArrayInputStream;
@@ -40,14 +38,14 @@ public class MeasurementsConsumer {
4038

4139
private final long slowdownMillis;
4240

43-
private final boolean usingRecoveringBatchErrorHandler;
41+
private final boolean useBatchListenerFailedException;
4442

4543
public MeasurementsConsumer(EventStore<Measurements.SensorEvent> store,
4644
@Value("${measurements.consumer.slowdown:0}") long slowdownMillis,
47-
Optional<BatchErrorHandler> errorHandler) {
45+
@Value("${measurements.consumer.useBatchListenerFailedException:false}") boolean useBatchListenerFailedException) {
4846
this.eventStore = store;
4947
this.slowdownMillis = slowdownMillis;
50-
this.usingRecoveringBatchErrorHandler = errorHandler.isPresent() && errorHandler.get() instanceof RecoveringErrorHandler;
48+
this.useBatchListenerFailedException = useBatchListenerFailedException;
5149
}
5250

5351
/**
@@ -71,8 +69,8 @@ public void receive(List<ConsumerRecord<String, Measurements.SensorEvent>> recor
7169
NullPointerException businessException = new NullPointerException("Message at "
7270
+ record.topic() + "-" + record.partition() + ":" + record.offset() + " with key " + record.key() + " was null");
7371

74-
if (usingRecoveringBatchErrorHandler) {
75-
// Communicate to recovering batch error handler which record in the batch that failed, and the root cause
72+
if (useBatchListenerFailedException) {
73+
// Communicate to the error handler which record in the batch failed, and the root cause
7674
throw new BatchListenerFailedException(businessException.getMessage(), businessException, record);
7775
} else {
7876
// Throw raw root cause for other types of error handling
@@ -83,7 +81,8 @@ public void receive(List<ConsumerRecord<String, Measurements.SensorEvent>> recor
8381
try {
8482
eventStore.storeEvent(record.value());
8583
} catch (Exception e) {
86-
if (usingRecoveringBatchErrorHandler) {
84+
if (useBatchListenerFailedException) {
85+
// Communicate to the error handler which record in the batch failed, and the root cause
8786
throw new BatchListenerFailedException(e.getMessage(), e, record);
8887
} else {
8988
throw e;
@@ -103,7 +102,7 @@ private boolean checkFailedDeserialization(ConsumerRecord<String, Measurements.S
103102

104103

105104
private static Optional<Throwable> failedValueDeserialization(ConsumerRecord<String, Measurements.SensorEvent> record) {
106-
Header valueDeserializationError = record.headers().lastHeader(ErrorHandlingDeserializer.VALUE_DESERIALIZER_EXCEPTION_HEADER);
105+
Header valueDeserializationError = record.headers().lastHeader(SerializationUtils.VALUE_DESERIALIZER_EXCEPTION_HEADER);
107106
if (valueDeserializationError != null) {
108107
try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(valueDeserializationError.value()))){
109108
DeserializationException dex = (DeserializationException)ois.readObject();

clients-spring/src/main/java/no/nav/kafka/sandbox/measurements/MeasurementsRestController.java

+5-6
Original file line numberDiff line numberDiff line change
@@ -8,10 +8,8 @@
88
import org.springframework.web.bind.annotation.RequestParam;
99
import org.springframework.web.bind.annotation.RestController;
1010

11-
import java.time.LocalDateTime;
12-
import java.util.Collections;
11+
import java.time.OffsetDateTime;
1312
import java.util.List;
14-
import java.util.stream.Collectors;
1513

1614
@RestController
1715
public class MeasurementsRestController {
@@ -27,11 +25,12 @@ public MeasurementsRestController(EventStore<Measurements.SensorEvent> sensorEve
2725
* @return messages from oldest to most recent, optionally filtering by timestamp.
2826
*/
2927
@GetMapping(path = "/measurements/api", produces = MediaType.APPLICATION_JSON_VALUE)
30-
public List<Measurements.SensorEvent> getMeasurements(@RequestParam(value = "after", required = false, defaultValue = "1970-01-01T00:00")
31-
@DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime after) {
28+
public List<Measurements.SensorEvent> getMeasurements(@RequestParam(value = "after", required = false, defaultValue = "1970-01-01T00:00Z")
29+
@DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) OffsetDateTime after) {
30+
3231
return eventStore.fetchEvents().stream()
3332
.filter(e -> e.getTimestamp().isAfter(after))
34-
.collect(Collectors.toList());
33+
.toList();
3534
}
3635

3736
}

clients-spring/src/main/java/no/nav/kafka/sandbox/measurements/errorhandlers/RecoveringErrorHandler.java

+5-2
Original file line numberDiff line numberDiff line change
@@ -2,11 +2,14 @@
22

33
import org.slf4j.Logger;
44
import org.slf4j.LoggerFactory;
5+
import org.springframework.kafka.listener.DefaultErrorHandler;
56
import org.springframework.kafka.listener.ListenerExecutionFailedException;
6-
import org.springframework.kafka.listener.RecoveringBatchErrorHandler;
77
import org.springframework.util.backoff.FixedBackOff;
88

9-
public class RecoveringErrorHandler extends RecoveringBatchErrorHandler {
9+
/**
10+
* This error handler does not recover anything more than exactly failed records
11+
*/
12+
public class RecoveringErrorHandler extends DefaultErrorHandler {
1013

1114
private static final Logger LOG = LoggerFactory.getLogger(RecoveringErrorHandler.class);
1215

clients-spring/src/main/java/no/nav/kafka/sandbox/measurements/errorhandlers/RetryingErrorHandler.java

+8-4
Original file line numberDiff line numberDiff line change
@@ -4,18 +4,22 @@
44
import no.nav.kafka.sandbox.messages.Measurements;
55
import org.slf4j.Logger;
66
import org.slf4j.LoggerFactory;
7+
import org.springframework.kafka.listener.DefaultErrorHandler;
78
import org.springframework.kafka.listener.ListenerExecutionFailedException;
8-
import org.springframework.kafka.listener.RetryingBatchErrorHandler;
99
import org.springframework.util.backoff.FixedBackOff;
1010

1111
import java.io.IOException;
1212

13-
public class RetryingErrorHandler extends RetryingBatchErrorHandler {
13+
/**
14+
* Error handler with access to event store, tries to recover records by writing to store, under certain
15+
* conditions.
16+
*/
17+
public class RetryingErrorHandler extends DefaultErrorHandler {
1418

1519
private static final Logger LOG = LoggerFactory.getLogger(RetryingErrorHandler.class);
1620

1721
public RetryingErrorHandler(EventStore<Measurements.SensorEvent> store) {
18-
super(new FixedBackOff(2000L, 2), (record, exception) -> {
22+
super((record, exception) -> {
1923
Throwable cause = exception;
2024
if (exception instanceof ListenerExecutionFailedException) {
2125
cause = exception.getCause();
@@ -51,7 +55,7 @@ public RetryingErrorHandler(EventStore<Measurements.SensorEvent> store) {
5155
// Depending on business requirements (e.g. if not at-least-once semantics), then another strategy might
5256
// be to skip the whole batch, let Spring commit offsets and continue with the next instead.
5357
throw new RuntimeException("Unrecoverable batch error", cause);
54-
});
58+
}, new FixedBackOff(2000L, 2));
5559
}
5660

5761
}

clients-spring/src/main/resources/application.yml

+13-10
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,12 @@
11
spring:
22
application:
3-
name: Kafka sandbox web
3+
name: Kafka sandbox boot-app
44
# See class org.springframework.boot.autoconfigure.kafka.KafkaProperties:
55
kafka:
66
bootstrap-servers: localhost:9092
77
consumer:
8-
client-id: spring-web
9-
group-id: ${GROUPID:spring-web}
8+
client-id: boot-app
9+
group-id: ${GROUPID:boot-app}
1010
properties:
1111
spring.json.trusted.packages: no.nav.kafka.sandbox.messages
1212
value-deserializer: org.springframework.kafka.support.serializer.JsonDeserializer
@@ -44,15 +44,18 @@ measurements:
4444

4545
# Select error handler:
4646
# 'spring-default': just uses the Spring default for batch error handling (does not explicitly set an error handler).
47-
# 'ignore': logs, but ignores all errors from consumer, implemented in Spring error handler CommonLoggingErrorHandler.
47+
# 'log-and-ignore': logs, but ignores all errors from consumer, implemented in Spring error handler CommonLoggingErrorHandler.
4848
# 'infinite-retry': tries failed batches an infinite number of times, with a backoff/delay between each attempt. Spring DefaultErrorHandler with a BackOff.
49-
# 'seek-to-current': Spring SeekToCurrentBatchErrorHandler w/all defaults and no backoff (unlimited retries)
50-
# 'seek-to-current-with-backoff': Spring SeekToCurrentBatchErrorHandler w/fixed delay and max 2 retries.
51-
# 'retry-with-backoff': Spring RetryingBatchErrorHandler with no configured ConsumerRecordRecoverer
52-
# 'retry-with-backoff-recovery': Spring RetryingBatchErrorHandler with custom ConsumerRecordRecoverer set, see RetryingErrorHandler.
53-
# 'recovering': Spring RecoveringBatchErrorHandler, see RecoveringErrorHandler.
54-
# 'stop-container': Spring ContainerStoppingBatchErrorHandler
49+
# 'retry-with-backoff': Spring DefaultErrorHandler with 2 retry attempts
50+
# 'retry-with-backoff-recovery': no.nav.k.s.m.e.RetryingErrorHandler with custom ConsumerRecordRecoverer set.
51+
# 'recovering': no.nav.k.s.m.e.RecoveringErrorHandler
52+
# 'stop-container': Spring CommonContainerStoppingErrorHandler
5553
error-handler: spring-default
5654

55+
# Select whether consumer should throw BatchListenerFailedException w/cause when an internal processing failure occurs, or
56+
# just directly throw any exception. Setting to true will allow Spring to detect where a failure occurred in a batch of multiple
57+
# records.
58+
useBatchListenerFailedException: false
59+
5760
# Select whether deserialization exceptions of values should be handled:
5861
handle-deserialization-error: true

0 commit comments

Comments
 (0)