diff --git a/messaging-modules/apache-camel-kserve/.gitignore b/messaging-modules/apache-camel-kserve/.gitignore
new file mode 100644
index 000000000000..a9472bb4a309
--- /dev/null
+++ b/messaging-modules/apache-camel-kserve/.gitignore
@@ -0,0 +1,31 @@
+target/
+dependency-reduced-pom.xml
+
+### IntelliJ IDEA ###
+.idea
+*.iws
+*.iml
+*.ipr
+
+### Eclipse ###
+.apt_generated
+.classpath
+.factorypath
+.project
+.settings
+.springBeans
+.sts4-cache
+
+### NetBeans ###
+/nbproject/private/
+/nbbuild/
+/dist/
+/nbdist/
+/.nb-gradle/
+build/
+
+### VS Code ###
+.vscode/
+
+### Mac OS ###
+.DS_Store
diff --git a/messaging-modules/apache-camel-kserve/README.md b/messaging-modules/apache-camel-kserve/README.md
new file mode 100644
index 000000000000..8f197675943b
--- /dev/null
+++ b/messaging-modules/apache-camel-kserve/README.md
@@ -0,0 +1,15 @@
+This module contains 2 sub-modules:
+
+1) a Triton server with a pre-loaded model (which must be downloaded first)
+2) the sentiment-service, a Java application built with apache-camel-kserve
+
+Both modules contain a Dockerfile and can easily be deployed locally using the provided docker-compose.yml.
+
+First, you need to download the model from [huggingface](https://huggingface.co/pjxcharya/onnx-sentiment-model/tree/main) and place it in `triton-server/models/sentiment/1`.
+Then execute:
+
+```bash
+docker-compose up --build
+```
+
+The endpoint to test everything works is: `http://localhost:8080/sentiments?sentence=i probably like you`
diff --git a/messaging-modules/apache-camel-kserve/docker-compose.yml b/messaging-modules/apache-camel-kserve/docker-compose.yml
new file mode 100644
index 000000000000..5957194e5e8a
--- /dev/null
+++ b/messaging-modules/apache-camel-kserve/docker-compose.yml
@@ -0,0 +1,18 @@
+version: '3.8'
+
+services:
+  triton-server:
+    build: ./triton-server
+    environment:
+      - NVIDIA_VISIBLE_DEVICES=all
+    ports:
+      - "8000:8000" # HTTP
+      - "8001:8001" # gRPC
+      - "8002:8002" # Metrics
+  sentiment-service:
+    build: ./sentiment-service
+    depends_on:
+      - triton-server
+    ports:
+      - "8080:8080"
+    restart: unless-stopped
diff --git a/messaging-modules/apache-camel-kserve/pom.xml b/messaging-modules/apache-camel-kserve/pom.xml
new file mode 100644
index 000000000000..f31ae23c3d29
--- /dev/null
+++ b/messaging-modules/apache-camel-kserve/pom.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <packaging>pom</packaging>
+
+    <parent>
+        <groupId>com.baeldung</groupId>
+        <artifactId>messaging-modules</artifactId>
+        <version>0.0.1-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>sentiment-parent-pom</artifactId>
+
+    <modules>
+        <module>sentiment-service</module>
+    </modules>
+
+</project>
diff --git a/messaging-modules/apache-camel-kserve/sentiment-service/Dockerfile b/messaging-modules/apache-camel-kserve/sentiment-service/Dockerfile
new file mode 100644
index 000000000000..733db107a252
--- /dev/null
+++ b/messaging-modules/apache-camel-kserve/sentiment-service/Dockerfile
@@ -0,0 +1,12 @@
+FROM eclipse-temurin:21-jre
+
+WORKDIR /app
+
+# Copy the fat JAR produced by `mvn package` (shade plugin) — no builder stage here
+COPY target/sentiment-service-1.0-SNAPSHOT.jar app.jar
+
+# Expose HTTP port
+EXPOSE 8080
+
+# Run the app
+ENTRYPOINT ["java", "-jar", "app.jar"]
diff --git a/messaging-modules/apache-camel-kserve/sentiment-service/pom.xml b/messaging-modules/apache-camel-kserve/sentiment-service/pom.xml
new file mode 100644
index 000000000000..15fdecf539e6
--- /dev/null
+++ b/messaging-modules/apache-camel-kserve/sentiment-service/pom.xml
@@ -0,0 +1,110 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>com.baeldung</groupId>
+        <artifactId>sentiment-parent-pom</artifactId>
+        <version>0.0.1-SNAPSHOT</version>
+    </parent>
+
+    <name>Sentiment System - Service</name>
+    <description>This is the main service of the system, that uses Apache Camel to integrate with Triton server and
+        use an AI model for inference
+    </description>
+    <artifactId>sentiment-service</artifactId>
+
+    <properties>
+        <java.version>21</java.version>
+        <maven.compiler.source>${java.version}</maven.compiler.source>
+        <maven.compiler.target>${java.version}</maven.compiler.target>
+        <maven.compiler.release>${java.version}</maven.compiler.release>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+
+        <camel.version>4.13.0</camel.version>
+        <jackson-databind.version>2.19.2</jackson-databind.version>
+        <tokenizers.version>0.21.0</tokenizers.version>
+        <maven-shade-plugin.version>3.6.0</maven-shade-plugin.version>
+    </properties>
+
+    <dependencies>
+        <!-- Camel core -->
+        <dependency>
+            <groupId>org.apache.camel</groupId>
+            <artifactId>camel-main</artifactId>
+            <version>${camel.version}</version>
+        </dependency>
+
+        <!-- REST endpoint via Undertow -->
+        <dependency>
+            <groupId>org.apache.camel</groupId>
+            <artifactId>camel-undertow</artifactId>
+            <version>${camel.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.camel</groupId>
+            <artifactId>camel-rest</artifactId>
+            <version>${camel.version}</version>
+        </dependency>
+
+        <!-- KServe gRPC inference component -->
+        <dependency>
+            <groupId>org.apache.camel</groupId>
+            <artifactId>camel-kserve</artifactId>
+            <version>${camel.version}</version>
+        </dependency>
+
+        <!-- JSON handling -->
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+            <version>${jackson-databind.version}</version>
+        </dependency>
+
+        <!-- HuggingFace tokenizer (DJL) -->
+        <dependency>
+            <groupId>ai.djl.huggingface</groupId>
+            <artifactId>tokenizers</artifactId>
+            <version>${tokenizers.version}</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+                <configuration>
+                    <archive>
+                        <manifest>
+                            <addClasspath>true</addClasspath>
+                            <mainClass>org.learnings.aimodels.sentiments.Application</mainClass>
+                        </manifest>
+                    </archive>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <version>${maven-shade-plugin.version}</version>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <transformers>
+                                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                    <mainClass>org.learnings.aimodels.sentiments.Application</mainClass>
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
diff --git a/messaging-modules/apache-camel-kserve/sentiment-service/src/main/java/org/learnings/aimodels/sentiments/Application.java b/messaging-modules/apache-camel-kserve/sentiment-service/src/main/java/org/learnings/aimodels/sentiments/Application.java
new file mode 100644
index 000000000000..938210b07fe2
--- /dev/null
+++ b/messaging-modules/apache-camel-kserve/sentiment-service/src/main/java/org/learnings/aimodels/sentiments/Application.java
@@ -0,0 +1,31 @@
+package org.learnings.aimodels.sentiments;
+
+import org.apache.camel.CamelContext;
+import org.apache.camel.impl.DefaultCamelContext;
+import org.learnings.aimodels.sentiments.web.api.SentimentsRoute;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Entry point: boots a Camel context with the sentiment REST route and keeps
+ * the JVM alive until it is terminated (SIGTERM / Ctrl-C).
+ */
+public class Application {
+
+    private static final Logger log = LoggerFactory.getLogger(Application.class);
+
+    public static void main(String[] args) throws Exception {
+        CamelContext context = new DefaultCamelContext();
+        context.addRoutes(new SentimentsRoute());
+
+        // Stop Camel cleanly on JVM shutdown; a stop() call placed after an
+        // infinite sleep would be unreachable dead code.
+        Runtime.getRuntime().addShutdownHook(new Thread(context::stop, "camel-shutdown"));
+
+        context.start();
+        log.info("🚀 Sentiment service running on http://localhost:8080/sentiments");
+
+        // Block the main thread forever; the shutdown hook handles cleanup.
+        Thread.currentThread().join();
+    }
+}
diff --git a/messaging-modules/apache-camel-kserve/sentiment-service/src/main/java/org/learnings/aimodels/sentiments/web/api/SentimentsRoute.java b/messaging-modules/apache-camel-kserve/sentiment-service/src/main/java/org/learnings/aimodels/sentiments/web/api/SentimentsRoute.java
new file mode 100644
index 000000000000..b7a52b996d94
--- /dev/null
+++ b/messaging-modules/apache-camel-kserve/sentiment-service/src/main/java/org/learnings/aimodels/sentiments/web/api/SentimentsRoute.java
@@ -0,0 +1,116 @@
+package org.learnings.aimodels.sentiments.web.api;
+
+import ai.djl.huggingface.tokenizers.Encoding;
+import ai.djl.huggingface.tokenizers.HuggingFaceTokenizer;
+import com.google.protobuf.ByteString;
+import inference.GrpcPredictV2.InferTensorContents;
+import inference.GrpcPredictV2.ModelInferRequest;
+import inference.GrpcPredictV2.ModelInferResponse;
+import org.apache.camel.Exchange;
+import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.model.rest.RestBindingMode;
+import org.apache.camel.model.rest.RestParamType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.ByteOrder;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Exposes GET /sentiments?sentence=... and classifies the sentence by calling
+ * the "sentiment" model on a Triton server through the KServe gRPC protocol.
+ */
+public class SentimentsRoute extends RouteBuilder {
+
+    private static final Logger log = LoggerFactory.getLogger(SentimentsRoute.class);
+    // Tokenizer must match the one the ONNX model was exported with.
+    private final HuggingFaceTokenizer tokenizer = HuggingFaceTokenizer.newInstance("distilbert-base-uncased");
+
+    @Override
+    public void configure() {
+        // Configure REST via Undertow
+        restConfiguration()
+                .component("undertow")
+                .host("0.0.0.0")
+                .port(8080)
+                .bindingMode(RestBindingMode.off);
+
+        // REST GET endpoint
+        rest("/sentiments")
+                .get()
+                .param().name("sentence").required(true).type(RestParamType.query).endParam()
+                .outType(String[].class)
+                .responseMessage().code(200).message("the sentence is.. ").endResponseMessage()
+                .to("direct:classify");
+
+        // Main route: tokenize -> infer on Triton -> map logits to a label
+        from("direct:classify")
+                .routeId("sentiment-inference")
+                .setBody(this::createRequest)
+                .setHeader("Content-Type", constant("application/json"))
+                // Inside docker-compose the Triton container is reachable by its
+                // service name; "host.docker.internal" only works on Docker Desktop.
+                .to("kserve:infer?modelName=sentiment&target=triton-server:8001")
+                // .to("kserve:infer?modelName=sentiment&target=localhost:8001")
+                .process(this::postProcess);
+    }
+
+    /**
+     * Tokenizes the "sentence" query parameter and builds the KServe request
+     * with input_ids and attention_mask INT64 tensors of shape [1, seqLen].
+     */
+    private ModelInferRequest createRequest(Exchange exchange) {
+        String sentence = exchange.getIn().getHeader("sentence", String.class);
+        Encoding encoding = tokenizer.encode(sentence);
+        List<Long> inputIds = Arrays.stream(encoding.getIds()).boxed().collect(Collectors.toList());
+        List<Long> attentionMask = Arrays.stream(encoding.getAttentionMask()).boxed().collect(Collectors.toList());
+
+        var content0 = InferTensorContents.newBuilder().addAllInt64Contents(inputIds);
+        var input0 = ModelInferRequest.InferInputTensor.newBuilder()
+                .setName("input_ids").setDatatype("INT64").addShape(1).addShape(inputIds.size())
+                .setContents(content0);
+
+        var content1 = InferTensorContents.newBuilder().addAllInt64Contents(attentionMask);
+        var input1 = ModelInferRequest.InferInputTensor.newBuilder()
+                .setName("attention_mask").setDatatype("INT64").addShape(1).addShape(attentionMask.size())
+                .setContents(content1);
+
+        ModelInferRequest requestBody = ModelInferRequest.newBuilder()
+                .addInputs(0, input0).addInputs(1, input1)
+                .build();
+        log.debug("-- payload: [{}]", requestBody);
+
+        return requestBody;
+    }
+
+    /**
+     * Decodes the raw little-endian FP32 output tensor(s) into logits and
+     * maps them to a "good"/"bad" verdict.
+     */
+    private void postProcess(Exchange exchange) {
+        log.debug("-- in response");
+        ModelInferResponse response = exchange.getMessage().getBody(ModelInferResponse.class);
+
+        List<List<Float>> logits = response.getRawOutputContentsList().stream()
+                .map(ByteString::asReadOnlyByteBuffer)
+                .map(buf -> buf.order(ByteOrder.LITTLE_ENDIAN).asFloatBuffer())
+                .map(buf -> {
+                    List<Float> floats = new ArrayList<>(buf.remaining());
+                    while (buf.hasRemaining()) {
+                        floats.add(buf.get());
+                    }
+                    return floats;
+                })
+                .toList();
+
+        log.debug("-- logits: [{}]", logits);
+        // NOTE(review): compares |first logit| with the last logit — confirm this
+        // matches the model's [negative, positive] output contract.
+        String result = Math.abs(logits.getFirst().getFirst()) < logits.getFirst().getLast() ? "good" : "bad";
+
+        exchange.getMessage().setBody(result);
+    }
+}
diff --git a/messaging-modules/apache-camel-kserve/triton-server/Dockerfile b/messaging-modules/apache-camel-kserve/triton-server/Dockerfile
new file mode 100644
index 000000000000..57d9e0f320f5
--- /dev/null
+++ b/messaging-modules/apache-camel-kserve/triton-server/Dockerfile
@@ -0,0 +1,10 @@
+FROM nvcr.io/nvidia/tritonserver:25.02-py3
+
+# Copy the model repository into the container
+COPY models/ /models/
+
+# Expose default Triton ports
+EXPOSE 8000 8001 8002
+
+# Default command: run Triton against the baked-in model repository
+CMD ["tritonserver", "--model-repository=/models"]
diff --git a/messaging-modules/apache-camel-kserve/triton-server/models/sentiment/config.pbtxt b/messaging-modules/apache-camel-kserve/triton-server/models/sentiment/config.pbtxt
new file mode 100644
index 000000000000..e1f27d0f7b7f
--- /dev/null
+++ b/messaging-modules/apache-camel-kserve/triton-server/models/sentiment/config.pbtxt
@@ -0,0 +1,24 @@
+name: "sentiment"
+platform: "onnxruntime_onnx"
+max_batch_size: 8
+
+input [
+ {
+ name: "input_ids"
+ data_type: TYPE_INT64
+ dims: [ -1 ]
+ },
+ {
+ name: "attention_mask"
+ data_type: TYPE_INT64
+ dims: [ -1 ]
+ }
+]
+
+output [
+ {
+ name: "logits"
+ data_type: TYPE_FP32
+ dims: [ 2 ]
+ }
+]
diff --git a/messaging-modules/pom.xml b/messaging-modules/pom.xml
index 042c78e844af..e79af560b95f 100644
--- a/messaging-modules/pom.xml
+++ b/messaging-modules/pom.xml
@@ -16,6 +16,7 @@
apache-camel
+ apache-camel-kserve
apache-rocketmq
automq
jgroups