diff --git a/README-zh.md b/README-zh.md
index 2cded77..9740fd1 100644
--- a/README-zh.md
+++ b/README-zh.md
@@ -21,14 +21,14 @@ OpenAi4J是一个非官方的Java库,旨在帮助java开发者与OpenAI的GPT
 ## 导入依赖
 
 ### Gradle
-`implementation 'io.github.lambdua:<api|client|service>:0.22.3'`
+`implementation 'io.github.lambdua:<api|client|service>:0.22.4'`
 
 ### Maven
 
 ```xml
 <dependency>
     <groupId>io.github.lambdua</groupId>
     <artifactId>service</artifactId>
-    <version>0.22.3</version>
+    <version>0.22.4</version>
 </dependency>
 ```
@@ -61,7 +61,7 @@ static void simpleChat() {
 <dependency>
     <groupId>io.github.lambdua</groupId>
     <artifactId>api</artifactId>
-    <version>0.22.3</version>
+    <version>0.22.4</version>
 </dependency>
 ```
diff --git a/README.md b/README.md
index 39d1a7b..d896e2e 100644
--- a/README.md
+++ b/README.md
@@ -25,14 +25,14 @@ applications effortlessly.
 ## Import
 
 ### Gradle
-`implementation 'io.github.lambdua:<api|client|service>:0.22.3'`
+`implementation 'io.github.lambdua:<api|client|service>:0.22.4'`
 
 ### Maven
 
 ```xml
 <dependency>
     <groupId>io.github.lambdua</groupId>
     <artifactId>service</artifactId>
-    <version>0.22.3</version>
+    <version>0.22.4</version>
 </dependency>
 ```
@@ -67,7 +67,7 @@ To utilize pojos, import the api module:
 <dependency>
     <groupId>io.github.lambdua</groupId>
     <artifactId>api</artifactId>
-    <version>0.22.3</version>
+    <version>0.22.4</version>
 </dependency>
 ```
diff --git a/api/pom.xml b/api/pom.xml
index 46fa603..96da0cc 100644
--- a/api/pom.xml
+++ b/api/pom.xml
@@ -6,7 +6,7 @@
     <parent>
         <groupId>io.github.lambdua</groupId>
         <artifactId>openai-java</artifactId>
-        <version>0.22.3</version>
+        <version>0.22.4</version>
     </parent>
     <packaging>jar</packaging>
     <artifactId>api</artifactId>
diff --git a/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionChunk.java b/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionChunk.java
index f6fff8d..fca2ccf 100644
--- a/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionChunk.java
+++ b/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionChunk.java
@@ -1,5 +1,6 @@
 package com.theokanning.openai.completion.chat;
 
+import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.theokanning.openai.Usage;
 import lombok.Data;
@@ -51,4 +52,13 @@ public class ChatCompletionChunk {
      */
     Usage usage;
 
+    /**
+     * The original data packet returned by the chat completion stream.
+     * The value looks like this:
+     * <pre>
+     * data:{"id":"chatcmpl-A0QiHfuacgBSbvd8Ld1Por1HojY31","object":"chat.completion.chunk","created":1724666049,"model":"gpt-3.5-turbo-0125","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]}
+     * </pre>
+     */
+    @JsonIgnore
+    String source;
 }
diff --git a/client/pom.xml b/client/pom.xml
index a1107ff..78e9188 100644
--- a/client/pom.xml
+++ b/client/pom.xml
@@ -6,7 +6,7 @@
     <parent>
        <groupId>io.github.lambdua</groupId>
        <artifactId>openai-java</artifactId>
-        <version>0.22.3</version>
+        <version>0.22.4</version>
     </parent>
     <packaging>jar</packaging>
diff --git a/example/pom.xml b/example/pom.xml
index 09baf13..db08674 100644
--- a/example/pom.xml
+++ b/example/pom.xml
@@ -6,7 +6,7 @@
     <groupId>io.github.lambdua</groupId>
     <artifactId>example</artifactId>
-    <version>0.22.3</version>
+    <version>0.22.4</version>
     <name>example</name>
@@ -17,7 +17,7 @@
         <dependency>
             <groupId>io.github.lambdua</groupId>
             <artifactId>service</artifactId>
-            <version>0.22.3</version>
+            <version>0.22.4</version>
         </dependency>
diff --git a/pom.xml b/pom.xml
index 8306207..4c3fd5f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,7 +5,7 @@
     <groupId>io.github.lambdua</groupId>
    <artifactId>openai-java</artifactId>
-    <version>0.22.3</version>
+    <version>0.22.4</version>
    <packaging>pom</packaging>
    <name>openai java 版本</name>
    <url>https://github.com/Lambdua/openai-java</url>
diff --git a/service/pom.xml b/service/pom.xml
index 278c8f2..0e6a651 100644
--- a/service/pom.xml
+++ b/service/pom.xml
@@ -6,7 +6,7 @@
     <parent>
        <groupId>io.github.lambdua</groupId>
        <artifactId>openai-java</artifactId>
-        <version>0.22.3</version>
+        <version>0.22.4</version>
     </parent>
     <packaging>jar</packaging>
diff --git a/service/src/main/java/com/theokanning/openai/service/ChatMessageAccumulatorWrapper.java b/service/src/main/java/com/theokanning/openai/service/ChatMessageAccumulatorWrapper.java
new file mode 100644
index 0000000..7d13467
--- /dev/null
+++ b/service/src/main/java/com/theokanning/openai/service/ChatMessageAccumulatorWrapper.java
@@ -0,0 +1,28 @@
+package com.theokanning.openai.service;
+
+import com.theokanning.openai.completion.chat.ChatCompletionChunk;
+
+/**
+ * Wrapper class for ChatMessageAccumulator.
+ *
+ * @author Allen Hu
+ * @date 2024/10/18
+ */
+public class ChatMessageAccumulatorWrapper {
+
+    private final ChatMessageAccumulator chatMessageAccumulator;
+    private final ChatCompletionChunk chatCompletionChunk;
+
+    public ChatMessageAccumulatorWrapper(ChatMessageAccumulator chatMessageAccumulator, ChatCompletionChunk chatCompletionChunk) {
+        this.chatMessageAccumulator = chatMessageAccumulator;
+        this.chatCompletionChunk = chatCompletionChunk;
+    }
+
+    public ChatMessageAccumulator getChatMessageAccumulator() {
+        return chatMessageAccumulator;
+    }
+
+    public ChatCompletionChunk getChatCompletionChunk() {
+        return chatCompletionChunk;
+    }
+}
diff --git a/service/src/main/java/com/theokanning/openai/service/OpenAiService.java b/service/src/main/java/com/theokanning/openai/service/OpenAiService.java
index 5a3856f..fee6aa7 100644
--- a/service/src/main/java/com/theokanning/openai/service/OpenAiService.java
+++ b/service/src/main/java/com/theokanning/openai/service/OpenAiService.java
@@ -1,6 +1,7 @@
 package com.theokanning.openai.service;
 
 import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -72,6 +73,8 @@ import java.util.*;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.TimeUnit;
+import java.util.function.BiConsumer;
+import java.util.function.Supplier;
 
 public class OpenAiService {
@@ -190,7 +193,17 @@ public ChatCompletionResult createChatCompletion(ChatCompletionRequest request)
 
     public Flowable<ChatCompletionChunk> streamChatCompletion(ChatCompletionRequest request) {
         request.setStream(true);
-        return stream(api.createChatCompletionStream(request), ChatCompletionChunk.class);
+        return stream(api.createChatCompletionStream(request), ChatCompletionChunk.class, new BiConsumer<ChatCompletionChunk, SSE>() {
+            @Override
+            public void accept(ChatCompletionChunk chatCompletionChunk, SSE sse) {
+                chatCompletionChunk.setSource(sse.getData());
+            }
+        }, new Supplier<ChatCompletionChunk>() {
+            @Override
+            public ChatCompletionChunk get() {
+                return new ChatCompletionChunk();
+            }
+        });
     }
 
@@ -692,6 +705,31 @@ public static <T> Flowable<T> stream(Call<ResponseBody> apiCall, Class<T> cl) {
         return stream(apiCall).map(sse -> mapper.readValue(sse.getData(), cl));
     }
 
+    /**
+     * Calls the OpenAI api and returns a Flowable of type T for streaming,
+     * omitting the last message.
+     *
+     * @param apiCall     The api call
+     * @param cl          Class of type T to return
+     * @param consumer    Invoked after each instance has been deserialized from the SSE data
+     * @param newInstance Supplies a fallback instance when deserialization fails
+     */
+    public static <T> Flowable<T> stream(Call<ResponseBody> apiCall, Class<T> cl, BiConsumer<T, SSE> consumer,
+                                         Supplier<T> newInstance) {
+        return stream(apiCall, true).map(sse -> {
+            try {
+                T t = mapper.readValue(sse.getData(), cl);
+                if (Objects.nonNull(consumer)) {
+                    consumer.accept(t, sse);
+                }
+                return t;
+            } catch (JsonProcessingException e) {
+                T t = newInstance.get();
+                consumer.accept(t, sse);
+                return t;
+            }
+        });
+    }
+
     /**
      * Shuts down the OkHttp ExecutorService.
      * The default behaviour of OkHttp's ExecutorService (ConnectionPool)
@@ -758,6 +796,26 @@ public Flowable<ChatMessageAccumulator> mapStreamToAccumulator(Flowable<ChatCompletionChunk> flowable) {
         });
     }
 
+    public Flowable<ChatMessageAccumulatorWrapper> mapStreamToAccumulatorWrapper(Flowable<ChatCompletionChunk> flowable) {
+        ChatFunctionCall functionCall = new ChatFunctionCall(null, null);
+        AssistantMessage accumulatedMessage = new AssistantMessage();
+        return flowable.map(chunk -> {
+            List<ChatCompletionChoice> choices = chunk.getChoices();
+            AssistantMessage messageChunk = null;
+            if (null != choices && !choices.isEmpty()) {
+                ChatCompletionChoice firstChoice = choices.get(0);
+                messageChunk = firstChoice.getMessage();
+                appendContent(messageChunk, accumulatedMessage);
+                processFunctionCall(messageChunk, functionCall, accumulatedMessage);
+                processToolCalls(messageChunk, accumulatedMessage);
+                if (firstChoice.getFinishReason() != null) {
+                    handleFinishReason(firstChoice.getFinishReason(), functionCall, accumulatedMessage);
+                }
+            }
+            ChatMessageAccumulator chatMessageAccumulator = new ChatMessageAccumulator(messageChunk, accumulatedMessage, chunk.getUsage());
+            return new ChatMessageAccumulatorWrapper(chatMessageAccumulator, chunk);
+        });
+    }
 
     /**
      * 处理消息块中的函数调用。