diff --git a/ai/llmchat/Kconfig b/ai/llmchat/Kconfig
index 1ae4c1a56b..26f0b3370a 100644
--- a/ai/llmchat/Kconfig
+++ b/ai/llmchat/Kconfig
@@ -89,12 +89,32 @@ if PKG_USING_LLMCHAT
             "https://api.deepseek.com/chat/completions"
 endif
 
+    config PKG_LLM_THREAD_STACK_SIZE
+        int "llm thread size"
+        default 10240
+        help
+            llm thread size
+
+    config PKG_LLM_CMD_BUFFER_SIZE
+        int "llm cmd input buffer size"
+        default 256
+        help
+            llm cmd input buffer size
+
     config PKG_WEB_SORKET_BUFSZ
         int "webclient session buffer size"
         default 2048
         help
            webclient session buffer size
 
+    config PKG_LLMCHAT_STREAM
+        bool "Enable llmchat http stream"
+        default y
+
+    config PKG_LLMCHAT_DBG
+        bool "Enable llmchat debug"
+        default n
+
     choice
         prompt "Version"
         default PKG_USING_LLMCHAT_LATEST_VERSION
diff --git a/ai/llmchat/package.json b/ai/llmchat/package.json
index f4f443ecc1..1a9b5a2aa6 100644
--- a/ai/llmchat/package.json
+++ b/ai/llmchat/package.json
@@ -17,6 +17,11 @@
     "repository": "https://github.com/Rbb666/llm_chat",
     "homepage": "https://github.com/Rbb666/llm_chat#readme",
     "site": [
+        {
+            "version": "v1.2.0",
+            "URL": "https://github.com/Rbb666/llm_chat/archive/refs/tags/1.2.0.zip",
+            "filename": "llm_chat-1.2.0.zip"
+        },
         {
             "version": "v1.0.0",
             "URL": "https://github.com/Rbb666/llm_chat/archive/refs/tags/1.0.0.zip",