From 92136afb7d23ae9ac439e77f9dc669b04ba68119 Mon Sep 17 00:00:00 2001
From: Clivia <132346501+Yanyutin753@users.noreply.github.com>
Date: Fri, 16 Aug 2024 21:12:26 +0800
Subject: [PATCH 1/5] 💄 Improve uploaded-file styling
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/api/openapi.ts          |  17 +++---
 src/views/mj/aiGpt.vue      |   2 +-
 src/views/mj/aiGptInput.vue |   9 +++-
 src/views/mj/mjTextAttr.vue | 103 +++++++++++++++++++++++++++---------
 4 files changed, 96 insertions(+), 35 deletions(-)

diff --git a/src/api/openapi.ts b/src/api/openapi.ts
index 4f52feaa08..7b5131414f 100644
--- a/src/api/openapi.ts
+++ b/src/api/openapi.ts
@@ -574,21 +574,26 @@ export const getHistoryMessage= async (dataSources:Chat.Chat[],loadingCnt=1 ,sta
         let o = dataSources[ii];
         //mlog('o',ii ,o);
         let content= o.text;
+        let obj={
+            "role": 'user' as 'system' | 'user' | 'assistant',
+            "content": [] as any
+        };
         if( o.inversion && o.opt?.images && o.opt.images.length>0 ){
-            //Get attachment info, e.g. images or files
+            //Attachments need to be remote image or file URLs (similar to the file-upload approach of gpt-4-all, V's domestic reverse-engineered relay, or gpt-4o-all)
             try{
                 let str = await localGet( o.opt.images[0]) as string;
                 let fileBase64= JSON.parse(str) as string[];
                 let arr = fileBase64.filter( (ff:string)=>ff.indexOf('http')>-1);
-                if(arr.length>0) content = arr.join(' ')+' '+ content ;
-
+                obj.content.push({ "type": "text", "text": content });
+                arr.forEach((f: string) => {
+                    obj.content.push({ "type": "image_url", "image_url": { url: f } });
+                });
                 mlog(t('mjchat.attr') ,o.opt.images[0] , content );
             }catch(ee){
             }
         }
-
-        //mlog('d',gptConfigStore.myData.talkCount ,i ,o.inversion , o.text);
-        rz.push({content , role: !o.inversion ? 'assistant' : 'user'});
+        // mlog('d',gptConfigStore.myData.talkCount ,i ,o.inversion , o.text);
+        rz.push(obj);
     }
     rz.reverse();
     mlog('rz',rz);
diff --git a/src/views/mj/aiGpt.vue b/src/views/mj/aiGpt.vue
index e2eb7aaf8a..e8597311eb 100644
--- a/src/views/mj/aiGpt.vue
+++ b/src/views/mj/aiGpt.vue
@@ -71,7 +71,7 @@ watch(()=>homeStore.myData.act, async (n)=>{
 
     if( !canVisionModel(model) ) model= canBase64Model(model)//model='gpt-4-vision-preview';
     try{
-        let images= await localSaveAny( JSON.stringify( dd.fileBase64) ) ;
+        let images= await localSaveAny( JSON.stringify({fileName: dd.fileName, fileBase64: dd.fileBase64 }) ) ;
         mlog('key', images );
         promptMsg.opt= {images:[images]}
     }catch(e){
diff --git a/src/views/mj/aiGptInput.vue b/src/views/mj/aiGptInput.vue
index 5d19f87c30..1b0f0d727d 100644
--- a/src/views/mj/aiGptInput.vue
+++ b/src/views/mj/aiGptInput.vue
@@ -24,7 +24,7 @@ const chatStore = useChatStore()
 const emit = defineEmits(['update:modelValue'])
 const props = defineProps<{ modelValue:string,disabled?:boolean,searchOptions?:AutoCompleteOptions,renderOption?: RenderLabel }>();
 const fsRef = ref()
-const st = ref<{fileBase64:string[],isLoad:number,isShow:boolean,showMic:boolean,micStart:boolean}>({fileBase64:[],isLoad:0
+const st = ref<{fileBase64:string[],fileName:string[],isLoad:number,isShow:boolean,showMic:boolean,micStart:boolean}>({fileBase64:[],fileName:[],isLoad:0
     ,isShow:false,showMic:false , micStart:false})
 const { isMobile } = useBasicLayout()
 const placeholder = computed(() => {
@@ -50,11 +50,13 @@ const handleSubmit = ( ) => {
     }
     let obj={
         prompt: mvalue.value,
-        fileBase64:st.value.fileBase64
+        fileBase64:st.value.fileBase64,
+        fileName:st.value.fileName
     }
     homeStore.setMyData({act:'gpt.submit', actData:obj });
     mvalue.value='';
     st.value.fileBase64=[];
+    st.value.fileName=[];
     return false;
 }
 const ms= useMessage();
@@ -102,6 +104,7 @@ funt();
                 return ;
             }
             st.value.fileBase64.push(d)
+            st.value.fileName.push(file.name)
         } ).catch(e=>ms.error(e));
     }
 }else{
@@ -117,8 +120,10 @@ funt();
         ms.info(t('mj.uploadSuccess'));
         if(r.url.indexOf('http')>-1) {
             st.value.fileBase64.push(r.url)
+            st.value.fileName.push(file.name)
         }else{
             st.value.fileBase64.push(location.origin +r.url)
+            st.value.fileName.push(file.name)
         }
     }else if(r.error) ms.error(r.error);
 }).catch(e=>{
diff --git a/src/views/mj/mjTextAttr.vue b/src/views/mj/mjTextAttr.vue
index 1825d7998b..325d4a3b69 100644
--- a/src/views/mj/mjTextAttr.vue
+++ b/src/views/mj/mjTextAttr.vue
@@ -1,33 +1,84 @@
+
\ No newline at end of file

From 13344b456239996e6dcc9086f6487c724d894f12 Mon Sep 17 00:00:00 2001
From: Clivia <132346501+Yanyutin753@users.noreply.github.com>
Date: Sun, 18 Aug 2024 00:14:50 +0800
Subject: [PATCH 2/5] 💄 Improve file-upload styling
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/views/mj/mjTextAttr.vue | 43 +++++++++++++------------------------
 1 file changed, 15 insertions(+), 28 deletions(-)

diff --git a/src/views/mj/mjTextAttr.vue b/src/views/mj/mjTextAttr.vue
index 325d4a3b69..0bc9fef3cc 100644
--- a/src/views/mj/mjTextAttr.vue
+++ b/src/views/mj/mjTextAttr.vue
@@ -43,42 +43,29 @@ loadImages();
\ No newline at end of file

From 05f1934026d159c6d7c2d663e84bf727a0ba6716 Mon Sep 17 00:00:00 2001
From: Clivia <132346501+Yanyutin753@users.noreply.github.com>
Date: Tue, 20 Aug 2024 12:16:10 +0800
Subject: [PATCH 3/5] revert openapi.ts

---
 src/api/openapi.ts | 16 +++++-----------
 1 file changed, 5 insertions(+), 11 deletions(-)

diff --git a/src/api/openapi.ts b/src/api/openapi.ts
index 7b5131414f..cf1c71e226 100644
--- a/src/api/openapi.ts
+++ b/src/api/openapi.ts
@@ -574,26 +574,20 @@ export const getHistoryMessage= async (dataSources:Chat.Chat[],loadingCnt=1 ,sta
         let o = dataSources[ii];
         //mlog('o',ii ,o);
         let content= o.text;
-        let obj={
-            "role": 'user' as 'system' | 'user' | 'assistant',
-            "content": [] as any
-        };
         if( o.inversion && o.opt?.images && o.opt.images.length>0 ){
-            //Attachments need to be remote image or file URLs (similar to the file-upload approach of gpt-4-all, V's domestic reverse-engineered relay, or gpt-4o-all)
+            //Get attachment info, e.g. images or files
             try{
                 let str = await localGet( o.opt.images[0]) as string;
                 let fileBase64= JSON.parse(str) as string[];
                 let arr = fileBase64.filter( (ff:string)=>ff.indexOf('http')>-1);
-                obj.content.push({ "type": "text", "text": content });
-                arr.forEach((f: string) => {
-                    obj.content.push({ "type": "image_url", "image_url": { url: f } });
-                });
+                if(arr.length>0) content = arr.join(' ')+' '+ content ;
                 mlog(t('mjchat.attr') ,o.opt.images[0] , content );
             }catch(ee){
             }
         }
-        // mlog('d',gptConfigStore.myData.talkCount ,i ,o.inversion , o.text);
-        rz.push(obj);
+
+        //mlog('d',gptConfigStore.myData.talkCount ,i ,o.inversion , o.text);
+        rz.push({content , role: !o.inversion ? 'assistant' : 'user'});
     }
     rz.reverse();
     mlog('rz',rz);

From 3eb19a409f3db9c339814cd21034dad461a9bf9b Mon Sep 17 00:00:00 2001
From: Clivia <132346501+Yanyutin753@users.noreply.github.com>
Date: Fri, 16 Aug 2024 21:12:26 +0800
Subject: [PATCH 4/5] 💄 Improve file-upload styling
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

💄 Improve uploaded-file styling
---
 src/api/openapi.ts          |  1 -
 src/views/mj/aiGpt.vue      |  2 +-
 src/views/mj/aiGptInput.vue |  9 +++-
 src/views/mj/mjTextAttr.vue | 92 ++++++++++++++++++++++++++-----------
 4 files changed, 73 insertions(+), 31 deletions(-)

diff --git a/src/api/openapi.ts b/src/api/openapi.ts
index 4f52feaa08..cf1c71e226 100644
--- a/src/api/openapi.ts
+++ b/src/api/openapi.ts
@@ -581,7 +581,6 @@ export const getHistoryMessage= async (dataSources:Chat.Chat[],loadingCnt=1 ,sta
             let fileBase64= JSON.parse(str) as string[];
             let arr = fileBase64.filter( (ff:string)=>ff.indexOf('http')>-1);
             if(arr.length>0) content = arr.join(' ')+' '+ content ;
-            mlog(t('mjchat.attr') ,o.opt.images[0] , content );
         }catch(ee){
         }
diff --git a/src/views/mj/aiGpt.vue b/src/views/mj/aiGpt.vue
index e2eb7aaf8a..e8597311eb 100644
--- a/src/views/mj/aiGpt.vue
+++ b/src/views/mj/aiGpt.vue
@@ -71,7 +71,7 @@ watch(()=>homeStore.myData.act, async (n)=>{
 
     if( !canVisionModel(model) ) model= canBase64Model(model)//model='gpt-4-vision-preview';
     try{
-        let images= await localSaveAny( JSON.stringify( dd.fileBase64) ) ;
+        let images= await localSaveAny( JSON.stringify({fileName: dd.fileName, fileBase64: dd.fileBase64 }) ) ;
         mlog('key', images );
         promptMsg.opt= {images:[images]}
     }catch(e){
diff --git a/src/views/mj/aiGptInput.vue b/src/views/mj/aiGptInput.vue
index 5d19f87c30..1b0f0d727d 100644
--- a/src/views/mj/aiGptInput.vue
+++ b/src/views/mj/aiGptInput.vue
@@ -24,7 +24,7 @@ const chatStore = useChatStore()
 const emit = defineEmits(['update:modelValue'])
 const props = defineProps<{ modelValue:string,disabled?:boolean,searchOptions?:AutoCompleteOptions,renderOption?: RenderLabel }>();
 const fsRef = ref()
-const st = ref<{fileBase64:string[],isLoad:number,isShow:boolean,showMic:boolean,micStart:boolean}>({fileBase64:[],isLoad:0
+const st = ref<{fileBase64:string[],fileName:string[],isLoad:number,isShow:boolean,showMic:boolean,micStart:boolean}>({fileBase64:[],fileName:[],isLoad:0
     ,isShow:false,showMic:false , micStart:false})
 const { isMobile } = useBasicLayout()
 const placeholder = computed(() => {
@@ -50,11 +50,13 @@ const handleSubmit = ( ) => {
     }
     let obj={
         prompt: mvalue.value,
-        fileBase64:st.value.fileBase64
+        fileBase64:st.value.fileBase64,
+        fileName:st.value.fileName
     }
     homeStore.setMyData({act:'gpt.submit', actData:obj });
     mvalue.value='';
     st.value.fileBase64=[];
+    st.value.fileName=[];
     return false;
 }
 const ms= useMessage();
@@ -102,6 +104,7 @@ funt();
                 return ;
             }
             st.value.fileBase64.push(d)
+            st.value.fileName.push(file.name)
         } ).catch(e=>ms.error(e));
     }
 }else{
@@ -117,8 +120,10 @@ funt();
         ms.info(t('mj.uploadSuccess'));
         if(r.url.indexOf('http')>-1) {
             st.value.fileBase64.push(r.url)
+            st.value.fileName.push(file.name)
         }else{
             st.value.fileBase64.push(location.origin +r.url)
+            st.value.fileName.push(file.name)
         }
     }else if(r.error) ms.error(r.error);
 }).catch(e=>{
diff --git a/src/views/mj/mjTextAttr.vue b/src/views/mj/mjTextAttr.vue
index 1825d7998b..0bc9fef3cc 100644
--- a/src/views/mj/mjTextAttr.vue
+++ b/src/views/mj/mjTextAttr.vue
@@ -1,33 +1,71 @@
+
\ No newline at end of file

From 0f769c6f458d4915d7f4c132f9d6cde35c91aae0 Mon Sep 17 00:00:00 2001
From: Clivia <132346501+Yanyutin753@users.noreply.github.com>
Date: Tue, 20 Aug 2024 12:30:49 +0800
Subject: [PATCH 5/5] 💄 Improve uploaded-image styling
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/views/mj/mjTextAttr.vue | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/views/mj/mjTextAttr.vue b/src/views/mj/mjTextAttr.vue
index 0bc9fef3cc..96d8a469bc 100644
--- a/src/views/mj/mjTextAttr.vue
+++ b/src/views/mj/mjTextAttr.vue
@@ -45,7 +45,7 @@ loadImages();
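
After this series, aiGpt.vue persists the attachment via localSaveAny as a JSON object {fileName, fileBase64}, while the reverted getHistoryMessage in openapi.ts still parses the stored value as a plain string[]. Below is a minimal TypeScript sketch of one way a caller could tolerate both shapes; AttachmentPayload and parseAttachment are hypothetical names introduced for illustration, not identifiers from the repository.

// A minimal sketch, assuming the stored value is either the legacy string[]
// or the new { fileName, fileBase64 } object written by aiGpt.vue.
// "AttachmentPayload" and "parseAttachment" are illustrative, not repo code.
interface AttachmentPayload {
  fileName: string[];
  fileBase64: string[];
}

function parseAttachment(stored: string): AttachmentPayload {
  const parsed = JSON.parse(stored);
  if (Array.isArray(parsed)) {
    // Legacy records stored only the list of URLs/base64 data, no file names.
    return { fileName: [], fileBase64: parsed as string[] };
  }
  return {
    fileName: Array.isArray(parsed.fileName) ? parsed.fileName : [],
    fileBase64: Array.isArray(parsed.fileBase64) ? parsed.fileBase64 : [],
  };
}

// Example: keep only remote URLs, mirroring the filter used in getHistoryMessage.
const demo = parseAttachment(JSON.stringify({
  fileName: ['a.png'],
  fileBase64: ['https://example.com/a.png'],
}));
const remoteUrls = demo.fileBase64.filter((f) => f.indexOf('http') > -1);
console.log(remoteUrls);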