Skip to content

Commit 5201349

Browse files
committed
fix error when clicking the stop button and optimize SSE logic
1 parent cdd8a86 commit 5201349

File tree

5 files changed

+60
-61
lines changed

5 files changed

+60
-61
lines changed

components/MsgEditor.vue

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
:disabled="disabled"
99
:loading="loading"
1010
:hint="hint"
11+
:hide-details="loading"
1112
append-inner-icon="send"
1213
@keyup.enter.exact="enterOnly"
1314
@click:appendInner="clickSendBtn"

package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
"highlight.js": "^11.7.0",
1919
"is-mobile": "^3.1.1",
2020
"marked": "^4.2.12",
21+
"nanoid": "^4.0.1",
2122
"vuetify": "^3.0.6"
2223
},
2324
"license": "MIT"

pages/index.vue

Lines changed: 33 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,20 @@
11
<script setup>
2-
import { fetchEventSource } from '@microsoft/fetch-event-source'
2+
import {EventStreamContentType, fetchEventSource} from '@microsoft/fetch-event-source'
33
44
const runtimeConfig = useRuntimeConfig()
55
const currentModel = useCurrentModel()
66
const openaiApiKey = useApiKey()
77
const fetchingResponse = ref(false)
8+
9+
let ctrl
10+
const abortFetch = () => {
11+
if (ctrl) {
12+
ctrl.abort()
13+
}
14+
fetchingResponse.value = false
15+
}
816
const fetchReply = async (message, parentMessageId) => {
9-
const ctrl = new AbortController()
17+
ctrl = new AbortController()
1018
try {
1119
await fetchEventSource('/api/conversation', {
1220
signal: ctrl.signal,
@@ -22,43 +30,50 @@ const fetchReply = async (message, parentMessageId) => {
2230
conversationId: currentConversation.value.id
2331
}),
2432
onopen(response) {
25-
if (response.status === 200) {
33+
if (response.ok && response.headers.get('content-type') === EventStreamContentType) {
2634
return;
2735
}
2836
throw new Error(`Failed to send message. HTTP ${response.status} - ${response.statusText}`);
2937
},
3038
onclose() {
39+
if (ctrl.signal.aborted === true) {
40+
return;
41+
}
3142
throw new Error(`Failed to send message. Server closed the connection unexpectedly.`);
3243
},
3344
onerror(err) {
3445
throw err;
3546
},
3647
onmessage(message) {
37-
if (message.event === 'error') {
38-
throw new Error(JSON.parse(message.data).error);
48+
const event = message.event
49+
const data = JSON.parse(message.data)
50+
51+
if (event === 'error') {
52+
throw new Error(data.error);
3953
}
40-
const { type, data } = JSON.parse(message.data);
41-
if (type === 'done') {
54+
55+
if (event === 'done') {
4256
if (currentConversation.value.id === null) {
4357
currentConversation.value.id = data.conversationId
4458
}
4559
currentConversation.value.messages[currentConversation.value.messages.length - 1].id = data.messageId
46-
ctrl.abort();
47-
fetchingResponse.value = false
60+
abortFetch()
4861
return;
4962
}
63+
5064
if (currentConversation.value.messages[currentConversation.value.messages.length - 1].from === 'ai') {
51-
currentConversation.value.messages[currentConversation.value.messages.length - 1].message += data
65+
currentConversation.value.messages[currentConversation.value.messages.length - 1].message += data.content
5266
} else {
53-
currentConversation.value.messages.push({id: null, from: 'ai', message: data})
67+
currentConversation.value.messages.push({id: null, from: 'ai', message: data.content})
5468
}
69+
5570
scrollChatWindow()
5671
},
5772
})
5873
} catch (err) {
59-
ctrl.abort()
74+
console.log(err)
75+
abortFetch()
6076
showSnackbar(err.message)
61-
fetchingResponse.value = false
6277
}
6378
}
6479
@@ -70,6 +85,9 @@ const currentConversation = ref({})
7085
7186
const grab = ref(null)
7287
const scrollChatWindow = () => {
88+
if (grab.value === null) {
89+
return;
90+
}
7391
grab.value.scrollIntoView({behavior: 'smooth'})
7492
}
7593
@@ -91,8 +109,7 @@ const send = (message) => {
91109
scrollChatWindow()
92110
}
93111
const stop = () => {
94-
ctrl.abort();
95-
fetchingResponse.value = false
112+
abortFetch()
96113
}
97114
98115
const snackbar = ref(false)
@@ -125,7 +142,7 @@ createNewConversation()
125142
</v-container>
126143
<v-divider></v-divider>
127144
</v-card>
128-
<div ref="grab" class="w-100" style="height: 150px;"></div>
145+
<div ref="grab" class="w-100" style="height: 200px;"></div>
129146
</div>
130147
<Welcome v-else />
131148
<v-footer app class="d-flex flex-column">

server/api/conversation.post.js

Lines changed: 24 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -1,49 +1,39 @@
11
import ChatGPTClient from '@waylaidwanderer/chatgpt-api'
22
import { PassThrough } from 'node:stream'
3+
import { nanoid } from 'nanoid'
34

4-
const serializeSSEEvent = (chunk) => {
5-
let payload = "";
6-
if (chunk.id) {
7-
payload += `id: ${chunk.id}\n`;
8-
}
9-
if (chunk.event) {
10-
payload += `event: ${chunk.event}\n`;
11-
}
12-
if (chunk.data) {
13-
payload += `data: ${chunk.data}\n`;
14-
}
15-
if (chunk.retry) {
16-
payload += `retry: ${chunk.retry}\n`;
17-
}
18-
if (!payload) {
19-
return "";
20-
}
21-
payload += "\n";
22-
return payload;
5+
const serializeSSEEvent = (event, data) => {
6+
const id = nanoid();
7+
const eventStr = event ? `event: ${event}\n` : '';
8+
const dataStr = data ? `data: ${JSON.stringify(data)}\n` : '';
9+
10+
return `id: ${id}\n${eventStr}${dataStr}\n`;
2311
}
2412

2513
export default defineEventHandler(async (event) => {
2614
const body = await readBody(event)
2715
const conversationId = body.conversationId ? body.conversationId.toString() : undefined
2816
const parentMessageId = body.parentMessageId ? body.parentMessageId.toString() : undefined
2917
const tunnel = new PassThrough()
30-
const writeToTunnel = (data) => {
31-
tunnel.write(serializeSSEEvent(data))
18+
const writeToTunnel = (event, data) => {
19+
tunnel.write(serializeSSEEvent(event, data))
20+
}
21+
const endTunnel = () => {
22+
tunnel.end()
3223
}
24+
3325
setResponseHeaders(event, {
3426
'Content-Type': 'text/event-stream',
3527
'Cache-Control': 'no-cache',
3628
'Connection': 'keep-alive'
3729
})
3830

3931
if (!body.openaiApiKey) {
40-
writeToTunnel({
41-
event: 'error',
42-
data: JSON.stringify({
43-
code: 503,
44-
error: 'You haven\'t set the api key of openai',
45-
}),
32+
writeToTunnel('error', {
33+
code: 503,
34+
error: 'You haven\'t set the api key of openai',
4635
})
36+
endTunnel()
4737
return sendStream(event, tunnel)
4838
}
4939

@@ -79,29 +69,19 @@ export default defineEventHandler(async (event) => {
7969
parentMessageId,
8070
onProgress: (token) => {
8171
// console.log(token)
82-
writeToTunnel({ data: JSON.stringify({
83-
type: 'token',
84-
data: token
85-
})
86-
})
72+
writeToTunnel('message',{content: token})
8773
}
8874
});
89-
writeToTunnel({ data: JSON.stringify({
90-
type: 'done',
91-
data: response
92-
}) })
93-
console.log(response)
75+
writeToTunnel('done',response)
76+
console.info(response)
9477
} catch (e) {
9578
const code = e?.json?.data?.code || 503;
9679
const message = e?.json?.error?.message || 'There was an error communicating with ChatGPT.';
97-
writeToTunnel({
98-
event: 'error',
99-
data: JSON.stringify({
100-
code,
101-
error: message,
102-
}),
80+
writeToTunnel('error', {
81+
code,
82+
error: message
10383
})
10484
}
105-
85+
tunnel.end()
10686
return sendStream(event, tunnel)
10787
})

yarn.lock

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3266,7 +3266,7 @@ nanoid@^3.3.4:
32663266
resolved "https://registry.npmmirror.com/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab"
32673267
integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==
32683268

3269-
nanoid@^4.0.0:
3269+
nanoid@^4.0.0, nanoid@^4.0.1:
32703270
version "4.0.1"
32713271
resolved "https://registry.npmmirror.com/nanoid/-/nanoid-4.0.1.tgz#398d7ccfdbf9faf2231b2ca7e8fff5dbca6a509b"
32723272
integrity sha512-udKGtCCUafD3nQtJg9wBhRP3KMbPglUsgV5JVsXhvyBs/oefqb4sqMEhKBBgqZncYowu58p1prsZQBYvAj/Gww==

0 commit comments

Comments (0)