 import { Icon } from '@iconify/react';
-import { Alert, Box, Button, Stack, TextField, Typography, useTheme } from '@mui/material';
+import {
+  Accordion,
+  AccordionDetails,
+  AccordionSummary,
+  Alert,
+  Box,
+  Button,
+  Stack,
+  TextField,
+  Typography,
+  useTheme,
+} from '@mui/material';
 import React from 'react';
 import * as sanitizeHtml from 'sanitize-html';
 import { v4 as uuidv4 } from 'uuid';
+import { ICONS } from '../../../../icons/iconify-icons-mapping';
 import { authHeaders, routeAssistantChatModel } from '../..';
 import { CHAT_ENTITY } from '../../constants';
 import { useStreaming } from '../../hooks/useStreaming';
 import { StyledChatFooter, StyledLoadingDot, StyledMessageList } from '../../styles';
-import { ConversationItem, ConversationPayload, ConversationResponseChunk } from '../../types';
+import {
+  ChatItemResponse,
+  ConversationItem,
+  ConversationPayload,
+  ConversationResponseChunk,
+  ResponseThought,
+} from '../../types';
 import { createChunkStreamFetcher, createConversationPayload } from '../../utils';
 import formatAiMessage from '../../utils/formatMessage';
 import { Message } from '../Message';
 import { ChatProps } from './types';
 
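+// Renders streamed tool "thoughts" as accordions; a thought stays expanded while it is
+// in progress, when it is the latest one, or when the user has expanded it manually.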
+const ChatThoughts = ({ thoughts }: { thoughts: ResponseThought[] }) => {
+  const [expanded, setExpanded] = React.useState<string | false>(false);
+
+  const handleChange = (panel: string) => (event: React.SyntheticEvent, isExpanded: boolean) => {
+    setExpanded(isExpanded ? panel : false);
+  };
+
+  return (
+    <>
+      {thoughts?.map((thought, idx) => {
+        const isLast = idx === thoughts.length - 1;
+        const isLoading = thought.in_progress;
+
+        return (
+          <Accordion
+            expanded={isLoading || isLast || expanded === thought.id_}
+            onChange={handleChange(thought.id_)}
+            sx={{ backgroundColor: 'transparent' }}
+          >
+            <AccordionSummary
+              id={thought.id_}
+              expandIcon={<Icon icon={ICONS.ARROW_DOWN} width={20} height={20} />}
+            >
+              <Typography fontWeight={600} fontSize={14}>
+                Tool: {thought.tool_name}
+              </Typography>
+            </AccordionSummary>
+            <AccordionDetails>
+              <Stack spacing={1}>
+                <div>
+                  {thought?.processedChunks &&
+                    thought?.processedChunks.map((segment) => {
+                      return (
+                        <React.Fragment key={segment.id}>
+                          {segment.isCode ? (
+                            <code
+                              dangerouslySetInnerHTML={{ __html: sanitizeHtml(segment?.text) }}
+                            />
+                          ) : (
+                            <span
+                              dangerouslySetInnerHTML={{ __html: sanitizeHtml(segment?.text) }}
+                            />
+                          )}
+                        </React.Fragment>
+                      );
+                    })}
+                </div>
+                {isLoading && (
+                  <Stack direction="row" alignItems="center">
+                    <StyledLoadingDot />
+                    <StyledLoadingDot />
+                    <StyledLoadingDot />
+                  </Stack>
+                )}
+              </Stack>
+            </AccordionDetails>
+          </Accordion>
+        );
+      })}
+    </>
+  );
+};
+
 export const Chat = ({
   codemieSecretData,
   conversation,
   updateConversation,
   requestError,
 }: ChatProps) => {
   const [_conversation, setConversation] = React.useState<ConversationItem>(conversation);
+  const [isRequestLoading, setIsRequestLoading] = React.useState(false);
 
   const inputRef = React.useRef<HTMLInputElement>(null);
 
@@ -39,12 +121,17 @@ export const Chat = ({
   >({
     fetcher: assistantChatFetcher,
     onNewChunk: async (value, accumulator) => {
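+      // A streamed chunk has arrived, so the request-level loading indicator set in onStart can be hidden.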
+      setIsRequestLoading(false);
+
       const _accumulator = [...accumulator];
 
       if (value.last) {
         setConversation((prev) => {
           const updatedHistory = [...prev.conversationHistory];
-          updatedHistory[updatedHistory.length - 1].response.message = value.generated;
+          const historyLength = updatedHistory.length;
+          const lastHistoryItem = updatedHistory[historyLength - 1];
+          lastHistoryItem.response.message = value.generated;
+          lastHistoryItem.response.inProgress = false;
 
           const newConversation = {
             ...prev,
@@ -58,43 +145,85 @@ export const Chat = ({
         return _accumulator;
       }
 
-
-      const generatedText = value.generated_chunk || value.thought.message;
-      if (generatedText.trim() !== '') {
-        _accumulator.push(generatedText);
-      }
-
       let chatStream = _accumulator.join('');
+      let generatedText = '';
 
-      if ((chatStream.match(/```/g) || []).length % 2 === 1) {
-        chatStream += '```'; // append a closing ```
-      }
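+      // Thought chunks are merged into the matching thought on the last history item;
+      // plain generated chunks keep extending the accumulated markdown stream handled below.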
+      if (value.thought) {
+        const thought = value.thought;
 
-      const formattedChunks = await formatAiMessage(chatStream);
+        setConversation((prev) => {
+          const historyLength = prev.conversationHistory.length;
+          const lastHistoryItem = prev.conversationHistory[historyLength - 1];
+          if (!lastHistoryItem.response.thoughts) lastHistoryItem.response.thoughts = [];
 
-      setConversation((prev) => {
-        const updatedHistory = [...prev.conversationHistory];
+          const alreadyExistingStateThought = lastHistoryItem.response.thoughts.find(
+            (t) => t.id_ === thought.id_
+          );
 
-        updatedHistory[updatedHistory.length - 1].response.processedChunks = [...formattedChunks];
-        return {
-          ...prev,
-          conversationHistory: updatedHistory,
-        };
-      });
+          if (alreadyExistingStateThought) {
+            alreadyExistingStateThought.in_progress = thought.in_progress;
+            alreadyExistingStateThought.message += thought.message;
+
+            formatAiMessage(alreadyExistingStateThought.message).then(
+              (res) => (alreadyExistingStateThought.processedChunks = res)
+            );
+
+            if (thought.in_progress === false) {
+              alreadyExistingStateThought.in_progress = false;
+            }
+          } else {
+            if (thought.message.trim() !== '') {
+              lastHistoryItem.response.thoughts = [...lastHistoryItem.response.thoughts, thought];
+            }
+          }
+
+          return {
+            ...prev,
+            conversationHistory: [...prev.conversationHistory],
+          };
+        });
+      } else {
+        generatedText = value.generated_chunk;
+
+        if (generatedText.trim() !== '') {
+          _accumulator.push(generatedText);
+        }
+
+        if ((chatStream.match(/```/g) || []).length % 2 === 1) {
+          chatStream += '```'; // append a closing ```
+        }
+
+        const formattedChunks = await formatAiMessage(chatStream);
+
+        setConversation((prev) => {
+          const updatedHistory = [...prev.conversationHistory];
+          const historyLength = updatedHistory.length;
+          const lastHistoryItem = updatedHistory[historyLength - 1];
+
+          if (!lastHistoryItem.response.inProgress) {
+            lastHistoryItem.response.inProgress = true;
+          }
+
+          lastHistoryItem.response.processedChunks = [...formattedChunks];
+
+          return {
+            ...prev,
+            conversationHistory: updatedHistory,
+          };
+        });
+      }
 
       return _accumulator;
     },
-    onStart: () => {},
+    onStart: () => {
+      setIsRequestLoading(true);
+    },
     onFinish: (data) => {
       console.log('finish', data);
     },
     onError: (error) => console.log('error', error),
   });
 
-  React.useEffect(() => {
-    setConversation(conversation);
-  }, [conversation]);
-
   const handleSendMessage = React.useCallback(
     (text: string) => {
       setConversation((prev) => {
@@ -126,8 +255,55 @@ export const Chat = ({
 
   const isEmpty =
     !_conversation?.conversationHistory || _conversation?.conversationHistory.length === 0;
+
   const theme = useTheme();
 
+  React.useEffect(() => {
+    setConversation(conversation);
+  }, [conversation]);
+
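+  // Chooses how an assistant response is rendered: loading dots while waiting for output,
+  // the formatted markdown chunks for a plain answer, or ChatThoughts when tool thoughts exist.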
+  const renderChatResponse = React.useCallback(
+    (chatResponse: ChatItemResponse, isLast: boolean) => {
+      const responseType = chatResponse.thoughts ? 'multiple' : 'single';
+
+      if (isRequestLoading && isLast) {
+        return (
+          <Stack direction="row" alignItems="center">
+            <StyledLoadingDot />
+            <StyledLoadingDot />
+            <StyledLoadingDot />
+          </Stack>
+        );
+      }
+
+      switch (responseType) {
+        case 'single':
+          return chatResponse.inProgress ? (
+            <Stack direction="row" alignItems="center">
+              <StyledLoadingDot />
+              <StyledLoadingDot />
+              <StyledLoadingDot />
+            </Stack>
+          ) : (
+            chatResponse.processedChunks.map((segment) => (
+              <React.Fragment key={segment.id}>
+                {segment.isCode ? (
+                  <code dangerouslySetInnerHTML={{ __html: sanitizeHtml(segment?.text) }} />
+                ) : (
+                  <span dangerouslySetInnerHTML={{ __html: sanitizeHtml(segment?.text) }} />
+                )}
+              </React.Fragment>
+            ))
+          );
+        case 'multiple':
+          return <ChatThoughts thoughts={chatResponse.thoughts} />;
+        default:
+          return null;
+      }
+    },
+    [isRequestLoading]
+  );
+
   return (
     <>
       <StyledMessageList>
@@ -143,7 +319,8 @@ export const Chat = ({
         >
          {!isEmpty ? (
            <Stack spacing={2} width="100%">
-              {_conversation.conversationHistory.map((chat) => {
+              {_conversation.conversationHistory.map((chat, idx) => {
+                const isLast = idx === _conversation.conversationHistory.length - 1;
                return (
                  <Stack spacing={2} alignItems="center" key={chat.id}>
                    <Message
@@ -154,29 +331,7 @@ export const Chat = ({
                    <Message
                      entityRole={CHAT_ENTITY.ASSISTANT}
                      createdAt={chat.createdAt}
-                      content={
-                        chat.response.processedChunks.length === 0 ? (
-                          <Stack direction="row" alignItems="center">
-                            <StyledLoadingDot />
-                            <StyledLoadingDot />
-                            <StyledLoadingDot />
-                          </Stack>
-                        ) : (
-                          chat.response.processedChunks.map((segment) => (
-                            <React.Fragment key={segment.id}>
-                              {segment.isCode ? (
-                                <code
-                                  dangerouslySetInnerHTML={{ __html: sanitizeHtml(segment?.text) }}
-                                />
-                              ) : (
-                                <span
-                                  dangerouslySetInnerHTML={{ __html: sanitizeHtml(segment?.text) }}
-                                />
-                              )}
-                            </React.Fragment>
-                          ))
-                        )
-                      }
+                      content={renderChatResponse(chat.response, isLast)}
                    />
                  </Stack>
                );