@@ -1,12 +1,6 @@
 "use client";
 
-import React, {
-  Fragment,
-  useCallback,
-  useEffect,
-  useRef,
-  useState,
-} from "react";
+import React, { useCallback, useEffect, useRef, useState } from "react";
 import { toast } from "react-toastify";
 import { useRouter, useSearchParams } from "next/navigation";
 import { Renderer, marked } from "marked";
@@ -26,6 +20,7 @@ import {
 import { ArrowDownIcon, ArrowUpIcon, SmallCloseIcon } from "@chakra-ui/icons";
 import { Select, Link } from "@chakra-ui/react";
 import { Client } from "@langchain/langgraph-sdk";
+import { v4 as uuidv4 } from "uuid";
 
 import { EmptyState } from "./EmptyState";
 import { ChatMessageBubble } from "./ChatMessageBubble";
@@ -37,6 +32,7 @@ import { useThreadList } from "../hooks/useThreadList";
 import { useThreadMessages } from "../hooks/useThreadMessages";
 import { useLangGraphClient } from "../hooks/useLangGraphClient";
 import { useStreamState } from "../hooks/useStreamState";
+import { useLocalStorage } from "../hooks/useLocalStorage";
 
 const MODEL_TYPES = [
   "openai_gpt_3_5_turbo",
@@ -66,6 +62,19 @@ const getAssistantId = async (client: Client) => {
 export function ChatWindow() {
   const router = useRouter();
   const searchParams = useSearchParams();
+
+  const messageContainerRef = useRef<HTMLDivElement | null>(null);
+  const [input, setInput] = useState("");
+  const [isLoading, setIsLoading] = useState(false);
+  const [llm, setLlm] = useState(
+    searchParams.get("llm") ?? "openai_gpt_3_5_turbo",
+  );
+  const [llmIsLoading, setLlmIsLoading] = useState(true);
+  const [assistantId, setAssistantId] = useState<string>("");
+  const [userId, setUserId] = useLocalStorage("userId", null);
+
+  const client = useLangGraphClient();
+
   const { currentThread } = useThread();
   const {
     threads,
@@ -74,23 +83,13 @@ export function ChatWindow() {
     deleteThread,
     loadMoreThreads,
     areThreadsLoading,
-  } = useThreadList();
+  } = useThreadList(userId);
   const { streamState, startStream, stopStream } = useStreamState();
   const { refreshMessages, messages, setMessages, next } = useThreadMessages(
     currentThread?.thread_id ?? null,
     streamState,
     stopStream,
   );
-  const messageContainerRef = useRef<HTMLDivElement | null>(null);
-  const [input, setInput] = useState("");
-  const [isLoading, setIsLoading] = useState(false);
-  const [llm, setLlm] = useState(
-    searchParams.get("llm") ?? "openai_gpt_3_5_turbo",
-  );
-  const [llmIsLoading, setLlmIsLoading] = useState(true);
-  const [assistantId, setAssistantId] = useState<string>("");
-
-  const client = useLangGraphClient();
 
   const setLanggraphInfo = async () => {
     try {
@@ -101,8 +100,16 @@ export function ChatWindow() {
     }
   };
 
+  const setUserInfo = () => {
+    if (userId == null) {
+      const userId = uuidv4();
+      setUserId(userId);
+    }
+  };
+
   useEffect(() => {
     setLlm(searchParams.get("llm") ?? defaultLlmValue);
+    setUserInfo();
     setLanggraphInfo();
     setLlmIsLoading(false);
   }, []);