Commit b5c79e5

propagate user ID

1 parent 6e543bc

3 files changed (+55, -21 lines)

frontend/app/components/ChatWindow.tsx

Lines changed: 25 additions & 18 deletions
@@ -1,12 +1,6 @@
 "use client";
 
-import React, {
-  Fragment,
-  useCallback,
-  useEffect,
-  useRef,
-  useState,
-} from "react";
+import React, { useCallback, useEffect, useRef, useState } from "react";
 import { toast } from "react-toastify";
 import { useRouter, useSearchParams } from "next/navigation";
 import { Renderer, marked } from "marked";
@@ -26,6 +20,7 @@ import {
 import { ArrowDownIcon, ArrowUpIcon, SmallCloseIcon } from "@chakra-ui/icons";
 import { Select, Link } from "@chakra-ui/react";
 import { Client } from "@langchain/langgraph-sdk";
+import { v4 as uuidv4 } from "uuid";
 
 import { EmptyState } from "./EmptyState";
 import { ChatMessageBubble } from "./ChatMessageBubble";
@@ -37,6 +32,7 @@ import { useThreadList } from "../hooks/useThreadList";
 import { useThreadMessages } from "../hooks/useThreadMessages";
 import { useLangGraphClient } from "../hooks/useLangGraphClient";
 import { useStreamState } from "../hooks/useStreamState";
+import { useLocalStorage } from "../hooks/useLocalStorage";
 
 const MODEL_TYPES = [
   "openai_gpt_3_5_turbo",
@@ -66,6 +62,19 @@ const getAssistantId = async (client: Client) => {
 export function ChatWindow() {
   const router = useRouter();
   const searchParams = useSearchParams();
+
+  const messageContainerRef = useRef<HTMLDivElement | null>(null);
+  const [input, setInput] = useState("");
+  const [isLoading, setIsLoading] = useState(false);
+  const [llm, setLlm] = useState(
+    searchParams.get("llm") ?? "openai_gpt_3_5_turbo",
+  );
+  const [llmIsLoading, setLlmIsLoading] = useState(true);
+  const [assistantId, setAssistantId] = useState<string>("");
+  const [userId, setUserId] = useLocalStorage("userId", null);
+
+  const client = useLangGraphClient();
+
   const { currentThread } = useThread();
   const {
     threads,
@@ -74,23 +83,13 @@ export function ChatWindow() {
     deleteThread,
     loadMoreThreads,
     areThreadsLoading,
-  } = useThreadList();
+  } = useThreadList(userId);
   const { streamState, startStream, stopStream } = useStreamState();
   const { refreshMessages, messages, setMessages, next } = useThreadMessages(
     currentThread?.thread_id ?? null,
     streamState,
     stopStream,
   );
-  const messageContainerRef = useRef<HTMLDivElement | null>(null);
-  const [input, setInput] = useState("");
-  const [isLoading, setIsLoading] = useState(false);
-  const [llm, setLlm] = useState(
-    searchParams.get("llm") ?? "openai_gpt_3_5_turbo",
-  );
-  const [llmIsLoading, setLlmIsLoading] = useState(true);
-  const [assistantId, setAssistantId] = useState<string>("");
-
-  const client = useLangGraphClient();
 
   const setLanggraphInfo = async () => {
     try {
@@ -101,8 +100,16 @@
     }
   };
 
+  const setUserInfo = () => {
+    if (userId == null) {
+      const userId = uuidv4();
+      setUserId(userId);
+    }
+  };
+
   useEffect(() => {
     setLlm(searchParams.get("llm") ?? defaultLlmValue);
+    setUserInfo();
     setLanggraphInfo();
     setLlmIsLoading(false);
   }, []);
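Taken together, the ChatWindow changes reduce to the wiring sketched below: read a persisted userId from localStorage, generate one with uuidv4 on first visit, and pass it to useThreadList so the thread list is scoped to that browser. This is a condensed, illustrative sketch rather than code from the commit; unrelated state and hooks are omitted and setUserInfo is slightly simplified.

// Condensed sketch of the new ChatWindow wiring (illustrative only).
const [userId, setUserId] = useLocalStorage("userId", null);

const setUserInfo = () => {
  // Create a per-browser ID once and persist it under the "userId" key.
  if (userId == null) {
    setUserId(uuidv4());
  }
};

useEffect(() => {
  setUserInfo();
}, []);

// The same ID scopes thread listing to this browser's user.
const { threads, areThreadsLoading } = useThreadList(userId);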

frontend/app/hooks/useLocalStorage.ts

Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
+import { useState } from "react";
+
+type StoredValue = string | string[] | Record<string, string> | null;
+
+export function useLocalStorage(key: string, initialValue: StoredValue) {
+  const [storedValue, setStoredValue] = useState(() => {
+    try {
+      const item = window.localStorage.getItem(key);
+      return item ? JSON.parse(item) : initialValue;
+    } catch (error) {
+      console.log(error);
+      return initialValue;
+    }
+  });
+  const setValue = (value: StoredValue) => {
+    try {
+      setStoredValue(value);
+      window.localStorage.setItem(key, JSON.stringify(value));
+    } catch (error) {
+      console.log(error);
+    }
+  };
+  return [storedValue, setValue];
+}
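For context, a minimal consumer of the new hook might look like the sketch below. The usePersistentUserId name and the "demoUserId" key are hypothetical and not part of this commit; the generate-once pattern mirrors setUserInfo in ChatWindow.tsx, and the sketch assumes it runs on the client, since the hook reads window.localStorage directly.

// Hypothetical consumer of useLocalStorage (not part of this commit).
import { useEffect } from "react";
import { v4 as uuidv4 } from "uuid";

import { useLocalStorage } from "./useLocalStorage";

export function usePersistentUserId() {
  // Reads "demoUserId" from localStorage on first render, falling back to null.
  const [userId, setUserId] = useLocalStorage("demoUserId", null);

  useEffect(() => {
    // Create and persist an ID only once per browser.
    if (userId == null) {
      setUserId(uuidv4());
    }
  }, [userId]);

  return userId as string | null;
}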

frontend/app/hooks/useThreadList.ts

Lines changed: 6 additions & 3 deletions
@@ -37,7 +37,7 @@ function threadsReducer(
   }
 }
 
-export function useThreadList(): ThreadListProps {
+export function useThreadList(userId: string): ThreadListProps {
   const [threads, dispatch] = useReducer(threadsReducer, null);
   const [offset, setOffset] = useState(0);
   const [areThreadsLoading, setAreThreadsLoading] = useState(false);
@@ -49,6 +49,9 @@ export function useThreadList(): ThreadListProps {
     const fetchedThreads = await client.threads.search({
       offset,
       limit: PAGE_SIZE,
+      metadata: {
+        userId,
+      },
     });
     if (offset === 0) {
       dispatch({ type: "set", threads: fetchedThreads });
@@ -69,14 +72,14 @@
   }, [areThreadsLoading]);
 
   const createThread = useCallback(async (name: string) => {
-    const saved = await client.threads.create({ metadata: { name } });
+    const saved = await client.threads.create({ metadata: { name, userId } });
     dispatch({ type: "add", threads: [saved] });
     return saved;
   }, []);
 
   const updateThread = useCallback(async (thread_id: string, name: string) => {
     const saved = await client.threads.upsert(thread_id, {
-      metadata: { name },
+      metadata: { name, userId },
     });
     dispatch({ type: "add", threads: [saved] });
     return saved;
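The hook now relies on thread metadata carrying the userId both ways: createThread and updateThread write metadata { name, userId }, and the thread search passes the same userId filter to client.threads.search. A standalone sketch of that round trip follows; the listThreadsForUser helper and the literal limit are illustrative, not part of the commit.

// Illustrative helper showing the metadata round trip (not part of this commit).
import { Client } from "@langchain/langgraph-sdk";

export async function listThreadsForUser(client: Client, userId: string) {
  // Threads created with metadata { name, userId } are returned when the
  // search filter carries the same userId.
  return client.threads.search({
    offset: 0,
    limit: 20, // placeholder; the hook uses its own PAGE_SIZE constant
    metadata: { userId },
  });
}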
