
Commit ea6584b

pass multiple feedback URLs (#353)
1 parent 3851333 commit ea6584b

File tree

8 files changed: +371 additions, -285 deletions

.github/workflows/eval.yml

Lines changed: 2 additions & 2 deletions
@@ -26,14 +26,14 @@ jobs:
         with:
           python-version: "3.11"
           poetry-version: "1.7.1"
-          cache-key: lint
+          cache-key: eval
 
       - name: Install dependencies
         run: poetry install --with dev
 
       - name: Evaluate
         env:
-          LANGCHAIN_API_KEY: ${{ secrets.LANGCHAIN_API_KEY }}
+          LANGSMITH_API_KEY: ${{ secrets.LANGSMITH_API_KEY }}
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
           ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
           WEAVIATE_URL: ${{ secrets.WEAVIATE_URL }}

backend/graph.py

Lines changed: 38 additions & 6 deletions
@@ -1,4 +1,5 @@
 import os
+from collections import defaultdict
 from typing import Annotated, Literal, Sequence, TypedDict
 
 import weaviate
@@ -25,6 +26,7 @@
 from langchain_openai import ChatOpenAI
 from langchain_weaviate import WeaviateVectorStore
 from langgraph.graph import END, StateGraph, add_messages
+from langsmith import Client as LangsmithClient
 
 from backend.constants import WEAVIATE_DOCS_INDEX_NAME
 from backend.ingest import get_embeddings_model
@@ -105,6 +107,8 @@
 COHERE_MODEL_KEY = "cohere_command"
 GROQ_LLAMA_3_MODEL_KEY = "groq_llama_3"
 
+FEEDBACK_KEYS = ["user_score", "user_click"]
+
 
 def update_documents(
     _: list[Document], right: list[Document] | list[dict]
@@ -127,6 +131,7 @@ class AgentState(TypedDict):
     messages: Annotated[list[BaseMessage], add_messages]
     # for convenience in evaluations
     answer: str
+    feedback_urls: dict[str, list[str]]
 
 
 gpt_3_5 = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0, streaming=True)
@@ -260,8 +265,27 @@ def get_chat_history(messages: Sequence[BaseMessage]) -> Sequence[BaseMessage]:
     return chat_history
 
 
+def get_feedback_urls(config: RunnableConfig) -> dict[str, list[str]]:
+    ls_client = LangsmithClient()
+    run_id = config["configurable"].get("run_id")
+    if run_id is None:
+        return {}
+
+    tokens = ls_client.create_presigned_feedback_tokens(run_id, FEEDBACK_KEYS)
+    key_to_token_urls = defaultdict(list)
+
+    for token_idx, token in enumerate(tokens):
+        key_idx = token_idx % len(FEEDBACK_KEYS)
+        key = FEEDBACK_KEYS[key_idx]
+        key_to_token_urls[key].append(token.url)
+    return key_to_token_urls
+
+
 def synthesize_response(
-    state: AgentState, model: LanguageModelLike, prompt_template: str
+    state: AgentState,
+    config: RunnableConfig,
+    model: LanguageModelLike,
+    prompt_template: str,
 ) -> AgentState:
     prompt = ChatPromptTemplate.from_messages(
         [
@@ -282,16 +306,24 @@ def synthesize_response(
             ),
         }
     )
-    return {"messages": [synthesized_response], "answer": synthesized_response.content}
+    # finally, add feedback URLs so that users can leave feedback
+    feedback_urls = get_feedback_urls(config)
+    return {
+        "messages": [synthesized_response],
+        "answer": synthesized_response.content,
+        "feedback_urls": feedback_urls,
+    }
 
 
-def synthesize_response_default(state: AgentState) -> AgentState:
-    return synthesize_response(state, llm, RESPONSE_TEMPLATE)
+def synthesize_response_default(
+    state: AgentState, config: RunnableConfig
+) -> AgentState:
+    return synthesize_response(state, config, llm, RESPONSE_TEMPLATE)
 
 
-def synthesize_response_cohere(state: AgentState) -> AgentState:
+def synthesize_response_cohere(state: AgentState, config: RunnableConfig) -> AgentState:
     model = llm.bind(documents=state["documents"])
-    return synthesize_response(state, model, COHERE_RESPONSE_TEMPLATE)
+    return synthesize_response(state, config, model, COHERE_RESPONSE_TEMPLATE)
 
 
 def route_to_response_synthesizer(
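
For illustration only (not part of the commit): a minimal, runnable sketch of how the modulo grouping inside get_feedback_urls spreads presigned token URLs across FEEDBACK_KEYS. The StubToken class and the example.com URLs are placeholders standing in for the tokens that LangsmithClient.create_presigned_feedback_tokens returns, each of which exposes a .url attribute as used in the hunk above.

# Hypothetical stand-in for the presigned feedback tokens; only .url is needed here.
from collections import defaultdict
from dataclasses import dataclass

FEEDBACK_KEYS = ["user_score", "user_click"]

@dataclass
class StubToken:
    url: str

# Suppose four tokens come back, cycling through the two feedback keys in order.
tokens = [StubToken(url=f"https://example.com/feedback/{i}") for i in range(4)]

key_to_token_urls = defaultdict(list)
for token_idx, token in enumerate(tokens):
    key = FEEDBACK_KEYS[token_idx % len(FEEDBACK_KEYS)]
    key_to_token_urls[key].append(token.url)

print(dict(key_to_token_urls))
# {'user_score': ['https://example.com/feedback/0', 'https://example.com/feedback/2'],
#  'user_click': ['https://example.com/feedback/1', 'https://example.com/feedback/3']}

This dict[str, list[str]] shape is what synthesize_response now stores under feedback_urls in the graph state, and it corresponds to the Record<string, string[]> type the frontend consumes in the hunks below.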

frontend/app/components/ChatMessageBubble.tsx

Lines changed: 21 additions & 16 deletions
@@ -130,14 +130,15 @@ const createAnswerElements = (
 
 export function ChatMessageBubble(props: {
   message: Message;
-  feedbackUrls?: Record<string, string>;
+  feedbackUrls?: Record<string, string[]>;
   aiEmoji?: string;
   isMostRecent: boolean;
   messageCompleted: boolean;
 }) {
   const { type, content } = props.message;
-  const responseFeedbackUrl = props.feedbackUrls?.[RESPONSE_FEEDBACK_KEY];
-  const sourceFeedbackUrl = props.feedbackUrls?.[SOURCE_CLICK_KEY];
+  const responseFeedbackUrls =
+    props.feedbackUrls?.[RESPONSE_FEEDBACK_KEY] ?? [];
+  const sourceFeedbackUrls = props.feedbackUrls?.[SOURCE_CLICK_KEY] ?? [];
   const isUser = type === "human";
   const [isLoading, setIsLoading] = useState(false);
   const [feedback, setFeedback] = useState<Feedback | null>(null);
@@ -161,23 +162,27 @@ export function ChatMessageBubble(props: {
   };
 
   const sendUserFeedback = async (score: number) => {
-    if (responseFeedbackUrl === undefined) {
+    if (responseFeedbackUrls.length === 0) {
       return;
     }
     if (isLoading) {
       return;
     }
     setIsLoading(true);
     try {
-      const data = await sendFeedback({
-        feedbackUrl: responseFeedbackUrl,
-        score,
-        feedbackId: feedback?.feedback_id,
-        comment,
-        isExplicit: true,
-      });
-      if (data.code === 200) {
-        setFeedback({ score, feedback_id: data.feedbackId });
+      const feedbackResponses = [];
+      for (const feedbackUrl of responseFeedbackUrls) {
+        const data = await sendFeedback({
+          feedbackUrl,
+          score,
+          feedbackId: feedback?.feedback_id,
+          comment,
+          isExplicit: true,
+        });
+        feedbackResponses.push(data);
+      }
+      if (feedbackResponses.every((response) => response.code === 200)) {
+        setFeedback({ score, feedback_id: feedbackResponses[0].feedbackId });
         score == 1 ? animateButton("upButton") : animateButton("downButton");
         if (comment) {
           setComment("");
@@ -272,7 +277,7 @@ export function ChatMessageBubble(props: {
               filteredSources.map(() => false),
             )
           }
-          feedbackUrl={sourceFeedbackUrl}
+          feedbackUrls={sourceFeedbackUrls}
         />
       </Box>
     ))}
@@ -306,7 +311,7 @@ export function ChatMessageBubble(props: {
         variant="outline"
         colorScheme={feedback === null ? "green" : "gray"}
         onClick={() => {
-          if (feedback === null && responseFeedbackUrl) {
+          if (feedback === null && responseFeedbackUrls) {
            sendUserFeedback(1);
            animateButton("upButton");
          } else {
@@ -322,7 +327,7 @@ export function ChatMessageBubble(props: {
         variant="outline"
         colorScheme={feedback === null ? "red" : "gray"}
         onClick={() => {
-          if (feedback === null && responseFeedbackUrl) {
+          if (feedback === null && responseFeedbackUrls) {
            sendUserFeedback(0);
            animateButton("downButton");
          } else {

frontend/app/components/SourceBubble.tsx

Lines changed: 3 additions & 3 deletions
@@ -8,19 +8,19 @@ export function SourceBubble({
   highlighted,
   onMouseEnter,
   onMouseLeave,
-  feedbackUrl,
+  feedbackUrls,
 }: {
   source: Source;
   highlighted: boolean;
   onMouseEnter: () => any;
   onMouseLeave: () => any;
-  feedbackUrl?: string;
+  feedbackUrls: string[];
 }) {
   return (
     <Card
       onClick={async () => {
         window.open(source.url, "_blank");
-        if (feedbackUrl) {
+        for (const feedbackUrl of feedbackUrls) {
           await sendFeedback({
             feedbackUrl,
             value: source.url,

frontend/app/hooks/useStreamState.ts

Lines changed: 3 additions & 12 deletions
@@ -10,7 +10,7 @@ export interface StreamState {
   status: "inflight" | "error" | "done";
   messages?: Message[];
   documents?: Document[];
-  feedbackUrls?: Record<string, string>;
+  feedbackUrls?: Record<string, string[]>;
 }
 
 export interface StreamStateProps {
@@ -68,8 +68,7 @@ export function useStreamState(): StreamStateProps {
       input: messages == null ? null : { messages },
       config,
       streamMode: ["messages", "values"],
-      signal: controller.signal,
-      feedbackKeys: [RESPONSE_FEEDBACK_KEY, SOURCE_CLICK_KEY],
+      signal: controller.signal
     });
 
     for await (const chunk of stream) {
@@ -94,6 +93,7 @@ export function useStreamState(): StreamStateProps {
           ...streamStates[threadId],
           status: "inflight",
           documents: data["documents"],
+          feedbackUrls: data["feedback_urls"]
         },
       }));
     } else if (chunk.event === "error") {
@@ -104,15 +104,6 @@ export function useStreamState(): StreamStateProps {
           status: "error",
         },
       }));
-    } else if (chunk.event === "feedback") {
-      setStreamStates((streamStates) => ({
-        ...streamStates,
-        [threadId]: {
-          ...streamStates[threadId],
-          feedbackUrls: chunk.data,
-          status: "inflight",
-        },
-      }));
     } else if (chunk.event === "end") {
       setStreamStates((streamStates) => ({
         ...streamStates,

frontend/app/hooks/useThreadList.ts

Lines changed: 18 additions & 12 deletions
@@ -76,19 +76,25 @@ export function useThreadList(userId: string): ThreadListProps {
     setOffset((prevOffset) => prevOffset + PAGE_SIZE);
   }, [areThreadsLoading]);
 
-  const createThread = useCallback(async (name: string) => {
-    const saved = await client.threads.create({ metadata: { name, userId } });
-    dispatch({ type: "add", threads: [saved] });
-    return saved;
-  }, [userId]);
+  const createThread = useCallback(
+    async (name: string) => {
+      const saved = await client.threads.create({ metadata: { name, userId } });
+      dispatch({ type: "add", threads: [saved] });
+      return saved;
+    },
+    [userId],
+  );
 
-  const updateThread = useCallback(async (thread_id: string, name: string) => {
-    const saved = await client.threads.update(thread_id, {
-      metadata: { name, userId },
-    });
-    dispatch({ type: "add", threads: [saved] });
-    return saved;
-  }, [userId]);
+  const updateThread = useCallback(
+    async (thread_id: string, name: string) => {
+      const saved = await client.threads.update(thread_id, {
+        metadata: { name, userId },
+      });
+      dispatch({ type: "add", threads: [saved] });
+      return saved;
+    },
+    [userId],
+  );
 
   const deleteThread = useCallback(
     async (thread_id: string) => {
