Skip to content

Commit f9bcc5c

Browse files
authored
Merge pull request #14 from oracle-devrel/no-model-listing
fix model ID: read from environment variable instead of dropdown selector
2 parents af85259 + 5f3e8f4 commit f9bcc5c

File tree

4 files changed

+45
-37
lines changed

4 files changed

+45
-37
lines changed

.gitignore

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@
66
# Icon must end with two \r
77
Icon
88

9-
109
# Thumbnails
1110
._*
1211

@@ -75,4 +74,5 @@ build/
7574
!**/src/test/**/build/
7675
.idea
7776
bin/
78-
dist/
77+
dist/
78+
application-local.yaml

backend/src/main/java/dev/victormartin/oci/genai/backend/backend/PromptController.java

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
import org.slf4j.Logger;
99
import org.slf4j.LoggerFactory;
1010
import org.springframework.beans.factory.annotation.Autowired;
11+
import org.springframework.beans.factory.annotation.Value;
1112
import org.springframework.http.HttpStatus;
1213
import org.springframework.messaging.handler.annotation.MessageMapping;
1314
import org.springframework.messaging.simp.annotation.SendToUser;
@@ -20,6 +21,9 @@
2021
public class PromptController {
2122
Logger logger = LoggerFactory.getLogger(PromptController.class);
2223

24+
@Value("${genai.model_id}")
25+
private String hardcodedModelId;
26+
2327
@Autowired
2428
private final InteractionRepository interactionRepository;
2529

@@ -35,17 +39,18 @@ public PromptController(InteractionRepository interactionRepository, OCIGenAISer
3539
@SendToUser("/queue/answer")
3640
public Answer handlePrompt(Prompt prompt) {
3741
String promptEscaped = HtmlUtils.htmlEscape(prompt.content());
38-
logger.info("Prompt " + promptEscaped + " received, on model " + prompt.modelId());
42+
logger.info("Prompt " + promptEscaped + " received, on model " + prompt.modelId() + " but using hardcoded one" +
43+
" " + hardcodedModelId);
3944
Interaction interaction = new Interaction();
4045
interaction.setConversationId(prompt.conversationId());
4146
interaction.setDatetimeRequest(new Date());
42-
interaction.setModelId(prompt.modelId());
47+
interaction.setModelId(hardcodedModelId);
4348
interaction.setRequest(promptEscaped);
4449
Interaction saved = interactionRepository.save(interaction);
4550
try {
4651
if (prompt.content() == null || prompt.content().length()< 1) { throw new InvalidPromptRequest(); }
47-
if (prompt.modelId() == null || !prompt.modelId().startsWith("ocid1.generativeaimodel.")) { throw new InvalidPromptRequest(); }
48-
String responseFromGenAI = genAI.request(promptEscaped, prompt.modelId());
52+
// if (prompt.modelId() == null || !prompt.modelId().startsWith("ocid1.generativeaimodel.")) { throw new InvalidPromptRequest(); }
53+
String responseFromGenAI = genAI.request(promptEscaped, hardcodedModelId);
4954
saved.setDatetimeResponse(new Date());
5055
saved.setResponse(responseFromGenAI);
5156
interactionRepository.save(saved);

scripts/lib/oci.mjs

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -369,13 +369,12 @@ export async function getLatestGenAIModels(
369369
vendor: vendor,
370370
});
371371

372-
const filteredByCapatility = activeCohereModels.filter(
373-
({ capabilities }) => {
374-
if (capabilities.length !== 1) return false;
375-
if (capabilities[0] !== capability) return false;
376-
return true;
377-
}
378-
);
372+
const filteredByCapatility = activeCohereModels.filter((model) => {
373+
const { capabilities } = model;
374+
if (capabilities.length !== 1) return false;
375+
if (capabilities[0] !== capability) return false;
376+
return true;
377+
});
379378

380379
const latestVersion = max(filteredByCapatility, (item) =>
381380
parseFloat(item.version)

web/src/Chat.jsx

Lines changed: 28 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -27,28 +27,28 @@ function Chat() {
2727
const [updateModels, setUpdateModels] = useState(true);
2828
const { subscribe, unsubscribe, send, isConnected } = useStomp();
2929

30-
useEffect(() => {
31-
const fecthModels = async () => {
32-
try {
33-
const response = await fetch("/api/genai/models");
34-
const data = await response.json();
35-
setModels(
36-
data.filter(
37-
({ capabilities }) =>
38-
capabilities.length === 1 &&
39-
capabilities.includes("TEXT_GENERATION")
40-
)
41-
);
42-
} catch (error) {
43-
setErrorMessage("Error fetching Generative AI Models from Backend");
44-
}
45-
};
30+
// useEffect(() => {
31+
// const fecthModels = async () => {
32+
// try {
33+
// const response = await fetch("/api/genai/models");
34+
// const data = await response.json();
35+
// setModels(
36+
// data.filter(
37+
// ({ capabilities }) =>
38+
// capabilities.length === 1 &&
39+
// capabilities.includes("TEXT_GENERATION")
40+
// )
41+
// );
42+
// } catch (error) {
43+
// setErrorMessage("Error fetching Generative AI Models from Backend");
44+
// }
45+
// };
4646

47-
if (updateModels) {
48-
setUpdateModels(false);
49-
fecthModels();
50-
}
51-
}, [updateModels]);
47+
// if (updateModels) {
48+
// setUpdateModels(false);
49+
// fecthModels();
50+
// }
51+
// }, [updateModels]);
5252

5353
useEffect(() => {
5454
let timeoutId;
@@ -90,7 +90,11 @@ function Chat() {
9090

9191
useEffect(() => {
9292
if (isConnected && promptValue.length) {
93-
send("/genai/prompt", { conversationId, content: promptValue, modelId });
93+
send("/genai/prompt", {
94+
conversationId,
95+
content: promptValue,
96+
modelId: "notapply",
97+
});
9498
setWaiting(true);
9599
setPromptValue("");
96100
}
@@ -99,7 +103,7 @@ function Chat() {
99103

100104
return (
101105
<Box>
102-
<FormControl fullWidth>
106+
{/* <FormControl fullWidth>
103107
<InputLabel id="model-label">Model</InputLabel>
104108
<Select
105109
labelId="model-label"
@@ -116,7 +120,7 @@ function Chat() {
116120
))}
117121
</Select>
118122
</FormControl>
119-
<Divider style={{ margin: "1rem" }} />
123+
<Divider style={{ margin: "1rem" }} /> */}
120124
<Conversation>{conversation}</Conversation>
121125
{waiting && <CircularProgress style={{ padding: "1rem" }} />}
122126
<PromptInput

0 commit comments

Comments
 (0)