package oracleai.services;

+import com.oracle.bmc.ClientConfiguration;
+import com.oracle.bmc.ConfigFileReader;
import com.oracle.bmc.Region;
import com.oracle.bmc.auth.AuthenticationDetailsProvider;
-import com.oracle.bmc.generativeai.GenerativeAiClient;
-import com.oracle.bmc.generativeai.model.GenerateTextDetails;
-import com.oracle.bmc.generativeai.model.GenerateTextResult;
-import com.oracle.bmc.generativeai.model.OnDemandServingMode;
-import com.oracle.bmc.generativeai.requests.GenerateTextRequest;
-import com.oracle.bmc.generativeai.responses.GenerateTextResponse;
+import com.oracle.bmc.auth.ConfigFileAuthenticationDetailsProvider;
+import com.oracle.bmc.retrier.RetryConfiguration;
+import com.oracle.bmc.generativeaiinference.GenerativeAiInferenceClient;
+import com.oracle.bmc.generativeaiinference.model.CohereLlmInferenceRequest;
+import com.oracle.bmc.generativeaiinference.model.GenerateTextDetails;
+import com.oracle.bmc.generativeaiinference.model.OnDemandServingMode;
+import com.oracle.bmc.generativeaiinference.requests.GenerateTextRequest;
+import com.oracle.bmc.generativeaiinference.responses.GenerateTextResponse;
+
import oracleai.AIApplication;

import java.util.Arrays;
import java.util.List;

public class OracleGenAI {

-
    public static String chat(String textcontent) throws Exception {
-        AuthenticationDetailsProvider provider = AuthProvider.getAuthenticationDetailsProvider();
-        //GenAI is only available in US_CHICAGO_1 for current beta, thus the override
-        GenerativeAiClient generativeAiClient = GenerativeAiClient.builder().region(Region.US_CHICAGO_1).build(provider);
-        List<String> prompts = Arrays.asList(textcontent);
+        return new OracleGenAI().doChat(textcontent);
+    }
+
+    public String doChat(String textcontent) throws Exception {
+        final GenerativeAiInferenceClient generativeAiInferenceClient =
+                new GenerativeAiInferenceClient(AuthProvider.getAuthenticationDetailsProvider());
+        // generativeAiInferenceClient.setEndpoint(ENDPOINT);
+        generativeAiInferenceClient.setRegion(Region.US_CHICAGO_1);
+        CohereLlmInferenceRequest cohereLlmInferenceRequest =
+                CohereLlmInferenceRequest.builder()
+                        .prompt(textcontent)
+                        .maxTokens(600)
+                        .temperature(0.75)
+                        .frequencyPenalty(1.0)
+                        .topP(0.7)
+                        .isStream(false) // SDK doesn't support streaming responses; the feature is under development
+                        .isEcho(true)
+                        .build();
        GenerateTextDetails generateTextDetails = GenerateTextDetails.builder()
                .servingMode(OnDemandServingMode.builder().modelId("cohere.command").build()) // "cohere.command-light" is also available to use
                // .servingMode(DedicatedServingMode.builder().endpointId("custom-model-endpoint").build()) // for a custom model from a Dedicated AI Cluster
                .compartmentId(AIApplication.COMPARTMENT_ID)
-                .prompts(prompts)
-                .maxTokens(300)
-                .temperature(0.75)
-                .frequencyPenalty(1.0)
-                .topP(0.7)
-                .isStream(false)
-                .isEcho(false)
+                .inferenceRequest(cohereLlmInferenceRequest)
                .build();
        GenerateTextRequest generateTextRequest = GenerateTextRequest.builder()
                .generateTextDetails(generateTextDetails)
                .build();
-        GenerateTextResponse generateTextResponse = generativeAiClient.generateText(generateTextRequest);
-        GenerateTextResult result = generateTextResponse.getGenerateTextResult();
-        if (result != null && result.getGeneratedTexts().size() > 0) {
-            String all_results = "";
-            for (List<com.oracle.bmc.generativeai.model.GeneratedText> list : result.getGeneratedTexts()) {
-                for (com.oracle.bmc.generativeai.model.GeneratedText text : list) {
-                    all_results = all_results + text.getText();
-                }
-            }
-            return all_results;
-        }
-        return "We could not find a result for your text. Try a different image.";
+        GenerateTextResponse generateTextResponse = generativeAiInferenceClient.generateText(generateTextRequest);
+        System.out.println(generateTextResponse.toString());
+        return generateTextResponse.toString();
    }

}
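
For orientation, below is a minimal caller-side sketch. The class OracleGenAIExample, the sample prompt, and the extractText helper are hypothetical and not part of this commit. The accessor chain getGenerateTextResult().getInferenceResponse() down to GeneratedText.getText() is an assumption based on the shape of the generativeaiinference response model (doChat above simply returns generateTextResponse.toString()); verify the method names against the oci-java-sdk version in use.

// Hypothetical usage sketch; not part of this commit.
package oracleai.services;

import com.oracle.bmc.generativeaiinference.model.CohereLlmInferenceResponse;
import com.oracle.bmc.generativeaiinference.model.GeneratedText;
import com.oracle.bmc.generativeaiinference.responses.GenerateTextResponse;

import java.util.stream.Collectors;

public class OracleGenAIExample {

    public static void main(String[] args) throws Exception {
        // chat() currently returns the whole GenerateTextResponse rendered via toString()
        String raw = OracleGenAI.chat("Tell me about Oracle Database in two sentences.");
        System.out.println(raw);
    }

    // Assumed response shape: GenerateTextResponse -> GenerateTextResult -> LlmInferenceResponse
    // (a CohereLlmInferenceResponse for "cohere.command") -> List<GeneratedText> -> getText().
    // These accessor names are assumptions to be checked against the installed SDK.
    static String extractText(GenerateTextResponse response) {
        CohereLlmInferenceResponse inference =
                (CohereLlmInferenceResponse) response.getGenerateTextResult().getInferenceResponse();
        return inference.getGeneratedTexts().stream()
                .map(GeneratedText::getText)
                .collect(Collectors.joining());
    }
}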