# This is an automatically generated code sample.
# To make this code sample work in your Oracle Cloud tenancy, replace the value
# of any parameter that does not fit your use case, such as resource IDs,
# strings containing 'EXAMPLE' or 'unique_ID', and boolean, number, and enum
# placeholders.
require 'oci'
# Create a default config using DEFAULT profile in default location
# Refer to https://docs.cloud.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm#SDK_and_CLI_Configuration_File for more info
config = OCI::ConfigFileLoader.load_config
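# If your credentials are in a non-default file or profile, the loader accepts
# explicit overrides; the path and profile name below are illustrative only:
# config =
#   OCI::ConfigFileLoader.load_config(
#     config_file_location: '~/.oci/config',
#     profile_name: 'DEFAULT'
#   )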
# Initialize service client with default config file
generative_ai_inference_client =
  OCI::GenerativeAiInference::GenerativeAiInferenceClient.new(config: config)
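# The client targets the region named in the config profile; to call a different
# region, pass it at construction (the region identifier below is an illustrative
# assumption, not taken from this sample):
# generative_ai_inference_client =
#   OCI::GenerativeAiInference::GenerativeAiInferenceClient.new(
#     config: config, region: 'us-chicago-1'
#   )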
# Send the request to the service. Some parameters are optional; see the API doc
# for details. A note on the is_stream setting follows the call below.
chat_response =
  generative_ai_inference_client.chat(
    OCI::GenerativeAiInference::Models::ChatDetails.new(
      compartment_id: 'ocid1.test.oc1..<unique_ID>EXAMPLE-compartmentId-Value',
      serving_mode:
        OCI::GenerativeAiInference::Models::DedicatedServingMode.new(
          serving_type: 'DEDICATED',
          endpoint_id: 'ocid1.test.oc1..<unique_ID>EXAMPLE-endpointId-Value'
        ),
      chat_request:
        OCI::GenerativeAiInference::Models::CohereChatRequest.new(
          api_format: 'COHERE',
          message: 'EXAMPLE-message-Value',
          chat_history: [
            OCI::GenerativeAiInference::Models::CohereChatBotMessage.new(
              role: 'CHATBOT',
              message: 'EXAMPLE-message-Value',
              tool_calls: [
                OCI::GenerativeAiInference::Models::CohereToolCall.new(
                  name: 'EXAMPLE-name-Value',
                  parameters: 'EXAMPLE-parameters-Value'
                )
              ]
            )
          ],
          documents: %w[EXAMPLE-documents-Value],
          response_format:
            OCI::GenerativeAiInference::Models::CohereResponseTextFormat.new(
              type: 'TEXT'
            ),
          is_search_queries_only: false,
          preamble_override: 'EXAMPLE-preambleOverride-Value',
          is_stream: true,
          stream_options:
            OCI::GenerativeAiInference::Models::StreamOptions.new(
              is_include_usage: true
            ),
          max_tokens: 771,
          max_input_tokens: 40,
          temperature: 0.5061691,
          top_k: 407,
          top_p: 0.15991622,
          prompt_truncation: 'OFF',
          frequency_penalty: 0.15873528,
          presence_penalty: 0.18363398,
          seed: 818,
          is_echo: false,
          tools: [
            OCI::GenerativeAiInference::Models::CohereTool.new(
              name: 'EXAMPLE-name-Value',
              description: 'EXAMPLE-description-Value',
              parameter_definitions: {
                'EXAMPLE_KEY_fqjy8' => {
                  'type' => 'EXAMPLE-type-Value',
                  'description' => 'EXAMPLE-description-Value',
                  'isRequired' => false
                }
              }
            )
          ],
          tool_results: [
            OCI::GenerativeAiInference::Models::CohereToolResult.new(
              call:
                OCI::GenerativeAiInference::Models::CohereToolCall.new(
                  name: 'EXAMPLE-name-Value',
                  parameters: 'EXAMPLE-parameters-Value'
                ),
              outputs: %w[EXAMPLE-outputs-Value]
            )
          ],
          is_force_single_step: true,
          stop_sequences: %w[EXAMPLE--Value],
          is_raw_prompting: false,
          citation_quality: 'FAST',
          safety_mode: 'STRICT'
        )
    )
  )
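# Note on is_stream: setting it to true (as above) asks the service to stream the
# reply back as server-sent events. With is_stream set to false the call returns a
# single result, and chat_response.data is a ChatResult whose generated reply is
# exposed (per the current API model; verify against your SDK version) as
# chat_response.data.chat_response.text.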
# Get the data from the response
puts chat_response.data