# This is an automatically generated code sample.
# To make this code sample work in your Oracle Cloud tenancy, replace the
# values of any parameters that do not fit your use case (such as resource
# IDs, strings containing "EXAMPLE" or "unique_id", and boolean, number, and
# enum parameters).
import oci
# Create a default config using the DEFAULT profile in the default location.
# Refer to
# https://docs.cloud.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm#SDK_and_CLI_Configuration_File
# for more information.
config = oci.config.from_file()
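# Optionally validate the loaded config before creating any clients; this
# raises an error if required entries are missing or malformed.
oci.config.validate_config(config)
# A hedged alternative, assuming a non-default config location and profile
# name (both values below are placeholders, adjust or remove for your
# environment):
# config = oci.config.from_file(
#     file_location="~/.oci/config", profile_name="MY_PROFILE")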
# Initialize the service client with the default config file
generative_ai_inference_client = oci.generative_ai_inference.GenerativeAiInferenceClient(
config)
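# A hedged variation: the Generative AI inference service is regional, so you
# may need to point the client at your region's endpoint and can attach a
# retry strategy. The us-chicago-1 endpoint below is an assumption; replace it
# with your region's endpoint as needed.
# generative_ai_inference_client = oci.generative_ai_inference.GenerativeAiInferenceClient(
#     config,
#     service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com",
#     retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY)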
# Send the request to the service. Some parameters below are optional; see the
# API documentation for details.
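# The Cohere inference parameters below are placeholder values; roughly:
# max_tokens caps the length of each generation, temperature and top_k/top_p
# control sampling randomness, frequency_penalty/presence_penalty discourage
# repetition, stop_sequences end a generation early, return_likelihoods asks
# for per-token likelihoods, and truncate controls how an over-long prompt is
# cut. Consult the API reference for the exact accepted range of each value.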
generate_text_response = generative_ai_inference_client.generate_text(
generate_text_details=oci.generative_ai_inference.models.GenerateTextDetails(
compartment_id="ocid1.test.oc1..<unique_ID>EXAMPLE-compartmentId-Value",
serving_mode=oci.generative_ai_inference.models.OnDemandServingMode(
serving_type="ON_DEMAND",
model_id="ocid1.test.oc1..<unique_ID>EXAMPLE-modelId-Value"),
inference_request=oci.generative_ai_inference.models.CohereLlmInferenceRequest(
runtime_type="COHERE",
prompt="EXAMPLE-prompt-Value",
is_stream=False,
num_generations=4,
is_echo=False,
max_tokens=926,
temperature=1.4242727,
top_k=129,
top_p=0.72326833,
frequency_penalty=0.86628914,
presence_penalty=0.5237303,
stop_sequences=["EXAMPLE--Value"],
return_likelihoods="NONE",
truncate="START")),
opc_retry_token="EXAMPLE-opcRetryToken-Value",
opc_request_id="MPOFOLOJJOC0UCNBDQOC<unique_ID>")
# Get the data from the response
print(generate_text_response.data)
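# A hedged sketch of extracting only the generated text; the attribute path
# assumes the payload is a CohereLlmInferenceResponse (matching the COHERE
# runtime_type above) and may differ for other models or SDK versions.
for generated in generate_text_response.data.inference_response.generated_texts:
    print(generated.text)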