# This is an automatically generated code sample.
# To make this code sample work in your Oracle Cloud tenancy,
# please replace the values of any parameters that do not fit your use case —
# such as resource IDs, strings containing 'EXAMPLE' or 'unique_id', and
# boolean, number, and enum parameter values.

import oci

# Create a default config using DEFAULT profile in default location
# Refer to
# https://docs.cloud.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm#SDK_and_CLI_Configuration_File
# for more info
# Load the SDK configuration from the DEFAULT profile in the default
# location (~/.oci/config). See
# https://docs.cloud.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm#SDK_and_CLI_Configuration_File
# for details on the configuration file format.
oci_config = oci.config.from_file()

# Service client for the Generative AI Inference API, built from the
# default configuration loaded above.
client = oci.generative_ai_inference.GenerativeAiInferenceClient(oci_config)

# Short alias for the generated model classes used below.
models = oci.generative_ai_inference.models

# On-demand serving against a specific model OCID.
serving_mode = models.OnDemandServingMode(
    serving_type="ON_DEMAND",
    model_id="ocid1.test.oc1..<unique_ID>EXAMPLE-modelId-Value",
)

# A single TOOL-role message whose content is one document part,
# correlated back to an earlier tool call by its id.
tool_message = models.CohereToolMessageV2(
    role="TOOL",
    content=[
        models.CohereDocumentContentV2(
            type="DOCUMENT",
            document="EXAMPLE-document-Value",
        ),
    ],
    tool_call_id="ocid1.test.oc1..<unique_ID>EXAMPLE-toolCallId-Value",
)

# One callable tool (a function) the model may invoke.
example_tool = models.CohereToolV2(
    type="FUNCTION",
    function=models.Function(
        name="EXAMPLE-name-Value",
        parameters="EXAMPLE-parameters-Value",
        description="EXAMPLE-description-Value",
    ),
)

# Cohere V2 chat request. Many of these parameters are optional — see the
# API documentation for which ones your use case actually needs.
chat_request = models.CohereChatRequestV2(
    api_format="COHEREV2",
    messages=[tool_message],
    documents=["EXAMPLE-documents-Value"],
    citation_options=models.CitationOptionsV2(mode="FAST"),
    tools_choice="REQUIRED",
    tools=[example_tool],
    is_strict_tools_enabled=True,
    is_log_probs_enabled=False,
    thinking=models.CohereThinkingV2(
        type="DISABLED",
        token_budget=203,
    ),
    response_format=models.CohereResponseTextFormat(type="TEXT"),
    is_search_queries_only=False,
    stream_options=models.StreamOptions(is_include_usage=False),
    is_stream=True,
    max_tokens=887,
    temperature=0.13406122,
    top_k=36,
    top_p=0.30801296,
    frequency_penalty=0.90215766,
    presence_penalty=0.7918264,
    seed=937,
    stop_sequences=["EXAMPLE--Value"],
    priority=404,
    is_raw_prompting=True,
    safety_mode="OFF",
)

# Send the chat request to the service.
chat_response = client.chat(
    chat_details=models.ChatDetails(
        compartment_id="ocid1.test.oc1..<unique_ID>EXAMPLE-compartmentId-Value",
        serving_mode=serving_mode,
        chat_request=chat_request,
    ),
    opc_retry_token="EXAMPLE-opcRetryToken-Value",
    opc_request_id="NOYUHSDU8YXMQ1XEONMW<unique_ID>",
)

# Print the response payload.
print(chat_response.data)