# This is an automatically generated code sample.
# To make this code sample work in your Oracle Cloud tenancy, replace the
# values of any parameters that do not fit your use case, such as resource
# OCIDs, strings containing "EXAMPLE" or "unique_id", and boolean, number,
# and enum values.

import oci

# Load a default config using the DEFAULT profile in the default location
# (~/.oci/config). Refer to
# https://docs.cloud.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm#SDK_and_CLI_Configuration_File
# for more info.
config = oci.config.from_file()
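
# Optionally validate the loaded config up front; oci.config.validate_config
# raises an exception if required keys (user, fingerprint, key_file, tenancy,
# region) are missing or malformed. This check is a convenience and is not
# required by the chat call below.
oci.config.validate_config(config)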


# Initialize the service client with the default config
generative_ai_inference_client = oci.generative_ai_inference.GenerativeAiInferenceClient(
    config)
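
# If the Generative AI inference service is not available in your config's
# home region, or you want SDK-managed retries, the client constructor also
# accepts a regional service_endpoint and a retry_strategy. The endpoint
# below is only an example region; substitute your own:
# generative_ai_inference_client = oci.generative_ai_inference.GenerativeAiInferenceClient(
#     config,
#     service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com",
#     retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY)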


# Send the request to the service. Some parameters are optional; see the API
# doc for more info.
chat_response = generative_ai_inference_client.chat(
    chat_details=oci.generative_ai_inference.models.ChatDetails(
        compartment_id="ocid1.test.oc1..<unique_ID>EXAMPLE-compartmentId-Value",
        serving_mode=oci.generative_ai_inference.models.OnDemandServingMode(
            serving_type="ON_DEMAND",
            model_id="ocid1.test.oc1..<unique_ID>EXAMPLE-modelId-Value"),
        chat_request=oci.generative_ai_inference.models.GenericChatRequest(
            api_format="GENERIC",
            messages=[
                oci.generative_ai_inference.models.AssistantMessage(
                    role="ASSISTANT",
                    content=[
                        oci.generative_ai_inference.models.ImageContent(
                            type="IMAGE",
                            image_url=oci.generative_ai_inference.models.ImageUrl(
                                url="EXAMPLE-url-Value",
                                detail="HIGH"))],
                    name="EXAMPLE-name-Value",
                    refusal="EXAMPLE-refusal-Value",
                    tool_calls=[
                            oci.generative_ai_inference.models.FunctionCall(
                                type="FUNCTION",
                                id="ocid1.test.oc1..<unique_ID>EXAMPLE-id-Value",
                                name="EXAMPLE-name-Value",
                                arguments="EXAMPLE-arguments-Value")],
                    annotations=[
                            oci.generative_ai_inference.models.Annotation(
                                type="EXAMPLE-type-Value",
                                url_citation=oci.generative_ai_inference.models.UrlCitation(
                                    start_index=162,
                                    end_index=607,
                                    title="EXAMPLE-title-Value",
                                    url="EXAMPLE-url-Value"))])],
            reasoning_effort="MEDIUM",
            verbosity="MEDIUM",
            metadata="EXAMPLE-metadata-Value",
            is_stream=False,
            stream_options=oci.generative_ai_inference.models.StreamOptions(
                is_include_usage=True),
            num_generations=2,
            seed=773,
            is_echo=False,
            top_k=242,
            top_p=0.61209404,
            temperature=0.77909863,
            frequency_penalty=1.4083462,
            presence_penalty=1.2640847,
            stop=["EXAMPLE--Value"],
            log_probs=667,
            max_tokens=793,
            max_completion_tokens=705,
            logit_bias="EXAMPLE-logitBias-Value",
            prediction=oci.generative_ai_inference.models.StaticContent(
                type="CONTENT",
                content=[
                    oci.generative_ai_inference.models.TextContent(
                        type="TEXT",
                        text="EXAMPLE-text-Value")]),
            response_format=oci.generative_ai_inference.models.TextResponseFormat(
                type="TEXT"),
            tool_choice=oci.generative_ai_inference.models.ToolChoiceFunction(
                type="FUNCTION",
                name="EXAMPLE-name-Value"),
            is_parallel_tool_calls=True,
            tools=[
                oci.generative_ai_inference.models.FunctionDefinition(
                    type="FUNCTION",
                    name="EXAMPLE-name-Value",
                    description="EXAMPLE-description-Value",
                    parameters="EXAMPLE-parameters-Value")],
            web_search_options=oci.generative_ai_inference.models.WebSearchOptions(
                search_context_size="HIGH",
                user_location=oci.generative_ai_inference.models.ApproximateLocation(
                    city="EXAMPLE-city-Value",
                    region="EXAMPLE-region-Value",
                    country="zz",
                    timezone="EXAMPLE-timezone-Value")),
            service_tier="AUTO")),
    opc_retry_token="EXAMPLE-opcRetryToken-Value",
    opc_request_id="LOD4WPSSSGSPTXLRU5JH<unique_ID>")
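
# The call above raises oci.exceptions.ServiceError for non-2xx responses;
# its status, code, and message attributes identify the failure, e.g.:
# try:
#     chat_response = generative_ai_inference_client.chat(chat_details=...)
# except oci.exceptions.ServiceError as e:
#     print(e.status, e.code, e.message)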

# Get the data from the response and print it
print(chat_response.data)
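
# For the GENERIC api_format, the generated text is typically nested under
# chat_response (a GenericChatResponse) -> choices -> message -> content.
# The exact shape can vary by model and SDK version, so treat this access
# path as a sketch:
# first_choice = chat_response.data.chat_response.choices[0]
# print(first_choice.message.content[0].text)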