// This is an automatically generated code sample.
// To make this code sample work in your Oracle Cloud tenancy,
// please replace the values for any parameters whose current values do not fit
// your use case (such as resource IDs, strings containing ‘EXAMPLE’ or ‘unique_id’, and
// boolean, number, and enum parameters with values not fitting your use case).

package main

import (
	"context"
	"fmt"

	"github.com/oracle/oci-go-sdk/v65/common"
	"github.com/oracle/oci-go-sdk/v65/example/helpers"
	"github.com/oracle/oci-go-sdk/v65/generativeaiinference"
)

// ExampleChat demonstrates calling the OCI Generative AI Inference Chat API.
// It authenticates with the DEFAULT profile from the local OCI configuration
// file, builds a ChatRequest populated with example values, sends it, and
// prints the response.
func ExampleChat() {
	// Create a default authentication provider that uses the DEFAULT
	// profile in the configuration file.
	// Refer to <see href="https://docs.cloud.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm#SDK_and_CLI_Configuration_File">the public documentation</see> on how to prepare a configuration file.
	client, err := generativeaiinference.NewGenerativeAiInferenceClientWithConfigurationProvider(common.DefaultConfigProvider())
	helpers.FatalIfError(err)

	// Free-form values; the request fields take *interface{}, so the example
	// supplies plain placeholder strings.
	var logitBias interface{} = "EXAMPLE-logitBias-Value"
	var metadata interface{} = "EXAMPLE-metadata-Value"
	var parameters interface{} = "EXAMPLE-parameters-Value"

	// The conversation so far: a single tool message carrying text content.
	messages := []generativeaiinference.Message{
		generativeaiinference.ToolMessage{
			Content:    []generativeaiinference.ChatContent{generativeaiinference.TextContent{Text: common.String("EXAMPLE-text-Value")}},
			ToolCallId: common.String("ocid1.test.oc1..<unique_ID>EXAMPLE-toolCallId-Value"),
		},
	}

	// Tool definitions the model is allowed to call.
	tools := []generativeaiinference.ToolDefinition{
		generativeaiinference.FunctionDefinition{
			Name:        common.String("EXAMPLE-name-Value"),
			Parameters:  &parameters,
			Description: common.String("EXAMPLE-description-Value"),
		},
	}

	// Generation parameters for a generic chat request. Replace the example
	// values with ones that fit your use case.
	chatRequest := generativeaiinference.GenericChatRequest{
		ReasoningEffort:     generativeaiinference.GenericChatRequestReasoningEffortLow,
		ToolChoice:          generativeaiinference.ToolChoiceAuto{},
		TopK:                common.Int(0),
		Verbosity:           generativeaiinference.GenericChatRequestVerbosityHigh,
		IsStream:            common.Bool(false),
		IsParallelToolCalls: common.Bool(true),
		MaxTokens:           common.Int(181),
		Messages:            messages,
		NumGenerations:      common.Int(4),
		Prediction:          generativeaiinference.StaticContent{Content: []generativeaiinference.TextContent{{Text: common.String("EXAMPLE-text-Value")}}},
		Temperature:         common.Float64(0.028240442),
		TopP:                common.Float64(0.45642453),
		IsEcho:              common.Bool(true),
		FrequencyPenalty:    common.Float64(1.1442062),
		LogProbs:            common.Int(943),
		LogitBias:           &logitBias,
		Seed:                common.Int(790),
		StreamOptions:       &generativeaiinference.StreamOptions{IsIncludeUsage: common.Bool(true)},
		Metadata:            &metadata,
		PresencePenalty:     common.Float64(1.4775215),
		ResponseFormat:      generativeaiinference.JsonObjectResponseFormat{},
		Stop:                []string{"EXAMPLE--Value"},
		Tools:               tools,
		MaxCompletionTokens: common.Int(833),
	}

	// Assemble the top-level request: which model serves it, which
	// compartment it is billed to, and the chat payload itself.
	req := generativeaiinference.ChatRequest{
		ChatDetails: generativeaiinference.ChatDetails{
			ChatRequest:   chatRequest,
			CompartmentId: common.String("ocid1.test.oc1..<unique_ID>EXAMPLE-compartmentId-Value"),
			ServingMode:   generativeaiinference.OnDemandServingMode{ModelId: common.String("ocid1.test.oc1..<unique_ID>EXAMPLE-modelId-Value")},
		},
		OpcRequestId:  common.String("HFVWUGKMIX2HFEXJSLB7<unique_ID>"),
		OpcRetryToken: common.String("EXAMPLE-opcRetryToken-Value"),
	}

	// Send the request using the service client.
	resp, err := client.Chat(context.Background(), req)
	helpers.FatalIfError(err)

	// Retrieve value from the response.
	fmt.Println(resp)
}