"""Run an OCI Generative AI agent that routes user requests to tools.

The agent is wired with three tools (compartment lookup, model lookup, and
text generation) and driven through a fixed three-step conversation:
fetch the tenancy OCID, resolve a model OCID, then generate text.
"""
from oci.addons.adk import Agent, AgentClient
from compartments import Compartments
from get_config import get_config, auth_type, profile, region, agent_endpoint_id
from oci_ai_text_generation import chat
from oci_ai_models import models
import oci

# NOTE(review): these run at import time (config load + client construction);
# `generative_ai_client` is not used in this file — presumably consumed by a
# sibling module importing it from here. Verify before removing.
config = get_config()
generative_ai_client = oci.generative_ai.GenerativeAiClient(config)


def main():
    """Set up the agent and walk it through the scripted conversation.

    Side effects: calls the OCI Generative AI Agents service (network I/O)
    and prints each response to stdout. Raises whatever the OCI SDK raises
    on auth/network failure.
    """
    client = AgentClient(
        auth_type=auth_type,
        profile=profile,
        region=region,
    )

    # Renamed from `Instructions`: PEP 8 snake_case for a local variable.
    instructions = """
    Route all requests to tool calling
    """

    # Create a local agent object with the client, instructions, and tools.
    # You also need the agent endpoint id. To obtain the OCID, follow Step 1.
    agent = Agent(
        client=client,
        agent_endpoint_id=agent_endpoint_id,
        instructions=instructions,
        tools=[Compartments(), models(), chat()]
    )
    agent.setup()

    # Run the agent. You can embed this method in your webapp, slack bot, etc.
    # You invoke the run() when you need to handle your user's request.
    # `user_input` (was `input`) avoids shadowing the builtin input().

    # Do not change this input
    user_input = """
    Fetch the tenancy id using the get tenancy function.
    """
    response = agent.run(user_input)
    response.pretty_print()

    # Do not change this input
    user_input = """
    Analyze if the user input is text generation, summarization, (question and answer) or code.
    then set the model name as 'meta.llama-3.3-70b-instruct' and
    If the user input is regarding coding then set the model name as 'meta.llama-4-maverick-17b-128e-instruct-fp8'.
    Pass the model name, compartment id and fetch the model id using the model_id function.
    """
    # Reuse the session id so the second turn continues the same conversation.
    response = agent.run(user_input, session_id=response.session_id)
    response.pretty_print()

    # Change this input alone as per your requirement.
    user_input = """
    Write an essay about cloud technology in not more than 200 words
    """
    response = agent.run(user_input, session_id=response.session_id)
    response.pretty_print()


if __name__ == "__main__":
    main()