Wednesday, June 19, 2024

AI Agents Monitoring and Analytics with AgentOps and AutoGen

This video is a step-by-step tutorial on installing and integrating AgentOps with AutoGen to monitor and test AI agents and to replay session analytics.



Code:

conda create -n agentops python=3.11 -y && conda activate agentops

pip install pyautogen agentops


export AGENTOPS_API_KEY=""

export OPENAI_API_KEY=""


# Standard library first, then third-party packages (PEP 8 import grouping);
# the original placed `import os` mid-script after it was already needed.
import os

import agentops
import autogen
from autogen import ConversableAgent, UserProxyAgent, config_list_from_json

# Start an AgentOps session; the tag lets runs be filtered in the dashboard.
# Requires AGENTOPS_API_KEY in the environment.
agentops.init(tags=["fahdmirza"])

# AutoGen LLM configuration: the OpenAI key is read from the environment
# rather than hard-coded.
llm_config = {
    "config_list": [{"model": "gpt-4", "api_key": os.environ["OPENAI_API_KEY"]}],
}

# An LLM-backed agent plus a user proxy with local code execution disabled.
assistant = autogen.ConversableAgent("agent", llm_config=llm_config)
user_proxy = UserProxyAgent("user", code_execution_config=False)

# Close the AgentOps session and mark it as successful.
agentops.end_session("Success")

================================

import agentops
import autogen
from typing import Annotated, Literal
from autogen import ConversableAgent, register_function
import os

# Begin a tagged AgentOps session so the tool-use demo below is recorded
# under this tag in the AgentOps dashboard.
agentops.start_session(tags=["agentictools3"])

Operator = Literal["+", "-", "*", "/"]


def calculator(a: int, b: int, operator: Annotated[Operator, "operator"]) -> int:
    if operator == "+":
        return a + b
    elif operator == "-":
        return a - b
    elif operator == "*":
        return a * b
    elif operator == "/":
        return int(a / b)
    else:
        raise ValueError("Invalid operator")

# AutoGen LLM configuration; the OpenAI key comes from the environment.
llm_config = {
    "config_list": [{"model": "gpt-4", "api_key": os.environ["OPENAI_API_KEY"]}],
}


# Create the agent that uses the LLM.
assistant = ConversableAgent(
    name="Assistant",
    system_message="You are a helpful AI assistant. "
    "You can help with simple calculations. "
    "Return 'TERMINATE' when the task is done.",
    llm_config=llm_config,
)

# The user proxy agent is used for interacting with the assistant agent
# and executes tool calls. It never prompts a human and ends the chat
# once the assistant's message contains TERMINATE.
user_proxy = ConversableAgent(
    name="User",
    llm_config=False,
    is_termination_msg=lambda msg: msg.get("content") is not None and "TERMINATE" in msg["content"],
    human_input_mode="NEVER",
)

# Register the calculator with both agents in a single call. The original
# script additionally called register_for_llm/register_for_execution first,
# which duplicated this registration and triggered AutoGen's
# "function is being overridden" warning; only the documented helper is kept.
register_function(
    calculator,
    caller=assistant,  # The assistant agent can suggest calls to the calculator.
    executor=user_proxy,  # The user proxy agent can execute the calculator calls.
    name="calculator",  # By default, the function name is used as the tool name.
    description="A simple calculator",  # A description of the tool.
)

# Let the assistant start the conversation; it ends on the TERMINATE message.
user_proxy.initiate_chat(assistant, message="What is (1423 - 123) / 3 + (32 + 23) * 5?")

# Close the AgentOps session, marking it as successful.
agentops.end_session("Success")

No comments: