Microsoft Agent Framework is an SDK + runtime that combines Semantic Kernel and AutoGen into one powerful foundation for building and scaling AI agents that can reason, plan, and act autonomously.
Check out how I paired it with SambaNova’s Llama-4 Maverick to build a Weather Agent.
The future of AI agents is definitely here, and it’s open!
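Here’s the full example. One setup note before running it: you’ll need a SambaNova API key, plus the framework itself installed from PyPI (at the time of writing the Python package appears to ship as `agent-framework`, e.g. `pip install agent-framework --pre`; check the official repo if that name has changed).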
```python
"""
SambaNova with OpenAI Chat Client Example

This sample demonstrates using SambaNova models through the OpenAI Chat Client by
configuring the base URL to point to SambaNova's API.
"""

import asyncio
from random import randint
from typing import Annotated

from agent_framework.openai import OpenAIChatClient

# 🔑 Hardcode your API key and model here
SAMBANOVA_API_KEY = "SAMBANOVA_API_KEY"  # 👈 Replace with your real API key
MODEL_ID = "Llama-4-Maverick-17B-128E-Instruct"
def get_weather(
    location: Annotated[str, "The location to get the weather for."],
) -> str:
    """Get the weather for a given location."""
    # The Annotated metadata doubles as the tool's parameter description,
    # which the model sees when deciding how to call this function.
    conditions = ["sunny", "cloudy", "rainy", "stormy"]
    return f"The weather in {location} is {conditions[randint(0, 3)]} with a high of {randint(10, 30)}°C."
async def non_streaming_example() -> None:
    """Example of a non-streaming response (get the complete result at once)."""
    print("=== Non-streaming Response Example ===")
    # Point the OpenAI-compatible client at SambaNova's endpoint and wrap it in an agent.
    agent = OpenAIChatClient(
        api_key=SAMBANOVA_API_KEY,
        base_url="https://api.sambanova.ai/v1",
        model_id=MODEL_ID,
    ).create_agent(
        name="WeatherAgent",
        instructions="You are a helpful weather agent.",
        tools=get_weather,
    )

    query = "What's the weather like in Seattle?"
    print(f"User: {query}")
    result = await agent.run(query)
    print(f"Result: {result}\n")
async def streaming_example() -> None:
    """Example of a streaming response (get results as they are generated)."""
    print("=== Streaming Response Example ===")
    agent = OpenAIChatClient(
        api_key=SAMBANOVA_API_KEY,
        base_url="https://api.sambanova.ai/v1",
        model_id=MODEL_ID,
    ).create_agent(
        name="WeatherAgent",
        instructions="You are a helpful weather agent.",
        tools=get_weather,
    )

    query = "What's the weather like in Portland?"
    print(f"User: {query}")
    print("Agent: ", end="", flush=True)
    # Print each chunk of text as soon as it arrives.
    async for chunk in agent.run_stream(query):
        if chunk.text:
            print(chunk.text, end="", flush=True)
    print("\n")
async def main() -> None:
    print("=== SambaNova with Microsoft Agent Framework ===")
    await non_streaming_example()
    await streaming_example()


if __name__ == "__main__":
    asyncio.run(main())
```
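Hardcoding the key is fine for a quick demo, but anything you share or commit should read it from the environment instead. Here’s a minimal sketch of that swap, reusing the same `OpenAIChatClient` parameters as above (only the key lookup is new; the script name in the comment is just a placeholder):

```python
import os

from agent_framework.openai import OpenAIChatClient

# Read the SambaNova key from the environment instead of hardcoding it, e.g.:
#   SAMBANOVA_API_KEY=<your key> python weather_agent.py
api_key = os.environ.get("SAMBANOVA_API_KEY")
if not api_key:
    raise RuntimeError("Set the SAMBANOVA_API_KEY environment variable first.")

client = OpenAIChatClient(
    api_key=api_key,
    base_url="https://api.sambanova.ai/v1",
    model_id="Llama-4-Maverick-17B-128E-Instruct",
)
```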