The agent is created using Microsoft AutoGen and Ollama. The agent pulls weather data from the Open-Meteo website API and returns the data in JSON format. The AI agent then summarizes the data and produces a short summary of tomorrow's weather.
References
Shekhar Agrawal; Srinivasa Sunil Chippada; Rathish Mohan. Ultimate Agentic AI with AutoGen for Enterprise Automation: Design, Build, And Deploy Enterprise-Grade AI Agents Using LLMs and AutoGen To Power Intelligent, ... Enterprise Automation (English Edition). Orange Education Pvt Ltd, AVA™. Kindle Edition.
The Python code, generated by Grok, is shown below:
import autogen
import requests
from datetime import datetime, timedelta
import json
# ====================== LLM CONFIG (your Ollama setup) ======================
# One OpenAI-compatible endpoint: a local Ollama server.
config_list = [
    dict(
        model="llama3.2",  # swap in whatever model you have pulled (e.g. qwen2.5, phi4)
        base_url="http://localhost:11434/v1",
        api_key="ollama",  # Ollama ignores the key, but the client requires one
    )
]

# Shared LLM settings handed to every agent below.
llm_config = dict(
    config_list=config_list,
    seed=42,          # fixed seed so AutoGen's response cache is reproducible
    temperature=0.7,
)
# ====================== Create Agents ======================
# The summarizer: receives raw forecast JSON and writes the human-readable recap.
assistant = autogen.AssistantAgent(
    name="Weather_Assistant",
    system_message="""You are a helpful weather analyst.
When given weather data (in JSON), extract and clearly summarize tomorrow's weather.
Include: date, max/min temperature, weather condition, precipitation chance, and wind if available.
Keep the response natural and easy to read.""",
    llm_config=llm_config,
)
# The driver side of the chat: fully automated, never prompts a human,
# and caps the back-and-forth at two auto-replies.
user_proxy = autogen.UserProxyAgent(
    name="User",
    human_input_mode="NEVER",
    max_consecutive_auto_reply=2,
    code_execution_config=dict(
        work_dir="coding",   # scratch directory for any generated code
        use_docker=False,    # run locally; no container sandbox
    ),
)
# ====================== Function to get tomorrow's weather ======================
def get_tomorrow_weather(city: str = "Kuala Lumpur") -> str:
    """Fetch tomorrow's daily forecast for *city* via Open-Meteo (free, no key).

    Args:
        city: Human-readable place name to geocode. Defaults to "Kuala Lumpur".

    Returns:
        A pretty-printed JSON string with tomorrow's condition, temperature
        range, precipitation probability and max wind speed, or a
        human-readable "❌ ..." error string on any failure (this function
        is deliberately best-effort and never raises).
    """
    try:
        # Step 1: resolve the city name to coordinates. Using params= lets
        # requests URL-encode names with spaces ("Kuala Lumpur") correctly,
        # which the previous f-string URL did not guarantee.
        geo_response = requests.get(
            "https://geocoding-api.open-meteo.com/v1/search",
            params={"name": city, "count": 1, "language": "en", "format": "json"},
            timeout=10,
        )
        geo_response.raise_for_status()  # fail fast on HTTP errors instead of parsing junk
        results = geo_response.json().get("results")
        if not results:
            return f"❌ Could not find location: {city}"
        location = results[0]

        # Step 2: fetch a 2-day daily forecast; index 0 is today, index 1 tomorrow.
        weather_response = requests.get(
            "https://api.open-meteo.com/v1/forecast",
            params={
                "latitude": location["latitude"],
                "longitude": location["longitude"],
                "daily": (
                    "weather_code,temperature_2m_max,temperature_2m_min,"
                    "precipitation_probability_max,wind_speed_10m_max"
                ),
                "timezone": "auto",
                "forecast_days": 2,
            },
            timeout=10,
        )
        weather_response.raise_for_status()
        daily = weather_response.json()["daily"]
        idx = 1  # tomorrow

        # Subset of WMO weather interpretation codes -> short description.
        code_desc = {
            0: "Clear sky", 1: "Mainly clear", 2: "Partly cloudy", 3: "Overcast",
            45: "Fog", 48: "Depositing rime fog",
            51: "Light drizzle", 53: "Moderate drizzle", 55: "Dense drizzle",
            61: "Slight rain", 63: "Moderate rain", 65: "Heavy rain",
            71: "Slight snow", 73: "Moderate snow", 75: "Heavy snow",
            80: "Slight rain showers", 81: "Moderate rain showers", 82: "Violent rain showers",
            95: "Thunderstorm", 96: "Thunderstorm with slight hail", 99: "Thunderstorm with heavy hail",
        }.get(daily["weather_code"][idx], "Unknown")

        result = {
            "city": city,
            "date": daily["time"][idx],
            "condition": code_desc,
            "temperature_max": daily["temperature_2m_max"][idx],
            "temperature_min": daily["temperature_2m_min"][idx],
            "precipitation_probability": daily["precipitation_probability_max"][idx],
            "wind_speed_max": daily["wind_speed_10m_max"][idx],
            "units": {"temp": "°C", "wind": "km/h", "precip": "%"},
        }
        # ensure_ascii=False keeps "°C" readable instead of "\u00b0C".
        return json.dumps(result, indent=2, ensure_ascii=False)
    except Exception as e:
        # Best-effort contract: callers just display the string, so report, don't raise.
        return f"❌ Error fetching weather: {str(e)}"
# ====================== Start the conversation ======================
print("🌤️ Fetching tomorrow's weather using AutoGen + Ollama...\n")
# First, get raw weather data using the function
raw_data = get_tomorrow_weather("Kuala Lumpur") # Change city here if you want
# Let the assistant summarize it nicely
user_proxy.initiate_chat(
assistant,
message=f"""Here is the raw weather data for tomorrow in JSON format:
{raw_data}
Please summarize tomorrow's weather in a friendly, natural way."""
)
# Optional: You can also run it for your current location by changing the city
# Example: get_tomorrow_weather("Singapore") or "London"
No comments:
Post a Comment