"""RainyTrek — Streamlit weather app.

Extracts city names from a natural-language prompt with an LLM
(Hugging Face Inference API), geocodes each city via Open-Meteo's
geocoding service, and renders a current-conditions + 7-day forecast
card per city.

NOTE(review): this file arrived with all HTML tags stripped out of the
``st.markdown`` strings; the markup below is a reconstruction — the
class names should be confirmed against ``style.css``.
"""

import subprocess
import sys


def install_dependencies() -> None:
    """Best-effort upgrade of runtime dependencies via pip.

    Uses ``sys.executable`` so packages land in the interpreter actually
    running this script (not whatever ``python`` happens to be on PATH),
    and a single pip invocation instead of one subprocess per package.
    Stdlib modules (json, os, subprocess) are never pip-installed.
    """
    subprocess.check_call(
        [
            sys.executable, "-m", "pip", "install", "--upgrade",
            "pip", "huggingface_hub", "streamlit", "requests",
        ]
    )


install_dependencies()

import json
import os
from datetime import datetime

import requests
import streamlit as st
from huggingface_hub import InferenceClient

# ── Page config ──────────────────────────────────────────────────────────────
st.set_page_config(
    page_title="Weather Forecast",
    page_icon="🌤️",
    layout="wide",
)


# ── Custom CSS ───────────────────────────────────────────────────────────────
def local_css(file_name: str) -> None:
    """Inject a local CSS file into the page.

    The contents must be wrapped in a ``<style>`` tag — otherwise
    ``unsafe_allow_html=True`` just renders the raw CSS as text.
    """
    with open(file_name, encoding="utf-8") as f:
        st.markdown(f"<style>{f.read()}</style>", unsafe_allow_html=True)


local_css("style.css")

st.title("RainyTrek")

# ── WMO weather code → emoji + label ─────────────────────────────────────────
WMO_CODES = {
    0: ("☀️", "Clear"),
    1: ("🌤️", "Mostly Clear"),
    2: ("⛅", "Partly Cloudy"),
    3: ("☁️", "Overcast"),
    45: ("🌫️", "Foggy"),
    48: ("🌫️", "Icy Fog"),
    51: ("🌦️", "Light Drizzle"),
    53: ("🌦️", "Drizzle"),
    55: ("🌧️", "Heavy Drizzle"),
    61: ("🌧️", "Light Rain"),
    63: ("🌧️", "Rain"),
    65: ("🌧️", "Heavy Rain"),
    71: ("🌨️", "Light Snow"),
    73: ("❄️", "Snow"),
    75: ("❄️", "Heavy Snow"),
    77: ("🌨️", "Snow Grains"),
    80: ("🌦️", "Rain Showers"),
    81: ("🌧️", "Heavy Showers"),
    82: ("⛈️", "Violent Showers"),
    85: ("🌨️", "Snow Showers"),
    86: ("🌨️", "Heavy Snow Showers"),
    95: ("⛈️", "Thunderstorm"),
    96: ("⛈️", "Thunderstorm + Hail"),
    99: ("⛈️", "Thunderstorm + Heavy Hail"),
}


def wmo_info(code) -> tuple[str, str]:
    """Return ``(emoji, label)`` for a WMO weather code, with a fallback."""
    return WMO_CODES.get(code, ("🌡️", "Unknown"))


# ── Extract cities via HuggingFace LLM ───────────────────────────────────────
def extract_cities_with_llm(user_prompt: str) -> list[str]:
    """Use HF Inference API to extract city names from a natural-language prompt.

    Returns a (possibly empty) list of city-name strings.  Raises on
    network/auth failures or if the model's reply is not valid JSON.
    """
    client = InferenceClient(
        model="meta-llama/Llama-3.3-70B-Instruct",
        # Token is read from the environment; set via the deployment's secrets.
        token=os.getenv("rainytrek010526001read"),
        provider="fireworks-ai",
    )
    system_prompt = (
        "You are a helpful assistant that extracts city names from user messages. "
        "Respond ONLY with a JSON array of city name strings. "
        "Example: [\"Paris\", \"Tokyo\", \"New York\"]. "
        "If no cities are mentioned, respond with []."
    )
    messages = [
        {"role": "system", "content": system_prompt},
        {
            "role": "user",
            "content": f"Extract all city names from this text:\n\n{user_prompt}",
        },
    ]
    response = client.chat_completion(messages=messages, max_tokens=256, temperature=0.1)
    raw = response.choices[0].message.content.strip()
    # Models sometimes wrap JSON in markdown fences despite instructions —
    # strip them before parsing.
    if raw.startswith("```"):
        raw = raw.strip("`").strip()
        if raw.startswith("json"):
            raw = raw[4:].strip()
    cities = json.loads(raw)
    return [c.strip() for c in cities if isinstance(c, str) and c.strip()]


# ── Geocoding via Open-Meteo ─────────────────────────────────────────────────
def geocode_city(city: str) -> dict | None:
    """Resolve a city name to coordinates via Open-Meteo's geocoding API.

    Returns a dict with name/country/lat/lon/timezone, or None when the
    city cannot be found.  Raises ``requests.HTTPError`` on bad status.
    """
    url = "https://geocoding-api.open-meteo.com/v1/search"
    r = requests.get(
        url,
        params={"name": city, "count": 1, "language": "en", "format": "json"},
        timeout=10,
    )
    r.raise_for_status()
    results = r.json().get("results")
    if not results:
        return None
    loc = results[0]
    return {
        "name": loc.get("name", city),
        "country": loc.get("country", ""),
        "lat": loc["latitude"],
        "lon": loc["longitude"],
        "timezone": loc.get("timezone", "auto"),
    }


# ── Fetch 7-day forecast from Open-Meteo ─────────────────────────────────────
def fetch_forecast(lat: float, lon: float, timezone: str) -> dict:
    """Fetch current weather plus a 7-day daily forecast from Open-Meteo."""
    url = "https://api.open-meteo.com/v1/forecast"
    params = {
        "latitude": lat,
        "longitude": lon,
        "daily": [
            "weathercode",
            "temperature_2m_max",
            "temperature_2m_min",
            "precipitation_sum",
            "windspeed_10m_max",
        ],
        "current_weather": True,
        "timezone": timezone,
        "forecast_days": 7,
    }
    r = requests.get(url, params=params, timeout=10)
    r.raise_for_status()
    return r.json()


# ── Render a city weather card ───────────────────────────────────────────────
def render_city_card(loc: dict, forecast: dict) -> None:
    """Render one city's current conditions and 7-day strip as an HTML card.

    NOTE(review): the original markup was lost (tags stripped in transit);
    the tag/class structure below is a reconstruction — confirm the class
    names against ``style.css``.
    """
    daily = forecast["daily"]
    current = forecast.get("current_weather", {})
    cur_temp = current.get("temperature", "—")
    cur_wind = current.get("windspeed", "—")
    cur_code = current.get("weathercode", 0)
    cur_icon, cur_label = wmo_info(cur_code)

    days_html = ""
    for i, date in enumerate(daily["time"]):
        weekday = datetime.strptime(date, "%Y-%m-%d").strftime("%a")
        icon, _ = wmo_info(daily["weathercode"][i])
        tmax = daily["temperature_2m_max"][i]
        tmin = daily["temperature_2m_min"][i]
        precip = daily["precipitation_sum"][i]
        days_html += (
            f'<div class="day">'
            f'<div class="weekday">{weekday}</div>'
            f'<div class="date">{date[5:]}</div>'
            f'<div class="icon">{icon}</div>'
            f'<div class="tmax">{tmax}°</div>'
            f'<div class="tmin">{tmin}°</div>'
            f'<div class="precip">💧 {precip}mm</div>'
            f'</div>'
        )

    st.markdown(
        f"""
<div class="city-card">
  <div class="city-header">{cur_icon} {loc['name']}, {loc['country']}</div>
  <div class="coords">{loc['lat']:.4f}°N {loc['lon']:.4f}°E</div>
  <div class="current">
    <span>Now {cur_temp}°C</span>
    <span>Wind {cur_wind} km/h</span>
    <span>{cur_label}</span>
  </div>
  <div class="days">{days_html}</div>
</div>
""",
        unsafe_allow_html=True,
    )


# ── Main UI ──────────────────────────────────────────────────────────────────
st.markdown('<div class="app-title">🌤 Weather Forecast</div>', unsafe_allow_html=True)
st.markdown(
    '<div class="app-subtitle">powered by Open-Meteo · Llama 70B · Hugging Face</div>',
    unsafe_allow_html=True,
)

user_input = st.text_input(
    label="Your question",
    placeholder='e.g. "What\'s the weather like in Paris and Tokyo this week?"',
    label_visibility="collapsed",
)
run = st.button("Get Forecast →", use_container_width=False)

if run and user_input.strip():
    with st.spinner("Asking the LLM to find cities…"):
        try:
            cities = extract_cities_with_llm(user_input)
        except Exception as e:
            # Surface LLM failures to the user rather than crashing the app.
            st.markdown(
                f'<div class="error">⚠️ LLM error: {e}</div>',
                unsafe_allow_html=True,
            )
            cities = []
    if not cities:
        st.markdown(
            '<div class="hint">what cities in particular should I look for?</div>',
            unsafe_allow_html=True,
        )
    else:
        st.markdown(
            f'<div class="detected"><span>Cities detected by LLM</span> '
            f'{" · ".join(cities)}</div>',
            unsafe_allow_html=True,
        )
        for city in cities:
            with st.spinner(f"Fetching weather for {city}…"):
                try:
                    loc = geocode_city(city)
                    if not loc:
                        st.markdown(
                            f'<div class="warn">Could not geocode "{city}"</div>',
                            unsafe_allow_html=True,
                        )
                        continue
                    forecast = fetch_forecast(loc["lat"], loc["lon"], loc["timezone"])
                    render_city_card(loc, forecast)
                except Exception as e:
                    # One bad city should not abort the remaining cities.
                    st.markdown(
                        f'<div class="error">⚠️ Error for {city}: {e}</div>',
                        unsafe_allow_html=True,
                    )
elif run:
    st.markdown(
        '<div class="hint">Please enter a question first.</div>',
        unsafe_allow_html=True,
    )

st.markdown("---")
st.markdown(
    '<div class="footer">'
    # Footer previously named Mistral-7B; corrected to the model actually used.
    'Weather data: Open-Meteo (open-source, no API key needed) · '
    'LLM: Llama-3.3-70B-Instruct via Hugging Face Inference API'
    '</div>',
    unsafe_allow_html=True,
)