from fastapi import FastAPI
import uvicorn

import pandas as pd
import numpy as np
import pickle
import rasterio
import h5py
from skimage.morphology import disk
from geopy.extra.rate_limiter import RateLimiter
from geopy.geocoders import Nominatim

app = FastAPI()


@app.get("/")
def root():
    return {"API": "Hail Docker Data"}


def geocode_address(address):
    """Geocode a street address to (lat, lon) via the US Census geocoder,
    falling back to Nominatim if that lookup fails."""
    try:
        # URL-encode spaces and commas for the Census one-line-address endpoint
        address2 = address.replace(' ', '+').replace(',', '%2C')
        df = pd.read_json(
            f'https://geocoding.geo.census.gov/geocoder/locations/onelineaddress?address={address2}&benchmark=2020&format=json')
        results = df.iloc[:1, 0][0][0]['coordinates']
        lat, lon = results['y'], results['x']
    except Exception:
        # Fall back to OpenStreetMap's Nominatim, rate-limited to one request every 2 s
        geolocator = Nominatim(user_agent='GTA Lookup')
        geocode = RateLimiter(geolocator.geocode, min_delay_seconds=2)
        location = geocode(address)
        lat, lon = location.latitude, location.longitude

    return lat, lon


def get_hail_data(address, start_date, end_date, radius_miles, get_max):
    """Return daily hail sizes (inches) within radius_miles of an address."""

    # Convert the search radius from miles to grid cells (~1 km MRMS resolution)
    resolution = 1
    radius = int(np.ceil(radius_miles * 1.6 / resolution))

    # Normalize the date inputs and build the full range of days to report
    start_date = pd.Timestamp(str(start_date)).strftime('%Y%m%d')
    end_date = pd.Timestamp(str(end_date)).strftime('%Y%m%d')
    date_years = pd.date_range(start=start_date, end=end_date, freq='M')
    date_range_days = pd.date_range(start_date, end_date)
    years = list(set([d.year for d in date_years]))

    # A range shorter than one month yields no month-end dates, so fall back to the start year
    if len(years) == 0:
        years = [pd.Timestamp(start_date).year]

    lat, lon = geocode_address(address)

    # Map the coordinates to a row/column of the MRMS grid
    with open('Data/transform_mrms.pkl', 'rb') as f:
        transform = pickle.load(f)
    row, col = rasterio.transform.rowcol(transform, lon, lat)

    # Per-year HDF5 archives of gridded hail data
    files = [
        'Data/2023_hail.h5',
        'Data/2022_hail.h5',
        'Data/2021_hail.h5',
        'Data/2020_hail.h5'
    ]

    # Keep only the files whose names contain one of the requested years
    files_chosen = [i for i in files if any(str(j) in i for j in years)]

    all_data = []
    all_dates = []
    for file in files_chosen:
        with h5py.File(file, 'r') as f:
            # Select only the dates inside the requested window
            dates = f['dates'][:]
            date_idx = np.where((dates >= int(start_date))
                                & (dates <= int(end_date)))[0]

            # Read a square window of grid cells centered on the address
            dates = dates[date_idx]
            data = f['hail'][date_idx, row - radius:row + radius + 1,
                             col - radius:col + radius + 1]

        all_data.append(data)
        all_dates.append(dates)

    data_all = np.vstack(all_data)
    dates_all = np.concatenate(all_dates)

    # Clip negative (missing) values to 0 and convert from mm to inches
    data_mat = np.where(data_all < 0, 0, data_all) * 0.0393701

    # Mask out cells beyond the circular radius with -1
    disk_mask = disk(radius) == 1
    data_mat = np.where(disk_mask, data_mat, -1).round(3)

    # Either reduce each day to its maximum value inside the radius,
    # or keep the full 2-D grid for each day
    if get_max:
        data_max = np.max(data_mat, axis=(1, 2))
        df_data = pd.DataFrame({'Date': dates_all,
                                'Hail_max': data_max})
    else:
        data_all = list(data_mat)
        df_data = pd.DataFrame({'Date': dates_all,
                                'Hail_all': data_all})

    # Reindex to the full daily range, filling days with no recorded hail with 0
    df_data['Date'] = pd.to_datetime(df_data['Date'].astype(str), format='%Y%m%d')
    df_data = df_data.set_index('Date')

    df_data = df_data.reindex(date_range_days, fill_value=0).reset_index().rename(
        columns={'index': 'Date'})
    df_data['Date'] = df_data['Date'].dt.strftime('%Y-%m-%d')

    return df_data

@app.get('/Hail_Docker_Data')
async def predict(address: str, start_date: str, end_date: str, radius_miles: int, get_max: bool):

    try:
        results = get_hail_data(address, start_date,
                                end_date, radius_miles, get_max)
    except Exception:
        # Return a one-row frame so the response shape stays consistent on failure
        results = pd.DataFrame({'Date': ['error'], 'Hail_max': ['error']})

    return results.to_json()
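

# Minimal sketch of a local entry point for the service, assuming this module is
# run directly; the host/port values and the example query below are assumptions,
# adjust them for your deployment.
# Example request (hypothetical values):
#   GET /Hail_Docker_Data?address=123+Main+St,+Dallas,+TX&start_date=2022-05-01
#       &end_date=2022-05-31&radius_miles=5&get_max=true
if __name__ == '__main__':
    uvicorn.run(app, host='0.0.0.0', port=8000)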