import requests, gzip, shutil, subprocess, time, json, os
from datetime import datetime, timezone
from pathlib import Path

# --- CONFIGURATION ---
# Define products: { Name: NOAA_URL }
PRODUCTS = {
    "1h_qpe": "https://mrms.ncep.noaa.gov/2D/RadarOnly_QPE_01H/MRMS_RadarOnly_QPE_01H.latest.grib2.gz",
    "mesh": "https://mrms.ncep.noaa.gov/2D/MESH/MRMS_MESH.latest.grib2.gz",
    "lightning": "https://mrms.ncep.noaa.gov/2D/NLDN_CG_005min_AvgDensity/MRMS_NLDN_CG_005min_AvgDensity.latest.grib2.gz"
}

BASE_DIR = Path("/var/www/html/mrms/output")
BASE_DIR.mkdir(exist_ok=True, parents=True)
# Heartbeat file lives alongside the product outputs; derive it from BASE_DIR
# so the path is defined in exactly one place.
HEARTBEAT_FILE = BASE_DIR / "2min.txt"

# Southeast US Crop Box: [Upper Left Lon, Upper Left Lat, Lower Right Lon, Lower Right Lat]
CROP_BOX = ["-95.0", "38.0", "-75.0", "24.0"]

# Completion times per product ("N/A" until first success). Derived from
# PRODUCTS so the two mappings cannot drift apart when products change.
timestamps = {name: "N/A" for name in PRODUCTS}

def process_product(name, url):
    """Download one MRMS product, convert it for web delivery, and record the time.

    Args:
        name: Product key ("1h_qpe", "mesh", "lightning"); used for output filenames
              and to select the conversion path (GeoJSON for lightning, COG otherwise).
        url:  NOAA MRMS "latest" URL pointing at a gzipped GRIB2 file.

    Returns:
        True on success, False on any failure (errors are printed, not raised).

    Side effects: writes product files under BASE_DIR, updates the module-level
    `timestamps` dict on success, and always removes the intermediate
    .gz/.grib2 files, even on failure.
    """
    gz = BASE_DIR / f"{name}.gz"
    grib = BASE_DIR / f"{name}.grib2"

    try:
        _download_and_decompress(url, gz, grib)

        if name == "lightning":
            # Lightning is sparse point data -> vector GeoJSON.
            _grib_to_geojson(grib)
        else:
            # QPE/MESH are continuous fields -> Cloud-Optimized GeoTIFF.
            _grib_to_cog(name, grib)

        # Record completion in UTC with a trailing "Z" (e.g. 2024-01-01T00:00:00Z).
        timestamps[name] = datetime.now(timezone.utc).isoformat(timespec='seconds').replace("+00:00", "Z")
        print(f"[{datetime.now()}] Successfully processed {name}")
        return True

    except Exception as e:
        # Broad catch is deliberate: one failed product must not abort the cycle.
        print(f"[{datetime.now()}] Error processing {name}: {e}")
        return False
    finally:
        # Cleanup raw GRIB files regardless of outcome.
        for f in [gz, grib]:
            if f.exists():
                f.unlink()


def _download_and_decompress(url, gz, grib):
    """Fetch `url` to `gz`, then gunzip it to `grib`."""
    # Use a context manager so the connection is released, and iter_content
    # so transfer decoding is handled (reading r.raw directly bypasses it).
    with requests.get(url, stream=True, timeout=60) as r:
        r.raise_for_status()
        with open(gz, "wb") as f:
            for chunk in r.iter_content(chunk_size=1 << 16):
                f.write(chunk)

    with gzip.open(gz, "rb") as f_in:
        with open(grib, "wb") as f_out:
            shutil.copyfileobj(f_in, f_out)


def _grib_to_geojson(grib):
    """Convert the lightning-density GRIB2 into a GeoJSON FeatureCollection.

    Only pixels with a positive density value inside CROP_BOX are kept.
    Writes BASE_DIR/lightning.json; the intermediate .xyz file is always removed.
    """
    xyz_file = BASE_DIR / "lightning.xyz"
    # Extract raw lon/lat/value triplets as text for the crop box.
    subprocess.run(["gdal_translate", "-of", "XYZ", "-projwin", *CROP_BOX,
                    str(grib), str(xyz_file)], check=True)

    if not xyz_file.exists():
        return

    try:
        features = []
        with open(xyz_file, "r") as f:
            for line in f:
                parts = line.split()
                if len(parts) == 3:
                    lon, lat, val = float(parts[0]), float(parts[1]), float(parts[2])
                    if val > 0:  # Only keep pixels with lightning activity
                        features.append({
                            "type": "Feature",
                            "geometry": {"type": "Point", "coordinates": [lon, lat]},
                            "properties": {"density": val}
                        })

        with open(BASE_DIR / "lightning.json", "w") as f:
            json.dump({"type": "FeatureCollection", "features": features}, f)
    finally:
        # Remove the temp file even if parsing/serialization raised.
        if xyz_file.exists():
            xyz_file.unlink()


def _grib_to_cog(name, grib):
    """Crop, reproject (EPSG:4326 -> 3857), and COG-optimize a raster product.

    Writes BASE_DIR/mrms_{name}_southeast_cog.tif; intermediate TIFFs are removed.
    """
    tif = BASE_DIR / f"{name}_t.tif"
    warp = BASE_DIR / f"{name}_w.tif"
    cog = BASE_DIR / f"mrms_{name}_southeast_cog.tif"

    try:
        # Translate (Crop to the Southeast box, tagging the native CRS).
        subprocess.run(["gdal_translate", "-of", "GTiff", "-ot", "Float32",
                        "-projwin", *CROP_BOX, "-a_srs", "EPSG:4326",
                        str(grib), str(tif)], check=True)
        # Warp (Reproject to Web Mercator for slippy-map overlays).
        subprocess.run(["gdalwarp", "-overwrite", "-s_srs", "EPSG:4326",
                        "-t_srs", "EPSG:3857", "-r", "near", "-dstnodata", "-9999",
                        str(tif), str(warp)], check=True)
        # COG (Optimize for web range-requests).
        if cog.exists():
            cog.unlink()
        subprocess.run(["gdal_translate", "-of", "COG", "-co", "COMPRESS=DEFLATE",
                        "-co", "PREDICTOR=2", str(warp), str(cog)], check=True)
    finally:
        # Remove intermediates even if a gdal step failed.
        for f in [tif, warp]:
            if f.exists():
                f.unlink()

# --- RUN EXECUTION ---
if __name__ == "__main__":
    print("--- Starting MRMS Update Cycle ---")
    for prod_name, prod_url in PRODUCTS.items():
        process_product(prod_name, prod_url)

    # Write unified heartbeat: one "name,timestamp" line per product.
    heartbeat_lines = [f"{name},{ts}\n" for name, ts in timestamps.items()]
    with open(HEARTBEAT_FILE, "w") as f:
        f.writelines(heartbeat_lines)
    print("--- Cycle Complete ---")
