Example #1
import subprocess
from datetime import datetime
from glob import glob
import json
import sys
import os
import re

import pytz
import pysolar
from PIL import Image, ImageOps, ImageEnhance, ImageChops

from settings import GFS_FOLDER
import utils

logger = utils.install_logger()

try:
    from settings import GRIB2JSON_PATH
    grib2json = GRIB2JSON_PATH
except ImportError:
    grib2json = 'grib2json'


def find_rainclouds(THIS_GFS_SLUG):
    global grib2json

    THIS_GFS_FOLDER = os.path.join(GFS_FOLDER, THIS_GFS_SLUG)
    # bail out if the run folder is missing or empty (nothing downloaded yet)
    if not os.path.isdir(THIS_GFS_FOLDER) or not os.listdir(THIS_GFS_FOLDER):
        logger.debug("no grib files found. Run fetch.py?")
        return False
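
# Usage sketch (illustrative, not shown in this excerpt): the slug is a GFS
# run identifier in YYYYMMDDHH form, and GFS_FOLDER/<slug>/ is assumed to hold
# the GRIB files downloaded by fetch.py, e.g.:
#
#     find_rainclouds("2015010112")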
Example #2
# -*- coding: utf-8 -*-

import os
from urllib.request import urlopen
from urllib.error import HTTPError
from datetime import datetime, timedelta

from settings import GFS_FOLDER
from utils import install_logger

logger = install_logger()


def forecast_url(forecast_delta):
    """ return list of (slug, url) """
    now = datetime.utcnow()
    d = now - timedelta(hours=forecast_delta)
    # round down to the most recent 6-hourly GFS run (00/06/12/18 UTC)
    d_rounded = d.replace(hour=(d.hour // 6) * 6)
    # express the requested lead time as a 3-hourly forecast step
    forecast_hour = ((d.hour % 6 + forecast_delta) // 3) * 3
    slug = now.strftime("%Y%m%d%H")
    slug_rounded = d_rounded.strftime("%Y%m%d%H")

    url = "http://nomads.ncep.noaa.gov/cgi-bin/filter_gfs_0p50.pl?file=gfs.t%sz.pgrb2full.0p50.f%03d&lev_entire_atmosphere_%%5C%%28considered_as_a_single_layer%%5C%%29=on&var_PWAT=on&leftlon=0&rightlon=360&toplat=90&bottomlat=-90&dir=%%2Fgfs.%s" % (d_rounded.strftime("%H"), forecast_hour, slug_rounded)

    return slug, url
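
# Usage sketch (illustrative, not part of the original module). With the
# current time at 2015-01-01 14:00 UTC and forecast_delta=6, the run time is
# rounded down to the 06z cycle and the lead time becomes f006:
#
#     slug, url = forecast_url(6)
#     # slug == "2015010114"  (now, formatted as YYYYMMDDHH)
#     # url requests gfs.t06z.pgrb2full.0p50.f006 from dir gfs.2015010106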

"""
This is the code that serves to download the raw precipitation data.

Run it as such: python fetch.py