Example #1
def download(now, offset):
    """
    Download a given timestamp from NCEP and inject into LDM
    Example:  ftp://ftpprd.ncep.noaa.gov/pub/data/nccf/com/hourly/prod/
              nam_pcpn_anal.20090916/ST4.2009091618.01h.gz
    """
    hours = [1, ]
    if now.hour % 6 == 0 and offset > 24:
        hours.append(6)
    if now.hour == 12:
        hours.append(24)
    for hr in hours:
        url = "%s.%02ih.gz" % (now.strftime(("http://ftpprd.ncep.noaa.gov/"
                                             "data/nccf/com/hourly/prod/"
                                             "nam_pcpn_anal.%Y%m%d/"
                                             "ST4.%Y%m%d%H")), hr)
        response = exponential_backoff(requests.get, url, timeout=60)
        if response is None or response.status_code != 200:
            if offset > 23:
                print('ncep_stage4.py: dl %s failed' % (url,))
            continue
        # Same temp file
        o = open("tmp.grib.gz", 'wb')
        o.write(response.content)
        o.close()
        subprocess.call("gunzip -f tmp.grib.gz", shell=True)
        # Inject into LDM
        cmd = ("/home/ldm/bin/pqinsert -p 'data a %s blah "
               "stage4/ST4.%s.%02ih.grib grib' tmp.grib"
               ) % (now.strftime("%Y%m%d%H%M"), now.strftime("%Y%m%d%H"), hr)
        subprocess.call(cmd, shell=True)
        os.remove('tmp.grib')

        # Do stage2 ml now
        if hr == 1:
            url = "%s.Grb.gz" % (now.strftime(("http://ftpprd.ncep.noaa.gov"
                                               "/data/nccf/com/hourly/prod/"
                                               "nam_pcpn_anal.%Y%m%d/"
                                               "ST2ml%Y%m%d%H")), )
        else:
            url = "%s.%02ih.gz" % (now.strftime(("http://ftpprd.ncep.noaa.gov/"
                                                 "data/nccf/com/hourly/prod/"
                                                 "nam_pcpn_anal.%Y%m%d/"
                                                 "ST2ml%Y%m%d%H")), hr)
        response = exponential_backoff(requests.get, url, timeout=60)
        if response is None or response.status_code != 200:
            print('ncep_stage4.py: dl %s failed' % (url,))
            continue
        # Same temp file
        o = open("tmp.grib.gz", 'wb')
        o.write(response.content)
        o.close()
        subprocess.call("gunzip -f tmp.grib.gz", shell=True)
        # Inject into LDM
        cmd = ("/home/ldm/bin/pqinsert -p 'data a %s blah "
               "stage4/ST2ml.%s.%02ih.grib grib' tmp.grib"
               ) % (now.strftime("%Y%m%d%H%M"), now.strftime("%Y%m%d%H"), hr)
        subprocess.call(cmd, shell=True)
        os.remove('tmp.grib')
Example #2
 def pwd(self):
     """ Low friction function to get connectivity """
     self._connect()
     pwd = exponential_backoff(self.conn.pwd)
     if pwd is None:
         self._reconnect()
         pwd = exponential_backoff(self.conn.pwd)
     logging.debug("pwd() is currently '%s'", pwd)
     return pwd
Example #3
 def put_file(self, path, localfn, remotefn):
     """ Put the File """
     res = exponential_backoff(self._put, path, localfn, remotefn)
     if not res:
         self._reconnect()
         res = exponential_backoff(self._put, path, localfn, remotefn)
         if not res:
             logging.error("Double Failure to upload filename: '%s'",
                           localfn)
             return False
     return True
Example #4
def test_backoff():
    """Do the backoff of a bad func"""
    def bad():
        """Always errors"""
        raise Exception("Always Raises :)")
    res = util.exponential_backoff(bad, _ebfactor=0)
    assert res is None
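
The test above pins down the contract every snippet in this listing relies on: exponential_backoff keeps calling the supplied callable, forwards its positional and keyword arguments (timeout=, headers=, force=, and so on), and returns None once every attempt has raised. Below is a minimal sketch along those lines; the attempt count, the default, and the way _ebfactor scales the sleep are assumptions for illustration, not the project's actual util implementation.

import logging
import random
import time


def exponential_backoff(func, *args, **kwargs):
    """Call func(*args, **kwargs), retrying with growing sleeps on failure.

    Hypothetical sketch: only the return-None-after-total-failure behavior
    and the _ebfactor keyword are taken from the examples in this listing;
    the retry count and sleep schedule are assumptions.
    """
    ebfactor = kwargs.pop("_ebfactor", 2)
    attempts = 5
    for attempt in range(attempts):
        try:
            return func(*args, **kwargs)
        except Exception as exp:
            logging.debug("attempt %s/%s of %s raised %s",
                          attempt + 1, attempts, func, exp)
            # Back off: up to ebfactor * 2**attempt seconds, with jitter;
            # _ebfactor=0 (as in the test above) skips sleeping entirely.
            time.sleep(random.random() * ebfactor * (2 ** attempt))
    return None

With this shape, util.exponential_backoff(bad, _ebfactor=0) in the test returns None without sleeping, and a call such as exponential_backoff(requests.get, url, timeout=60) simply forwards timeout to requests.get.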
Example #5
 def trim_columns(self):
     """ Attempt to trim off any extraneous columns """
     self.get_cell_feed()
     for col in range(1, int(self.cols)+1):
         if self.data["1"].get(str(col)) is not None:
             continue
         print('Column Delete Candidate %s' % (col,))
         found_data = False
         for row in range(1, int(self.rows)+1):
             _v = self.data.get(str(row), {}).get(str(col))
             if _v not in [None, 'n/a', 'did not collect']:
                 found_data = True
                 print(('ERROR row: %s has data: %s'
                        ) % (row, self.data[str(row)][str(col)]))
         if not found_data:
             print('Deleting column %s' % (col,))
             if col == int(self.cols):
                 self.drop_last_column()
                 return True
             # Move columns left
             updateFeed = spdata.build_batch_cells_update(self.spread_id,
                                                          self.id)
             for col2 in range(int(col), int(self.cols)):
                 for row in range(1, int(self.rows)+1):
                     updateFeed.add_set_cell(str(row), str(col2),
                                             self.get_cell_value(row,
                                                                 col2 + 1))
             self.cell_feed = exponential_backoff(self.spr_client.batch,
                                                  updateFeed, force=True)
             # Drop last column
             self.refetch_feed()
             self.drop_last_column()
             return True
     return False
Example #6
def dl(now, varname):
    """get the files"""
    uri = now.strftime(("http://www.ftp.ncep.noaa.gov/data/nccf/com/cfs/prod/"
                        "cfs/cfs.%Y%m%d/%H/time_grib_01/" +
                        varname + ".01.%Y%m%d%H.daily.grb2"))
    response = exponential_backoff(requests.get, uri, timeout=60)
    if response is None or response.status_code != 200:
        print('download_cfs.py: dl %s failed' % (uri,))
        return
    tmpfn = "/tmp/%s.cfs.grib" % (varname, )
    o = open(tmpfn, 'wb')
    o.write(response.content)
    o.close()
    # Check out this file to see how much data we actually have, it had
    # better be a big number
    grb = pygrib.open(tmpfn)
    if grb.messages < REQUIRED_MSGS:
        print(("download_cfs %s %s has only %s messages, need %s+"
               ) % (now, varname, grb.messages, REQUIRED_MSGS))
    else:
        # Inject into LDM
        cmd = ("/home/ldm/bin/pqinsert -p 'data a %s blah "
               "model/cfs/%02i/%s.01.%s.daily.grib2 grib' %s"
               ) % (now.strftime("%Y%m%d%H%M"), now.hour, varname,
                    now.strftime("%Y%m%d%H"), tmpfn)
        subprocess.call(cmd, shell=True)

    os.remove(tmpfn)
Example #7
def dl_ncep(ts):
    ''' Download stuff we want from NCEP '''
    print('1. Download NCEP GFS Data')
    baseuri = "https://ftpprd.ncep.noaa.gov/data/nccf/com/gfs/prod"
    tmpdir = "/tmp/gfs.%s" % (ts.strftime("%Y%m%d%H"),)
    if not os.path.isdir(tmpdir):
        os.makedirs(tmpdir)

    for i in range(0, 73, 3):
        g1file = "%s/gfs.t%02iz.pgrb2.1p00.f%03i.grib" % (tmpdir, ts.hour, i)
        g2file = "%s/gfs.t%02iz.pgrb2.1p00.f%03i" % (tmpdir, ts.hour, i)
        if not os.path.isfile(g1file):
            print('   Fetching: %s' % (g2file,), end='')
            uri = ("%s/gfs.%s/gfs.t%02iz.pgrb2.1p00.f%03i"
                   ) % (baseuri, ts.strftime("%Y%m%d%H"), ts.hour, i)
            res = exponential_backoff(requests.get, uri, timeout=60)
            o = open(g2file, 'wb')
            o.write(res.content)
            o.close()
            print(' %s' % (len(res.content),))

        if not os.path.isfile(g1file):
            # convert to grib2
            subprocess.call("/usr/local/bin/cnvgrib -g21 %s %s" % (g2file,
                                                                   g1file),
                            shell=True, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)

        # Remove the grib2 file as it is no longer needed...
        if os.path.isfile(g1file) and os.path.isfile(g2file):
            os.unlink(g2file)
Example #8
 def get_worksheets(self):
     """ Get the worksheets associated with this spreadsheet """
     feed = exponential_backoff(self.spr_client.GetWorksheets, self.id)
     if feed is None:
         return
     for entry in feed.entry:
         self.worksheets[entry.title.text] = Worksheet(self.spr_client,
                                                       entry)
Example #9
    def expand_rows(self, amount=1):
        """ Expand this sheet by the number of rows desired

        Args:
          amount (int, optional): The number of rows to expand worksheet by
        """
        self.rows = self.rows + amount
        self.entry.row_count.text = "%s" % (self.rows,)
        self.entry = exponential_backoff(self.spr_client.update, self.entry)
Example #10
    def get_list_feed(self):
        """Get a ListFeed for this Worksheet

        Returns:
          list_feed
        """
        if self.list_feed is not None:
            return self.list_feed
        self.list_feed = exponential_backoff(self.spr_client.get_list_feed,
                                             self.spread_id, self.id)
        return self.list_feed
Example #11
    def add_column(self, label, row2=None, row3=None):
        """ Add a column, if it does not exist """
        self.get_cell_feed()
        for col in range(1, int(self.cols)+1):
            if self.get_cell_value("1", col) == label:
                print('Column %s with label already found: %s' % (col, label))
                return
        self.expand_cols(1)

        for i, lbl in enumerate([label, row2, row3]):
            if lbl is None:
                continue
            entry = exponential_backoff(self.spr_client.get_cell,
                                        self.spread_id, self.id,
                                        str(i+1),
                                        str(self.cols))
            entry.cell.input_value = lbl
            exponential_backoff(self.spr_client.update, entry)

        self.refetch_feed()
        self.cell_feed = None
Example #12
File: unidata_dl.py  Project: akrherz/DEV
def main():
    """Go Main Go"""
    os.chdir('data/nexrad/NIDS')
    for nexrad in tqdm(glob.glob('???')):
        source = "twdr" if nexrad in TWDR.sts else "nexrad"
        os.chdir(nexrad)
        for nids in ['N0Q', 'NET', 'N0R', 'EET']:
            if not os.path.isdir(nids):
                continue
            os.chdir(nids)
            for date in DATES:
                dir_uri = date.strftime(("http://motherlode.ucar.edu/native/"
                                         "radar/level3/" + source +
                                         "/" + nids + "/" +
                                         nexrad + "/%Y%m%d/"))
                req = exponential_backoff(requests.get, dir_uri, timeout=30)
                tokens = FNREGEX.findall(req.content.decode('ascii'))
                for token in tokens:
                    localfn = "%s_%s_%s" % (token[1], token[2], token[3])
                    if os.path.isfile(localfn):
                        continue
                    remotefn = ("%sLevel3_%s_%s_%s_%s.nids"
                                ) % (dir_uri, *token)
                    req = exponential_backoff(requests.get, remotefn,
                                              timeout=30)
                    fp = open(localfn, 'wb')
                    fp.write(req.content)
                    fp.close()
                    # write files for re-ingest by Ridge
                    # ICT_20180519_1030_N0U.ridge
                    ridgefn = ("/mesonet/tmp/ridge/%s_%s_%s_%s"
                               ) % (nexrad, token[2], token[3], nids)
                    fp = open(ridgefn, 'wb')
                    fp.write(req.content)
                    fp.close()

            os.chdir('..')
        os.chdir('..')
Example #13
def get_xref_siteids_plotids(drive, spr_client, config):
    ''' Get a dict of site IDs with a list of plot IDs for each '''
    spreadkeys = get_xref_plotids(drive)
    data = {}
    for uniqueid in spreadkeys.keys():
        data[uniqueid.lower()] = []
        feed = exponential_backoff(spr_client.get_list_feed,
                                   spreadkeys[uniqueid], 'od6')
        for entry in feed.entry:
            row = entry.to_dict()
            if row['plotid'] is None:
                continue
            data[uniqueid.lower()].append(row['plotid'].lower())
    return data
Example #14
    def del_column(self, label, sloppy=False):
        """ Delete a column from the worksheet that has a given label
        this also zeros out any data in the column too

        Args:
          label (str): the column label based on the first row's value
          sloppy (bool): should we only find that the contents start the value
        """
        self.get_cell_feed()
        worked = False
        for col in range(1, int(self.cols)+1):
            if self.get_cell_value(1, col) != label and not sloppy:
                continue
            if sloppy and not self.get_cell_value(1, col).startswith(label):
                continue
            worked = True
            print('Found %s in column %s, deleting column' % (label, col))
            entry = self.get_cell_entry(1, col)
            entry.cell.input_value = ""
            exponential_backoff(self.spr_client.update, entry)

            updateFeed = spdata.build_batch_cells_update(self.spread_id,
                                                         self.id)
            for row in range(1, int(self.rows)+1):
                updateFeed.add_set_cell(str(row), str(col), "")
            self.cell_feed = exponential_backoff(self.spr_client.batch,
                                                 updateFeed, force=True)

        if not worked:
            print("Error, did not find column |%s| for deletion" % (label,))
            print("The columns were:")
            for col in range(1, int(self.cols)+1):
                print("  %2i |%s|" % (col, self.get_cell_value(1, col)))
            return
        self.refetch_feed()
        while self.trim_columns():
            print('Trimming Columns!')
Example #15
def get_site_metadata(config, spr_client=None):
    '''
    Return a dict of research site metadata
    '''
    meta = {}
    if spr_client is None:
        spr_client = get_spreadsheet_client(config)

    lf = exponential_backoff(spr_client.get_list_feed,
                             config['cscap']['metamaster'], 'od6')
    for entry in lf.entry:
        d = entry.to_dict()
        meta[d['uniqueid']] = {'climate_site': d['iemclimatesite'].split()[0],
                               }
    return meta
Example #16
 def get_cell_feed(self):
     if self.cell_feed is not None:
         return
     self.cell_feed = exponential_backoff(self.spr_client.get_cells,
                                          self.spread_id, self.id)
     for entry in self.cell_feed.entry:
         row = entry.cell.row
         _rowstore = self.data.setdefault(row, dict())
         # https://developers.google.com/google-apps/spreadsheets/?hl=en#working_with_cell-based_feeds
         # The input_value could be a function, pick the numeric_value
         # first, which can be None for non-numeric types
         if entry.cell.numeric_value is not None:
             _numstore = self.numdata.setdefault(row, dict())
             _numstore[entry.cell.col] = entry.cell.numeric_value
         _rowstore[entry.cell.col] = entry.cell.input_value
Example #17
def generator(sid, lat, lon, rerun=False):

    now = datetime.datetime.now()
    rest_uri = ("%slat=%s&lon=%s&format=12+hourly&startDate=%s"
                "&numDays=7&Submit=Submit"
                ) % (ENDPOINT, lat, lon, now.strftime("%Y-%m-%d"))
    r = exponential_backoff(requests.get, rest_uri, timeout=30)
    if r is None:
        return False
    data = r.content
    try:
        doc = ElementTree.XML(data)
    except Exception as exp:
        print("%s got exception: %s sample: |%s|" % (sid, exp, data[:100]))
        return False
Example #18
def main(valid):
    """Run for the given valid time!"""
    DBCONN = psycopg2.connect(database='postgis', host='iemdb')

    v12 = valid - datetime.timedelta(hours=13)

    for sid in nt.sts.keys():
        # skip virtual sites
        if sid.startswith("_"):
            continue
        uri = ("http://rucsoundings.noaa.gov/get_raobs.cgi?data_source=RAOB;"
               "start_year=%s;start_month_name=%s;"
               ) % (valid.year, valid.strftime("%b"))
        uri += ("start_mday=%s;start_hour=%s;start_min=0;n_hrs=12.0;"
                ) % (valid.day, valid.hour)
        uri += "fcst_len=shortest;airport=%s;" % (sid,)
        uri += "text=Ascii%20text%20%28GSD%20format%29;"
        uri += ("hydrometeors=false&startSecs=%s&endSecs=%s"
                ) % (v12.strftime("%s"), valid.strftime("%s"))

        cursor = DBCONN.cursor()
        r = exponential_backoff(requests.get, uri, timeout=30)
        if r is None:
            print("ingest_from_rucsoundings failed %s for %s" % (sid, valid))
            continue
        try:
            for rob in parse(r.content, sid):
                nt.sts[sid]['count'] = len(rob.profile)
                rob.database_save(cursor)
        except Exception as exp:
            print('RAOB FAIL %s %s %s, check /tmp for data' % (sid, valid, exp))
            o = open("/tmp/%s_%s_fail" % (sid, valid.strftime("%Y%m%d%H%M")),
                     'wb')
            o.write(r.content)
            o.close()
        finally:
            cursor.close()
            DBCONN.commit()
Example #19
def plotter(fdict):
    """ Go """
    ctx = util.get_autoplot_context(fdict, get_description())
    state = ctx["state"]
    syear = ctx["syear"]
    eyear = ctx["eyear"]

    fips = ""
    for key in state_fips:
        if state_fips[key] == state:
            fips = key
    payload = "{'area':'%s', 'type':'state', 'statstype':'2'}" % (fips,)
    headers = {}
    headers["Accept"] = "application/json, text/javascript, */*; q=0.01"
    headers["Content-Type"] = "application/json; charset=UTF-8"
    req = util.exponential_backoff(
        requests.post, SERVICE, payload, headers=headers
    )
    if req is None:
        raise NoDataFound("Drought Web Service failed to deliver data.")
    jdata = req.json()
    if "d" not in jdata:
        raise NoDataFound("Data Not Found.")
    df = pd.DataFrame(jdata["d"])
    df["Date"] = pd.to_datetime(df["ReleaseDate"])
    df.sort_values("Date", ascending=True, inplace=True)
    df["x"] = df["Date"] + datetime.timedelta(hours=(3.5 * 24))

    fig = plt.figure(figsize=(7, 9))
    ax = fig.add_axes([0.1, 0.1, 0.87, 0.84])
    lastrow = None
    for year, gdf in df.groupby(df.Date.dt.year):
        if year < syear or year > eyear:
            continue
        xs = []
        ys = []
        for _, row in gdf.iterrows():
            if lastrow is None:
                lastrow = row
            delta = (
                (lastrow["D4"] - row["D4"]) * 5.0
                + (lastrow["D3"] - row["D3"]) * 4.0
                + (lastrow["D2"] - row["D2"]) * 3.0
                + (lastrow["D1"] - row["D1"]) * 2.0
                + (lastrow["D0"] - row["D0"])
            )
            xs.append(int(row["Date"].strftime("%j")))
            ys.append(year + (0 - delta) / 100.0)
            lastrow = row
        if len(xs) < 4:
            continue
        fcube = interp1d(xs, ys, kind="cubic")
        xnew = np.arange(xs[0], xs[-1])
        yval = np.ones(len(xnew)) * year
        ynew = fcube(xnew)
        ax.fill_between(
            xnew,
            yval,
            ynew,
            where=(ynew < yval),
            facecolor="blue",
            interpolate=True,
        )
        ax.fill_between(
            xnew,
            yval,
            ynew,
            where=(ynew >= yval),
            facecolor="red",
            interpolate=True,
        )

    ax.set_ylim(eyear + 1, syear - 1)
    ax.set_xlim(0, 366)
    ax.set_xlabel(
        (
            "curve height of 1 year is 1 effective drought category "
            "change over area of %s"
        )
        % (state_names[state],)
    )
    ax.set_ylabel("Year, thru %s" % (df.Date.max().strftime("%d %b %Y"),))
    ax.set_title(
        (
            "%.0f-%.0f US Drought Monitor Weekly Change for %s\n"
            "curve height represents change in intensity + coverage"
        )
        % (syear, eyear, state_names[state])
    )

    ax.grid(True)
    ax.set_xticks((1, 32, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335))
    ax.set_xticklabels(calendar.month_abbr[1:])

    ax.set_yticks(
        np.arange(ax.get_ylim()[0] - 1, ax.get_ylim()[1], -1, dtype="i")
    )
    fig.text(0.02, 0.03, "Blue areas are improving conditions", color="b")
    fig.text(0.4, 0.03, "Red areas are degrading conditions", color="r")

    return fig, df[["Date", "NONE", "D0", "D1", "D2", "D3", "D4"]]
Example #20
def workflow():
    """ Do stuff """
    req = exponential_backoff(requests.get, URI, timeout=30)
    if req is None or req.status_code != 200:
        return
    data = req.json()
    features = data.get("features", [])
    pgconn = get_dbconn("postgis")
    cursor = pgconn.cursor()

    cursor.execute("SELECT label, idnum from idot_dashcam_current")
    current = {}
    for row in cursor:
        current[row[0]] = row[1]

    for feat in features:
        logdt = feat["attributes"]["PHOTO_FILEDATE"]
        if logdt is None:
            continue
        ts = datetime.datetime.utcfromtimestamp(logdt / 1000.0)
        valid = ts.replace(tzinfo=pytz.UTC)
        label = feat["attributes"]["PHOTO_ANUMBER"]
        idnum = feat["attributes"]["PHOTO_UID"]
        LOG.debug(
            "label: %s current: %s new: %s",
            label,
            current.get(label, 0),
            idnum,
        )
        if idnum <= current.get(label, 0):
            continue
        photourl = feat["attributes"]["PHOTO_URL"]
        # Go get the URL for saving!
        # print label, utc, feat['attributes']['PHOTO_URL']
        LOG.debug("Fetch %s", photourl)
        req = exponential_backoff(requests.get, photourl, timeout=15)
        if req is None or req.status_code != 200:
            LOG.info(
                "dot_truckcams.py dl fail |%s| %s",
                "req is None" if req is None else req.status_code,
                photourl,
            )
            continue
        tmp = tempfile.NamedTemporaryFile(delete=False)
        tmp.write(req.content)
        tmp.close()
        cmd = ("/home/ldm/bin/pqinsert -p 'plot ac %s %s %s jpg' %s") % (
            valid.strftime("%Y%m%d%H%M"),
            get_current_fn(label),
            get_archive_fn(label, valid),
            tmp.name,
        )
        proc = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)
        proc.stderr.read()
        os.unlink(tmp.name)

        pt = P3857(feat["geometry"]["x"], feat["geometry"]["y"], inverse=True)
        geom = "SRID=4326;POINT(%s %s)" % (pt[0], pt[1])
        # This table has an insert trigger that logs the entry as well
        cursor.execute(
            """
            INSERT into idot_dashcam_current(label, valid, idnum,
            geom) VALUES (%s, %s, %s, %s)
        """,
            (label, valid, idnum, geom),
        )

    cursor.close()
    pgconn.commit()
    pgconn.close()
Example #21
def fetch(valid):
    """ Fetch the radiation data for this timestamp
    80:54371554:d=2014101002:ULWRF:top of atmosphere:anl:
    81:56146124:d=2014101002:DSWRF:surface:anl:
    """
    uri = valid.strftime(("http://www.ftp.ncep.noaa.gov/data/nccf/"
                          "com/hrrr/prod/hrrr.%Y%m%d/conus/hrrr.t%Hz."
                          "wrfprsf00.grib2.idx"))
    req = requests.get(uri, timeout=30)
    if req.status_code != 200:
        print("download_hrrr failed to get idx\n%s" % (uri, ))
        return

    offsets = []
    neednext = False
    for line in req.content.decode('utf-8').split("\n"):
        tokens = line.split(":")
        if len(tokens) < 3:
            continue
        if neednext:
            offsets[-1].append(int(tokens[1]))
            neednext = False
        if tokens[3] in ['ULWRF', 'DSWRF']:
            offsets.append([
                int(tokens[1]),
            ])
            neednext = True
        # Save soil temp and water at surface, 10cm and 40cm
        if tokens[3] in ['TSOIL', 'SOILW']:
            if tokens[4] in [
                    '0-0 m below ground', '0.1-0.1 m below ground',
                    '0.3-0.3 m below ground', '0.6-0.6 m below ground',
                    '1-1 m below ground'
            ]:
                offsets.append([
                    int(tokens[1]),
                ])
                neednext = True

    outfn = valid.strftime(("/mesonet/ARCHIVE/data/%Y/%m/%d/model/hrrr/"
                            "%H/hrrr.t%Hz.3kmf00.grib2"))
    outdir = os.path.dirname(outfn)
    if not os.path.isdir(outdir):
        os.makedirs(outdir)  # make sure LDM can then write to dir
        subprocess.call("chmod 775 %s" % (outdir, ), shell=True)
    output = open(outfn, 'ab', 0o664)

    if len(offsets) != 13:
        print("download_hrrr_rad warning, found %s gribs for %s" %
              (len(offsets), valid))
    for pr in offsets:
        headers = {'Range': 'bytes=%s-%s' % (pr[0], pr[1])}
        req = exponential_backoff(requests.get,
                                  uri[:-4],
                                  headers=headers,
                                  timeout=30)
        if req is None:
            print("download_hrrr.py failure for uri: %s" % (uri, ))
        else:
            output.write(req.content)

    output.close()
Example #22
def download(now, offset):
    """
    Download a given timestamp from NCEP and inject into LDM
    Example:  ftp://ftpprd.ncep.noaa.gov/pub/data/nccf/com/hourly/prod/
              nam_pcpn_anal.20090916/ST4.2009091618.01h.gz
    """
    hours = [1]
    if now.hour % 6 == 0 and offset > 24:
        hours.append(6)
    if now.hour == 12:
        hours.append(24)
    for hr in hours:
        url = "%s.%02ih.gz" % (
            now.strftime(("http://ftpprd.ncep.noaa.gov/"
                          "data/nccf/com/pcpanl/prod/"
                          "pcpanl.%Y%m%d/"
                          "ST4.%Y%m%d%H")),
            hr,
        )
        response = exponential_backoff(requests.get, url, timeout=60)
        if response is None or response.status_code != 200:
            if offset > 23:
                print("ncep_stage4.py: dl %s failed" % (url, ))
            continue
        # Same temp file
        output = open("tmp.grib.gz", "wb")
        output.write(response.content)
        output.close()
        subprocess.call("gunzip -f tmp.grib.gz", shell=True)
        # Inject into LDM
        cmd = ("/home/ldm/bin/pqinsert -p 'data a %s blah "
               "stage4/ST4.%s.%02ih.grib grib' tmp.grib") % (
                   now.strftime("%Y%m%d%H%M"), now.strftime("%Y%m%d%H"), hr)
        subprocess.call(cmd, shell=True)
        os.remove("tmp.grib")

        # Do stage2 ml now
        if hr == 1:
            url = "%s.Grb.gz" % (now.strftime(("http://ftpprd.ncep.noaa.gov"
                                               "/data/nccf/com/pcpanl/prod/"
                                               "pcpanl.%Y%m%d/"
                                               "ST2ml%Y%m%d%H")), )
        else:
            url = "%s.%02ih.gz" % (
                now.strftime(("http://ftpprd.ncep.noaa.gov/"
                              "data/nccf/com/pcpanl/prod/"
                              "pcpanl.%Y%m%d/"
                              "ST2ml%Y%m%d%H")),
                hr,
            )
        response = exponential_backoff(requests.get, url, timeout=60)
        if response is None or response.status_code != 200:
            if offset > 23:
                print("ncep_stage4.py: dl %s failed" % (url, ))
            continue
        # Same temp file
        output = open("tmp.grib.gz", "wb")
        output.write(response.content)
        output.close()
        subprocess.call("gunzip -f tmp.grib.gz", shell=True)
        # Inject into LDM
        cmd = ("/home/ldm/bin/pqinsert -p 'data a %s blah "
               "stage4/ST2ml.%s.%02ih.grib grib' tmp.grib") % (
                   now.strftime("%Y%m%d%H%M"), now.strftime("%Y%m%d%H"), hr)
        subprocess.call(cmd, shell=True)
        os.remove("tmp.grib")
Example #23
def generator(sid, lat, lon, rerun=False):
    """Generate things"""
    now = datetime.datetime.now()
    rest_uri = ("%slat=%s&lon=%s&format=12+hourly&startDate=%s"
                "&numDays=7&Submit=Submit"
                ) % (ENDPOINT, lat, lon, now.strftime("%Y-%m-%d"))
    r = exponential_backoff(requests.get, rest_uri, timeout=30)
    if r is None:
        return False
    data = r.content
    try:
        doc = ElementTree.XML(data)
    except Exception as exp:
        print("%s got exception: %s sample: |%s|" % (sid, exp, data[:100]))
        return False

    taxis = {}
    tnames = {}
    for elem in doc.findall("./data/time-layout"):
        key = elem.find("layout-key").text
        taxis[key] = []
        tnames[key] = []
        for elem2 in elem.findall("./start-valid-time"):
            ts = datetime.datetime.strptime(elem2.text[:16], '%Y-%m-%dT%H:%M')
            taxis[key].append(ts)
            tnames[key].append(elem2.attrib.get("period-name", None))

    temps = {}
    for elem in doc.findall("./data/parameters/temperature"):
        name = elem.find("./name")
        temps[name.text] = {'taxis': elem.attrib['time-layout'],
                            'vals': []}
        for v in elem.findall("./value"):
            temps[name.text]['vals'].append(v.text)

    for elem in doc.findall("./data/parameters/weather"):
        weather = {'taxis': elem.attrib['time-layout'],
                   'vals': []}
        for v in elem.findall("./weather-conditions"):
            weather['vals'].append(v.attrib.get('weather-summary', None))

    for elem in doc.findall("./data/parameters/conditions-icon"):
        icons = {'taxis': elem.attrib['time-layout'],
                 'vals': []}
        for v in elem.findall("./icon-link"):
            icons['vals'].append(v.text)

    if 'Daily Maximum Temperature' not in temps:
        if rerun:
            print('--------------------------------------------------')
            print('Whoa, could not find daily maximum temperature key')
            print(sid)
            print(data)
        return False

    data = {}
    for val, tm in zip(temps['Daily Maximum Temperature']['vals'],
                       tnames[temps['Daily Maximum Temperature']['taxis']]):
        if tm not in data:
            # print 'Adding tm for high', tm, data.keys()
            data[tm] = {'high': None, 'low': None, 'weather': None,
                        'icon': None}
        data[tm]['high'] = val

    for val, tm in zip(temps['Daily Minimum Temperature']['vals'],
                       tnames[temps['Daily Minimum Temperature']['taxis']]):
        if tm not in data:
            # print 'Adding tm for mintmp', tm, data.keys()
            data[tm] = {'high': None, 'low': None, 'weather': None,
                        'icon': None}
        data[tm]['low'] = val

    for val, tm in zip(icons['vals'],
                       tnames[icons['taxis']]):
        if tm not in data:
            # print 'Adding tm for icons', tm
            data[tm] = {'high': None, 'low': None, 'weather': None,
                        'icon': None}
        data[tm]['icon'] = val

    for val, tm in zip(weather['vals'], tnames[weather['taxis']]):
        # print 'Wx Axis', tm, val
        if tm not in data:
            # print 'Adding tm for weather', tm
            data[tm] = {'high': None, 'low': None, 'weather': None,
                        'icon': None}
        data[tm]['weather'] = val

    # print "MAX TEMPS", tnames[ temps['Daily Maximum Temperature']['taxis'] ]
    # print "MIN TEMPS", tnames[ temps['Daily Minimum Temperature']['taxis'] ]
    # print "WEATHER", tnames[ weather['taxis'] ]
    # print "ICONS", tnames[ icons['taxis'] ]
    o = open('%s.html' % (sid,), 'w')
    o.write("<!-- %s -->" % (sid,))
    o.write("<table cellspacing=\"0\" cellpadding=\"1\" width=\"640\">")

    o.write("<tr>")
    for tm in tnames[icons['taxis']][:9]:
        o.write("<th width=\"11%%\">%s</th>" % (tm, ))
    o.write("</tr>")

    o.write("<tr>")
    for tm in tnames[icons['taxis']][:9]:
        if data[tm]['icon'] is not None:
            o.write(("<td><img src=\"%s\" alt=\"fx\"/></td>"
                     ) % (data[tm]['icon'], ))
        else:
            o.write("<td></td>")
    o.write("</tr>")

    o.write("<tr>")
    for tm in tnames[icons['taxis']][:9]:
        if data[tm]['high'] is not None:
            d = data[tm]['high']
            l = 'Hi'
        else:
            d = data[tm]['low']
            l = 'Lo'
        o.write("<td>%s %s&deg;F</td>" % (l, d))
    o.write("</tr>")

    o.write("</table>")
    o.close()
    # LDM insert
    cmd = ("/home/ldm/bin/pqinsert -p 'data c 000000000000 "
           "kcci/fx/%s.html blah blah' %s.html"
           ) % (sid, sid)
    subprocess.call(cmd, shell=True)
    # cleanup
    os.unlink("%s.html" % (sid,))
    return True
Example #24
def run(valid):
    """ run for this valid time! """
    if not upstream_has_data(valid):
        return
    gribfn = valid.strftime(("/mesonet/ARCHIVE/data/%Y/%m/%d/model/hrrr/%H/"
                             "hrrr.t%Hz.refd.grib2"))
    if os.path.isfile(gribfn):
        # See how many grib messages we have
        try:
            grbs = pygrib.open(gribfn)
            if grbs.messages == (18 * 4 + (HOURS[valid.hour] - 18) + 1):
                # print("%s is complete!" % (gribfn, ))
                return
            del grbs
        except Exception as exp:
            logging.debug(exp)
    tmpfn = "/tmp/%s.grib2" % (valid.strftime("%Y%m%d%H"), )
    output = open(tmpfn, "wb")
    for hr in range(0, min([39, HOURS[valid.hour]]) + 1):
        shr = "%02i" % (hr, )
        if hr <= 18:
            uri = valid.strftime(
                (BASE + "hrrr.%Y%m%d/conus/hrrr.t%Hz.wrfsubhf" + shr +
                 ".grib2.idx"))
        else:
            uri = valid.strftime(
                (BASE + "hrrr.%Y%m%d/conus/hrrr.t%Hz.wrfsfcf" + shr +
                 ".grib2.idx"))
        req = exponential_backoff(requests.get, uri, timeout=30)
        if req is None or req.status_code != 200:
            print("dl_hrrrref failed to fetch %s" % (uri, ))
            print("ABORT")
            return
        data = req.text

        offsets = []
        neednext = False
        for line in data.split("\n"):
            if line.strip() == "":
                continue
            tokens = line.split(":")
            if neednext:
                offsets[-1].append(int(tokens[1]))
                neednext = False
            if tokens[3] == "REFD" and tokens[4] == "1000 m above ground":
                offsets.append([int(tokens[1])])
                neednext = True

        if hr > 0 and hr < 19 and len(offsets) != 4:
            print(("dl_hrrrref[%s] hr: %s offsets: %s") %
                  (valid.strftime("%Y%m%d%H"), hr, offsets))
        for pr in offsets:
            headers = {"Range": "bytes=%s-%s" % (pr[0], pr[1])}
            req = exponential_backoff(requests.get, uri[:-4], headers=headers)
            if req is None:
                print("dl_hrrrref FAIL %s %s" % (uri[:-4], headers))
                continue
            output.write(req.content)

    output.close()
    # insert into LDM Please
    pqstr = ("data a %s bogus "
             "model/hrrr/%02i/hrrr.t%02iz.refd.grib2 grib2") % (
                 valid.strftime("%Y%m%d%H%M"), valid.hour, valid.hour)
    subprocess.call(
        "/home/ldm/bin/pqinsert -p '%s' %s" % (pqstr, tmpfn),
        shell=True,
        stderr=subprocess.PIPE,
        stdout=subprocess.PIPE,
    )
    os.unlink(tmpfn)
Example #25
 def drop_last_column(self):
     self.cols = self.cols - 1
     self.entry.col_count.text = "%s" % (self.cols,)
     self.entry = exponential_backoff(self.spr_client.update, self.entry)
     self.cell_feed = None
Example #26
    "impassable": 86,
}

lookup = {}
current = {}
cursor.execute(
    """
    select c.segid, b.longname, c.cond_code, b.idot_id from
    roads_current c JOIN roads_base b on (c.segid = b.segid)
    """
)
for row in cursor:
    lookup[row[3]] = row[0]
    current[row[0]] = row[2]

r = exponential_backoff(requests.get, URI, timeout=30)
if r is None:
    sys.exit()
j = r.json()

if "layers" not in j:
    print(
        (
            "ingest_roads_rest got invalid RESULT:\n%s"
            % (json.dumps(j, sort_keys=True, indent=4, separators=(",", ": ")))
        )
    )
    sys.exit()

featureset = j["layers"][0]["featureSet"]
dirty = False
Example #27
def main():
    """Go something greatish"""
    pgconn = psycopg2.connect(database='postgis', host='iemdb')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    lookup = {}
    current = {}
    cursor.execute("""
        select c.segid, b.longname, c.cond_code, b.idot_id from
        roads_current c JOIN roads_base b on (c.segid = b.segid)
        """)
    for row in cursor:
        lookup[row[3]] = row[0]
        current[row[0]] = row[2]

    req = exponential_backoff(requests.get, URI, timeout=30)
    if req is None:
        sys.exit()
    jobj = req.json()

    if 'layers' not in jobj:
        print(('ingest_roads_rest got invalid RESULT:\n%s'
               ) % (json.dumps(jobj, sort_keys=True, indent=4,
                               separators=(',', ': '))
                    ))
        sys.exit()

    featureset = jobj['layers'][0]['featureSet']
    dirty = False
    for feat in featureset['features']:
        props = feat['attributes']
        segid = lookup.get(props['SEGMENT_ID'])
        if segid is None:
            print(("ingest_roads_rest unknown longname '%s' segment_id '%s'"
                   ) % (props['LONGNAME'], props['SEGMENT_ID']))
            continue
        raw = props['HL_PAVEMENT_CONDITION']
        if raw is None:
            continue
        cond = ROADCOND.get(raw)
        if cond is None:
            print(("ingest_roads_reset longname '%s' has unknown cond '%s'\n%s"
                   ) % (props['LONGNAME'],
                        props['HL_PAVEMENT_CONDITION'],
                        json.dumps(props, sort_keys=True,
                                   indent=4, separators=(',', ': '))))
            continue
        # Timestamps appear to be UTC now
        if props['CARS_MSG_UPDATE_DATE'] is not None:
            # print(json.dumps(feat, indent=4))
            valid = datetime.datetime(1970, 1, 1) + datetime.timedelta(
                seconds=props['CARS_MSG_UPDATE_DATE']/1000.)
        else:
            valid = datetime.datetime.utcnow()
        # Save to log, if difference
        if cond != current[segid]:
            cursor.execute("""
                INSERT into roads_2017_2018_log(segid, valid, cond_code, raw)
                VALUES (%s, %s, %s, %s)
            """, (segid, valid.strftime("%Y-%m-%d %H:%M+00"), cond, raw))
            dirty = True
        # Update currents
        cursor.execute("""
            UPDATE roads_current SET cond_code = %s, valid = %s,
            raw = %s WHERE segid = %s
        """, (cond, valid.strftime("%Y-%m-%d %H:%M+00"), raw, segid))

    # Force a run each morning at about 3 AM
    if (datetime.datetime.now().hour == 3 and
            datetime.datetime.now().minute < 10):
        dirty = True

    if dirty:
        export_shapefile(cursor, valid)

    cursor.close()
    pgconn.commit()
    pgconn.close()
Example #28
def main(valid):
    """Run for the given valid time!"""
    nt = NetworkTable("RAOB")
    dbconn = get_dbconn("postgis")
    # check what we have
    obs = read_sql(
        """
        SELECT station, count(*) from
        raob_flights f JOIN raob_profile_""" + str(valid.year) + """ p
        ON (f.fid = p.fid) where valid = %s GROUP by station
        ORDER by station ASC
    """,
        dbconn,
        params=(valid, ),
        index_col="station",
    )
    obs["added"] = 0
    v12 = valid - datetime.timedelta(hours=13)

    progress = tqdm(list(nt.sts.keys()), disable=not sys.stdout.isatty())
    for sid in progress:
        # skip virtual sites
        if sid.startswith("_"):
            continue
        if sid in obs.index and obs.at[sid, "count"] > 10:
            continue
        progress.set_description(sid)
        uri = ("https://rucsoundings.noaa.gov/get_raobs.cgi?data_source=RAOB;"
               "start_year=%s;start_month_name=%s;") % (valid.year,
                                                        valid.strftime("%b"))
        uri += ("start_mday=%s;start_hour=%s;start_min=0;n_hrs=12.0;") % (
            valid.day,
            valid.hour,
        )
        uri += "fcst_len=shortest;airport=%s;" % (sid, )
        uri += "text=Ascii%20text%20%28GSD%20format%29;"
        uri += ("hydrometeors=false&startSecs=%s&endSecs=%s") % (
            v12.strftime("%s"),
            valid.strftime("%s"),
        )

        req = exponential_backoff(requests.get, uri, timeout=30)
        if req is None or req.status_code != 200:
            LOG.info("dl failed %s for %s", sid, valid)
            continue
        cursor = dbconn.cursor()
        try:
            for rob in parse(req.content.decode("utf-8"), sid):
                if rob.valid == valid:
                    obs.at[sid, "added"] = len(rob.profile)
                rob.database_save(cursor)
        except Exception as exp:
            fn = "/tmp/%s_%s_fail" % (sid, valid.strftime("%Y%m%d%H%M"))
            LOG.info("FAIL %s %s %s, check %s for data", sid, valid, exp, fn)
            with open(fn, "w") as fh:
                fh.write(req.content)
        finally:
            cursor.close()
            dbconn.commit()
    LOG.debug("%s entered %s levels of data", valid, obs["added"].sum())
    df2 = obs[obs["count"] == 0]
    if len(df2.index) > 40:
        LOG.info("%s high missing count of %s", valid, len(df2.index))
Example #29
def workflow():
    ''' Do stuff '''
    valid = datetime.datetime.now()
    valid = valid.replace(tzinfo=pytz.timezone("America/Chicago"),
                          microsecond=0)
    req = requests.get(URI, timeout=30)
    if req.status_code != 200:
        return
    data = req.json()
    featureset = data['layers'][0].get('featureSet', dict())
    features = featureset.get('features', [])
    POSTGIS = psycopg2.connect(database='postgis', host='iemdb')
    cursor = POSTGIS.cursor()

    cursor.execute("""SELECT label, idnum from idot_dashcam_current""")
    current = {}
    for row in cursor:
        current[row[0]] = row[1]

    for feat in features:
        logdt = feat['attributes']['PHOTO_FILEDATE']
        if logdt is None:
            continue
        ts = datetime.datetime.utcfromtimestamp(logdt/1000.)
        valid = valid.replace(year=ts.year, month=ts.month, day=ts.day,
                              hour=ts.hour, minute=ts.minute,
                              second=ts.second)
        label = feat['attributes']['PHOTO_ANUMBER']
        idnum = feat['attributes']['PHOTO_UID']
        if idnum <= current.get(label, 0):
            continue

        utc = valid.astimezone(pytz.timezone("UTC"))
        # Go get the URL for saving!
        # print label, utc, feat['attributes']['PHOTO_URL']
        req = exponential_backoff(requests.get,
                                  feat['attributes']['PHOTO_URL'], timeout=15)
        if req is None or req.status_code != 200:
            print(('dot_truckcams.py dl fail |%s| %s'
                   ) % ('req is None' if req is None else req.status_code,
                        feat['attributes']['PHOTO_URL']))
            continue
        tmp = tempfile.NamedTemporaryFile(delete=False)
        tmp.write(req.content)
        tmp.close()
        cmd = ("/home/ldm/bin/pqinsert -p 'plot ac %s %s %s jpg' %s"
               ) % (utc.strftime("%Y%m%d%H%M"), get_current_fn(label),
                    get_archive_fn(label, utc), tmp.name)
        proc = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)
        _ = proc.stderr.read()
        # if output != "":
        #    print '-------------------------'
        #    print '  dot_truckcams.py pqinsert stderr result:'
        #    print output
        #    print 'label: %s timestamp: %s' % (label, utc)
        #    print 'URI: %s' % (feat['attributes']['PHOTO_URL'],)
        #    print '-------------------------\n'
        os.unlink(tmp.name)

        pt = P3857(feat['geometry']['x'], feat['geometry']['y'], inverse=True)
        geom = 'SRID=4326;POINT(%s %s)' % (pt[0], pt[1])
        cursor.execute("""
            INSERT into idot_dashcam_current(label, valid, idnum,
            geom) VALUES (%s, %s, %s, %s)
        """, (label, valid, idnum, geom))

    cursor.close()
    POSTGIS.commit()
    POSTGIS.close()
Example #30
def main():
    """ Go Main! """
    msg = MIMEMultipart("alternative")
    now = datetime.datetime.now()
    msg["Subject"] = "IEM Daily Bulletin for %s" % (
        now.strftime("%b %-d %Y"), )
    msg["From"] = "daryl herzmann <*****@*****.**>"
    if os.environ["USER"] == "akrherz":
        msg["To"] = "*****@*****.**"
    else:
        msg["To"] = "*****@*****.**"

    text = """Iowa Environmental Mesonet Daily Bulletin for %s\n\n""" % (
        now.strftime("%d %B %Y"), )
    html = """
    <h3>Iowa Environmental Mesonet Daily Bulletin for %s</h3>
    """ % (now.strftime("%d %B %Y"), )

    t, h = news()
    text += t
    html += h
    try:
        t, h = get_github_commits()
        text += t
        html += h
    except Exception as exp:
        print("get_github_commits failed with %s" % (exp, ))
        text += "\n(script failure fetching github activity\n"
        html += "<br />(script failure fetching github activity<br />"
    t, h = feature()
    text += t
    html += h
    t, h = wwa.run()
    text += t
    html += h
    t, h = cowreport()
    text += t
    html += h

    part1 = MIMEText(text, "plain")
    part2 = MIMEText(html, "html")
    msg.attach(part1)
    msg.attach(part2)

    exponential_backoff(send_email, msg)

    # Send forth LDM
    fp = open("tmp.txt", "w")
    fp.write(text)
    fp.close()
    subprocess.call(
        ('/home/ldm/bin/pqinsert -p "plot c 000000000000 '
         'iemdb.txt bogus txt" tmp.txt'),
        shell=True,
    )
    fp = open("tmp.txt", "w")
    fp.write(html)
    fp.close()
    subprocess.call(
        ('/home/ldm/bin/pqinsert -p "plot c 000000000000 '
         'iemdb.html bogus txt" tmp.txt'),
        shell=True,
    )
    os.unlink("tmp.txt")
Example #31
def get_github_commits():
    """ Get the recent day's worth of github code commits

    Returns:
      txt (str): text variant result
      html (str): html variant result
    """
    utcnow = datetime.datetime.utcnow()
    yesterday = utcnow - datetime.timedelta(hours=24)
    yesterday = yesterday.replace(hour=12, minute=0, second=0)
    iso = yesterday.strftime("%Y-%m-%dT%H:%M:%SZ")

    txt = ["> IEM Code Pushes <to branch> on Github\n"]
    html = ["<h3>IEM Code Pushes &lt;to branch&gt; on Github</h3>"]

    # get branches, master is first!
    branches = ["master"]
    req = exponential_backoff(requests.get, IEM_BRANCHES, timeout=30)
    for branch in req.json():
        if branch["name"] == "master":
            continue
        branches.append(branch["name"])

    hashes = []
    links = []
    for branch in branches:
        uri = ("https://api.github.com/repos/akrherz/iem/commits?since=%s&"
               "sha=%s") % (iso, branch)
        req2 = exponential_backoff(requests.get, uri, timeout=30)
        # commits are in reverse order
        for commit in req2.json()[::-1]:
            if commit["sha"] in hashes:
                continue
            hashes.append(commit["sha"])
            timestring = commit["commit"]["author"]["date"]
            utcvalid = datetime.datetime.strptime(timestring,
                                                  "%Y-%m-%dT%H:%M:%SZ")
            valid = utcvalid.replace(tzinfo=pytz.utc).astimezone(
                pytz.timezone("America/Chicago"))
            data = {
                "stamp":
                valid.strftime("%b %-d %-2I:%M %p"),
                "msg":
                commit["commit"]["message"],
                "htmlmsg":
                commit["commit"]["message"].replace("\n\n", "\n").replace(
                    "\n", "<br />\n"),
                "branch":
                branch,
                "url":
                commit["html_url"][:-20],  # chomp to make shorter
                "i":
                len(links) + 1,
            }
            links.append("[%(i)s] %(url)s" % data)
            txt.append(mywrap("  %(stamp)s[%(i)s] <%(branch)s> %(msg)s" %
                              data))
            html.append(('<li><a href="%(url)s">%(stamp)s</a> '
                         "&lt;%(branch)s&gt; %(htmlmsg)s</li>\n") % data)

    if len(txt) == 1:
        txt = txt[0] + "    No code commits found in previous 24 Hours"
        html = html[0] + ("<strong>No code commits found "
                          "in previous 24 Hours</strong>")
    else:
        txt = "\n".join(txt) + "\n\n" + "\n".join(links)
        html = html[0] + "<ul>" + "\n".join(html[1:]) + "</ul>"

    return txt + "\n\n", html + "<br /><br />"
Example #32
def main(valid):
    """Run for the given valid time!"""
    dbconn = get_dbconn("postgis")

    v12 = valid - datetime.timedelta(hours=13)

    for sid in NT.sts:
        # skip virtual sites
        if sid.startswith("_") or sid in ["KHKS"]:
            continue
        uri = ("https://rucsoundings.noaa.gov/get_raobs.cgi?data_source=RAOB;"
               "start_year=%s;start_month_name=%s;") % (valid.year,
                                                        valid.strftime("%b"))
        uri += ("start_mday=%s;start_hour=%s;start_min=0;n_hrs=12.0;") % (
            valid.day,
            valid.hour,
        )
        uri += "fcst_len=shortest;airport=%s;" % (sid, )
        uri += "text=Ascii%20text%20%28GSD%20format%29;"
        uri += ("hydrometeors=false&startSecs=%s&endSecs=%s") % (
            v12.strftime("%s"),
            valid.strftime("%s"),
        )

        cursor = dbconn.cursor()
        req = exponential_backoff(requests.get, uri, timeout=30)
        if req is None:
            print("ingest_from_rucsoundings failed %s for %s" % (sid, valid))
            continue
        try:
            for rob in parse(req.content.decode("utf-8"), sid):
                NT.sts[sid]["count"] = len(rob.profile)
                rob.database_save(cursor)
        except Exception as exp:
            print(("RAOB FAIL %s %s %s, check /tmp for data") %
                  (sid, valid, exp))
            output = open(
                "/tmp/%s_%s_fail" % (sid, valid.strftime("%Y%m%d%H%M")), "w")
            output.write(req.content)
            output.close()
        finally:
            cursor.close()
            dbconn.commit()

    # Loop thru and see which stations we were missing data from
    missing = []
    for sid in NT.sts:
        if NT.sts[sid]["online"]:
            if NT.sts[sid].get("count", 0) == 0:
                missing.append(sid)

    if len(missing) > 40:
        cursor = dbconn.cursor()
        for sid in missing:
            # Go find the last ob we have for the site
            cursor.execute(
                """SELECT max(valid) from raob_flights where
            station = %s""",
                (sid, ),
            )
            row = cursor.fetchone()
            if row[0] is None:
                print("RAOB dl station: %s has null max(valid)?" % (sid, ))
                continue
            lastts = row[0].astimezone(pytz.utc)
            print(("RAOB dl fail ts: %s sid: %s last: %s") % (
                valid.strftime("%Y-%m-%d %H"),
                sid,
                lastts.strftime("%Y-%m-%d %H"),
            ))

    dbconn.close()
Example #33
def fetch(valid, hr):
    """ Fetch the data for this timestamp
    """
    uri = valid.strftime(
        (
            "http://www.ftp.ncep.noaa.gov/data/nccf/"
            "com/nam/prod/nam.%Y%m%d/nam.t%Hz.conusnest."
            "hiresf0" + str(hr) + ".tm00.grib2.idx"
        )
    )
    req = exponential_backoff(requests.get, uri, timeout=30)
    if req is None or req.status_code != 200:
        print("download_hrrr failed to get idx\n%s" % (uri,))
        return

    offsets = []
    neednext = False
    for line in req.content.decode("utf-8").split("\n"):
        tokens = line.split(":")
        if len(tokens) < 3:
            continue
        if neednext:
            offsets[-1].append(int(tokens[1]))
            neednext = False
        if tokens[3] in ["ULWRF", "DSWRF"]:
            if tokens[4] == "surface" and tokens[5].find("ave fcst") > 0:
                offsets.append([int(tokens[1])])
                neednext = True
        # Save soil temp and water at surface, 10cm and 40cm
        if tokens[3] in ["TSOIL", "SOILW"]:
            if tokens[4] in [
                "0-0.1 m below ground",
                "0.1-0.4 m below ground",
                "0.4-1 m below ground",
            ]:
                offsets.append([int(tokens[1])])
                neednext = True

    outfn = valid.strftime(
        (
            "/mesonet/ARCHIVE/data/%Y/%m/%d/model/nam/"
            "%H/nam.t%Hz.conusnest.hiresf0" + str(hr) + ".tm00.grib2"
        )
    )
    outdir = os.path.dirname(outfn)
    if not os.path.isdir(outdir):
        os.makedirs(outdir)  # make sure LDM can then write to dir
        subprocess.call("chmod 775 %s" % (outdir,), shell=True)
    output = open(outfn, "ab", 0o664)

    if len(offsets) != 8:
        print(
            "download_nam warning, found %s gribs for %s[%s]"
            % (len(offsets), valid, hr)
        )
    for pr in offsets:
        headers = {"Range": "bytes=%s-%s" % (pr[0], pr[1])}
        req = exponential_backoff(
            requests.get, uri[:-4], headers=headers, timeout=30
        )
        if req is None:
            print("download_nam.py failure for uri: %s" % (uri,))
        else:
            output.write(req.content)

    output.close()
Example #34
 def refetch_feed(self):
     self.entry = exponential_backoff(self.spr_client.get_worksheet,
                                      self.spread_id, self.id)
     self.set_metadata()
Example #35
 def expand_cols(self, amount=1):
     """ Expand this sheet by the number of columns desired"""
     self.cols = self.cols + amount
     self.entry.col_count.text = "%s" % (self.cols,)
     self.entry = exponential_backoff(self.spr_client.update, self.entry)
     self.cell_feed = None
Example #36
def workflow():
    """ Do stuff """
    postgis = get_dbconn("postgis")
    cursor = postgis.cursor()

    current = {}
    cursor.execute("""SELECT label, valid from idot_snowplow_current""")
    for row in cursor:
        current[row[0]] = row[1]

    req = exponential_backoff(requests.get, URI, timeout=30)
    if req is None:
        return
    if req.status_code != 200:
        LOG.info(
            ("dot_plows got non-200 status_code: %s\n"
             "Content: %s"),
            req.status_code,
            req.content,
        )
        return
    data = json.loads(req.content)
    for feat in data.get("features", []):
        logdt = feat["attributes"]["MODIFIEDDT"]
        label = feat["attributes"]["LABEL"]
        if logdt is None:
            continue
        # Unsure why I do it this way, but alas
        ts = datetime.datetime.utcfromtimestamp(logdt / 1000.0)
        valid = utc(ts.year, ts.month, ts.day, ts.hour, ts.minute, ts.second)
        if valid > CEILING:
            # print(json.dumps(feat, sort_keys=True,
            #                 indent=4, separators=(',', ': ')))
            continue
        if len(label) > 20:
            LOG.info("Invalid dot_plow feed label of %s", repr(label))
            continue

        if current.get(label) is None:
            # allows subsequent data insert to work
            current[label] = valid - datetime.timedelta(minutes=1)
            cursor.execute(
                """
                INSERT into idot_snowplow_current(label, valid) VALUES (%s,%s)
            """,
                (label, valid),
            )
        LOG.debug(
            "logdt: %s label: %s, valid: %s current: %s",
            logdt,
            label,
            valid,
            current[label],
        )
        if current[label] >= valid:
            LOG.debug("    label: %s skipped as old", label)
            continue
        cursor.execute(
            """
            UPDATE idot_snowplow_current
            SET
            valid = %s,
            heading = %s,
            velocity = %s,
            roadtemp = %s,
            airtemp = %s,
            solidmaterial = %s,
            liquidmaterial = %s,
            prewetmaterial = %s,
            solidsetrate = %s,
            liquidsetrate = %s,
            prewetsetrate = %s,
            leftwingplowstate = %s,
            rightwingplowstate = %s,
            frontplowstate = %s,
            underbellyplowstate = %s,
            solid_spread_code = %s,
            road_temp_code = %s,
            geom = 'SRID=4326;POINT(%s %s)'
            WHERE label = %s
        """,
            (
                valid,
                feat["attributes"]["HEADING"],
                feat["attributes"]["VELOCITY"],
                feat["attributes"]["ROADTEMP"],
                feat["attributes"]["AIRTEMP"],
                feat["attributes"]["SOLIDMATERIAL"],
                feat["attributes"]["LIQUIDMATERIAL"],
                feat["attributes"]["PREWETMATERIAL"],
                feat["attributes"]["SOLIDSETRATE"],
                feat["attributes"]["LIQUIDSETRATE"],
                feat["attributes"]["PREWETSETRATE"],
                feat["attributes"]["LEFTWINGPLOWSTATE"],
                feat["attributes"]["RIGHTWINGPLOWSTATE"],
                feat["attributes"]["FRONTPLOWSTATE"],
                feat["attributes"]["UNDERBELLYPLOWSTATE"],
                None,  # SOIL_SPREAD_CODE
                None,  # ROAD_TEMP_CODE,
                feat["attributes"]["XPOSITION"],
                feat["attributes"]["YPOSITION"],
                label,
            ),
        )
        # Archive it too
        cursor.execute(
            """
            INSERT into idot_snowplow_""" + str(valid.year) + """
            (label, valid, heading, velocity, roadtemp, airtemp, solidmaterial,
            liquidmaterial, prewetmaterial, solidsetrate, liquidsetrate,
            prewetsetrate, leftwingplowstate, rightwingplowstate,
            frontplowstate , underbellyplowstate, solid_spread_code,
            road_temp_code, geom)
             values
            (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s
            , %s, 'SRID=4326;POINT(%s %s)')
        """,
            (
                label,
                valid,
                feat["attributes"]["HEADING"],
                feat["attributes"]["VELOCITY"],
                feat["attributes"]["ROADTEMP"],
                feat["attributes"]["AIRTEMP"],
                feat["attributes"]["SOLIDMATERIAL"],
                feat["attributes"]["LIQUIDMATERIAL"],
                feat["attributes"]["PREWETMATERIAL"],
                feat["attributes"]["SOLIDSETRATE"],
                feat["attributes"]["LIQUIDSETRATE"],
                feat["attributes"]["PREWETSETRATE"],
                feat["attributes"]["LEFTWINGPLOWSTATE"],
                feat["attributes"]["RIGHTWINGPLOWSTATE"],
                feat["attributes"]["FRONTPLOWSTATE"],
                feat["attributes"]["UNDERBELLYPLOWSTATE"],
                None,
                None,
                feat["attributes"]["XPOSITION"],
                feat["attributes"]["YPOSITION"],
            ),
        )

    postgis.commit()
    postgis.close()