Example #1
def real_processor(text):
    """Process this product, please"""
    collect = metarcollect.parser(text, nwsli_provider=NWSLI_PROVIDER)
    if collect.warnings:
        common.email_error("\n".join(collect.warnings), collect.unixtext)
    jmsgs = collect.get_jabbers(("https://mesonet.agron.iastate.edu/ASOS/"
                                 "current.phtml?network="))
    if not MANUAL:
        for jmsg in jmsgs:
            if jmsg[0] in JABBER_MESSAGES:
                continue
            JABBER_MESSAGES.append(jmsg[0])
            JABBER.send_message(*jmsg)
    for mtr in collect.metars:
        if mtr.network is None:
            log.msg(("station: '%s' is unknown to metadata table") %
                    (mtr.station_id, ))
            deffer = ASOSDB.runOperation(
                """
            INSERT into unknown(id) values (%s)
            """,
                (mtr.station_id, ),
            )
            deffer.addErrback(common.email_error, text)
            continue
        deffer = IEMDB.runInteraction(do_db, mtr)
        deffer.addErrback(common.email_error, collect.unixtext)
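
A side note on the JABBER_MESSAGES deduplication above: appending every relayed message to a plain list grows without bound in a long-running daemon. A minimal sketch of a bounded alternative, assuming only recent duplicates matter; the maxlen and the seen_before helper are illustrative, not from the source:

from collections import deque

# Hypothetical bounded replacement for the JABBER_MESSAGES list; membership
# tests work the same, but the oldest entries fall off automatically.
JABBER_MESSAGES = deque(maxlen=1000)


def seen_before(msgid):
    """Return True if msgid was already relayed, else remember it."""
    if msgid in JABBER_MESSAGES:
        return True
    JABBER_MESSAGES.append(msgid)
    return False
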
Example #2
def real_processor(txn, text):
    """ Lets actually process! """
    prod = lsrparser(text)

    for lsr in prod.lsrs:
        if lsr.typetext.upper() not in reference.lsr_events:
            errmsg = "Unknown LSR typecode '%s'" % (lsr.typetext,)
            common.email_error(errmsg, text)
        uniquekey = hash(lsr.text)
        if uniquekey in LSRDB:
            prod.duplicates += 1
            lsr.duplicate = True
            continue
        LSRDB[uniquekey] = datetime.datetime.utcnow().replace(
            tzinfo=pytz.utc)
        lsr.sql(txn)

    j = prod.get_jabbers(common.SETTINGS.get('pywwa_lsr_url', 'pywwa_lsr_url'))
    for i, (p, h, x) in enumerate(j):
        # delay some to perhaps stop triggering SPAM lock outs at twitter
        reactor.callLater(i, JABBER.send_message, p, h, x)

    if prod.warnings:
        common.email_error("\n\n".join(prod.warnings), text)
    elif not prod.lsrs:
        raise Exception("No LSRs parsed!", text)
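
The LSRDB cache above stores a UTC timestamp per unique LSR, which suggests entries are meant to age out. A small pruning sketch under that assumption; prune_lsrdb and the one-hour window are hypothetical:

import datetime

import pytz


def prune_lsrdb(lsrdb, max_age=datetime.timedelta(hours=1)):
    """Drop cached LSR entries older than max_age."""
    utcnow = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
    for key in list(lsrdb):
        if (utcnow - lsrdb[key]) > max_age:
            del lsrdb[key]
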
Example #3
def real_parser(buf):
    """ Actually do something with the buffer, please """
    if buf.strip() == "":
        return
    utcnow = datetime.datetime.utcnow()
    utcnow = utcnow.replace(tzinfo=pytz.timezone("UTC"))
    
    nws = product.TextProduct(buf)

    if (utcnow - nws.valid).days > 180 or (utcnow - nws.valid).days < -180:
        common.email_error("Very Latent Product! %s" % (nws.valid,), nws.text)
        return
    
    if nws.valid.month > 6:
        table = "products_%s_0712" % (nws.valid.year,)
    else:
        table = "products_%s_0106" % (nws.valid.year,)
    
    if nws.afos is None:
        raise ParseError("TextProduct.afos is null")
    
    
    df = DBPOOL.runOperation(
        """INSERT into """ + table + """(pil, data, entered, source, wmo)
        VALUES(%s,%s,%s,%s,%s)""",
        (nws.afos.strip(), nws.text, nws.valid, nws.source, nws.wmo))
    df.addErrback(common.email_error, buf)
    df.addErrback(log.err)
Example #4
def really_process_data(buf):
    """ Actually do some processing """
    gmtnow = datetime.datetime.utcnow()
    gmtnow = gmtnow.replace(tzinfo=pytz.utc)

    # Make sure we have a trailing $$, if not report error and slap one on
    if buf.find("$$") == -1:
        common.email_error("No $$ Found!", buf)
        buf += "\n\n$$\n\n"

    # Create our TextProduct instance
    text_product = vtecparser(buf,
                              utcnow=gmtnow,
                              ugc_provider=ugc_dict,
                              nwsli_provider=nwsli_dict)
    # Don't parse these as they contain duplicated information
    if text_product.source == "KNHC" and text_product.afos[:3] == "TCV":
        return
    # Skip spanish products
    if text_product.source == "TJSJ" and text_product.afos[3:] == "SPN":
        return

    df = PGCONN.runInteraction(text_product.sql)
    df.addCallback(step2, text_product)
    df.addErrback(common.email_error, text_product.unixtext)
    df.addErrback(log.err)
Example #5
 def process_data(self, data):
     """Callback when we have data to process"""
     try:
         # BUG make sure we are okay here after we resolve pyLDM str issues
         real_processor(data)
     except Exception as exp:
         common.email_error(exp, data, -1)
Example #6
def process_dsm(data):
    m = PARSER_RE.match(data)
    if m is None:
        log.msg("FAIL ||%s||" % (data,))
        common.email_error("DSM RE Match Failure", data)
        return
    d = m.groupdict()
    # Only parse United States Sites
    if d['id'][0] != "K":
        return
    # Figure out the timestamp
    now = datetime.datetime.now()
    ts = now.replace(day=int(d['day']), month=int(d['month']))
    if ts.month == 12 and now.month == 1:
        # The DSM is from December of the previous year
        ts = ts.replace(year=now.year - 1)
    updater = []
    if d['high'] is not None and d['high'] != "M":
        updater.append("max_tmpf = %s" % (d['high'],))
    if d['low'] is not None and d['low'] != "M":
        updater.append("min_tmpf = %s" % (d['low'],))

    if d['precip'] is not None and d['precip'] not in ["M", "T"]:
        updater.append("pday = %s" % (float(d['precip']) / 100.0,))
    elif d['precip'] == "T":
        updater.append("pday = 0.0001")

    sql = """UPDATE summary_%s s SET %s FROM stations t WHERE t.iemid = s.iemid
    and t.id = '%s' and day = '%s'""" % (
        ts.year, " , ".join(updater), d['id'][1:], ts.strftime("%Y-%m-%d"))
    log.msg("%s %s H:%s L:%s P:%s" % (d['id'], ts.strftime("%Y-%m-%d"),
                                      d['high'], d['low'], d['precip']))
    if updater:
        defer = DBPOOL.runOperation(sql)
        defer.addErrback(common.email_error, sql)
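
The UPDATE above interpolates observed values directly into the SQL string. Here is a sketch of assembling the same statement with bound parameters; build_dsm_update is a hypothetical helper, only fixed column fragments are formatted in, and the values travel as bind arguments:

def build_dsm_update(d, ts):
    """Assemble a parameterized UPDATE from the DSM regex groupdict."""
    updater, args = [], []
    if d['high'] is not None and d['high'] != "M":
        updater.append("max_tmpf = %s")
        args.append(float(d['high']))
    if d['low'] is not None and d['low'] != "M":
        updater.append("min_tmpf = %s")
        args.append(float(d['low']))
    if d['precip'] == "T":
        updater.append("pday = %s")
        args.append(0.0001)
    elif d['precip'] is not None and d['precip'] != "M":
        updater.append("pday = %s")
        args.append(float(d['precip']) / 100.0)
    if not updater:
        return None, None
    # %%s survives the formatting pass as a %s bind placeholder
    sql = ("UPDATE summary_%s s SET %s FROM stations t WHERE "
           "t.iemid = s.iemid and t.id = %%s and day = %%s") % (
               ts.year, ", ".join(updater))
    return sql, args + [d['id'][1:], ts.strftime("%Y-%m-%d")]

Usage would mirror the original: sql, args = build_dsm_update(d, ts), then DBPOOL.runOperation(sql, args) with the same errback.
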
Example #7
def real_parser(txn, data):
    """Please process some data"""
    prod = parser(data)
    prod.tzlocalize(STATIONS)
    prod.sql(txn)
    if prod.warnings:
        common.email_error("\n".join(prod.warnings), data)
Example #8
def real_processor(txn, text):
    """ Lets actually process! """
    prod = lsrparser(text)

    for lsr in prod.lsrs:
        if lsr.typetext.upper() not in reference.lsr_events:
            errmsg = "Unknown LSR typecode '%s'" % (lsr.typetext, )
            common.email_error(errmsg, text)
        uniquekey = hash(lsr.text)
        if uniquekey in LSRDB:
            prod.duplicates += 1
            lsr.duplicate = True
            continue
        LSRDB[uniquekey] = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
        lsr.sql(txn)

    j = prod.get_jabbers(common.SETTINGS.get("pywwa_lsr_url", "pywwa_lsr_url"))
    for i, (p, h, x) in enumerate(j):
        # delay some to perhaps stop triggering SPAM lock outs at twitter
        reactor.callLater(i, JABBER.send_message, p, h, x)

    if prod.warnings:
        common.email_error("\n\n".join(prod.warnings), text)
    elif not prod.lsrs:
        raise Exception("No LSRs parsed!", text)
Example #9
def really_process_data(txn, buf):
    ''' Actually do some processing '''
    utcnow = datetime.datetime.utcnow()
    utcnow = utcnow.replace(tzinfo=pytz.timezone("UTC"))

    # Create our TextProduct instance
    prod = productparser(buf, utcnow=utcnow, ugc_provider=ugc_dict,
                         nwsli_provider=nwsli_dict)

    # Do the Jabber work necessary after the database stuff has completed
    for (plain, html, xtra) in prod.get_jabbers(
            common.settings.get('pywwa_product_url', 'pywwa_product_url')):
        if xtra.get('channels', '') == '':
            common.email_error("xtra[channels] is empty!", buf)
        if not MANUAL:
            jabber.sendMessage(plain, html, xtra)

    if DB_ON:
        # Insert into database
        product_id = prod.get_product_id()
        sqlraw = buf.replace("\015\015\012", "\n").replace("\000", "").strip()
        sql = """
        INSERT into text_products(product, product_id) values (%s,%s)
        """
        myargs = (sqlraw, product_id)
        if prod.segments and prod.segments[0].sbw:
            giswkt = 'SRID=4326;%s' % (
                MultiPolygon([prod.segments[0].sbw]).wkt, )
            sql = """
                INSERT into text_products(product, product_id, geom)
                values (%s,%s,%s)
            """
            myargs = (sqlraw, product_id, giswkt)
        txn.execute(sql, myargs)
Example #10
def real_parser(txn, data):
    """Please process some data"""
    prod = parser(data)
    prod.tzlocalize(STATIONS)
    prod.sql(txn)
    if prod.warnings:
        common.email_error("\n".join(prod.warnings), data)
Example #11
 def process_data(self, data):
     """Callback when we have data to process"""
     try:
         # BUG make sure we are okay here after we resolve pyLDM str issues
         real_processor(data)
     except Exception as exp:
         common.email_error(exp, data, -1)
Example #12
def real_processor(text):
    """Process this product, please"""
    collect = metarcollect.parser(text, nwsli_provider=NWSLI_PROVIDER)
    if collect.warnings:
        common.email_error("\n".join(collect.warnings), collect.unixtext)
    jmsgs = collect.get_jabbers(
        "https://mesonet.agron.iastate.edu/ASOS/current.phtml?network="
    )
    if not MANUAL:
        for jmsg in jmsgs:
            if jmsg[0] in JABBER_MESSAGES:
                continue
            # Hacky here, but get the METAR.XXXX channel to find which site
            # this is.
            skip = False
            channels = jmsg[2].get("channels", [])
            for channel in channels.split(","):
                if channel.startswith("METAR."):
                    if channel.split(".")[1] in IGNORELIST:
                        log.msg(f"IGNORELIST Jabber relay of {jmsg[0]}")
                        skip = True
            JABBER_MESSAGES.append(jmsg[0])
            if not skip:
                JABBER.send_message(*jmsg)
    for mtr in collect.metars:
        if mtr.network is None:
            log.msg("station: '{mtr.station_id}' is unknown to metadata table")
            deffer = ASOSDB.runOperation(
                "INSERT into unknown(id) values (%s)", (mtr.station_id,)
            )
            deffer.addErrback(common.email_error, text)
            continue
        deffer = IEMDB.runInteraction(do_db, mtr)
        deffer.addErrback(common.email_error, collect.unixtext)
Example #13
    def process_data(self, buf):
        """ Process the product """
        try:
            # Make sure we have a trailing $$
            if buf.find("$$") == -1:
                common.email_error("No $$ Found!", buf)
                buf += "\n\n$$\n\n"
            text_product = product.TextProduct(buf)
            xtra = {
                    'product_id': text_product.get_product_id(),
                    }
            skip_con = False
            if (text_product.afos[:3] == "FLS" and 
                len(text_product.segments) > 4):
                skip_con = True

            log.msg(str(text_product))
            for j in range(len(text_product.segments)):
                df = POSTGIS.runInteraction(segment_processor, text_product,
                                            j, skip_con)
                df.addErrback(common.email_error, text_product.unixtext)
                df.addErrback(log.err)
                
            if skip_con:
                wfo = text_product.source[1:]
                jabber_txt = ("%s: %s has sent an updated FLS product "
                              "(continued products were not reported here). "
                              "Consult this website for more details. "
                              "%s?wfo=%s") % (
                                  wfo, wfo, config.get('urls', 'riverapp'),
                                  wfo)
                jabber_html = ("%s has sent an updated FLS product "
                               "(continued products were not reported here). "
                               "Consult <a href=\"%s?wfo=%s\">this website</a>"
                               " for more details.") % (
                                   wfo, config.get('urls', 'riverapp'), wfo)
                jabber.sendMessage(jabber_txt, jabber_html, xtra)
                twt = "Updated Flood Statement"
                uri = "%s?wfo=%s" % (config.get('urls', 'riverapp'), wfo)
                common.tweet([wfo,], twt, uri)

        except Exception as myexp:
            common.email_error(myexp, buf)
Example #14
 def process_data(self, data):
     """
     Actual ingestor
     """
     try:
         real_process(data)
     except Exception as myexp:
         common.email_error(myexp, data)
Example #15
 def process_data(self, buf):
     """ Process the product """
     try:
         prod = parser(buf, nwsli_provider=LOCS)
         # prod.draw()
     except Exception as myexp:
         common.email_error(myexp, buf)
         return
Example #16
 def process_data(self, buf):
     """
     Actual ingestor
     """
     try:
         real_process(buf)
     except Exception as myexp:
         common.email_error(myexp, buf)
Example #17
def real_process(initial_raw):
    """ The real processor of the raw data, fun! """
    raw = initial_raw + "\015\012"
    raw = raw.replace("\015\015\012", "___").replace("\x1e", "")
    sections = re.findall(r"([A-Z0-9]{4}\s+... MOS GUIDANCE .*?)______", raw)
    # map() is lazy under Python 3, so iterate explicitly so that
    # section_parser actually runs
    for sect in sections:
        section_parser(sect)
    if len(sections) == 0:
        common.email_error("FAILED REGEX", initial_raw)
Example #18
def real_parser(txn, buf):
    """
    I'm gonna do the heavy lifting here
    """
    prod = hmlparser(buf)
    prod.sql(txn)
    if len(prod.warnings) > 0:
        common.email_error("\n".join(prod.warnings), buf)
Example #19
 def process_data(self, data):
     """
     Actual ingestor
     """
     try:
         real_process(data)
     except Exception as myexp:
         common.email_error(myexp, data)
Example #20
 def process_data(self, data):
     """
     Actual ingestor
     """
     self.prods += 1
     try:
         real_process(data)
     except Exception as exp:
         common.email_error(exp, data)
Example #21
def real_parser(txn, buf):
    """callback func"""
    ffg = parser(buf)
    if ffg.afos == "FFGMPD":
        return
    ffg.sql(txn)
    if ffg.warnings and ffg.warnings[0].find("termination") == -1:
        common.email_error("\n".join(ffg.warnings), buf)
    sz = 0 if ffg.data is None else len(ffg.data.index)
    log.msg("FFG found %s entries for product %s" % (sz, ffg.get_product_id()))
Example #22
def real_parser(txn, buf):
    """callback func"""
    ffg = parser(buf)
    if ffg.afos == 'FFGMPD':
        return
    ffg.sql(txn)
    if ffg.warnings and ffg.warnings[0].find("termination") == -1:
        common.email_error("\n".join(ffg.warnings), buf)
    sz = 0 if ffg.data is None else len(ffg.data.index)
    log.msg("FFG found %s entries for product %s" % (sz,
                                                     ffg.get_product_id()))
Example #23
def step2(dummy, text_product):
    ''' After the SQL is done, let's do other things '''
    if len(text_product.warnings) > 0:
        common.email_error("\n\n".join(text_product.warnings),
                           text_product.text)

    # Do the Jabber work necessary after the database stuff has completed
    for (plain, html, xtra) in text_product.get_jabbers(
            common.settings.get('pywwa_vtec_url', 'pywwa_vtec_url'),
            common.settings.get('pywwa_river_url', 'pywwa_river_url')):
        if xtra.get('channels', '') == '':
            common.email_error("xtra[channels] is empty!", text_product.text)
        if not MANUAL:
            jabber.sendMessage(plain, html, xtra)
Example #24
def step2(_dummy, text_product):
    """After the SQL is done, lets do other things"""
    if text_product.warnings:
        common.email_error("\n\n".join(text_product.warnings),
                           text_product.text)

    # Do the Jabber work necessary after the database stuff has completed
    for (plain, html, xtra) in text_product.get_jabbers(
            common.SETTINGS.get("pywwa_vtec_url", "pywwa_vtec_url"),
            common.SETTINGS.get("pywwa_river_url", "pywwa_river_url"),
    ):
        if xtra.get("channels", "") == "":
            common.email_error("xtra[channels] is empty!", text_product.text)
        if not MANUAL:
            send_jabber_message(plain, html, xtra)
Example #25
def async_func(data):
    """spawn a process with a deferred given the inbound data product"""
    defer = Deferred()
    try:
        tp = product.TextProduct(data, parse_segments=False)
    except Exception as exp:
        common.email_error(exp, data)
        return
    prod = TEXTPRODUCT(product_id=tp.get_product_id(),
                       afos=tp.afos,
                       text=tp.text)
    proc = SHEFIT(prod)
    proc.deferred = defer
    proc.deferred.addErrback(log.err)

    reactor.spawnProcess(proc, "./shefit", ["shefit"], {})
    return proc.deferred
Example #26
def real_parser(txn, buf):
    """
    I'm gonna do the heavy lifting here
    """
    prod = pirepparser(buf, nwsli_provider=LOCS)
    prod.assign_cwsu(txn)
    for report in prod.reports:
        if report.text in PIREPS:
            report.is_duplicate = True
        PIREPS[report.text] = datetime.datetime.utcnow()

    j = prod.get_jabbers("unused")
    if prod.warnings:
        common.email_error("\n".join(prod.warnings), buf)
    for msg in j:
        JABBER.send_message(msg[0], msg[1], msg[2])

    prod.sql(txn)
Example #27
def real_parser(txn, buf):
    """
    I'm gonna do the heavy lifting here
    """
    prod = pirepparser(buf, nwsli_provider=LOCS)
    prod.assign_cwsu(txn)
    for report in prod.reports:
        if report.text in PIREPS:
            report.is_duplicate = True
        PIREPS[report.text] = datetime.datetime.utcnow()

    j = prod.get_jabbers()
    if prod.warnings:
        common.email_error("\n".join(prod.warnings), buf)
    for msg in j:
        JABBER.send_message(msg[0], msg[1], msg[2])

    prod.sql(txn)
Example #28
def async_func(data):
    """spawn a process with a deferred given the inbound data product
    """
    defer = Deferred()
    try:
        tp = product.TextProduct(data, parse_segments=False)
    except Exception as exp:
        common.email_error(exp, data)
        return
    prod = TEXTPRODUCT(product_id=tp.get_product_id(),
                       afos=tp.afos,
                       text=tp.text)
    proc = SHEFIT(prod)
    proc.deferred = defer
    proc.deferred.addErrback(log.err)

    reactor.spawnProcess(proc, "shefit", ["shefit"], {})
    return proc.deferred
Example #29
def really_process_data(buf):
    ''' Actually do some processing '''
    gmtnow = datetime.datetime.utcnow()
    gmtnow = gmtnow.replace(tzinfo=pytz.timezone("UTC"))

    # Make sure we have a trailing $$, if not report error and slap one on
    if buf.find("$$") == -1:
        common.email_error("No $$ Found!", buf)
        buf += "\n\n$$\n\n"

    # Create our TextProduct instance
    text_product = vtecparser(buf, utcnow=gmtnow, ugc_provider=ugc_dict,
                              nwsli_provider=nwsli_dict)
    # Skip spanish products
    if text_product.source == 'TJSJ' and text_product.afos[3:] == 'SPN':
        return

    df = PGCONN.runInteraction(text_product.sql)
    df.addCallback(step2, text_product)
    df.addErrback(common.email_error, text_product.unixtext)
    df.addErrback(log.err)
Example #30
def real_processor(text):
    """Process this product, please"""
    collect = metarcollect.parser(text, nwsli_provider=NWSLI_PROVIDER)
    if collect.warnings:
        common.email_error("\n".join(collect.warnings), collect.unixtext)
    jmsgs = collect.get_jabbers(("https://mesonet.agron.iastate.edu/ASOS/"
                                 "current.phtml?network="))
    if not MANUAL:
        for jmsg in jmsgs:
            JABBER.send_message(*jmsg)
    for mtr in collect.metars:
        if mtr.network is None:
            log.msg("station: '%s' is unknown to metadata table" %
                    (mtr.station_id,))
            deffer = ASOSDB.runOperation(
                "INSERT into unknown(id) values (%s)", (mtr.station_id,)
            )
            deffer.addErrback(common.email_error, text)
            continue
        deffer = IEMDB.runInteraction(do_db, mtr)
        deffer.addErrback(common.email_error, collect.unixtext)
Example #31
def real_processor(txn, text):
    """ Lets actually process! """
    prod = lsrparser(text)

    if len(prod.lsrs) == 0:
        raise Exception("No LSRs parsed!", text)

    for lsr in prod.lsrs:
        if lsr.typetext not in reference.lsr_events:
            errmsg = "Unknown LSR typecode '%s'" % (lsr.typetext,)
            common.email_error(errmsg, text)
        uniquekey = hash(lsr.text)
        if uniquekey in LSRDB:
            prod.duplicates += 1
            lsr.duplicate = True
            continue
        LSRDB[uniquekey] = datetime.datetime.utcnow().replace(
            tzinfo=pytz.timezone("UTC"))
        lsr.sql(txn)

    j = prod.get_jabbers(common.settings.get('pywwa_lsr_url', 'pywwa_lsr_url'))
    for (p, h, x) in j:
        JABBER.sendMessage(p, h, x)
Example #32
def really_process_data(txn, buf):
    """ Actually do some processing """
    utcnow = datetime.datetime.utcnow()
    utcnow = utcnow.replace(tzinfo=pytz.utc)

    # Create our TextProduct instance
    prod = productparser(buf,
                         utcnow=utcnow,
                         ugc_provider=ugc_dict,
                         nwsli_provider=nwsli_dict)

    # Do the Jabber work necessary after the database stuff has completed
    for (plain, html, xtra) in prod.get_jabbers(
            common.SETTINGS.get("pywwa_product_url", "pywwa_product_url")):
        if xtra.get("channels", "") == "":
            common.email_error("xtra[channels] is empty!", buf)
        if not MANUAL:
            jabber.send_message(plain, html, xtra)

    if DB_ON:
        # Insert into database
        product_id = prod.get_product_id()
        sqlraw = buf.replace("\015\015\012", "\n").replace("\000", "").strip()
        sql = """
        INSERT into text_products(product, product_id) values (%s,%s)
        """
        myargs = (sqlraw, product_id)
        if prod.segments and prod.segments[0].sbw:
            giswkt = ("SRID=4326;%s") % (MultiPolygon([prod.segments[0].sbw
                                                       ]).wkt, )
            sql = """
                INSERT into text_products(product, product_id, geom)
                values (%s,%s,%s)
            """
            myargs = (sqlraw, product_id, giswkt)
        txn.execute(sql, myargs)
Example #33
def realprocessor(txn, prod, data):
    """ Actually do the work """
    # Can't always use the AFOS as the station ID :(
    if len(prod.data) > 1:
        station = None
        for stid in NT.sts.keys():
            if NT.sts[stid]['name'].upper() == data['cli_station']:
                station = stid[1:]  # drop first char
                break
        if station is None:
            common.email_error(("Unknown CLI Station Text: |%s|"
                                ) % (data['cli_station'],), prod.unixtext)
            return
    else:
        station = prod.afos[3:]
    table = "summary_%s" % (data['cli_valid'].year,)
    txn.execute("""
        SELECT max_tmpf, min_tmpf, pday, pmonth, snow from """+table+""" d
        JOIN stations t on (t.iemid = d.iemid)
        WHERE d.day = %s and t.id = %s and t.network ~* 'ASOS'
        """, (data['cli_valid'], station))
    row = txn.fetchone()
    if row is None:
        print('No %s rows found for %s on %s' % (table, station,
                                                 data['cli_valid']))
        save_data(txn, prod, station, data)
        return
    updatesql = []
    logmsg = []
    if data['data'].get('temperature_maximum'):
        climax = data['data']['temperature_maximum']
        if int(climax) != row['max_tmpf']:
            updatesql.append(' max_tmpf = %s' % (climax,))
            logmsg.append('MaxT O:%s N:%s' % (row['max_tmpf'], climax))
    if data['data'].get('temperature_minimum'):
        climin = data['data']['temperature_minimum']
        if int(climin) != row['min_tmpf']:
            updatesql.append(' min_tmpf = %s' % (climin,))
            logmsg.append('MinT O:%s N:%s' % (row['min_tmpf'], climin))
    if data['data'].get('precip_month'):
        val = data['data']['precip_month']
        if val != row['pmonth']:
            updatesql.append(' pmonth = %s' % (val,))
            logmsg.append('PMonth O:%s N:%s' % (row['pmonth'], val))
    if data['data'].get('precip_today'):
        val = data['data']['precip_today']
        if val != row['pday']:
            updatesql.append(' pday = %s' % (val,))
            logmsg.append('PDay O:%s N:%s' % (row['pday'], val))

    if data['data'].get('snow_today'):
        val = data['data']['snow_today']
        if row['snow'] is None or val != row['snow']:
            updatesql.append(' snow = %s' % (val,))
            logmsg.append('Snow O:%s N:%s' % (row['snow'], val))

    if len(updatesql) > 0:
        txn.execute("""UPDATE """+table+""" d SET
        """ + ','.join(updatesql) + """
         FROM stations t WHERE t.iemid = d.iemid and d.day = %s and t.id = %s
         and t.network ~* 'ASOS' """, (data['cli_valid'], station))
        log.msg(("%s rows for %s (%s) %s"
                 ) % (txn.rowcount, station,
                      data['cli_valid'].strftime("%y%m%d"), ','.join(logmsg)))

    save_data(txn, prod, station, data)
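
Resolving the CLI station name above scans NT.sts linearly for every multi-station product. A sketch of a one-time reverse index under the same NT.sts layout; find_station and its module-level cache are hypothetical:

# Hypothetical reverse index: uppercase station name -> id minus first char.
_CLI_LOOKUP = {}


def find_station(nt, cli_station):
    """Resolve a CLI station name, returning None when unknown."""
    if not _CLI_LOOKUP:
        for stid, meta in nt.sts.items():
            _CLI_LOOKUP[meta['name'].upper()] = stid[1:]
    return _CLI_LOOKUP.get(cli_station)
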
Example #34
 def process_data(self, buf):
     """ Process the product """
     try:
         real_parser(buf)
     except Exception as myexp:
         common.email_error(myexp, buf)
Example #35
 def process_data(self, data):
     """Callback when we have data to process"""
     try:
         real_processor(data)
     except Exception as exp:
         common.email_error(exp, data, -1)
Example #36
def section_parser(sect):
    """ Actually process a data section, getting closer :) """
    metadata = re.findall(
        r"([A-Z0-9]{4})\s+(...) MOS GUIDANCE\s+([01]?[0-9])/([0-3][0-9])/"
        r"([0-9]{4})\s+([0-2][0-9]00) UTC", sect
    )
    (station, model, month, day, year, hhmm) = metadata[0]
    initts = datetime.datetime(int(year), int(month), int(day), int(hhmm[:2]))
    initts = initts.replace(tzinfo=pytz.timezone("UTC"))

    times = [initts]
    data = {}
    lines = sect.split("___")
    hrs = lines[2].split()
    for h in hrs[1:]:
        if h == "00":
            ts = times[-1] + datetime.timedelta(days=1)
            ts = ts.replace(hour=0)
        else:
            ts = times[-1].replace(hour=int(h))
        times.append(ts)
        data[ts] = {}

    for line in lines[3:]:
        if len(line) < 10:
            continue
        vname = line[:3].replace("/", "_")
        if vname == "X_N":
            vname = "N_X"
        vals = re.findall("(...)", line[4:])
        for i in range(len(vals)):
            if vname == "T06" and times[i + 1].hour in [0, 6, 12, 18]:
                data[times[i + 1]]["T06_1"] = vals[i - 1].replace("/", "").strip()
                data[times[i + 1]]["T06_2"] = vals[i].replace("/", "").strip()
            elif vname == "T06":
                pass
            elif vname == "T12" and times[i + 1].hour in [0, 12]:
                data[times[i + 1]]["T12_1"] = vals[i - 1].replace("/", "").strip()
                data[times[i + 1]]["T12_2"] = vals[i].replace("/", "").strip()
            elif vname == "T12":
                pass
            elif vname == "WDR":
                data[times[i + 1]][vname] = int(vals[i].strip()) * 10
            else:
                data[times[i + 1]][vname] = vals[i].strip()

    inserts = 0
    for ts in data.keys():
        if ts == initts:
            continue
        fst = """INSERT into t%s (station, model, runtime, ftime,
        """ % (
            initts.year,
        )
        sst = "VALUES(%s,%s,%s,%s,"
        args = [station, model, initts, ts]
        for vname in data[ts].keys():
            fst += " %s," % (vname,)
            sst += "%s,"
            args.append(make_null(data[ts][vname]))
        sql = fst[:-1] + ") " + sst[:-1] + ")"
        deffer = DBPOOL.runOperation(sql, args)
        deffer.addErrback(common.email_error, sect)
        inserts += 1
    # Simple debugging
    if inserts == 0:
        common.email_error("No data found?", sect)
Example #37
 def process_data(self, buf):
     try:
         real_process(buf)
     except Exception as myexp:
         common.email_error(myexp, buf)
Example #38
 def process_data(self, data):
     """ Process the product """
     try:
         really_process_data(data)
     except Exception as myexp:  # pylint: disable=W0703
         common.email_error(myexp, data)
Example #39
 def test_send_email(self):
     ''' See if we could potentially spam myself; this will not actually
     do anything as we aren't running a twisted reactor '''
     self.assertTrue(common.email_error("MyException", "test_common.py"))
Example #40
def real_parser(txn, buf):
    """I'm gonna do the heavy lifting here"""
    prod = hmlparser(buf)
    prod.sql(txn)
    if prod.warnings:
        common.email_error("\n".join(prod.warnings), buf)
Example #41
 def process_data(self, raw):
     try:
         #raw = raw.replace("\015\015\012", "\n")
         real_process(raw)
     except Exception as exp:
         common.email_error(exp, raw)
Example #42
 def process_data(self, raw):
     try:
         real_process(raw)
     except Exception as exp:
         common.email_error(exp, raw)
Example #43
 def process_data(self, data):
     """ Process the product """
     try:
         really_process_data(data)
     except Exception as myexp:  # pylint: disable=W0703
         common.email_error(myexp, data)
Example #44
def error_wrapper(exp, buf):
    """Don't whine about known invalid products"""
    if buf.find("HWOBYZ") > -1:
        log.msg("Skipping Error for HWOBYZ")
        return
    common.email_error(exp, buf)
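
For context, a wrapper like this slots into the deferred chains used throughout these examples. A minimal wiring sketch, assuming an adbapi-style DBPOOL and a real_parser as in the neighboring examples; handle_product is hypothetical:

def handle_product(buf):
    """Parse one product; known-bad HWOBYZ failures are logged, not emailed."""
    df = DBPOOL.runInteraction(real_parser, buf)
    df.addErrback(error_wrapper, buf)
    df.addErrback(log.err)
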
Example #45
def really_process(prod, data):
    """
    This processes the output we get from the SHEFIT program
    """
    # Now we loop over the data we got :)
    # log.msg("\n"+data)
    mydata = {}
    for line in data.split("\n"):
        # Skip blank output lines or short lines
        if line.strip() == "" or len(line) < 90:
            continue
        # data is fixed, so we should parse it
        sid = line[:8].strip()
        if len(sid) > 8:
            log.msg("SiteID Len Error: [%s] %s" % (sid, prod.product_id))
            continue
        if sid not in mydata:
            mydata[sid] = {}
        modelruntime = make_datetime(line[31:41], line[42:50])
        if modelruntime is not None:
            # print("Skipping forecast data for %s" % (sid, ))
            continue
        tstamp = make_datetime(line[10:20], line[21:29])
        # We don't care about data in the future!
        utcnow = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
        if tstamp > (utcnow + datetime.timedelta(hours=1)):
            continue
        if tstamp < (utcnow - datetime.timedelta(days=60)):
            log.msg("Rejecting old data %s %s" % (sid, tstamp))
            continue
        s_data = mydata.setdefault(sid, dict())
        st_data = s_data.setdefault(tstamp, dict())

        varname = line[52:59].strip()
        value = line[60:73].strip()
        if value.find("****") > -1:
            log.msg("Bad Data from %s\n%s" % (prod.product_id, data))
            value = -9999.0
        else:
            value = float(value)
            # shefit generates 0.001 for trace, IEM uses something else
            if (value > 0.0009 and value < 0.0011 and varname[:2]
                    in ["PC", "PP", "QA", "QD", "QR", "QT", "SD", "SF", "SW"]):
                value = TRACE_VALUE
        # Handle variable time length data
        if varname[2] == "V":
            itime = line[87:91]
            if itime[0] == "2":
                varname = "%sDVD%s" % (varname, itime[-2:])
        # Handle 7.4.6 Paired Value ("Vector") Physical Elements
        if varname[:2] in [
                "HQ",
                "MD",
                "MN",
                "MS",
                "MV",
                "NO",
                "ST",
                "TB",
                "TE",
                "TV",
        ]:
            depth = int(value)
            if depth == 0:
                value = abs(value * 1000)
            else:
                value = abs((value * 1000) % (depth * 1000))
            if depth < 0:
                value = 0 - value
                depth = abs(depth)
            varname = "%s.%02i" % (varname, depth)
            if len(varname) > 10:
                if depth > 999:
                    log.msg(("Ignoring sid: %s varname: %s value: %s") %
                            (sid, varname, value))
                    continue
                common.email_error(
                    ("sid: %s varname: %s value: %s "
                     "is too large") % (sid, varname, value),
                    "%s\n%s" % (data, prod.text),
                )
            continue
        st_data[varname] = value
    # Now we process each station we found in the report! :)
    for sid in mydata:
        times = list(mydata[sid].keys())
        times.sort()
        for tstamp in times:
            process_site(prod, sid, tstamp, mydata[sid][tstamp])
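
The paired-value arithmetic above is dense, so here it is isolated as a worked sketch; decode_paired is a hypothetical name, but the body mirrors the logic line for line (the depth rides in the integer part, the reading in the fraction times 1000):

def decode_paired(value):
    """Split a shefit paired value into (depth, reading).

    decode_paired(4.0025) -> (4, 2.5), which yields the ".04" varname
    suffix via the "%s.%02i" formatting above.
    """
    depth = int(value)
    if depth == 0:
        reading = abs(value * 1000)
    else:
        reading = abs((value * 1000) % (depth * 1000))
    if depth < 0:
        reading = -reading
        depth = abs(depth)
    return depth, reading
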
Example #46
def save_data(txn, prod, station, data):
    """ Save atomic data to cli_data table """
    # Use four char here
    station = "%s%s" % (prod.source[0], station)

    if station not in NT.sts:
        common.email_error("Unknown CLI Station: %s" % (station,),
                           prod.unixtext)

    txn.execute("""
    SELECT product from cli_data where station = %s and valid = %s
    """, (station, data['cli_valid']))
    if txn.rowcount == 1:
        row = txn.fetchone()
        if prod.get_product_id() < row['product']:
            print(('Skip save of %s as previous %s row newer?'
                   ) % (prod.get_product_id(), row['product']))
            return
        txn.execute("""DELETE from cli_data WHERE station = %s and valid = %s
        """, (station, data['cli_valid']))

    txn.execute("""INSERT into cli_data(
        station, product, valid, high, high_normal, high_record,
        high_record_years, low, low_normal, low_record, low_record_years,
        precip, precip_month, precip_jan1, precip_jul1, precip_normal,
        precip_record,
        precip_record_years, precip_month_normal, snow, snow_month,
        snow_jun1, snow_jul1,
        snow_dec1, precip_dec1, precip_dec1_normal, precip_jan1_normal,
        high_time, low_time, snow_record_years, snow_record,
        snow_jun1_normal, snow_jul1_normal, snow_dec1_normal,
        snow_month_normal, precip_jun1, precip_jun1_normal)
        VALUES (
        %s, %s, %s, %s, %s, %s,
        %s, %s, %s, %s, %s,
        %s, %s, %s, %s, %s,
        %s,
        %s, %s, %s, %s,
        %s, %s,
        %s, %s, %s, %s,
        %s, %s, %s, %s,
        %s, %s, %s, %s, %s, %s
        )
    """, (station, prod.get_product_id(), data['cli_valid'],
          data['data'].get('temperature_maximum'),
          data['data'].get('temperature_maximum_normal'),
          data['data'].get('temperature_maximum_record'),
          data['data'].get('temperature_maximum_record_years', []),
          data['data'].get('temperature_minimum'),
          data['data'].get('temperature_minimum_normal'),
          data['data'].get('temperature_minimum_record'),
          data['data'].get('temperature_minimum_record_years', []),
          data['data'].get('precip_today'),
          data['data'].get('precip_month'),
          data['data'].get('precip_jan1'), data['data'].get('precip_jul1'),
          data['data'].get('precip_today_normal'),
          data['data'].get('precip_today_record'),
          data['data'].get('precip_today_record_years', []),
          data['data'].get('precip_month_normal'),
          data['data'].get('snow_today'), data['data'].get('snow_month'),
          data['data'].get('snow_jun1'), data['data'].get('snow_jul1'),
          data['data'].get('snow_dec1'), data['data'].get('precip_dec1'),
          data['data'].get('precip_dec1_normal'),
          data['data'].get('precip_jan1_normal'),
          data['data'].get('temperature_maximum_time'),
          data['data'].get('temperature_minimum_time'),
          data['data'].get('snow_today_record_years', []),
          data['data'].get('snow_today_record'),
          data['data'].get('snow_jun1_normal'),
          data['data'].get('snow_jul1_normal'),
          data['data'].get('snow_dec1_normal'),
          data['data'].get('snow_month_normal'),
          data['data'].get('precip_jun1'),
          data['data'].get('precip_jun1_normal')
          ))
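
A sketch of driving the same INSERT from a single column map, which keeps the column list and the data['data'] keys side by side; CLI_COLUMNS below is a hypothetical subset for illustration, not the full list above:

CLI_COLUMNS = {
    'high': 'temperature_maximum',
    'high_normal': 'temperature_maximum_normal',
    'low': 'temperature_minimum',
    'precip': 'precip_today',
    'snow': 'snow_today',
}


def build_cli_insert(station, prod, data):
    """Assemble the cli_data INSERT and its argument list."""
    cols = ['station', 'product', 'valid'] + list(CLI_COLUMNS)
    args = [station, prod.get_product_id(), data['cli_valid']]
    args += [data['data'].get(key) for key in CLI_COLUMNS.values()]
    sql = "INSERT into cli_data(%s) VALUES (%s)" % (
        ", ".join(cols), ",".join(["%s"] * len(cols)))
    return sql, args
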
Example #47
def really_process(prod, data):
    """
    This processes the output we get from the SHEFIT program
    """
    # Now we loop over the data we got :)
    # log.msg("\n"+data)
    mydata = {}
    for line in data.split("\n"):
        # Skip blank output lines or short lines
        if line.strip() == "" or len(line) < 90:
            continue
        # data is fixed, so we should parse it
        sid = line[:8].strip()
        if len(sid) > 8:
            log.msg("SiteID Len Error: [%s] %s" % (sid, prod.product_id))
            continue
        if sid not in mydata:
            mydata[sid] = {}
        modelruntime = make_datetime(line[31:41], line[42:50])
        if modelruntime is not None:
            # print("Skipping forecast data for %s" % (sid, ))
            continue
        tstamp = make_datetime(line[10:20], line[21:29])
        # We don't care about data in the future!
        utcnow = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
        if tstamp > (utcnow + datetime.timedelta(hours=1)):
            continue
        if tstamp < (utcnow - datetime.timedelta(days=60)):
            log.msg("Rejecting old data %s %s" % (sid, tstamp))
            continue
        s_data = mydata.setdefault(sid, dict())
        st_data = s_data.setdefault(tstamp, dict())

        varname = line[52:59].strip()
        value = line[60:73].strip()
        if value.find("****") > -1:
            log.msg("Bad Data from %s\n%s" % (prod.product_id, data))
            value = -9999.0
        else:
            value = float(value)
            # shefit generates 0.001 for trace, IEM uses something else
            if (value > 0.0009 and value < 0.0011 and
                    varname[:2] in ['PC', 'PP', 'QA', 'QD', 'QR', 'QT',
                                    'SD', 'SF', 'SW']):
                value = TRACE_VALUE
        # Handle variable time length data
        if varname[2] == 'V':
            itime = line[87:91]
            if itime[0] == '2':
                varname = "%sDVD%s" % (varname, itime[-2:])
        # Handle 7.4.6 Paired Value ("Vector") Physical Elements
        if varname[:2] in ['HQ', 'MD', 'MN', 'MS', 'MV', 'NO', 'ST', 'TB',
                           'TE', 'TV']:
            depth = int(value)
            if depth == 0:
                value = abs(value * 1000)
            else:
                value = abs((value * 1000) % (depth * 1000))
            if depth < 0:
                value = 0 - value
                depth = abs(depth)
            varname = "%s.%02i" % (varname, depth)
            if len(varname) > 10:
                if depth > 999:
                    log.msg(("Ignoring sid: %s varname: %s value: %s"
                             ) % (sid, varname, value))
                    continue
                common.email_error(("sid: %s varname: %s value: %s "
                                    "is too large") % (sid, varname, value),
                                   "%s\n%s" % (data, prod.text))
            continue
        st_data[varname] = value
    # Now we process each station we found in the report! :)
    for sid in mydata:
        times = list(mydata[sid].keys())
        times.sort()
        for tstamp in times:
            process_site(prod, sid, tstamp, mydata[sid][tstamp])
Example #48
    z.write("iaroad_cond.shx")
    z.write("iaroad_cond.dbf")
    shutil.copyfile("/mesonet/data/gis/meta/26915.prj", "iaroad_cond.prj")
    z.write("iaroad_cond.prj")
    z.close()

    utc = ts.astimezone(pytz.timezone("UTC"))
    subprocess.call(
        "/home/ldm/bin/pqinsert -p 'zip ac %s "
        "gis/shape/26915/ia/iaroad_cond.zip GIS/iaroad_cond_%s.zip zip' "
        "iaroad_cond.zip" % (utc.strftime("%Y%m%d%H%M"),
                             utc.strftime("%Y%m%d%H%M")),
        shell=True)

    for suffix in ['shp', 'shx', 'dbf', 'prj', 'zip']:
        os.unlink("iaroad_cond.%s" % (suffix,))

if __name__ == "__main__":
    raw = sys.stdin.read()
    try:
        ts = process(raw)
        generate_shapefile(ts)
    except Exception:
        traceback.print_exc(file=errors)

    errors.seek(0)
    errstr = errors.read()
    if errstr:
        logger.error(errstr)
        common.email_error(errstr, raw)
    pcursor.close()
    POSTGIS.commit()
    POSTGIS.close()
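
A closing note on the pqinsert call in Example #48: passing an argument vector sidesteps shell quoting entirely. A minimal sketch assuming the same pqinsert path and product naming; insert_zip is hypothetical:

import subprocess


def insert_zip(utc):
    """Queue the shapefile zip via pqinsert without invoking a shell."""
    stamp = utc.strftime("%Y%m%d%H%M")
    pqstr = ("zip ac %s gis/shape/26915/ia/iaroad_cond.zip "
             "GIS/iaroad_cond_%s.zip zip") % (stamp, stamp)
    subprocess.call(["/home/ldm/bin/pqinsert", "-p", pqstr,
                     "iaroad_cond.zip"])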