def test_windrose():
    """Exercise the windrose code.

    Builds 361 hourly observations (first entry deliberately None/None to
    exercise missing-data handling) and renders the plot several ways.
    """
    basevalid = utc(2015, 1, 1, 6)
    valid = [basevalid]
    sknt = [None]
    drct = [None]
    for s in range(360):
        basevalid += datetime.timedelta(hours=1)
        valid.append(basevalid)
        # Keep the max speed at ~24kts
        sknt.append(s / 13.)
        drct.append(s)
    # Default figure generation
    fig = windrose('AMW2', sknt=sknt, drct=drct, valid=valid, sname='Ames')
    assert fig is not None
    # justdata=True should return the tabular text instead of a figure
    res = windrose(
        'AMW2', sknt=sknt, drct=drct, valid=valid,
        sts=datetime.datetime(2015, 1, 1),
        ets=datetime.datetime(2015, 10, 2), justdata=True)
    assert isinstance(res, str)
    # allow _get_data to be exercised (unknown station id)
    res = windrose('XXXXX')
    assert res is not None
    # nogenerated=True suppresses the "Generated at" annotation
    fig = windrose(
        'AMW2', sknt=sknt, drct=drct, valid=valid,
        sts=datetime.datetime(2001, 1, 1),
        ets=datetime.datetime(2016, 1, 1), nogenerated=True)
    return fig
def test_mcdparser(dbcursor):
    ''' Test Parsing of MCD Product

    Covers both a plain SPC Mesoscale Convective Discussion and one that
    carries a watch probability, then verifies the jabber/twitter messages
    and the database persistence path.
    '''
    prod = parser(get_test_file('MCD_MPD/SWOMCD.txt'))
    assert abs(prod.geometry.area - 4.302) < 0.001
    assert prod.discussion_num == 1525
    assert prod.attn_wfo[2] == 'DLH'
    ans = "PORTIONS OF NRN WI AND THE UPPER PENINSULA OF MI"
    assert prod.areas_affected == ans
    # With probability this time
    prod = parser(get_test_file('MCD_MPD/SWOMCDprob.txt'))
    assert abs(prod.geometry.area - 2.444) < 0.001
    assert prod.watch_prob == 20
    jmsg = prod.get_jabbers('http://localhost')
    # HTML flavored message
    ans = (
        '<p>Storm Prediction Center issues '
        '<a href="http://www.spc.noaa.gov/'
        'products/md/2013/md1678.html">Mesoscale Discussion #1678</a> '
        '[watch probability: 20%] (<a href="http://localhost'
        '?pid=201308091725-KWNS-ACUS11-SWOMCD">View text</a>)</p>')
    assert jmsg[0][1] == ans
    # Plain-text flavored message
    ans = (
        'Storm Prediction Center issues Mesoscale Discussion #1678 '
        '[watch probability: 20%] '
        'http://www.spc.noaa.gov/products/md/2013/md1678.html')
    assert jmsg[0][0] == ans
    ans = utc(2013, 8, 9, 17, 25)
    assert prod.sts == ans
    ans = utc(2013, 8, 9, 19, 30)
    assert prod.ets == ans
    prod.database_save(dbcursor)
def test_valid_nomnd():
    """A product lacking a Mass News Disseminator header still gets a valid."""
    now = utc(2012, 11, 27)
    expected = utc(2012, 11, 27, 0, 1)
    tp = product.TextProduct(get_test_file('AFD_noMND.txt'), utcnow=now)
    assert tp.valid == expected
def test_01():
    """LSR.txt: process a valid LSR without blemish."""
    utcnow = utc(2013, 7, 23, 23, 54)
    prod = parser(get_test_file("LSR.txt"), utcnow=utcnow)
    assert len(prod.lsrs) == 58

    # Spot-check the final (58th) report in the summary product
    assert abs(prod.lsrs[57].magnitude_f - 73) < 0.01
    assert prod.lsrs[57].county == "MARION"
    assert prod.lsrs[57].state == "IA"
    assert abs(prod.lsrs[57].get_lon() - -93.11) < 0.01
    assert abs(prod.lsrs[57].get_lat() - 41.3) < 0.01
    assert prod.is_summary()
    assert prod.lsrs[57].wfo == 'DMX'

    answer = utc(2013, 7, 23, 3, 55)
    assert prod.lsrs[57].valid == answer

    j = prod.get_jabbers('http://iem.local/')
    # BUG FIX: this was ``assert j[57][0], ("...")`` — the tuple was used as
    # the assert *message*, so only truthiness was checked and the expected
    # text was never compared.  Assert real equality now.
    ans = (
        "Knoxville Airport [Marion Co, IA] AWOS reports NON-TSTM WND "
        "GST of M73 MPH at 22 Jul, 10:55 PM CDT -- HEAT BURST. "
        "TEMPERATURE ROSE FROM 70 TO 84 IN 15 MINUTES AND DEW POINT "
        "DROPPED FROM 63 TO 48 IN 10 MINUTES. "
        "http://iem.local/#DMX/201307230355/201307230355")
    assert j[57][0] == ans

    ans = (
        "At 4:45 PM CDT, Dows [Wright Co, IA] LAW ENFORCEMENT "
        "reports TSTM WND DMG. DELAYED REPORT. LARGE TREE "
        "BRANCH DOWN IN TOWN THAT TOOK OUT A POWER LINE "
        "AND BLOCKING PART OF A ROAD."
    )
    assert prod.lsrs[5].tweet() == ans
def test_utc():
    """Does the utc() function work as expected."""
    expected = datetime.datetime(2017, 2, 1, 2, 20).replace(tzinfo=pytz.UTC)
    assert expected == util.utc(2017, 2, 1, 2, 20)
    # Zero-arg call should mean "now" in UTC
    now = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC)
    assert now.year == util.utc().year
def find_time(self):
    """Find the start and end valid time of this watch

    Returns:
      (datetime, datetime): representing the time of this watch, or
      (None, None) when this product cancels the watch.
    """
    if self.action == self.CANCELS:
        return (None, None)
    # NOTE(review): tokens[0] below raises IndexError when the pattern is
    # absent from the product text — presumably never the case for
    # non-cancel watches; confirm upstream guarantees this.
    tokens = re.findall(("([0-3][0-9])([0-2][0-9])([0-6][0-9])Z - "
                         "([0-3][0-9])([0-2][0-9])([0-6][0-9])Z"),
                        self.unixtext)
    day1 = int(tokens[0][0])
    hour1 = int(tokens[0][1])
    minute1 = int(tokens[0][2])
    day2 = int(tokens[0][3])
    hour2 = int(tokens[0][4])
    minute2 = int(tokens[0][5])
    sts = utc(self.utcnow.year, self.utcnow.month, day1, hour1, minute1)
    ets = utc(self.utcnow.year, self.utcnow.month, day2, hour2, minute2)
    # If we are near the end of the month and the day is 1, the watch
    # actually falls in the next month: jump ahead 35 days, then snap back
    # to the first of that month.
    if self.utcnow.day > 27 and day1 == 1:
        sts += datetime.timedelta(days=+35)
        sts = sts.replace(day=1)
    if self.utcnow.day > 27 and day2 == 1:
        ets += datetime.timedelta(days=+35)
        ets = ets.replace(day=1)
    return (sts, ets)
def test_sigpat():
    """ Make sure we don't have another failure with geom parsing

    Regression coverage for a Pacific (PHFO) SIGMET product.
    """
    utcnow = utc(2014, 8, 11, 12, 34)
    tp = parser(get_test_file('SIGMETS/SIGPAT.txt'), utcnow)
    j = tp.get_jabbers('http://localhost', 'http://localhost')
    assert abs(tp.sigmets[0].geom.area - 33.71) < 0.01
    assert tp.sigmets[0].sts == utc(2014, 8, 11, 12, 35)
    assert tp.sigmets[0].ets == utc(2014, 8, 11, 16, 35)
    assert j[0][0] == 'PHFO issues SIGMET TANGO 1 till 1635 UTC'
def test_170406_day48(dbcursor):
    """Can we parse a present day days 4-8

    Verifies outlook geometry, SQL persistence, and collection timestamps.
    """
    spc = parser(get_test_file('SPCPTS/PTSD48.txt'))
    # spc.draw_outlooks()
    outlook = spc.get_outlook('ANY SEVERE', '0.15', 4)
    assert abs(outlook.geometry.area - 40.05) < 0.01
    spc.sql(dbcursor)
    collect = spc.get_outlookcollection(4)
    assert collect.issue == utc(2017, 4, 9, 12)
    assert collect.expire == utc(2017, 4, 10, 12)
def test_171121_issue45(dbcursor):
    """Do we alert on duplicated ETNs?

    Inserts two products months apart sharing an ETN; exactly one warning
    (after filtering known noise) should be emitted.
    """
    utcnow = utc(2017, 4, 20, 21, 33)
    prod = vtecparser(get_test_file('vtec/NPWDMX_0.txt'), utcnow=utcnow)
    prod.sql(dbcursor)
    utcnow = utc(2017, 11, 20, 21, 33)
    prod = vtecparser(get_test_file('vtec/NPWDMX_1.txt'), utcnow=utcnow)
    prod.sql(dbcursor)
    warnings = filter_warnings(prod.warnings)
    assert len(warnings) == 1
def test_daily_offset():
    """ Compute the offsets

    daily_offset should accept both aware datetimes and plain dates and
    return the zero-based day-of-year index.
    """
    ts = utc(2013, 1, 1, 0, 0)
    offset = iemre.daily_offset(ts)
    assert offset == 0
    ts = datetime.date(2013, 2, 1)
    offset = iemre.daily_offset(ts)
    assert offset == 31
    ts = utc(2013, 1, 5, 12, 0)
    offset = iemre.daily_offset(ts)
    assert offset == 4
def test_hourly_offset():
    """ Compute the offsets

    hourly_offset should be timezone-aware: the same instant expressed in
    America/Chicago must map to the same UTC-hour index.
    """
    ts = utc(2013, 1, 1, 0, 0)
    offset = iemre.hourly_offset(ts)
    assert offset == 0
    ts = utc(2013, 1, 1, 6, 0)
    ts = ts.astimezone(pytz.timezone("America/Chicago"))
    offset = iemre.hourly_offset(ts)
    assert offset == 6
    ts = utc(2013, 1, 5, 12, 0)
    offset = iemre.hourly_offset(ts)
    assert offset == 4*24 + 12
def test_str1(dbcursor):
    """ check spcpts parsing """
    spc = parser(get_test_file('SPCPTS/SPCPTS.txt'))
    # spc.draw_outlooks()
    assert spc.valid == utc(2013, 7, 19, 19, 52)
    assert spc.issue == utc(2013, 7, 19, 20, 0)
    assert spc.expire == utc(2013, 7, 20, 12, 0)
    spc.sql(dbcursor)
    spc.compute_wfos(dbcursor)
    # It is difficult to get a deterministic result here as in Travis, we
    # don't have UGCS, so the WFO lookup yields no results
    j = spc.get_jabbers("")
    assert len(j) >= 1
def test_vtec_series(dbcursor):
    """Test a lifecycle of WSW products

    Walks ten consecutive WSWDMX products through the database and checks
    the warnings table state (EXB -> CON -> CAN) at key points.
    """
    prod = vtecparser(get_test_file('WSWDMX/WSW_00.txt'))
    assert prod.afos == 'WSWDMX'
    prod.sql(dbcursor)
    # Did Marshall County IAZ049 get a ZR.Y
    dbcursor.execute("""
        SELECT issue from warnings_2013 WHERE
        wfo = 'DMX' and eventid = 1 and phenomena = 'ZR' and
        significance = 'Y' and status = 'EXB' and ugc = 'IAZ049'
    """)
    assert dbcursor.rowcount == 1
    prod = vtecparser(get_test_file('WSWDMX/WSW_01.txt'))
    assert prod.afos == 'WSWDMX'
    prod.sql(dbcursor)
    # Is IAZ006 in CON status with proper end time
    answer = utc(2013, 1, 28, 6)
    dbcursor.execute("""SELECT expire from warnings_2013 WHERE
        wfo = 'DMX' and eventid = 1 and phenomena = 'WS' and
        significance = 'W' and status = 'CON' and ugc = 'IAZ006' """)
    assert dbcursor.rowcount == 1
    row = dbcursor.fetchone()
    assert row[0] == answer
    # No change expected while ingesting the middle products
    for i in range(2, 9):
        prod = vtecparser(get_test_file('WSWDMX/WSW_%02i.txt' % (i,)))
        assert prod.afos == 'WSWDMX'
        prod.sql(dbcursor)
    prod = vtecparser(get_test_file('WSWDMX/WSW_09.txt'))
    assert prod.afos == 'WSWDMX'
    prod.sql(dbcursor)
    # IAZ006 should be cancelled
    answer = utc(2013, 1, 28, 5, 38)
    dbcursor.execute("""SELECT expire from warnings_2013 WHERE
        wfo = 'DMX' and eventid = 1 and phenomena = 'WS' and
        significance = 'W' and status = 'CAN' and ugc = 'IAZ006' """)
    assert dbcursor.rowcount == 1
    row = dbcursor.fetchone()
    assert row[0] == answer
def test_simple(month):
    """Can we walk before we run.

    Parses a minimal DSM string (parameterized by month) and checks the
    localized date/station/time fields.
    """
    text = (
        "KCVG DS 24/%02i 590353/ 312359// 53/ 48/"
        "/9470621/T/T/00/00/00/00/00/00/"
        "00/00/00/00/00/00/00/00/00/00/00/00/00/00/00/00/00/225/26381759/"
        "26500949="
    ) % (month, )
    tzprovider = {'KCVG': pytz.timezone("America/New_York")}
    dsm = process(text)
    dsm.compute_times(utc(2019, month, 25))
    dsm.tzlocalize(tzprovider['KCVG'])
    assert dsm.date == datetime.date(2019, month, 24)
    assert dsm.station == 'KCVG'
    assert dsm.time_sped_max == utc(2019, month, 24, 22, 59)
def test_170809_nocrcrlf():
    """Product fails WMO parsing due to usage of RTD as bbb field"""
    utcnow = utc(2017, 8, 9, 9)
    prod = PARSER(
        get_test_file("METAR/rtd_bbb.txt"), utcnow=utcnow,
        nwsli_provider=NWSLI_PROVIDER)
    # The lone METAR should still be extracted despite the odd bbb field
    assert len(prod.metars) == 1
def test_140604_sbwupdate(dbcursor):
    """Make sure we are updating the right info in the sbw table

    Clears any prior rows for this event, inserts the initial product, then
    the follow-up, and verifies polygon row counts grow as expected.
    """
    utcnow = utc(2014, 6, 4)

    dbcursor.execute("""DELETE from sbw_2014 where
        wfo = 'LMK' and eventid = 95 and phenomena = 'SV' and
        significance = 'W' """)
    dbcursor.execute("""DELETE from warnings_2014 where
        wfo = 'LMK' and eventid = 95 and phenomena = 'SV' and
        significance = 'W' """)
    prod = vtecparser(get_test_file('SVRLMK_1.txt'), utcnow=utcnow)
    prod.sql(dbcursor)

    dbcursor.execute("""SELECT expire from sbw_2014 WHERE
        wfo = 'LMK' and eventid = 95 and phenomena = 'SV' and
        significance = 'W' """)
    assert dbcursor.rowcount == 1

    prod = vtecparser(get_test_file('SVRLMK_2.txt'), utcnow=utcnow)
    prod.sql(dbcursor)

    dbcursor.execute("""SELECT expire from sbw_2014 WHERE
        wfo = 'LMK' and eventid = 95 and phenomena = 'SV' and
        significance = 'W' """)
    assert dbcursor.rowcount == 3
    warnings = filter_warnings(prod.warnings)
    assert not warnings
def test_140610_no_vtec_time(dbcursor):
    """A VTEC product whose begin and end times are both 0000, sigh."""
    prod = vtecparser(
        get_test_file('FLSLZK_notime.txt'),
        utcnow=utc(2014, 6, 10, 0, 56),
    )
    prod.sql(dbcursor)
    entry = prod.segments[0].vtec[0]
    assert entry.begints is None
    assert entry.endts is None
def test_routine(dbcursor):
    """Exercise handling of a ROU (routine) action VTEC product."""
    prod = vtecparser(
        get_test_file('FLWMKX_ROU.txt'),
        utcnow=utc(2014, 6, 19, 2, 56),
    )
    prod.sql(dbcursor)
    # A ROU product should produce no noteworthy warnings
    assert not filter_warnings(prod.warnings)
def test_wcn_updates():
    """ Make sure our Tags and svs_special works for combined message """
    utcnow = utc(2014, 6, 6, 20, 37)
    ugc_provider = {}
    for u in range(1, 201, 2):
        # NOTE(review): due to precedence this is min(u + 0.5, 40), not
        # min((u + 1) / 2, 40) — the expected strings below depend on the
        # current behavior (40-char names), so left as-is; confirm intent.
        n = 'a' * int(min((u+1/2), 40))
        for st in ['AR', 'MS', 'TN', 'MO']:
            ugc_provider["%sC%03i" % (st, u)] = UGC(st, 'C', "%03i" % (u,),
                                                   name=n, wfos=['DMX'])
    prod = vtecparser(get_test_file('WCNMEG.txt'), utcnow=utcnow,
                      ugc_provider=ugc_provider)
    j = prod.get_jabbers('http://localhost', 'http://localhost')
    ans = (
        'MEG updates Severe Thunderstorm Watch (expands area to include '
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa [MO] and 11 counties in '
        '[TN], continues 12 counties in [AR] and '
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa [MO] and 22 counties in '
        '[MS] and aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, '
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, '
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, '
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, '
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, '
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa [TN]) till Jun 6, 7:00 PM '
        'CDT. http://localhost2014-O-EXA-KMEG-SV-A-0240'
    )
    assert j[0][0] == ans
def test_150102_multiyear(dbcursor):
    """ WSWOUN See how well we span multiple years

    Ingests thirteen sequential products crossing the new year and checks
    that issue times never go null and the mid-stream row has the expected
    issue timestamp.
    """
    for i in range(13):
        print(datetime.datetime.utcnow())
        print('Parsing Product: %s.txt' % (i,))
        prod = vtecparser(get_test_file('WSWOUN/%i.txt' % (i,)))
        prod.sql(dbcursor)
        # Make sure there are no null issue times
        dbcursor.execute("""
            SELECT count(*) from warnings_2014
            where wfo = 'OUN' and eventid = 16
            and phenomena = 'WW' and significance = 'Y'
            and issue is null
        """)
        assert dbcursor.fetchone()[0] == 0
        if i == 5:
            dbcursor.execute("""
                SELECT issue from warnings_2014
                WHERE ugc = 'OKZ036' and wfo = 'OUN' and eventid = 16
                and phenomena = 'WW' and significance = 'Y'
            """)
            row = dbcursor.fetchone()
            assert row[0] == utc(2015, 1, 1, 6, 0)
        # Filter known-noise warnings before asserting silence
        warnings = filter_warnings(prod.warnings)
        warnings = filter_warnings(warnings, "Segment has duplicated")
        warnings = filter_warnings(warnings, "VTEC Product appears to c")
        assert not warnings
def test_170824_sa_format():
    """Don't be so noisey when we encounter SA formatted products"""
    utcnow = utc(2017, 8, 24, 14)
    prod = PARSER(
        get_test_file("METAR/sa.txt"), utcnow=utcnow,
        nwsli_provider=NWSLI_PROVIDER)
    # SA format is unsupported; expect no METARs rather than an error storm
    assert not prod.metars
def test_issue9(dbcursor):
    """A product crossing year bondary"""
    utcnow = utc(2017, 12, 31, 9, 24)
    prod = vtecparser(get_test_file('vtec/crosses_0.txt'), utcnow=utcnow)
    prod.sql(dbcursor)
    utcnow = utc(2018, 1, 1, 16, 0)
    prod = vtecparser(get_test_file('vtec/crosses_1.txt'), utcnow=utcnow)
    prod.sql(dbcursor)
    warnings = filter_warnings(prod.warnings)
    # We used to emit a warning for this, but not any more
    assert not warnings
    utcnow = utc(2018, 1, 1, 21, 33)
    prod = vtecparser(get_test_file('vtec/crosses_2.txt'), utcnow=utcnow)
    prod.sql(dbcursor)
    warnings = filter_warnings(prod.warnings)
    assert not warnings
def test_nbm(cursor):
    """Can we parse the NBM data.

    Two stations, 21 forecast hours each -> 42 inserted rows.
    """
    utcnow = utc(2018, 11, 7, 15)
    prod = mosparser(get_test_file("MOS/NBSUSA.txt"), utcnow=utcnow)
    assert len(prod.data) == 2

    inserts = prod.sql(cursor)
    assert inserts == (2 * 21)

    cursor.execute("""
        SELECT count(*), max(ftime) from t2018
        where model = 'NBS' and station = 'KALM' and runtime = %s
    """, (utcnow, ))
    row = cursor.fetchone()
    assert row[0] == 21
    assert row[1] == utc(2018, 11, 10, 9)
def iemob():
    """Database.

    Fixture factory: builds a fake station plus matching current and
    summary rows so an Observation can be saved against them.
    """
    res = blah()
    ts = utc(2015, 9, 1, 1, 0)
    # Random 7-char station id to avoid collisions between test runs
    sid = ''.join(random.choice(
        string.ascii_uppercase + string.digits) for _ in range(7))
    # Negative iemid keeps the fake row clear of real station ids
    res.iemid = 0 - random.randint(0, 1000)
    res.ob = observation.Observation(sid, 'FAKE', ts)
    res.conn = get_dbconn('iem')
    res.cursor = res.conn.cursor(
        cursor_factory=psycopg2.extras.DictCursor)
    # Create fake station, so we can create fake entry in summary
    # and current tables
    res.cursor.execute("""
        INSERT into stations(id, network, iemid, tzname)
        VALUES (%s, 'FAKE', %s, 'UTC')
    """, (sid, res.iemid))
    res.cursor.execute("""
        INSERT into current(iemid, valid) VALUES
        (%s, '2015-09-01 00:00+00')
    """, (res.iemid, ))
    res.cursor.execute("""
        INSERT into summary_2015(iemid, day) VALUES
        (%s, '2015-09-01')
    """, (res.iemid, ))
    return res
def main():
    """Go Main Go.

    Rewrites a GRIB message's time-range metadata to cover a 168 hour
    accumulation and writes the result to test.grb.
    """
    f0 = utc(2018, 10, 5, 0)
    fx = f0 + datetime.timedelta(hours=168)
    grbs = pygrib.open('p06m_2018100500f006.grb')
    grb = grbs[1]
    # Debug helper: dump all keys of the message
    # for key in sorted(grb.keys()):
    #     try:
    #         print("%s %s" % (key, getattr(grb, key, None)))
    #     except RuntimeError:
    #         print("%s None" % (key, ))
    grb['dayOfEndOfOverallTimeInterval'] = fx.day
    grb['endStep'] = 168
    grb['hourOfEndOfOverallTimeInterval'] = fx.hour
    grb['lengthOfTimeRange'] = 168
    grb['stepRange'] = "0-168"
    grb = pygrib.reload(grb)
    # grb['validityDate'] = int(fx.strftime("%Y%m%d"))
    # grb['validityTime'] = int(fx.strftime("%H%M"))
    # BUG FIX: the file handle was opened without a context manager and
    # would leak on a write failure; use with-open instead.
    with open('test.grb', 'wb') as fp:
        fp.write(grb.tostring())
def test_170324_badformat():
    """A badly formatted LSR should yield warnings, not parsed reports."""
    prod = parser(
        get_test_file('LSRPIH.txt'), utcnow=utc(2017, 3, 22, 2, 35))
    prod.get_jabbers('http://iem.local/')
    assert not prod.lsrs
    assert len(prod.warnings) == 2
def test_future():
    """Can we handle products that are around the first

    A collective straddling a month boundary should place each METAR in
    the correct month relative to utcnow.
    """
    utcnow = utc(2017, 12, 1)
    prod = PARSER(get_test_file("METAR/first.txt"), utcnow=utcnow)
    assert len(prod.metars) == 2
    assert prod.metars[0].time.month == 11
    assert prod.metars[1].time.month == 12
def test_170815_pywwa_issue3():
    """This example was in pyWWA issues list, so lets test here"""
    utcnow = utc(2015, 9, 30, 16, 56)
    tp = parser(get_test_file('SIGMETS/SIGE.txt'), utcnow,
                nwsli_provider=NWSLI_PROVIDER)
    # All four SIGMETs in the collective should parse
    assert len(tp.sigmets) == 4
def test_190503_badgeom():
    """This SIGMET produced a traceback in prod."""
    utcnow = utc(2019, 5, 3, 18, 25)
    tp = parser(
        get_test_file('SIGMETS/SIGC_badgeom.txt'), utcnow,
        nwsli_provider=NWSLI_PROVIDER)
    # The bad geometry should be skipped, leaving four valid SIGMETs
    assert len(tp.sigmets) == 4
def test_180201_unparsed():
    """For some reason, this collective was not parsed?!?!"""
    utcnow = utc(2018, 2, 1, 0)
    prod = PARSER(
        get_test_file("METAR/collective2.txt"), utcnow=utcnow,
        nwsli_provider=NWSLI_PROVIDER)
    assert len(prod.metars) == 35
    # Observations just before utcnow belong to the previous month
    assert prod.metars[0].time.month == 1
def plotter(fdict):
    """ Go

    Autoplot entry: map of days since each NWS office last issued the
    given phenomena/significance VTEC product.

    Args:
      fdict: raw CGI parameters for get_autoplot_context.

    Returns:
      (matplotlib.Figure, pandas.DataFrame)
    """
    # Lazy imports so the non-GUI Agg backend is selected before pyplot
    import matplotlib
    matplotlib.use('agg')
    from pyiem.plot import MapPlot
    bins = [0, 1, 14, 31, 91, 182, 273, 365, 730, 1460, 2920, 3800]
    pgconn = get_dbconn('postgis')
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    ctx = get_autoplot_context(fdict, get_description())
    phenomena = ctx['phenomena']
    significance = ctx['significance']
    edate = ctx.get('edate')
    if edate is not None:
        # Normalize to midnight UTC and bound the search by that date
        edate = utc(edate.year, edate.month, edate.day, 0, 0)
        cursor.execute("""
         select wfo,  extract(days from (%s::date - max(issue))) as m
         from warnings where significance = %s and phenomena = %s
         and issue < %s
         GROUP by wfo ORDER by m ASC
        """, (edate, significance, phenomena, edate))
    else:
        cursor.execute("""
         select wfo,  extract(days from ('TODAY'::date - max(issue))) as m
         from warnings where significance = %s and phenomena = %s
         GROUP by wfo ORDER by m ASC
        """, (significance, phenomena))
        edate = datetime.datetime.utcnow()
    if cursor.rowcount == 0:
        raise ValueError(("No Events Found for %s (%s.%s)"
                          ) % (vtec.get_ps_string(phenomena, significance),
                               phenomena, significance))
    data = {}
    rows = []
    for row in cursor:
        # San Juan's legacy identifier JSJ is displayed as SJU
        wfo = row[0] if row[0] != 'JSJ' else 'SJU'
        rows.append(dict(wfo=wfo, days=row[1]))
        data[wfo] = max([row[1], 0])
    df = pd.DataFrame(rows)
    df.set_index('wfo', inplace=True)
    mp = MapPlot(sector='nws', axisbg='white', nocaption=True,
                 title='Days since Last %s by NWS Office' % (
                     vtec.get_ps_string(phenomena, significance), ),
                 subtitle='Valid %s' % (edate.strftime("%d %b %Y %H%M UTC"),))
    mp.fill_cwas(data, bins=bins, ilabel=True, units='Days',
                 lblformat='%.0f')
    return mp.fig, df
def main():
    """Go Main Go

    Mirrors IDOT RWIS webcam imagery over FTP and hands each retrieved
    file name off to process().
    """
    pgconn = util.get_dbconn("mesosite")
    mcursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    props = util.get_properties()
    ftp_pass = props["rwis_ftp_password"]
    utcnow = util.utc()
    # we work from here
    os.chdir("/mesonet/data/dotcams")
    # Every three hours, clean up after ourselves :)
    if utcnow.hour % 3 == 0 and utcnow.minute < 5:
        subprocess.call("/usr/sbin/tmpwatch 6 165.206.203.34/rwis_images",
                        shell=True)
    # Make dictionary of webcams we are interested in
    cameras = {}
    mcursor.execute("SELECT * from webcams WHERE network = 'IDOT'")
    for row in mcursor:
        cameras[row["id"]] = row
    # NOTE(review): shell=True with string interpolation; ftp_pass comes
    # from trusted local properties, but a list-args subprocess.run would
    # be safer — flagging, not changing.
    proc = subprocess.Popen(
        ("wget --timeout=20 -m --ftp-user=rwis "
         "--ftp-password=%s "
         "ftp://165.206.203.34/rwis_images/*.jpg") % (ftp_pass, ),
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    # wget logs retrieved filenames on stderr
    _stdout, stderr = proc.communicate()
    stderr = stderr.decode("utf-8")
    lines = stderr.split("\n")
    for line in lines:
        # Look for RETR (.*)
        tokens = re.findall(
            ("RETR Vid-000512([0-9]{3})-([0-9][0-9])-([0-9][0-9])"
             "-([0-9]{4})-([0-9][0-9])-([0-9][0-9])-([0-9][0-9])-"
             "([0-9][0-9]).jpg"),
            line,
        )
        if not tokens:
            continue
        process(tokens[0], cameras, mcursor)
    mcursor.close()
    pgconn.commit()
    pgconn.close()
def main(argv):
    """ Go main go

    QC daily ISUSM precipitation against stage IV estimates for the date
    given as YYYY MM DD on the command line.
    """
    date = datetime.date(int(argv[1]), int(argv[2]), int(argv[3]))
    pgconn = get_dbconn("isuag")
    nt = NetworkTable("ISUSM")
    # Get our obs
    df = read_sql(
        """
        SELECT station, rain_mm_tot_qc / 25.4 as obs from sm_daily
        where valid = %s ORDER by station ASC
        """,
        pgconn,
        params=(date,),
        index_col="station",
    )
    hdf = get_hdf(nt, date)
    if hdf.empty:
        LOG.info("hdf is empty, abort fix_precip for %s", date)
        return
    # lets try some QC
    for station in df.index.values:
        # the daily total is 12 CST to 12 CST, so that is always 6z
        # so we want the 7z total
        sts = utc(date.year, date.month, date.day, 7)
        ets = sts + datetime.timedelta(hours=24)
        # OK, get our data
        ldf = hdf[
            (hdf["station"] == station)
            & (hdf["valid"] >= sts)
            & (hdf["valid"] < ets)
        ]
        df.at[station, "stage4"] = ldf["precip_in"].sum()
    df["diff"] = df["obs"] - df["stage4"]
    # We want to QC the case of having too low of precip, how low is too low?
    # if stageIV > 0.1 and obs < 0.05
    df2 = df[(df["stage4"] > 0.1) & (df["obs"] < 0.05)]
    for station, row in df2.iterrows():
        LOG.info(
            "ISUSM fix_precip %s %s stageIV: %.2f obs: %.2f",
            date,
            station,
            row["stage4"],
            row["obs"],
        )
        print_debugging(station)
        update_precip(date, station, hdf)
def test_140527_00000_hvtec_nwsli(dbcursor):
    """Test the processing of a HVTEC NWSLI of 00000 """
    utcnow = utc(2014, 5, 27)
    prod = vtecparser(get_test_file("FLSBOU.txt"), utcnow=utcnow)
    prod.sql(dbcursor)
    j = prod.get_jabbers("http://localhost/", "http://localhost/")
    # Plain-text message
    ans = ("BOU extends time of Flood Advisory "
           "for ((COC049)), ((COC057)) [CO] till May 29, 9:30 PM MDT "
           "http://localhost/2014-O-EXT-KBOU-FA-Y-0018_2014-05-27T00:00Z")
    assert j[0][0] == ans
    # Twitter flavored message
    ans = ("BOU extends time of Flood "
           "Advisory for ((COC049)), ((COC057)) [CO] till "
           "May 29, 9:30 PM MDT "
           "http://localhost/2014-O-EXT-KBOU-FA-Y-0018_2014-05-27T00:00Z")
    assert j[0][2]["twitter"] == ans
def test_150915_line():
    """ See about parsing a SIGMET LINE """
    utcnow = utc(2015, 9, 15, 2, 55)
    ugc_provider = {}
    # Minimal NWSLI lookups needed to anchor the LINE geometry
    nwsli_provider = {
        "MSP": dict(lon=-83.39, lat=44.45),
        "MCW": dict(lon=-85.50, lat=42.79),
    }

    tp = parser(
        get_test_file("SIGMETS/SIGC_line.txt"),
        utcnow,
        ugc_provider,
        nwsli_provider,
    )
    assert abs(tp.sigmets[0].geom.area - 0.47) < 0.01
def main(): """Go Main Go""" # Run for the 12z file yesterday today = datetime.date.today() - datetime.timedelta(days=1) valid = utc(today.year, today.month, today.day, 12) # Create tiles from -104 36 through -80 50 for west in np.arange(-104, -80, 2): for south in np.arange(36, 50, 2): # psims divides its data up into 2x2-degree tiles, # with the first number in the file name being number # of tiles since 90 degrees north, and the second number # being number of tiles since -180 degrees eas ncfn = "clim_%04i_%04i.tile.nc4" % ((90 - south) / 2, (180 - (0 - west)) / 2 + 1) workflow(valid, ncfn, west, south)
def test_180710_issue58():
    """Crazy MST during MDT

    Phoenix reports in MST year-round; ensure the localized time strings
    stay MST even while the rest of the zone is on MDT.
    """
    utcnow = utc(2018, 7, 9, 22, 59)
    prod = parser(get_test_file("LSR/LSRPSR.txt"), utcnow=utcnow)
    j = prod.get_jabbers("http://iem.local/")
    ans = ("At 3:57 PM MST, 5 WNW Florence [Pinal Co, AZ] TRAINED SPOTTER "
           "reports FLASH FLOOD. STREET FLOODING WITH WATER OVER THE CURBS "
           "IN THE MERRILL RANCH DEVELOPMENT OF FLORENCE. "
           "http://iem.local/#PSR/201807092257/201807092257")
    assert j[0][2]["twitter"] == ans
    ans = ("5 WNW Florence [Pinal Co, AZ] TRAINED SPOTTER reports FLASH FLOOD "
           "at 3:57 PM MST -- STREET FLOODING WITH WATER OVER THE CURBS IN "
           "THE MERRILL RANCH DEVELOPMENT OF FLORENCE. "
           "http://iem.local/#PSR/201807092257/201807092257")
    assert j[0][0] == ans
def test_cli():
    """ CLIJUN Test the processing of a CLI product """
    prod = cliparser(get_test_file('CLI/CLIJNU.txt'))
    assert prod.data[0]['cli_valid'] == datetime.datetime(2013, 6, 30)
    assert prod.valid == utc(2013, 7, 1, 0, 36)
    assert prod.data[0]['data']['temperature_maximum'] == 75
    assert prod.data[0]['data']['temperature_maximum_time'] == "259 PM"
    assert prod.data[0]['data']['temperature_minimum_time'] == "431 AM"
    # Trace precipitation is represented by the sentinel TRACE_VALUE
    assert prod.data[0]['data']['precip_today'] == TRACE_VALUE

    j = prod.get_jabbers("http://localhost")
    ans = ('JUNEAU Jun 30 Climate Report: High: 75 '
           'Low: 52 Precip: Trace Snow: M '
           'http://localhost?pid=201307010036-PAJK-CDAK47-CLIJNU')
    assert j[0][0] == ans
def main(argv):
    """Go Main Go

    With YYYY MM DD arguments, run the workflow for that date only;
    otherwise run for yesterday and 35 days ago (late-arriving data).
    """
    if len(argv) == 4:
        utcnow = utc(int(argv[1]), int(argv[2]), int(argv[3]))
        workflow(utcnow)
        return
    utcnow = datetime.datetime.utcnow()
    utcnow = utcnow.replace(hour=0, minute=0, second=0, microsecond=0,
                            tzinfo=pytz.utc)
    # Run for 'yesterday' and 35 days ago
    for day in [1, 35]:
        workflow(utcnow - datetime.timedelta(days=day))
def test_tornado_emergency():
    """ See what we do with Tornado Emergencies

    The is_emergency flag should be set and the HTML message should
    highlight the TORNADO EMERGENCY phrase in red.
    """
    utcnow = utc(2012, 4, 15, 3, 27)
    prod = vtecparser(get_test_file('TOR_emergency.txt'), utcnow=utcnow)
    assert prod.segments[0].is_emergency
    j = prod.get_jabbers('http://localhost', 'http://localhost')
    ans = ("<p>ICT <a href=\"http://localhost"
           "2012-O-NEW-KICT-TO-W-0035\">issues Tornado Warning</a> "
           "[tornado: OBSERVED, tornado damage threat: CATASTROPHIC, "
           "hail: 2.50 IN] for ((KSC015)), ((KSC173)) [KS] till 11:00 PM CDT "
           "* AT 1019 PM CDT...<span style=\"color: #FF0000;\">TORNADO "
           "EMERGENCY</span> FOR THE WICHITA METRO AREA. A CONFIRMED LARGE..."
           "VIOLENT AND EXTREMELY DANGEROUS TORNADO WAS LOCATED NEAR "
           "HAYSVILLE...AND MOVING NORTHEAST AT 50 MPH.</p>")
    assert j[0][1] == ans
def main(argv):
    """Go Main Go.

    Compute derived sounding parameters for all uncomputed RAOB flights in
    the given (or current) year, committing every 100 flights.
    """
    year = utc().year if len(argv) == 1 else int(argv[1])
    dbconn = get_dbconn("postgis")
    cursor = dbconn.cursor()
    nt = NetworkTable("RAOB")
    df = read_sql(
        f"""
        select f.fid, f.station, pressure, dwpc, tmpc, drct, smps,
        height, levelcode from
        raob_profile_{year} p JOIN raob_flights f on (p.fid = f.fid)
        WHERE not computed and height is not null and pressure is not null
        ORDER by pressure DESC
    """,
        dbconn,
    )
    if df.empty or pd.isnull(df["smps"].max()):
        return
    # Decompose wind into u/v components (m/s) for the profile math
    u, v = wind_components(
        df["smps"].values * units("m/s"),
        df["drct"].values * units("degrees_north"),
    )
    df["u"] = u.to(units("m/s")).m
    df["v"] = v.to(units("m/s")).m
    count = 0
    progress = tqdm(df.groupby("fid"), disable=not sys.stdout.isatty())
    for fid, gdf in progress:
        progress.set_description("%s %s" % (year, fid))
        try:
            do_profile(cursor, fid, gdf, nt)
        except (RuntimeError, ValueError, IndexError) as exp:
            LOG.debug(
                "Profile %s fid: %s failed calculation %s",
                gdf.iloc[0]["station"],
                fid,
                exp,
            )
            # Mark as computed even on failure so it is not retried forever
            cursor.execute(
                "UPDATE raob_flights SET computed = 't' WHERE fid = %s",
                (fid, ))
        # Recycle the cursor every 100 flights to bound transaction size
        if count % 100 == 0:
            cursor.close()
            dbconn.commit()
            cursor = dbconn.cursor()
        count += 1
    cursor.close()
    dbconn.commit()
def main(): """Go Main Go""" # Run for 12z yesterday today = datetime.date.today() - datetime.timedelta(days=1) for hour in [0, 6, 12, 18]: valid = utc(today.year, today.month, today.day, hour) # Create netcdf file nc = create_netcdf(valid) # merge in the data for gribname, vname in zip( ["dswsfc", "tmax", "tmin", "prate"], ["srad", "high_tmpk", "low_tmpk", "p01d"], ): merge(nc, valid, gribname, vname) # profit nc.close()
def test_170324_waterspout(dbcursor):
    """Do we parse Waterspout tags!"""
    utcnow = utc(2017, 3, 24, 1, 37)
    prod = vtecparser(get_test_file('SMWMFL.txt'), utcnow=utcnow)
    j = prod.get_jabbers("http://localhost")
    ans = ("MFL issues Marine Warning [waterspout: POSSIBLE, "
           "wind: >34 KTS, hail: 0.00 IN] for "
           "((AMZ630)), ((AMZ651)) [AM] till 10:15 PM EDT "
           "http://localhost2017-O-NEW-KMFL-MA-W-0059")
    assert j[0][0] == ans
    prod.sql(dbcursor)
    # The waterspout tag should round-trip into the sbw table
    dbcursor.execute("""SELECT * from sbw_2017 where wfo = 'MFL' and
        phenomena = 'MA' and significance = 'W' and eventid = 59 and
        status = 'NEW' and waterspouttag = 'POSSIBLE'
    """)
    assert dbcursor.rowcount == 1
def tzlocalize(self, tzinfo):
    """Localize the timestamps, tricky.

    Args:
      tzinfo: pytz timezone; is_dst=False requests the *standard-time*
        offset, since DSM times are reported in local standard time.
    """
    # NOTE(review): utcoffset(..., is_dst=) is a pytz extension, not plain
    # tzinfo — this method assumes a pytz timezone object.
    offset = tzinfo.utcoffset(datetime.datetime(2000, 1, 1),
                              is_dst=False).total_seconds()
    for name in [
            'high_time', 'low_time', 'time_sped_max', 'time_sped_gust_max'
    ]:
        val = getattr(self, name)
        if val is None:
            continue
        # Need to convert timestamp into standard time time, tricky
        ts = val - datetime.timedelta(seconds=offset)
        setattr(
            self, name,
            utc(ts.year, ts.month, ts.day, ts.hour,
                ts.minute).astimezone(tzinfo))
def main(argv):
    """Go Main Go.

    Args (argv[1:6]): year month day hour minute of the satellite scan.
    Runs channels 2, 9, and 13, cleaning up temp tif artifacts afterwards.
    """
    valid = utc(*[int(a) for a in argv[1:6]])
    LOG.debug("valid is set to: %s", valid)
    for channel in [2, 9, 13]:
        with tempfile.NamedTemporaryFile() as tmpfd:
            try:
                run(valid, channel, tmpfd.name)
            except Exception as exp:
                # Best-effort: log and continue with the next channel
                LOG.exception(exp)
            finally:
                # run() emits sibling files next to the temp file; remove them
                for part in ["", "_16", "_17"]:
                    fn = "%s%s.tif" % (tmpfd.name, part)
                    if not os.path.isfile(fn):
                        continue
                    os.unlink(fn)
def test_150915_isol():
    """ See about parsing a SIGMET ISOL """
    utcnow = utc(2015, 9, 12, 23, 55)
    ugc_provider = {}
    # Minimal NWSLI lookups needed to anchor the ISOL circle geometries
    nwsli_provider = {
        "FTI": dict(lon=-83.39, lat=44.45),
        "CME": dict(lon=-85.50, lat=42.79),
    }

    tp = parser(
        get_test_file("SIGMETS/SIGC_ISOL.txt"),
        utcnow,
        ugc_provider,
        nwsli_provider,
    )
    assert abs(tp.sigmets[0].geom.area - 0.30) < 0.01
    assert abs(tp.sigmets[1].geom.area - 0.30) < 0.01
def workflow(sts, ets, i, j):
    """Return a dict of our data.

    Args:
      sts, ets: timezone-aware start/end datetimes (inclusive).
      i, j: grid cell indices, or None when outside the domain.

    Returns:
      dict with hourly records, or an error dict for bad coordinates.
    """
    res = {"data": [], "generated_at": utc().strftime(ISO)}
    # BUG here for Dec 31. (only the sts year's netcdf file is opened)
    fn = iemre.get_hourly_ncname(sts.year)
    if not os.path.isfile(fn):
        return res
    if i is None or j is None:
        return {"error": "Coordinates outside of domain"}
    with ncopen(fn) as nc:
        now = sts
        while now <= ets:
            offset = iemre.hourly_offset(now)
            res["data"].append(
                {
                    "valid_utc": now.astimezone(pytz.UTC).strftime(ISO),
                    "valid_local": now.strftime(ISO),
                    "skyc_%": myrounder(nc.variables["skyc"][offset, j, i], 1),
                    # Temperatures stored in Kelvin; convert to Fahrenheit
                    "air_temp_f": myrounder(
                        datatypes.temperature(
                            nc.variables["tmpk"][offset, j, i], "K"
                        ).value("F"),
                        1,
                    ),
                    "dew_point_f": myrounder(
                        datatypes.temperature(
                            nc.variables["dwpk"][offset, j, i], "K"
                        ).value("F"),
                        1,
                    ),
                    "uwnd_mps": myrounder(
                        nc.variables["uwnd"][offset, j, i], 2
                    ),
                    "vwnd_mps": myrounder(
                        nc.variables["vwnd"][offset, j, i], 2
                    ),
                    # p01m is millimeters; convert to inches
                    "hourly_precip_in": myrounder(
                        nc.variables["p01m"][offset, j, i] / 25.4, 2
                    ),
                }
            )
            now += datetime.timedelta(hours=1)
    return res
def handler( key, time, tmpf, max_tmpf, min_tmpf, dwpf, relh, sknt, pday, alti, drct ): """Handle the request, return dict""" # sys.stderr.write(repr(fields)) if not PROPS: PROPS.update(get_properties()) lookup = {} for sid in ["OT0013", "OT0014", "OT0015", "OT0016"]: lookup[PROPS.get("meteobridge.key." + sid)] = sid if key not in lookup: raise HTTPException(status_code=404, detail="BAD_KEY") sid = lookup[key] if len(time) == 14: _t = time now = utc( int(_t[:4]), int(_t[4:6]), int(_t[6:8]), int(_t[8:10]), int(_t[10:12]), int(_t[12:14]), ) else: now = datetime.datetime.utcnow() now = now.replace(tzinfo=pytz.UTC) ob = Observation(sid, "OT", now) for fname in [ "tmpf", "max_tmpf", "min_tmpf", "dwpf", "relh", "sknt", "pday", "alti", "drct", ]: if vars()[fname] == "M": continue ob.data[fname] = float(vars()[fname]) pgconn = get_dbconn("iem") cursor = pgconn.cursor() ob.save(cursor) cursor.close() pgconn.commit() return "OK"
def compute_times(self, utcnow):
    """Figure out when this DSM is valid for.

    Args:
      utcnow: reference aware datetime, or None to use the current time.
    """
    if utcnow is None:
        utcnow = utc()
    ts = utcnow.replace(day=int(self.groupdict['day']),
                        month=int(self.groupdict['month']))
    # Is this ob from 'last year' (a December report processed in January)
    if ts.month == 12 and utcnow.month == 1:
        ts = ts.replace(year=(ts.year - 1))
    self.date = datetime.date(ts.year, ts.month, ts.day)
    self.high_time = compute_time(self.date,
                                  self.groupdict.get('hightime'))
    self.low_time = compute_time(self.date, self.groupdict.get('lowtime'))
    self.time_sped_max = compute_time(self.date,
                                      self.groupdict.get('time_sped_max'))
    self.time_sped_gust_max = compute_time(
        self.date, self.groupdict.get('time_sped_gust_max'))
def idot_dashcam_service(
    fmt: SupportedFormats,
    valid: datetime = Query(
        None, description="UTC timestamp to look for imagery."
    ),
    window: int = Query(
        15,
        description=("Number of minutes to look around the given valid."),
    ),
):
    """Replaced Below."""
    if valid is None:
        # Default to recent imagery, backing off twice the search window
        valid = utc() - timedelta(minutes=window * 2)
    if valid.tzinfo is None:
        # Treat naive client-supplied timestamps as UTC
        valid = valid.replace(tzinfo=timezone.utc)
    df = handler(valid, window)
    return deliver_df(df, fmt)
def test_wcn():
    """Special tweet logic for cancels and continues

    NOTE: with updated twitter tweet chars, these tests are not as fun
    """
    utcnow = utc(2014, 6, 3)
    ugc_provider = {}
    for u in range(1, 201, 2):
        # NOTE(review): precedence makes this min(u + 0.5, 40), not
        # min((u + 1) / 2, 40); expected strings below depend on the
        # current behavior, so left untouched — confirm intent.
        n = "a" * int(min((u + 1 / 2), 40))
        ugc_provider["IAC%03i" % (u, )] = UGC("IA", "C", "%03i" % (u, ),
                                              name=n, wfos=["DMX"])

    prod = vtecparser(get_test_file("SVS.txt"), utcnow=utcnow,
                      ugc_provider=ugc_provider)
    j = prod.get_jabbers("http://localhost/", "http://localhost/")
    assert prod.is_homogeneous()
    ans = ("DMX updates Severe Thunderstorm Warning [wind: 60 MPH, hail: "
           "<.75 IN] (cancels aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa "
           "[IA], continues aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa [IA]) "
           "till 10:45 PM CDT "
           "http://localhost/2014-O-CON-KDMX-SV-W-0143_2014-06-03T00:00Z")
    assert j[0][2]["twitter"] == ans

    prod = vtecparser(get_test_file("WCN.txt"), utcnow=utcnow,
                      ugc_provider=ugc_provider)
    j = prod.get_jabbers("http://localhost/", "http://localhost/")
    assert prod.is_homogeneous()
    ans = ("DMX updates Tornado Watch (cancels a, "
           "aaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
           "aaaaaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaa"
           "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa [IA], continues 12 counties "
           "in [IA]) till Jun 4, 1:00 AM CDT "
           "http://localhost/2014-O-CON-KDMX-TO-A-0210_2014-06-03T00:00Z")
    assert j[0][2]["twitter"] == ans
    ans = ("DMX updates Tornado Watch (cancels a, "
           "aaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
           "aaaaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaa"
           "aaaaaaaaaaaaaaaaaaaaaaaaaaaaa [IA], continues 12 counties "
           "in [IA]) till Jun 4, 1:00 AM CDT. "
           "http://localhost/2014-O-CON-KDMX-TO-A-0210_2014-06-03T00:00Z")
    assert j[0][0] == ans
def test_50e():
    """Exercise parsing of the 50E SIGMET product."""
    utcnow = utc(2014, 8, 11, 18, 55)
    nwsli_provider = {
        "ASP": dict(lon=-83.39, lat=44.45),
        "ECK": dict(lon=-82.72, lat=43.26),
        "GRR": dict(lon=-85.50, lat=42.79),
    }
    tp = parser(
        get_test_file("SIGMETS/SIGE3.txt"),
        utcnow,
        {},
        nwsli_provider,
    )
    # Polygon area in square degrees should be close to 2.15.
    assert abs(tp.sigmets[0].geom.area - 2.15) < 0.01
def main(argv):
    """Go Main Go.

    With five command line arguments (year month day hour minute), run for
    that explicit UTC timestamp.  Otherwise run against the current time,
    but only on odd minutes, processing five minutes in the past.
    """
    if len(argv) == 6:
        # Explicit timestamp provided on the command line.
        utcnow = utc(
            int(argv[1]),
            int(argv[2]),
            int(argv[3]),
            int(argv[4]),
            int(argv[5]),
        )
        do(utcnow)
    else:
        # Use utc() for the current time, consistent with the rest of the
        # codebase, rather than the deprecated datetime.utcnow() + pytz.
        utcnow = utc().replace(second=0, microsecond=0)
        # On odd minutes, run against five minutes ago.
        if utcnow.minute % 2 == 1:
            do(utcnow - datetime.timedelta(minutes=5), True)
def test_tortag():
    """Check handling of a Tornado Warning carrying VTEC tags."""
    utcnow = utc(2011, 8, 7, 4, 36)
    prod = vtecparser(get_test_file('TORtag.txt'), utcnow=utcnow)
    jmsgs = prod.get_jabbers('http://localhost/', 'http://localhost/')
    assert prod.is_homogeneous()
    expected = (
        '<p>DMX <a href="http://localhost/2011-'
        'O-NEW-KDMX-TO-W-0057">issues Tornado Warning</a> [tornado: '
        'OBSERVED, tornado damage threat: SIGNIFICANT, hail: 2.75 IN] '
        'for ((IAC117)), ((IAC125)), ((IAC135)) [IA] till 12:15 AM CDT '
        '* AT 1132 PM CDT...NATIONAL WEATHER SERVICE DOPPLER RADAR '
        'INDICATED A SEVERE THUNDERSTORM CAPABLE OF PRODUCING A TORNADO. '
        'THIS DANGEROUS STORM WAS LOCATED 8 MILES EAST OF CHARITON...'
        'OR 27 MILES NORTHWEST OF CENTERVILLE...AND MOVING NORTHEAST '
        'AT 45 MPH.</p>'
    )
    assert jmsgs[0][1] == expected
def get_description():
    """ Return a dict describing how to call this plotter """
    # Fixed typo in user-facing text: "specific" -> "specify".
    desc = {
        "cache": 300,
        "data": True,
        "description": """This application generates a map showing the
    coverage of a given VTEC alert for a given office.  The tricky part here
    is how time is handled for events whereby zones/counties can be added /
    removed from the alert.  If you specify an exact time, you should get
    the proper extent of the alert at that time.  If you do not specify the
    time, you should get the total inclusion of any zones/counties that were
    added to the alert.
    """,
    }
    now = utc()
    desc["arguments"] = [
        dict(
            optional=True,
            type="datetime",
            name="valid",
            default=now.strftime("%Y/%m/%d %H%M"),
            label="UTC Timestamp (inclusive) to plot the given alert at:",
            min="1986/01/01 0000",
        ),
        dict(
            type="networkselect",
            name="wfo",
            network="WFO",
            default="DMX",
            label="Select WFO:",
        ),
        dict(type="year", min=1986, default=2019, name="year", label="Year"),
        dict(
            type="vtec_ps",
            name="v",
            default="SV.W",
            label="VTEC Phenomena and Significance",
        ),
        dict(
            type="int",
            default=1,
            label="VTEC Event Identifier / Sequence Number",
            name="etn",
        ),
    ]
    return desc
def test_wcn():
    """Special tweet logic for cancels and continues

    NOTE: with updated twitter tweet chars, these tests are not as fun
    """
    utcnow = utc(2014, 6, 3)
    ugc_provider = {}
    for u in range(1, 201, 2):
        n = 'a' * int(min((u + 1 / 2), 40))
        ugc_provider["IAC%03i" % (u, )] = UGC('IA', 'C', "%03i" % (u, ),
                                              name=n, wfos=['DMX'])
    prod = vtecparser(get_test_file('SVS.txt'), utcnow=utcnow,
                      ugc_provider=ugc_provider)
    j = prod.get_jabbers('http://localhost/', 'http://localhost/')
    assert prod.is_homogeneous()
    ans = ("DMX updates Severe Thunderstorm Warning [wind: 60 MPH, hail: "
           "<.75 IN] (cancels aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa "
           "[IA], continues aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa [IA]) "
           "till 10:45 PM CDT http://localhost/2014-O-CON-KDMX-SV-W-0143")
    assert j[0][2]['twitter'] == ans
    prod = vtecparser(get_test_file('WCN.txt'), utcnow=utcnow,
                      ugc_provider=ugc_provider)
    j = prod.get_jabbers('http://localhost/', 'http://localhost/')
    assert prod.is_homogeneous()
    # BUG FIX: these two checks were written as `assert value, (message)`,
    # which treats the expected string as the assertion *message* and passes
    # for any truthy value -- the comparisons never ran.  Compare with `==`.
    ans = ("DMX updates Tornado Watch (cancels a, "
           "aaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
           "aaaaaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaa"
           "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa [IA], continues 12 counties "
           "in [IA]) till Jun 4, 1:00 AM CDT "
           "http://localhost/2014-O-CON-KDMX-TO-A-0210")
    assert j[0][2]['twitter'] == ans
    ans = ('DMX updates Tornado Watch (cancels a, '
           'aaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
           'aaaaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaa'
           'aaaaaaaaaaaaaaaaaaaaaaaaaaaaa [IA], continues 12 counties '
           'in [IA]) till Jun 4, 1:00 AM CDT. '
           'http://localhost/2014-O-CON-KDMX-TO-A-0210')
    assert j[0][0] == ans
def test_1():
    """ PIREP.txt, can we parse it! """
    utcnow = utc(2015, 1, 9, 0, 0)
    # Minimal location table for the stations referenced in the product.
    nwsli_provider = {
        sid: {"lat": 44 + idx, "lon": 99 + idx}
        for idx, sid in enumerate(["BIL", "LBY", "PUB", "HPW"])
    }
    prod = pirepparser(
        get_test_file("PIREPS/PIREP.txt"),
        utcnow=utcnow,
        nwsli_provider=nwsli_provider,
    )
    assert not prod.warnings
    jmsgs = prod.get_jabbers("unused")
    assert jmsgs[0][2]["channels"] == "UA.None,UA.PIREP"
def main(argv):
    """Go Main Go."""
    log = logger()
    if len(argv) == 6:
        # Hourly mode: a full timestamp was provided.
        ts = utc(int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4]))
        ncfn = iemre.get_hourly_ncname(ts.year)
        tidx = iemre.hourly_offset(ts)
    else:
        # Daily mode: only year/month/day were provided.
        ts = datetime.date(int(argv[1]), int(argv[2]), int(argv[3]))
        ncfn = iemre.get_daily_ncname(ts.year)
        tidx = iemre.daily_offset(ts)
    grids = iemre.get_grids(ts)
    with ncopen(ncfn, 'a', timeout=600) as nc:
        for varname in grids:
            # Skip any grid variable the netCDF file does not carry.
            if varname not in nc.variables:
                continue
            log.debug("copying database var %s to netcdf", varname)
            nc.variables[varname][tidx, :, :] = grids[varname].values
def test_180202_issue54(dbcursor):
    """Are we doing the right thing with VTEC EXP actions?"""

    def get_expire(colname):
        """get expiration"""
        # colname is an internal constant, not untrusted input.
        dbcursor.execute(
            f"SELECT distinct {colname} from warnings_2018 WHERE wfo = 'LWX' "
            "and eventid = 6 and phenomena = 'WW' and significance = 'Y'")
        assert dbcursor.rowcount == 1
        return dbcursor.fetchone()[0]

    expirets = utc(2018, 2, 2, 9)
    # Process the three product updates in sequence.
    for seq in range(3):
        prod = vtecparser(get_test_file("vtec/WSWLWX_%s.txt" % (seq, )))
        prod.sql(dbcursor)
        assert not filter_warnings(prod.warnings)
        assert get_expire("expire") == expirets
        assert get_expire("updated") == prod.valid
def test_140714_segmented_watch():
    """ Two segmented watch text formatting stinks """
    utcnow = utc(2014, 7, 14, 17, 25)
    prod = vtecparser(get_test_file('WCNPHI.txt'), utcnow=utcnow)
    jmsgs = prod.get_jabbers('http://localhost', 'http://localhost')
    expected = (
        "PHI issues Severe Thunderstorm Watch (issues ((DEC001)), "
        "((DEC003)), ((DEC005)) [DE] and ((MDC011)), ((MDC015)), "
        "((MDC029)), ((MDC035)), ((MDC041)) [MD] and ((NJC001)), "
        "((NJC005)), ((NJC007)), ((NJC009)), ((NJC011)), ((NJC015)), "
        "((NJC019)), ((NJC021)), ((NJC023)), ((NJC025)), ((NJC027)), "
        "((NJC029)), ((NJC033)), ((NJC035)), ((NJC037)), ((NJC041)) [NJ] "
        "and ((PAC011)), ((PAC017)), ((PAC025)), ((PAC029)), ((PAC045)), "
        "((PAC077)), ((PAC089)), ((PAC091)), ((PAC095)), ((PAC101)) [PA], "
        "issues ((ANZ430)), ((ANZ431)), ((ANZ450)), ((ANZ451)), "
        "((ANZ452)), ((ANZ453)), ((ANZ454)), ((ANZ455)) [AN]) "
        "till Jul 14, 8:00 PM EDT. "
        "http://localhost2014-O-NEW-KPHI-SV-A-0418"
    )
    assert jmsgs[0][0] == expected