def main(argv):
    version = '1.0.0'
    configpath = ''
    statusmsg = {}
    debug = False
    endtime = datetime.utcnow()
    varioinst = ''
    scalainst = ''
    try:
        opts, args = getopt.getopt(argv, "hc:e:D", ["config=", "endtime=", "debug=", ])
    except getopt.GetoptError:
        print('getprimary.py -c <config>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('-------------------------------------')
            print('Description:')
            print('-- getprimary.py will determine the primary instruments --')
            print('-----------------------------------------------------------------')
            print('detailed description ..')
            print('...')
            print('...')
            print('-------------------------------------')
            print('Usage:')
            print('python getprimary.py -c <config>')
            print('-------------------------------------')
            print('Options:')
            print('-c (required) : configuration data path')
            print('-------------------------------------')
            print('Application:')
            print('python getprimary.py -c /etc/marcos/analysis.cfg')
            sys.exit()
        elif opt in ("-c", "--config"):
            # configuration data path
            configpath = os.path.abspath(arg)
        elif opt in ("-e", "--endtime"):
            # get an endtime
            endtime = arg.split(',')
        elif opt in ("-D", "--debug"):
            # activate debug mode
            debug = True

    print("Running getprimary version {}".format(version))
    print("--------------------------------")

    if not os.path.exists(configpath):
        print('Specify a valid path to configuration information')
        print('-- check getprimary.py -h for more options and requirements')
        sys.exit()

    print("1. Read and check validity of configuration data")
    config = GetConf(configpath)

    print("2. Activate logging scheme as selected in config")
    config = DefineLogger(config=config, category="DataProducts", job=os.path.basename(__file__), newname='mm-dp-getprimary.log')
    name1 = "{}-process".format(config.get('logname'))
    name2 = "{}-vario".format(config.get('logname'))
    name3 = "{}-scalar".format(config.get('logname'))
    name4 = "{}-write".format(config.get('logname'))
    statusmsg[name1] = 'successful'

    print("3. Connect databases and select first available")
    try:
        config = ConnectDatabases(config=config, debug=debug)
        db = config.get('primaryDB')
        if debug:
            print(" -- success")
    except:
        if debug:
            print(" -- database failed")
        statusmsg[name1] = 'database failed'

    print("4. Checking variometer instruments")
    try:
        varioinst, statusmsg = PrimaryVario(db, config.get('variometerinstruments'), endtime=endtime, logname=name2, statusmsg=statusmsg, debug=debug)
        print(" -> Using {}".format(varioinst))
    except:
        if debug:
            print(" -- vario failed")
        statusmsg[name1] = 'vario failed'

    print("5. Checking scalar instruments")
    try:
        scalainst, statusmsg = PrimaryScalar(db, config.get('scalarinstruments'), endtime=endtime, logname=name3, statusmsg=statusmsg, debug=debug)
        print(" -> Using {}".format(scalainst))
    except:
        if debug:
            print(" -- scalar failed")
        statusmsg[name1] = 'scalar failed'

    print("6. Updating Current values")
    try:
        if not debug:
            statusmsg = UpdateCurrentValuePath(config.get('currentvaluepath'), varioinst=varioinst, scalainst=scalainst, logname=name4, statusmsg=statusmsg, debug=debug)
    except:
        if debug:
            print(" -- update current failed")
        statusmsg[name1] = 'current value update failed'

    if not debug:
        martaslog = ml(logfile=config.get('logfile'), receiver=config.get('notification'))
        martaslog.telegram['config'] = config.get('notificationconfig')
        martaslog.msg(statusmsg)
    else:
        print("Debug selected - statusmsg looks like:")
        print(statusmsg)
def main(argv): """ METHODS: read_kpnow_data(source) read_swpam_data(source, limit=5) _execute_sql swtableinit() """ version = "1.0.0" kpsource = '/srv/archive/external/gfz/kp/' swsource = '/srv/archive/external/esa-nasa/ace/raw/' kpname = 'gfzkp*' swename = '*_swepam_1m.txt' magname = '*_mag_1m.txt' configpath = '' # is only necessary for monitoring sqllist = [] debug = False init = False # create table if TRUE statusmsg = {} usage = 'sw_extractor.py -c <config> -k <kpsource> -s <swesource> ' try: opts, args = getopt.getopt(argv,"hc:k:s:o:p:b:e:ID",["config=","kpsource=","swesource=","init=","debug=",]) except getopt.GetoptError: print(usage) sys.exit(2) for opt, arg in opts: if opt == '-h': print('-------------------------------------') print('Description:') print('sw-extractor.py extracts CME predictions from Scoreboard ') print('-------------------------------------') print('Usage:') print(usage) print('-------------------------------------') print('Options:') print('-c : configuration data ') print('-k : Kp source - default is ') print('-s : SWE source - default is ') print('-------------------------------------') print('Examples:') print('---------') print('python3 sw_extractor.py -c /home/cobs/CONF/wic.cfg -k /srv/archive/external/gfz/kp/gfzkp* -s /srv/archive/external/esa-nasa/ace/raw/ -D') print('---------') sys.exit() elif opt in ("-c", "--config"): configpath = os.path.abspath(arg) elif opt in ("-k", "--kpsource"): kpsource = os.path.abspath(arg) elif opt in ("-s", "--swesource"): swsource = os.path.abspath(arg) elif opt in ("-n", "--kpname"): kpname = arg elif opt in ("-w", "--swename"): swname = arg elif opt in ("-m", "--magname"): magname = arg elif opt in ("-I", "--init"): init = True elif opt in ("-D", "--debug"): debug = True if debug: print ("Running sw-extractor version:", version) # 1. conf and logger: # ########################### if debug: print ("Read and check validity of configuration data") print (" and activate logging scheme as selected in config") config = GetConf(configpath) config = DefineLogger(config=config, category = "Info", job=os.path.basename(__file__), newname='mm-info-sw.log', debug=debug) if debug: print (" -> Done") # 2. database: # ########################### try: config = ConnectDatabases(config=config, debug=debug) db = config.get('primaryDB') connectdict = config.get('conncetedDB') except: statusmsg[name1] = 'database failed' # 3. Read Kp data: # ########################### try: sqllist = read_kpnow_data(os.path.join(kpsource,kpname),debug=debug) statusmsg['Kp access'] = 'success' except: statusmsg['Kp access'] = 'failed' # 4. Read ACE swepam data: # ########################### try: newsql = read_swepam_data(os.path.join(swsource,swename),debug=debug) sqllist.extend(newsql) statusmsg['ACE swepam access'] = 'success' except: statusmsg['ACE swepam access'] = 'failed' # 5. Read ACE mag data: # ########################### try: newsql = read_mag_data(os.path.join(swsource,magname),debug=debug) sqllist.extend(newsql) statusmsg['ACE mag access'] = 'success' except: statusmsg['ACE mag access'] = 'failed' # 6. Read GIC data: # ########################### try: newsql = read_gicnow_data(db,source='GICAUT',maxsensor=9, minutes=5, debug=debug) sqllist.extend(newsql) statusmsg['GIC data access'] = 'success' except: statusmsg['GIC data access'] = 'failed' # 7. 
Read GOES data: # ########################### try: if debug: print ("Running GOES") goespath = '/srv/archive/external/esa-nasa/goes' newsql = read_xrs_data(os.path.join(goespath,'XRS_GOES16*'), debug=debug) sqllist.extend(newsql) statusmsg['XRS data access'] = 'success' except: statusmsg['XRS data access'] = 'failed' # 8. Read PREDSTORM data: # ########################### try: if debug: print ("Running PREDSTORM") predpath = '/srv/archive/external/helio4cast/predstorm' psql = read_predstorm_data(os.path.join(predpath,'PREDSTORM*'), debug=debug) sqllist.extend(psql) statusmsg['PREDSTORM data access'] = 'success' except: # no predstorm data between 23:00 and 2:00 MET # just put success message if hitting except in this time range statusmsg['PREDSTORM data access'] = 'failed' if time_between(datetime.utcnow().time(),dttime(21,0),dttime(0,0)): statusmsg['PREDSTORM data access'] = 'success' sqllist = [el for el in sqllist if el] if debug: print ("Debug selected - sql call looks like:") print (sqllist) else: for dbel in connectdict: db = connectdict[dbel] print (" -- Writing data to DB {}".format(dbel)) if init: swtableinit(db) if len(sqllist) > 0: _execute_sql(db,sqllist, debug=debug) # Logging section # ########################### if not debug and config: martaslog = ml(logfile=config.get('logfile'),receiver=config.get('notification')) martaslog.telegram['config'] = config.get('notificationconfig') martaslog.msg(statusmsg) else: print ("Debug selected - statusmsg looks like:") print (statusmsg)
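# Minimal sketch of the _execute_sql helper listed in the docstring above, assuming
# db is a DB-API 2.0 style connection (e.g. pymysql) and sqllist holds complete
# INSERT/REPLACE statements produced by the read_* helpers; the actual helper in
# this repository may differ.
def _execute_sql(db, sqllist, debug=False):
    """Execute a list of SQL statements on an open database connection."""
    cursor = db.cursor()
    for sql in sqllist:
        if debug:
            print(" executing: {}".format(sql))
        try:
            cursor.execute(sql)
        except Exception as exc:
            print(" -- statement failed: {}".format(exc))
    db.commit()
    cursor.close()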
def main(argv):
    version = '1.0.0'
    configpath = ''
    statusmsg = {}
    debug = False
    endtime = datetime.utcnow()
    joblist = ['NEIC', 'AT']
    stb = DataStream()
    sta = DataStream()
    errorcntAT = 0
    errorcntNE = 0
    uploadcheck = 1
    path = '/tmp/neic_quakes.d'
    try:
        opts, args = getopt.getopt(argv, "hc:e:j:p:s:o:D", ["config=", "endtime=", "joblist=", "debug="])
    except getopt.GetoptError:
        print('quakes_import.py -c <config>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('-------------------------------------')
            print('Description:')
            print('-- quakes_import.py will import recent earthquake data --')
            print('-----------------------------------------------------------------')
            print('detailed description ..')
            print('...')
            print('...')
            print('-------------------------------------')
            print('Usage:')
            print('python quakes_import.py -c <config>')
            print('-------------------------------------')
            print('Options:')
            print('-c (required) : configuration data path')
            print('-e            : endtime, default is now')
            print('-j            : joblist: NEIC, AT')
            print('-p            : path for neic data')
            print('-------------------------------------')
            print('Application:')
            print('python quakes_import.py -c /etc/marcos/analysis.cfg -p /home/cobs/ANALYSIS/Seismo/neic_quakes.d')
            sys.exit()
        elif opt in ("-c", "--config"):
            # configuration data path
            configpath = os.path.abspath(arg)
        elif opt in ("-e", "--endtime"):
            # get an endtime
            endtime = arg.split(',')
        elif opt in ("-j", "--joblist"):
            # get a joblist
            joblist = arg.split(',')
        elif opt in ("-p", "--path"):
            path = arg
        elif opt in ("-D", "--debug"):
            # activate debug mode
            debug = True

    print("Running quakes_import version {}".format(version))
    print("--------------------------------")

    if not os.path.exists(configpath):
        print('Specify a valid path to configuration information')
        print('-- check quakes_import.py -h for more options and requirements')
        sys.exit()

    print("1. Read and check validity of configuration data")
    config = GetConf(configpath)

    print("2. Activate logging scheme as selected in config")
    config = DefineLogger(config=config, category="DataProducts", job=os.path.basename(__file__), newname='mm-di-quakes.log', debug=debug)
    namea = "{}-quakes-AT".format(config.get('logname'))
    nameb = "{}-quakes-NEIC".format(config.get('logname'))
    currentvaluepath = config.get('currentvaluepath')

    print("3. Connect databases and select first available")
    try:
        config = ConnectDatabases(config=config, debug=debug)
        db = config.get('primaryDB')
        connectdict = config.get('conncetedDB')
    except:
        statusmsg['database'] = 'database failed'
    print(" -> connected databases: {}".format(connectdict))

    proxy = ''
    prox = config.get('proxy', '')
    proxport = config.get('proxyport')
    if prox:
        proxy = "--proxy http://{}:{} ".format(prox, proxport)

    (startlong, startlat) = dbcoordinates(db, 'A2')

    if 'AT' in joblist:
        try:
            print("Getting Austrian data")
            print("---------------------")
            statusmsg[namea] = 'Austrian data added'
            if not stb.length()[0] > 0:
                # only load it once
                print(" - getting Austrian data from geoweb")
                stb = read('http://geoweb.zamg.ac.at/static/event/lastweek.csv')
                stb.header['DataFormat'] = 'NEICCSV'
                stb.header['DataSource'] = 'Austrian Seismological Service'
                stb.header['DataReferences'] = 'http://geoweb.zamg.ac.at/static/event/lastmonth.csv'
            if debug:
                print(" - Found :", stb.length())
            dct = stb._get_key_names()
            poslon = KEYLIST.index(dct.get('longitude'))
            poslat = KEYLIST.index(dct.get('latitude'))
            lonar = stb.ndarray[poslon]
            latar = stb.ndarray[poslat]
            # calculate distance between points
            from pyproj import Geod
            g = Geod(ellps='WGS84')
            ar = []
            for idx, el in enumerate(lonar):
                (az12, az21, dist) = g.inv(startlong, startlat, el, latar[idx])
                ar.append(dist / 1000.)
            pos = KEYLIST.index('var5')
            stb.header['col-var5'] = 'distance from COBS'
            stb.header['unit-col-var5'] = 'km'
            stb.ndarray[pos] = np.asarray(ar)
            # Add to DB like BLV data, Kp data
            # Please Note: you must manually insert a DATAID in DATAINFO to upload header data
            # insert into DATAINFO (DataID, SensorID) VALUES ('QUAKES','QUAKES');
            stb.header['StationID'] = 'SGO'
            stb = stb.extract('f', 2, '>=')
            if debug:
                print(" - Found :", stb.length())
            if not debug:
                dbupdateDataInfo(db, 'QUAKES', stb.header)
            stb = fix_datastream_for_db(stb)
            if not debug:
                for dbel in connectdict:
                    dbt = connectdict[dbel]
                    print(" -- Writing AT Quakes to DB {}".format(dbel))
                    writeDB(dbt, stb, tablename='QUAKES', StationID='SGO')
                print(" -> Done")
            else:
                print(" - Debug selected: ")
                print("   last line of AT {}".format(stb.length()))
            print(" -> Austrian data has been added to all databases")
            print("----------------------------")
            print("  Now writing last AT update into current value")
            #statusmsg[namea] = 'Austrian data added'
            errorcntAT = 0
            # update upload time in current data file
            fulldict = getcurrentdata(currentvaluepath)
            valdict = fulldict.get('logging', {})
            uploadtime = datetime.strftime(datetime.utcnow(), "%Y-%m-%d %H:%M")
            valdict['seismoATdata'] = [uploadtime, '']
            fulldict[u'logging'] = valdict
            if not debug:
                print("  Writing update time to current data")
                writecurrentdata(currentvaluepath, fulldict)
            print(" -> Updating time has been written to current data")
            print("----------------------------")
            print("  DONE")
            print("----------------------------")
        except:
            errorcntAT += 1
            if errorcntAT > 1:
                message = True
                #statusmsg[namea] = 'Austrian data failed'
                fulldict = getcurrentdata(currentvaluepath)
                valdict = fulldict.get('logging', {})
                try:
                    lastupload = datetime.strptime(valdict.get('seismoATdata', ['', ''])[0], "%Y-%m-%d %H:%M")
                    if not lastupload < datetime.utcnow() - timedelta(hours=uploadcheck):
                        message = False
                except:
                    message = True
                if message:
                    print('Austrian data failed')
                    statusmsg[namea] = 'Austrian data failed'

    if 'NEIC' in joblist:
        #try:
        print("Downloading NEIC data")
        print("---------------------")
        statusmsg[nameb] = 'NEIC data added'
        if not sta.length()[0] > 0:
            # only load it once
            print(" - getting NEIC data from usgs")
            os.system('curl https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/4.5_week.csv {}-s > {}'.format(proxy, path))
            sta = read(path)
            sta.header['DataFormat'] = 'NEICCSV'
            #sta.header['DataSource'] = 'US National Earthquake information center'
            #sta.header['DataReferences'] = 'http://earthquake.usgs.gov'
        dct = sta._get_key_names()
        poslon = KEYLIST.index(dct.get('longitude'))
        poslat = KEYLIST.index(dct.get('latitude'))
        lonar = sta.ndarray[poslon]
        latar = sta.ndarray[poslat]
        # calculate distance between points
        from pyproj import Geod
        g = Geod(ellps='WGS84')
        ar = []
        for idx, el in enumerate(lonar):
            (az12, az21, dist) = g.inv(startlong, startlat, el, latar[idx])
            ar.append(dist / 1000.)
        pos = KEYLIST.index('var5')
        sta.header['col-var5'] = 'distance from COBS'
        sta.header['unit-col-var5'] = 'km'
        sta.ndarray[pos] = np.asarray(ar)
        # Add to DB like BLV data, Kp data
        # Please Note: you must manually insert a DATAID in DATAINFO to upload header data
        # insert into DATAINFO (DataID, SensorID) VALUES ('QUAKES','QUAKES');
        sta.header['StationID'] = 'SGO'
        sta = sta.extract('f', 5, '>=')
        print(" got {} quakes above magnitude 5".format(sta.length()[0]))
        if not debug:
            dbupdateDataInfo(db, 'QUAKES', sta.header)
        sta = fix_datastream_for_db(sta)
        if not debug:
            for dbel in connectdict:
                dbt = connectdict[dbel]
                print(" -- Writing NEIC Quakes to DB {}".format(dbel))
                writeDB(dbt, sta, tablename='QUAKES', StationID='SGO')
            print(" -> done")
        else:
            print(" - Debug selected: ")
            print("   last line of NEIC {}".format(sta.length()))
        print("NEIC data has been added")
        print("----------------------------")
        print("  Now writing last NEIC update into current value")
        #statusmsg[nameb] = 'NEIC data added'
        errorcntNE = 0
        # update upload time in current data file
        fulldict = getcurrentdata(currentvaluepath)
        valdict = fulldict.get('logging', {})
        uploadtime = datetime.strftime(datetime.utcnow(), "%Y-%m-%d %H:%M")
        valdict['seismoNEICdata'] = [uploadtime, '']
        fulldict[u'logging'] = valdict
        if not debug:
            writecurrentdata(currentvaluepath, fulldict)
        """
        except:
            print (" error encountered")
            errorcntNE += 1
            if errorcntNE > 1:
                message = True
                fulldict = getcurrentdata(currentvaluepath)
                valdict = fulldict.get('logging',{})
                try:
                    lastupload = datetime.strptime(valdict.get('seismoNEICdata',['',''])[0],"%Y-%m-%d %H:%M")
                    if not lastupload < datetime.utcnow()-timedelta(hours=uploadcheck):
                        message = False
                except:
                    message = True
                if message:
                    statusmsg[nameb] = 'NEIC data failed'
        """

    print("------------------------------------------")
    print("  quakes_import finished")
    print("------------------------------------------")
    print("SUCCESS")

    if not debug:
        martaslog = ml(logfile=config.get('logfile'), receiver=config.get('notification'))
        martaslog.telegram['config'] = config.get('notificationconfig')
        martaslog.msg(statusmsg)
    else:
        print("Debug selected - statusmsg looks like:")
        print(statusmsg)
def main(argv):
    version = __version__
    configpath = ''
    statusmsg = {}
    path = ''
    dayrange = 1
    debug = False
    endtime = datetime.utcnow()
    try:
        opts, args = getopt.getopt(argv, "hc:r:e:l:D", ["config=", "range=", "endtime=", "debug=", ])
    except getopt.GetoptError:
        print('general_graph.py -c <config>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('-------------------------------------')
            print('Description:')
            print('-- general_graph.py will plot sensor data --')
            print('-----------------------------------------------------------------')
            print('detailed description ..')
            print('...')
            print('...')
            print('-------------------------------------')
            print('Usage:')
            print('python general_graph.py -c <config>')
            print('-------------------------------------')
            print('Options:')
            print('-c (required) : configuration data path')
            print('-r            : range in days')
            print('-e            : endtime')
            print('-------------------------------------')
            print('Application:')
            print('python general_graph.py -c /etc/marcos/analysis.cfg')
            print('# debug run on my machine')
            print('python3 general_graph.py -c ../conf/wic.cfg -s debug -k x,y,z -f none -D')
            sys.exit()
        elif opt in ("-c", "--config"):
            # configuration data path
            configpath = os.path.abspath(arg)
        elif opt in ("-r", "--range"):
            # range in days
            dayrange = int(arg)
        elif opt in ("-e", "--endtime"):
            # endtime of the plot
            endtime = arg
        elif opt in ("-D", "--debug"):
            # activate debug mode
            debug = True

    if debug:
        print("Running ... graph creator version {}".format(version))

    if not os.path.exists(configpath):
        print('Specify a valid path to configuration information')
        print('-- check general_graph.py -h for more options and requirements')
        sys.exit(0)

    if endtime:
        try:
            endtime = DataStream()._testtime(endtime)
        except:
            print("Endtime could not be interpreted - Aborting")
            sys.exit(1)
    else:
        endtime = datetime.utcnow()

    #>> EDIT >>>>>>>>>>>>>>>>>>>>>>>>
    newloggername = 'mm-pp-myplot'
    category = "MyPlot"
    #<<<<<<<<<<<<<<<<<<<<<<<< EDIT <<

    print("1. Read and check validity of configuration data")
    config = GetConf(configpath)

    print("2. Activate logging scheme as selected in config")
    config = DefineLogger(config=config, category=category, job=os.path.basename(__file__), newname=newloggername, debug=debug)
    monitorname = "{}-plot".format(config.get('logname'))

    print("3. Connect to databases")
    config = ConnectDatabases(config=config, debug=debug)

    #try:
    #    print ("4. Read and Plot method")
    success = CreateDiagram(config=config, endtime=endtime, dayrange=dayrange, debug=debug)
    #    statusmsg[namecheck1] = "success"
    #    if not success:
    #        statusmsg
    #except:
    #    statusmsg[namecheck1] = "failure"

    if not debug:
        martaslog = ml(logfile=config.get('logfile'), receiver=config.get('notification'))
        martaslog.telegram['config'] = config.get('notificationconfig')
        martaslog.msg(statusmsg)
    else:
        print("Debug selected - statusmsg looks like:")
        print(statusmsg)
def main(argv):
    try:
        version = __version__
    except:
        version = "1.0.0"
    configpath = ''
    statusmsg = {}
    joblist = ['vario', 'scalar']
    debug = False
    endtime = None
    try:
        opts, args = getopt.getopt(argv, "hc:j:e:D", ["config=", "joblist=", "endtime=", "debug=", ])
    except getopt.GetoptError:
        print('magnetism_checkadj.py -c <config>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('-------------------------------------')
            print('Description:')
            print('-- magnetism_checkadj.py will analyse magnetic data --')
            print('-----------------------------------------------------------------')
            print('detailed description ..')
            print('...')
            print('...')
            print('-------------------------------------')
            print('Usage:')
            print('python magnetism_checkadj.py -c <config>')
            print('-------------------------------------')
            print('Options:')
            print('-c (required) : configuration data path')
            print('-j            : vario, scalar')
            print('-e            : endtime')
            print('-------------------------------------')
            print('Application:')
            print('python magnetism_checkadj.py -c /etc/marcos/analysis.cfg')
            sys.exit()
        elif opt in ("-c", "--config"):
            # configuration data path
            configpath = os.path.abspath(arg)
        elif opt in ("-j", "--joblist"):
            # get a list of jobs (vario, scalar)
            joblist = arg.split(',')
        elif opt in ("-e", "--endtime"):
            endtime = arg
        elif opt in ("-D", "--debug"):
            # activate debug mode
            debug = True

    if debug:
        print("Running magnetism_checkadj version {} - debug mode".format(version))
        print("---------------------------------------")

    if not os.path.exists(configpath):
        print('Specify a valid path to configuration information')
        print('-- check magnetism_checkadj.py -h for more options and requirements')
        sys.exit()

    if endtime:
        try:
            endtime = DataStream()._testtime(endtime)
        except:
            print("Endtime could not be interpreted - Aborting")
            sys.exit(1)
    else:
        endtime = datetime.utcnow()

    print("1. Read configuration data")
    config = GetConf(configpath)
    config = ConnectDatabases(config=config, debug=debug)

    print("2. Activate logging scheme as selected in config")
    config = DefineLogger(config=config, category="DataProducts", job=os.path.basename(__file__), newname='mm-dp-magdatacheck.log', debug=debug)
    if debug:
        print(" -> Config contents:")
        print(config)

    print("3. Get basic information of current data")
    config, statusmsg = GetPrimaryInstruments(config=config, statusmsg=statusmsg, fallback=False, debug=debug)

    if 'vario' in joblist:
        print("4. Variometer analysis")
        namecheck1 = "{}-check-adjusted".format(config.get('logname'))
        msg = CompareAdjustedVario(config=config, endtime=endtime, debug=debug)
        statusmsg[namecheck1] = msg

    if 'scalar' in joblist:
        print("5. Scalar analysis")
        namecheck2 = "{}-check-f".format(config.get('logname'))
        msg = CompareFieldStrength(config=config, endtime=endtime, debug=debug)
        statusmsg[namecheck2] = msg

    if not debug:
        martaslog = ml(logfile=config.get('logfile'), receiver=config.get('notification'))
        martaslog.telegram['config'] = config.get('notificationconfig')
        martaslog.msg(statusmsg)
    else:
        print("Debug selected - statusmsg looks like:")
        print(statusmsg)
def main(argv):
    version = __version__
    configpath = ''
    statusmsg = {}
    outpath = '/tmp'
    sensordefpath = ''
    sensordefs = {}
    dayrange = 3
    plotstyle = 'magpy'  # one in magpy, xxx
    starttime = None
    endtime = None
    newloggername = 'mm-pp-tilt.log'
    flaglist = []
    plotname = 'debug'
    debug = False
    opacity = 0.7
    fullday = False
    show = False
    confinex = False
    gridcolor = '#316931'
    dropflagged = False
    sensorid = 'LM_TILT01_0001'
    keylist = None
    try:
        opts, args = getopt.getopt(argv, "hc:i:o:y:s:e:r:l:D", ["config=", "input=", "output=", "style=", "starttime=", "endtime=", "range=", "loggername=", "debug=", ])
    except getopt.GetoptError:
        print('try general_graph.py -h for instructions')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('-------------------------------------')
            print('Description:')
            print('-- general_graph.py will plot sensor data --')
            print('-----------------------------------------------------------------')
            print('detailed description ..')
            print('...')
            print('...')
            print('-------------------------------------')
            print('Usage:')
            print('python general_graph.py -c <config>')
            print('-------------------------------------')
            print('Options:')
            print('-c (required) : configuration data path')
            print('-i            : input json file for sensor information')
            print('-o            : output directory (or file) to save the graph')
            print('-y            : plot style')
            print('-l            : loggername e.g. mm-pp-tilt.log')
            print('-s            : starttime')
            print('-e            : endtime')
            print('-r            : range in days')
            print('-------------------------------------')
            print('Application:')
            print('python3 general_graph.py -c ../conf/wic.cfg -i ../conf/sensordef_plot.json -e 2020-12-17')
            print('# debug run on my machine')
            print('python3 general_graph.py -c ../conf/wic.cfg -e 2020-12-17 -D')
            sys.exit()
        elif opt in ("-c", "--config"):
            # configuration data path
            configpath = os.path.abspath(arg)
        elif opt in ("-i", "--input"):
            # input json file for sensor information
            sensordefpath = os.path.abspath(arg)
        elif opt in ("-o", "--output"):
            # output directory (or file) to save the graph
            outpath = os.path.abspath(arg)
        elif opt in ("-y", "--style"):
            # define a plotstyle
            plotstyle = arg
        elif opt in ("-s", "--starttime"):
            # starttime of the plot
            starttime = arg
        elif opt in ("-e", "--endtime"):
            # endtime of the plot
            endtime = arg
        elif opt in ("-r", "--range"):
            # range in days
            dayrange = int(arg)
        elif opt in ("-l", "--loggername"):
            # loggername
            newloggername = arg
        elif opt in ("-D", "--debug"):
            # activate debug mode
            debug = True

    if debug:
        print("Running graph creator in analysis version {}".format(version))

    if not os.path.exists(configpath):
        print('Specify a valid path to configuration information')
        print('-- check general_graph.py -h for more options and requirements')
        sys.exit()

    if not os.path.exists(sensordefpath):
        print('Sensordefinitions not found...')
        if debug:
            print(' ... but debug selected - using dummy values')
            sensordefs = debugsensor
            # creating a dummy sensordefs file in tmp
            print(' ... and now creating an example in /tmp/sensordefinitions_default.json')
            WriteMemory('/tmp/sensordefinitions_default.json', sensordefs)
        else:
            print('-- check general_graph.py -h for more options and requirements')
            sys.exit()

    if endtime:
        try:
            endtime = DataStream()._testtime(endtime)
        except:
            print("Endtime could not be interpreted - Aborting")
            sys.exit(1)
    else:
        endtime = datetime.utcnow()

    if not starttime:
        starttime = endtime - timedelta(days=dayrange)
    else:
        try:
            starttime = DataStream()._testtime(starttime)
            dayrange = int((endtime - starttime).days)
        except:
            print("Starttime could not be interpreted - Aborting")
            sys.exit(1)

    # general test environment:
    if debug and sensorid == 'travis':
        print(" basic code test successful")
        sys.exit(0)

    print("1. Read and check validity of configuration data")
    config = GetConf(configpath)

    print("2. Activate logging scheme as selected in config")
    config = DefineLogger(config=config, category="TestPlotter", job=os.path.basename(__file__), newname=newloggername, debug=debug)

    print("3. Connect to databases")
    config = ConnectDatabases(config=config, debug=debug)

    print("4. Read sensordefinitions")
    if not sensordefs:
        print(sensordefpath)
        sensordefs = ReadMemory(sensordefpath)
        plotname = os.path.basename(sensordefpath).replace('.json', '').replace('_plot', '')
        print("Plotname : ", plotname)
        statname = "plot-{}".format(plotname)
    else:
        statname = "plot-{}".format('debug')

    print("4.1 Extracting some basic definitions from sensor configuration")
    senspar = sensordefs.get('parameter', {})
    if senspar.get('fullday', 'False') in ['True', 'true', 'TRUE', True]:
        fullday = True
    try:
        opacity = float(senspar.get('opacity', 0.7))
    except:
        pass
    if senspar.get('show', 'False') in ['True', 'true', 'TRUE', True]:
        show = True
    if senspar.get('confinex', 'False') in ['True', 'true', 'TRUE', True]:
        confinex = True
    gridcolor = senspar.get('gridcolor', '#316931')
    bartrange = float(senspar.get('bartrange', 0.06))
    print("  Fullday: {}, Opacity: {}, Show: {}, Confinex: {}, Gridcolor: {}".format(fullday, opacity, show, confinex, gridcolor))

    print("5. Cycle through sensordefinitions")
    symbollist = []
    specialdict = []
    streamlist = []
    keylist = []
    filllist = []
    paddinglist = []
    annotatelist = []
    colorlist = []
    flaglist = []
    for cnt, dataid in enumerate(sensordefs):
        if not dataid == 'parameter':
            processname = "{}-{}".format(statname, dataid.replace("_", "-"))
            statusmsg[processname] = "failure"
            useflags = False
            outlier = False
            dropflagged = False
            sensdict = sensordefs[dataid]
            revision = sensdict.get('revision', '0001')
            print("5.{}.1 Check SensorID - or whether DataID provided for {}".format(cnt + 1, dataid))
            sensorid, revision = CheckSensorID(dataid, revision, debug=debug)
            keys = sensdict.get('keys', [])
            columns = sensdict.get('columns', [])
            units = sensdict.get('units', [])
            path = sensdict.get('source', '')
            plotstyle = sensdict.get('plotstyle', 'line')
            flagtreatment = sensdict.get('flags', '')
            if 'outlier' in flagtreatment:
                outlier = True
                dropflagged = True
            if 'flag' in flagtreatment or 'drop' in flagtreatment:
                useflags = True
                if 'drop' in flagtreatment:
                    dropflagged = True
            filenamebegins = sensdict.get('filenamebegins', '')
            filenameends = sensdict.get('filenameends', '')
            mergepath = sensdict.get('basesource', '')
            mergebegins = sensdict.get('basebegins', '')
            mergeends = sensdict.get('baseends', '')
            if keys:
                print("5.{}.2 Read datastream for {}".format(cnt + 1, dataid))
                try:
                    stream, fl = ReadDatastream(config=config, starttime=starttime, endtime=endtime, sensorid=sensorid, keylist=keys, revision=revision, flags=useflags, outlier=outlier, dropflagged=dropflagged, datapath=path, filenamebegins=filenamebegins, filenameends=filenameends, mergepath=mergepath, mergebegins=mergebegins, mergeends=mergeends, columns=columns, units=units, debug=debug)
                    if stream and stream.length()[0] > 1:
                        print("5.{}.3 Check out flagging and annotation".format(cnt + 1))
                        if 'flag' in flagtreatment:
                            print(" -> eventually adding existing standard flags from DB")
                            flaglist = fl
                        if 'quake' in flagtreatment:
                            quakekey = sensdict.get('quakekey', keys[0])
                            print(" -> eventually adding QUAKES to column {}".format(quakekey))
                            fl = Quakes2Flags(config=config, endtime=endtime, timerange=dayrange + 1, sensorid=sensorid, keylist=quakekey, debug=debug)
                            flaglist = combinelists(flaglist, fl)
                        if 'coil' in flagtreatment:
                            print(" -> eventually adding COIL data to column xxx")
                            pass
                        if len(flaglist) > 0:
                            print(" => total amount of {} flags added".format(len(flaglist)))
                            stream = stream.flag(flaglist)
                        print("5.{}.4 Creating plot configuration lists".format(cnt + 1))
                        streamlist.append(stream)
                        keylist.append(keys)
                        padding = sensdict.get('padding', [])
                        if not padding:
                            padding = [0.0 for el in keys]
                        paddinglist.append(padding)
                        annotate = sensdict.get('annotate', [])
                        if not annotate:
                            annotate = [False for el in keys]
                        annotatelist.append(annotate)
                        color = sensdict.get('color', [])
                        if not color:
                            color = ['k' for el in keys]
                        colorlist.extend(color)
                        if plotstyle == 'line':
                            symbol = ['-' for el in keys]
                        elif plotstyle == 'bar':
                            symbol = ['z' for el in keys]
                        else:
                            symbol = ['-' for el in keys]
                        symbollist.extend(symbol)
                        fill = sensdict.get('fill', [])
                        filllist.extend(fill)
                        speciald = sensdict.get('specialdict', {})
                        print(speciald)
                        #for el in speciald:
                        #    vals = speciald[el]
                        #    if isinstance(vals,list):
                        #        vals = [int(ele) for ele in vals]
                        #        speciald[el] = vals
                        #        print (speciald[el])
                        #print (specialdict)
                        #specialdict.extend(speciald)
                        print(" ==> section 5.{} done".format(cnt + 1))
                        #print (specialdict)
                        statusmsg[processname] = "success"
                except:
                    print(" -- severe error in data treatment")
                    pass
            else:
                print(" -- no keys defined - skipping this sensor")
                pass

    if len(streamlist) > 0:
        #mp.plotStreams(streamlist,keylist, fill=filllist, colorlist=colorlist, padding=paddinglist, annotate=annotatelist, gridcolor='#316931', confinex=True, opacity=0.7)
        print("6. Creating plot")
        if os.path.isdir(outpath):
            # creating file name from sensorsdef input file
            fullplotpath = os.path.join(outpath, "{}_{}.png".format(plotname, datetime.strftime(endtime, "%Y-%m-%d")))
            print(" -> Saving graph to {}".format(fullplotpath))
        elif os.path.isfile(outpath):
            fullplotpath = outpath
        else:
            fullplotpath = ''
        CreateDiagram(streamlist, keylist, filllist=filllist, colorlist=colorlist, paddinglist=paddinglist, annotatelist=annotatelist, symbollist=symbollist, specialdict=specialdict, gridcolor=gridcolor, confinex=confinex, opacity=opacity, fullday=fullday, bartrange=bartrange, show=show, fullplotpath=fullplotpath, debug=debug)

    if not debug:
        martaslog = ml(logfile=config.get('logfile'), receiver=config.get('notification'))
        martaslog.telegram['config'] = config.get('notificationconfig')
        martaslog.msg(statusmsg)
    else:
        print("Debug selected - statusmsg looks like:")
        print(statusmsg)
def main(argv):
    try:
        version = __version__
    except:
        version = "1.0.0"
    configpath = ''
    statusmsg = {}
    joblist = ['default', 'service']
    debug = False
    endtime = None
    testplot = False
    try:
        opts, args = getopt.getopt(argv, "hc:j:e:DP", ["config=", "joblist=", "endtime=", "debug=", "plot=", ])
    except getopt.GetoptError:
        print('gamma_products.py -c <config>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('-------------------------------------')
            print('Description:')
            print('-- gamma_products.py will analyse gamma radiation data --')
            print('-----------------------------------------------------------------')
            print('detailed description ..')
            print('...')
            print('...')
            print('-------------------------------------')
            print('Usage:')
            print('python gamma_products.py -c <config>')
            print('-------------------------------------')
            print('Options:')
            print('-c (required) : configuration data path')
            print('-j            : default, service')
            print('-e            : endtime')
            print('-------------------------------------')
            print('Application:')
            print('python gamma_products.py -c /etc/marcos/analysis.cfg')
            sys.exit()
        elif opt in ("-c", "--config"):
            # configuration data path
            configpath = os.path.abspath(arg)
        elif opt in ("-j", "--joblist"):
            # get a list of jobs (default, service)
            joblist = arg.split(',')
        elif opt in ("-e", "--endtime"):
            endtime = arg
        elif opt in ("-D", "--debug"):
            # activate debug mode
            debug = True
        elif opt in ("-P", "--plot"):
            # create test plots
            testplot = True

    if debug:
        print("Running gamma_products version {} - debug mode".format(version))
        print("---------------------------------------")

    if not os.path.exists(configpath):
        print('Specify a valid path to configuration information')
        print('-- check gamma_products.py -h for more options and requirements')
        sys.exit()

    if endtime:
        try:
            endtime = DataStream()._testtime(endtime)
        except:
            print("Endtime could not be interpreted - Aborting")
            sys.exit(1)
    else:
        endtime = datetime.utcnow()

    print("1. Read configuration data")
    config = GetConf(configpath)
    config = ConnectDatabases(config=config, debug=debug)

    print("2. Activate logging scheme as selected in config")
    config = DefineLogger(config=config, category="DataProducts", job=os.path.basename(__file__), newname='mm-dp-scaradon.log', debug=debug)
    config['testplot'] = testplot

    starttime = datetime.strftime(endtime - timedelta(days=7), "%Y-%m-%d")

    if 'default' in joblist:
        print("3. Create standard data table")
        statusmsg = CreateOldsProductsTables(config=config, statusmsg=statusmsg, start=starttime, end=endtime, debug=debug)

    if 'service' in joblist:
        print("4. Create Webservice table")
        statusmsg = CreateWebserviceTable(config=config, statusmsg=statusmsg, start=starttime, end=endtime, debug=debug)

    if not debug:
        martaslog = ml(logfile=config.get('logfile'), receiver=config.get('notification'))
        martaslog.telegram['config'] = config.get('notificationconfig')
        martaslog.msg(statusmsg)
    else:
        print("Debug selected - statusmsg looks like:")
        print(statusmsg)
def main(argv):
    version = '1.0.0'
    configpath = ''
    statusmsg = {}
    debug = False
    dipath = '/srv/archive/WIC/DI'
    analyzepath = os.path.join(dipath, 'analyze')
    datapath = os.path.join(dipath, 'data')
    priminst = '/home/cobs/ANALYSIS/Logs/primaryinst.pkl'
    plotdir = '/home/cobs/ANALYSIS/Info/plots'
    endtime = datetime.utcnow()
    starttime = endtime - timedelta(days=380)
    pier = "A2"
    caption = ''
    channelconfig = '/home/cobs/ANALYSIS/Info/conf/tg_base.cfg'
    failedmsg = ''
    try:
        opts, args = getopt.getopt(argv, "hc:t:D", ["config=", "telegram=", "debug="])
    except getopt.GetoptError:
        print('tg_base.py -c <config>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('-------------------------------------')
            print('Description:')
            print('-- tg_base.py will obtain baseline plots --')
            print('-----------------------------------------------------------------')
            print('detailed description ..')
            print('...')
            print('...')
            print('-------------------------------------')
            print('Usage:')
            print('python tg_base.py -c <config>')
            print('-------------------------------------')
            print('Options:')
            print('-c (required) : configuration data path')
            print('-t (required) : telegram channel configuration')
            print('-------------------------------------')
            print('Application:')
            print('python tg_base.py -c /etc/marcos/analysis.cfg -t /etc/marcos/telegram.cfg')
            sys.exit()
        elif opt in ("-c", "--config"):
            # configuration data path
            configpath = os.path.abspath(arg)
        elif opt in ("-t", "--telegram"):
            # telegram channel configuration
            channelconfig = os.path.abspath(arg)
        elif opt in ("-D", "--debug"):
            # activate debug mode
            debug = True

    print("Running tg_base version {}".format(version))
    print("--------------------------------")

    if not os.path.exists(configpath):
        print('Specify a valid path to configuration information')
        print('-- check tg_base.py -h for more options and requirements')
        sys.exit()

    print("1. Read and check validity of configuration data")
    config = GetConf(configpath)

    print("2. Activate logging scheme as selected in config")
    config = DefineLogger(config=config, category="Info", job=os.path.basename(__file__), newname='mm-info-base.log', debug=debug)
    name1 = "{}-tgbase".format(config.get('logname'))
    statusmsg[name1] = 'Baseline notification successful'

    print("3. Connect databases and select first available")
    try:
        config = ConnectDatabases(config=config, debug=debug)
        db = config.get('primaryDB')
        connectdict = config.get('conncetedDB')
    except:
        statusmsg[name1] = 'database failed'

    # SOME DEFINITIONS:
    datapath = config.get('dipath')
    dipath = os.path.join(config.get('dipath'), '..')
    analyzepath = os.path.join(dipath, 'analyze')
    pier = config.get('primarypier')
    if config.get('temporarygraphs'):
        plotdir = config.get('temporarygraphs')
    caption = ''

    # 1. get primary instruments:
    # ###########################
    #lst = pload(open(priminst,'rb'))
    config, statusmsg = GetPrimaryInstruments(config=config, statusmsg=statusmsg, debug=debug)
    variosens = config.get('primaryVario')
    scalarsens = config.get('primaryScalar')
    print("PRIMARY INSTRUMENTS: vario={}, scalar={}".format(variosens, scalarsens))

    # 2. define BLV filename
    # ###########################
    blvname = "BLVcomp_{}_{}_{}.txt".format(variosens, scalarsens, pier)
    blvdata = os.path.join(datapath, blvname)
    print("BASEVALUE SOURCE: {}".format(blvdata))

    # 3. Read BLV file and create BLV plot for the last year
    # ###########################
    plttitle = "{}: {} and {}".format(pier, variosens, scalarsens)
    caption = CreateBLVPlot(db, blvname, blvdata, starttime, endtime, plotdir, plttitle, debug=debug)

    # 4. read file list of *.txt files remaining in DI/analyse
    # ###########################
    failedmsg = GetFailed(analyzepath, endtime, debug=debug)

    # 5. send all info to telegram channel
    # ###########################
    if not debug:
        with open(os.path.join(plotdir, 'basegraph.png'), "rb") as f:
            telegram_send.send(images=[f], captions=[caption], conf=channelconfig, parse_mode="markdown")
        if not failedmsg == '':
            telegram_send.send(messages=[failedmsg], conf=channelconfig, parse_mode="markdown")
    else:
        print("Debug selected")

    print("tg_base successfully finished")

    # 6. Logging section
    # ###########################
    if not debug:
        martaslog = ml(logfile=config.get('logfile'), receiver=config.get('notification'))
        martaslog.telegram['config'] = config.get('notificationconfig')
        martaslog.msg(statusmsg)
    else:
        print("Debug selected - statusmsg looks like:")
        print(statusmsg)
def main(argv): """ METHODS: extract_config() -> read analysis config get_readlist() -> get calls to read data chunks get_data() -> get_chunk_config() -> obtain details of chunk get_chunk_feature() -> get statistical features for each chunk - get_emd_features() - obtain_basic_emd_characteristics() - get_features() - get_wavelet_features() """ version = "1.0.0" swsource = 'https://kauai.ccmc.gsfc.nasa.gov/CMEscoreboard/' path = '' creddb = 'cobsdb' output = [] stime = '' etime = '' configpath = '' # is only necessary for monitoring cmedict = {} hours_threshold = 12 debug = False init = False # create table if TRUE statusmsg = {} meankpcrit = 4.5 maxkpcrit = 6.0 receivers = {'deutsch' : {'userid1': {'name':'roman leon', 'email':'*****@*****.**', 'language':'deutsch'}}} languagedict = {'english' : {'msgheader': "Coronal mass ejection - CME", 'msgnew':'New CME started at ', 'msgupdate':'Update on CME from ', 'msgarrival':'Estimated arrival: ', 'msgpred':'Expected geomagnetic activity (Kp): ', 'timezone':'UTC', 'msgref':'Based on experimental data from [CMEscoreboard](https://kauai.ccmc.gsfc.nasa.gov/CMEscoreboard/)', 'msguncert':'arrival time estimates usually have an uncertainty of +/- 7 hrs', 'channeltype':'telegram', 'channelconfig':'/etc/martas/telegram.cfg', }, 'deutsch' : {'msgheader': "Sonneneruption - CME", 'msgnew':'Neuer CME (koronaler Massenauswurf) am ', 'msgupdate':'Update zu CME vom ', 'msgarrival':'Geschätzte Ankunftszeit: ', 'msgpred':'Erwartete geomagnetische Aktivität (Kp): ', 'timezone':'UTC', 'msgref':'Basierend auf experimentellen Daten des [CMEscoreboard](https://kauai.ccmc.gsfc.nasa.gov/CMEscoreboard/)', 'msguncert':'geschätzte Ankunftszeiten sind meistens mit Fehlern von +/- 7 hrs behaftet', 'channeltype':'telegram', 'channelconfig':'/etc/martas/telegram_cobsnoti.cfg', } } #'channelconfig':'/etc/martas/tg_weltraum.cfg', #'channelconfig':'/etc/martas/tg_space.cfg', usage = 'cme-extractor.py -c <config> -s <source> -o <output> -p <path> -b <begin> -e <end>' try: opts, args = getopt.getopt(argv,"hc:s:o:p:b:e:ID",["config=","source=","output=","path=","begin=","end=","init=","debug=",]) except getopt.GetoptError: print(usage) sys.exit(2) for opt, arg in opts: if opt == '-h': print('-------------------------------------') print('Description:') print('cme-extractor.py extracts CME predictions from Scoreboard ') print('-------------------------------------') print('Usage:') print(usage) print('-------------------------------------') print('Options:') print('-c : configuration data ') print('-s : source - default is ') print('-o : output (list like db,file)') print('-p : path') print('-b : starttime') print('-e : endtime') print('-------------------------------------') print('Examples:') print('---------') print('python3 cme_extractor.py -c /home/cobs/CONF/wic.cfg -o file,db,telegram,email -p /srv/archive/external/esa-nasa/cme/ -D') print('---------') sys.exit() elif opt in ("-c", "--config"): configpath = os.path.abspath(arg) elif opt in ("-s", "--source"): swsource = arg elif opt in ("-o", "--output"): output = arg.split(',') elif opt in ("-p", "--path"): path = os.path.abspath(arg) elif opt in ("-b", "--starttime"): stime = arg elif opt in ("-e", "--endtime"): etime = arg elif opt in ("-I", "--init"): init = True elif opt in ("-D", "--debug"): debug = True if debug: print ("Running cme-extractor version:", version) print ("Selected output:", output) # 1. 
conf and logger: # ########################### if debug: print ("Read and check validity of configuration data") print (" and activate logging scheme as selected in config") config = GetConf(configpath) config = DefineLogger(config=config, category = "Info", job=os.path.basename(__file__), newname='mm-info-cme.log', debug=debug) if debug: print (" -> Done") # 2. database: # ########################### try: config = ConnectDatabases(config=config, debug=debug) db = config.get('primaryDB') connectdict = config.get('conncetedDB') except: statusmsg[name1] = 'database failed' try: if swsource.startswith('http'): cmedict = get_cme_dictionary_scoreboard(url=swsource) elif os.path.isfile(swsource): cmedict = read_memory(swsource) else: sys.exit() statusmsg['CMEaccess'] = 'success' except: statusmsg['CMEaccess'] = 'failed' if debug: print (cmedict) if not cmedict: statusmsg['CMEaccess'] = 'failed' # read memory and extract new and updated data if path: if os.path.isdir(path): path = os.path.join(path,"cme_{}.json".format(datetime.strftime(datetime.utcnow(),"%Y"))) # open existing json, extend dictionary, eventually update contents data = read_memory(path,debug=False) full, new, up = get_new_inputs(data,cmedict) if debug: print ('Saveing to path:', path) print ('new:', new) print ('update:', up) if 'file' in output and (new or up): print (" Dealing with job: file") success = write_memory(path, cmedict) if success: statusmsg['CME2file'] = 'success' print (" -> everything fine") else: statusmsg['CME2file'] = 'failed' print (" -> failed") if 'db' in output and creddb: print (" Dealing with job: db") # Only add data with arrival time +12h > now to database # delete data with arrivaltime+12h < now from db success = False statusmsg['CME2db'] = 'failed' try: if debug: print ("Accessing database ...") config = ConnectDatabases(config=config, debug=debug) db = config.get('primaryDB') connectdict = config.get('conncetedDB') if debug: print (" ... done") success = True except: pass try: for dbel in connectdict: db = connectdict[dbel] print (" -- Writing data to DB {}".format(dbel)) if init: swtableinit(db) if success: update_database(db,full, new, up, swsource,debug=debug) statusmsg['CME2db'] = 'success' print (" -> everything fine") except: print (" -> failed") if 'telegram' in output or 'email' in output: print (" Dealing with jobs: telegram and email") # Access memory of already send data, send update/new statusmsg['CME2telegram'] = 'success' statusmsg['CME2mail'] = 'success' total = new + up for el in total: # Construct markdown message for each language provided valdict = full.get(el) moveon = True # Test some criteria before sending message (e.g. 
not too old, k large enough) print (" Checking notification criteria ...") arr = dparser.parse(valdict.get('arrival'),fuzzy=True) if arr < datetime.utcnow(): print (" -> arrival time in the past") moveon = False kprange = [float(vl) for vl in valdict.get('KPrange').split('-')] if mean(kprange) < meankpcrit: print (" -> expected average activity too small (< {})".format(meankpcrit)) moveon = False if max(kprange) < maxkpcrit and el in up: print (" -> expected maximum activity too small (< {}) - skipping update".format(maxkpcrit)) moveon = False if moveon: for lang in languagedict: langdic = languagedict[lang] msghead = "*{}*".format(langdic.get('msgheader')) if el in new: msgbody = "\n\n{} {}\n".format(langdic.get('msgnew'), valdict.get('start')) else: msgbody = "\n\n{} {}\n".format(langdic.get('msgupdate'), valdict.get('start')) msgbody += "\n{} *{}* {}\n".format(langdic.get('msgarrival'), valdict.get('arrival') ,langdic.get('timezone')) msgbody += "{} {}\n".format(langdic.get('msgpred'), valdict.get('KPrange')) msgbody += '{}\n'.format(langdic.get('msgref')) msgbody += "" msg = msghead+msgbody if debug: print (msg) if not debug and 'email' in output and lang == 'deutsch': #if 'email' in output and lang == 'deutsch': #TODO e-mail is not yet working print ("Now starting e-mail") # read e-mail receiver list from dictionary #try: mailcfg = config.get('emailconfig','/etc/martas/mail.cfg') maildict = read_meta_data(mailcfg) maildict['Subject'] = msghead # email is a comma separated string reclist = receivers.get(lang,{}) for dic in reclist: userdic = reclist[dic] email = userdic.get('email') name = userdic.get('name') preferedlang = userdic.get('language') maildict['Text'] = msgbody maildict['To'] = email #print (" receivers are: {}".format(maildict['To'])) #### Stop here with debug mode for basic tests without memory and mails if preferedlang == lang: print (" ... sending mail in language {} now: {}".format(lang,maildict)) sm(maildict) print (" ... done") statusmsg['CME2mail'] = 'success' print (" -> email fine") #except: #statusmsg['CME2mail'] = 'failed' if not debug and 'telegram' in output: print ("Now starting telegram") try: telegram_send.send(messages=[msg],conf=langdic.get('channelconfig'),parse_mode="markdown") print (" -> telegram fine") except: statusmsg['CME2telegram'] = 'failed' print (" -> telegram failed") if debug and 'telegram' in output and lang == 'deutsch': telegram_send.send(messages=[msg],conf=config.get('notificationconfig'),parse_mode="markdown") print (" -> debug telegram fine") # Logging section # ########################### if not debug and config: martaslog = ml(logfile=config.get('logfile'),receiver=config.get('notification')) martaslog.telegram['config'] = config.get('notificationconfig') martaslog.msg(statusmsg) else: print ("Debug selected - statusmsg looks like:") print (statusmsg)
def main(argv):
    version = '1.0.0'
    configpath = ''
    statusmsg = {}
    debug = False
    stime = None
    etime = datetime.utcnow()
    endtime = ''
    starttime = ''
    path = '/srv/archive/external/esa-nasa/ace'
    try:
        opts, args = getopt.getopt(argv, "hc:e:j:p:s:o:D", ["config=", "endtime=", "joblist=", "debug="])
    except getopt.GetoptError:
        print('ace_conversion.py -c <config>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('-------------------------------------')
            print('Description:')
            print('-- ace_conversion.py will convert raw ACE data --')
            print('-----------------------------------------------------------------')
            print('detailed description ..')
            print('...')
            print('...')
            print('-------------------------------------')
            print('Usage:')
            print('python ace_conversion.py -c <config>')
            print('-------------------------------------')
            print('Options:')
            print('-c (required) : configuration data path')
            print('-e            : endtime, default is now')
            print('-p            : path for ace data')
            print('-------------------------------------')
            print('Application:')
            print('python ace_conversion.py -c /etc/marcos/analysis.cfg -p /srv/archive/external/esa-nasa/ace')
            sys.exit()
        elif opt in ("-c", "--config"):
            # configuration data path
            configpath = os.path.abspath(arg)
        elif opt in ("-e", "--endtime"):
            # get an endtime
            endtime = arg.split(',')
        elif opt in ("-s", "--starttime"):
            # get a starttime
            starttime = arg.split(',')
        elif opt in ("-p", "--path"):
            path = arg
        elif opt in ("-D", "--debug"):
            # activate debug mode
            debug = True

    print("Running ace_conversion version {}".format(version))
    print("--------------------------------")

    if not os.path.exists(configpath):
        print('Specify a valid path to configuration information')
        print('-- check ace_conversion.py -h for more options and requirements')
        sys.exit()

    print("1. Read and check validity of configuration data")
    config = GetConf(configpath)

    print("2. Activate logging scheme as selected in config")
    config = DefineLogger(config=config, category="DataProducts", job=os.path.basename(__file__), newname='mm-dp-ace.log', debug=debug)
    name = "{}-ACE-conversion".format(config.get('logname'))
    currentvaluepath = config.get('currentvaluepath')

    # take localpath from config
    path = '/srv/archive/external/esa-nasa/ace'

    if starttime:
        # parse date to stime
        pass
    if endtime:
        # parse date to etime
        pass

    if not stime:
        datelist = [datetime.strftime(etime, "%Y%m%d")]
    else:
        # time range given... determine list
        print(" Not implemented so far")
        pass

    for datum in datelist:
        print("Analyzing {}...".format(datum))
        # --------------------------------------------------------------------
        # PROCESS 1-MIN DATA
        # --------------------------------------------------------------------
        process_ACE(datum, '1m', ['swepam', 'mag'], ['x', 'y', 'z', 'f', 't1', 't2'], localpath=path, debug=debug)
        # --------------------------------------------------------------------
        # PROCESS 5-MIN DATA
        # --------------------------------------------------------------------
        process_ACE(datum, '5m', ['epam', 'sis'], ['x', 'y'], skipcompression=True, localpath=path, debug=debug)

    statusmsg[name] = 'successfully finished'

    print("------------------------------------------")
    print("  ace conversion finished")
    print("------------------------------------------")
    print("SUCCESS")

    if not debug:
        martaslog = ml(logfile=config.get('logfile'), receiver=config.get('notification'))
        martaslog.telegram['config'] = config.get('notificationconfig')
        martaslog.msg(statusmsg)
    else:
        print("Debug selected - statusmsg looks like:")
        print(statusmsg)
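# ace_conversion.py above leaves the start/end time range handling open
# ("Not implemented so far"). A hypothetical sketch of how a datelist covering
# stime..etime could be built for the process_ACE loop; this is an assumption
# for illustration, not part of the original script.
from datetime import datetime, timedelta

def build_datelist(stime, etime):
    """Return a list of YYYYmmdd strings from stime to etime (inclusive)."""
    days = int((etime - stime).days)
    return [datetime.strftime(stime + timedelta(days=n), "%Y%m%d") for n in range(days + 1)]

# Example: build_datelist(datetime(2021, 1, 1), datetime(2021, 1, 3))
# -> ['20210101', '20210102', '20210103']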
def main(argv):
    version = '1.0.0'
    configpath = ''
    statusmsg = {}
    debug = False
    channelconfig = ''
    pastrange = 2
    alldata, field, data = [], [], {}
    existalldata, existname, existdata = [], [], {}
    exceptlist = ["2020 SO"]  # 2020 SO is not unique, two times the same name...
    datemax = datetime.strftime((datetime.now() + timedelta(days=730)), "%Y-%m-%d")
    print("Checking data until:", datemax)
    url = "https://ssd-api.jpl.nasa.gov/cad.api?dist-max=1LD&date-min=1900-01-01&date-max={}&sort=date".format(datemax)
    try:
        opts, args = getopt.getopt(argv, "hc:t:D", ["config=", "telegram=", "debug="])
    except getopt.GetoptError:
        print('tg_pha.py -c <config>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('-------------------------------------')
            print('Description:')
            print('-- tg_pha.py will notify about near earth object approaches --')
            print('-----------------------------------------------------------------')
            print('detailed description ..')
            print('loading near earth objects database')
            print('...')
            print('-------------------------------------')
            print('Usage:')
            print('python tg_pha.py -c <config>')
            print('-------------------------------------')
            print('Options:')
            print('-c (required) : configuration data path')
            print('-t (optional) : telegram channel configuration')
            print('-------------------------------------')
            print('Application:')
            print('python tg_pha.py -c /etc/marcos/wic.cfg -t /etc/marcos/telegram.cfg')
            sys.exit()
        elif opt in ("-c", "--config"):
            # configuration data path
            configpath = os.path.abspath(arg)
        elif opt in ("-t", "--telegram"):
            # telegram channel configuration
            channelconfig = os.path.abspath(arg)
        elif opt in ("-D", "--debug"):
            # activate debug mode
            debug = True

    print("Running tg_pha version {}".format(version))
    print("--------------------------------")

    if not os.path.exists(configpath):
        print('Specify a valid path to configuration information')
        print('-- check tg_pha.py -h for more options and requirements')
        sys.exit()

    # 1. conf and logger:
    # ###########################
    print("Read and check validity of configuration data")
    config = GetConf(configpath)
    print(" -> Done")
    print("Activate logging scheme as selected in config")
    config = DefineLogger(config=config, category="Info", job=os.path.basename(__file__), newname='mm-info-pha.log', debug=debug)
    print(" -> Done")

    # SOME DEFINITIONS:
    proxyadd = ''
    if config.get('proxy'):
        proxyadd = "{}:{}".format(config.get('proxy'), config.get('proxyport'))
    temporarypath = config.get('temporarydata')
    phamem = os.path.join(temporarypath, 'tg-pha.json')
    if not channelconfig:
        channelconfig = config.get('notificationconfig')
    continueproc = True

    # 2. Download PHA:
    # ###########################
    try:
        print("Checking data until:", datemax)
        alldata, field, data = download_PHA_jpl(url, proxy=proxyadd, debug=debug)
        statusmsg['PHA'] = 'downloading new PHAs successful'
    except:
        statusmsg['PHA'] = 'downloading new PHAs failed'
        continueproc = False

    # 3. Get local PHA memory:
    # ###########################
    if continueproc and len(data) > 0:
        try:
            existalldata, existname, existdata = get_PHA_mem(phamem, data, field, debug=debug)
            statusmsg['PHA'] = 'getting PHA memory successful'
        except:
            statusmsg['PHA'] = 'getting PHA memory failed'
            continueproc = False

    # 4. Sending PHA message:
    # ###########################
    if continueproc and len(alldata) > 0:
        try:
            check_new_PHA(alldata, existalldata, field, existname, languagedict=languagedict, exceptlist=exceptlist, pastrange=pastrange, debug=debug)
            statusmsg['PHA'] = 'sending PHA data successful'
        except:
            statusmsg['PHA'] = 'sending PHA data failed'
            continueproc = False

    # 5. Update local PHA memory:
    # ###########################
    if continueproc and len(data) > 0 and not debug:
        try:
            write_memory(phamem, data, debug=debug)
            statusmsg['PHA'] = 'successfully finished'
        except:
            statusmsg['PHA'] = 'writing new PHA memory failed'

    print("tg_pha successfully finished")

    # 6. Logging section
    # ###########################
    if not debug:
        martaslog = ml(logfile=config.get('logfile'), receiver=config.get('notification'))
        martaslog.telegram['config'] = config.get('notificationconfig')
        martaslog.msg(statusmsg)
    else:
        print("Debug selected - statusmsg looks like:")
        print(statusmsg)
def main(argv): try: version = __version__ except: version = "1.0.1" configpath = '' statusmsg = {} debug=False force=False starttime = None endtime = None joblist = ["adjusted", "quasidefinitive", "addon"] newloggername = 'mm-dp-magnetism.log' try: opts, args = getopt.getopt(argv,"hc:j:s:e:l:FD",["config=","joblist=","starttime=","endtime=","loggername=","force=","debug=",]) except getopt.GetoptError: print ('magnetism_products.py -c <config>') sys.exit(2) for opt, arg in opts: if opt == '-h': print ('-------------------------------------') print ('Description:') print ('-- magnetism_products.py will analyse magnetic data --') print ('-----------------------------------------------------------------') print ('detailed description ..') print ('...') print ('...') print ('-------------------------------------') print ('Usage:') print ('python magnetism_products.py -c <config>') print ('-------------------------------------') print ('Options:') print ('-c (required) : configuration data path') print ('-j : adjusted, quasidefinitive,addon') print (' : adjusted -> calculated adjusted data with constant') print (' : baseline approximation') print (' : quasidefinitive -> calculated data with spline') print (' : baseline and only if observer flags are present') print (' : addon -> requires adjusted, create diagtram and k values') print ('-l : loggername') print ('-s : starttime') print ('-e : endtime') print ('-F : force redo of quasidefinitve (not updating current.data)') print ('-------------------------------------') print ('Application:') print ('python magnetism_products.py -c /etc/marcos/analysis.cfg') print ('python magnetism_products.py -c /etc/marcos/analysisGAM.cfg -j adjusted -l mm-dp-magnetism-GAM') print ('python magnetism_products.py -c /etc/marcos/analysisGAM.cfg -j quasidefinitive -s starttime -e endtime -F -l mm-dp-qd-GAM') sys.exit() elif opt in ("-c", "--config"): # delete any / at the end of the string configpath = os.path.abspath(arg) elif opt in ("-j", "--joblist"): # get a list of jobs e.g. "adjusted, quasidefinitive, addon" joblist = arg.split(',') elif opt in ("-s", "--starttime"): # define an endtime for the current analysis - default is now starttime = arg elif opt in ("-e", "--endtime"): # define an endtime for the current analysis - default is now endtime = arg elif opt in ("-l", "--loggername"): # define an endtime for the current analysis - default is now newloggername = arg elif opt in ("-F", "--force"): # delete any / at the end of the string force = True elif opt in ("-D", "--debug"): # delete any / at the end of the string debug = True print ("Running magpy_products version {}".format(version)) if not os.path.exists(configpath): print ('Specify a valid path to configuration information') print ('-- check magnetism_products.py -h for more options and requirements') sys.exit() if "addon" in joblist and not "adjusted" in joblist: print ("joblist input 'addon' requires 'adjusted' as well - therefore skipping 'addon' option" ) if starttime: try: starttime = DataStream()._testtime(starttime) except: print ("Starttime could not be interpreted - using None") if endtime: try: endtime = DataStream()._testtime(endtime) except: print ("Endtime could not be interpreted - Aborting") sys.exit(1) else: endtime = datetime.utcnow() print ("1. Read and check validity of configuration data") config = GetConf(configpath) success = ValidityCheckConfig(config) #if not success: # sys.exit(1) # ############################################################# print ("2. 
Activate logging scheme as selected in config") config = DefineLogger(config=config, category = "DataProducts", job=os.path.basename(__file__), newname=newloggername, debug=debug) if debug: print (" -> Config contents:") print (config) print ("3. Check all paths and eventually remount directories") success,statusmsg = ValidityCheckDirectories(config=config, statusmsg=statusmsg, debug=debug) print ("4. Loading current.data and getting primary instruments") config, statusmsg = GetPrimaryInstruments(config=config, statusmsg=statusmsg, debug=debug) print ("5. Connect to databases") config = ConnectDatabases(config=config, debug=debug) if "adjusted" in joblist: print ("6. Obtain adjusted data") mindata,statusmsg = AdjustedData(config=config, statusmsg=statusmsg, endtime=endtime, debug=debug) if mindata.length()[0]>0 and "addon" in joblist: print ("8. Diagrams") suc,statusmsg = CreateDiagram(mindata, config=config,statusmsg=statusmsg, endtime=endtime) print ("9. K Values") suc,statusmsg = KValues(mindata, config=config,statusmsg=statusmsg) if "quasidefinitive" in joblist: print ("10. Obtain quasidefinitive data") statusmsg = QuasidefinitiveData(config=config, statusmsg=statusmsg, starttime=starttime, endtime=endtime, force=force, debug=debug) if not debug: martaslog = ml(logfile=config.get('logfile'),receiver=config.get('notification')) martaslog.telegram['config'] = config.get('notificationconfig') martaslog.msg(statusmsg) else: print ("Debug selected - statusmsg looks like:") print (statusmsg)
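# ###########################
# Illustration only: the -j handling above accepts a comma separated job list and merely
# warns when 'addon' is requested without 'adjusted'. The helper below is a small sketch
# (not part of the original script) of how such an argument could be normalised and
# validated in one place.
def normalize_joblist(arg, valid=("adjusted", "quasidefinitive", "addon")):
    """Split a comma separated job string, drop unknown entries and remove
    'addon' when 'adjusted' is missing, since 'addon' depends on it."""
    jobs = [j.strip() for j in arg.split(',') if j.strip()]
    jobs = [j for j in jobs if j in valid]
    if "addon" in jobs and "adjusted" not in jobs:
        print("'addon' requires 'adjusted' - skipping 'addon'")
        jobs.remove("addon")
    return jobs

# Hypothetical usage:
# normalize_joblist("adjusted, addon")  -> ['adjusted', 'addon']
# normalize_joblist("addon")            -> []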
def main(argv): version = '1.0.0' configpath = '' statusmsg = {} debug = False channelconfig = '' try: opts, args = getopt.getopt(argv, "hc:t:D", ["config=", "telegram=", "debug="]) except getopt.GetoptError: print('tg_base.py -c <config>') sys.exit(2) for opt, arg in opts: if opt == '-h': print('-------------------------------------') print('Description:') print('-- tg_base.py will obtain baseline plots --') print( '-----------------------------------------------------------------' ) print('detailed description ..') print('...') print('...') print('-------------------------------------') print('Usage:') print('python tg_base.py -c <config>') print('-------------------------------------') print('Options:') print('-c (required) : configuration data path') print('-t (required) : telegram channel configuration') print('-------------------------------------') print('Application:') print( 'python tg_base.py -c /etc/marcos/wic.cfg -t /etc/marcos/telegram.cfg' ) sys.exit() elif opt in ("-c", "--config"): # delete any / at the end of the string configpath = os.path.abspath(arg) elif opt in ("-t", "--channel"): # delete any / at the end of the string channelconfig = os.path.abspath(arg) elif opt in ("-D", "--debug"): # delete any / at the end of the string debug = True print("Running tg_quake version {}".format(version)) print("--------------------------------") if not os.path.exists(configpath): print('Specify a valid path to configuration information') print( '-- check magnetism_products.py -h for more options and requirements' ) sys.exit() # 1. conf and logger: # ########################### print("Read and check validity of configuration data") config = GetConf(configpath) print(" -> Done") print("Activate logging scheme as selected in config") config = DefineLogger(config=config, category="Info", job=os.path.basename(__file__), newname='mm-info-quake.log', debug=debug) print(" -> Done") # 2. database: # ########################### name1 = "{}-tgquake".format(config.get('logname')) statusmsg[name1] = 'successful' continueeval = True try: config = ConnectDatabases(config=config, debug=debug) db = config.get('primaryDB') connectdict = config.get('conncetedDB') except: statusmsg[name1] = 'database failed' # SOME DEFINITIONS: temporarypath = config.get('temporarydata') memorypath = os.path.join(temporarypath, 'lastquake.npy') currentvaluepath = config.get('currentvaluepath') if not channelconfig: channelconfig = config.get('notificationconfig') if not channelconfig: print("No message channel defined - aborting") sys.exit(1) # 3. get quakes: # ########################### msg = 'problem with basic list generation' try: lastquakes = get_quakes(db, debug=debug) msg = 'problem with selecting relavant quakes' relevantquakes = select_relevant_quakes(lastquakes, criteria={}, debug=debug) msg = 'problem with new quakes' relevantquakes = new_quakes(relevantquakes, memorypath=memorypath, debug=debug) except: statusmsg[name1] = msg relevantquakes = [] # 4. sending notification: # ########################### try: if len(relevantquakes) > 0: send_quake_message(relevantquakes, tgconfig=channelconfig, memorypath=memorypath, debug=debug) except: statusmsg[name1] = 'problem sending notification' continueeval = False # 5. 
writing current data # ########################### try: if len(relevantquakes) > 0 and currentvaluepath and continueeval and not debug: write_current_data(relevantquakes, currentvaluepath, debug=debug) except: statusmsg[name1] = 'problem writing current data' print("tg_quake successfully finished") # 6. Logging section # ########################### if not debug: martaslog = ml(logfile=config.get('logfile'), receiver=config.get('notification')) martaslog.telegram['config'] = config.get('notificationconfig') martaslog.msg(statusmsg) else: print("Debug selected - statusmsg looks like:") print(statusmsg)
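# ###########################
# Illustration only: new_quakes() above filters out events that were already reported by
# comparing against a memory file (lastquake.npy). The sketch below shows that pattern with
# plain numpy; the real helper is defined elsewhere and its event structure may differ.
import os
import numpy as np

def filter_new_events(events, memorypath):
    """Return only events whose identifier is not yet stored in memorypath.
    'events' is assumed to be a list of (eventid, description) tuples."""
    known = set()
    if os.path.isfile(memorypath):
        known = set(np.load(memorypath, allow_pickle=True).tolist())
    new = [ev for ev in events if ev[0] not in known]
    if new:
        known.update(ev[0] for ev in new)
        np.save(memorypath, np.array(sorted(known), dtype=object))
    return new

# Hypothetical usage:
# filter_new_events([("at2021abcd", "M 4.6 - Lower Austria")], "/tmp/lastquake.npy")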
def main(argv): version = '1.0.0' configpath = '' statusmsg = {} debug = False channelconfig = '' try: opts, args = getopt.getopt(argv, "hc:t:D", ["config=", "telegram=", "debug="]) except getopt.GetoptError: print('tg_kval.py -c <config>') sys.exit(2) for opt, arg in opts: if opt == '-h': print('-------------------------------------') print('Description:') print('-- tg_kval.py will obtain baseline plots --') print( '-----------------------------------------------------------------' ) print('detailed description ..') print('...') print('...') print('-------------------------------------') print('Usage:') print('python tg_base.py -c <config>') print('-------------------------------------') print('Options:') print('-c (required) : configuration data path') print('-l (required) : language dictionary') print('-t (optional) : telegram channel configuration') print('-------------------------------------') print('Application:') print( 'python tg_kval.py -c /etc/marcos/analysis.cfg -t /etc/marcos/telegram.cfg' ) sys.exit() elif opt in ("-c", "--config"): # delete any / at the end of the string configpath = os.path.abspath(arg) elif opt in ("-t", "--channel"): # delete any / at the end of the string channelconfig = os.path.abspath(arg) elif opt in ("-D", "--debug"): # delete any / at the end of the string debug = True print("Running tg_kval version {}".format(version)) print("--------------------------------") if not os.path.exists(configpath): print('Specify a valid path to configuration information') print( '-- check magnetism_products.py -h for more options and requirements' ) sys.exit() # 1. conf and logger: # ########################### print("Read and check validity of configuration data") config = GetConf(configpath) print(" -> Done") print("Activate logging scheme as selected in config") config = DefineLogger(config=config, category="Info", job=os.path.basename(__file__), newname='mm-info-kavl.log', debug=debug) print(" -> Done") # 2. database: # ########################### name1 = "{}-kval".format(config.get('logname')) statusmsg[name1] = 'successful' try: config = ConnectDatabases(config=config, debug=debug) db = config.get('primaryDB') connectdict = config.get('conncetedDB') except: statusmsg[name1] = 'database failed' # SOME DEFINITIONS: currentvaluepath = config.get('currentvaluepath') if not channelconfig: channelconfig = config.get('notificationconfig') if not channelconfig: print("No message channel defined - aborting") sys.exit(1) # 3. get K values: # ########################### try: data = get_kvals(db, debug=debug) except: statusmsg[name1] = 'problem with list generation' data = DataStream() # 4. update Spaceweather database: # ########################### try: for dbel in connectdict: db = connectdict[dbel] print(" -- Writing data to DB {}".format(dbel)) k2sw_database(db, data, debug=False) statusmsg['K2SWDB'] = 'fine' except: statusmsg['K2SWDB'] = 'problem with uploading' # 5. sending notification: # ########################### if data.length()[0] > 0: try: send_kval_message(data, currentvaluepath=currentvaluepath, debug=debug) except: statusmsg[name1] = 'problem sending notification' print("tg_kval successfully finished") # 6. Logging section # ########################### if not debug: martaslog = ml(logfile=config.get('logfile'), receiver=config.get('notification')) martaslog.telegram['config'] = config.get('notificationconfig') martaslog.msg(statusmsg) else: print("Debug selected - statusmsg looks like:") print(statusmsg)
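# ###########################
# Illustration only: send_kval_message() used above is defined elsewhere. The sketch below
# shows the kind of mapping such a notification needs, turning a K value into a short
# status text; the thresholds and wording are assumptions.
def kval_to_text(kvalue):
    """Return a short human readable description for a geomagnetic K value."""
    if kvalue >= 7:
        level = "severe geomagnetic storm"
    elif kvalue >= 5:
        level = "geomagnetic storm"
    elif kvalue == 4:
        level = "active conditions"
    else:
        level = "quiet to unsettled conditions"
    return "Current K value: {} ({})".format(kvalue, level)

# Hypothetical usage:
# kval_to_text(5)  -> 'Current K value: 5 (geomagnetic storm)'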
def main(argv): version = '1.0.0' configpath = '' statusmsg = {} debug = False endtime = datetime.utcnow() joblist = [] varioinst = '' scalainst = '' #joblist = ['flag','clean','archive','update','delete'] joblist = ['flag'] flagfilearchivepath = '' # default: flagarchive : /srv/archive/flags flagfilepath = '' consecutivethreshold = 100000 delsensor = 'RCST7_20160114_0001' delcomment = 'aof - threshold 5.0 window 43200.0 sec' # each input looks like: # { SensorNamePart : [timerange, keys, threshold, window, markall, lowlimit, highlimit] flagdict = { 'LEMI036': [7200, 'x,y,z', 6, 'Default', True, 'None', 'None'], 'LEMI025': [7200, 'x,y,z', 6, 'Default', True, 'None', 'None'], 'FGE': [7200, 'x,y,z', 5, 'Default', True, 'None', 'None'], 'GSM90_14245': [7200, 'f', 5, 'default', False, 'None', 'None'], 'GSM90_6': [7200, 'f', 5, 300, False, 'None', 'None'], 'GSM90_3': [7200, 'f', 5, 300, False, 'None', 'None'], 'GP20S3NSS2': [7200, 'f', 5, 'Default', False, 'None', 'None'], 'POS1': [7200, 'f', 4, 100, False, 'None', 'None'], 'BM35': [7200, 'var3', 'None', 'None', False, 750, 1000] } try: opts, args = getopt.getopt(argv, "hc:e:j:p:s:o:D", [ "config=", "endtime=", "joblist=", "path=", "sensor=", "comment=", "debug=" ]) except getopt.GetoptError: print('flagging.py -c <config>') sys.exit(2) for opt, arg in opts: if opt == '-h': print('-------------------------------------') print('Description:') print('-- flagging.py will determine the primary instruments --') print( '-----------------------------------------------------------------' ) print('detailed description ..') print('...') print('...') print('-------------------------------------') print('Usage:') print('python flagging.py -c <config>') print('-------------------------------------') print('Options:') print('-c (required) : configuration data path') print('-e : endtime, default is now') print( '-j : joblist: flag,clean,archive,update,delete; default is flag,clean' ) print( '-p : update - path to json files which end with flags.json' ) print('-s : delete - sensor') print('-o : delete - comment') print('-------------------------------------') print('Application:') print('python flagging.py -c /etc/marcos/analysis.cfg') print('Once per year:') print(' python flagging.py -c /etc/marcos/analysis.cfg -j archive') print('Eventually always:') print( ' python flagging.py -c /etc/marcos/analysis.cfg -j upload -p /srv/archive/flags/uploads/' ) print('Once a day/week:') print(' python flagging.py -c /etc/marcos/analysis.cfg -j clean') sys.exit() elif opt in ("-c", "--config"): # delete any / at the end of the string configpath = os.path.abspath(arg) elif opt in ("-e", "--endtime"): # get an endtime endtime = arg.split(',') elif opt in ("-j", "--joblist"): # get an endtime joblist = arg.split(',') elif opt in ("-p", "--path"): # delete any / at the end of the string flagfilepath = os.path.abspath(arg) elif opt in ("-s", "--sensor"): # hidden: delete sensor data delsensor = arg elif opt in ("-o", "--comment"): delcomment = arg elif opt in ("-D", "--debug"): # delete any / at the end of the string debug = True print("Running flagging version {}".format(version)) print("--------------------------------") if not os.path.exists(configpath): print('Specify a valid path to configuration information') print( '-- check magnetism_products.py -h for more options and requirements' ) sys.exit() print("1. Read and check validity of configuration data") config = GetConf(configpath) print("2. 
Activate logging scheme as selected in config") config = DefineLogger(config=config, category="DataProducts", job=os.path.basename(__file__), newname='mm-dp-flagging.log', debug=debug) name1 = "{}-flag".format(config.get('logname')) name2 = "{}-flag-lemitest".format(config.get('logname')) name3 = "{}-cleaning".format(config.get('logname')) name4 = "{}-archive".format(config.get('logname')) name5 = "{}-upload".format(config.get('logname')) statusmsg[name1] = 'flagging data sets successful' statusmsg[name2] = 'Lemitest not performed' statusmsg[name3] = 'Cleanup: cleaning database successful' statusmsg[name4] = 'Archive: not time for archiving' statusmsg[name5] = 'Upload: nothing to do' flagfilearchivepath = config.get('flagarchive', '') print(flagfilearchivepath) if not os.path.isdir(flagfilearchivepath): flagfilearchivepath = '' if not os.path.isdir(flagfilepath): flagfilepath = '' print("3. Connect databases and select first available") try: config = ConnectDatabases(config=config, debug=debug) db = config.get('primaryDB') connectdict = config.get('conncetedDB') except: statusmsg[name1] = 'database failed' if 'flag' in joblist: print("4. Dealing with flagging dictionary") try: #ok = True #if ok: for elem in flagdict: print(" -------------------------------------------") print(" Dealing with sensorgroup which starts with {}".format( elem)) print(" -------------------------------------------") # Get parameter timerange = flagdict[elem][0] keyspar = flagdict[elem][1] if keyspar in ['Default', 'default', 'All', 'all', '', None]: keys = None else: keys = keyspar.split(',') threshold = flagdict[elem][2] if threshold in [ 'Default', 'default', 'None', 'none', '', None ]: threshold = None windowpar = flagdict[elem][3] if windowpar in [ 'Default', 'default', 'None', 'none', '', None ]: window = None else: window = timedelta(seconds=windowpar) markall = flagdict[elem][4] lowlimit = flagdict[elem][5] if lowlimit in [ 'Default', 'default', 'None', 'none', '', None ]: lowlimit = None highlimit = flagdict[elem][6] if highlimit in [ 'Default', 'default', 'None', 'none', '', None ]: highlimit = None starttime = datetime.utcnow() - timedelta(seconds=timerange) print( " - Using the following parameter: keys={},threshold={},window={},limits={}" .format(keys, threshold, window, [lowlimit, highlimit])) # Checking available sensors sensorlist = dbselect(db, 'DataID', 'DATAINFO', 'SensorID LIKE "{}%"'.format(elem)) print(" -> Found {}".format(sensorlist)) print(" a) select 1 second or highest resolution data" ) # should be tested later again validsensors1 = [] determinesr = [] srlist = [] for sensor in sensorlist: res = dbselect(db, 'DataSamplingrate', 'DATAINFO', 'DataID="{}"'.format(sensor)) try: sr = float(res[0]) print(" - Sensor: {} -> Samplingrate: {}".format( sensor, sr)) if sr >= 1: validsensors1.append(sensor) srlist.append(sr) except: print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") print("Check sampling rate {} of {}".format( res, sensor)) print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") determinesr.append(sensor) print(" b) checking sampling rate of failed sensors") for sensor in determinesr: lastdata = dbgetlines(db, sensor, timerange) if lastdata.length()[0] > 0: sr = lastdata.samplingrate() print(" - Sensor: {} -> Samplingrate: {}".format( sensor, sr)) if sr >= 1: validsensors1.append(sensor) srlist.append(sr) print(" c) Check for recent data") validsensors = [] validsr = [] for idx, sensor in enumerate(validsensors1): last = dbselect(db, 'time', sensor, expert="ORDER BY time DESC LIMIT 1") if debug: 
print(" Last time", last) try: dbdate = last[0] except: print(" - No data found for {}".format(sensor)) try: if getstringdate(dbdate) > starttime: print(" - Valid data for {}".format(sensor)) validsensors.append(sensor) validsr.append(srlist[idx]) except: print(" Dateformat problem for {}".format(sensor)) print(" d) Flagging data") for idx, sensor in enumerate(validsensors): lines = int(timerange / validsr[idx]) lastdata = dbgetlines(db, sensor, lines) print(" - got {} datapoints".format( lastdata.length()[0])) if lastdata.length()[0] > 0: sensorid = "_".join(sensor.split('_')[:-1]) print(" - getting existing flags for {}".format( sensorid)) vflag = db2flaglist(db, sensorid, begin=datetime.strftime( starttime, "%Y-%m-%d %H:%M:%S")) print(" - found {} existing flags".format( len(vflag))) if len(vflag) > 0: try: print(" - removing existing flags") lastdata = lastdata.flag(vflag) lastdata = lastdata.remove_flagged() print(" ...success") except: print( " ------------------------------------------------" ) print( " -- Failed to apply flags TODO need to check that" ) flaglist = [] if threshold: print(" - determining new outliers") if debug: print("MARK all: ", markall) flagls = lastdata.flag_outlier(keys=keys, threshold=threshold, timerange=window, returnflaglist=True, markall=markall) # now check flaglist---- if more than 20 consecutive flags... then drop them flaglist = consecutive_check(flagls, remove=True) #if len(flagls) > len(flaglist)+1 and sensor.startswith("LEMI036_1"): #+1 to add some room # statusmsg[name2] = 'Step1: removed consecutive flags for {}: Found {}, Clean: {}'.format(sensor, len(flagls), len(flaglist)) print( " - new outlier flags: {}; after combination: {}" .format(len(flagls), len(flaglist))) if lowlimit: print(" - flagging data below lower limit") flaglow = lastdata.flag_range( keys=keys, below=lowlimit, text='below lower limit {}'.format(lowlimit), flagnum=3) if len(flaglist) == 0: flaglist = flaglow else: flaglist.extend(flaglow) if highlimit: print(" - flagging data above higher limit") flaghigh = lastdata.flag_range( keys=keys, above=highlimit, text='exceeding higher limit {}'.format( highlimit), flagnum=3) if len(flaglist) == 0: flaglist = flaghigh else: flaglist.extend(flaghigh) print(" -> RESULT: found {} new flags".format( len(flaglist))) if not debug and len(flaglist) > 0: for dbel in connectdict: dbt = connectdict[dbel] print( " -- Writing flags for sensors {} to DB {}" .format(sensor, dbel)) print(" -- New flags: {}".format( len(flaglist))) prevflaglist = db2flaglist(dbt, sensorid) if len(prevflaglist) > 0: lastdata.flagliststats(prevflaglist, intensive=True) else: print( " - no flags so far for this sensor") name3 = "{}-toDB-{}".format( config.get('logname'), dbel) statusmsg[ name3] = 'flags successfully written to DB' try: flaglist2db(dbt, flaglist) except: statusmsg[ name3] = 'flags could not be written to DB - disk full?' aftflaglist = db2flaglist(dbt, sensorid) lastdata.flagliststats(aftflaglist, intensive=True) except: print(" -> flagging failed") statusmsg[name1] = 'Step1: flagging failed' if 'upload' in joblist and flagfilepath: print("5. 
Upload flagging lists from files") filelist = [] print(" Searching for new flagging files") for fi in os.listdir(flagfilepath): if fi.endswith("flags.json") or fi.endswith("flags.pkl"): print(" -> found: {}".format(os.path.join(flagfilepath, fi))) filelist.append(os.path.join(flagfilepath, fi)) if len(filelist) > 0: for fi in filelist: fileflaglist = loadflags(fi) try: instnamel = os.path.basename(fi).split('_') instname = ["_".join(instnamel[:3])] # Get instnames from fileflaglist except: instname = [] try: flagdict = [{ "starttime": el[0], "endtime": el[1], "components": el[2].split(','), "flagid": el[3], "comment": el[4], "sensorid": el[5], "modificationdate": el[6] } for el in fileflaglist] instname2 = [el.get('sensorid') for el in flagdict] uniqueinstnames = list(set(instname2)) instname = uniqueinstnames if debug: print(" - Sensorname(s) extracted from flag file") except: if debug: print(" - Sensorname(s) extracted from file name") pass if len(fileflaglist) > 0: print(" - Loaded {} flags from file {} for {}".format( len(fileflaglist), fi, instname)) # get all flags from DB dbflaglist = [] for inst in instname: tmpdbflaglist = db2flaglist(db, inst) dbflaglist = combinelists(dbflaglist, tmpdbflaglist) print(" - {} flags in DB for {}".format( len(dbflaglist), instname)) # combine flaglist fileflaglist = combinelists(fileflaglist, dbflaglist) # clean flaglist - necessary here as dublicates might come from combinations flaglist = DataStream().flaglistclean(fileflaglist) print( " - {} flags remaining after cleaning. Delete {} replicates" .format(len(flaglist), len(fileflaglist) - len(flaglist))) print(" - Combining consecutives") flaglist = consecutive_check(flaglist) print(" - {} flags remaining after joining consecutives.". format(len(flaglist))) # copy file to ...json.uploaded date = datetime.strftime(datetime.utcnow(), "%Y%m%d%H%M") if not debug: copyfile(fi, "{}.uploaded{}".format(fi, date)) # delete existing flags in DB and fill with new 'clean' flaglist for dbel in connectdict: dbt = connectdict[dbel] print(" -- Writing flags to DB {}".format(dbel)) for inst in instname: flaglist2db(db, flaglist, mode='delete', sensorid=inst) # delete flagfile os.remove(fi) print(" -> Done") else: print( " -> debug: will not modify or upload any datasets: {}" .format(date)) statusmsg[ name5] = 'Upload: new flagging data sets uploaded' else: print( " -> Flaglist {} is empty. If pkl file check python version..." .format(fi)) if 'delete' in joblist and flagfilearchivepath: print("Not existing. Deleting content") # not yet available #delsensor = 'RCST7_20160114_0001' #delsensor = 'LEMI036_3_0001' #delcomment = 'aof - threshold 5.0 window 43200.0 sec' #delcomment = 'aof - threshold: 6, window: 600.0 sec' # Backup any data too bee deleted? parameter = 'comment' if delcomment in ['0', '1', '2', '3', '4']: parameter = 'flagnumber' print( " - found a valid flagnumber as value: removing flags with FlagID {}" .format(delcomment)) elif delcomment in KEYLIST: parameter = 'key' print( " - found a valid key as value: removing all flags for key {}". format(delcomment)) print(" - selected sensor {}".format(delsensor)) flaglist = db2flaglist(db, delsensor) print(" - got {} flags".format(len(flaglist))) toberemovedflaglist = DataStream().flaglistmod('select', flaglist, parameter=parameter, value=delcomment) print(" - will backup and then remove {} flags matching your criteria". 
format(len(toberemovedflaglist))) flagfile = os.path.join( flagfilearchivepath, 'flags_{}_backup_{}.json'.format( delsensor, datetime.strftime(datetime.utcnow(), "%Y%m%d%H%M"))) succ = saveflags(toberemovedflaglist, flagfile) if succ: print(" - backup saved to {}".format(flagfile)) remainingflaglist = DataStream().flaglistmod('delete', flaglist, parameter=parameter, value=delcomment) print(" - remaining {} flags".format(len(remainingflaglist))) if not debug: for dbel in connectdict: dbt = connectdict[dbel] print(" -- Writing flags to DB {}".format(dbel)) flaglist2db(dbt, remainingflaglist, mode='delete', sensorid=delsensor) print(" -> Done") else: print(" -> Debug selected - no changes made to DB") else: print( " Backup could not be saved - aborting - check directory permissions" ) if 'clean' in joblist: print("6. Cleaning flagging list") try: print(" Cleaning up all records") cumflag = [] stream = DataStream() flaglist = db2flaglist(db, 'all') if debug: print(" -> Found {} flags in database".format(len(flaglist))) print(" --------------------------------------") stream.flagliststats(flaglist, intensive=True) print(" --------------------------------------") currentyear = endtime.year yearlist = [i for i in range(2000, currentyear + 2)] for year in yearlist: startyear = year - 1 print(" Checking data from {} until {}".format( startyear, year)) beg = '{}-01-01'.format(startyear) end = '{}-01-01'.format(year) flaglist_tmp = db2flaglist(db, 'all', begin=beg, end=end) print(" -> Found {} flags in database between {} and {}". format(len(flaglist_tmp), startyear, year)) if len(flaglist_tmp) > 0: print(" - Cleaning up flaglist") clflaglist_tmp = stream.flaglistclean(flaglist_tmp, progress=True) print(" -> {} flags remaining".format( len(clflaglist_tmp))) if len(clflaglist_tmp) < consecutivethreshold: # TODO this method leads to a killed process sometimes... print(" - Combining consecutives") coflaglist_tmp = consecutive_check( clflaglist_tmp) #debug=debug) else: coflaglist_tmp = clflaglist_tmp print(" -> {} flags remaining".format( len(coflaglist_tmp))) if len(cumflag) == 0: cumflag = coflaglist_tmp else: cumflag.extend(coflaglist_tmp) if debug: print(" -> cleaned record contains {} flags".format( len(cumflag))) print(" --------------------------------------") stream.flagliststats(cumflag, intensive=True) print(" --------------------------------------") if not debug: for dbel in connectdict: dbt = connectdict[dbel] print(" -- Writing flags to DB {}".format(dbel)) flaglist2db(dbt, cumflag, mode='delete', sensorid='all') print(" -> cleaned flaglist uploaded to DB") statusmsg[name3] = 'Cleanup: cleaning database successful' except: print(" -> failure while cleaning up") statusmsg[name3] = 'Cleanup: failure' # schedule with crontab at February 1st 6:00 (analyze yearly) flagging -c /wic.cfg -j archive if 'archive' in joblist and flagfilearchivepath: print("7. Saving archive and deleting old db contents") print(" Archiving flags") print(" ---------------") print(" Every year in February - archive full year two years ago") print(" Delete all inputs older than two years from DB") ## e.g. Feb 2019 -> Keep only 2017 and 2018 in DB ## archive everything before 2017 ## delete everything before 2017 ## -> archive containsnow : flags_2016_final.pkl, # and 2015,etc ## -> flags_archive.pkl current backup (only monthly) ## -> DB contains 2017 to present ... 
3 years max stream = DataStream() flaglist = db2flaglist(db, 'all') if debug: print(" -> Found {} flags in database".format(len(flaglist))) print(" --------------------------------------") stream.flagliststats(flaglist) print(" --------------------------------------") # Backup and export all old flags minyear = 2015 succ = False currentyear = endtime.year if currentyear - 3 > minyear: yearlist = [i for i in range(minyear, currentyear - 2)] else: yearlist = [2015] flaglist_tmp = [] for year in yearlist: startyear = 2000 if year > 2015: startyear = year - 1 print(" Archiving flaglist until {}".format(year)) flagfile = os.path.join(flagfilearchivepath, 'flags_{}-{}.json'.format(startyear, year)) beg = '{}-01-01'.format(startyear) end = '{}-01-01'.format(year) flaglist_tmp = db2flaglist(db, 'all', begin=beg, end=end) if len(flaglist_tmp) > 0: print(" -> Found {} flags in database between {} and {}". format(len(flaglist_tmp), startyear, year)) if os.path.isfile(flagfile): fileflaglist = loadflags(flagfile) print(" -> Found {} flags in file".format( len(fileflaglist))) flaglist_tmp.extend(fileflaglist) # Cleaning has been done already print(" - Saving flag archive to {}".format(flagfilepath)) succ = saveflags(flaglist_tmp, flagfile, overwrite=True) print(" -> Done") if succ: # drop all flags from flaglist print(" Droping all flags until year {}".format(year)) newflaglist = stream.flaglistmod('delete', flaglist, starttime='2000-01-01', endtime=end) print(" -> remaining amount of flags: {}".format( len(newflaglist))) # Cleaning has been done already print( " Uploading new list to database and deleting all other inputs" ) print(" --------------------------------------") stream.flagliststats(newflaglist) print(" --------------------------------------") if not debug: for dbel in connectdict: dbt = connectdict[dbel] print(" -- Writing flags to DB {}".format(dbel)) flaglist2db(dbt, newflaglist, mode='delete') print(" -> Done") else: print(" -> Debug selected - no changes made to DB") statusmsg[name4] = 'Archiving flags: done until {}'.format(year) else: print(" -> Problem with saving files - aborting") statusmsg[name4] = 'Archiving flags: file saving problem' print("------------------------------------------") print(" flagging finished") print("------------------------------------------") print("SUCCESS") if not debug: martaslog = ml(logfile=config.get('logfile'), receiver=config.get('notification')) martaslog.telegram['config'] = config.get('notificationconfig') martaslog.msg(statusmsg) else: print("Debug selected - statusmsg looks like:") print(statusmsg)
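# ###########################
# Illustration only: consecutive_check() is used repeatedly above to join long runs of
# adjacent flags into single entries before writing them back to the database. The function
# below is a strongly simplified sketch of that idea on plain (starttime, endtime) tuples;
# the real implementation works on full MagPy flaglist records and also respects keys,
# flag ids and comments.
from datetime import datetime, timedelta

def merge_consecutive(ranges, tolerance=timedelta(seconds=1)):
    """Merge time ranges that touch or overlap within 'tolerance'."""
    merged = []
    for start, end in sorted(ranges):
        if merged and start - merged[-1][1] <= tolerance:
            merged[-1] = (merged[-1][0], max(end, merged[-1][1]))
        else:
            merged.append((start, end))
    return merged

# Hypothetical usage:
# t0 = datetime(2021, 1, 1)
# merge_consecutive([(t0, t0 + timedelta(seconds=10)),
#                    (t0 + timedelta(seconds=10), t0 + timedelta(seconds=20))])
# -> [(t0, t0 + timedelta(seconds=20))]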
def main(argv): version = '1.0.0' configpath = '' path = '' statusmsg = {} debug = False age = 1 iterate = 'day' structure = '*' logname = 'default' try: opts, args = getopt.getopt(argv, "hc:p:s:a:i:l:D", [ "config=", "path=", "structure=", "age=", "iterate=", "logger=", "debug=", ]) except getopt.GetoptError: print('logfiledates.py -c <config>') sys.exit(2) for opt, arg in opts: if opt == '-h': print('-------------------------------------') print('Description:') print('-- logfiledates.py will check the age of the latest file in a directory --') print( '-----------------------------------------------------------------' ) print('detailed description ..') print('...') print('...') print('-------------------------------------') print('Usage:') print('python logfiledates.py -c <config>') print('-------------------------------------') print('Options:') print('-c (required) : configuration data path') print('-p : path to be checked') print( '-s : structure like "*.json" or "*A16.txt". Default is "*" ' ) print('-a : tolerated age (integer). Default is 1') print( '-i : increment of age (day, hour, minute, second). Default is day' ) print('-l : name of the logger') print('-------------------------------------') print('Application:') print( 'python3 logfiledates.py -c ../conf/wic.cfg -p /srv/archive/flags/uploads/ -a 1 -i day' ) sys.exit() elif opt in ("-c", "--config"): # configuration file path configpath = os.path.abspath(arg) elif opt in ("-p", "--path"): # directory to be checked path = os.path.abspath(arg) elif opt in ("-s", "--structure"): # file name structure to match structure = arg elif opt in ("-a", "--age"): # tolerated age as integer age = int(arg) elif opt in ("-i", "--iterate"): # unit of the age value iterate = arg elif opt in ("-l", "--logger"): # name of the logger logname = arg elif opt in ("-D", "--debug"): # activate debug output debug = True if debug: print("Running logfiledates version {}".format(version)) if not os.path.exists(configpath): print('Specify a valid path to configuration information') print('-- check logfiledates.py -h for more options and requirements') sys.exit() print("1. Read and check validity of configuration data") config = GetConf(configpath) print("2. Activate logging scheme as selected in config") config = DefineLogger(config=config, category="DataProducts", job=os.path.basename(__file__), newname='mm-dp-logfiledate.log', debug=debug) name = "{}-{}".format(config.get('logname'), logname) statusmsg[name] = 'Latest file younger than {} {}'.format(age, iterate) print("3. Create file search") filepath = os.path.join(path, structure) print("4. Reading directory and getting latest file") fi = _latestfile(filepath, date=False, latest=True, debug=debug) da = _latestfile(filepath, date=True, latest=True, debug=debug) if os.path.isfile(fi): print(" - Got {} created at {}".format(fi, da)) diff = (datetime.utcnow() - da).total_seconds() print(" - Diff {} sec".format(diff)) accepteddiff = agerange(age, iterate) print(" - Accepted: {} sec".format(accepteddiff)) if diff >= accepteddiff: statusmsg[name] = 'Latest file older than {} {}'.format( age, iterate) else: print(" - No file(s) found - check path and structure") statusmsg[name] = 'No file(s) found' if not debug: martaslog = ml(logfile=config.get('logfile'), receiver=config.get('notification')) martaslog.telegram['config'] = config.get('notificationconfig') martaslog.msg(statusmsg) else: print("Debug selected - statusmsg looks like:") print(statusmsg)
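# ###########################
# Illustration only: _latestfile() and agerange() used above are imported from the analysis
# support module and not shown here. The two sketches below indicate what they are expected
# to do; names and bodies are assumptions based solely on how they are called above.
import glob
import os
from datetime import datetime

def latest_file(pattern, date=False):
    """Return the newest file matching 'pattern', or its mtime when date=True."""
    candidates = glob.glob(pattern)
    if not candidates:
        return datetime(1900, 1, 1) if date else ''
    newest = max(candidates, key=os.path.getmtime)
    return datetime.utcfromtimestamp(os.path.getmtime(newest)) if date else newest

def age_in_seconds(age, iterate='day'):
    """Convert an (age, unit) pair into seconds, defaulting to days."""
    factor = {'day': 86400, 'hour': 3600, 'minute': 60, 'second': 1}
    return age * factor.get(iterate, 86400)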
def main(argv): try: version = __version__ except: version = "1.0.0" configpath = '' statusmsg = {} debug = False starttime = None endtime = None source = 'database' try: opts, args = getopt.getopt(argv, "hc:e:s:D", [ "config=", "endtime=", "starttime=", "debug=", ]) except getopt.GetoptError: print('current_weatherchanges.py -c <config>') sys.exit(2) for opt, arg in opts: if opt == '-h': print('-------------------------------------') print('Description:') print( '-- current_weatherchanges.py will determine the primary instruments --' ) print( '-----------------------------------------------------------------' ) print('detailed description ..') print('...') print('...') print('-------------------------------------') print('Usage:') print('python weather_products.py -c <config>') print('-------------------------------------') print('Options:') print('-c (required) : configuration data path') print('-e : endtime - default is now') print( '-s : starttime - default is three days from now') print('-------------------------------------') print('Application:') print( 'python current_weatherchanges.py -c /etc/marcos/analysis.cfg') sys.exit() elif opt in ("-c", "--config"): # delete any / at the end of the string configpath = os.path.abspath(arg) elif opt in ("-s", "--starttime"): # get an endtime starttime = arg elif opt in ("-e", "--endtime"): # get an endtime endtime = arg elif opt in ("-D", "--debug"): # delete any / at the end of the string debug = True print("Running current_weather version {}".format(version)) print("--------------------------------") if endtime: try: endtime = DataStream()._testtime(endtime) except: print(" Could not interprete provided endtime. Please Check !") sys.exit(1) else: endtime = datetime.utcnow() if starttime: try: starttime = DataStream()._testtime(starttime) except: print(" Could not interprete provided starttime. Please Check !") sys.exit(1) else: starttime = datetime.utcnow() - timedelta(days=3) if starttime >= endtime: print(" Starttime is larger than endtime. Please correct !") sys.exit(1) if not os.path.exists(configpath): print('Specify a valid path to configuration information') print( '-- check magnetism_products.py -h for more options and requirements' ) sys.exit() print("1. Read and check validity of configuration data") config = GetConf(configpath) print("2. Activate logging scheme as selected in config") config = DefineLogger(config=config, category="PeriodicGraphs", job=os.path.basename(__file__), newname='mm-pg-currentweatherchanges.log', debug=debug) name1 = "{}-weatherchange".format(config.get('logname')) statusmsg[name1] = 'weather change analysis successful' print("3. Connect databases and select first available") try: config = ConnectDatabases(config=config, debug=debug) db = config.get('primaryDB') except: statusmsg[name1] = 'database failed' # it is possible to save data also directly to the brokers database - better do it elsewhere print("4. Weather change analysis") success = weather_change(db, config=config, starttime=starttime, endtime=endtime, debug=debug) if not success: statusmsg[name1] = 'weather change analysis failed' if not debug: martaslog = ml(logfile=config.get('logfile'), receiver=config.get('notification')) martaslog.telegram['config'] = config.get('notificationconfig') martaslog.msg(statusmsg) else: print("Debug selected - statusmsg looks like:") print(statusmsg)
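# ###########################
# Illustration only: the start/end time handling above (defaults, parsing and ordering
# check) is a pattern shared by several of these scripts. The helper below consolidates
# that logic as a sketch; it is not part of the original code and uses dateutil instead of
# DataStream()._testtime().
from datetime import datetime, timedelta
from dateutil import parser as dateparser

def resolve_timewindow(starttime=None, endtime=None, defaultdays=3):
    """Return (start, end) datetimes; raise ValueError on invalid input."""
    end = dateparser.parse(endtime) if endtime else datetime.utcnow()
    start = dateparser.parse(starttime) if starttime else end - timedelta(days=defaultdays)
    if start >= end:
        raise ValueError("starttime must be earlier than endtime")
    return start, end

# Hypothetical usage:
# resolve_timewindow(starttime="2022-05-01", endtime="2022-05-04")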
def main(argv): version = '1.0.0' configpath = '' statusmsg = {} debug=False sensor = '' outpath = '/tmp' duration=2 format_type='PYASCII' endtime = None try: opts, args = getopt.getopt(argv,"hc:s:o:e:d:f:D",["config=","sensor=","outputpath=","endtime=","duration=","format=","debug=",]) except getopt.GetoptError: print ('getprimary.py -c <config>') sys.exit(2) for opt, arg in opts: if opt == '-h': print ('-------------------------------------') print ('Description:') print ('-- getprimary.py will determine the primary instruments --') print ('-----------------------------------------------------------------') print ('detailed description ..') print ('...') print ('...') print ('-------------------------------------') print ('Usage:') print ('python getprimary.py -c <config>') print ('-------------------------------------') print ('Options:') print ('-c (required) : configuration data path') print ('-------------------------------------') print ('Application:') print ('python getprimary.py -c /etc/marcos/analysis.cfg') sys.exit() elif opt in ("-c", "--config"): # delete any / at the end of the string configpath = os.path.abspath(arg) elif opt in ("-s", "--sensor"): sensor = arg elif opt in ("-o", "--outputpath"): # delete any / at the end of the string outpath = os.path.abspath(arg) elif opt in ("-e", "--endtime"): # get an endtime endtime = arg elif opt in ("-d", "--duration"): duration = int(arg) elif opt in ("-f", "--format"): format_type = arg elif opt in ("-D", "--debug"): # delete any / at the end of the string debug = True print ("Running convert data version {}".format(version)) print ("--------------------------------") if not os.path.exists(configpath): print ('Specify a valid path to configuration information') print ('-- check magnetism_products.py -h for more options and requirements') sys.exit() if not sensor: print ('Specify a sensor name') sys.exit(0) if endtime: endtime = DataStream()._testtime(endtime) else: endtime = datetime.utcnow() print ("1. Read and check validity of configuration data") config = GetConf(configpath) print ("2. Activate logging scheme as selected in config") config = DefineLogger(config=config, category="DataProducts", job=os.path.basename(__file__), newname='mm-dp-convert{}.log'.format(sensor)) name1 = "{}-{}".format(config.get('logname'),sensor) statusmsg[name1] = 'successful' print ("3. Connect databases and select first available") try: config = ConnectDatabases(config=config, debug=debug) db = config.get('primaryDB') if debug: print (" -- success") except: if debug: print (" -- database failed") statusmsg[name1] = 'database failed' print ("4. Running conversion") try: starttime = datetime.strptime(datetime.strftime(endtime-timedelta(days=duration),"%Y-%m-%d"),"%Y-%m-%d") convert(db, sensor,outpath,starttime=starttime,endtime=endtime,format_type=format_type,debug=debug) except: if debug: print (" -- conversion failed") statusmsg[name1] = 'conversion failed' if not debug: martaslog = ml(logfile=config.get('logfile'),receiver=config.get('notification')) martaslog.telegram['config'] = config.get('notificationconfig') martaslog.msg(statusmsg) else: print ("Debug selected - statusmsg looks like:") print (statusmsg)
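# ###########################
# Illustration only: convert() called above is defined elsewhere in this module. The sketch
# below shows the kind of operation it presumably performs, assuming MagPy's readDB() and
# DataStream.write() interfaces; the helper name convert_sketch() and all options shown are
# assumptions for illustration only.
from magpy.database import readDB

def convert_sketch(db, tablename, outpath, starttime, endtime, format_type='PYASCII'):
    """Read one sensor table from the database and export it to outpath."""
    data = readDB(db, tablename, starttime=starttime, endtime=endtime)
    if data.length()[0] > 0:
        data.write(outpath, format_type=format_type)
    return data.length()[0]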