def handler(obj):
    try:
        fname = obj.filename.split("/")[-1]
        item = action_table.get(fname)
        if item is None:
            return apache.HTTP_NOT_FOUND

        (action, multipar) = item
        args = {}

        if obj.args is not None:
            for arg in obj.args.split('&'):
                pv = arg.split('=', 1)
                if pv[0] in multipar:
                    if pv[0] not in args:
                        args[pv[0]] = []

                    if len(pv) == 1:
                        args[pv[0]].append("")
                    else:
                        args[pv[0]].append(urllib.unquote(pv[1]))

                else:
                    if len(pv) == 1:
                        args[pv[0]] = ""
                    else:
                        args[pv[0]] = urllib.unquote(pv[1])

        action(obj, args)

    except ArclinkAuthFailed as e:
        obj.err_headers_out['WWW-Authenticate'] = 'Basic realm="%s"' % (e.dcname,)
        logs.debug("unauthorized")
        return apache.HTTP_UNAUTHORIZED
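# Hedged illustration (not part of the module) of how the query-string loop
# in handler() treats parameters: names listed in `multipar` accumulate
# repeated values in a list, all others keep a single string.  The parameter
# names and query string here are invented, and urllib unquoting is omitted.
def _example_query_parsing():
    multipar = set(["channel"])
    args = {}
    for arg in "channel=BHZ&channel=BHN&format=mseed".split('&'):
        pv = arg.split('=', 1)
        if pv[0] in multipar:
            args.setdefault(pv[0], []).append(pv[1] if len(pv) > 1 else "")
        else:
            args[pv[0]] = pv[1] if len(pv) > 1 else ""
    return args  # {'channel': ['BHZ', 'BHN'], 'format': 'mseed'}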
def submit_email(to, subj, text):
    """
    Sends an email with given subject and text to a specified address
    using the specified SMTP host.

    @arguments: to,   a string storing the email address of the recipient
                subj, a string defining the email subject
                text, a string containing the email message text
    """
    # I am not able to set sender to [email protected] with smtplib.
    # It is forced to [email protected], causing the message to be rejected
    # by the GFZ mail server. -Andres
    #msg = MIMEText("")
    #msg['Subject'] = subj
    #msg['From'] = EMAIL_ADDR
    #msg['To'] = to
    #msg.set_payload(text)
    #server = smtplib.SMTP(SMTP_SERVER)
    #server.sendmail(EMAIL_ADDR, to, msg.as_string())
    #server.quit()

    cmd = "%s -I'From: %s' -I'To: %s' -I'Subject: %s' -a'Message-ID:' -A'X-Loop: %s' | %s -f'%s' -- '%s'" % \
        (FORMAIL_BIN, EMAIL_FROM, to, subj, EMAIL_ADDR, SENDMAIL_BIN, EMAIL_ADDR, to)

    logs.debug("executing cmd: %s" % cmd)
    fd = os.popen(cmd, "w")
    try:
        fd.write(text)
    finally:
        fd.close()
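# Hedged sketch (not executed by the module) of how the formail | sendmail
# pipeline above is assembled.  The binaries and addresses below are invented
# stand-ins for FORMAIL_BIN, SENDMAIL_BIN, EMAIL_FROM and EMAIL_ADDR.
def _example_mail_command():
    formail_bin, sendmail_bin = "/usr/bin/formail", "/usr/sbin/sendmail"
    email_from, email_addr, to = "breqfast", "breqfast@localhost", "user@localhost"
    cmd = "%s -I'From: %s' -I'To: %s' -I'Subject: %s' -a'Message-ID:' -A'X-Loop: %s' | %s -f'%s' -- '%s'" % \
        (formail_bin, email_from, to, "test subject", email_addr,
         sendmail_bin, email_addr, to)
    return cmd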
def __parse_request(self, line):
    """
    Gets a request line in Breq_fast format and matches it against the
    corresponding pattern. If the line matches, the request list is
    extended; otherwise the fail string is.

    @arguments: line, a request line in Breq_fast format
    """
    loc = "*"
    self.request = "%s\n%s" % (self.request, line)
    m = re.search(r"\s+".join(self.__reqlist), line)
    if m:
        d = m.groupdict()
        logs.debug("request_line: %s" % line)

        # catch two digit year inputs
        if d["beg_2year"]:
            if int(d["beg_2year"]) > 50:
                d["beg_4year"] = "19%s" % d["beg_2year"]
            else:
                d["beg_4year"] = "20%s" % d["beg_2year"]
        if d["end_2year"]:
            if int(d["end_2year"]) > 50:
                d["end_4year"] = "19%s" % d["end_2year"]
            else:
                d["end_4year"] = "20%s" % d["end_2year"]

        # some users have problems with time...
        if int(d["beg_hour"]) > 23:
            d["beg_hour"] = "23"
        if int(d["end_hour"]) > 23:
            d["end_hour"] = "23"
        if int(d["beg_min"]) > 59:
            d["beg_min"] = "59"
        if int(d["end_min"]) > 59:
            d["end_min"] = "59"
        if int(d["beg_sec"]) > 59:
            d["beg_sec"] = "59"
        if int(d["end_sec"]) > 59:
            d["end_sec"] = "59"

        try:
            beg_time = datetime.datetime(int(d["beg_4year"]), int(d["beg_month"]), int(d["beg_day"]),
                                         int(d["beg_hour"]), int(d["beg_min"]), int(d["beg_sec"]))
            end_time = datetime.datetime(int(d["end_4year"]), int(d["end_month"]), int(d["end_day"]),
                                         int(d["end_hour"]), int(d["end_min"]), int(d["end_sec"]))
        except ValueError as e:
            self.failstr = "%s%s [error: wrong begin or end time: %s]\n" % (self.failstr, line, e)
            return

        # expand network and station
        for (network, station) in self.__expand_net_station(d["network"], d["station"],
                                                            beg_time, end_time):
            cha_list = re.findall(r"([\w?\*]+)\s*", d["cha_list"])
            if len(cha_list) == int(d["cha_num"]) + 1:
                loc = cha_list.pop()
            for cha in cha_list:
                cha = re.sub("[?]+", "*", cha)
                self.reqlist.append((str(network), str(station), cha, loc,
                                     beg_time, end_time, {}, set()))
                logs.debug("reqlist.append: %s %s %s %s" %
                           (str(network), str(station), cha, loc))
    else:
        self.failstr = "%s%s\n" % (self.failstr, line)
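# Hedged illustration of the two-digit year pivot used by __parse_request
# above: values above 50 are taken as 19xx, the rest as 20xx.
def _example_two_digit_year():
    for yy, expected in (("97", "1997"), ("07", "2007"), ("51", "1951")):
        yyyy = ("19%s" if int(yy) > 50 else "20%s") % yy
        assert yyyy == expected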
def parse_breqfast_from_handler(req, fh):
    parser = BreqParser()
    parser.parse_email_from_handler(fh)
    req.content = parser.reqlist

    logs.debug("")
    if parser.failstr:
        logs.error(parser.failstr)
    else:
        logs.info("parsed %d lines from breqfast message" % len(req.content))
def iterdata(self, time1, time2, net, sta, cha, loc):
    lasttime = None
    lastrecord = None
    recstream = []

    if self.__is_iso(time1, time2, net, sta, os.path.exists):
        recstream.append(("isoarchive", self.isodir))
    if self.__is_sds(time1, time2, net, sta, cha, loc, self.archdir,
                     os.path.exists, os.listdir):
        recstream.append(("sdsarchive", self.archdir))
    if self.__is_sds(time1, time2, net, sta, cha, loc, self.nrtdir,
                     os.path.exists, os.listdir):
        recstream.append(("sdsarchive", self.nrtdir))

    if not recstream and self.exists_db(time1, time2, net, sta, cha, loc):
        raise TemporaryUnavailabilityException

    for (service, source) in recstream:
        if lastrecord:
            try:
                etime = lastrecord.endTime()
            except Core.ValueException:
                logs.warning("SDS: record.endTime() raises Core.ValueException! "
                             "Resulting SEED file may be incorrect!")
                etime = lastrecord.startTime()

            timetuple = time.strptime(etime.toString("%Y-%m-%d %H:%M:%S"),
                                      "%Y-%m-%d %H:%M:%S")
            # continue one second after the last record to avoid duplicates
            lasttime = datetime.datetime(*timetuple[:6]) + datetime.timedelta(seconds=1)
            if lasttime >= time2:
                break

            time1 = lasttime
            lastrecord = None

        self._recstream = IO.RecordStream.Create(service)
        if not self._recstream:
            logs.error("Could not fetch recordstream service '%s'" % service)
            raise StopIteration

        if not self._recstream.setSource(source):
            logs.error("Could not set recordstream source '%s'" % source)
            self._recstream = None
            raise StopIteration

        logs.debug("%s %s: addStream for %s-%s" % (service, source, str(time1), str(time2)))
        self._recstream.addStream(net, sta, loc, cha,
                                  Core.Time.FromString(str(time1), "%Y-%m-%d %H:%M:%S"),
                                  Core.Time.FromString(str(time2), "%Y-%m-%d %H:%M:%S"))

        try:
            recinput = IO.RecordInput(self._recstream, Core.Array.DOUBLE,
                                      Core.Record.SAVE_RAW)
            record = recinput.next()
            while record:
                yield record.raw().str()
                lastrecord = record
                record = recinput.next()
        except Core.GeneralException as e:
            logs.error(e.what())
        except Exception as e:
            logs.error("SDS: Unexpected exception occurred: %s" % e)
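# Hedged, self-contained sketch of the resume logic in iterdata above: the
# end time of the last delivered record is parsed back from its string form
# and bumped by one second, so the next archive is queried without
# duplicating records.
def _example_resume_time():
    import time, datetime
    timetuple = time.strptime("2010-02-18 12:00:00", "%Y-%m-%d %H:%M:%S")
    lasttime = datetime.datetime(*timetuple[:6]) + datetime.timedelta(seconds=1)
    assert lasttime == datetime.datetime(2010, 2, 18, 12, 0, 1)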
def __init__(self, appName):
    # initialize SC3 environment
    env = seiscomp3.System.Environment_Instance()

    # set up logging
    self.__syslog = seiscomp3.Logging.SyslogOutput()
    self.__syslog.open(appName, syslog_facility)

    for (v, c) in ((1, "error"), (2, "warning"), (2, "notice"),
                   (3, "info"), (4, "debug")):
        if verbosity >= v:
            self.__syslog.subscribe(seiscomp3.Logging.getGlobalChannel(c))

    logs.debug = seiscomp3.Logging.debug
    logs.info = seiscomp3.Logging.info
    logs.notice = seiscomp3.Logging.notice
    logs.warning = seiscomp3.Logging.warning
    logs.error = seiscomp3.Logging.error

    logs.notice("Starting webinterface")

    # load SC3 config files from all standard locations (SEISCOMP_ROOT must be set)
    self.__cfg = seiscomp3.Config.Config()
    env.initConfig(self.__cfg, appName, env.CS_FIRST, env.CS_LAST, True)

    self.__action_table = {}
    self.__modules = {}

    # Common config variables
    self.server_folder = self.getConfigString('SERVER_FOLDER', None)
    if not self.server_folder:
        err = "%s: Cannot find server root, configuration not loaded" % (appName,)
        raise Exception(err)

    if not os.path.exists(self.server_folder):
        err = "%s: Server root directory not found" % (appName,)
        raise Exception(err)

    # Add inventory cache here, to be accessible to all modules
    inventory = os.path.join(self.server_folder, 'data', 'Arclink-inventory.xml')
    self.ic = InventoryCache(inventory)

    # Load all modules in given directory.
    # Modules must contain a class WI_Module, whose __init__() takes a
    # WebInterface object (our self) as an argument and calls our
    # addAction().
    #for f in glob.glob(os.path.join(env.shareDir(), "plugins", "webinterface", "*.py")):
    for f in glob.glob(os.path.join(self.server_folder, "wsgi", "modules", "*.py")):
        self.__load_module(f)

    logs.debug(str(self))
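# Hedged illustration of the verbosity table used in __init__ above: at
# verbosity 2 the error, warning and notice channels are enabled, while
# info and debug stay off.  The helper name is invented.
def _example_enabled_channels(verbosity):
    table = ((1, "error"), (2, "warning"), (2, "notice"), (3, "info"), (4, "debug"))
    return [c for (v, c) in table if verbosity >= v]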
def parse_native_from_handler(req, fd):
    rqline = fd.readline()
    while rqline:
        rqline = rqline.strip()
        if not rqline:
            rqline = fd.readline()
            logs.debug("skipping empty request line")
            continue

        rqsplit = rqline.split()
        if len(rqsplit) < 3:
            logs.error("invalid request line: '%s'" % (rqline,))
            rqline = fd.readline()
            continue

        try:
            start_time = datetime.datetime(*map(int, rqsplit[0].split(",")))
            end_time = datetime.datetime(*map(int, rqsplit[1].split(",")))
        except ValueError as e:
            logs.error("syntax error (%s): '%s'" % (str(e), rqline))
            rqline = fd.readline()
            continue

        network = rqsplit[2]
        station = "."
        channel = "."
        location = "."

        i = 3
        if len(rqsplit) > 3 and rqsplit[3] != ".":
            station = rqsplit[3]
            i += 1

            if len(rqsplit) > 4 and rqsplit[4] != ".":
                channel = rqsplit[4]
                i += 1

                if len(rqsplit) > 5 and rqsplit[5] != ".":
                    location = rqsplit[5]
                    i += 1

        while len(rqsplit) > i and rqsplit[i] == ".":
            i += 1

        constraints = {}
        for arg in rqsplit[i:]:
            pv = arg.split('=', 1)
            if len(pv) != 2:
                raise ArclinkError("invalid request syntax")

            constraints[pv[0]] = pv[1]

        req.add(network, station, channel, location, start_time, end_time,
                constraints)

        rqline = fd.readline()
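# Hedged sketch of the native request line format handled above:
# "YYYY,MM,DD,HH,MM,SS YYYY,MM,DD,HH,MM,SS NET [STA [CHA [LOC]]] [key=value ...]".
# The req object and its add() method are left out; the line is invented.
def _example_native_line():
    import datetime
    rqsplit = "2010,02,18,12,00,00 2010,02,18,12,10,00 GE WLF BH*".split()
    start_time = datetime.datetime(*map(int, rqsplit[0].split(",")))
    end_time = datetime.datetime(*map(int, rqsplit[1].split(",")))
    return (rqsplit[2], rqsplit[3], rqsplit[4], start_time, end_time)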
def application(environ, start_response):
    """Main WSGI handler that processes client requests and calls
    the proper functions.

    Begun by Javier Quinteros <*****@*****.**>, GEOFON team, June 2013

    """
    # Read the URI and save the first word in fname
    #fname = environ['PATH_INFO'].split("/")[-1]
    #fname = environ['PATH_INFO'].lstrip('/').split("/")[0]
    fname = environ['PATH_INFO']

    if not len(fname):
        fname = 'default'

    logs.debug('fname: %s' % (fname,))

    item = wi.getAction(fname)
    logs.debug('item: %s' % (str(item),))

    # Among others, this will filter wrong function names,
    # but also the favicon.ico request, for instance.
    if item is None:
        status = '404 Not Found'
        return send_html_response(status, 'Error! ' + status, start_response)

    (action, multipar) = item
    parameters = {}

    try:
        form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)
    except ValueError as e:
        if str(e) == "Maximum content length exceeded":
            # Add some user-friendliness (this message triggers an alert box on the client)
            return send_plain_response("400 Bad Request",
                                       "maximum request size exceeded",
                                       start_response)

        return send_plain_response("400 Bad Request", str(e), start_response)
def __expand_net_station(self, network, station, beg_time, end_time):
    """
    Mathias, 31.05.2007

    Expands a possibly wildcarded station code field by looking it up in
    the inventory. Allowed wildcards: '?' and '*'

    @arguments: string network: look up stations in this network (e.g. GE or G* or ?? or * or *E)
                string station: one station entry (e.g. SNAA or SN?? or ???? or * or SN* or ?)
                beg_time, end_time: limit the search for net/stations to the selected time window
    @return: list of expanded (network, station) code tuples
    """
    net_station_list = []

    if re.search(r"[?\*]+", station):
        logs.debug("*** expanding %s for network: %s" % (station, network))

        # FIXME: begin=None, end=None
        db = self.mgr.get_inventory(network=re.sub("[?]+", "*", network),
                                    station=re.sub("[?]+", "*", station),
                                    begin=None, end=None)

        netlist = db.network.keys()
        logs.debug("- netlist: %s" % netlist)

        # build the station regex once from the wildcarded code
        s = re.sub("^[?]$", "*", station)         # single ? --> *
        s = re.sub("[?]", ".", s)                 # SN?? --> SN..
        s = re.sub(r"[\*]", ".*", "^" + s + "$")  # S*AA --> ^S.*AA$

        for net_code in netlist:
            for net in db.network[net_code].itervalues():
                for sta_code in net.station:
                    #logs.debug("- net/stacode: %s %s" % (net_code, sta_code))
                    if re.match(s, sta_code):
                        net_station_list.append((net_code, sta_code))
    else:
        net_station_list.append((network, station))

    return net_station_list
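# Hedged illustration of the wildcard-to-regex translation above: '?'
# matches exactly one character and '*' any run of characters.
def _example_station_pattern():
    import re
    station = "SN??"
    s = re.sub("^[?]$", "*", station)         # a lone '?' means '*'
    s = re.sub("[?]", ".", s)                 # SN?? --> SN..
    s = re.sub(r"[\*]", ".*", "^" + s + "$")  # S*AA --> ^S.*AA$
    assert re.match(s, "SNAA") and not re.match(s, "SNA")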
def send_notifiers(self, group):
    Nsize = DataModel.Notifier.Size()

    if Nsize > 0:
        logs.warning("trying to apply %d changes..." % Nsize)
    else:
        logs.notice("no changes to apply")
        return 0

    Nmsg = DataModel.Notifier.GetMessage(True)

    it = Nmsg.iter()
    msg = DataModel.NotifierMessage()

    maxmsg = 100
    sent = 0
    mcount = 0

    try:
        try:
            while it.get():
                msg.attach(DataModel.Notifier_Cast(it.get()))
                mcount += 1
                if msg and mcount == maxmsg:
                    sent += mcount
                    logs.debug("sending message (%5.1f %%)" %
                               (sent / float(Nsize) * 100.0))
                    self.send(group, msg)
                    msg.clear()
                    mcount = 0
                    self.sync("fill-db")
                it.next()
        except:
            # iteration finished or failed; fall through to flush the rest
            pass
    finally:
        if msg.size():
            logs.debug("sending message (%5.1f %%)" % 100.0)
            self.send(group, msg)
            msg.clear()
            self.sync("fill-db")

    return mcount
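# Hedged sketch of the batching pattern used by send_notifiers above:
# attach items until maxmsg is reached, flush, and flush the remainder at
# the end.  E.g. _example_batches(range(5), 2) -> [[0, 1], [2, 3], [4]].
def _example_batches(items, maxmsg=100):
    batches = []
    current = []
    for item in items:
        current.append(item)
        if len(current) == maxmsg:
            batches.append(current)
            current = []
    if current:
        batches.append(current)
    return batches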
def purge(obj, args):
    myaddr = args.get("arclink")
    req_id = args.get("req_id")

    try:
        (host, port) = myaddr.split(':')
        port = int(port)
    except (AttributeError, ValueError):
        raise ArclinkError("invalid ArcLink address")

    if req_id is None or len(req_id) == 0:
        raise ArclinkError("missing request ID")

    (sesskey, user, passwd) = init_session(obj, args)
    arcl.open_connection(host, port, user, passwd, timeout=ARCLINK_TIMEOUT)
    try:
        logs.debug("connected to %s at %s" % (arcl.software, arcl.organization))
        arcl_status = arcl.purge(req_id)
        logs.debug("request deleted")
    finally:
        logs.debug("closing connection")
        arcl.close_connection()

    url = "http://%s%s/status?sesskey=%s&arclink=%s" % \
        (obj.hostname, obj.uri.rsplit("/", 1)[0], sesskey, myaddr)

    obj.headers_out['Location'] = url
    raise apache.SERVER_RETURN(apache.HTTP_MOVED_PERMANENTLY)
def download(obj, args):
    myaddr = args.get("arclink")
    req_id = args.get("req_id")
    vol_id = args.get("vol_id")

    try:
        (host, port) = myaddr.split(':')
        port = int(port)
    except (AttributeError, ValueError):
        raise ArclinkError("invalid ArcLink address")

    if req_id is None or len(req_id) == 0:
        raise ArclinkError("missing request ID")

    (sesskey, user, passwd) = init_session(obj, args)
    arcl.open_connection(host, port, user, passwd, timeout=ARCLINK_TIMEOUT)
    try:
        logs.debug("connected to %s at %s" % (arcl.software, arcl.organization))
        arcl_status = arcl.get_status(req_id)
        logs.debug("got request status")
        req_type = arcl_status.request[0].type

        fext = ''
        if "compression=bzip2" in arcl_status.request[0].args.split():
            obj.content_type = "application/x-bzip2"
            fext = '.bz2'
        elif req_type == "WAVEFORM" or req_type == "RESPONSE":
            obj.content_type = "application/x-seed"
        else:
            obj.content_type = "application/xml"
            fext = '.xml'

        obj.headers_out['Content-Disposition'] = \
            'attachment; filename=ArclinkRequest_%s%s' % (str(req_id), fext)

        arcl.download_data(obj, req_id, vol_id)
        logs.debug("download finished")
    finally:
        logs.debug("closing connection")
        arcl.close_connection()
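# Hedged sketch of the content-type / file-extension choice made in
# download() above, with the request type and argument string passed in as
# plain strings (the helper itself is invented for illustration).
def _example_content_type(req_type, req_args):
    if "compression=bzip2" in req_args.split():
        return ("application/x-bzip2", ".bz2")
    elif req_type in ("WAVEFORM", "RESPONSE"):
        return ("application/x-seed", "")
    return ("application/xml", ".xml")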
def status(obj, args):
    myaddr = args.get("arclink")
    req_id = args.get("req_id", "ALL")

    try:
        (host, port) = myaddr.split(':')
        port = int(port)
    except (AttributeError, ValueError):
        raise ArclinkError("invalid ArcLink address")

    (sesskey, user, passwd) = init_session(obj, args)
    arcl.open_connection(host, port, user, passwd, timeout=ARCLINK_TIMEOUT)

    try:
        arcl.send_command("USER_IP %s" % obj.remote_host)
    except:
        pass

    try:
        obj.content_type = "text/html; charset=UTF-8"
        obj.write(status_head)
        logs.debug("connected to %s at %s" % (arcl.software, arcl.organization))
        arcl_status = arcl.get_status(req_id)
        logs.debug("got request status")

        for req in arcl_status.request:
            if req.error:
                req_status = "ERROR"
            elif req.ready:
                req_status = "READY"
            else:
                req_status = "PROCESSING"

            obj.write('Request ID: %s, Type: %s, Args: %s<br>\n' %
                      (req.id, req.type, req.args))
            obj.write('Status: %s, Size: %d, Info: %s<br>\n' %
                      (req_status, req.size, req.message))

            if req.user != "":
                obj.write('User: %s, Institution: %s<br>\n' %
                          (req.user, req.institution))

            if req.ready and not req.error and req.size > 0:
                obj.write('<a href="download?sesskey=%s&arclink=%s&req_id=%s">Download!</a> ' %
                          (sesskey, myaddr, req.id))
                obj.write('<a href="purge?sesskey=%s&arclink=%s&req_id=%s">Delete!</a><br>' %
                          (sesskey, myaddr, req.id))

            for vol in req.volume:
                obj.write(4 * ' ' + 'Volume ID: %s, Status: %s, Size: %d, Info: %s<br>\n' %
                          (vol.id, arclink_status_string(vol.status), vol.size, vol.message))

                if vol.status == STATUS_OK or vol.status == STATUS_WARN:
                    obj.write(4 * ' ' + '<a href="download?sesskey=%s&arclink=%s&req_id=%s&vol_id=%s">Download!</a><br>' %
                              (sesskey, myaddr, req.id, vol.id))

                for rqln in vol.line:
                    obj.write('<br>\n')
                    obj.write(8 * ' ' + '<font face="courier">%s</font><br>\n' %
                              (rqln.content,))
                    obj.write(8 * ' ' + 'Status: %s, Size: %d, Info: %s<br>\n' %
                              (arclink_status_string(rqln.status), rqln.size, rqln.message))

            obj.write('<br>\n')
    finally:
        logs.debug("closing connection")
        arcl.close_connection()

    obj.write(status_tail)
def run(self):
    try:
        seiscompRoot = self.commandline().unrecognizedOptions()[0]
        sys.stderr.write("root directory: %s\n" % seiscompRoot)

        try:
            DCID = self.configGetString("datacenterID")
        except:
            logs.error("datacenterID not found in global.cfg")
            return False

        networkRestricted = {}
        incompleteResponse = {}

        global instdb
        instdb = Instruments(DCID)

        self.__load_file(loadGains, os.path.join(seiscompRoot, "config", "gain.dlsv"))
        # for backwards compatibility
        self.__load_file(loadGains, os.path.join(seiscompRoot, "config", "gain.tab.out"))
        self.__load_file(loadGains, os.path.join(seiscompRoot, "config", "gain.tab"))

        try:
            self.__load_file(instdb.load_db,
                             os.path.join(seiscompRoot, "resp", "inst.db"))
            self.__load_file(instdb.load_sensor_attr,
                             os.path.join(seiscompRoot, "resp", "sensor_attr.csv"))
            self.__load_file(instdb.load_datalogger_attr,
                             os.path.join(seiscompRoot, "resp", "datalogger_attr.csv"))
        except (IOError, NettabError) as e:
            logs.error("fatal error: " + str(e))
            return False

        sc3Inv = seiscomp3.DataModel.Inventory()
        inventory = InventoryWrapper(sc3Inv, DCID)

        existingNetworks = set()
        existingStations = set()

        for f in glob.glob(os.path.join(seiscompRoot, "key", "network_*")):
            try:
                logs.debug("processing " + f)
                netCode = f.split("/network_")[-1]
                try:
                    kf = Keyfile(f)
                except IOError as e:
                    logs.error(str(e))
                    continue

                existingNetworks.add(netCode)
                networkRestricted[netCode] = False

                inventory.updateNetwork(netCode, kf)
            except ValueError as e:
                logs.error("%s: %s" % (f, str(e)))

        for f in glob.glob(os.path.join(seiscompRoot, "key", "station_*")):
            try:
                logs.debug("processing " + f)
                (netCode, staCode) = f.split("/station_")[-1].split('_', 1)
                try:
                    kf = Keyfile(f)
                except IOError as e:
                    logs.error(str(e))
                    continue

                existingStations.add((netCode, staCode))

                if netCode not in existingNetworks:
                    logs.warning("network %s does not exist, ignoring station %s" %
                                 (netCode, staCode))
                    continue

                if not hasattr(kf, "latitude") or not kf.latitude:
                    logs.warning("missing latitude for %s %s" % (netCode, staCode))
                    continue

                if not hasattr(kf, "longitude") or not kf.longitude:
                    logs.warning("missing longitude for %s %s" % (netCode, staCode))
                    continue

                if not hasattr(kf, "elevation") or not kf.elevation:
                    logs.warning("missing elevation for %s %s" % (netCode, staCode))
                    continue

                if not hasattr(kf, "depth1") or not kf.depth1:
                    logs.warning("missing depth of primary sensor for %s %s" %
                                 (netCode, staCode))
                    continue

                if decimal.Decimal(kf.latitude) == decimal.Decimal("0.0") and \
                        decimal.Decimal(kf.longitude) == decimal.Decimal("0.0"):
                    logs.warning("missing coordinates for %s %s" % (netCode, staCode))
                    continue

                if not hasattr(kf, "orientation1") or not kf.orientation1:
                    logs.warning("missing orientation of primary sensor for %s %s, using default" %
                                 (netCode, staCode))
                    kf.orientation1 = "Z 0.0 -90.0; N 0.0 0.0; E 90.0 0.0"

                if not hasattr(kf, "orientation2"):
                    kf.orientation2 = ""

                if not hasattr(kf, "unit1") or not kf.unit1:
                    logs.warning("missing unit of primary sensor for %s %s, using M/S" %
                                 (netCode, staCode))
                    kf.unit1 = "M/S"

                if not hasattr(kf, "unit2"):
                    logs.warning("missing unit of secondary sensor for %s %s, using M/S**2" %
                                 (netCode, staCode))
                    kf.unit2 = "M/S**2"

                if not hasattr(kf, "type"):
                    kf.type = ""

                restricted = False  # TODO: Make restricted part of the key file
                if not inventory.updateStation(netCode, staCode, restricted, kf):
                    try:
                        incNet = incompleteResponse[netCode]
                    except KeyError:
                        incNet = set()
                        incompleteResponse[netCode] = incNet

                    incNet.add(staCode)
            except ValueError as e:
                logs.error("%s: %s" % (f, str(e)))

        for (netCode, restricted) in networkRestricted.items():
            inventory.setNetworkRestricted(netCode, restricted)

        for (netCode, network) in inventory.networks.items():
            if netCode not in existingNetworks:
                logs.notice("deleting network %s from inventory" % (netCode,))
                inventory.obj.remove(network.obj)

        for ((netCode, staCode), station) in inventory.stations.items():
            if netCode in existingNetworks and (netCode, staCode) not in existingStations:
                logs.notice("deleting station %s_%s from inventory" % (netCode, staCode))
                inventory.networks[netCode].obj.remove(station.obj)

        if incompleteResponse:
            logs.info("The following stations are missing full response data")
            logs.info("Use dlsv2inv if needed")

            # for netCode in sorted(incompleteResponse.keys()):
            #     logs.info("%s: %s" % (netCode, " ".join(sorted(list(incompleteResponse[netCode])))))
            tmpDict = sortDictionary(incompleteResponse)
            for netCode in list(tmpDict.keys()):
                tmpSortedList = list(tmpDict[netCode])
                tmpSortedList.sort()
                logs.info("%s: %s" % (netCode, " ".join(tmpSortedList)))

        ar = seiscomp3.IO.XMLArchive()
        if not self.output:
            sys.stderr.write("Writing output to stdout\n")
            if not ar.create("-"):
                sys.stderr.write("Cannot open stdout\n")
                return False
        else:
            sys.stderr.write("Writing output to %s\n" % self.output)
            if not ar.create(self.output):
                sys.stderr.write("Cannot open %s\n" % self.output)
                return False

        ar.setFormattedOutput(self.commandline().hasOption("formatted"))
        ar.writeObject(sc3Inv)

    except Exception:
        logs.print_exc()

    return True
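# Hedged illustration of the zero-coordinate test in run() above: comparing
# via decimal.Decimal treats "0", "0.0" and "0.00" as the same missing value,
# which plain string comparison would not.
def _example_zero_coords():
    import decimal
    assert decimal.Decimal("0.00") == decimal.Decimal("0.0")
    assert decimal.Decimal("52.5") != decimal.Decimal("0.0")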
    if form:
        for k in form.keys():
            if k in multipar:
                parameters[k] = form.getlist(k)
            else:
                parameters[k] = form.getfirst(k)

    logs.debug('parameters: %s' % (parameters,))

    body = []
    # body.extend(["%s: %s" % (key, value)
    #              for key, value in environ.iteritems()])
    # status = '200 OK'
    # return send_plain_response(status, body, start_response)

    logs.debug('Calling %s' % action)

    try:
        res_string = action(environ, parameters)
    except PlsRedirect as redir:
def start(): """ Checks request spool directory for files => iterating and processing """ while True: names = set() checklist = [ f for f in os.listdir(os.path.join(SPOOL_DIR, "check")) if os.path.isfile(os.path.join(SPOOL_DIR, "check", f)) and not f.endswith("_checking") ] if not checklist: break for fname in checklist: fname = os.path.join(SPOOL_DIR, "check", fname) basename = os.path.basename(fname) m = re.match("^.+/(?P<req_name>.+)[_](?P<breq_id>\w+[_]\d+)$", fname) if m: (req_name, breq_id) = (m.group("req_name"), m.group("breq_id")) if req_name in names: continue names.add(req_name) sys.stderr.write("working on: %s %s\n" % (req_name, breq_id)) else: os.rename(fname, fname.replace("_checking", "_fail")) logs.error("Parsing of Breq_fast name and ID in %s failed" % fname) continue ### redirect the logging output to a logfile ### set_logger( os.path.join(BREQ_DIR, req_name, breq_id, "breq_mail.log")) ### mark the processed file with suffix _checking ### logs.debug("checking file %s" % fname) os.rename(fname, "_".join((fname, "checking"))) fname = "_".join((fname, "checking")) logs.debug("rename file in %s" % fname) ### parse the original breq_fast email ### email = os.path.join(BREQ_DIR, req_name, breq_id, "breq_mail.org") parser = BreqParser() parser.parse_email(email) logs.debug("parsing email %s" % email) ### create the response email message after checking this email ### emailmsg = check_request(email, basename, parser) emailmsg = "%s\n\nThis request has the request ID: %s_%s\n\n%s\n" % ( emailmsg, req_name, breq_id, _emailextro) emailmsg = "%s\n\nbreq_fast request header:\n%s" % (emailmsg, parser.head) emailmsg = "%s\nbreq_fast request lines:%s\n" % (emailmsg, parser.request) emailaddr = EMAIL_ADDR try: emailaddr = parser.tokendict["email"] except KeyError: pass errorstate = False if os.path.exists( os.path.join(SPOOL_DIR, "make", basename + "_running")): ### email was sent before crash, don't send it again logs.debug("email notification was already sent") os.unlink( os.path.join(SPOOL_DIR, "make", basename + "_running")) errorstate = True else: submit_email( emailaddr, "breq_fast request %s_%s checked" % (req_name, breq_id), emailmsg) logs.debug("email submitted with message: %s" % emailmsg) ### mark the processed file with suffix _done and move it to the check/done-dir in SPOOL_DIR ### shutil.move( fname, os.path.join(SPOOL_DIR, "check", "done", basename + "_done")) logs.debug("move file %s to check/done dir" % fname) fname = os.path.join(SPOOL_DIR, "make", basename) logs.debug("now look for file %s" % fname) if (os.path.exists(fname)): ### mark the processed file with suffix _running ### os.rename(fname, "_".join((fname, "running"))) fname = "_".join((fname, "running")) logs.debug("rename file in %s" % fname) try: ### submit the request to arclink server ### emailmsg = submit_request(parser, req_name, breq_id) ### submit the email containing the processing status of the Breq_fast request submit_email( emailaddr, "breq_fast request %s_%s processed" % (req_name, breq_id), emailmsg) logs.debug("email submitted with message: %s" % emailmsg) except (ArclinkError, socket.error), e: logs.error("quit processing: " + str(e)) if not errorstate: #submit_email("admin", "breqfast failure", str(e)) pass shutil.move( os.path.join(SPOOL_DIR, "check", "done", basename + "_done"), os.path.join(SPOOL_DIR, "check", basename)) break if errorstate: #submit_email("admin", "breqfast OK", "") pass ### mark the processed file with suffix _done and move it to the make/done-dir in SPOOL_DIR ### 
shutil.move( fname, os.path.join(SPOOL_DIR, "make", "done", basename + "_done")) logs.debug("move file %s in make/done dir" % fname)
def start(): """ Checks request spool directory for files => iterating and processing """ for fname in os.listdir(os.path.join(SPOOL_DIR, "check")): fname = os.path.join(SPOOL_DIR, "check", fname) if os.path.isfile(fname) and not fname.endswith("_checking"): basename = os.path.basename(fname) m = re.match("^.+/(?P<req_name>.+)[_](?P<breq_id>\w+[_]\d+)$", fname) if m: (req_name, breq_id) = (m.group("req_name"), m.group("breq_id")) sys.stderr.write("working on: %s %s\n" % (req_name, breq_id)) else: os.rename(fname, fname.replace("_checking", "_fail")) logs.error("Parsing of Breq_fast name and ID in %s failed" % fname) continue ### redirect the logging output to a logfile ### set_logger( os.path.join(BREQ_DIR, req_name, breq_id, "breq_mail.log")) ### mark the processed file with suffix _checking ### logs.debug("checking file %s" % fname) os.rename(fname, "_".join((fname, "checking"))) fname = "_".join((fname, "checking")) logs.debug("rename file in %s" % fname) ### parse the original breq_fast email ### email = os.path.join(BREQ_DIR, req_name, breq_id, "breq_mail.org") parser = BreqParser() parser.parse_email(email) logs.debug("parsing email %s" % email) ### create the response email message after checking this email ### emailmsg = check_request(email, basename, parser) emailmsg = "%s\n\nThis request has the request ID: %s_%s\n\n%s\n" % ( emailmsg, req_name, breq_id, _emailextro) emailmsg = "%s\n\nbreq_fast request header:\n%s" % (emailmsg, parser.head) emailmsg = "%s\nbreq_fast request lines:%s\n" % (emailmsg, parser.request) emailaddr = EMAIL_ADDR try: emailaddr = parser.tokendict["email"] except KeyError: pass submit_email( emailaddr, "breq_fast request %s_%s checked" % (req_name, breq_id), emailmsg) logs.debug("email submitted with message: %s" % emailmsg) ### mark the processed file with suffix _done and move it to the check/done-dir in SPOOL_DIR ### shutil.move( fname, os.path.join(SPOOL_DIR, "check", "done", basename + "_done")) logs.debug("move file %s to check/done dir" % fname) fname = os.path.join(SPOOL_DIR, "make", basename) logs.debug("now look for file %s" % fname) if (os.path.exists(fname)): ### mark the processed file with suffix _running ### os.rename(fname, "_".join((fname, "running"))) fname = "_".join((fname, "running")) logs.debug("rename file in %s" % fname) ### submit the request to arclink server ### emailmsg = submit_request(parser, req_name, breq_id) ### submit the email containing the processing status of the Breq_fast request submit_email( emailaddr, "breq_fast request %s_%s processed" % (req_name, breq_id), emailmsg) logs.debug("email submitted with message: %s" % emailmsg) ### mark the processed file with suffix _done and move it to the make/done-dir in SPOOL_DIR ### shutil.move( fname, os.path.join(SPOOL_DIR, "make", "done", basename + "_done")) logs.debug("move file %s in make/done dir" % fname)
def process_options():
    parser = OptionParser(usage="usage: %prog [-h|--help] [OPTIONS] -u USER -o OUTPUTFILE REQUEST",
                          version="%prog v" + VERSION,
                          add_help_option=False)

    parser.set_defaults(address="webdc.eu:18001",
                        request_format="native",
                        data_format="mseed",
                        no_resp_dict=False,
                        rebuild_volume=False,
                        proxymode=False,
                        timeout=300,
                        retries=5,
                        SSLpasswordFile="dcidpasswords.txt")

    parser.add_option("-h", "--help", action="store_true", dest="showhelp",
                      default=False)

    parser.add_option("-l", "--longhelp", action="store_true", dest="showlonghelp",
                      default=False)

    parser.add_option("-w", "--password-file", type="string", dest="SSLpasswordFile",
                      help="file containing passwords used for decryption of "
                           "encrypted data (default %default)")

    parser.add_option("-a", "--address", type="string", dest="address",
                      help="address of primary ArcLink node (default %default)")

    foptions = ("native", "breqfast")
    parser.add_option("-f", "--request-format", type="choice", dest="request_format",
                      choices=foptions,
                      help="request format: breqfast, native (default %default)")

    koptions = ("mseed", "mseed4k", "fseed", "dseed", "inv", "inventory")
    parser.add_option("-k", "--data-format", type="choice", dest="data_format",
                      choices=koptions,
                      help="data format: mseed, mseed4k, fseed, dseed, "
                           "inv[entory] (default %default)")

    parser.add_option("-n", "--no-resp-dict", action="store_true", dest="no_resp_dict",
                      help="avoid using response dictionary (default %default)")

    parser.add_option("-g", "--rebuild-volume", action="store_true", dest="rebuild_volume",
                      help="rebuild SEED volume (default %default)")

    parser.add_option("-p", "--proxy", action="store_true", dest="proxymode",
                      help="proxy mode, no routing (default %default)")

    parser.add_option("-t", "--timeout", type="int", dest="timeout",
                      help="timeout in seconds (default %default)")

    parser.add_option("-x", "--retries", type="int", dest="retries",
                      help="download retries (default %default)")

    parser.add_option("-v", action="callback", callback=add_verbosity,
                      help="increase verbosity level")

    parser.add_option("-q", action="callback", callback=add_quietness,
                      help="decrease verbosity level")

    parser.add_option("-u", "--user", type="string", dest="user",
                      help="user's e-mail address")

    parser.add_option("-o", "--output-file", type="string", dest="output_file",
                      help="file where downloaded data is written")

    (options, args) = parser.parse_args()

    if options.showhelp or options.showlonghelp:
        parser.print_help()

        if options.showlonghelp:
            print """
About ArcLink Protocol
======================

ArcLink is a protocol used to request distributed archive seismological
data. Today it gives you access to several European data archives
(European Integrated Data Archive - EIDA) that are supporting the
protocol developed by GEOFON ([email protected]) at the
GeoForschungsZentrum, Potsdam, Germany.

You can find more information about it at the SeisComP3 and GEOFON web
pages:

 * http://www.seiscomp3.org/
 * http://geofon.gfz-potsdam.de/

ArcLink Password File (for decryption)
======================================

In this file (default: dcidpasswords.txt) you can store your private
passwords given by different data centers. Each data center that you
request encrypted data from will send you a different password. The
format of the file is really simple: just the data center ID followed by
the password that you received, one data center ID and password per line.
Any empty lines or lines starting with # are ignored.

Example:

gfz password1
odc password2
ipgp password3

The data center ID and password can be found on the automatically
generated e-mail that you received from each data center. (You will only
receive this email if you have been authorized to download encrypted
data, and you have tried to download it.)

Input File Format
=================

The ArcLink Fetch program supports two different input formats for the
request file: the traditional BREQ FAST format, and its own native
format. Both formats contain the same information and differ only
slightly.

Native Format:
--------------

The native format has the following layout:

YYYY,MM,DD,HH,MM,SS YYYY,MM,DD,HH,MM,SS Network Station Channel [Location]

Channel, Station and Location can contain wildcards (*) and the Location
field is optional. To match all locations please use the '*' symbol.

Example:

2010,02,18,12,00,00 2010,02,18,12,10,00 GE WLF BH*
2010,02,18,12,00,00 2010,02,18,12,10,00 GE VSU BH* 00

BREQ FAST Format:
-----------------

The BREQ FAST format is a standard format used in seismology to request
data. Each header line starts with '.' and the request lines have the
following format:

Station Network {Time Start} {Time End} {Number of Channels} N x Channels Location

The time specification should have the following format:

YYYY MM DD HH MM SS.TTTT

Please read more about the BREQ FAST format at:
http://www.iris.edu/manuals/breq_fast.htm
"""
            sys.exit()

        sys.exit()

    errors = []
    warnings = []

    if options.user is None:
        errors.append("Username required")

    if options.output_file is None:
        errors.append("Output file required")

    if options.data_format.upper() != "FSEED" and options.rebuild_volume:
        errors.append("-g is only applicable to FSEED format")

    if len(args) == 0:
        errors.append("No request file supplied")
    else:
        for reqfile in args:
            if not os.path.exists(reqfile):
                errors.append("Request file '%s' not found." % reqfile)

    SSLpasswordDict = {}
    if os.path.exists(options.SSLpasswordFile):
        fd = open(options.SSLpasswordFile)
        line = fd.readline()
        while line:
            line = line.strip()
            if line and line[0] != "#":
                try:
                    (dcid, password) = line.split()
                    SSLpasswordDict[dcid] = password
                except ValueError:
                    logs.error(options.SSLpasswordFile + " invalid line: " + line)
                    fd.close()
                    sys.exit()

            line = fd.readline()
        fd.close()
    else:
        if options.SSLpasswordFile != parser.defaults['SSLpasswordFile']:
            errors.append("Supplied password file (%s) not found" %
                          options.SSLpasswordFile)
        else:
            warnings.append("Default password file (%s) not found" %
                            options.SSLpasswordFile)

    if len(errors) > 0:
        logs.error("\n** ArcLink Fetch %s **\n" % VERSION)
        parser.print_usage()
        logs.error("Errors detected on the command line:")
        for item in errors:
            logs.error("\t%s" % item)
        print ""

    if len(warnings) > 0:
        logs.debug("Warnings detected on the command line:")
        for item in warnings:
            logs.debug("\t%s" % item)
        print ""

    if len(errors) > 0:
        sys.exit()

    return (SSLpasswordDict, options.address, options.request_format,
            options.data_format, not options.no_resp_dict,
            options.rebuild_volume, options.proxymode, options.user,
            options.timeout, options.retries, options.output_file, args[0])
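# Hedged, stand-alone version of the dcidpasswords.txt parsing done in
# process_options() above: one "dcid password" pair per line; blank lines
# and '#' comments yield None.  E.g. "gfz password1" -> ('gfz', 'password1').
def _example_parse_password_line(line):
    line = line.strip()
    if not line or line[0] == "#":
        return None
    (dcid, password) = line.split()
    return (dcid, password)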
def start(): """ Checks request spool directory for files => iterating and processing """ while True: names = set() checklist = [ f for f in os.listdir(os.path.join(SPOOL_DIR, "check")) if os.path.isfile(os.path.join(SPOOL_DIR, "check", f)) and not f.endswith("_checking") ] if not checklist: break for fname in checklist: fname = os.path.join(SPOOL_DIR, "check", fname) basename = os.path.basename(fname) m = re.match("^.+/(?P<req_name>.+)[_](?P<breq_id>\w+[_]\d+)$", fname) if m: (req_name, breq_id) = (m.group("req_name"), m.group("breq_id")) if req_name in names: continue names.add(req_name) sys.stderr.write("working on: %s %s\n" % (req_name, breq_id)) else: os.rename(fname, fname.replace("_checking", "_fail")) logs.error("Parsing of Breq_fast name and ID in %s failed" % fname) continue ### redirect the logging output to a logfile ### set_logger(os.path.join(BREQ_DIR, req_name, breq_id, "breq_mail.log")) ### mark the processed file with suffix _checking ### logs.debug("checking file %s" % fname) os.rename(fname, "_".join((fname, "checking"))) fname = "_".join((fname, "checking")) logs.debug("rename file in %s" % fname) ### parse the original breq_fast email ### email = os.path.join(BREQ_DIR, req_name, breq_id, "breq_mail.org") parser = BreqParser() parser.parse_email(email) logs.debug("parsing email %s" % email) ### create the response email message after checking this email ### emailmsg = check_request(email, basename, parser) emailmsg = "%s\n\nThis request has the request ID: %s_%s\n\n%s\n" % ( emailmsg, req_name, breq_id, _emailextro, ) emailmsg = "%s\n\nbreq_fast request header:\n%s" % (emailmsg, parser.head) emailmsg = "%s\nbreq_fast request lines:%s\n" % (emailmsg, parser.request) emailaddr = EMAIL_ADDR try: emailaddr = parser.tokendict["email"] except KeyError: pass errorstate = False if os.path.exists(os.path.join(SPOOL_DIR, "make", basename + "_running")): ### email was sent before crash, don't send it again logs.debug("email notification was already sent") os.unlink(os.path.join(SPOOL_DIR, "make", basename + "_running")) errorstate = True else: submit_email(emailaddr, "breq_fast request %s_%s checked" % (req_name, breq_id), emailmsg) logs.debug("email submitted with message: %s" % emailmsg) ### mark the processed file with suffix _done and move it to the check/done-dir in SPOOL_DIR ### shutil.move(fname, os.path.join(SPOOL_DIR, "check", "done", basename + "_done")) logs.debug("move file %s to check/done dir" % fname) fname = os.path.join(SPOOL_DIR, "make", basename) logs.debug("now look for file %s" % fname) if os.path.exists(fname): ### mark the processed file with suffix _running ### os.rename(fname, "_".join((fname, "running"))) fname = "_".join((fname, "running")) logs.debug("rename file in %s" % fname) try: ### submit the request to arclink server ### emailmsg = submit_request(parser, req_name, breq_id) ### submit the email containing the processing status of the Breq_fast request submit_email(emailaddr, "breq_fast request %s_%s processed" % (req_name, breq_id), emailmsg) logs.debug("email submitted with message: %s" % emailmsg) except (ArclinkError, socket.error), e: logs.error("quit processing: " + str(e)) if not errorstate: # submit_email("admin", "breqfast failure", str(e)) pass shutil.move( os.path.join(SPOOL_DIR, "check", "done", basename + "_done"), os.path.join(SPOOL_DIR, "check", basename), ) break if errorstate: # submit_email("admin", "breqfast OK", "") pass ### mark the processed file with suffix _done and move it to the make/done-dir in SPOOL_DIR ### 
shutil.move(fname, os.path.join(SPOOL_DIR, "make", "done", basename + "_done")) logs.debug("move file %s in make/done dir" % fname)