Example #1
def show_status(request):
    try:
        logs.info(" datacenter name: " + request.dcname)
        rqstat = request.status()
    except ArclinkError, e:
        logs.error(str(e))
        return
Example #2
def loadGains(fileName):
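    # Loads a gain table file: each non-blank, non-comment line must contain
    # deviceName, deviceIdPattern, streamPattern and gain. Entries are appended to
    # the global gainTable dict (defined elsewhere) keyed by device name; an I/O
    # or parse error is logged with logs.error() and terminates the program.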
    try:
        fd = open(fileName)
        try:
            line = fd.readline()
            lineno = 0
            while line:
                line = line.strip()
                lineno += 1

                if not line or line[0] == '#':
                    line = fd.readline()
                    continue

                try:
                    (deviceName, deviceIdPattern, streamPattern, gain) = line.split()
                    if deviceName in gainTable:
                        gainTable[deviceName].append((streamPattern, deviceIdPattern,
                            float(gain)))
                    else:
                        gainTable[deviceName] = [ (streamPattern, deviceIdPattern,
                            float(gain)) ]

                except (TypeError, ValueError):
                    logs.error("%s:%d: parse error" % (fileName, lineno))
                    sys.exit(1)

                line = fd.readline()

        finally:
            fd.close()

    except IOError, e:
        logs.error("cannot open %s: %s" % (fileName, str(e)))
        sys.exit(1)
Example #3
    def __get_status(self, server, user, user_ip, req_id, start=0, count=100):
        try:
            (host, port) = server.split(':')
            port = int(port)

        except ValueError:
            logs.error("invalid server address in network XML: %s" % server)
            raise wsgicomm.WIInternalError, "invalid server address"

        try:
            arcl = Arclink()
            arcl.open_connection(host,
                                 port,
                                 user,
                                 user_ip=user_ip,
                                 timeout=self.status_timeout)

            try:
                #new version requires an arclink update to support pagination
                #status = arcl.get_status(req_id, start, count)
                status = arcl.get_status(req_id)
                status.request = status.request[:count]
                return status

            finally:
                arcl.close_connection()

        except (ArclinkError, socket.error) as e:
            raise wsgicomm.WIServiceError, str(e)
Example #4
def parse_native(req, input_file):
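    # Parses an ArcLink native request file and feeds each valid line to req.add().
    # A request line has the form
    # "start_time end_time network [station [channel [location]]] [key=value ...]",
    # with both times given as comma-separated integer fields; unparsable lines are
    # logged and skipped, while malformed key=value arguments raise ArclinkHandlerError.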
    fd = open(input_file)
    try:
        rqline = fd.readline()
        while rqline:
            rqline = rqline.strip()
            if not rqline:
                rqline = fd.readline()
                logs.debug("skipping empty request line")
                continue

            rqsplit = rqline.split()
            if len(rqsplit) < 3:
                logs.error("invalid request line: '%s'" % (rqline, ))
                rqline = fd.readline()
                continue

            try:
                start_time = datetime.datetime(
                    *map(int, rqsplit[0].split(",")))
                end_time = datetime.datetime(*map(int, rqsplit[1].split(",")))
            except ValueError, e:
                logs.error("syntax error (%s): '%s'" % (str(e), rqline))
                rqline = fd.readline()
                continue

            network = rqsplit[2]
            station = "."
            channel = "."
            location = "."

            i = 3
            if len(rqsplit) > 3 and rqsplit[3] != ".":
                station = rqsplit[3]
                i += 1
                if len(rqsplit) > 4 and rqsplit[4] != ".":
                    channel = rqsplit[4]
                    i += 1
                    if len(rqsplit) > 5 and rqsplit[5] != ".":
                        location = rqsplit[5]
                        i += 1

            while len(rqsplit) > i and rqsplit[i] == ".":
                i += 1

            constraints = {}
            for arg in rqsplit[i:]:
                pv = arg.split('=', 1)
                if len(pv) != 2:
                    raise ArclinkHandlerError, "invalid request syntax"

                constraints[pv[0]] = pv[1]

            req.add(network, station, channel, location, start_time, end_time,
                    constraints)

            rqline = fd.readline()

    finally:
        fd.close()
Example #5
def show_status(request):
    try:
        logs.info("datacenter name: " + request.dcname)
        rqstat = request.status()
    except ArclinkError, e:
        logs.error(str(e))
        return
Example #6
def parseDate(datestr):
    m = _rx_datetime.match(datestr)
    if not m:
        logs.error("invalid date: " + datestr)
        return (seiscomp3.Core.Time(1980, 1, 1, 0, 0, 0), "1980-01-01T00:00:00.0000Z")

    try:
        year = int(m.group("year"))
        (month, mday) = _dy2mdy(int(m.group("doy")), year)

        if m.group("hour"):
            hour = int(m.group("hour"))
            minute = int(m.group("minute"))
        else:
            hour = 0
            minute = 0

        if m.group("second"):
            second = int(m.group("second"))
        else:
            second = 0

        coretime = seiscomp3.Core.Time(year, month, mday, hour, minute, second)

    except (TypeError, ValueError, IndexError):
        logs.error("invalid date: " + datestr)
        return (seiscomp3.Core.Time(1980, 1, 1, 0, 0, 0), "1980-01-01T00:00:00.0000Z")

    return (coretime, coretime.toString("%Y-%m-%dT%H:%M:%S.%fZ"))
Example #7
def parseDate(datestr):
    m = _rx_datetime.match(datestr)
    if not m:
        logs.error("invalid date: " + datestr)
        return (seiscomp3.Core.Time(1980, 1, 1, 0, 0, 0), "1980-01-01T00:00:00.0000Z")

    try:
        year = int(m.group("year"))
        (month, mday) = _dy2mdy(int(m.group("doy")), year)

        if m.group("hour"):
            hour = int(m.group("hour"))
            minute = int(m.group("minute"))
        else:
            hour = 0
            minute = 0

        if m.group("second"):
            second = int(m.group("second"))
        else:
            second = 0

        coretime = seiscomp3.Core.Time(year, month, mday, hour, minute, second)

    except (TypeError, ValueError, IndexError):
        logs.error("invalid date: " + datestr)
        return (seiscomp3.Core.Time(1980, 1, 1, 0, 0, 0), "1980-01-01T00:00:00.0000Z")

    return (coretime, coretime.toString("%Y-%m-%dT%H:%M:%S.%fZ"))
Example #8
File: request.py Project: Fran89/webdc3
    def __get_status(self, server, user, user_ip, req_id, start=0, count=100):
        try:
            (host, port) = server.split(':')
            port = int(port)

        except ValueError:
            logs.error("invalid server address in network XML: %s" % server)
            raise wsgicomm.WIInternalError, "invalid server address"

        try:
            arcl = Arclink()
            arcl.open_connection(host, port, user, user_ip=user_ip,
                timeout=self.status_timeout)

            try:
                #new version requires an arclink update to support pagination
                #status = arcl.get_status(req_id, start, count)
                status = arcl.get_status(req_id)
                status.request = status.request[:count]
                return status

            finally:
                arcl.close_connection()

        except (ArclinkError, socket.error) as e:
            raise wsgicomm.WIServiceError, str(e)
Example #9
    def scan_cha(d):
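        # Scans one SDS channel directory (nets and timespan come from the
        # enclosing scope): records (network, year) pairs, remembers the newest
        # file per location code, then reads the last miniSEED record of each such
        # file to advance the matching timespan's start, or drops the timespan
        # once it is already fully covered.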
        last_file = {}

        for f in os.listdir(d):
            try:
                (net, sta, loc, cha, ext, year, doy) = f.split('.')
                nets.add((net, int(year)))

            except ValueError:
                logs.error("invalid SDS file:" + p, True)
                continue

            if (net, sta, loc, cha) not in timespan:
                continue

            try:
                if doy > last_file[loc][0]:
                    last_file[loc] = (doy, f)

            except KeyError:
                last_file[loc] = (doy, f)

        for (loc, (doy, f)) in last_file.items():
            with open(d + '/' + f, 'rb') as fd:
                nslc = tuple(f.split('.')[:4])
                rec = mseedlite.Record(fd)
                fd.seek(-rec.size, 2)
                rec = mseedlite.Record(fd)
                ts = timespan[nslc]

                if ts.start < rec.end_time < ts.end:
                    ts.start = rec.end_time

                elif rec.end_time >= ts.end:
                    del timespan[nslc]
Example #10
def parseOrientation(orientation):
    for x in orientation.split(';'):
        try:
            (code, azimuth, dip) = x.split()
            yield (code, float(azimuth), float(dip))
        except (TypeError, ValueError):
            logs.error("error parsing orientation %s at %s" % (orientation, x))
            continue
Example #11
def parseOrientation(orientation):
    for x in orientation.split(';'):
        try:
            (code, azimuth, dip) = x.split()
            yield (code, float(azimuth), float(dip))
        except (TypeError, ValueError):
            logs.error("error parsing orientation %s at %s" % (orientation, x))
            continue
Example #12
    def run(self):
        try:
            if self.dcid is None:
                print("Please specify datacenter/archive ID", file=sys.stderr)
                return False

            nettab = Nettab(self.dcid)
            instdb = Instruments(self.dcid)

            try:
                self.__load_file(instdb.load_db, self.inst_db_file)
                self.__load_file(nettab.load_statmap, self.stat_map_file)
                self.__load_file(nettab.load_access_net, self.access_net_file)
                self.__load_file(nettab.load_access_stat,
                                 self.access_stat_file)
                self.__load_file(instdb.load_sensor_attr,
                                 self.sensor_attr_file)
                self.__load_file(instdb.load_datalogger_attr,
                                 self.datalogger_attr_file)
                self.__load_file(nettab.load_network_attr,
                                 self.network_attr_file)
                self.__load_file(nettab.load_station_attr,
                                 self.station_attr_file)

                inv = SC3Inventory(seiscomp.datamodel.Inventory())

                idx = 1
                for tab in sorted(self.tab_files):
                    print("Loading %s (%d/%d)" %
                          (tab, idx, len(self.tab_files)),
                          file=sys.stderr)
                    self.__load_file(nettab.load_tab, tab)
                    print("Generating data structures", file=sys.stderr)
                    nettab.update_inventory(instdb, inv)
                    idx = idx + 1
                    if self.isExitRequested():
                        print("Exit requested, abort", file=sys.stderr)
                        return False

                print("Generating output", file=sys.stderr)
                ar = seiscomp.io.XMLArchive()
                ar.setFormattedOutput(
                    self.commandline().hasOption("formatted"))
                ar.create(self.out_file)
                ar.writeObject(inv.obj)
                ar.close()
                print("Finished", file=sys.stderr)

            except (IOError, NettabError) as e:
                logs.error("fatal error: " + str(e))
                return False

        except Exception:
            logs.print_exc()
            return False

        return True
Example #13
def parse_breqfast(req, input_file):
    parser = BreqParser()
    parser.parse_email(input_file)
    req.content = parser.reqlist
    logs.debug("")
    if parser.failstr:
        logs.error(parser.failstr)
    else:
        logs.info("parsed %d lines from breqfast message" % len(req.content))
Example #14
def main():
    (SSLpasswordDict, addr, request_format, data_format, label, resp_dict, rebuild_volume, proxymode, user, timeout, retries, output_file, input_file, spfr) = process_options()

    try:
        ret = _main(SSLpasswordDict, addr, request_format, data_format, label, resp_dict, rebuild_volume, proxymode, user, timeout, retries, output_file, input_file, spfr)

    except ArclinkError, e:
        logs.error(str(e))
        ret = 1
Example #15
def parse_breqfast_from_handler(req, fh):
    parser = BreqParser()
    parser.parse_email_from_handler(fh)
    req.content = parser.reqlist
    logs.debug("")
    if parser.failstr:
        logs.error(parser.failstr)
    else:
        logs.info("parsed %d lines from breqfast message" % len(req.content))
Example #16
def main():
    (SSLpasswordDict, addr, request_format, data_format, label, resp_dict, rebuild_volume, proxymode, user, timeout, retries, output_file, input_file, spfr) = process_options()

    try:
        ret = _main(SSLpasswordDict, addr, request_format, data_format, label, resp_dict, rebuild_volume, proxymode, user, timeout, retries, output_file, input_file, spfr)

    except ArclinkError, e:
        logs.error(str(e))
        ret = 1
Example #17
def parse_breqfast_from_handler(req, fh):
    parser = BreqParser()
    parser.parse_email_from_handler(fh)
    req.content = parser.reqlist
    logs.debug("")
    if parser.failstr:
        logs.error(parser.failstr)
    else:
        logs.info("parsed %d lines from breqfast message" % len(req.content))
Example #18
    def next(self):
        while True:
            try:
                return Record(self.__fd)

            except MSeedError, e:
                logs.error(str(e))
                
            except MSeedNoData:
                pass
Example #19
    def next(self):
        while True:
            try:
                return Record(self.__fd)

            except MSeedNoData:
                pass

            except MSeedError, e:
                logs.error(str(e))
Example #20
    def getConfigFloat(self, name, default=None):
        try:
            return float(self.__cfg.getString(name))

        except ValueError:
            logs.error("config parameter '%s' has invalid value" % name)
            return default

        #except seiscomp3.Config.OptionNotFoundException:
        except Exception, e:
            return default
Example #21
    def getConfigFloat(self, name, default = None):
        try:
            return float(self.__cfg.getString(name))

        except ValueError:
            logs.error("config parameter '%s' has invalid value" % name)
            return default

        #except seiscomp3.Config.OptionNotFoundException:
        except Exception, e:
            return default
Example #22
def parse_native_from_handler(req, fd):
    rqline = fd.readline()
    while rqline:
        rqline = rqline.strip()
        if not rqline:
            rqline = fd.readline()
            logs.debug("skipping empty request line")
            continue
            
        rqsplit = rqline.split()
        if len(rqsplit) < 3:
            logs.error("invalid request line: '%s'" % (rqline,))
            rqline = fd.readline()
            continue

        try:
            start_time = datetime.datetime(*map(int, rqsplit[0].split(",")))
            end_time = datetime.datetime(*map(int, rqsplit[1].split(",")))
        except ValueError, e:
            logs.error("syntax error (%s): '%s'" % (str(e), rqline))
            rqline = fd.readline()
            continue

        network = rqsplit[2]
        station = "."
        channel = "."
        location = "."

        i = 3
        if len(rqsplit) > 3 and rqsplit[3] != ".":
            station = rqsplit[3]
            i += 1
            if len(rqsplit) > 4 and rqsplit[4] != ".":
                channel = rqsplit[4]
                i += 1
                if len(rqsplit) > 5 and rqsplit[5] != ".":
                    location = rqsplit[5]
                    i += 1
                    
        while len(rqsplit) > i and rqsplit[i] == ".":
            i += 1
        
        constraints = {}
        for arg in rqsplit[i:]:
            pv = arg.split('=', 1)
            if len(pv) != 2:
                raise ArclinkError, "invalid request syntax"
            
            constraints[pv[0]] = pv[1]

        req.add(network, station, channel, location, start_time, end_time,
            constraints)
        
        rqline = fd.readline()
Example #23
    def __parse_arglist(self, arglist):
        d = {}
        for arg in arglist:
            pv = arg.split('=', 1)
            if len(pv) != 2:
                logs.error("invalid request args in status: " + args)
                continue

            d[pv[0]] = pv[1]

        return d
Example #24
File: request.py Project: Fran89/webdc3
    def __parse_arglist(self, arglist):
        d = {}
        for arg in arglist:
            pv = arg.split('=', 1)
            if len(pv) != 2:
                logs.error("invalid request args in status: " + args)
                continue

            d[pv[0]] = pv[1]

        return d
Example #25
    def request_purge(self, envir, params):
        """Delete one user request at a given server.

        Input:  server          server DCID
                user            user ID
                request         request ID

        Output: true

        """
        dcid = params.get("server")
        user = params.get("user")
        req_id = params.get("request")

        if dcid is None:
            raise wsgicomm.WIClientError, "missing server"

        else:
            try:
                server = self.nodes[dcid]['address']

            except KeyError:
                raise wsgicomm.WIClientError, "invalid server"

        if user is None:
            raise wsgicomm.WIClientError, "missing user ID"

        if req_id is None:
            raise wsgicomm.WIClientError, "missing request ID"

        try:
            (host, port) = server.split(':')
            port = int(port)

        except ValueError:
            logs.error("invalid server address in network XML: %s" % server)
            raise wsgicomm.WIInternalError, "invalid server address"

        try:
            arcl = Arclink()
            arcl.open_connection(host, port, user)

        except (ArclinkError, socket.error) as e:
            raise wsgicomm.WIServiceError, str(e)

        try:
            arcl.purge(req_id)
            return json.dumps(True)

        except (ArclinkError, socket.error) as e:
            arcl.close_connection()
            raise wsgicomm.WIServiceError, str(e)
Example #26
File: request.py Project: Fran89/webdc3
    def request_purge(self, envir, params):
        """Delete one user request at a given server.

        Input:  server          server DCID
                user            user ID
                request         request ID

        Output: true

        """
        dcid = params.get("server")
        user = params.get("user")
        req_id = params.get("request")

        if dcid is None:
            raise wsgicomm.WIClientError, "missing server"

        else:
            try:
                server = self.nodes[dcid]['address']

            except KeyError:
                raise wsgicomm.WIClientError, "invalid server"

        if user is None:
            raise wsgicomm.WIClientError, "missing user ID"

        if req_id is None:
            raise wsgicomm.WIClientError, "missing request ID"

        try:
            (host, port) = server.split(':')
            port = int(port)

        except ValueError:
            logs.error("invalid server address in network XML: %s" % server)
            raise wsgicomm.WIInternalError, "invalid server address"

        try:
            arcl = Arclink()
            arcl.open_connection(host, port, user)

        except (ArclinkError, socket.error) as e:
            raise wsgicomm.WIServiceError, str(e)

        try:
            arcl.purge(req_id)
            return json.dumps(True)

        except (ArclinkError, socket.error) as e:
            arcl.close_connection()
            raise wsgicomm.WIServiceError, str(e)
Example #27
    def update(self, rate, rateDiv, orientation, datalogger, dataloggerId,
               seismometer, seismometerId, channel, depth, azimuth, dip,
               gainFreq, gainMult, gainUnit, format, restricted, inv):
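        # Fills the stream object (self.obj) with sampling, instrument and gain
        # metadata. The overall gain is taken from the instrument database when
        # possible; otherwise it falls back to the gain table via getGain() and
        # the method returns False to mark the stream as incomplete.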

        errmsg = None

        try:
            (smsn, smgain) = (seismometerId.split('/') + [None])[:2]
            (datalogger, dlgain, seismometer, smgain, gainFreq,
             gainUnit) = instdb.update_inventory(inv, datalogger, dataloggerId,
                                                 gainMult, seismometer, smsn,
                                                 smgain, rate, rateDiv)

        except NettabError as e:
            errmsg = str(e)
            dlgain = []
            smgain = []

        self.obj.setSampleRateNumerator(rate)
        self.obj.setSampleRateDenominator(rateDiv)
        self.obj.setDatalogger(datalogger)
        self.obj.setDataloggerSerialNumber(dataloggerId)
        self.obj.setDataloggerChannel(channel)
        self.obj.setSensor(seismometer)
        self.obj.setSensorSerialNumber(seismometerId)
        self.obj.setSensorChannel(channel)
        self.obj.setDepth(depth)
        self.obj.setAzimuth(azimuth)
        self.obj.setDip(dip)
        self.obj.setGainFrequency(gainFreq)
        self.obj.setGainUnit(gainUnit)
        self.obj.setFormat(format)
        self.obj.setFlags("GC")
        self.obj.setRestricted(restricted)
        self.obj.setShared(True)

        complete = True

        try:
            gain = smgain[channel] * dlgain[channel]

        except IndexError:
            complete = False
            gain = gainMult * getGain(datalogger, dataloggerId, seismometer,
                                      seismometerId, self.obj.code())

            if gain == 0 and errmsg:
                logs.error(errmsg)

        self.obj.setGain(gain)

        return complete
Example #28
    def log_print(s):
        if logstream:
            logstream.write(s + "\n")

        else:
            try:
                fh = file(fname, "a")
                try:
                    fh.write(s + "\n")
                finally:
                    fh.close()
            except OSError:
                logs.error("Log file %s could not be opened!" % fname)
Example #29
    def log_print(s):
        if logstream:
            logstream.write(s + "\n")

        else:
            try:
                fh = file(fname, "a")
                try:
                    fh.write(s + "\n")
                finally:
                    fh.close()
            except OSError:
                logs.error("Log file %s could not be opened!" % fname)
Example #30
def parsePkgstr(pkgstr):
    result = {}
    for x in pkgstr.split():
        m = _rx_pkg.match(x)
        if not m:
            logs.error("error parsing %s at %s" % (pkgstr, x))
            continue

        result[m.group('pkg')] = m.group('profile')

    if 'trunk' not in result:
        result['trunk'] = None

    return result
Example #31
def parsePkgstr(pkgstr):
    result = {}
    for x in pkgstr.split():
        m = _rx_pkg.match(x)
        if not m:
            logs.error("error parsing %s at %s" % (pkgstr, x))
            continue

        result[m.group('pkg')] = m.group('profile')

    if 'trunk' not in result:
        result['trunk'] = None

    return result
Example #32
    def getStreamInfo(self, start_time, end_time, net, sta, cha, loc):
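        # Looks up the cached stream epochs for the given NSLC code, picks the one
        # overlapping the requested time window and returns the station coordinates
        # together with an estimated request size in bytes (assuming roughly one
        # byte per compressed sample and 512-byte records); returns None otherwise.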
        try:
            stream_epochs = self.streamidx[(net, sta, cha, loc)]
        except KeyError:
            logs.error("%s,%s,%s,%s not found" % (net, sta, cha, loc))
            return None

        for stream in stream_epochs:
            try:
                station = self.stations[self.sensorsLoc[stream[0]][0]]

            except IndexError:
                logs.error("cache inconsistency")
                return None

            # stream_start = datetime.datetime(station[8], 1, 1)
            # stream_end = datetime.datetime(station[9], 1, 1) if station[9] \
            #         else datetime.datetime(2030, 1, 1)
            stream_start = stream[6]
            stream_end = stream[7] if stream[7] is not None \
                else (datetime.datetime.now() + datetime.timedelta(days=365))

            if start_time >= stream_end or end_time <= stream_start:
                continue

            result = {
                'latitude': station[5],
                'longitude': station[6],
                'elevation': station[10]
            }

            if stream[3] != 0:
                tdiff = end_time - start_time
                tdiff = tdiff.days * 86400 + tdiff.seconds
                samp = float(stream[4]) / float(stream[3])

                # assuming approximately 1 byte per sample (compressed),
                # 512 bytes record size
                bytesper = 1
                recsize = 512
                result['size'] = int(
                    recsize *
                    math.ceil(float(tdiff * samp * bytesper) / recsize))

            else:
                result['size'] = 0

            return result

        return None
Example #33
    def run(self):
        try:
            if self.dcid is None:
                print >>sys.stderr, "Please specify datacenter/archive ID"
                return False

            nettab = Nettab(self.dcid)
            instdb = Instruments(self.dcid)

            try:
                self.__load_file(instdb.load_db, self.inst_db_file)
                self.__load_file(nettab.load_statmap, self.stat_map_file)
                self.__load_file(nettab.load_access_net, self.access_net_file)
                self.__load_file(nettab.load_access_stat, self.access_stat_file)
                self.__load_file(instdb.load_sensor_attr, self.sensor_attr_file)
                self.__load_file(instdb.load_datalogger_attr, self.datalogger_attr_file)
                self.__load_file(nettab.load_network_attr, self.network_attr_file)
                self.__load_file(nettab.load_station_attr, self.station_attr_file)

                inv = SC3Inventory(DataModel.Inventory())

                idx = 1
                for tab in sorted(self.tab_files):
                    print >>sys.stderr, "Loading %s (%d/%d)" % (tab, idx, len(self.tab_files))
                    self.__load_file(nettab.load_tab, tab)
                    print >>sys.stderr, "Generating data structures"
                    nettab.update_inventory(instdb, inv)
                    idx = idx + 1
                    if self.isExitRequested():
                        print >>sys.stderr, "Exit requested, abort"
                        return False

                print >>sys.stderr, "Generating output"
                ar = IO.XMLArchive()
                ar.setFormattedOutput(self.commandline().hasOption("formatted"))
                ar.create(self.out_file)
                ar.writeObject(inv.obj)
                ar.close()
                print >>sys.stderr, "Finished"

            except (IOError, NettabError), e:
                logs.error("fatal error: " + str(e))
                return False

        except Exception:
            logs.print_exc()
            return False

        return True
Example #34
    def __load_module(self, path):
        modname = os.path.splitext(os.path.basename(path))[0].replace('.', '_')

        if modname in self.__modules:
            logs.error("'%s' is already loaded!" % modname)
            return

        try:
            mod = imp.load_source('__wi_' + modname, path)

        except:
            logs.error("Error loading '%s'" % modname)
            logs.print_exc()
            return

        self.__modules[modname] = mod.WI_Module(self)
Example #35
    def __load_module(self, path):
        modname = os.path.splitext(os.path.basename(path))[0].replace('.', '_')

        if modname in self.__modules:
            logs.error("'%s' is already loaded!" % modname)
            return

        try:
            mod = imp.load_source('__wi_' + modname, path)

        except:
            logs.error("Error loading '%s'" % modname)
            logs.print_exc()
            return

        self.__modules[modname] = mod.WI_Module(self)
Example #36
    def getStreamInfo(self, start_time, end_time, net, sta, cha, loc):
        try:
            stream_epochs = self.streamidx[(net, sta, cha, loc)]
        except KeyError:
            logs.error("%s,%s,%s,%s not found" % (net, sta, cha, loc))
            return None

        for stream in stream_epochs:
            try:
                station = self.stations[self.sensorsLoc[stream[0]][0]]

            except IndexError:
                logs.error("cache inconsistency")
                return None

            # stream_start = datetime.datetime(station[8], 1, 1)
            # stream_end = datetime.datetime(station[9], 1, 1) if station[9] \
            #         else datetime.datetime(2030, 1, 1)
            stream_start = stream[6]
            stream_end = stream[7] if stream[7] is not None \
                else (datetime.datetime.now() + datetime.timedelta(days=365))

            if start_time >= stream_end or end_time <= stream_start:
                continue

            result = {'latitude': station[5],
                      'longitude': station[6],
                      'elevation': station[10]}

            if stream[3] != 0:
                tdiff = end_time - start_time
                tdiff = tdiff.days * 86400 + tdiff.seconds
                samp = float(stream[4]) / float(stream[3])

                # assuming approximately 1 byte per sample (compressed),
                # 512 bytes record size
                bytesper = 1
                recsize = 512
                result['size'] = int(recsize * math.ceil(
                                     float(tdiff * samp * bytesper) / recsize))

            else:
                result['size'] = 0

            return result

        return None
Example #37
    def __init__(self, source):
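        # Reads a key file from a path or a file-like object. Each KEY=value line
        # becomes an instance attribute with a camelCased name, escaped quotes in
        # double-quoted values are unescaped, blank lines and comments are skipped,
        # and unparsable lines are reported via logs.error(). The file handle is
        # closed only if it was opened here.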
        if isinstance(source, basestring):
            fp = open(source)
        elif hasattr(source, "read"):
            fp = source
        else:
            raise TypeError, "invalid source"

        try:
            filename = fp.name
        except AttributeError:
            filename = '<???>'

        try:
            lineno = 0
            while True:
                line = fp.readline()
                if not line:
                    break

                lineno = lineno + 1

                line = line.strip()
                if not line or line.startswith("#"): continue

                m = _rx_keyline.match(line.strip())
                if m:
                    s = m.group('p').replace('__', '.').split('_')
                    if s[0] == '':
                        s[0] = '_'

                    k = reduce(lambda x, y: x + y[0] + y[1:].lower(), s[1:],
                               s[0].lower())

                    if m.group('q') == '"':
                        self.__dict__[k.replace('.',
                                                '_')] = m.group('v').replace(
                                                    r'\"', r'"')
                    else:
                        self.__dict__[k.replace('.', '_')] = m.group('v')

                else:
                    logs.error("%s:%d: parse error" % (filename, lineno))

        finally:
            if fp is not source:
                fp.close()
Example #38
    def close(self):
        try:
            try:
                wfd = _WaveformData()

                self.__mseed_fd.seek(0)
                for rec in MSeedInput(self.__mseed_fd):
                    wfd.add_data(rec)

                wfd.output_data(self.__fd, 0)

            except (MSeedError, SEEDError, DBError), e:
                logs.error("error reblocking Mini-SEED data: " + str(e))

        finally:
            self.__mseed_fd.close()
            self.__fd.close()
Example #39
    def close(self):
        try:
            try:
                seed_volume = SEEDVolume(self.__inv, ORGANIZATION, LABEL, self.__resp_dict)

                self.__mseed_fd.seek(0)
                for rec in mseed.Input(self.__mseed_fd):
                    seed_volume.add_data(rec)

                seed_volume.output(self.__fd)

            except (mseed.MSeedError, SEEDError, DBError), e:
                logs.error("error creating SEED volume: " + str(e))

        finally:
            self.__mseed_fd.close()
            self.__fd.close()
Example #40
File: sds.py Project: aemanov/seiscomp3
    def iterdata(self, time1, time2, net, sta, cha, loc):
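        # Yields raw miniSEED records for the requested time window and stream,
        # reading from the ISO archive and/or the SDS archive and near-real-time
        # directories. After each source the start time is moved just past the
        # last delivered record so the next source does not repeat the same data.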
        lasttime = None
        lastrecord = None
        recstream = []

        if self.__is_iso(time1, time2, net, sta, os.path.exists):
            recstream.append(("isoarchive", self.isodir))
        if self.__is_sds(time1, time2, net, sta, cha, loc, self.archdir, os.path.exists, os.listdir):
            recstream.append(("sdsarchive", self.archdir))
        if self.__is_sds(time1, time2, net, sta, cha, loc, self.nrtdir, os.path.exists, os.listdir):
            recstream.append(("sdsarchive", self.nrtdir))
        
        if not recstream and self.exists_db(time1, time2, net, sta, cha, loc):
            raise TemporaryUnavailabilityException

        for (service, source) in recstream:
            if lastrecord:
                try:
                    etime = lastrecord.endTime()
                except Core.ValueException:
                    logs.warning("SDS: record.endTime() raises Core.ValueException! Resulting SEED file maybe incorrect!")
                    etime = lastrecord.startTime()
                timetuple = time.strptime(etime.toString("%Y-%m-%d %H:%M:%S"), "%Y-%m-%d %H:%M:%S")                
                lasttime = datetime.datetime(*timetuple[:6])+datetime.timedelta(seconds=1) # avoids duplicates
                if lasttime >= time2:
                    break
                time1 = lasttime
                lastrecord = None
                
            self._recstream = IO.RecordStream.Create(service)
            if not self._recstream:
                logs.error("Could not fetch recordstream service '%s'" % service)
                raise StopIteration
            
            if not self._recstream.setSource(source):
                logs.error("Could not set recordstream source '%s'" % source)
                self._recstream = None
                raise StopIteration

            logs.debug("%s %s: addStream for %s-%s" % (service, source, str(time1), str(time2)))
            self._recstream.addStream(net,sta,loc,cha,Core.Time.FromString(str(time1),"%Y-%m-%d %H:%M:%S"),
                                      Core.Time.FromString(str(time2),"%Y-%m-%d %H:%M:%S"))

            try:
                recinput = IO.RecordInput(self._recstream, Core.Array.DOUBLE, Core.Record.SAVE_RAW)
                record = recinput.next()
                
                while record:
                    yield record.raw().str()
                    lastrecord = record
                    record = recinput.next()                

            except Core.GeneralException, e:
                logs.error(e.what())
            except Exception, e:
                logs.error("SDS: Unexpected exception occured: %s" % e)
Example #41
    def run(self):
        try:
            seiscompRoot = self.commandline().unrecognizedOptions()[0]
            sys.stderr.write("root directory: %s\n" % seiscompRoot)

            try:
                DCID = self.configGetString("datacenterID")

            except:
                logs.error("datacenterID not found in global.cfg")
                return False

            networkRestricted = {}
            incompleteResponse = {}

            global instdb
            instdb = Instruments(DCID)

            self.__load_file(loadGains, os.path.join(seiscompRoot, "config", "gain.dlsv"))

            # for backwards compatibility
            self.__load_file(loadGains, os.path.join(seiscompRoot, "config", "gain.tab.out"))
            self.__load_file(loadGains, os.path.join(seiscompRoot, "config", "gain.tab"))

            try:
                self.__load_file(instdb.load_db, os.path.join(seiscompRoot, "resp", "inst.db"))
                self.__load_file(instdb.load_sensor_attr, os.path.join(seiscompRoot, "resp", "sensor_attr.csv"))
                self.__load_file(instdb.load_datalogger_attr, os.path.join(seiscompRoot, "resp", "datalogger_attr.csv"))

            except (IOError, NettabError), e:
                logs.error("fatal error: " + str(e))
                return False

            sc3Inv = seiscomp3.DataModel.Inventory()
            inventory = InventoryWrapper(sc3Inv, DCID)

            existingNetworks = set()
            existingStations = set()

            for f in glob.glob(os.path.join(seiscompRoot, "key", "network_*")):
                try:
                    logs.debug("processing " + f)
                    netCode = f.split("/network_")[-1]
                    try:
                        kf = Keyfile(f)
                    except IOError, e:
                        logs.error(str(e))
                        continue

                    existingNetworks.add(netCode)
                    networkRestricted[netCode] = False

                    inventory.updateNetwork(netCode, kf)

                except ValueError, e:
                    logs.error("%s: %s" % (f, str(e)))
Example #42
    def close(self):
        try:
            try:
                wfd = _WaveformData()

                self.__mseed_fd.seek(0)
                for rec in MSeedInput(self.__mseed_fd):
                    wfd.add_data(rec)

                wfd.output_data(self.__fd, 0)

            except (MSeedError, SEEDError, DBError), e:
                logs.error("error reblocking Mini-SEED data: " + str(e))

        finally:
            self.__mseed_fd.close()
            self.__fd.close()
Example #43
    def close(self):
        try:
            try:
                seed_volume = SEEDVolume(self.__inv, ORGANIZATION, LABEL,
                                         self.__resp_dict)

                self.__mseed_fd.seek(0)
                for rec in MSeedInput(self.__mseed_fd):
                    seed_volume.add_data(rec)

                seed_volume.output(self.__fd)

            except (MSeedError, SEEDError, DBError), e:
                logs.error("error creating SEED volume: " + str(e))

        finally:
            self.__mseed_fd.close()
            self.__fd.close()
Example #44
    def __load_nodelist(self, network_xml):
        for f in network_xml:
            try:
                tree = ET.parse(f)

            except Exception as e:
                logs.error("could not parse %s: %s" % (f, str(e)))
                continue

            root = tree.getroot()

            for e in root.findall('./node'):
                try:
                    dcid = e.attrib['dcid']
                    name = e.attrib['name']
                    addr = e.attrib['address'] + ':' + e.attrib['port']
                    self.nodes[dcid] = {'name': name, 'address': addr}
                    self.nodeaddr[addr] = dcid

                except KeyError:
                    logs.error("invalid node element in %s" % (f, ))
Example #45
def getGain(datalogger, dataloggerId, seismometer, seismometerId, streamCode):
    try:
        if datalogger == "DUMMY":
            dataloggerGain = 1.0

        else:
            for (streamPattern, dataloggerIdPattern, dataloggerGain) in gainTable[datalogger]:
                if fnmatch.fnmatch(streamCode, streamPattern) and \
                        fnmatch.fnmatch(dataloggerId, dataloggerIdPattern):
                    break

            else:
                logs.error("cannot find gain for %s, %s, %s" % (datalogger,
                                                                dataloggerId, streamCode))

                return 0

        if seismometer == "DUMMY":
            seismometerGain = 1.0

        else:
            for (streamPattern, seismometerIdPattern, seismometerGain) in gainTable[seismometer]:
                if fnmatch.fnmatch(streamCode, streamPattern) and \
                        fnmatch.fnmatch(seismometerId, seismometerIdPattern):
                    break

            else:
                logs.error("cannot find gain for %s, %s, %s" % (seismometer,
                                                                seismometerId, streamCode))

                return 0

    except KeyError, e:
        logs.error("cannot find gain for " + str(e))
        return 0
Example #46
def get_citation(nets, param, verbose):
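    # Fetches network descriptions for the given (network, year) pairs through the
    # exec_fetch helper (defined elsewhere), keying temporary networks by
    # "code_year", and prints citation guidance for the received data via
    # logs.notice(). Returns 1 if fdsnws_fetch could not be run.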
    postdata = ""
    for (net, year) in nets:
        postdata += "%s * * * %d-01-01T00:00:00Z %d-12-31T23:59:59Z\n" \
                    % (net, year, year)

    if not isinstance(postdata, bytes):
        postdata = postdata.encode('utf-8')

    try:
        proc = exec_fetch(param, postdata, verbose, True)

    except OSError as e:
        logs.error(str(e))
        logs.error("error running fdsnws_fetch")
        return 1

    net_desc = {}

    for line in proc.stdout:
        try:
            if isinstance(line, bytes):
                line = line.decode('utf-8')

            if not line or line.startswith('#'):
                continue

            (code, desc, start) = line.split('|')[:3]

            year = dateutil.parser.parse(start).year

        except (ValueError, UnicodeDecodeError) as e:
            logs.error("error parsing text format: %s" % str(e))
            continue

        if code[0] in "0123456789XYZ":
            net_desc["%s_%d" % (code, year)] = desc

        else:
            net_desc[code] = desc

    logs.notice("You received seismic waveform data from the following "
                "network(s):")

    for code in sorted(net_desc):
        logs.notice("%s %s" % (code, net_desc[code]))

    logs.notice("\nAcknowledgment is extremely important for network operators\n"
                "providing open data. When preparing publications, please\n"
                "cite the data appropriately. The FDSN service at\n\n"
                "    http://www.fdsn.org/networks/citation/?networks=%s\n\n"
                "provides a helpful guide based on available network\n"
                "Digital Object Identifiers.\n"
                % "+".join(sorted(net_desc)))
Example #47
def parseSampling(sampling):
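    # Generator expanding a sampling description string. The optional preamble
    # before '_' sets the compression level (F), location code (L) and instrument
    # code (T); every '/'-separated entry after it is matched against _rx_samp and
    # yields the channel code, location code, the sample rate as a rational number
    # (via _rational) and the Steim compression format.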
    compressionLevel = "2"
    instrumentCode = "H"
    locationCode = ""
    endPreamble = sampling.find('_')
    if endPreamble > 0:
        for x in sampling[:endPreamble].split('/'):
            if x[0] == 'F':
                compressionLevel = x[1:]
            elif x[0] == 'L':
                locationCode = x[1:]
            elif x[0] == 'T':
                instrumentCode = x[1:]
            else:
                logs.warning("unknown code %s in %s" % (x[0], sampling))

    if not sampling[endPreamble+1:]:
        return

    for x in sampling[endPreamble+1:].split('/'):
        m = _rx_samp.match(x)
        if not m:
            logs.error("error parsing sampling %s at %s" % (sampling, x))
            continue

        try:
            sampleRate = decimal.Decimal(m.group('sampleRate'))
        except decimal.InvalidOperation:
            logs.error("error parsing sampling %s at %s" % (sampling, x))
            continue

        bandCode = m.group('bandCode')
        if not bandCode:
            if sampleRate >= 80:
                bandCode = 'H'
            elif sampleRate >= 40:
                bandCode = 'S'
            elif sampleRate > 1:
                bandCode = 'B'
            elif sampleRate == 1:
                bandCode = 'L'
            elif sampleRate == decimal.Decimal("0.1"):
                bandCode = 'V'
            elif sampleRate == decimal.Decimal("0.01"):
                bandCode = 'U'
            else:
                logs.error(
                    "could not determine band code for %s in %s" % (x, sampling))
                continue

        yield ((bandCode + instrumentCode, locationCode) +
               _rational(sampleRate) + ("Steim" + compressionLevel,))
Example #48
    def __parse_req_line(self, data):
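        # Parses a single ArcLink request line into a RequestLine object. Missing
        # station, channel and location fields default to the "." wildcard and any
        # trailing key=value arguments are decoded with __parse_arglist; None is
        # returned if the line is empty or syntactically invalid.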
        rqline = str(data).strip()
        if not rqline:
            logs.error("empty request line")
            return None

        rqsplit = rqline.split()
        if len(rqsplit) < 3:
            logs.error("invalid request line: %s" % (rqline, ))
            return None

        try:
            start_time = datetime.datetime(*map(int, rqsplit[0].split(",")))
            end_time = datetime.datetime(*map(int, rqsplit[1].split(",")))

        except ValueError as e:
            logs.error("syntax error (%s): '%s'" % (str(e), rqline))
            return None

        network = rqsplit[2]
        station = "."
        channel = "."
        location = "."

        i = 3
        if len(rqsplit) > 3 and rqsplit[3] != ".":
            station = rqsplit[3]
            i += 1
            if len(rqsplit) > 4 and rqsplit[4] != ".":
                channel = rqsplit[4]
                i += 1
                if len(rqsplit) > 5 and rqsplit[5] != ".":
                    location = rqsplit[5]
                    i += 1

        while len(rqsplit) > i and rqsplit[i] == ".":
            i += 1

        constraints = self.__parse_arglist(rqsplit[i:])

        return RequestLine(start_time, end_time, network, station, channel,
                           location)
Example #49
    def request_download(self, envir, params):
        """Download data.

        Input:  server          server DCID
                user            user ID
                request         request ID
                volume          volume ID (optional)

        Output: iterable datastream.

        """
        dcid = params.get("server")
        user = params.get("user")
        req_id = params.get("request")
        vol_id = params.get("volume")

        if dcid is None:
            raise wsgicomm.WIClientError, "missing server"

        else:
            try:
                server = self.nodes[dcid]['address']

            except KeyError:
                raise wsgicomm.WIClientError, "invalid server"

        if user is None:
            raise wsgicomm.WIClientError, "missing user ID"

        if req_id is None:
            raise wsgicomm.WIClientError, "missing request ID"

        try:
            (host, port) = server.split(':')
            port = int(port)

        except ValueError:
            logs.error("invalid server address in network XML: %s" % server)
            raise wsgicomm.WIInternalError, "invalid server address"

        user_ip = envir.get('REMOTE_ADDR')

        try:
            arcl = Arclink()
            arcl.open_connection(host,
                                 port,
                                 user,
                                 user_ip=user_ip,
                                 timeout=self.download_timeout)

        except (ArclinkError, socket.error) as e:
            raise wsgicomm.WIServiceError, str(e)

        try:
            status = arcl.get_status(req_id)
            meta = self.__get_meta(status, dcid, req_id, vol_id)

            if meta is None:
                arcl.close_connection()
                raise wsgicomm.WIServiceError, "request is not downloadable"

            it = arcl.iterdownload(req_id, vol_id, raw=True)
            it.filename = meta[0]
            it.content_type = meta[1]
            return it

        except (ArclinkError, socket.error) as e:
            arcl.close_connection()
            raise wsgicomm.WIServiceError, str(e)