def test_split_multiple(self):
    stream_epoch_orig = StreamEpoch(
        stream=self.stream,
        starttime=datetime.datetime(2018, 1, 1),
        endtime=datetime.datetime(2018, 1, 8))

    sub_reference_result = [
        StreamEpoch(stream=self.stream,
                    starttime=datetime.datetime(2018, 1, 1),
                    endtime=datetime.datetime(2018, 1, 2, 18)),
        StreamEpoch(stream=self.stream,
                    starttime=datetime.datetime(2018, 1, 2, 18),
                    endtime=datetime.datetime(2018, 1, 4, 12))
    ]
    reference_result = (sub_reference_result + [
        StreamEpoch(stream=self.stream,
                    starttime=datetime.datetime(2018, 1, 4, 12),
                    endtime=datetime.datetime(2018, 1, 8))
    ])

    # NOTE(damb): Use the same stream epoch both for testing the splitting
    # method as well as for task initialization. In case of just testing
    # splitting we could have used any arbitrary stream epoch for task
    # initialization.
    t = SplitAndAlignTask(self.url, stream_epoch_orig, self.query_params)
    stream_epochs = t.split(stream_epoch_orig, t.DEFAULT_SPLITTING_CONST)
    sub_stream_epochs = t.split(stream_epochs[0],
                                t.DEFAULT_SPLITTING_CONST)

    self.assertEqual(sub_stream_epochs, sub_reference_result)
    self.assertEqual(t.stream_epochs, reference_result)
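# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original test module): how a binary
# split of a time window produces the boundaries asserted above, assuming the
# task's DEFAULT_SPLITTING_CONST is 2. The helper below is hypothetical and
# only demonstrates the midpoint arithmetic with the standard library.
import datetime


def split_window(start, end, num=2):
    """Split [start, end) into ``num`` adjacent windows of equal length."""
    step = (end - start) / num
    return [(start + i * step, start + (i + 1) * step) for i in range(num)]


if __name__ == '__main__':
    # first split: 2018-01-01 -- 2018-01-08 -> boundary at 2018-01-04 12:00
    first = split_window(datetime.datetime(2018, 1, 1),
                         datetime.datetime(2018, 1, 8))
    # splitting the first half again -> boundary at 2018-01-02 18:00
    second = split_window(*first[0])
    print(first)
    print(second)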
def test_split_multiple_with_overlap(self, mock_get_temp_filepath,
                                     mock_raw_request):
    # NOTE(damb): We do not care about stream epoch splitting. We simply
    # test the task's aligning facilities.
    mock_get_temp_filepath.return_value = self.path_tempfile
    mock_raw_request.side_effect = [
        HTTP413(),
        io.BytesIO(
            b'[{"version":"1.0.0","producer":{"name":"SED","agent":"ObsPy mSEED-QC","created":"2018-01-10T18:19:25.563Z"},"station":"DAVOX","network":"CH","location":"","channel":"LHZ","num_gaps":0,"num_overlaps":2,"sum_gaps":0,"sum_overlaps":161,"max_gap":null,"max_overlap":161,"record_length":[512],"sample_rate":[1],"percent_availability":100,"encoding":["STEIM2"],"num_records":313,"start_time":"2018-01-01T00:00:00.000Z","end_time":"2018-01-02T00:00:00.000Z","format":"miniSEED","quality":"D"},{"version":"1.0.0","producer":{"name":"SED","agent":"ObsPy mSEED-QC","created":"2018-01-10T18:19:43.021Z"},"station":"DAVOX","network":"CH","location":"","channel":"LHZ","num_gaps":0,"num_overlaps":2,"sum_gaps":0,"sum_overlaps":251,"max_gap":null,"max_overlap":251,"record_length":[512],"sample_rate":[1],"percent_availability":100,"encoding":["STEIM2"],"num_records":301,"start_time":"2018-01-02T00:00:00.000Z","end_time":"2018-01-03T00:00:00.000Z","format":"miniSEED","quality":"D"}]'),  # noqa
        io.BytesIO(
            b'[{"version":"1.0.0","producer":{"name":"SED","agent":"ObsPy mSEED-QC","created":"2018-01-10T18:19:43.021Z"},"station":"DAVOX","network":"CH","location":"","channel":"LHZ","num_gaps":0,"num_overlaps":2,"sum_gaps":0,"sum_overlaps":251,"max_gap":null,"max_overlap":251,"record_length":[512],"sample_rate":[1],"percent_availability":100,"encoding":["STEIM2"],"num_records":301,"start_time":"2018-01-02T00:00:00.000Z","end_time":"2018-01-03T00:00:00.000Z","format":"miniSEED","quality":"D"}]'),  # noqa
        io.BytesIO(
            b'[{"version":"1.0.0","producer":{"name":"SED","agent":"ObsPy mSEED-QC","created":"2018-01-10T18:19:59.201Z"},"station":"DAVOX","network":"CH","location":"","channel":"LHZ","num_gaps":0,"num_overlaps":1,"sum_gaps":0,"sum_overlaps":175,"max_gap":null,"max_overlap":175,"record_length":[512],"sample_rate":[1],"percent_availability":100,"encoding":["STEIM2"],"num_records":352,"start_time":"2018-01-03T00:00:00.000Z","end_time":"2018-01-04T00:00:00.000Z","format":"miniSEED","quality":"D"},{"version":"1.0.0","producer":{"name":"SED","agent":"ObsPy mSEED-QC","created":"2018-01-11T10:16:43.104Z"},"station":"DAVOX","network":"CH","location":"","channel":"LHZ","num_gaps":0,"num_overlaps":1,"sum_gaps":0,"sum_overlaps":0,"max_gap":null,"max_overlap":0,"record_length":[512],"sample_rate":[1],"percent_availability":100,"encoding":["STEIM2"],"num_records":320,"start_time":"2018-01-04T00:00:00.000Z","end_time":"2018-01-05T00:00:00.000Z","format":"miniSEED","quality":"D"}]')  # noqa
    ]

    reference_result = json.loads(
        '[{"version":"1.0.0","producer":{"name":"SED","agent":"ObsPy mSEED-QC","created":"2018-01-10T18:19:25.563Z"},"station":"DAVOX","network":"CH","location":"","channel":"LHZ","num_gaps":0,"num_overlaps":2,"sum_gaps":0,"sum_overlaps":161,"max_gap":null,"max_overlap":161,"record_length":[512],"sample_rate":[1],"percent_availability":100,"encoding":["STEIM2"],"num_records":313,"start_time":"2018-01-01T00:00:00.000Z","end_time":"2018-01-02T00:00:00.000Z","format":"miniSEED","quality":"D"},{"version":"1.0.0","producer":{"name":"SED","agent":"ObsPy mSEED-QC","created":"2018-01-10T18:19:43.021Z"},"station":"DAVOX","network":"CH","location":"","channel":"LHZ","num_gaps":0,"num_overlaps":2,"sum_gaps":0,"sum_overlaps":251,"max_gap":null,"max_overlap":251,"record_length":[512],"sample_rate":[1],"percent_availability":100,"encoding":["STEIM2"],"num_records":301,"start_time":"2018-01-02T00:00:00.000Z","end_time":"2018-01-03T00:00:00.000Z","format":"miniSEED","quality":"D"},{"version":"1.0.0","producer":{"name":"SED","agent":"ObsPy mSEED-QC","created":"2018-01-10T18:19:59.201Z"},"station":"DAVOX","network":"CH","location":"","channel":"LHZ","num_gaps":0,"num_overlaps":1,"sum_gaps":0,"sum_overlaps":175,"max_gap":null,"max_overlap":175,"record_length":[512],"sample_rate":[1],"percent_availability":100,"encoding":["STEIM2"],"num_records":352,"start_time":"2018-01-03T00:00:00.000Z","end_time":"2018-01-04T00:00:00.000Z","format":"miniSEED","quality":"D"},{"version":"1.0.0","producer":{"name":"SED","agent":"ObsPy mSEED-QC","created":"2018-01-11T10:16:43.104Z"},"station":"DAVOX","network":"CH","location":"","channel":"LHZ","num_gaps":0,"num_overlaps":1,"sum_gaps":0,"sum_overlaps":0,"max_gap":null,"max_overlap":0,"record_length":[512],"sample_rate":[1],"percent_availability":100,"encoding":["STEIM2"],"num_records":320,"start_time":"2018-01-04T00:00:00.000Z","end_time":"2018-01-05T00:00:00.000Z","format":"miniSEED","quality":"D"}]')  # noqa

    stream_epoch_orig = StreamEpoch(
        stream=self.stream,
        starttime=datetime.datetime(2018, 1, 1),
        endtime=datetime.datetime(2018, 1, 5))

    result = WFCatalogSplitAndAlignTask(self.url, stream_epoch_orig,
                                        self.query_params)()

    data = None
    with open(result.data, 'rb') as ifd:
        data = json.loads(ifd.read().decode('utf-8'))

    self.assertEqual(data, reference_result)
    mock_raw_request.has_calls()
def test_split_missing(self, mock_get_temp_filepath, mock_raw_request,
                       mock_method_update, mock_method_eratio):
    err = HTTP500()
    mock_get_temp_filepath.return_value = self.path_tempfile
    mock_raw_request.side_effect = [
        io.BytesIO(
            b'[{"version":"1.0.0","producer":{"name":"SED","agent":"ObsPy mSEED-QC","created":"2018-01-10T18:19:25.563Z"},"station":"DAVOX","network":"CH","location":"","channel":"LHZ","num_gaps":0,"num_overlaps":2,"sum_gaps":0,"sum_overlaps":161,"max_gap":null,"max_overlap":161,"record_length":[512],"sample_rate":[1],"percent_availability":100,"encoding":["STEIM2"],"num_records":313,"start_time":"2018-01-01T00:00:00.000Z","end_time":"2018-01-02T00:00:00.000Z","format":"miniSEED","quality":"D"}]'),  # noqa
        err
    ]

    reference_result = Result.error('EndpointError',
                                    err.response.status_code,
                                    data=err.response.data,
                                    warning=str(err),
                                    extras={'type_task': ETask.SPLITALIGN})

    stream_epoch_orig = StreamEpoch(
        stream=self.stream,
        starttime=datetime.datetime(2018, 1, 1),
        endtime=datetime.datetime(2018, 1, 3))

    task = WFCatalogSplitAndAlignTask(self.url, stream_epoch_orig,
                                      self.query_params)
    result = task()

    self.assertEqual(result, reference_result)
    mock_raw_request.has_calls()
def test_extract_net_elements(self, mock_max_threads):
    mock_max_threads.return_value = 5
    mock_open = mock.mock_open(
        read_data=b'<?xml version="1.0" encoding="UTF-8"?><FDSNStationXML xmlns="http://www.fdsn.org/xml/station/1" schemaVersion="1.0"><Source>EIDA</Source><Created>2018-12-04T08:54:16.697388</Created><Network xmlns="http://www.fdsn.org/xml/station/1" code="CH" startDate="1980-01-01T00:00:00" restrictedStatus="open"><Description>National Seismic Networks of Switzerland</Description><Station code="BALST" startDate="2000-06-16T00:00:00" restrictedStatus="open"><Latitude>47.33578</Latitude><Longitude>7.69498</Longitude><Elevation>863</Elevation><Site><Name>Balsthal, SO</Name><Country>Switzerland</Country></Site><CreationDate>2000-06-16T00:00:00</CreationDate><Channel code="HHZ" startDate="2004-04-05T00:00:00" restrictedStatus="open" locationCode=""><Latitude>47.33578</Latitude><Longitude>7.69498</Longitude><Elevation>863</Elevation><Depth>4.5</Depth><Azimuth>0</Azimuth><Dip>-90</Dip><SampleRate>120</SampleRate><SampleRateRatio><NumberSamples>120</NumberSamples><NumberSeconds>1</NumberSeconds></SampleRateRatio><StorageFormat>Steim2</StorageFormat><ClockDrift>0</ClockDrift><Sensor resourceId="smi:ch.ethz.sed/Sensor/20150105111040.231924.93"><Type>Streckeisen STS2_gen3</Type><Manufacturer>Streckeisen</Manufacturer><Model>STS2_gen3</Model></Sensor><DataLogger resourceId="smi:ch.ethz.sed/Datalogger/20150105111040.23247.95"><Type>Nanometrics HRD24</Type><Manufacturer>Nanometrics</Manufacturer><Model>HRD24</Model></DataLogger><Response><InstrumentSensitivity><Value>627615000</Value><Frequency>1</Frequency><InputUnits><Name>M/S</Name></InputUnits><OutputUnits><Name/></OutputUnits></InstrumentSensitivity></Response></Channel></Station><Station code="DAVOX" startDate="2002-07-24T00:00:00" restrictedStatus="open"><Latitude>46.7805</Latitude><Longitude>9.87952</Longitude><Elevation>1830</Elevation><Site><Name>Davos, Dischmatal, GR</Name><Country>Switzerland</Country></Site><CreationDate>2002-07-24T00:00:00</CreationDate><Channel code="HHZ" startDate="2004-02-20T00:00:00" restrictedStatus="open" locationCode=""><Latitude>46.7805</Latitude><Longitude>9.87952</Longitude><Elevation>1830</Elevation><Depth>1.5</Depth><Azimuth>0</Azimuth><Dip>-90</Dip><SampleRate>120</SampleRate><SampleRateRatio><NumberSamples>120</NumberSamples><NumberSeconds>1</NumberSeconds></SampleRateRatio><StorageFormat>Steim2</StorageFormat><ClockDrift>0</ClockDrift><Sensor resourceId="smi:ch.ethz.sed/Sensor/20150105111043.257077.132"><Type>Streckeisen STS2_gen3</Type><Manufacturer>Streckeisen</Manufacturer><Model>STS2_gen3</Model></Sensor><DataLogger resourceId="smi:ch.ethz.sed/Datalogger/20150105111043.257697.134"><Type>Nanometrics Trident</Type><Manufacturer>Nanometrics</Manufacturer><Model>Trident</Model></DataLogger><Response><InstrumentSensitivity><Value>600000000</Value><Frequency>1</Frequency><InputUnits><Name>M/S</Name></InputUnits><OutputUnits><Name/></OutputUnits></InstrumentSensitivity></Response></Channel></Station></Network></FDSNStationXML>')  # noqa

    routes = [
        Route(url='http://eida.ethz.ch/fdsnws/station/1/query',
              streams=[
                  StreamEpoch(Stream(network='CH', station='BALST',
                                     location='', channel='HHZ'),
                              starttime=datetime.datetime(2018, 1, 1),
                              endtime=datetime.datetime(2018, 1, 2))
              ]),
        Route(url='http://eida.ethz.ch/fdsnws/station/1/query',
              streams=[
                  StreamEpoch(Stream(network='CH', station='DAVOX',
                                     location='', channel='HHZ'),
                              starttime=datetime.datetime(2018, 1, 1),
                              endtime=datetime.datetime(2018, 1, 2))
              ])
    ]
    query_params = {'format': 'xml', 'level': 'channel'}

    t = StationXMLNetworkCombinerTask(routes, query_params)
    # XXX(damb): Using *open* from future requires mocking the function
    # within the module it is actually used.
    with mock.patch('eidangservices.federator.server.task.open', mock_open):
        net_element = t._extract_net_elements(
            path_xml='/path/to/station.xml')[0]

        # check the order of the sta elements
        self.assertEqual(net_element[0].tag,
                         settings.STATIONXML_NAMESPACES[0] + 'Description')
        self.assertEqual(net_element[1].get('code'), 'BALST')
        self.assertEqual(net_element[2].get('code'), 'DAVOX')

    mock_max_threads.has_calls()
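# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original test module): mock_open in its
# simplest form, patching the builtin directly. The test above instead patches
# the name as imported by the task module
# ('eidangservices.federator.server.task.open'), which is necessary because a
# patch only takes effect where the name is looked up. The file path used here
# is a made-up placeholder.
import unittest.mock as mock

m = mock.mock_open(read_data=b'<FDSNStationXML/>')
with mock.patch('builtins.open', m):
    with open('/path/to/station.xml', 'rb') as fd:
        print(fd.read())  # -> b'<FDSNStationXML/>'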
def _route(self):
    """
    Create the routing table using the routing service provided.
    """
    routing_request = RoutingRequestHandler(self._routing_service,
                                            self.query_params,
                                            self.stream_epochs)
    req = (routing_request.post() if self.post else routing_request.get())

    self.logger.info("Fetching routes from %s" % routing_request.url)

    routing_table = []

    try:
        with binary_request(req) as fd:
            # parse the routing service's output stream; create a routing
            # table
            urlline = None
            stream_epochs = []

            while True:
                line = fd.readline()

                if not urlline:
                    urlline = line.strip()
                elif not line.strip():
                    # set up the routing table
                    if stream_epochs:
                        routing_table.append(
                            utils.Route(url=urlline,
                                        streams=stream_epochs))
                    urlline = None
                    stream_epochs = []

                    if not line:
                        break
                else:
                    stream_epochs.append(
                        StreamEpoch.from_snclline(
                            line, default_endtime=self.DEFAULT_ENDTIME))

    except NoContent as err:
        self.logger.warning(err)
        raise FDSNHTTPError.create(
            int(self.query_params.get(
                'nodata',
                settings.FDSN_DEFAULT_NO_CONTENT_ERROR_CODE)))
    except RequestsError as err:
        self.logger.error(err)
        raise FDSNHTTPError.create(500, service_version=__version__)
    else:
        self.logger.debug('Number of routes received: {}'.format(
            len(routing_table)))

    return routing_table
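# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the plain-text layout
# the parsing loop above consumes -- a URL line, one SNCL line per stream
# epoch, and a blank line terminating each block. The endpoint URL and the
# SNCL line below are made-up examples; the helper is a simplified stand-in
# for the Route/StreamEpoch handling.
import io


def parse_routing_response(text):
    """Return a list of (url, [sncl lines]) tuples."""
    routing_table = []
    url, epochs = None, []
    for line in io.StringIO(text):
        line = line.strip()
        if url is None:
            url = line
        elif not line:
            if epochs:
                routing_table.append((url, epochs))
            url, epochs = None, []
        else:
            epochs.append(line)
    return routing_table


example = (
    "http://eida.ethz.ch/fdsnws/dataselect/1/query\n"
    "CH DAVOX -- LHZ 2018-01-01T00:00:00 2018-01-08T00:00:00\n"
    "\n")
print(parse_routing_response(example))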
def test_merge_sta_element_sta_extend(self, mock_max_threads,
                                      mock_elements_equal):
    mock_max_threads.return_value = 5
    mock_elements_equal.return_value = True
    routes = [
        Route(url='http://eida.ethz.ch/fdsnws/station/1/query',
              streams=[
                  StreamEpoch(Stream(network='CH', station='DAVOX',
                                     location='', channel='HHZ'),
                              starttime=datetime.datetime(2018, 1, 1),
                              endtime=datetime.datetime(2018, 1, 2))
              ]),
        Route(url='http://eida.ethz.ch/fdsnws/station/1/query',
              streams=[
                  StreamEpoch(Stream(network='CH', station='DAVOX',
                                     location='', channel='BHZ'),
                              starttime=datetime.datetime(2018, 1, 1),
                              endtime=datetime.datetime(2018, 1, 2))
              ])
    ]
    query_params = {'format': 'xml', 'level': 'channel'}

    davox_bhz_xml = b'<Network code="CH" startDate="1980-01-01T00:00:00" restrictedStatus="open"><Description>National Seismic Networks of Switzerland</Description><Station code="DAVOX" startDate="2002-07-24T00:00:00" restrictedStatus="open"><Latitude>46.7805</Latitude><Longitude>9.87952</Longitude><Elevation>1830</Elevation><Site><Name>Davos, Dischmatal, GR</Name><Country>Switzerland</Country></Site><CreationDate>2002-07-24T00:00:00</CreationDate><Channel code="BHZ" startDate="2004-02-20T00:00:00" restrictedStatus="open" locationCode=""><Latitude>46.7805</Latitude><Longitude>9.87952</Longitude><Elevation>1830</Elevation><Depth>1.5</Depth><Azimuth>0</Azimuth><Dip>-90</Dip><SampleRate>40</SampleRate><SampleRateRatio><NumberSamples>40</NumberSamples><NumberSeconds>1</NumberSeconds></SampleRateRatio><StorageFormat>Steim2</StorageFormat><ClockDrift>0</ClockDrift><Sensor resourceId="smi:ch.ethz.sed/Sensor/20150105111043.257077.132"><Type>Streckeisen STS2_gen3</Type><Manufacturer>Streckeisen</Manufacturer><Model>STS2_gen3</Model></Sensor><DataLogger resourceId="smi:ch.ethz.sed/Datalogger/20150105111043.248921.110"><Type>Nanometrics Trident</Type><Manufacturer>Nanometrics</Manufacturer><Model>Trident</Model></DataLogger><Response><InstrumentSensitivity><Value>600000000</Value><Frequency>1</Frequency><InputUnits><Name>M/S</Name></InputUnits><OutputUnits><Name/></OutputUnits></InstrumentSensitivity></Response></Channel></Station></Network>'  # noqa
    davox_hhz_xml = b'<Network code="CH" startDate="1980-01-01T00:00:00" restrictedStatus="open"><Description>National Seismic Networks of Switzerland</Description><Station code="DAVOX" startDate="2002-07-24T00:00:00" restrictedStatus="open"><Latitude>46.7805</Latitude><Longitude>9.87952</Longitude><Elevation>1830</Elevation><Site><Name>Davos, Dischmatal, GR</Name><Country>Switzerland</Country></Site><CreationDate>2002-07-24T00:00:00</CreationDate><Channel code="HHZ" startDate="2004-02-20T00:00:00" restrictedStatus="open" locationCode=""><Latitude>46.7805</Latitude><Longitude>9.87952</Longitude><Elevation>1830</Elevation><Depth>1.5</Depth><Azimuth>0</Azimuth><Dip>-90</Dip><SampleRate>120</SampleRate><SampleRateRatio><NumberSamples>120</NumberSamples><NumberSeconds>1</NumberSeconds></SampleRateRatio><StorageFormat>Steim2</StorageFormat><ClockDrift>0</ClockDrift><Sensor resourceId="smi:ch.ethz.sed/Sensor/20150105111043.257077.132"><Type>Streckeisen STS2_gen3</Type><Manufacturer>Streckeisen</Manufacturer><Model>STS2_gen3</Model></Sensor><DataLogger resourceId="smi:ch.ethz.sed/Datalogger/20150105111043.257697.134"><Type>Nanometrics Trident</Type><Manufacturer>Nanometrics</Manufacturer><Model>Trident</Model></DataLogger><Response><InstrumentSensitivity><Value>600000000</Value><Frequency>1</Frequency><InputUnits><Name>M/S</Name></InputUnits><OutputUnits><Name/></OutputUnits></InstrumentSensitivity></Response></Channel></Station></Network>'  # noqa

    net_element = etree.fromstring(davox_bhz_xml)
    sta_element = etree.fromstring(davox_hhz_xml)[1]

    t = StationXMLNetworkCombinerTask(routes, query_params)
    t._merge_sta_element(net_element, sta_element, namespaces=('', ))

    reference_xml = b'<Network code="CH" startDate="1980-01-01T00:00:00" restrictedStatus="open"><Description>National Seismic Networks of Switzerland</Description><Station code="DAVOX" startDate="2002-07-24T00:00:00" restrictedStatus="open"><Latitude>46.7805</Latitude><Longitude>9.87952</Longitude><Elevation>1830</Elevation><Site><Name>Davos, Dischmatal, GR</Name><Country>Switzerland</Country></Site><CreationDate>2002-07-24T00:00:00</CreationDate><Channel code="BHZ" startDate="2004-02-20T00:00:00" restrictedStatus="open" locationCode=""><Latitude>46.7805</Latitude><Longitude>9.87952</Longitude><Elevation>1830</Elevation><Depth>1.5</Depth><Azimuth>0</Azimuth><Dip>-90</Dip><SampleRate>40</SampleRate><SampleRateRatio><NumberSamples>40</NumberSamples><NumberSeconds>1</NumberSeconds></SampleRateRatio><StorageFormat>Steim2</StorageFormat><ClockDrift>0</ClockDrift><Sensor resourceId="smi:ch.ethz.sed/Sensor/20150105111043.257077.132"><Type>Streckeisen STS2_gen3</Type><Manufacturer>Streckeisen</Manufacturer><Model>STS2_gen3</Model></Sensor><DataLogger resourceId="smi:ch.ethz.sed/Datalogger/20150105111043.248921.110"><Type>Nanometrics Trident</Type><Manufacturer>Nanometrics</Manufacturer><Model>Trident</Model></DataLogger><Response><InstrumentSensitivity><Value>600000000</Value><Frequency>1</Frequency><InputUnits><Name>M/S</Name></InputUnits><OutputUnits><Name/></OutputUnits></InstrumentSensitivity></Response></Channel><Channel code="HHZ" startDate="2004-02-20T00:00:00" restrictedStatus="open" locationCode=""><Latitude>46.7805</Latitude><Longitude>9.87952</Longitude><Elevation>1830</Elevation><Depth>1.5</Depth><Azimuth>0</Azimuth><Dip>-90</Dip><SampleRate>120</SampleRate><SampleRateRatio><NumberSamples>120</NumberSamples><NumberSeconds>1</NumberSeconds></SampleRateRatio><StorageFormat>Steim2</StorageFormat><ClockDrift>0</ClockDrift><Sensor resourceId="smi:ch.ethz.sed/Sensor/20150105111043.257077.132"><Type>Streckeisen STS2_gen3</Type><Manufacturer>Streckeisen</Manufacturer><Model>STS2_gen3</Model></Sensor><DataLogger resourceId="smi:ch.ethz.sed/Datalogger/20150105111043.257697.134"><Type>Nanometrics Trident</Type><Manufacturer>Nanometrics</Manufacturer><Model>Trident</Model></DataLogger><Response><InstrumentSensitivity><Value>600000000</Value><Frequency>1</Frequency><InputUnits><Name>M/S</Name></InputUnits><OutputUnits><Name/></OutputUnits></InstrumentSensitivity></Response></Channel></Station></Network>'  # noqa

    self.assertEqual(etree.tostring(net_element), reference_xml)
    mock_max_threads.has_calls()
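# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original test module): the core idea
# behind the merge asserted above -- when an equal <Station> element already
# exists, only the missing <Channel> children are appended. The real
# implementation (lxml-based, namespace-aware, comparing full channel epochs)
# is reduced here to a channel-code comparison with the standard library.
import xml.etree.ElementTree as ET

existing = ET.fromstring(
    '<Station code="DAVOX"><Channel code="BHZ"/></Station>')
incoming = ET.fromstring(
    '<Station code="DAVOX"><Channel code="HHZ"/></Station>')

known = {cha.get('code') for cha in existing.findall('Channel')}
for cha in incoming.findall('Channel'):
    if cha.get('code') not in known:
        existing.append(cha)

print(ET.tostring(existing).decode())
# <Station code="DAVOX"><Channel code="BHZ" /><Channel code="HHZ" /></Station>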
def _route(self, req, post=True, **kwargs):
    """
    Route a request and create a routing table. Routing is performed by
    means of the routing service provided.

    :param req: Routing service request handler
    :type req: :py:class:`RoutingRequestHandler`
    :param bool post: Execute the request to the routing service via
        HTTP POST

    :raises NoContent: If no routes are available
    :raises RequestsError: General exception if the request to the routing
        service failed
    """
    _req = (req.post() if post else req.get())

    routing_table = {}

    self.logger.info("Fetching routes from %s" % req.url)
    try:
        with binary_request(_req) as fd:
            # parse the routing service's output stream; create a routing
            # table
            urlline = None
            stream_epochs = []

            while True:
                line = fd.readline()

                if not urlline:
                    urlline = line.strip()
                elif not line.strip():
                    # set up the routing table
                    if stream_epochs:
                        routing_table[urlline] = stream_epochs

                    urlline = None
                    stream_epochs = []

                    if not line:
                        break
                else:
                    # XXX(damb): Do not substitute an empty endtime when
                    # performing HTTP GET requests in order to guarantee
                    # more cache hits (if eida-federator is coupled with an
                    # HTTP caching proxy).
                    stream_epochs.append(
                        StreamEpoch.from_snclline(
                            line,
                            default_endtime=(self._default_endtime
                                             if post else None)))

    except NoContent as err:
        self.logger.warning(err)
        nodata = int(
            kwargs.get('nodata',
                       settings.FDSN_DEFAULT_NO_CONTENT_ERROR_CODE))
        raise FDSNHTTPError.create(nodata)
    except RequestsError as err:
        self.logger.error(err)
        raise FDSNHTTPError.create(500, service_version=__version__)
    else:
        self.logger.debug('Number of routes received: {}'.format(
            len(routing_table)))

    return routing_table
def _process_request(self, args, stream_epochs, netloc_proxy=None):
    # resolve virtual network stream epochs
    vnet_stream_epochs = []
    for stream_epoch in stream_epochs:
        self.logger.debug(
            'Resolving {0!r} regarding VNET.'.format(stream_epoch))
        vnet_stream_epochs.extend(
            dbquery.resolve_vnetwork(db.session, stream_epoch))

    self.logger.debug('Stream epochs from VNETs: '
                      '{0!r}'.format(vnet_stream_epochs))

    stream_epochs.extend(vnet_stream_epochs)

    # collect results for each stream epoch
    routes = []
    for stream_epoch in stream_epochs:
        self.logger.debug('Processing request for %r' % (stream_epoch,))
        # query
        _routes = dbquery.find_streamepochs_and_routes(
            db.session, stream_epoch, args['service'],
            level=args['level'],
            access=args['access'],
            minlat=args['minlatitude'],
            maxlat=args['maxlatitude'],
            minlon=args['minlongitude'],
            maxlon=args['maxlongitude'])

        # adjust stream_epoch regarding time constraints
        for url, streams in _routes:
            streams.modify_with_temporal_constraints(
                start=stream_epoch.starttime,
                end=stream_epoch.endtime)

        routes.extend(_routes)

    # flatten response list
    self.logger.debug('StationLite routes: %s' % routes)

    # merge stream epochs for each route
    merged_routes = collections.defaultdict(StreamEpochsHandler)
    for url, stream_epochs in routes:
        merged_routes[url].merge(stream_epochs)

    self.logger.debug('StationLite routes (merged): %r' % merged_routes)

    for url, stream_epochs in merged_routes.items():
        if args['level'] in ('network', 'station'):
            merged_routes[url] = [StreamEpoch.from_streamepochs(ses)
                                  for ses in stream_epochs]
        else:
            merged_routes[url] = [se for ses in stream_epochs
                                  for se in ses]

    # sort response
    routes = [utils.Route(url=url, streams=sorted(stream_epochs))
              for url, stream_epochs in merged_routes.items()]
    # sort additionally by url
    routes.sort()

    ostream = OutputStream.create(
        args['format'], routes=routes, netloc_proxy=netloc_proxy)
    return str(ostream)
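# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the per-URL merge
# pattern used above, reduced to plain lists. The URLs and the epoch strings
# are made-up placeholders for the StreamEpochsHandler / StreamEpochs objects
# handled by the real code.
import collections

raw_routes = [
    ('http://eida.ethz.ch/fdsnws/station/1/query', ['CH.DAVOX..HHZ']),
    ('http://eida.ethz.ch/fdsnws/station/1/query', ['CH.BALST..HHZ']),
    ('http://www.orfeus-eu.org/fdsnws/station/1/query', ['NL.HGN..BHZ']),
]

merged = collections.defaultdict(list)
for url, stream_epochs in raw_routes:
    merged[url].extend(stream_epochs)

# one entry per endpoint, streams sorted for a deterministic response
for url in sorted(merged):
    print(url, sorted(merged[url]))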
def harvest(self, session):
    vnet_tag = '{}vnetwork'.format(self.NS_ROUTINGXML)
    stream_tag = '{}stream'.format(self.NS_ROUTINGXML)

    self.logger.debug('Harvesting virtual networks for %s.' % self.node)

    # event driven parsing
    for event, vnet_element in etree.iterparse(self.config,
                                               events=('end', ),
                                               tag=vnet_tag):
        if event == 'end' and len(vnet_element):

            vnet = self._emerge_streamepoch_group(session, vnet_element)

            for stream_element in vnet_element.iter(tag=stream_tag):
                self.logger.debug("Processing stream element: {}".format(
                    stream_element))
                # convert attributes to dict
                stream = Stream.from_route_attrs(
                    **dict(stream_element.attrib))
                try:
                    stream_starttime = UTCDateTime(
                        stream_element.get('start'), iso8601=True).datetime
                    endtime = stream_element.get('end')
                    # reset endtime due to 'end=""'
                    stream_endtime = (
                        UTCDateTime(endtime, iso8601=True).datetime
                        if endtime is not None and endtime.strip()
                        else None)
                except Exception as err:
                    raise self.RoutingConfigXMLParsingError(err)

                # deserialize to StreamEpoch object
                stream_epoch = StreamEpoch(stream=stream,
                                           starttime=stream_starttime,
                                           endtime=stream_endtime)

                self.logger.debug(
                    "Processing {0!r} ...".format(stream_epoch))

                sql_stream_epoch = stream_epoch.fdsnws_to_sql_wildcards()

                # check if the stream epoch definition is valid i.e. there
                # must be at least one matching ChannelEpoch
                query = session.query(orm.ChannelEpoch).\
                    join(orm.Network).\
                    join(orm.Station).\
                    filter(orm.Network.name.like(
                        sql_stream_epoch.network)).\
                    filter(orm.Station.name.like(
                        sql_stream_epoch.station)).\
                    filter(orm.ChannelEpoch.locationcode.like(
                        sql_stream_epoch.location)).\
                    filter(orm.ChannelEpoch.channel.like(
                        sql_stream_epoch.channel)).\
                    filter((orm.ChannelEpoch.endtime == None) |  # noqa
                           (orm.ChannelEpoch.endtime >
                            sql_stream_epoch.starttime))

                if sql_stream_epoch.endtime:
                    query = query.\
                        filter(orm.ChannelEpoch.starttime <
                               sql_stream_epoch.endtime)

                cha_epochs = query.all()
                if not cha_epochs:
                    self.logger.warning(
                        'No ChannelEpoch matching stream epoch '
                        '{0!r}'.format(stream_epoch))
                    continue

                for cha_epoch in cha_epochs:
                    self.logger.debug(
                        'Processing virtual network configuration for '
                        'ChannelEpoch object {0!r}.'.format(cha_epoch))
                    self._emerge_streamepoch(session, cha_epoch,
                                             stream_epoch, vnet)
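# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the event-driven
# iterparse pattern used above, reduced to the standard library. The tiny
# routing document, its tag names and attributes are made-up stand-ins for the
# real eida-routing virtual network configuration.
import io
import xml.etree.ElementTree as ET

config = io.BytesIO(b"""
<routing>
  <vnetwork networkCode="_EXAMPLE">
    <stream networkCode="CH" stationCode="DAVOX" locationCode=""
            streamCode="HHZ" start="2018-01-01T00:00:00" end=""/>
  </vnetwork>
</routing>
""")

for event, element in ET.iterparse(config, events=('end',)):
    if element.tag != 'vnetwork':
        continue
    for stream in element.iter('stream'):
        # 'end=""' denotes an open stream epoch
        endtime = stream.get('end') or None
        print(element.get('networkCode'), dict(stream.attrib), endtime)
    element.clear()  # keep memory bounded while streaming large files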