def put_timeseries(self, timeseries, starttime=None, endtime=None,
        channels=None, type=None, interval=None):
    """Store timeseries data.

    Slices the stream into urlInterval-sized chunks, merges each chunk
    with any data already stored at the corresponding file url, and
    writes the result back.

    Parameters
    ----------
    timeseries : obspy.core.Stream
        stream containing traces to store.
    starttime : UTCDateTime
        time of first sample in timeseries to store.
        uses first sample if unspecified.
    endtime : UTCDateTime
        time of last sample in timeseries to store.
        uses last sample if unspecified.
    channels : array_like
        list of channels to store, optional.
        uses default if unspecified.
    type : {'definitive', 'provisional', 'quasi-definitive', 'variation'}
        data type, optional.
        uses default if unspecified.
    interval : {'daily', 'hourly', 'minute', 'monthly', 'second'}
        data interval, optional.
        uses default if unspecified.

    Raises
    ------
    TimeseriesFactoryException
        if any errors occur.
    NotImplementedError
        if the factory does not support writing.
    """
    if len(timeseries) == 0:
        # no data to put
        return
    if not self.urlTemplate.startswith('file://'):
        raise TimeseriesFactoryException('Only file urls are supported')
    channels = channels or self.channels
    type = type or self.type
    interval = interval or self.interval
    stats = timeseries[0].stats
    delta = stats.delta
    observatory = stats.station
    starttime = starttime or stats.starttime
    endtime = endtime or stats.endtime
    urlIntervals = Util.get_intervals(
            starttime=starttime,
            endtime=endtime,
            size=self.urlInterval)
    for urlInterval in urlIntervals:
        url = self._get_url(
                observatory=observatory,
                date=urlInterval['start'],
                type=type,
                interval=interval,
                channels=channels)
        url_data = timeseries.slice(
                starttime=urlInterval['start'],
                # subtract delta to omit the sample at end: `[start, end)`
                endtime=(urlInterval['end'] - delta))
        url_file = Util.get_file_from_url(url, createParentDirectory=True)
        # existing data file, merge new data into existing
        if os.path.isfile(url_file):
            try:
                existing_data = Util.read_file(url_file)
                existing_data = self.parse_string(existing_data,
                        observatory=url_data[0].stats.station,
                        type=type,
                        interval=interval,
                        channels=channels)
                # TODO: make parse_string return the correct location code
                for trace in existing_data:
                    # make location codes match, just in case
                    new_trace = url_data.select(
                            network=trace.stats.network,
                            station=trace.stats.station,
                            channel=trace.stats.channel)[0]
                    trace.stats.location = new_trace.stats.location
                url_data = TimeseriesUtility.merge_streams(
                        existing_data, url_data)
            except IOError:
                # no data yet
                pass
            except NotImplementedError:
                # factory only supports output
                pass
        try:
            with open(url_file, 'wb') as fh:
                self.write_file(fh, url_data, channels)
        except NotImplementedError:
            # open(..., 'wb') already truncated the file; remove the
            # now-empty file so a factory without write support does not
            # leave corrupt output behind, then re-raise as before.
            os.remove(url_file)
            raise NotImplementedError(
                    '"put_timeseries" not implemented')
def put_timeseries(self, timeseries, starttime=None, endtime=None,
        channels=None, type=None, interval=None):
    """Store timeseries data.

    Data is written one url-interval at a time; data already present in
    a target file is parsed and merged with the incoming stream before
    the file is rewritten.

    Parameters
    ----------
    timeseries : obspy.core.Stream
        stream containing traces to store.
    starttime : UTCDateTime
        time of first sample in timeseries to store.
        uses first sample if unspecified.
    endtime : UTCDateTime
        time of last sample in timeseries to store.
        uses last sample if unspecified.
    channels : array_like
        list of channels to store, optional.
        uses default if unspecified.
    type : {'definitive', 'provisional', 'quasi-definitive', 'variation'}
        data type, optional.
        uses default if unspecified.
    interval : {'daily', 'hourly', 'minute', 'monthly', 'second'}
        data interval, optional.
        uses default if unspecified.

    Raises
    ------
    TimeseriesFactoryException
        if any errors occur.
    """
    if not len(timeseries):
        # nothing to store
        return
    if not self.urlTemplate.startswith('file://'):
        raise TimeseriesFactoryException('Only file urls are supported')
    channels = channels or self.channels
    type = type or self.type
    interval = interval or self.interval
    first_stats = timeseries[0].stats
    sample_delta = first_stats.delta
    observatory = first_stats.station
    starttime = starttime or first_stats.starttime
    endtime = endtime or first_stats.endtime
    for span in Util.get_intervals(
            starttime=starttime,
            endtime=endtime,
            size=self.urlInterval):
        span_start = span['start']
        url = self._get_url(
                observatory=observatory,
                date=span_start,
                type=type,
                interval=interval,
                channels=channels)
        # half-open interval `[start, end)`: back off one sample delta
        # so the sample at `end` is excluded from this chunk
        chunk = timeseries.slice(
                starttime=span_start,
                endtime=(span['end'] - sample_delta))
        target = Util.get_file_from_url(url, createParentDirectory=True)
        if os.path.isfile(target):
            # a file already exists here; fold its contents into chunk
            try:
                existing = Util.read_file(target)
                existing = self.parse_string(
                        existing,
                        observatory=chunk[0].stats.station,
                        type=type,
                        interval=interval,
                        channels=channels)
                # TODO: make parse_string return the correct location code
                for old_trace in existing:
                    # force location codes to agree, just in case
                    matching = chunk.select(
                            network=old_trace.stats.network,
                            station=old_trace.stats.station,
                            channel=old_trace.stats.channel)[0]
                    old_trace.stats.location = matching.stats.location
                chunk = TimeseriesUtility.merge_streams(existing, chunk)
            except IOError:
                # no data yet
                pass
            except NotImplementedError:
                # factory only supports output
                pass
        with open(target, 'wb') as fh:
            try:
                self.write_file(fh, chunk, channels)
            except NotImplementedError:
                raise NotImplementedError(
                        '"put_timeseries" not implemented')