def _multi_request(self, **kwargs):
    """
    Make a series of requests to avoid the 100GB limit.

    Expects ``start_time``, ``end_time`` and ``series`` as keyword
    arguments; returns an empty list if any of them is missing.  When JSOC
    rejects the request for exceeding the byte limit, the time range is
    bisected and each half is requested recursively.

    Returns
    -------
    list
        The accumulated responses, one per successful sub-request.
    """
    start_time = kwargs.pop('start_time', None)
    end_time = kwargs.pop('end_time', None)
    series = kwargs.pop('series', None)
    if any(x is None for x in (start_time, end_time, series)):
        return []
    start_time = self._process_time(start_time)
    end_time = self._process_time(end_time)
    tr = TimeRange(start_time, end_time)
    returns = []
    response, json_response = self._send_jsoc_request(
        start_time, end_time, series, **kwargs)
    # We skip these lines because a massive request is not a practical test.
    error_response = 'Request exceeds max byte limit of 100000MB'
    if (json_response['status'] == 3 and
            json_response['error'] == error_response):  # pragma: no cover
        # BUG FIX: this method only accepts keyword arguments, so the
        # recursive calls must pass start_time/end_time/series by keyword —
        # the previous positional calls raised TypeError when this branch ran.
        returns.append(self._multi_request(
            start_time=tr.start(), end_time=tr.center(),
            series=series, **kwargs)[0])  # pragma: no cover
        returns.append(self._multi_request(
            start_time=tr.center(), end_time=tr.end(),
            series=series, **kwargs)[0])  # pragma: no cover
    else:
        returns.append(response)
    return returns
def _multi_request(self, **kwargs):
    """
    Make a series of requests to avoid the 100GB limit.

    Expects ``start_time``, ``end_time`` and ``series`` as keyword
    arguments; returns an empty list if any of them is missing.  When JSOC
    rejects the request for exceeding the byte limit, the time range is
    bisected and each half is requested recursively.

    Returns
    -------
    list
        The accumulated responses, one per successful sub-request.
    """
    start_time = kwargs.pop('start_time', None)
    end_time = kwargs.pop('end_time', None)
    series = kwargs.pop('series', None)
    if any(x is None for x in (start_time, end_time, series)):
        return []
    start_time = self._process_time(start_time)
    end_time = self._process_time(end_time)
    tr = TimeRange(start_time, end_time)
    returns = []
    response, json_response = self._send_jsoc_request(
        start_time, end_time, series, **kwargs)
    # We skip these lines because a massive request is not a practical test.
    error_response = 'Request exceeds max byte limit of 100000MB'
    if (json_response['status'] == 3 and
            json_response['error'] == error_response):  # pragma: no cover
        # BUG FIX: this method only accepts keyword arguments, so the
        # recursive calls must pass start_time/end_time/series by keyword —
        # the previous positional calls raised TypeError when this branch ran.
        returns.append(self._multi_request(
            start_time=tr.start(), end_time=tr.center(),
            series=series, **kwargs)[0])  # pragma: no cover
        returns.append(self._multi_request(
            start_time=tr.center(), end_time=tr.end(),
            series=series, **kwargs)[0])  # pragma: no cover
    else:
        returns.append(response)
    return returns
def truncate(self, a, b=None):
    """Return a truncated copy of the timeseries object.

    Parameters
    ----------
    a : TimeRange or time-like
        Either a complete ``TimeRange``, or the start of the range.
    b : time-like, optional
        The end of the range; only used when *a* is not a ``TimeRange``.
    """
    # Normalize the arguments into a single TimeRange.
    span = a if isinstance(a, TimeRange) else TimeRange(a, b)
    trimmed = self.data.truncate(span.start(), span.end())
    return LightCurve(trimmed, self.header.copy())
def truncate(self, a, b=None):
    """Return a truncated copy of the timeseries object.

    Parameters
    ----------
    a : TimeRange or time-like
        Either a complete ``TimeRange``, or the start of the range.
    b : time-like, optional
        The end of the range; only used when *a* is not a ``TimeRange``.
    """
    # Accept either a ready-made TimeRange or a (start, end) pair.
    if isinstance(a, TimeRange):
        span = a
    else:
        span = TimeRange(a, b)
    sliced = self.data.truncate(span.start(), span.end())
    # Build the result via the subclass factory so the concrete type survives.
    return self.__class__.create(sliced, self.meta.copy())
def truncate(self, a, b=None):
    """Return a truncated copy of the timeseries object.

    Parameters
    ----------
    a : TimeRange or time-like
        Either a complete ``TimeRange``, or the start of the range.
    b : time-like, optional
        The end of the range; only used when *a* is not a ``TimeRange``.
    """
    if not isinstance(a, TimeRange):
        a = TimeRange(a, b)
    subset = self.data.truncate(a.start(), a.end())
    return LightCurve(subset, self.header.copy())
def _multi_request(self, start_time, end_time, series, **kwargs):
    """
    Make a series of requests to avoid the 100GB limit.

    Issues one JSOC request for ``[start_time, end_time]``; if JSOC reports
    that the request exceeds its byte limit, bisects the interval and
    retries each half recursively.

    Returns
    -------
    list
        The accumulated responses, one per successful sub-request.
    """
    time_range = TimeRange(start_time, end_time)
    response, json_response = self._send_jsoc_request(
        start_time, end_time, series, **kwargs)
    over_limit = (
        json_response['status'] == 3 and
        json_response['error'] == 'Request exceeds max byte limit of 100000MB')
    if not over_limit:
        return [response]
    # Too large: split the interval at its midpoint and retry each half.
    first_half = self._multi_request(
        time_range.start(), time_range.center(), series, **kwargs)[0]
    second_half = self._multi_request(
        time_range.center(), time_range.end(), series, **kwargs)[0]
    return [first_half, second_half]
def _get_url_for_date_range(*args, **kwargs):
    """Returns a URL to the RHESSI data for the specified date range.

    Parameters
    ----------
    args : TimeRange, datetimes, date strings
        Date range should be specified using a TimeRange, or start and end
        dates as datetime instances or date strings.

    Raises
    ------
    ValueError
        If the arguments do not describe a time range, or if the start
        time is after the end time.
    """
    if len(args) == 1 and isinstance(args[0], TimeRange):
        time_range = args[0]
    elif len(args) == 2:
        time_range = TimeRange(parse_time(args[0]), parse_time(args[1]))
    else:
        # BUG FIX: previously any other argument shape fell through with
        # ``time_range`` unbound, raising an opaque NameError below.
        raise ValueError(
            'specify either a TimeRange or a start and end time')
    if time_range.end() < time_range.start():
        raise ValueError('start time > end time')
    url = rhessi.get_obssum_filename(time_range)
    return url