def parse_mileage(mileage):
    """
    Parse a MileageStat from a dict representation.

    Parameters
    ----------
    mileage : dict
        The mileage as a dict.

    Returns
    -------
    tuple
        A namedtuple containing mileage stat info.
        The error is None if everything went fine.

    Raises
    ------
    ValueError
        If the dict does not contain the correct data.

    """
    try:
        return MileageStat(
            isodate.parse_datetime(mileage['beginDate']),
            isodate.parse_datetime(mileage['endDate']),
            mileage['type'],
            mileage['value'],
        )
    except ValueError as err:
        raise xee_exceptions.ParseException(err)
def parse_fullcalendar_request(request, timezone):
    """ Parses start and end from the given fullcalendar request. It is
    expected that no timezone is passed (the default).

    See `<http://fullcalendar.io/docs/timezone/timezone/>`_

    :returns: A tuple of timezone-aware datetime objects or (None, None).

    """
    start = request.params.get('start')
    end = request.params.get('end')

    if start and end:
        if 'T' in start:
            start = parse_datetime(start)
            end = parse_datetime(end)
        else:
            start = datetime.combine(parse_date(start), time(0, 0))
            end = datetime.combine(parse_date(end), time(23, 59, 59, 999999))

        start = sedate.replace_timezone(start, timezone)
        end = sedate.replace_timezone(end, timezone)

        return start, end
    else:
        return None, None
def test_serialize_datetime(self):
    date_obj = isodate.parse_datetime('2015-01-01T00:00:00')
    date_str = Serializer.serialize_iso(date_obj)
    self.assertEqual(date_str, '2015-01-01T00:00:00.000Z')

    date_obj = isodate.parse_datetime('1999-12-31T23:59:59-12:00')
    date_str = Serializer.serialize_iso(date_obj)
    self.assertEqual(date_str, '2000-01-01T11:59:59.000Z')

    with self.assertRaises(SerializationError):
        date_obj = isodate.parse_datetime('9999-12-31T23:59:59-12:00')
        date_str = Serializer.serialize_iso(date_obj)

    with self.assertRaises(SerializationError):
        date_obj = isodate.parse_datetime('0001-01-01T00:00:00+23:59')
        date_str = Serializer.serialize_iso(date_obj)

    date_obj = isodate.parse_datetime("2015-06-01T16:10:08.0121-07:00")
    date_str = Serializer.serialize_iso(date_obj)
    self.assertEqual(date_str, '2015-06-01T23:10:08.0121Z')

    date_obj = datetime.min
    date_str = Serializer.serialize_iso(date_obj)
    self.assertEqual(date_str, '0001-01-01T00:00:00.000Z')

    date_obj = datetime.max
    date_str = Serializer.serialize_iso(date_obj)
    self.assertEqual(date_str, '9999-12-31T23:59:59.999999Z')
def test_datetime_rfc(self):
    config = AutoRestRFC1123DateTimeTestServiceConfiguration(base_url="http://localhost:3000")
    config.log_level = log_level
    client = AutoRestRFC1123DateTimeTestService(config)

    self.assertIsNone(client.datetimerfc1123.get_null())

    with self.assertRaises(DeserializationError):
        client.datetimerfc1123.get_invalid()

    with self.assertRaises(DeserializationError):
        client.datetimerfc1123.get_underflow()

    with self.assertRaises(DeserializationError):
        client.datetimerfc1123.get_overflow()

    client.datetimerfc1123.get_utc_lowercase_max_date_time()
    client.datetimerfc1123.get_utc_uppercase_max_date_time()
    client.datetimerfc1123.get_utc_min_date_time()

    max_date = isodate.parse_datetime("9999-12-31T23:59:59.999999Z")
    client.datetimerfc1123.put_utc_max_date_time(max_date)

    min_date = isodate.parse_datetime("0001-01-01T00:00:00Z")
    client.datetimerfc1123.put_utc_min_date_time(min_date)
def test_check_timestamp_in_range_true():
    timestamp = isodate.parse_datetime("2016-06-07T23:46:03+00:00")
    start_time = isodate.parse_datetime("2016-06-07T23:40:03+00:00")
    end_time = isodate.parse_datetime("2016-06-07T23:50:03+00:00")
    assert logs.check_timestamp_in_range(timestamp, start_time, end_time) is True
def datetime_is_iso(dt):
    """Attempts to parse a date formatted in ISO 8601 format"""
    try:
        isodate.parse_datetime(dt)
        return True, []
    except isodate.ISO8601Error:
        return False, ['Datetime provided is not in a valid ISO 8601 format']
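# Usage sketch for the validator above (illustrative, not from the original
# source): isodate raises ISO8601Error, a ValueError subclass, on malformed
# input, which is exactly what datetime_is_iso() relies on.
assert datetime_is_iso('2016-06-07T23:46:03+00:00') == (True, [])
ok, errors = datetime_is_iso('07/06/2016')  # not ISO 8601: missing 'T' designator
assert ok is False and errors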
def data_request(self):
    """
    This method is used to serve the data request partner client HTTP service.

    @param self the PartnerHTTPServices itself, it should be PartnerHTTPServices
    @throws PartnerClientError throws if request body is empty
    @throws Exception any error should be raised to caller. CherryPy will handle the
        error and translate to HTTP code 500 (refer to partnercli#handle_error)
    """
    signature = 'hfppnetwork.partner.httpservices.PartnerHTTPServices.data_request'
    method_enter(signature, {"self": self})
    # Read the data request XML
    request_body = cherrypy.request.body.read().decode("utf-8")
    logging.debug('%s:%s', 'request_body', request_body)
    if len(request_body) == 0:
        raise PartnerClientError("request body can not be empty")
    # Parse data request XML
    root = ET.fromstring(request_body)
    request_id = root.findtext('./RequestID')
    study_id = root.findtext('./StudyID')
    query = root.findtext('./Query')
    expiration_time = isodate.parse_datetime(root.findtext('./ExpirationTime'))
    # CacheAvailable and CacheTimestamp may not exist
    cache_available = 'true' == root.findtext('./CacheAvailable')
    cache_timestamp = None
    if root.findtext('./CacheTimestamp'):
        cache_timestamp = isodate.parse_datetime(root.findtext('./CacheTimestamp'))
    # Kick off a new thread to handle the request
    handler = DataRequestHandler()
    t = Thread(target=handler.handle_data_request,
               args=(request_id, study_id, query, expiration_time,
                     cache_available, cache_timestamp,))
    t.daemon = False
    t.start()
    method_exit(signature)
def parse_trip(trip):
    """
    Parse a trip from a dict representation.

    Parameters
    ----------
    trip : dict
        The trip as a dict.

    Returns
    -------
    tuple
        A namedtuple containing trip info.
        The error is None if everything went fine.

    Raises
    ------
    ValueError
        If the dict does not contain the correct data.

    """
    try:
        return Trip(
            trip['id'],
            parse_location(trip['beginLocation']),
            parse_location(trip['endLocation']),
            isodate.parse_datetime(trip['beginDate']),
            isodate.parse_datetime(trip['endDate'])
        )
    except ValueError as err:
        raise xee_exceptions.ParseException(err)
def map_block(model, mapped_models):
    resource = model.get("resource", None)
    if resource:
        model["resource"] = mapped_models["resource_models"][resource]
    model["start_time"] = parse_datetime(model["start_time"])
    model["end_time"] = parse_datetime(model["end_time"])
    return model
def transform_to_timeseries(self, dataframe):
    # NOTE: Timeseries_index must be some date time field.
    # Most of the complexity here is required for allowing
    # multi-indexing (i.e. indexing on more than just the time value).
    # See http://stackoverflow.com/questions/15799162/resampling-within-a-pandas-multiindex.
    # Have used this http://stackoverflow.com/questions/18677271/grouping-daily-data-by-month-in-python-pandas-and-then-normalising
    # to simplify the code from the first answer.
    # Without multi-indexing, creation of a timeseries dataframe is fairly easy (just set
    # the index to a datetime column and then use the resample method).
    all_col_keys = [TIMESERIES_INDEX_OPT, TIMESERIES_GROUP_BY, VALUES_OPT]
    dataframe = self._drop_columns_not_requested(dataframe, all_col_keys)
    timeseries_index = self.request.query_params.get(TIMESERIES_INDEX_OPT)
    timeseries_freq = self.request.query_params.get(TIMESERIES_FREQ_OPT)
    timeseries_group_by = self.request.query_params.get(TIMESERIES_GROUP_BY)
    fill_range_start = self.request.query_params.get(TIMESERIES_FILL_RANGE_START)
    fill_range_end = self.request.query_params.get(TIMESERIES_FILL_RANGE_END)
    fieldnames = self.serializer_class.Meta.fieldnames
    only_valid = self._get_only_valid_func(dataframe)
    was_empty = False
    if dataframe.empty and fill_range_start and fill_range_end:
        # Create a fake row...this means that the returned dataframe will be in the
        # same format whether there is data or not
        dataframe = self._attach_empty_row(dataframe, timeseries_index, fill_range_start)
        was_empty = True
    dataframe = dataframe.set_index(timeseries_index)
    if timeseries_group_by:
        timeseries_group_by = filter(only_valid, timeseries_group_by.split(LIST_OPTS_DIVIDER))
    if timeseries_freq:
        aggfuncs = self._get_aggfuncs()
        if timeseries_group_by:
            dataframe = dataframe.groupby(timeseries_group_by)
        # WARNING - This resampling has been known to cause segfaults with
        # numpy when the np.sum function is used. Not sure why, but possibly related
        # to https://github.com/numpy/numpy/issues/2995. Likely caused
        # by the above code to make resampling with multi-indexing a possibility
        # (i.e. resampling is not an issue when timeseries_group_by is not
        # used). See the `AGGFUNC_OVERRIDES` variable above for an explanation of
        # how this bug was fixed.
        dataframe = dataframe.resample(timeseries_freq, how=aggfuncs)
        # Fill empty date rows with zeros, rather than omitting them.
        if fill_range_start and fill_range_end:
            fill_range_start = isodate.parse_datetime(fill_range_start).strftime(FILL_DATETIME_FORMAT)
            fill_range_end = isodate.parse_datetime(fill_range_end).strftime(FILL_DATETIME_FORMAT)
            idx = pd.date_range(fill_range_start, fill_range_end)
            idx.name = timeseries_index
            dataframe = dataframe.reindex(idx, fill_value=0)
            if was_empty:
                # Zero out all the rows with a 1...this should only happen when the
                # nunique aggfunc is used
                dataframe = dataframe.replace(to_replace=1, regex=False, value=0)
    return dataframe
def __init__(self, api, record):
    self.api = api
    self.id = record["id"]
    self.name = record["name"]
    self.ttl = record.get("ttl")
    self.email_address = record.get("emailAddress")
    self.created = isodate.parse_datetime(record["created"])
    self.updated = isodate.parse_datetime(record["updated"])
    self.comment = record.get("comment")
def valid_8601_date(s):
    try:
        if 'T' in s:
            parse_datetime(s)
        else:
            parse_date(s)
        return True
    except ValueError:
        return False
def __init__(self, domain, record):
    self.api = domain.api
    self.domain = domain
    self.name = record["name"]
    self.id = record["id"]
    self.type = record["type"]
    self.data = record["data"]
    self.ttl = record["ttl"]
    self.priority = record.get("priority")
    self.created = isodate.parse_datetime(record["created"])
    self.updated = isodate.parse_datetime(record["updated"])
    self.comment = record.get("comment")
def load(self):
    """
    Loads the data from the file system
    """
    # Determine the most recent JSON file
    filelist = glob.glob(os.path.join(self.datadir_current_path, "*.json"))
    if not filelist:
        return
    datafile_path = os.path.abspath(sorted(filelist)[-1])

    # Load the JSON file
    with io.open(datafile_path, "rb") as datafile:
        loaded_data = json.loads(datafile.read())
    if not loaded_data:
        return

    # Add the language-independent data from the JSON file to the class instance
    for data_key_item in self.all_data_keys:
        data_key_name = data_key_item["name"]
        data_key_type = data_key_item["type"]
        if data_key_type == TYPE_TIMESTAMP:
            timestamp_iso = loaded_data.get(data_key_name, None)
            if timestamp_iso:
                setattr(self, data_key_name, isodate.parse_datetime(timestamp_iso))
            else:
                setattr(self, data_key_name, getattr(self, data_key_name))
        else:
            setattr(self, data_key_name, loaded_data.get(data_key_name, getattr(self, data_key_name)))

    # Add the language-dependent data from the JSON file to the
    # language-dependent class instances
    for language_id, language_data in self.items():
        assert isinstance(language_data, LangData)
        for data_key_item in language_data.all_data_keys:
            data_key_name = data_key_item["name"]
            data_key_type = data_key_item["type"]
            if data_key_type == TYPE_TIMESTAMP:
                timestamp_iso = loaded_data.get(data_key_name, {}).get(language_id, None)
                if timestamp_iso:
                    setattr(language_data, data_key_name, isodate.parse_datetime(timestamp_iso))
                else:
                    setattr(language_data, data_key_name, None)
            else:
                setattr(
                    language_data,
                    data_key_name,
                    loaded_data.get(data_key_name, {}).get(language_id, getattr(language_data, data_key_name)),
                )
def iso8601_parse(text):
    '''Parse a date or datetime in ISO 8601 format and return a datetime
       object. For datetimes, can handle either "T" or " " as a separator.'''
    # WARNING: ISO dates have no notion of time zone. Thus, if you want a
    # datetime in a time zone other than UTC, you must include a time.
    try:
        text = ISO8601_SPACE_SEP.sub(r'\1T\3', text)
        dt = isodate.parse_datetime(text)
    except ValueError:
        # no time specified, assume midnight at beginning of day
        dt = isodate.parse_datetime(text + 'T00:00')
    if (dt.tzinfo is None):
        # make aware, assuming UTC
        dt = utcify(dt)
    return dt
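# Minimal sketch of the module-level helpers the function above assumes
# (ISO8601_SPACE_SEP and utcify() are not shown in the snippet, so these
# stand-ins are hypothetical and the originals may differ):
import re
import isodate

ISO8601_SPACE_SEP = re.compile(r'(\d\d)( )(\d\d)')  # hypothetical pattern

def utcify(dt):
    # assume naive datetimes are UTC
    return dt.replace(tzinfo=isodate.UTC)

assert iso8601_parse('2016-06-07 23:46:03').hour == 23   # space separator accepted
assert iso8601_parse('2016-06-07').tzinfo is not None    # date-only -> midnight UTC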
def parse_time(timeElement):
    if timeElement.tag == '{%s}time' % EPG_NS:
        time = Time(
            isodate.parse_datetime(timeElement.attrib['time']),
            isodate.parse_duration(timeElement.attrib['duration']),
            isodate.parse_datetime(timeElement.attrib.get('actualTime'))
                if 'actualTime' in timeElement.attrib else None,
            isodate.parse_duration(timeElement.attrib.get('actualDuration'))
                if 'actualDuration' in timeElement.attrib else None)
        return time
    if timeElement.tag == '{%s}relativeTime' % EPG_NS:
        time = RelativeTime(
            isodate.parse_duration(timeElement.attrib['time']),
            isodate.parse_duration(timeElement.attrib['duration']),
            isodate.parse_duration(timeElement.attrib.get('actualTime'))
                if 'actualTime' in timeElement.attrib else None,
            isodate.parse_duration(timeElement.attrib.get('actualDuration'))
                if 'actualDuration' in timeElement.attrib else None)
        return time
    else:
        raise ValueError('unknown time element: %s' % timeElement)
def genDateRange(startDate, endDate, interval):
    import isodate  # https://github.com/gweis/isodate
    dates = []
    dateFrom = isodate.parse_datetime(startDate)
    dateTo = isodate.parse_datetime(endDate)
    dateInterval = isodate.parse_duration(interval)
    currentDate = dateFrom
    while currentDate <= dateTo:
        date_str = isodate.datetime_isoformat(currentDate)
        dates.append(date_str)
        currentDate = currentDate + dateInterval
    return dates
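# Example usage (illustrative): hourly timestamps across a three-hour window.
# The end date is inclusive, so four strings come back, roughly
# '2020-01-01T00:00:00Z' through '2020-01-01T03:00:00Z'.
hours = genDateRange('2020-01-01T00:00:00Z', '2020-01-01T03:00:00Z', 'PT1H')
assert len(hours) == 4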
def test_paasta_log_line_passes_filter_true_when_valid_time():
    service = 'fake_service'
    levels = ['fake_level1', 'fake_level2']
    clusters = ['fake_cluster1', 'fake_cluster2']
    instance = 'fake_instance'
    components = ['build', 'deploy']
    line = 'fake_line'
    formatted_line = format_log_line(levels[0], clusters[0], service, instance,
                                     components[0], line,
                                     timestamp="2016-06-07T23:46:03+00:00")
    start_time = isodate.parse_datetime("2016-06-07T23:40:03+00:00")
    end_time = isodate.parse_datetime("2016-06-07T23:50:03+00:00")
    assert logs.paasta_log_line_passes_filter(formatted_line, levels, service,
                                              components, clusters,
                                              start_time=start_time,
                                              end_time=end_time) is True
def retrieve_revision(commit_id, username, project, revision=None):
    commit_url = 'https://api.github.com/repos/%s/%s/git/commits/%s' % (
        username, project, commit_id)
    commit_json = fetch_json(commit_url)

    date = isodate.parse_datetime(commit_json['committer']['date'])
    tag = retrieve_tag(commit_id, username, project)

    if revision:
        # Overwrite any existing data we might have for this revision since
        # we never want our records to be out of sync with the actual VCS:
        # We need to convert the timezone-aware date to a naive (i.e.
        # timezone-less) date in UTC to avoid killing MySQL:
        revision.date = date.astimezone(isodate.tzinfo.Utc()).replace(tzinfo=None)
        revision.author = commit_json['author']['name']
        revision.message = commit_json['message']
        revision.full_clean()
        revision.save()

    return {'date': date,
            'message': commit_json['message'],
            'body': "",  # TODO: pretty-print diffs
            'author': commit_json['author']['name'],
            'author_email': commit_json['author']['email'],
            'commitid': commit_json['sha'],
            'short_commit_id': commit_json['sha'][0:7],
            'parents': commit_json['parents'],
            'tag': tag}
def serialize_iso(attr, **kwargs):
    """Serialize Datetime object into ISO-8601 formatted string.

    :param Datetime attr: Object to be serialized.
    :rtype: str
    :raises: SerializationError if format invalid.
    """
    if isinstance(attr, str):
        attr = isodate.parse_datetime(attr)
    try:
        if not attr.tzinfo:
            _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
        utc = attr.utctimetuple()
        if utc.tm_year > 9999 or utc.tm_year < 1:
            raise OverflowError("Hit max or min date")

        microseconds = str(float(attr.microsecond) * 1e-6)[1:].ljust(4, '0')
        date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
            utc.tm_year, utc.tm_mon, utc.tm_mday,
            utc.tm_hour, utc.tm_min, utc.tm_sec)
        return date + microseconds + 'Z'
    except (ValueError, OverflowError) as err:
        msg = "Unable to serialize datetime object."
        raise_with_traceback(SerializationError, msg, err)
    except AttributeError as err:
        msg = "ISO-8601 object must be valid Datetime object."
        raise_with_traceback(TypeError, msg, err)
def deserialize_iso(attr):
    """Deserialize ISO-8601 formatted string into Datetime object.

    :param str attr: response string to be deserialized.
    :rtype: Datetime
    :raises: DeserializationError if string format invalid.
    """
    try:
        attr = attr.upper()
        match = Deserializer.valid_date.match(attr)
        if not match:
            raise ValueError("Invalid datetime string: " + attr)

        check_decimal = attr.split('.')
        if len(check_decimal) > 1:
            decimal = ""
            for digit in check_decimal[1]:
                if digit.isdigit():
                    decimal += digit
                else:
                    break
            if len(decimal) > 6:
                attr = attr.replace(decimal, decimal[0:-1])

        date_obj = isodate.parse_datetime(attr)
        test_utc = date_obj.utctimetuple()
        if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
            raise OverflowError("Hit max or min date")
    except (ValueError, OverflowError, AttributeError) as err:
        msg = "Cannot deserialize datetime object."
        raise_with_traceback(DeserializationError, msg, err)
    else:
        return date_obj
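# Why the decimal trimming above matters (stdlib-only illustration): Python
# datetimes store microseconds, i.e. at most six fractional digits, so the
# deserializer drops a seventh digit before handing the string to isodate.
import isodate

dt = isodate.parse_datetime('2015-01-01T00:00:00.123456Z')  # six digits parse cleanly
assert dt.microsecond == 123456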
def call(salty_inst, c_msg, **kwargs):
    if not salty_inst.is_live:
        return True, "Stream currently offline. Follow to receive notifications for when I am."

    start = isodate.parse_datetime(salty_inst.stream_start)
    current = datetime.datetime.now(pytz.utc)

    return True, "The stream has been live for {0}.".format(str(current - start)[:-7])
def poll(self):
    already_have = []
    r = requests.get(self.url, verify=False)
    d = xmltodict.parse(r.text)
    entries = d['feed']['entry']
    entries.reverse()
    now = datetime.datetime.utcnow()
    now = now.replace(tzinfo=pytz.utc, microsecond=0)
    print(entries)
    for e in entries:
        url = e['link']['@href']
        name = e['author']['name']
        title = e['title']
        updated = isodate.parse_datetime(e['updated'])
        age = now - updated
        if url not in self.messages:
            age_str = 'just now'
            if age > datetime.timedelta(minutes=3):
                age_str = '%s ago' % str(age)
            msg = '%s edited "%s" %s %s' % (name, title, age_str, self.shorten(url))
            self.messages.add(url)
            if self.synced or self.emit_start:
                if self.max_age is not None:
                    if age > self.max_age:
                        continue
                if (e['title'], e['author']['name']) not in already_have:
                    # for my thing i just want the pages not the diffs
                    already_have.append((e['title'], e['author']['name']))
                    yield msg
    self.synced = True
def get_user_id(self, validate=True):
    if (self._user_id is None) or (validate and (not self._valid_user)):
        try:
            jbox_cookie = self.get_cookie(JBoxCookies.COOKIE_AUTH)
            if jbox_cookie is None:
                return None
            jbox_cookie = json.loads(base64.b64decode(jbox_cookie))
            if validate:
                sign = signstr(jbox_cookie['u'] + jbox_cookie['t'], JBoxCfg.get('sesskey'))
                if sign != jbox_cookie['x']:
                    self.log_info("signature mismatch for " + jbox_cookie['u'])
                    return None

                d = isodate.parse_datetime(jbox_cookie['t'])
                age = (datetime.datetime.now(pytz.utc) - d).total_seconds()
                if age > JBoxCookies.AUTH_VALID_SECS:
                    self.log_info("cookie older than allowed days: " + jbox_cookie['t'])
                    return None
                self._valid_user = True
            self._user_id = jbox_cookie['u']
        except:
            self.log_error("exception while reading auth cookie")
            traceback.print_exc()
            return None
    return self._user_id
def human_readable_iso_date(dt):
    """ Python datetime to a human readable ISO datetime. """
    if not isinstance(dt, (datetime.date, datetime.datetime)):
        dt = isodate.parse_datetime(dt)
    fmt = '%Y-%m-%d %H:%M:%S'
    return dt.strftime(fmt)
def __init__(self, jsonval=None):
    self.date = None
    if jsonval is not None:
        if 'T' in jsonval:
            self.date = isodate.parse_datetime(jsonval)
        else:
            self.date = isodate.parse_date(jsonval)
def get_date_boundaries(parameters):
    """Return the date boundaries in a set of parameters.

    Return a tuple with 2 datetime objects, the first one is the lower bound
    date and the second one is the upper bound date.
    """
    default_date_range = datetime.timedelta(days=7)

    greater_than = None
    lower_than = None

    if not parameters.get("date"):
        lower_than = timezone.now()
        greater_than = lower_than - default_date_range
    else:
        for param in parameters["date"]:
            value = isodate.parse_datetime(split_on_operator(param)[1])

            if "<" in param and (not lower_than or (lower_than and lower_than > value)):
                lower_than = value
            if ">" in param and (not greater_than or (greater_than and greater_than < value)):
                greater_than = value

        if not lower_than:
            # add a lower than that is now
            lower_than = timezone.now()

        if not greater_than:
            # add a greater than that is lower_than minus the date range
            greater_than = lower_than - default_date_range

    return (greater_than, lower_than)
def map_slot(model, mapped_models):
    resource = model.get("resource", None)
    if resource:
        model["resource"] = mapped_models["resource_models"][resource]
    person = model.get("person", None)
    if person:
        model["person"] = mapped_models["person_models"][person]
    organisation = model.get("organisation", None)
    if organisation:
        model["organisation"] = mapped_models["organisation_models"][organisation]
    model["start_time"] = parse_datetime(model["start_time"])
    model["end_time"] = parse_datetime(model["end_time"])
    return model
def extract_utc_timestamp_from_log_line(line):
    """
    Extracts the timestamp from a log line of the format "<timestamp> <other data>"
    and returns a UTC datetime object or None if it could not parse the line
    """
    # Extract ISO 8601 date per http://www.pelagodesign.com/blog/2009/05/20/iso-8601-date-validation-that-doesnt-suck/
    iso_re = r'^([\+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-2])(-?[1-7])?|' \
             r'(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))([T\s]((([01]\d|2[0-3])((:?)[0-5]\d)?|24\:?00)([\.,]\d+' \
             r'(?!:))?)?(\17[0-5]\d([\.,]\d+)?)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)? '

    tokens = re.match(iso_re, line)
    if not tokens:
        # Could not parse line
        return None

    timestamp = tokens.group(0).strip()
    dt = isodate.parse_datetime(timestamp)
    utc_timestamp = datetime_convert_timezone(dt, dt.tzinfo, dateutil.tz.tzutc())
    return utc_timestamp
def decode_token(token, secret='', ttl=DEFAULT_TTL, verify=True):
    try:
        token = jwt.decode(str(token), secret, verify=verify)
    except jwt.DecodeError as e:
        raise TokenInvalid("error decoding JSON Web Token", e)

    if verify:
        issue_time = token.get('issuedAt')
        if issue_time is None:
            raise TokenInvalid("'issuedAt' is missing from token")

        issue_time = isodate.parse_datetime(issue_time)
        expiry_time = issue_time + datetime.timedelta(seconds=ttl)

        if issue_time > _now():
            raise TokenInvalid("token is not yet valid")
        if expiry_time < _now():
            raise TokenInvalid("token has expired")

    return token
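# Standalone sketch of the expiry arithmetic above; the 'issuedAt' claim comes
# from the snippet, while the concrete times and the 3600s ttl are made up.
import datetime
import isodate

issued_at = isodate.parse_datetime('2016-06-07T23:46:03+00:00')
expiry_time = issued_at + datetime.timedelta(seconds=3600)
now = datetime.datetime(2016, 6, 8, 0, 30, tzinfo=isodate.UTC)
assert issued_at <= now < expiry_time  # token still valid at 00:30 UTC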
def _safe_parse_datetime(dt):
    """
    Parse a datetime, swallowing exceptions.

    :param dt: A string containing a datetime
    :returns: A datetime.datetime object representing ``dt``. If a datetime
        string is unparseable, it is represented as a datetime.datetime set to
        the epoch in UTC (i.e. a value which will never be the most recent).
    """
    epoch = datetime.datetime(1970, 1, 1, tzinfo=dateutil.tz.tzutc())
    try:
        parsed_dt = isodate.parse_datetime(dt)
    # I tried to limit this to isodate.ISO8601Error but parse_datetime() can
    # also throw "AttributeError: 'NoneType' object has no attribute 'split'",
    # and presumably other exceptions.
    except Exception as exc:
        log.debug("Failed to parse datetime '%s'" % dt)
        log.debug(exc)
        parsed_dt = epoch
    return parsed_dt
def cast(self, d):
    """
    Cast a single value to a :class:`datetime.datetime`.

    :param date_format: An optional :func:`datetime.strptime` format string
        for parsing datetimes in this column.
    :returns: :class:`datetime.datetime` or :code:`None`.
    """
    if isinstance(d, datetime.datetime) or d is None:
        return d
    elif isinstance(d, six.string_types):
        d = d.strip()
        if d.lower() in self.null_values:
            return None
    else:
        raise CastError('Can not parse value "%s" as datetime.' % d)

    if self.datetime_format:
        try:
            return datetime.datetime.strptime(d, self.datetime_format)
        except:
            raise CastError('Value "%s" does not match date format.' % d)

    value, status = self._parser.parseDT(d, sourceTime=self._source_time, tzinfo=self.timezone)
    if status == 3:
        return value

    try:
        dt = isodate.parse_datetime(d)
        return dt
    except:
        pass

    raise CastError('Can not parse value "%s" as datetime.' % d)
def parse_status(status):
    """
    Parse a Status from a dict representation.

    Parameters
    ----------
    status : dict
        The status as a dict.

    Returns
    -------
    tuple
        A namedtuple containing status info.
        The error is None if everything went fine.

    Raises
    ------
    ValueError
        If the dict does not contain the correct data.

    """
    try:
        accelerometer = None
        if 'accelerometer' in status:
            accelerometer_dict = status['accelerometer']
            if accelerometer_dict:
                accelerometer = Accelerometer(
                    accelerometer_dict['x'],
                    accelerometer_dict['y'],
                    accelerometer_dict['z'],
                    isodate.parse_datetime(accelerometer_dict['date']))
        location = None
        if 'location' in status:
            location_dict = status['location']
            if location_dict:
                location = parse_location(location_dict)
        return Status(
            location,
            accelerometer,
            [parse_signal(signal) for signal in status['signals']]
        )
    except ValueError as err:
        raise xee_exceptions.ParseException(err)
def test_post(self):
    print()
    print("Post one doc")
    print("^^^^^^^^^^^^")

    db = self.db
    generic = controllers.Generic(db)

    # here is the basic function call being tested
    fn = "controllers.generic.get(db, **args)"

    # POST ONE ################################
    host = self.host
    sample_doc = {
        "fNam": "johnathan",
        "lNam": "doe",
        "mOn": isodate.parse_datetime("2012-09-27T21:43:33.927Z"),
        "oBy": ObjectId("50468de92558713d84b03fd0"),
        "rBy": ObjectId("50468de92558713d84b03fd7"),
        "gen": 'm',
        "emails": [{"email": "*****@*****.**"}]
    }

    args = {}
    args['class_name'] = self.class_name
    args['docs'] = [sample_doc]
    response = generic.post(**args)
    assert response['status'] == 200

    data = response['response']
    got_docs = data['docs']
    assert data['total_inserted'] == 1

    doc = data['docs'][0]['doc']
    id = data['docs'][0]['id']
    printIndentedString("INSERTED OBJECT_ID: " + id, 0)
    assert doc['fNam'] == sample_doc['fNam']
def fetch_extra_video_data(self, results):
    """
    Method to fetch extra video data.

    Makes an extra call to the YouTube API to get extra data about each
    video in a result set. This is because the Playlist and Channel
    YouTube API endpoints don't return all video related data - duration
    and geo-location for example.
    """
    updated_results = []
    for result in results.get('items', []):
        extra = {}
        extra_data = self.youtube.videos().list(
            part='snippet,contentDetails,recordingDetails',
            id=result['contentDetails']['videoId'],
            fields='items(snippet(channelTitle),contentDetails(duration),'
                   'recordingDetails)').execute()

        # check we only have one result and then assign that result
        # to a local variable.
        assert len(extra_data.get('items', [])) == 1
        item = extra_data.get('items')[0]

        # parse the duration
        timedelta = isodate.parse_duration(item['contentDetails']['duration'])
        extra['duration'] = int(timedelta.total_seconds())

        # parse the geo-location data
        extra['geo'] = extra['recorded_date'] = None
        if 'recordingDetails' in item:
            if 'recordingDate' in item['recordingDetails']:
                rec_time = item['recordingDetails']['recordingDate']
                extra['recorded_date'] = isodate.parse_datetime(rec_time)
            if 'location' in item['recordingDetails']:
                loc = item['recordingDetails']['location']
                extra['geo'] = '%f %f' % (loc['latitude'], loc['longitude'])

        result['extra'] = extra
        updated_results.append(result)
    return updated_results
def get_verbose_status_of_marathon_app(app):
    """Takes a given marathon app object and returns the verbose details
    about the tasks, times, hosts, etc"""
    output = []
    create_datetime = datetime_from_utc_to_local(isodate.parse_datetime(app.version))
    output.append(" Marathon app ID: %s" % PaastaColors.bold(app.id))
    output.append(" App created: %s (%s)" % (str(create_datetime), humanize.naturaltime(create_datetime)))
    output.append(" Tasks:")

    rows = [("Mesos Task ID", "Host deployed to", "Deployed at what localtime", "Health")]
    for task in app.tasks:
        local_deployed_datetime = datetime_from_utc_to_local(task.staged_at)
        if task.host is not None:
            hostname = "%s:%s" % (task.host.split(".")[0], task.ports[0])
        else:
            hostname = "Unknown"

        if not task.health_check_results:
            health_check_status = PaastaColors.grey("N/A")
        elif marathon_tools.is_task_healthy(task):
            health_check_status = PaastaColors.green("Healthy")
        else:
            health_check_status = PaastaColors.red("Unhealthy")

        rows.append((
            get_short_task_id(task.id),
            hostname,
            '%s (%s)' % (
                local_deployed_datetime.strftime("%Y-%m-%dT%H:%M"),
                humanize.naturaltime(local_deployed_datetime),
            ),
            health_check_status,
        ))
    output.append('\n'.join([" %s" % line for line in format_table(rows)]))
    if len(app.tasks) == 0:
        output.append(" No tasks associated with this marathon app")
    return app.tasks, "\n".join(output)
def _parse(self, s: str):
    # Cast to data type, reverse of byte casting in paho.mqtt.client.Client.publish()
    if self.data_type == "integer":
        value = int(s)
    elif self.data_type == "float":
        value = float(s)
    elif self.data_type == "boolean":
        value = s == "true"
    elif self.data_type == "string":
        value = s
    elif self.data_type == "enum":
        value = s
    elif self.data_type == "color":
        value = s
    elif self.data_type == "datetime":
        value = isodate.parse_datetime(s)
    elif self.data_type == "duration":
        value = isodate.parse_duration(s)
    else:
        # Non-standard
        value = s.encode()
    return value
async def _fetch_forecasts(self) -> bool:
    """Fetch the forecasts for this rooftop site."""
    resp = await self.request_data(
        f"/rooftop_sites/{self._resource_id}/forecasts",
        ssl=not self._disable_ssl)

    self._update_API_call_sensor()

    if resp is False:
        return False

    f = []
    for forecast in resp.get("forecasts"):
        # Convert period_end and period. All other fields should already be the correct type.
        forecast["period_end"] = parse_datetime(forecast["period_end"])
        forecast["period"] = parse_duration(forecast["period"])
        forecast["period_start"] = forecast["period_end"] - forecast["period"]
        f.append(forecast)

    self._forecasts = f
    return True
def deserialize_data(o):
    if isinstance(o, dict) and set(o.keys()) == {'@type', '@value'}:
        o_type, o_value = o['@type'], o['@value']
        if o_type == 'uuid':
            return uuid.UUID(o_value)
        elif o_type == 'decimal':
            return decimal.Decimal(o_value)
        elif o_type == 'timedelta':
            return isodate.parse_duration(o_value)
        elif o_type == 'time':
            return isodate.parse_time(o_value)
        elif o_type == 'date':
            return isodate.parse_date(o_value)
        elif o_type == 'datetime':
            return isodate.parse_datetime(o_value)
    elif isinstance(o, dict):
        return {k: deserialize_data(v) for k, v in o.items()}
    elif isinstance(o, list):
        return [deserialize_data(v) for v in o]
    else:
        return o
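# Illustrative round-trip for the tagged-value convention above; the
# {'@type': ..., '@value': ...} envelope comes from the snippet itself.
import isodate

payload = {'created': {'@type': 'datetime', '@value': '2020-01-01T12:00:00Z'},
           'retry': {'@type': 'timedelta', '@value': 'PT30S'}}
decoded = deserialize_data(payload)
assert decoded['created'] == isodate.parse_datetime('2020-01-01T12:00:00Z')
assert decoded['retry'].total_seconds() == 30.0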
def chronos_log_line_passes_filter(line, levels, service, components, clusters,
                                   start_time=None, end_time=None):
    """Given a (JSON-formatted) log line where the message is a Chronos log line,
    return True if the line should be displayed given the provided service; return
    False otherwise."""
    try:
        parsed_line = json.loads(line)
    except ValueError:
        log.debug('Trouble parsing line as json. Skipping. Line: %r' % line)
        return False

    timestamp = isodate.parse_datetime(parsed_line.get('timestamp'))
    if not check_timestamp_in_range(timestamp, start_time, end_time):
        return False
    return chronos_tools.compose_job_id(service, '') in parsed_line.get('message', '')
def userformdata_decode(string):
    if not isinstance(string, six.string_types):
        return string
    if string.startswith(u'__date__@'):
        return isodate.parse_date(string[9:])
    if string.startswith(u'__datetime__@'):
        return isodate.parse_datetime(string[13:])
    if string.startswith(u'__time__@'):
        return isodate.parse_time(string[9:])
    if string.startswith(u'__richtext__@'):
        data = json.loads(base64.b64decode(string[13:]))
        return RichTextValue(raw=data['raw'],
                             mimeType=data['mime'],
                             outputMimeType=data['output_mime'],
                             encoding=data['encoding'])
    return string
def test_update_videos(self):
    """
    Test :func:`greenday_core.youtube_client.YouTubeClient.update_videos
    <greenday_core.youtube_client.YouTubeClient.update_videos>`
    updates YouTubeVideo objects with updated data
    """
    video_1 = milkman.deliver(YouTubeVideo, youtube_id='YxgsxaFWWHQ')
    video_2 = milkman.deliver(YouTubeVideo, youtube_id='O37yJBFRrfg')

    client = YouTubeClient()
    client.update_videos((video_1, video_2,))

    for vid in (video_1, video_2,):
        expected_data = YT_RAW_DATA[vid.youtube_id]

        self.assertEqual(expected_data['snippet']['title'], vid.name)
        self.assertEqual(
            float(expected_data['recordingDetails']['location']['latitude']),
            vid.latitude)
        self.assertEqual(
            float(expected_data['recordingDetails']['location']['longitude']),
            vid.longitude)
        self.assertEqual(expected_data['snippet']['description'], vid.notes)
        self.assertEqual(
            isodate.parse_datetime(expected_data['snippet']['publishedAt']),
            vid.publish_date)
        self.assertEqual(expected_data['snippet']['channelId'], vid.channel_id)
        self.assertEqual(expected_data['snippet']['channelTitle'], vid.channel_name)
        self.assertEqual(
            isodate.parse_duration(
                expected_data['contentDetails']['duration']).total_seconds(),
            vid.duration)
def migrate_contract_cancelled(self):
    auction = self.db.get(self.auction_id)
    now = get_now()

    pending_verification_award = award_fixture(auction, 'pending.verification', 0)
    unsuccessful_award = award_fixture(auction, 'unsuccessful', 1)

    auction['awards'] = [unsuccessful_award, pending_verification_award]
    auction['contracts'] = [{
        'awardID': unsuccessful_award['id'],
        'suppliers': unsuccessful_award['suppliers'],
        'value': unsuccessful_award['value'],
        'date': now.isoformat(),
        'items': auction['items'],
        'contractID': '{}-11'.format(auction['auctionID']),
        'status': 'cancelled'
    }]
    auction.update(auction)
    self.db.save(auction)

    self.migrate_data(self.app.app.registry)

    response = self.app.get('/auctions/{}'.format(self.auction_id))
    auction = response.json['data']
    self.assertEqual(auction['status'], u'active.qualification')
    self.assertEqual(auction['awards'][1]['status'], u'pending')

    response = self.app.get('/auctions/{}/contracts'.format(self.auction_id))
    contracts = response.json['data']
    self.assertEqual(len(contracts), 1)
    self.assertEqual(contracts[0]['status'], 'cancelled')

    signing_period_end_date = set_specific_hour(
        parse_datetime(unsuccessful_award['signingPeriod']['endDate']),
        CONTRACT_SIGNING_PERIOD_END_DATE_HOUR)
    unsuccessful_award['signingPeriod']['endDate'] = signing_period_end_date.isoformat()
    self.assertEqual(contracts[0]['signingPeriod'], unsuccessful_award['signingPeriod'])
def __init__(self, cidrBlock: str = None, comment: str = None, ipAddress: str = None,
             links: list = None, last_used: str = None, count: int = None,
             last_used_address: str = None):
    """
    For a single whitelist entry. Contains a bit of helper intelligence
    for IP addresses.

    :param cidrBlock:
    :param comment:
    :param ipAddress:
    :param links:
    """
    self.last_used_address: Optional[IPv4Address] = None
    try:
        self.last_used_address = IPv4Address(last_used_address)
    except Exception:
        logging.warning('No last used address')
    self.count: Optional[int] = count
    self.last_used: Optional[datetime] = None
    try:
        self.last_used = parse_datetime(last_used)
    except Exception:
        logging.warning('Could not get last used date.')
    self.links = links
    self.ipAddress = ipAddress
    self.comment = comment
    self.cidrBlock = cidrBlock
    try:
        self.cidrBlockObj: IPv4Network = IPv4Network(self.cidrBlock)
    except Exception:
        self.cidrBlockObj = None
    try:
        self.ipAddressObj: IPv4Address = IPv4Address(self.ipAddress)
    except Exception:
        self.ipAddressObj = None
def verify_prices_in_db(post_message, values, db, swapped_sign: bool = False):
    """Util method to verify that price data ended up in the database."""
    start = parse_datetime(post_message["start"])
    end = start + parse_duration(post_message["duration"])
    horizon = parse_duration(post_message["horizon"])
    sensor = SensorField("market", "fm0").deserialize(post_message["market"])
    resolution = sensor.event_resolution

    query = (
        db.session.query(TimedBelief.event_value, TimedBelief.belief_horizon)
        .filter((TimedBelief.event_start > start - resolution) & (TimedBelief.event_start < end))
        .filter(TimedBelief.belief_horizon == horizon - (end - (TimedBelief.event_start + resolution)))
        .join(Sensor)
        .filter(TimedBelief.sensor_id == Sensor.id)
        .filter(Sensor.name == sensor.name)
    )
    df = pd.DataFrame(query.all(), columns=[col["name"] for col in query.column_descriptions])
    if swapped_sign:
        df["event_value"] = -df["event_value"]
    assert df["event_value"].tolist() == values
def job_is_stuck(last_run_iso_time, interval_in_seconds, client, job_name):
    """Considers that the job is stuck when it hasn't run on time

    :param last_run_iso_time: ISO date and time of the last job run as a string
    :param interval_in_seconds: the job interval in seconds
    :param client: configured Chronos client
    :param job_name: Chronos job name
    :returns: True or False
    """
    if last_run_iso_time is None or interval_in_seconds is None:
        return False
    dt_next_run = isodate.parse_datetime(last_run_iso_time) + timedelta(seconds=interval_in_seconds)
    dt_now_utc = datetime.now(pytz.utc)
    if dt_next_run >= dt_now_utc:
        return False
    try:
        expected_runtime = min(int(client.job_stat(job_name)['histogram']['99thPercentile']),
                               interval_in_seconds)
    except KeyError:
        log.debug("Can't get 99thPercentile for %s. "
                  "Assuming a runtime of %d seconds." % (job_name, interval_in_seconds))
        expected_runtime = interval_in_seconds
    return (dt_next_run + timedelta(seconds=expected_runtime) < dt_now_utc)
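# Walk-through of the arithmetic above with made-up values: a last run at
# 10:00:00Z and a 3600s interval put the next run at 11:00:00Z; with an
# expected runtime of 600s the job only counts as stuck after 11:10:00Z.
from datetime import timedelta
import isodate

last_run = isodate.parse_datetime("2016-06-07T10:00:00Z")
dt_next_run = last_run + timedelta(seconds=3600)
assert dt_next_run + timedelta(seconds=600) == isodate.parse_datetime("2016-06-07T11:10:00Z")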
def submission_date(short_name, student):
    global gradebook
    assignment = assignment_given_short_name(short_name)
    if assignment:
        assignment_id = assignment['id']
    else:
        print("No such assignment named {0}".format(short_name))
        return None
    if gradebook.get(student, False):
        students_assignments = gradebook[student].get('assignments', False)
        if not students_assignments:
            return None  # student has no assignments
        this_assignment = students_assignments.get(assignment_id, False)
        if this_assignment:
            td = this_assignment.get('submitted_at', None)
            if td:
                return isodate.parse_datetime(td)
    return None
def view_file(request, name, path, r, info):
    rc = request.rc
    ul = os.path.join(settings.REPOS_ROOT, name + '/' + path)
    if request.GET.get('orig', None) is not None:
        return redirect(ul)

    rc.REPOS = get_repos_base(request, name)
    rc.mtime = parse_datetime(str(info.entry.commit.date))
    rc.author = get_author(getattr(info.entry.commit, 'author', ''))
    fsize = int(str(getattr(info.entry, 'size', '0')))
    rc.rev = info.entry.commit['revision']

    ft = guess_type(path)[0]
    fname = os.path.split(path)[1]
    if no_preview_file(fname):
        rc.srcurl = ul
    elif ft is not None and ft.startswith('image'):
        rc.imgurl = ul
    elif fsize / (1024.0 * 1024.0) >= 1.0:
        # file too big
        rc.srcurl = ul
    else:
        is_binary = False
        try:
            content = svn.CAT(r)
            is_binary = is_binary_string(content[0:128])
        except:
            is_binary = True
        if is_binary:
            rc.srcurl = ul
        else:
            rc.mimetype = get_ext_class(fname)
            rc.content = mark_highlight(content, fname)
    return send_response(request, 'repos/view_file.html')
def get_deployed_files(self, vfs_basepath='site/wwwroot/'):
    """Return a list of deployed files in this Webapp as a generator.

    This method also:
    - Parses mtime and crtime as datetime
    - Adds a urlpath key with a path fragment for a future URL callable
    - Normalizes keys using normcase for future comparison with the system.
    """
    # Kudu accepts a // ended path. Let's keep things simple and always append an ending slash.
    kudu_folderpath = 'vfs/{}/'.format(vfs_basepath)
    for element in self.get(kudu_folderpath).json():
        if element['mime'] == "inode/directory":
            for subelement, submeta in self.get_deployed_files(vfs_basepath + element['name'] + '/'):
                submeta['urlpath'] = "/".join([element['name'], submeta['urlpath']])
                yield os.path.normcase(submeta['urlpath']), submeta
        else:
            for key in ["mtime", "crtime"]:
                element[key] = parse_datetime(element[key])
            element['urlpath'] = element['name']
            yield os.path.normcase(element['name']), element
def is_invited(self, user_id):
    if (self.table() is None) or (self.item is None):
        return  # is this handled well?
    if not self.item.get('invited', None):
        return False
    try:
        expires = isodate.parse_datetime(self.item['expires_on'])
    except:
        self.log_info("Error parsing invite code expiry date: " +
                      str(self.item['invite_id']) + str(self.item['expires_on']))
        return False
    if expires < datetime.datetime.now(pytz.utc):
        # This invite code has expired, and hence invalid
        return False
    if self.item['invited'] == '*':
        # Anyone is allowed
        return True
    ids = map(str.strip, self.item['invited'].split(","))
    return user_id in ids
def __init__(self, jsonval=None):
    self.date = None
    if jsonval is not None:
        isstr = isinstance(jsonval, str)
        if not isstr and sys.version_info[0] < 3:
            # Python 2.x has 'str' and 'unicode'
            isstr = isinstance(jsonval, basestring)
        if not isstr:
            raise TypeError(
                "Expecting string when initializing {}, but got {}".format(
                    type(self), type(jsonval)))
        try:
            if "T" in jsonval:
                self.date = isodate.parse_datetime(jsonval)
            else:
                self.date = isodate.parse_date(jsonval)
        except Exception as e:
            logging.warning(
                'Failed to initialize FHIRDate from "{}": {}'.format(jsonval, e))
    self.origval = jsonval
def datetime_u(s):
    fmt = "%Y-%m-%dT%H:%M:%S"
    try:
        return _strptime(s, fmt)
    except ValueError:
        try:
            # strip utc offset
            if s[-3] == ":" and s[-6] in (' ', '-', '+'):
                try:
                    import iso8601
                    return iso8601.parse_date(s)
                except ImportError:
                    pass
                try:
                    import isodate
                    return isodate.parse_datetime(s)
                except ImportError:
                    pass
                try:
                    import dateutil.parser
                    return dateutil.parser.parse(s)
                except ImportError:
                    pass
                warnings.warn('removing unsupported UTC offset. Install `iso8601`, '
                              '`isodate` or `python-dateutil` package to support it',
                              RuntimeWarning)
                s = s[:-6]
            # parse microseconds
            try:
                return _strptime(s, fmt + ".%f")
            except:
                return _strptime(s, fmt)
        except ValueError:
            # strip microseconds (not supported on this platform)
            if "." in s:
                warnings.warn('removing unsupported microseconds', RuntimeWarning)
                s = s[:s.index(".")]
            return _strptime(s, fmt)
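# Behavior sketch (illustrative): with isodate available, an offset-bearing
# timestamp parses timezone-aware; without any of the helper packages, the
# function above strips the offset and returns a naive datetime instead.
import isodate

dt = isodate.parse_datetime('2012-09-27T21:43:33+02:00')
assert dt.utcoffset().total_seconds() == 7200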
def getEventsData():
    events_list = getBucketEvents('aw-watcher-window_Andress-MacBook-Pro.local')
    data = list()
    categories = getCategories()
    for event in events_list:
        data.append(event['data'])
        data[-1]['timestamp'] = event['timestamp']
        data[-1]['duration'] = float(event['duration'])
        data[-1]['endstamp'] = isodate.parse_datetime(
            data[-1]['timestamp']) + relativedelta(seconds=+data[-1]['duration'])
        categorized = False
        for category in categories:
            if inCategory(event, categories[category]):
                data[-1]['category'] = category
                categorized = True
        if not categorized:
            data[-1]['category'] = 'Uncategorized'
    return data
def get_video_statistics(youtube, video_id):
    result = {}
    res = youtube.videos().list(
        id=video_id,
        part="snippet,statistics,status,contentDetails",
    ).execute()
    if not res['items'][0].get('statistics'):
        return
    result['videoId'] = video_id
    result['title'] = res['items'][0]['snippet'].get('title', '')
    result['description'] = res['items'][0]['snippet'].get('description', '')
    result['thumbnails'] = res['items'][0]['snippet'].get('thumbnails', '')
    result['approved'] = res['items'][0]['contentDetails'].get('licensedContent', '')
    result['license'] = res['items'][0]['status'].get('license', '')
    result['publishedAt'] = isodate.parse_datetime(
        res['items'][0]['snippet'].get('publishedAt', '')).timestamp()
    result['tags'] = res['items'][0]['snippet'].get('tags', '')  # May be absent (example video: XODqm66ooMQ)
    result['categoryId'] = res['items'][0]['snippet'].get('categoryId', '')
    result['likeCount'] = int(res['items'][0]['statistics'].get('likeCount', -1))
    result['dislikeCount'] = int(res['items'][0]['statistics'].get('dislikeCount', -1))
    if float(result['likeCount']) + float(result['dislikeCount']) != 0:
        result['likeRate'] = float(result['likeCount']) / (
            float(result['likeCount']) + float(result['dislikeCount']))
    else:
        result['likeRate'] = 0
    result['viewCount'] = int(res['items'][0]['statistics'].get('viewCount', -1))
    result['duration'] = isodate.parse_duration(
        res['items'][0]['contentDetails'].get('duration', '2010-08-18 08:15:30Z')).seconds
    result['videoUrl'] = 'https://www.youtube.com/watch?v=' + video_id
    return result
def youtubevideo(bot, args, sender, source):
    """Ran whenever a YouTube video is sent"""
    if not dave.config.redis.exists("youtube:{}".format(args[0])):
        req = get("{}&id={}".format(BASE_URL, args[0]),
                  headers={'user-agent': 'irc bot (https://github.com/w4)'})

        if req.status_code != 200:
            bot.msg(source, "Bad response from YouTube API: {}".format(req.status_code))
            return

        req = req.json()

        if not req["pageInfo"]["totalResults"]:
            bot.msg(source, "That video doesn't exist.")
            return

        dave.config.redis.setex("youtube:{}".format(args[0]), 400, pickle.dumps(req))
    else:
        req = pickle.loads(dave.config.redis.get("youtube:{}".format(args[0])))

    resp = req["items"][0]
    bot.msg(source, assembleFormattedText(A.normal[
        A.bold[resp["snippet"]["title"]],
        " ({}) by {} uploaded {}. {} views, +{}/-{}.".format(
            str(isodate.parse_duration(resp["contentDetails"]["duration"])),
            resp["snippet"]["channelTitle"],
            naturaltime(datetime.now(timezone.utc) -
                        isodate.parse_datetime(resp["snippet"]["publishedAt"])),
            intcomma(resp["statistics"]["viewCount"]),
            intcomma(resp["statistics"]["likeCount"]),
            intcomma(resp["statistics"]["dislikeCount"]))
    ]))
def get_session_cookie(self):
    try:
        jbox_cookie = self.get_cookie(JBoxHandler.AUTH_COOKIE)
        if jbox_cookie is None:
            return None
        jbox_cookie = json.loads(base64.b64decode(jbox_cookie))
        sign = signstr(jbox_cookie['u'] + jbox_cookie['t'], JBoxHandler._config['sesskey'])
        if sign != jbox_cookie['x']:
            self.log_info("signature mismatch for " + jbox_cookie['u'])
            return None

        d = isodate.parse_datetime(jbox_cookie['t'])
        age = (datetime.datetime.now(pytz.utc) - d).total_seconds()
        if age > JBoxHandler.AUTH_VALID_SECS:
            self.log_info("cookie older than allowed days: " + jbox_cookie['t'])
            return None
        return jbox_cookie
    except:
        self.log_error("exception while reading cookie")
        traceback.print_exc()
        return None
async def _fetch_estimated_actuals(self) -> bool:
    """Fetch the estimated (historical) actual values for this rooftop site."""
    resp = await self.request_data(
        f"/rooftop_sites/{self._resource_id}/estimated_actuals",
        ssl=not self._disable_ssl)

    self._update_API_call_sensor()

    if resp is False:
        return False

    a = []
    for estimated_actual in resp.get("estimated_actuals"):
        # Convert period_end and period. All other fields should already be the correct type.
        estimated_actual["period_end"] = parse_datetime(estimated_actual["period_end"])
        estimated_actual["period"] = parse_duration(estimated_actual["period"])
        estimated_actual["period_start"] = (
            estimated_actual["period_end"] - estimated_actual["period"])
        a.append(estimated_actual)

    self._estimated_actuals = a
    return True
def paasta_app_output_passes_filter(line, levels, service, components, clusters,
                                    instances, start_time=None, end_time=None):
    try:
        parsed_line = json.loads(line)
    except ValueError:
        log.debug('Trouble parsing line as json. Skipping. Line: %r' % line)
        return False

    timestamp = isodate.parse_datetime(parsed_line.get('timestamp'))
    if not check_timestamp_in_range(timestamp, start_time, end_time):
        return False
    return (parsed_line.get('component') in components and
            (parsed_line.get('cluster') in clusters or
             parsed_line.get('cluster') == ANY_CLUSTER) and
            (instances is None or parsed_line.get('instance') in instances))