def _assert_user_dates(self, response, now, is_default_user=False):
    func = self.assertGreater if is_default_user else self.assertLess
    func(now, dt_parse(response.approval_status_mod_time))
    func(now, dt_parse(response.password_last_set))
    func(now, dt_parse(response.sign_up_time))
    self.assertLess(now, dt_parse(response.password_expiry))

def _r2ts(row):
    try:
        time_stamp = datetime.combine(
            getattr(row, date_field).to_pydatetime(),
            getattr(row, time_field))
    except Exception:
        try:
            if isinstance(row[1], datetime):
                ts_str = str(row[1].date()) + 'T' + str(row[2])
            elif isinstance(row[1], str):
                date_str = row[1].strip()
                # Try to guess whether the date string has the day first
                # (usually the case in Norway)
                dayfirst = bool(re.match(r'^\d{1,2}(\.|\-|\/)', date_str))
                try:
                    c_date = dt_parse(date_str, dayfirst=dayfirst)
                except Exception:
                    c_date = dt_parse(date_str.split()[0], dayfirst=dayfirst)
                c_date = str(c_date.date())
                # ts_str = row[1] + 'T' + str(row[2])
                ts_str = c_date + 'T' + str(row[2])
            else:
                raise TypeError
            time_stamp = dt_parse(ts_str)
        except Exception:
            time_stamp = None
    return time_stamp

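# A minimal sketch (not from the source) of the dayfirst heuristic _r2ts relies
# on: dateutil assumes month-first by default, so day-first strings common in
# Norway need dayfirst=True to parse correctly.
from dateutil.parser import parse as dt_parse

assert dt_parse('13.02.2020', dayfirst=True).day == 13    # unambiguous day-first
assert dt_parse('02.03.2020', dayfirst=True).month == 3   # day 2, March
assert dt_parse('02.03.2020', dayfirst=False).month == 2  # day 3, February
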
def get_summary(self):
    result = {"name": self.name, "id": self.pk, "documents": {}}
    years = {}
    for d in self.declarations.exclude(doc_type="Форма змін").exclude(
            exclude=True):
        if d.year in years:
            if dt_parse(d.source["infocard"]["created_date"]) > dt_parse(
                    years[d.year].source["infocard"]["created_date"]):
                years[d.year] = d
        else:
            years[d.year] = d
    for k in sorted(years.keys()):
        result["documents"][k] = {
            "aggregated_data": years[k].source["aggregated_data"],
            "flags": self.get_flags(years[k].source["aggregated_data"]),
            "year": k,
            "infocard": years[k].source["infocard"],
        }
    if years:
        result["min_year"] = min(years.keys())
        result["max_year"] = max(years.keys())
    return result

def parse(file_name):
    """
    Takes in an HTML file generated from the MessageSaver chrome extension
    and parses the results into a list of messages, with date/time and name
    metadata.
    """
    # Remove header data and standardize line break tags.
    # (str.strip removes a *character set*, not a prefix, so replace is used.)
    file = open(file_name).read()
    file = file.replace("<head><meta charset=\"UTF-8\"></head><body>", "")
    file = file.replace("<br />", "</br>")
    lines = iter(file.split("</br>"))
    messages = []
    # Default date if date isn't supplied in message
    date = datetime.now()
    name = ""
    while True:
        try:
            line = next(lines)
        except StopIteration:
            break
        try:
            date = dt_parse(line)
            assert dt_parse("2012") < date < datetime.now()
            name = next(lines)
        except Exception:
            if line != "":
                messages.append(Message(date, name, line))
    return messages

def __init__(self,
             name: Optional[str] = None,
             instanceId: Optional[str] = None,
             createdTime: Optional[str] = None,
             lastUpdatedTime: Optional[str] = None,
             input: Optional[Any] = None,
             output: Optional[Any] = None,
             runtimeStatus: Optional[OrchestrationRuntimeStatus] = None,
             customStatus: Optional[Any] = None,
             history: Optional[List[Any]] = None,
             **kwargs):
    self._name: Optional[str] = name
    self._instance_id: Optional[str] = instanceId
    self._created_time: Optional[datetime] = \
        dt_parse(createdTime) if createdTime is not None else None
    self._last_updated_time: Optional[datetime] = \
        dt_parse(lastUpdatedTime) if lastUpdatedTime is not None else None
    self._input: Any = input
    self._output: Any = output
    self._runtime_status: Optional[OrchestrationRuntimeStatus] = runtimeStatus
    if runtimeStatus is not None:
        self._runtime_status = OrchestrationRuntimeStatus(runtimeStatus)
    self._custom_status: Any = customStatus
    self._history: Optional[List[Any]] = history
    if kwargs is not None:
        for key, value in kwargs.items():
            self.__setattr__(key, value)

def test_post_new_order(self):
    data = {
        'trader': 'John Doe',
        'order_type': 'market',
        'action': 'buy',
        'price': 10.0,
        "expiry": "2016-09-30T13:05:23.407",
    }
    response = self.client.post('/store_ns/orders', data=json.dumps(data),
                                content_type='application/json')
    self.assertEqual(201, response.status_code)
    self.assertIn('Location', response.headers)
    # get the order and verify the content
    order_location = response.headers['Location']
    response = self.client.get(order_location,
                               content_type='application/json')
    self.assertEqual(200, response.status_code)
    order = json.loads(response.data)
    for key, value in data.items():
        if key == 'expiry':
            self.assertEqual(dt_parse(value), dt_parse(order[key]))
        else:
            self.assertEqual(value, order[key])

def __init__(self, *args, **kwargs):
    super(Quote, self).__init__(*args, **kwargs)
    if "process" not in self:
        raise TypeError("Field 'Quote.process' is required")
    if not isinstance(self.get("process"), str):
        raise ValueError("Field 'Quote.process' must be a string.")
    if "user" not in self:
        raise TypeError("Field 'Quote.user' is required")
    if not isinstance(self.get("user"), str):
        raise ValueError("Field 'Quote.user' must be a string.")
    if "price" not in self:
        raise TypeError("Field 'Quote.price' is required")
    if not isinstance(self.get("price"), float):
        raise ValueError("Field 'Quote.price' must be a float number.")
    if "currency" not in self:
        raise TypeError("Field 'Quote.currency' is required")
    if not isinstance(self.get("currency"), str) or len(self.get("currency")) != 3:
        raise ValueError("Field 'Quote.currency' must be an ISO-4217 currency string code.")
    if "created" not in self:
        self["created"] = now()
    try:
        self["created"] = dt_parse(str(self.get("created"))).isoformat()
    except ValueError:
        raise ValueError("Field 'Quote.created' must be an ISO-8601 datetime string.")
    if "expire" not in self:
        self["expire"] = now() + timedelta(days=1)
    try:
        self["expire"] = dt_parse(str(self.get("expire"))).isoformat()
    except ValueError:
        raise ValueError("Field 'Quote.expire' must be an ISO-8601 datetime string.")
    if "id" not in self:
        self["id"] = str(uuid.uuid4())

def handle(self, *args, **options):
    end_date = dt_parse(options['end_date'][0]) \
        if options['end_date'] \
        else dt.datetime.utcnow()
    start_date = dt_parse(options['start_date'][0]) \
        if options['start_date'] \
        else end_date - dt.timedelta(minutes=5)
    granularity = options['granularity'][0] if options['granularity'] else 60
    max_failures = options['max_failures'][0] if options['max_failures'] else 10
    product_list = options['product'] \
        if options['product'] \
        else utils.PRODUCT_LIST
    log_level = options['log_level'][0] if options['log_level'] else 'INFO'
    assert end_date > start_date
    qd = QuoteDownloader(log_level)
    qd.run(start_dt=start_date,
           end_dt=end_date,
           granularity=granularity,
           max_failures=max_failures,
           product_list=product_list)

def test_all_the_args():
    orchestration_started = get_event(HistoryEventType.ORCHESTRATOR_STARTED)
    execution_started = get_event(HistoryEventType.EXECUTION_STARTED)
    history = [orchestration_started, execution_started]
    response = dict(name=TEST_NAME,
                    instanceId=TEST_INSTANCE_ID,
                    createdTime=TEST_CREATED_TIME,
                    lastUpdatedTime=TEST_LAST_UPDATED_TIME,
                    input=TEST_INPUT,
                    output=TEST_OUTPUT,
                    runtimeStatus=TEST_RUNTIME_STATUS,
                    customStatus=TEST_CUSTOM_STATUS,
                    history=history)
    result = DurableOrchestrationStatus.from_json(response)
    assert result.runtime_status.name == TEST_RUNTIME_STATUS
    assert result.custom_status == TEST_CUSTOM_STATUS
    assert result.instance_id == TEST_INSTANCE_ID
    assert result.output == TEST_OUTPUT
    assert result.created_time == dt_parse(TEST_CREATED_TIME)
    assert result.last_updated_time == dt_parse(TEST_LAST_UPDATED_TIME)
    assert result.input_ == TEST_INPUT
    assert result.name == TEST_NAME
    assert result.history[0]['EventType'] == HistoryEventType.ORCHESTRATOR_STARTED
    assert result.history[1]['EventType'] == HistoryEventType.EXECUTION_STARTED

def dicom_best_dt(tags: typing.Dict,
                  level=DLv.INSTANCE,
                  strict=False,       # Raise if unable to parse dt for requested level
                  allow_no_dt=False   # Raise if also unable to parse dt at _any_ level
                  ):
    logger = logging.getLogger("dcm_best_dt")
    if level == DLv.INSTANCE:
        if tags.get('InstanceCreationTime') and tags.get("InstanceCreationDate"):
            s = f"{tags.get('InstanceCreationTime')} {tags.get('InstanceCreationDate')}"
            return dt_parse(s)
        elif strict:
            raise ValueError
        else:
            return dicom_best_dt(tags, level=DLv.SERIES)
    elif level == DLv.SERIES:
        if tags.get('SeriesTime') and tags.get("SeriesDate"):
            s = f"{tags.get('SeriesTime')} {tags.get('SeriesDate')}"
            return dt_parse(s)
        elif strict:
            raise ValueError
        else:
            return dicom_best_dt(tags, level=DLv.STUDY)
    elif level == DLv.STUDY:
        if tags.get('StudyTime') and tags.get("StudyDate"):
            s = f"{tags.get('StudyTime')} {tags.get('StudyDate')}"
            return dt_parse(s)
        elif strict:
            raise ValueError
        elif allow_no_dt:
            logger.warning("Using dt can create unstable meta hashes")
            return datetime.now()
    else:
        raise ValueError(f"Bad dt level request ({level})")

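# Hedged usage sketch for dicom_best_dt above, with hypothetical tag values:
# when the instance-level tags are absent, the default (non-strict) call falls
# through INSTANCE -> SERIES and parses the series date/time pair instead.
example_tags = {
    'SeriesTime': '10:15:30',    # hypothetical values; real DICOM uses HHMMSS
    'SeriesDate': '2021-04-21',
}
# dicom_best_dt(example_tags) would recurse to the SERIES level and return
# dt_parse('10:15:30 2021-04-21'), i.e. datetime(2021, 4, 21, 10, 15, 30).
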
def get_date(self):
    """rough match of a date, then pass to dateutil's magic abilities"""
    DATE_XPATHS = (
        """//li/span[@class="byline_label"]"""
        """/following-sibling::span/@title""",
    )  # tynan.com
    if self.HTML_p is not None:
        info("checking date xpaths")
        for path in DATE_XPATHS:
            info(f"trying = '{path}'")
            xpath_result = self.HTML_p.xpath(path)
            if xpath_result:
                info(f"'{xpath_result=}'; '{path=}'")
                date = dt_parse(xpath_result[0]).strftime("%Y%m%d")
                info(f"date = '{date}'; xpath = '{path}'")
                if date != "":
                    return date
                else:
                    continue
    date_regexp = r"(\d+,? )?(%s)\w*(,? \d+)?(,? \d+)" % MONTHS
    try:
        dmatch = re.search(date_regexp, self.text, re.IGNORECASE)
        return dt_parse(dmatch.group(0)).strftime("%Y%m%d")
    except (AttributeError, TypeError, ValueError):
        date = time.strftime("%Y%m%d", NOW)
        info(f"making date NOW = {date}")
        return date

def fix_date(build_dict):
    return dict(
        build_id=build_dict["build_id"],
        status=build_dict["status"],
        created_on=dt_parse(build_dict["created_on"]),
        updated_on=dt_parse(build_dict["updated_on"])
    )

def test_lt_compares_alerts_based_on_start_date():
    alert_dict_1 = create_alert_dict()
    alert_dict_2 = create_alert_dict()
    alert_dict_1['starts_at'] = dt_parse('2021-04-21T11:30:00Z')
    alert_dict_2['starts_at'] = dt_parse('2021-04-21T12:30:00Z')
    assert Alert(alert_dict_1) < Alert(alert_dict_2)

def test_AlertDate_properties():
    sample_datetime = dt_parse('2021-03-02T10:30:00Z')
    alerts_date = AlertDate(sample_datetime)
    assert alerts_date.as_lang == 'at 10:30am on Tuesday 2 March 2021'
    assert alerts_date.as_iso8601 == '2021-03-02T10:30:00+00:00'
    assert alerts_date.as_utc_datetime == dt_parse('2021-03-02T10:30:00Z')
    assert alerts_date.as_local_datetime == dt_parse('2021-03-02T10:30:00Z')
    assert alerts_date.as_url == '2-mar-2021'

def value_from_datadict(self, data, files, name):
    values = [widget.value_from_datadict(data, files, "{}_{}".format(name, i))
              for i, widget in enumerate(self.widgets)]
    try:
        def toIntOrNone(value):
            return int(value) if value else None
        dtstart = dt_parse(values[0]) if values[0] else None
        freq = toIntOrNone(values[1])
        interval = toIntOrNone(values[2]) or None
        #count = toIntOrNone(values[4]) or None
        dtuntil = dt_parse(values[5]) if values[5] else None
        ordChoices = [toIntOrNone(values[6]), toIntOrNone(values[8]),
                      toIntOrNone(values[10])]
        dayChoices = [toIntOrNone(values[7]), toIntOrNone(values[9]),
                      toIntOrNone(values[11])]
        wdayChoices = []
        mdayChoices = None
        monChoices = []
        if freq == WEEKLY:
            if values[3]:
                wdayChoices = [int(day) for day in values[3]]
        elif freq in (MONTHLY, YEARLY):
            if dayChoices[0] == DAY_OF_MONTH:
                # day of the month
                if ordChoices[0] == EVERY_DAY:    # every day, == daily
                    wdayChoices = range(7)
                elif ordChoices[0] == SAME_DAY:   # the same day of the month
                    mdayChoices = None
                else:
                    mdayChoices = [ordChoices[0]]
            else:
                # a day of the week
                if ordChoices[0] == EVERY_DAY:    # every of this weekday
                    wdayChoices = [Weekday(dayChoices[0])]
                elif ordChoices[0] == SAME_DAY:   # the same weekday of the month
                    wdayNum = (dtstart.day - 1) // 7 + 1
                    wdayChoices = [Weekday(dayChoices[0], wdayNum)]
                else:
                    wdayChoices = [Weekday(dayChoices[0], ordChoices[0])]
                if dayChoices[1] is not None and ordChoices[1] is not None:
                    wdayChoices.append(Weekday(dayChoices[1], ordChoices[1]))
                if dayChoices[2] is not None and ordChoices[2] is not None:
                    wdayChoices.append(Weekday(dayChoices[2], ordChoices[2]))
        if freq == YEARLY:
            if values[12]:
                monChoices = [int(month) for month in values[12]]
        retval = Recurrence(dtstart=dtstart,
                            freq=freq,
                            interval=interval,
                            byweekday=wdayChoices,
                            #count=count,
                            until=dtuntil,
                            bymonthday=mdayChoices,
                            bymonth=monChoices)
    except (TypeError, ValueError):
        retval = None
    return retval

def test_AlertDate_properties_work_with_bst():
    sample_datetime = dt_parse('2021-04-20T23:30:00Z')
    alerts_date = AlertDate(sample_datetime)
    assert alerts_date.as_lang == 'at 12:30am on Wednesday 21 April 2021'
    assert alerts_date.as_iso8601 == '2021-04-21T00:30:00+01:00'
    assert alerts_date.as_utc_datetime == dt_parse('2021-04-20T23:30:00Z')
    assert alerts_date.as_local_datetime == dt_parse('2021-04-21T00:30:00+01:00')
    assert alerts_date.as_local_date == date(2021, 4, 21)
    assert alerts_date.as_url == '21-apr-2021'

def test_last_updated(alert_dict):
    alert_dict['starts_at'] = dt_parse('2021-04-21T11:10:00Z')
    alert_dict_2 = alert_dict.copy()
    alert_dict_2['starts_at'] = dt_parse('2021-04-21T11:20:00Z')
    alerts = Alerts([alert_dict, alert_dict_2])
    assert len(alerts) == len(alerts.current_and_public) == 2
    assert isinstance(alerts.last_updated_date, AlertDate)
    assert alerts.last_updated == alert_dict_2['starts_at']

def test_get_url_for_alert_skips_non_public_alerts():
    the_days_alerts = [
        create_alert_dict(starts_at=dt_parse('2021-04-21T12:00:00Z'),
                          channel='operator'),
        create_alert_dict(starts_at=dt_parse('2021-04-21T13:00:00Z'),
                          channel='severe'),
    ]
    alerts = Alerts(the_days_alerts)
    # doesn't have the -2 suffix as we skip the operator alert
    assert get_url_for_alert(Alert(the_days_alerts[1]), alerts) == '21-apr-2021'

def _validate_date(self, field, value, container):
    """ """
    prefix = value[0:2]
    if prefix in FSPR_VALUE_PRIFIXES_MAP:
        date_val = value[2:]
    else:
        date_val = value
    try:
        dt_parse(date_val)
    except ValueError:
        container.append((field, "{0} is not a valid date string!".format(value)))

def external_url(self):
    if (getattr(self.report_title, "NREG", "") or "").lower() == "true":
        return "https://stockmarket.gov.ua/db/xml/news/{}/show".format(
            self.report_id)
    else:
        if relativedelta(dt_parse(self.report_title["FID"]),
                         dt_parse(self.report_title["STD"])).months > 4:
            return "https://stockmarket.gov.ua/db/xml/yearreports/{}/show".format(
                self.report_id)
        else:
            return "https://stockmarket.gov.ua/db/xml/kvreports/{}/show".format(
                self.report_id)

def test_get_url_for_alert_returns_url_with_count_for_alerts_on_same_day(index, expected_url):
    the_days_alerts = [
        create_alert_dict(id=UUID(int=0), starts_at=dt_parse('2021-04-20T22:59:00Z')),
        create_alert_dict(id=UUID(int=1), starts_at=dt_parse('2021-04-20T23:00:00Z')),
        create_alert_dict(id=UUID(int=2), starts_at=dt_parse('2021-04-21T12:31:00Z')),
        create_alert_dict(id=UUID(int=3), starts_at=dt_parse('2021-04-21T12:31:00Z')),
        create_alert_dict(id=UUID(int=4), starts_at=dt_parse('2021-04-21T23:00:00Z')),
    ]
    alerts = Alerts(the_days_alerts)
    assert get_url_for_alert(Alert(the_days_alerts[index]), alerts) == expected_url

def test_parse_nested_dict_all_sio_elems(self):
    locality = Dict('locality', Int('type'), Text('name'), AsIs('coords'),
                    Decimal('geo_skip'), Float('geo_diff'))
    address = Dict('address', locality, UUID('street_id'), CSV('prefs'),
                   DateTime('since'), List('types'), Opaque('opaque1'))
    email = Dict('email', Text('value'), Bool('is_business'), Date('join_date'),
                 DictList('preferred_order', 'name', 'pos'))
    customer = Dict('customer', 'name', email, address)

    class MyService(Service):
        class SimpleIO:
            input = customer

    CySimpleIO.attach_sio(self.get_server_config(), MyService)

    data = Bunch()
    data.customer = Bunch()
    data.customer.name = 'my-name'
    data.customer.email = Bunch()
    data.customer.email.value = 'my-email'
    data.customer.email.is_business = True
    data.customer.email.join_date = '1999-12-31'
    data.customer.email.preferred_order = [
        {'name': 'address2', 'pos': '2'},
        {'name': 'address1', 'pos': '1'},
    ]
    data.customer.address = Bunch()
    data.customer.address.locality = Bunch()
    data.customer.address.locality.type = '111'
    data.customer.address.locality.name = 'my-locality'
    data.customer.address.locality.coords = object()
    data.customer.address.locality.geo_skip = '123.456'
    data.customer.address.locality.geo_diff = '999.777'
    data.customer.address.street_id = uuid4().hex
    data.customer.address.prefs = '1,2,3,4'
    data.customer.address.since = '27-11-1988T11:22:33'
    data.customer.address.types = ['a', 'b', 'c', 'd']
    data.customer.address.opaque1 = object()

    input = MyService._sio.parse_input(data, DATA_FORMAT.JSON)
    self.assertIsInstance(input, Bunch)

    self.assertEquals(input.customer.name, data.customer.name)
    self.assertEquals(input.customer.email.value, data.customer.email.value)
    self.assertEquals(input.customer.email.is_business, data.customer.email.is_business)
    self.assertEquals(input.customer.email.join_date, dt_parse(data.customer.email.join_date))
    self.assertListEqual(input.customer.email.preferred_order,
                         data.customer.email.preferred_order)
    self.assertEquals(input.customer.address.locality.type,
                      int(data.customer.address.locality.type))
    self.assertEquals(input.customer.address.locality.name,
                      data.customer.address.locality.name)
    self.assertIs(input.customer.address.locality.coords,
                  data.customer.address.locality.coords)
    self.assertEquals(input.customer.address.locality.geo_skip,
                      decimal_Decimal(data.customer.address.locality.geo_skip))
    self.assertEquals(input.customer.address.locality.geo_diff,
                      float(data.customer.address.locality.geo_diff))
    self.assertEquals(input.customer.address.street_id,
                      uuid_UUID(data.customer.address.street_id))
    self.assertEquals(input.customer.address.prefs,
                      data.customer.address.prefs.split(','))
    self.assertEquals(input.customer.address.since,
                      dt_parse(data.customer.address.since))
    self.assertEquals(input.customer.address.types, data.customer.address.types)
    self.assertIs(input.customer.address.opaque1, data.customer.address.opaque1)

def parse_wikidata_dob(self, dob_obj):
    if dob_obj["precision"] == 11:
        return dt_parse(dob_obj["time"][1:]), 0
    if dob_obj["precision"] == 10:
        dob_obj["time"] = dob_obj["time"].replace("-00", "-01")
        return dt_parse(dob_obj["time"][1:]), 1
    if dob_obj["precision"] == 9:
        # sometimes the date field from wiki contains a value like '+1883-00-00T00:00:00Z'
        dob_obj["time"] = dob_obj["time"].replace("-00", "-01")
        return dt_parse(dob_obj["time"][1:]), 2

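# A small self-contained sketch of the zero-component fix-up above: Wikidata
# year/month-precision values such as '+1883-00-00T00:00:00Z' only become
# parseable once the zero month/day are rewritten and the leading '+' dropped.
from dateutil.parser import parse as dt_parse

raw = '+1883-00-00T00:00:00Z'
fixed = raw.replace('-00', '-01')   # '+1883-01-01T00:00:00Z'
parsed = dt_parse(fixed[1:])        # strip the '+' sign, as parse_wikidata_dob does
assert (parsed.year, parsed.month, parsed.day) == (1883, 1, 1)
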
def value_from_datadict(self, data, files, name):
    values = [widget.value_from_datadict(data, files, "{}_{}".format(name, i))
              for i, widget in enumerate(self.widgets)]
    try:
        def toIntOrNone(value):
            return int(value) if value else None
        dtstart = dt_parse(values[0]) if values[0] else None
        frequency = toIntOrNone(values[1])
        interval = toIntOrNone(values[2]) or None
        #count = toIntOrNone(values[4]) or None
        dtuntil = dt_parse(values[5]) if values[5] else None
        ordChoice = toIntOrNone(values[6])
        dayChoice = toIntOrNone(values[7])
        wdayChoices = None
        mdayChoices = None
        monChoices = None
        if frequency == WEEKLY:
            if values[3]:
                wdayChoices = [int(day) for day in values[3]]
        elif frequency in (MONTHLY, YEARLY):
            if dayChoice == _DayOfMonth:
                # day of the month
                if ordChoice == _EveryDay:    # every day, == daily
                    wdayChoices = range(7)
                elif ordChoice == _SameDay:   # the same day of the month
                    mdayChoices = None
                else:
                    mdayChoices = [ordChoice]
            else:
                # a day of the week
                if ordChoice == _EveryDay:    # every of this weekday
                    wdayChoices = [Weekday(dayChoice)]
                elif ordChoice == _SameDay:   # the same weekday of the month
                    wdayNum = (dtstart.day - 1) // 7 + 1
                    wdayChoices = [Weekday(dayChoice, wdayNum)]
                else:
                    wdayChoices = [Weekday(dayChoice, ordChoice)]
        if frequency == YEARLY:
            monChoices = [int(values[8])]
        retval = Recurrence(dtstart=dtstart,
                            freq=frequency,
                            interval=interval,
                            byweekday=wdayChoices,
                            #count=count,
                            until=dtuntil,
                            bymonthday=mdayChoices,
                            bymonth=monChoices)
    except (TypeError, ValueError):
        retval = None
    return retval

def test_get_super_user(self):
    now = datetime.utcnow()
    response = self.get('/zato/sso/user/session', {
        'current_ust': self.ctx.super_user_ust,
        'target_ust': self.ctx.super_user_ust,
    })
    self.assertLess(dt_parse(response.creation_time), now)
    self.assertGreater(dt_parse(response.expiration_time), now)
    # Instead of an assertion, this will raise an exception if remote_addr cannot be parsed
    ip_address(response.remote_addr)

def test_alert_says_expired_alert_stopped(client_get, mocker):
    mocker.patch('app.models.alerts.Alerts.load', return_value=Alerts([
        create_alert_dict(
            id=uuid4(),
            content='test 1',
            starts_at=dt_parse('2021-04-21T11:00:00Z'),
            cancelled_at=None,
            finishes_at=dt_parse('2021-04-21T15:00:00Z'),
        )
    ]))
    html = client_get('alerts/21-apr-2021')
    assert html.select_one('main h2').text.strip() == (
        'Stopped sending at 4:00pm on Wednesday 21 April 2021')

def test_alert_says_active_alert_is_active(client_get, mocker):
    mocker.patch('app.models.alerts.Alerts.load', return_value=Alerts([
        create_alert_dict(
            id=uuid4(),
            content='test 1',
            starts_at=dt_parse('2021-04-21T11:00:00Z'),
            cancelled_at=None,
            finishes_at=dt_parse('2021-04-21T15:00:00Z'),
        )
    ]))
    html = client_get('alerts/21-apr-2021')
    # no "Stopped sending at ..." h2
    assert html.select_one('main h2') is None

def ts2dt(ts, milli=False, tz_aware=False):
    """ convert timestamp int's (seconds) to datetime objects """
    # anything already a datetime will still be returned;
    # tz_aware, if set to true, requests a timezone-aware result
    is_true(HAS_DATEUTIL, "`pip install python_dateutil` required")
    if isinstance(ts, (datetime, date)):
        pass
    elif is_empty(ts, except_=False):
        return None
    elif isinstance(ts, (int, float, long)) and ts < 0:
        # its not a timestamp
        return None
    elif isinstance(ts, basestring):
        try:
            ts = float(ts)
        except (TypeError, ValueError):
            # maybe we have a date like string already?
            try:
                ts = dt_parse(ts)
            except Exception:
                raise TypeError(
                    "unable to derive datetime from timestamp string: %s" % ts)
    elif milli:
        ts = float(ts) / 1000.0  # convert milli to seconds
    else:
        ts = float(ts)  # already in seconds
    return _get_datetime(ts, tz_aware)

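# Standalone sketch (Python 3; the py2 helpers above are assumed unavailable)
# of the milli-vs-seconds handling in ts2dt: millisecond epochs must be divided
# by 1000 before conversion, which is what the milli=True branch does.
from datetime import datetime, timezone

millis = 1619000000000   # millisecond epoch; far out of range if read as seconds
dt = datetime.fromtimestamp(millis / 1000.0, tz=timezone.utc)
assert dt.year == 2021
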
def setUp(self):
    super(NotificationBasicTests, self).setUp()
    # extract expected answers from the test data
    no_xml = self._open_xml(self.new_order_fn)
    self.order_number = long(no_xml.findtext(xpq_order_number))
    self.new_order_timestamp = self._extract_timestamp(no_xml)
    expires_utc = dt_parse(no_xml.findtext(xpq_good_until_date))
    self.dt_expires = self._trans_utc_to_local(expires_utc)
    osc1_xml = self._open_xml(self.order_state_change_1_fn)
    self.order_state_change_1_timestamp = self._extract_timestamp(osc1_xml)
    osc2_xml = self._open_xml(self.order_state_change_2_fn)
    self.order_state_change_2_timestamp = self._extract_timestamp(osc2_xml)
    osc3_xml = self._open_xml(self.order_state_change_3_fn)
    self.order_state_change_3_timestamp = self._extract_timestamp(osc3_xml)
    ri_xml = self._open_xml(self.risk_information_fn)
    self.risk_information_timestamp = self._extract_timestamp(ri_xml)
    self.risk_info_xml_node = ri_xml.find(xpq_risk_info)
    aa_xml = self._open_xml(self.authorization_amount_fn)
    self.authorization_amount_timestamp = self._extract_timestamp(aa_xml)
    self.authorization_amount = \
        Decimal(aa_xml.findtext(xpq_authorization_amount))
    ca_xml = self._open_xml(self.charge_amount_fn)
    self.charge_amount_timestamp = self._extract_timestamp(ca_xml)
    self.latest_amount = Decimal(ca_xml.findtext(xpq_latest_charge_amount))
    self.total_amount = Decimal(ca_xml.findtext(xpq_total_charge_amount))

def _fetch_mtime(self, last_update, parse_timestamp):
    mtime = None
    if last_update:
        if isinstance(last_update, basestring):
            mtime = dt_parse(last_update)
        else:
            mtime = last_update
    else:
        mtime = self.get_last_field('_start')
    # convert timestamp to datetime object
    mtime = ts2dt(mtime)
    logger.info("Last update mtime: %s" % mtime)
    if mtime:
        if parse_timestamp is None:
            parse_timestamp = self.get_property('parse_timestamp', default=True)
        if parse_timestamp:
            if not (hasattr(mtime, 'tzinfo') and mtime.tzinfo):
                # We need the timezone, to readjust relative to the
                # server's tz
                mtime = mtime.replace(tzinfo=pytz.utc)
            mtime = mtime.strftime('%Y-%m-%d %H:%M:%S %z')
            dt_format = "yyyy-MM-dd HH:mm:ss z"
            mtime = "parseTimestamp('%s', '%s')" % (mtime, dt_format)
        else:
            mtime = "'%s'" % mtime
    return mtime

def format_log_entry(unformatted_log_entry) -> dict:
    return {
        'timestamp': int(dt_parse(unformatted_log_entry['timestamp']).timestamp() * 1000),
        'message': get_log_message(unformatted_log_entry)
    }

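# Quick check (not from the source) of the epoch-millisecond conversion used in
# format_log_entry: an aware ISO timestamp becomes an integer UTC epoch in ms.
from dateutil.parser import parse as dt_parse

assert int(dt_parse('2021-04-21T12:00:00Z').timestamp() * 1000) == 1619006400000
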
def handle(self, *args, **options):
    reader = DictReader(options["in_file"])
    for row in tqdm(reader):
        edrpou = row["code"].strip().lstrip("0")
        if not edrpou or not edrpou.isdigit():
            print("Cannot identify company by edrpou {}, pep line was {}".format(
                edrpou, row))
            continue
        edrpou = int(edrpou)
        if row["capital"]:
            capital = float(row["capital"])
        else:
            capital = None
        if row["reg_date"]:
            reg_date = dt_parse(row["reg_date"], yearfirst=True)
        else:
            reg_date = None
        CompanyRecord.objects.filter(company_id=edrpou).nocache().update(
            charter_capital=capital,
            reg_date=reg_date,
            form=row["form"],
            phone1=row["phone1"],
            phone2=row["phone2"],
            email=row["email"],
            fax=row["fax"],
        )

def test_planned_tests_page_with_current_operator_test(
    mocker, client_get, extra_json_fields,
):
    mocker.patch('app.models.alerts.PlannedTests.from_yaml', return_value=[])
    mocker.patch('app.models.alerts.Alerts.load', return_value=Alerts([
        create_alert_dict(
            channel='operator',
            starts_at=dt_parse('2021-04-21T09:00:00Z'),
            **extra_json_fields)
    ]))
    html = client_get("alerts/planned-tests")
    assert [
        normalize_spaces(h2.text) for h2 in html.select('main h2')
    ] == ['Wednesday 21 April 2021']
    assert not html.select('main h3')
    assert [normalize_spaces(p.text) for p in html.select('main p')] == [
        'Some mobile phone networks in the UK will test emergency alerts.',
        'Most phones and tablets will not get a test alert.',
        'Find out more about mobile network operator tests.',
        'The alert will say:',
        ('This is a mobile network operator test of the Emergency Alerts '
         'service. You do not need to take any action. To find out more, '
         'search for gov.uk/alerts'),
    ]

def scalar_coerce(_, val):
    if val == '':
        raise TypeError('%r is not a valid date' % val)
    dt = dt_parse(val)
    if any(x > 0 for x in (dt.hour, dt.minute, dt.second, dt.microsecond)):
        msg = "Can not coerce %r to type Date, contains time information"
        raise TypeError(msg % val)
    return dt.date()

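# Hedged usage sketch for scalar_coerce above (its first argument is unused by
# the body): date-only strings coerce to datetime.date, while anything carrying
# a non-midnight time raises.
import datetime as _dt

assert scalar_coerce(None, '2021-04-21') == _dt.date(2021, 4, 21)
try:
    scalar_coerce(None, '2021-04-21T10:30:00')
except TypeError:
    pass  # expected: the value contains time information
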
def id_when(id):
    if isinstance(id, datetime):
        return id
    else:
        when = re.sub(
            r'[_ T](\d\d)[:-](\d\d)[:-](\d\d)$',
            r'T\g<1>:\g<2>:\g<3>', id)
        return dt_parse(when)

def parse_created_at(self):
    raw_str = self.info_table.get("created_at")
    if raw_str:
        try:
            cleaned_str = raw_str.split("(")[1].split(")")[0]
            self._created_at = dt_parse(cleaned_str)
        except Exception:
            log.exception("Failed to parse created_at: {}".format(raw_str))

def isnewer(self):
    """Check if broadcast is newer than last broadcast time"""
    last_broadcast = self.api.session.config.get('last_check')
    if not last_broadcast:
        return None
    elif self.start_dt > dt_parse(last_broadcast):
        return True
    else:
        return False

def scalar_coerce(_, val):
    if val == '':
        raise TypeError('%r is not a valid date' % val)
    dt = dt_parse(val)
    # TODO: doesn't work with Python 3.5+, where time objects are always truthy
    if dt.time():
        raise TypeError(
            "Can not coerce %r to type Date, contains time information" % val
        )
    return dt.date()

def post(self, request, *args, **kwargs):
    notify_xml = XML(request.raw_post_data)
    self.notify_type = self._extract_notify_type(notify_xml)
    self.notify_type_const = \
        GoogleOrder.trans_notify_type_const(self.notify_type)
    ts_utc = dt_parse(notify_xml.findtext(xpq_timestamp))
    self.timestamp = self._trans_utc_to_local(ts_utc)
    self.serial_number = notify_xml.get('serial-number')
    self.order_number = long(notify_xml.findtext(xpq_order_number))
    backend_class = get_backend_class(ngc_settings.BACKEND)
    self.backend = backend_class(request, notify_xml=notify_xml)
    self.cart = self.backend.get_cart()
    # if we don't find a cart, we do actually go ahead and continue
    # processing. The idea is the data npo_google_checkout holds should
    # match all of what google checkout holds
    msg_target = "cart '{0}'".format(self.cart) if self.cart else 'unknown cart'
    msg = "GC {0} #{1} received for {2}.".format(
        self.notify_type, self.serial_number, msg_target)
    logger.info(msg, extra={'request': request})
    # notification type-specific handling
    if self.notify_type_const == GoogleOrder.NEW_ORDER_NOTIFY_TYPE:
        self._post_new_order(notify_xml)
    else:
        try:
            order = GoogleOrder.objects.get(number=self.order_number)
        except GoogleOrder.DoesNotExist:
            order = None
        else:
            order.last_notify_type = self.notify_type_const
            order.last_notify_dt = self.timestamp
        if not order:
            # silently ignore notifications for orders we didn't see the
            # new-order-notification for
            pass
        elif self.notify_type_const == GoogleOrder.ORDER_STATE_CHANGE_NOTIFY_TYPE:
            self._post_order_state_change(order, notify_xml)
        elif self.notify_type_const == GoogleOrder.RISK_INFORMATION_NOTIFY_TYPE:
            self._post_risk_informaiton(order, notify_xml)
        elif self.notify_type_const == GoogleOrder.AUTHORIZATION_AMOUNT_NOTIFY_TYPE:
            self._post_authorization_amount(order, notify_xml)
        elif self.notify_type_const == GoogleOrder.CHARGE_AMOUNT_NOTIFY_TYPE:
            self._post_charge_amount(order, notify_xml)
        else:
            msg = "Unrecognized notification '{0}' received".format(
                self.notify_type)
            raise RuntimeError(msg)
    return super(NotificationListenerView, self).get(request, *args, **kwargs)

def transform(self, doc):
    extracted = doc['extracted']['data']
    self.data = extracted

    def to_ts(x):
        return dt_parse(x).timestamp()

    self.data['created_at'] = to_ts(self.data['created_at'])
    if 'retweeted_status' in self.data:
        self.data['retweeted_status']['created_at'] = to_ts(
            self.data['retweeted_status']['created_at'])
    self.data['source'] = 'weibo'
    self.data['user']['source'] = 'weibo'

def get_log_line(line, format):
    parsed = parse(format, line)
    if parsed:
        link = get_link(parsed['message'])
        if link:
            print('Found link: %s' % link)
            anchor = r'<a href="%s">%s</a>' % (link, link)
            message = parsed['message'].replace(link, anchor)
            return LogLine(dt_parse(parsed['timestamp']), parsed['user'], message)

def id_when(id):
    ''' Convert awkward datetime strings to ISO format. '''
    if isinstance(id, datetime):
        return id
    else:
        when = re.sub(
            r'[_ T](\d\d)[:-](\d\d)[:-](\d\d)$',
            r'T\g<1>:\g<2>:\g<3>', id)
        return dt_parse(when)

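# Assumed example of the normalization id_when performs: an id like
# '2021-04-21_12-30-00' is rewritten to the ISO-ish '2021-04-21T12:30:00'
# before dt_parse sees it.
import re
from dateutil.parser import parse as dt_parse

raw_id = '2021-04-21_12-30-00'
when = re.sub(r'[_ T](\d\d)[:-](\d\d)[:-](\d\d)$', r'T\g<1>:\g<2>:\g<3>', raw_id)
assert when == '2021-04-21T12:30:00'
assert dt_parse(when).hour == 12
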
def parse(self, event):
    parsed_dt = dt_parse(self.data, fuzzy=True)  # pylint: disable=no-member
    if self.to_utc:
        try:
            parsed_dt = parsed_dt.astimezone(UTC)
        except ValueError:
            self.logger.info('Failed to convert to UTC')
    event[self.out_field] = parsed_dt.strftime(self.timestamp_fmt)
    # pylint: enable=no-member
    return event

def dt2ts(dt, drop_micro=False):
    ''' convert datetime objects to timestamp seconds (float) '''
    if isinstance(dt, (int, long, float, complex)):
        # its a ts already
        ts = dt
    elif isinstance(dt, basestring):
        # convert to datetime first
        ts = dt2ts(dt_parse(dt))
    else:
        ts = timegm(dt.timetuple())
    if drop_micro:
        return float(int(ts))
    else:
        return float(ts)

def _fetch_mtime(self, last_update):
    mtime = None
    if last_update:
        if isinstance(last_update, basestring):
            mtime = dt_parse(last_update)
        else:
            mtime = last_update
    else:
        mtime = self.get_last_field('_start')
    # convert timestamp to datetime object
    mtime = ts2dt(mtime)
    self.logger.info("Last update mtime: %s" % mtime)
    return mtime

def decision_properties(doc):
    """
    Extract a dictionary of properties from the contents of a Decision
    Document.
    """
    sections = decision_sections(doc)
    attrs = {}
    props = {}
    if "properties" in sections:
        attrs.update(somerville_properties(sections["properties"]))
    if "header" in sections:
        header_props = somerville_properties(sections["header"])
        copy_header_attrs = ["Decision", "Date of Decision",
                             "Date Filed with City Clerk"]
        attrs.update(
            {k: header_props[k] for k in copy_header_attrs if k in header_props})
        if "Date" in attrs:
            del attrs["Date"]
        if "Site" in attrs:
            del attrs["Site"]
        try:
            if "Legal Notice" in header_props and \
               not doc.proposal.attributes.filter(handle="legal_notice").exists():
                attrs["Legal Notice"] = header_props["Legal Notice"]
                if not doc.proposal.summary.strip():
                    props["summary"] = header_props["Legal Notice"]
        except AttributeError:
            pass
    vote, decision = find_vote(" ".join(sections["decision"]))
    if vote:
        concur, dissent, *_ = re.findall(r"\d+", vote)
        approved = bool(re.match(r"(?i)approve", decision))
        attrs["Vote"] = vote
        attrs["Votes to Approve"] = concur if approved else dissent
        attrs["Votes to Deny"] = dissent if approved else concur
        if "Decision" not in attrs:
            attrs["Decision"] = decision.title()
        props["status"] = "Approved" if approved else "Denied"
    try:
        props["complete"] = pytz.timezone("US/Eastern").localize(
            dt_parse(attrs["Date of Decision"])).isoformat()
    except (ValueError, KeyError):
        props["complete"] = doc.published.isoformat()
    return props, attrs

def handle_incoming_email(message):
    msg = email.message_from_string(message.encode('utf-8'))
    text = msg.get_payload().strip().decode('utf-8')
    received_on = dt_parse(msg.get('Date').strip()).replace(tzinfo=timezone.utc)
    identity = msg.get('From').strip().split('@', 1)[0]
    sms = SMSMessage(identity=identity, text=text, received_on=received_on)
    logger.info("Incoming SMS {}".format(sms))
    try:
        return handle_incoming_sms(sms)
    except Exception as exp:
        logger.error("Exception in handling {}: {}".format(sms, exp))
        return None

def get_from_to(args=None):
    '''apply date-range (from..to) params, if present. '''
    if not args:
        args = request.args
    # we need a 'default date' (that we can then safely ignore), because by
    # default, dateutil's "default value is the current date, at 00:00:00am."
    default_ignore_date = datetime.datetime.strptime('1970-01-01 00:00:00',
                                                     '%Y-%m-%d %H:%M:%S')
    c_from = args.get('from')
    c_to = args.get('to')
    try:
        if c_from:
            c_from = dt_parse(c_from, default=default_ignore_date)
        if c_to:
            c_to = dt_parse(c_to, default=default_ignore_date)
    except Exception:
        pass
    return c_from, c_to

def is_valid_reauthn(session_info, max_age=60) -> bool:
    """
    :param session_info: The SAML2 session_info
    :param max_age: Max time (in seconds) since authn
    :return: True if authn instant is no older than max_age
    :rtype: bool
    """
    utc_tz = tzutc()
    authn_instant = dt_parse(session_info['authn_info'][0][2])
    max_age = timedelta(seconds=max_age)
    if authn_instant >= (datetime.now(tz=utc_tz) - max_age):
        return True
    current_app.logger.error('Asserted authn instant was older than required')
    current_app.logger.error('Authn instant: {}'.format(authn_instant))
    current_app.logger.error('Oldest accepted: {}'.format(
        datetime.now(tz=utc_tz) - max_age))
    return False

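# A standalone sketch of the freshness window in is_valid_reauthn: an authn
# instant passes only if it falls within max_age seconds of "now" (fixed
# datetimes are used here so the example is deterministic).
from datetime import timedelta
from dateutil.parser import parse as dt_parse

max_age = timedelta(seconds=60)
authn_instant = dt_parse('2021-04-21T12:00:00Z')
now = dt_parse('2021-04-21T12:00:30Z')       # 30s after authn: accepted
assert authn_instant >= (now - max_age)
now = dt_parse('2021-04-21T12:02:00Z')       # 120s after authn: rejected
assert not (authn_instant >= (now - max_age))
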
def _post_new_order(self, notify_xml):
    expires_utc_node = notify_xml.find(xpq_good_until_date)
    expires = None
    if expires_utc_node is not None:
        expires_utc = dt_parse(expires_utc_node.text)
        expires = self._trans_utc_to_local(expires_utc)
    # check to see if we already have seen that order before inserting it;
    # recovering from a db IntegrityError on failure is more complicated
    if not GoogleOrder.objects.filter(number=self.order_number).exists():
        order = GoogleOrder.objects.create(
            cart=self.cart,
            number=self.order_number,
            dt_init=self.timestamp,
            last_notify_dt=self.timestamp,
            dt_expires=expires)
        notification_new_order.send(self, cart=self.cart, order=order)

def dt2ts(dt, drop_micro=False):
    ''' convert datetime objects to timestamp seconds (float) '''
    # the equality check against 'NaT' is a hack to avoid adding pandas
    # as a dependency
    if repr(dt) == 'NaT':
        return None
    elif not dt:
        return dt
    elif isinstance(dt, (int, long, float, complex)):
        # its a ts already
        ts = dt
    elif isinstance(dt, basestring):
        # convert to datetime first
        ts = dt2ts(dt_parse(dt))
    else:
        ts = timegm(dt.timetuple())
    if drop_micro:
        return float(int(ts))
    else:
        return float(ts)

def parse_snippet(self, snippet, as_type=None):
    # search for a date override in the beginning of each snippet,
    # otherwise default to completion date of 'today'
    search = SNIPPET_RE.search(snippet)
    if not search:
        log.error('Failed to parse snippet as expected!')
        raise RuntimeError
    # default to TODAY if no on_date is set in-line
    on_date = dt_parse(search.group(1) or TODAY)
    text = search.group(2).strip()
    if not text:
        log.debug('Found empty snippet!')
        return  # skip empty snippets
    # space separated ^emails
    search = AT_RE.findall(snippet)
    at_csv = [', '.join(s[0].split(' '))
              for s in search if s][0] if search else ''
    # #Tags
    search = TAG_RE.findall(snippet)
    tag_csv = [', '.join([x.rstrip('#') for x in s[0].split(' ')])
               for s in search if s][0] if search else ''
    owner_email = self.email
    # Make sure each snippet has an on_date
    _snippet = {
        'on_date': on_date,
        'text': text,
        'owner_email': owner_email,
        'at_csv': at_csv,
        'tag_csv': tag_csv,
        'topic': self.topic,
    }
    if as_type is str:
        snippet = (
            '{on_date} {owner_email} {at_csv} {text} {tag_csv}'.format(
                **_snippet))
    else:
        snippet = _snippet
    return snippet

def timezone_converter(self, dt):
    if dt is None:
        return None
    elif isinstance(dt, basestring):
        dt = dt_parse(dt)
    if dt.tzinfo:
        # datetime instance already has tzinfo set
        # WARN if not dt.tzinfo == from_tz?
        try:
            dt = dt.astimezone(utc)
        except ValueError:
            # date has invalid timezone; replace with expected
            dt = dt.replace(tzinfo=from_tz)
            dt = dt.astimezone(utc)
    else:
        # set tzinfo as from_tz then convert to utc
        dt = from_tz.localize(dt).astimezone(utc)
    return dt

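# Illustrative sketch of the two branches in timezone_converter, assuming
# from_tz and utc are pytz timezones as the code above implies.
import pytz
from dateutil.parser import parse as dt_parse

from_tz = pytz.timezone('Europe/Oslo')
utc = pytz.utc
naive = dt_parse('2021-04-21 12:00:00')        # no tzinfo: localize first
assert from_tz.localize(naive).astimezone(utc).hour == 10   # CEST is UTC+2
aware = dt_parse('2021-04-21T12:00:00+02:00')  # tzinfo present: convert directly
assert aware.astimezone(utc).hour == 10
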
def fetch_document(doc_id):
    """Copy the given document (proposal.models.Document) to a local
    directory.
    """
    doc = Document.objects.get(pk=doc_id)
    url = doc.url
    if doc.document and os.path.exists(doc.document.path):
        # Has the document been updated?
        updated = proposal_utils.last_modified(doc.url)
        # No? Then we're good.
        if not updated or updated <= doc.published:
            return doc.pk
        # TODO Special handling of updated documents
    url_components = parse.urlsplit(url)
    ext = extension(os.path.basename(url_components.path))
    filename = "download.%s" % ext
    path = os.path.join(settings.MEDIA_ROOT, "doc", str(doc.pk), filename)
    # Ensure that the intermediate directories exist:
    pathdir = os.path.dirname(path)
    os.makedirs(pathdir, exist_ok=True)
    logger.info("Fetching Document #%i", doc.pk)
    with request.urlopen(url) as resp, open(path, "wb") as out:
        shutil.copyfileobj(resp, out)
    doc.document = path
    logger.info("Copied Document #%i to %s", doc.pk, path)
    file_published = files_metadata.published_date(path)
    if file_published:
        doc.published = file_published
    elif "Last-Modified" in resp.headers:
        doc.published = dt_parse(resp.headers["Last-Modified"])
    doc.save()
    return doc.pk

def parse_timestamp(self, ts):
    """Parse a log's timestamp and break it down into its individual parts

    @param ts - The timestamp string from a log
    @return - a "datetime" object
    """
    timedata = self.prog_timestamp.match(ts).groupdict()
    # Now, have to convert month to an int.
    dt_str = '{YEAR}-{MONTH}-{DAY} {HOUR}-{MINUTE}-{SECOND} {TZ}'.format(
        YEAR=timedata['year'],
        MONTH=MONTH_LOOKUP[timedata['month']],
        DAY=timedata['day'],
        HOUR=timedata['hour'],
        MINUTE=timedata['minute'],
        SECOND=timedata['second'],
        TZ=timedata['tz'],
    )
    dt = dt_parse(dt_str)
    dt = dt.replace(microsecond=int(int(timedata['nanosecond']) / 1000))
    return dt

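# The nanosecond handling above in one line: dateutil keeps microsecond
# resolution only, so the captured nanoseconds are truncated to microseconds.
nanosecond = '123456789'   # hypothetical captured group value
assert int(int(nanosecond) / 1000) == 123456   # truncated, not rounded
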
def save_from_url(doc, url, filename_base=None):
    """
    Downloads the document at `url` and saves it locally, storing the path in
    the given Document.

    :param doc: a Document model
    :param url: URL string
    :param filename_base: optional subpath specifying where to save the document

    Returns a tuple: (success, status_code, updated)
    """
    filename = path.basename(parse.urlsplit(url).path)
    if filename_base:
        filename = "{}.{}".format(filename_base, extension(filename))
    exists = doc.document and path.exists(doc.document.path)
    if exists:
        published = doc.published
        headers = {"If-Modified-Since":
                   published.strftime("%a, %d %b %Y %H:%M:%S GMT")}
    else:
        headers = {}
    with requests.get(url, headers=headers, stream=True) as response:
        if response:
            if response.status_code == 304:
                return (True, response.status_code, False)
            doc.document.save(filename, File(response.raw), save=False)
            file_published = files.published_date(doc.document.path)
            if file_published:
                doc.published = file_published
            elif "Last-Modified" in response.headers:
                doc.published = dt_parse(response.headers["Last-Modified"])
            doc.save()
            return (True, response.status_code, exists)
        else:
            return (False, response.status_code, response.reason)

def getattr(self, inode):
    entry = llfuse.EntryAttributes()
    entry.st_ino = inode
    entry.st_mode = self._stat.get(inode, stat.S_IFREG | 0o444)
    if inode <= 3 or (stat.S_IFLNK & entry.st_mode) == stat.S_IFLNK:
        return entry
    if 'uuid' in self._metadata[inode]:
        entry.st_mode |= 0o644
    dt = int(dt_parse(self._metadata[inode]['date']).timestamp() * 1e9)
    entry.st_mtime_ns = dt
    entry.st_ctime_ns = dt
    entry.st_atime_ns = dt
    entry.st_size = self._metadata[inode]['size']
    return entry

def tail(self, spec=None, limit=None, format_=None):
    if not spec:
        spec = {}
    else:
        spec = json.loads(spec)
    if not format_:
        format_ = "%(processName)s:%(message)s"
    # spec 'when' key needs to be converted from string to datetime
    if "when" in spec:
        spec["when"]["$gt"] = dt_parse(spec["when"]["$gt"])
    if not limit:
        limit = 20
    else:
        limit = int(limit)
        if limit < 0:
            raise ValueError("limit must be an integer value > 0")
    docs = self.mongodb_config.c_logs.find(spec, limit=limit,
                                           sort=[("when", -1)])
    _result = sorted([doc for doc in docs], key=itemgetter("when"))
    try:
        # get the last log.when so client knows from where to
        # start next...
        last_when = _result[-1]["when"]
        meta = last_when
        result = "\n".join([format_ % doc for doc in _result])
    except KeyError:
        raise KeyError("Invalid log format key (%s)" % format_)
    except ValueError:
        raise ValueError("Invalid log format string (%s)" % format_)
    except IndexError:
        result = None
        meta = None
    return result, meta

def dt2ts(dt, drop_micro=False): """ convert datetime objects to timestamp seconds (float) """ is_true(HAS_DATEUTIL, "`pip install python_dateutil` required") if is_empty(dt, except_=False): ts = None elif isinstance(dt, (int, long, float)): # its a ts already ts = float(dt) elif isinstance(dt, basestring): # convert to datetime first try: parsed_dt = float(dt) except (TypeError, ValueError): parsed_dt = dt_parse(dt) ts = dt2ts(parsed_dt) else: assert isinstance(dt, (datetime, date)) # keep micros; see: http://stackoverflow.com/questions/7031031 ts = ((timegm(dt.timetuple()) * 1000.0) + (dt.microsecond / 1000.0)) / 1000.0 if ts is None: pass elif drop_micro: ts = float(int(ts)) else: ts = float(ts) return ts
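# Python 3 sketch of the microsecond-preserving arithmetic at the end of dt2ts:
# timegm yields whole seconds, so microseconds are folded back in explicitly.
from calendar import timegm
from datetime import datetime

dt = datetime(2021, 4, 21, 12, 0, 0, 250000)
ts = ((timegm(dt.timetuple()) * 1000.0) + (dt.microsecond / 1000.0)) / 1000.0
assert ts == timegm(dt.timetuple()) + 0.25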