def validate_expiration_time(self, original_value, value_in_question, type_=None):
    """Validate an expiration time passed to Update or Create methods.

    Args:
        original_value: reference RFC3339 timestamp (e.g. SLICE creation date).
        value_in_question: candidate RFC3339 timestamp (e.g. requested expiry).
        type_: optional object type used to look up the maximum extension
            window in self.STATIC['CONFIG'].

    Returns:
        bool: True when the candidate expiration time is valid.

    Raises:
        GFedv2ArgumentError: if the reference time is already in the past.
    """
    reference = pyrfc3339.parse(original_value)
    candidate = pyrfc3339.parse(value_in_question)
    now = pytz.timezone("UTC").localize(datetime.datetime.utcnow())
    # Refuse updates on objects whose reference time has already passed.
    if now > reference:
        raise GFedv2ArgumentError(
            "Update is not possible because the object has already expired: "
            + str(now) + " > " + str(reference))
    if not type_:
        return reference <= candidate
    # With a type, also cap the extension by the configured maximum window.
    max_days = self.STATIC['CONFIG'][type_]['max_%s_extension_time' % type_.lower()]
    allowed_window = datetime.timedelta(days=max_days)
    requested_window = candidate - reference
    return reference <= candidate and requested_window < allowed_window
def current_calendar_events(calId, time_window=1):
    """Return the calendar events that are in progress right now.

    Args:
        calId: Google Calendar id to query.
        time_window: half-width (days) of the query window around now.
    """
    # Build the Calendar API client over a plain HTTP transport.
    http = httplib2.Http()
    service = build(serviceName='calendar', version='v3', http=http,
                    developerKey='AIzaSyA96dI1CPIUEuzgi3-_H8dQVyM34rak5vE')
    # Query all events within +/- time_window days of the current instant.
    now = datetime.utcnow().replace(tzinfo=pytz.utc)
    half_window = timedelta(days=time_window)
    window_start = pyrfc3339.generate(now - half_window)
    window_end = pyrfc3339.generate(now + half_window)
    events = service.events().list(calendarId=calId, singleEvents=True,
                                   timeMin=window_start, timeMax=window_end,
                                   orderBy='updated').execute()
    # Keep only events whose span contains the current time.
    ongoing = []
    for event in events['items']:
        begins = pyrfc3339.parse(event['start']['dateTime'])
        finishes = pyrfc3339.parse(event['end']['dateTime'])
        if begins < now < finishes:
            ongoing.append(event)
    return ongoing
def validate_expiration_time(self, original_value, value_in_question, type_=None):
    """Validate an expiration time passed to Update or Create methods.

    Args:
        original_value: reference RFC3339 timestamp (e.g. SLICE creation date).
        value_in_question: candidate RFC3339 timestamp (e.g. requested expiry).
        type_: optional object type used to look up the maximum extension
            window in self.STATIC['CONFIG'].

    Returns:
        bool: True when the candidate is strictly later than the reference
        (and, when type_ is given, within the configured extension window).
    """
    reference = pyrfc3339.parse(original_value)
    candidate = pyrfc3339.parse(value_in_question)
    if not type_:
        return reference < candidate
    # With a type, also cap the extension by the configured maximum window.
    max_days = self.STATIC['CONFIG'][type_]['max_%s_extension_time' % type_.lower()]
    allowed_window = datetime.timedelta(days=max_days)
    return reference < candidate and (candidate - reference) < allowed_window
def _daterange_filter(query, params, state):
    """
    handles filtering by start and end date
    paramters: startdate, enddate
    """
    # Both bounds follow the same pattern: pop the param, try YYYY-MM-DD
    # first, fall back to RFC3339, then constrain the queryset.
    startdate = params.get('startdate')
    if startdate is not None:
        try:
            del params['startdate']
            try:
                startdate = parse_date(startdate, '%Y-%m-%d')
            except ValueError:
                startdate = pyrfc3339.parse(startdate)
            query = query.filter(pub_date__gte=startdate)
        except ValueError:
            raise QueryError('Invalid start date "%s", must be YYYY-MM-DD or rfc3339' % startdate)
    enddate = params.get('enddate')
    if enddate is not None:
        try:
            del params['enddate']
            try:
                enddate = parse_date(enddate, '%Y-%m-%d')
            except ValueError:
                enddate = pyrfc3339.parse(enddate)
            query = query.filter(pub_date__lte=enddate)
        except ValueError:
            raise QueryError('Invalid end date "%s", must be YYYY-MM-DD or rfc3339' % enddate)
    return query, params, state
def transactiondataset(dataset, ttype):
    # Flatten one Awin transaction record in place and emit it on the
    # 'Transactions' singer stream.
    # Args:
    #   dataset: raw transaction dict from the API (mutated in place).
    #   ttype: stream/type label stored under 'datasettype'.
    if 'commissionAmount' in dataset:
        # Split {amount, currency} into two flat columns.
        commamount = dataset['commissionAmount']['amount']
        dataset['commissionCurrency'] = dataset['commissionAmount']['currency']
        dataset.pop('commissionAmount')
        dataset['commissionAmount'] = commamount
    if 'saleAmount' in dataset:
        # Same flattening for the sale amount.
        saleamount = dataset['saleAmount']['amount']
        dataset['saleCurrency'] = dataset['saleAmount']['currency']
        dataset.pop('saleAmount')
        dataset['saleAmount'] = saleamount
    if 'clickRefs' in dataset and dataset['clickRefs'] != None:
        # Merge click-reference keys into the top level.
        dataset.update(dataset.pop('clickRefs'))
    dataset['datasettype'] = ttype
    if 'customParameters' in dataset and dataset['customParameters'] != None:
        # Stringify the key/value list so it fits a single column.
        dataset['customParameters'] = str(dict([(i['key'], i['value']) \
            for i in dataset['customParameters']]))
    if 'transactionParts' in dataset:
        # assumes transactionParts entries are flat dicts — TODO confirm;
        # later parts overwrite keys from earlier ones when merged below.
        transactionparts = [parts for parts in dataset['transactionParts']]
        dataset.pop('transactionParts')
        # Window derived from the tap's bookmark (STATE) and configured increment.
        dataset["startDate"] = str(
            parse(STATE['last_fetched']) + timedelta(days=1))
        dataset["endDate"] = str(
            parse(STATE['last_fetched']) + timedelta(days=AUTH['increment']))
        for data in transactionparts:
            dataset.update(data)
    singer.write_record('Transactions', dataset)
def schedule_message(self, data, time_slot):
    """Build the stage-rotation message for the given 4-hour time slot.

    Args:
        data: schedule payload; data['schedule'] entries carry RFC3339
            'datetime_begin'/'datetime_end' plus stage and rule info.
        time_slot: 0 for the currently active rotation, N for the Nth
            upcoming rotation.

    Returns:
        The formatted message string, or a fallback string when no schedule
        entry matches the requested slot.
    """
    now_obj = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
    # Rotations last 4 hours; slot N starts between (N-1)*4 and N*4 hours
    # from now (slot 0 yields a -4h..0h window, i.e. the active rotation).
    min_time_to_start = datetime.timedelta(hours=time_slot*4-4)
    max_time_to_start = datetime.timedelta(hours=time_slot*4)
    if time_slot == 0:
        intro = '**Current Rotation (Ends in {} hours and {} minutes):**\n'
    else:
        intro = '**In {} hours and {} minutes:**\n'
    for data_set in data['schedule']:
        start = pyrfc3339.parse(data_set['datetime_begin'])
        end = pyrfc3339.parse(data_set['datetime_end'])
        time_to_start = start - now_obj
        time_to_end = end - now_obj
        # print(min_time_to_start.total_seconds(), time_to_start.total_seconds(), max_time_to_start.total_seconds())
        if min_time_to_start <= time_to_start <= max_time_to_start:
            reg_1 = data_set['stages']['regular'][0]['name']
            reg_2 = data_set['stages']['regular'][1]['name']
            rank_1 = data_set['stages']['gachi'][0]['name']
            rank_2 = data_set['stages']['gachi'][1]['name']
            mode = data_set['gachi_rule']
            # Slot 0 shows time remaining; future slots show time until start.
            if time_slot == 0:
                time_left = time_to_end
            else:
                time_left = time_to_start
            hours = int(time_left.total_seconds() / 3600)
            minutes = int(time_left.total_seconds()/60) % 60
            msg = intro + "Turf War is {} and {}\n{} is {} and {}"
            return msg.format(hours, minutes, reg_1, reg_2, mode, rank_1, rank_2)
    return "There is no data currently for this time slot."
def _daterange_filter(query, params, state):
    """
    handles filtering by start and end date
    paramters: startdate, enddate
    """
    # Lower bound: pop 'startdate', parse as YYYY-MM-DD or RFC3339, filter.
    startdate = params.get('startdate')
    if startdate is not None:
        try:
            del params['startdate']
            try:
                startdate = datetime.datetime.strptime(startdate, '%Y-%m-%d')
            except ValueError:
                startdate = pyrfc3339.parse(startdate)
            query = query.filter(pub_date__gte=startdate)
        except ValueError:
            raise QueryError(
                'Invalid start date "%s", must be YYYY-MM-DD or rfc3339' % startdate)
    # Upper bound: same treatment for 'enddate'.
    enddate = params.get('enddate')
    if enddate is not None:
        try:
            del params['enddate']
            try:
                enddate = datetime.datetime.strptime(enddate, '%Y-%m-%d')
            except ValueError:
                enddate = pyrfc3339.parse(enddate)
            query = query.filter(pub_date__lte=enddate)
        except ValueError:
            raise QueryError(
                'Invalid end date "%s", must be YYYY-MM-DD or rfc3339' % enddate)
    return query, params, state
def getprogrammesdetails():
    """Fetch programme details for every publisher/advertiser pair from the
    Awin API, flatten them, and emit singer records on 'ProgrammesDetails'.

    Sleeps 10 seconds between requests to respect the API rate limit.
    """
    singer.write_schema('ProgrammesDetails', PROGRAMMES_DETAILS, ['id'])
    for publisher in PUBLISHERS:
        for advertiser in ADVERTISERS:
            # BUG FIX: the query string was missing '=' after advertiserId,
            # so the advertiser filter was never applied.
            response = requests.get(
                'https://api.awin.com/publishers/' + str(publisher) +
                '/programmedetails?advertiserId=' + str(advertiser),
                headers={"User-Agent": AUTH['user_agent']})
            if response.status_code != 200:
                LOGGER.info('Error ' + str(response.content).replace('\n', ' ') +
                            ' in programmes details for Publisher:' + str(publisher) +
                            ' and advertiser:' + str(advertiser))
            else:
                # BUG FIX: the requests.Response object was being indexed and
                # updated as if it were a dict; decode the JSON payload first.
                progdetails = response.json()
                progdetails.update(progdetails['kpi'])
                del progdetails['kpi']
                progdetails.update(progdetails['programmeInfo'])
                del progdetails['programmeInfo']
                progdetails['countryCode'] = progdetails['primaryRegion'][
                    'countryCode']
                progdetails['countryName'] = progdetails['primaryRegion'][
                    'name']
                del progdetails['primaryRegion']
                progdetails['validDomains'] = ','.join([domain['domain'] \
                    for domain in progdetails['validDomains']])
                # BUG FIX: (max, min) was unpacked into (amountmin, amountmax),
                # swapping the two bounds; emit (min, max) in order instead.
                progdetails['amountmin'], progdetails['amountmax'] = \
                    [(i['min'], i['max']) for i in progdetails['commissionRange'] \
                    if i['type'] == 'amount'][0]
                progdetails['percentagemin'], progdetails['percentagemax'] = \
                    [(i['min'], i['max']) for i in progdetails['commissionRange'] \
                    if i['type'] == 'percentage'][0]
                # Reporting window derived from the tap bookmark and increment.
                progdetails["startDate"] = str(
                    parse(STATE['last_fetched']) + timedelta(days=1))
                progdetails["endDate"] = str(parse(STATE['last_fetched']) + \
                    timedelta(days=AUTH['increment']))
                singer.write_record('ProgrammesDetails', progdetails)
            time.sleep(10)
def _assert_usable_tango_package(self, napdr):
    """
    Assert that we have read enough information to have
    a 'usable' 5GTANGO package at hand.
    Where 'usable' means that the minimum set of fields
    is available to let any 5GTANGO component work with the package
    contents.
    Contains hard-coded checks that might evolve over time.
    raises MetadataValidationException
    """
    try:
        # check for empty fields
        # NOTE: assert statements are stripped under `python -O`; kept here
        # because the AssertionError is deliberately caught and re-raised
        # as MetadataValidationException below.
        assert (napdr.vendor is not None)
        assert (napdr.name is not None)
        assert (napdr.version is not None)
        assert (napdr.package_type is not None)
        assert (napdr.maintainer is not None)
        assert (napdr.release_date_time is not None)
        assert (len(napdr.metadata) > 0)
        # check if date strings can be parsed
        pyrfc3339.parse(napdr.release_date_time)
        # TODO extend as needed
        return True
    except AssertionError as e:
        # BUG FIX: corrected "vailidation" typo in the log/exception message.
        m = "Package metadata validation failed. Package unusable. Abort."
        LOG.exception(m)
        del e
        raise MetadataValidationException(m)
    # BUG FIX: removed unreachable `return False` that followed the raise.
def validate_expiration_time(self, original_value, value_in_question, type_=None):
    """
    Validate the expiration time value passed to Update or Create Methods.

    Args:
        original_value: The original RFC3339 value that needs to be compared
            (e.g., SLICE creation date)
        value_in_question: The RFC3339 value that is doubted for correctness
            (e.g., Expiry time update date)
        type_: optional object type key into self.STATIC['CONFIG'] used to
            look up the maximum extension window

    Returns:
        a boolean value to indicate whether the expiration time valid or not

    Raises:
        GFedv2ArgumentError: if the original value is already in the past
    """
    parsed_original_value = pyrfc3339.parse(original_value)
    parsed_value_in_question = pyrfc3339.parse(value_in_question)
    now = pytz.timezone("UTC").localize(datetime.datetime.utcnow())
    # Check if the object has already expired
    if now > parsed_original_value:
        raise GFedv2ArgumentError(
            "Update is not possible because the object has already expired: "
            + str(now) + " > " + str(parsed_original_value))
    if type_:
        # Cap the requested extension by the configured per-type maximum.
        maximum_expansion_duration = self.STATIC['CONFIG'][type_][
            'max_%s_extension_time' % type_.lower()]
        configuration_delta = datetime.timedelta(
            days=maximum_expansion_duration)
        delta_time_days = parsed_value_in_question - parsed_original_value
        return True if parsed_original_value <= parsed_value_in_question and delta_time_days < configuration_delta else False
    else:
        return parsed_original_value <= parsed_value_in_question
def _event_sort_key(event):
    """Return the timestamp used to order a Drive activity event.

    Revisions sort by their modification date; comments and comment
    replies sort by their creation date.
    """
    kind = event["kind"]
    if kind == "drive#revision":
        return rfc3339.parse(event["modifiedDate"])
    if kind in ("drive#comment", "drive#commentReply"):
        return rfc3339.parse(event["createdDate"])
    raise ValueError("unexpected event kind: %s" % event["kind"])
def datetime(text):
    """Parse an ISO8601/RFC3339 timestamp into a standard python datetime.

    The AQTS Publish API can emit quirky "T24:00:00" end-of-day stamps;
    those are normalized to midnight and rolled forward one day.
    """
    if text[10:19] == "T24:00:00":
        normalized = text.replace("T24:", "T00:")
        return pyrfc3339.parse(normalized) + timedelta(days=1)
    return pyrfc3339.parse(text)
def main():
    """Entry point: load config (and optional state), then run the tap.

    Exits with status 1 on a missing config/state file, an unparsable
    start date, or an unknown load type.
    """
    global STATE
    global AUTH
    try:
        AUTH = utils.load_json(PATH)
    except FileNotFoundError:
        LOGGER.error('Config file not found')
        sys.exit(1)
    if STATE_PATH is not None:
        # Resume from the supplied state file.
        try:
            state = utils.load_json(STATE_PATH)
        except FileNotFoundError:
            LOGGER.error('State file not found')
            sys.exit(1)
        if AUTH['type'] == 'day':
            LOGGER.info('Started data load for daily level metrics')
            STATE = {"filter":state, "increment":AUTH['increment'],
                     "type":AUTH['type']}
            start_load_day(AUTH)
        elif AUTH['type'] == 'minute':
            LOGGER.info('Started data load for minutes level metrics')
            STATE = {"filter":state, "increment":AUTH['increment'],
                     "type":AUTH['type']}
            # BUG FIX: the minute branch called start_load_day(); use the
            # minute-level loader as in the default-options path below.
            start_load_min(AUTH)
        else:
            # BUG FIX: message typo "should me" -> "should be".
            LOGGER.error('Load type should be minute or day')
            sys.exit(1)
    else:
        LOGGER.info('--state option is not passed running tap with default options')
        if AUTH['type'] == 'minute':
            STATE = DEFAULT_FILTER_MIN
            try:
                date = str(parse(AUTH['start_date']).date())
                time_portion = str(parse(AUTH['start_date']).time())[0:5]
            except ValueError:
                LOGGER.error('Start date not in RFC3339 format')
                sys.exit(1)
            STATE['filter']['date_ranges'][0]['last_day'] = date
            STATE['filter']['time_ranges'][0]['until'] = time_portion
            STATE['increment'] = AUTH['increment']
            STATE['type'] = AUTH['type']
            start_load_min(AUTH)
            LOGGER.info('Minute Level info done')
        elif AUTH['type'] == 'day':
            STATE = DEFAULT_FILTER_DAY
            try:
                date = str(parse(AUTH['start_date']).date())
            except ValueError:
                # BUG FIX: message typo "RC3339" -> "RFC3339"; casing aligned
                # with the minute branch.
                LOGGER.error('Start date not in RFC3339 format')
                sys.exit(1)
            STATE['filter']['date_ranges'][0]['last_day'] = date
            STATE['increment'] = AUTH['increment']
            STATE['type'] = AUTH['type']
            start_load_day(AUTH)
            LOGGER.info('Day Level Filter Done')
        else:
            # BUG FIX: message typo "should me" -> "should be".
            LOGGER.error('Load type should be minute or day')
            sys.exit(1)
def execute(self): if self.__mode == 'eventview': if self.__updateSec > 60: now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time try: eventsResult = self.__service.events().list( calendarId=self.__calendarID, timeMin=now, maxResults=5, singleEvents=True, orderBy='startTime').execute() except HttpError, err: if err.resp.status in [403, 500, 503]: time.sleep(5) else: if err.resp.get('content-type', '').startswith('application/json'): reason = json.loads(err.content).reason print reason raise events = eventsResult.get('items', []) self.__lg19.load_text("Calendar", 1,True, center=True, color="yellow") i = 3; if not events: self.__lg19.load_text("No upcoming events found.", 2) for event in events: if event['start'].get('dateTime') == None: start = datetime.datetime.strptime(event['start'].get('date'),"%Y-%m-%d") end = datetime.datetime.strptime(event['start'].get('date'),"%Y-%m-%d") now = datetime.datetime.today() prefix = " " if start <= now <= end: prefix = "-" self.__lg19.load_text(prefix + start.strftime("%d/%m") + " " +event['summary'], i) else: start = parse(event['start'].get('dateTime'), utc=True) end = parse(event['end'].get('dateTime'), utc=True) now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc) inhour = now + datetime.timedelta(hours=1) late = now + datetime.timedelta(hours=6) color = "white" if start >= late: color = "DarkGray" if start <= inhour <= end: color = "gold" if start <= now <= end: color = "red" start = start.astimezone(get_localzone()) self.__lg19.load_text(start.strftime("%d/%m %H:%M") + " " +event['summary'], i, color=color) i += 1 self.__lg19.set_text() self.__updateSec = 0 self.__updateSec += 1 time.sleep(1)
def test_zero_offset(self):
    '''
    Both +00:00 and -00:00 are equivalent to the offset 'Z' (UTC).
    '''
    for timestamp in ('2009-01-01T10:02:03+00:00',
                      '2009-01-01T10:02:03-00:00'):
        dt = parse(timestamp)
        eq_(dt.tzinfo, pytz.utc)
def check_datetime(value):
    """Validate that *value* is a well-formed RFC3339 timestamp string.

    See RFC3339 for more details: http://www.ietf.org/rfc/rfc3339.txt

    Args:
        value: item to check

    Raises:
        Exception: value is not a valid RFC3339 string
    """
    # Parsing is the validation; the result is intentionally discarded.
    pyrfc3339.parse(value)
def cancelEvent(evt_id, user_list, mail_list):
    """Cancel a private-tutoring reservation on the shared calendar.

    Args:
        evt_id: calendar event id of the reservation slot.
        user_list: participant names whose presence authorizes the cancel.
        mail_list: extra recipients for the cancellation notice.

    Returns:
        'CANCEL_NULL' when the slot has no participants recorded,
        'CANCEL_OK' after clearing the slot and mailing everyone, or
        'CANCEL_INVALID' when no name in user_list is on the reservation.
    """
    service = initService()
    event = service.events().get(calendarId=calId, eventId=evt_id).execute()
    # IDIOM FIX: dict.has_key() is Python-2-only; 'in' works everywhere.
    if 'description' not in event or event['description'].strip() == '':
        return 'CANCEL_NULL'
    user_str = event['description']
    for x in user_list:
        if user_str.find(x) != -1:
            # Clear the participant list to mark the slot as free again.
            event['description'] = ''
            update_event = service.events().update(calendarId=calId,
                                                   eventId=event['id'],
                                                   body=event).execute()
            creator_email = event['creator']['email']
            creator = event['summary']
            mail_list.append(creator_email)
            dtstart = parse(event['start']['dateTime']).strftime('%Y-%m-%d %H:%M:%S')
            dtend = parse(event['end']['dateTime']).strftime('%Y-%m-%d %H:%M:%S')
            for x in mail_list:
                if x.strip() != '':
                    mail.send_mail(sender=creator_email, to=x,
                                   subject="Cancelation of Private Tutoring Reservation",
                                   body="""
Hi,

Instructor: %(instructor)s
Start Time: %(dtstart)s
End Time: %(dtend)s
Participants: %(partici)s

This mail confirms you that your request of Private Tutoring has been canceled.

Best,
%(creator)s
""" % {'instructor': creator, 'dtstart': dtstart, 'dtend': dtend,
       'partici': user_str, 'creator': creator})
            return 'CANCEL_OK'
    return 'CANCEL_INVALID'
def __googleAppointmentsToDTO(self, googleAppointments):
    """Convert raw Google Calendar events into AppointmentDTO objects.

    Timed events carry a 'dateTime'; all-day events only a 'date'
    (parsed as midnight of that day).
    """
    def _coerce(field):
        # Prefer the RFC3339 dateTime; fall back to the all-day date.
        if field.get('dateTime'):
            return parse(field.get('dateTime'))
        return datetime.datetime.strptime(field.get('date'), "%Y-%m-%d")

    return [
        AppointmentDTO(appointment['id'],
                       _coerce(appointment['start']),
                       _coerce(appointment['end']),
                       appointment['summary'])
        for appointment in googleAppointments
    ]
def SQLdatetime(pydatetime_or_string):
    """Render a datetime (or RFC3339 string) using SQLDATE_FMT.

    Anything exposing strftime() is used directly; everything else is
    assumed to be a pyrfc3339-parseable string.
    """
    dtobj = (pydatetime_or_string
             if hasattr(pydatetime_or_string, 'strftime')
             else parse(pydatetime_or_string))
    return dtobj.strftime(SQLDATE_FMT)
def test_parse_naive_utc(self):
    '''
    Test parsing a UTC timestamp to a naive datetime.
    '''
    parsed = parse('2009-01-01T10:01:02Z', produce_naive=True)
    # produce_naive must strip the timezone entirely.
    eq_(parsed.tzinfo, None)
def expiry_time(ns, cavs):
    ''' Returns the minimum time of any time-before caveats found
    in the given list or None if no such caveats were found.

    The ns parameter is
    :param ns: used to determine the standard namespace prefix - if the
    standard namespace is not found, the empty prefix is assumed.
    :param cavs: a list of pymacaroons.Caveat
    :return: datetime.DateTime or None.
    '''
    prefix = ns.resolve(STD_NAMESPACE)
    cond = condition_with_prefix(prefix, COND_TIME_BEFORE)
    earliest = None
    for cav in cavs:
        # Only first-party caveats can carry a time-before condition.
        if not cav.first_party():
            continue
        name, rest = parse_caveat(cav.caveat_id_bytes.decode('utf-8'))
        if name != cond:
            continue
        try:
            # Normalize to a naive UTC datetime before comparing.
            candidate = pyrfc3339.parse(rest, utc=True).replace(tzinfo=None)
        except ValueError:
            # Malformed timestamps are skipped rather than failing the scan.
            continue
        if earliest is None or candidate < earliest:
            earliest = candidate
    return earliest
def _parse_time(input):
    """
    :param input: Either a number as milliseconds since Unix Epoch, or a string as a valid RFC3339 timestamp
    :return: milliseconds since Unix epoch, or None if input was invalid.
    """
    # bool is a subtype of int, and we don't want to try and compare it as a time.
    if isinstance(input, bool):
        log.warn("Got unexpected bool type when attempting to parse time")
        return None

    if isinstance(input, Number):
        return float(input)

    if isinstance(input, six.string_types):
        try:
            parsed_time = pyrfc3339.parse(input)
            timestamp = (parsed_time - epoch).total_seconds()
            return timestamp * 1000.0
        except Exception as e:
            log.warn("Couldn't parse timestamp:" + str(input) + " with message: " + str(e))
            return None

    # BUG FIX: `type(input)` is not a str, so concatenating it raised a
    # TypeError on exactly the path meant to report unexpected input.
    log.warn("Got unexpected type: " + str(type(input)) + " with value: " + str(input) + " when attempting to parse time")
    return None
def autoset_timestamp_end(params: dict):
    """Derive params["timestamp-end"] from the expected test duration.

    No-op (with a log line) when "scale" or "expected-test-duration" is
    missing. The end time is start + the number of simulated seconds the
    generator needs to cover the expected wall-clock duration at the
    configured QPS.
    """
    scale = params.get("scale")
    if scale is None:
        logging.error("scale is not set!!")
        return
    expected_test_duration = params.get("expected-test-duration")
    if expected_test_duration is None:
        logging.info("auto set timestamp_end is disabled")
        return
    # Default throughput unless a limiter is explicitly enabled.
    qps = 300 * 1000
    if params.get("limiter-max-qps") is not None and params.get("use-qps-limiter") is True:
        qps = params.get("limiter-max-qps")
    usecase = params.get("use-case")
    if usecase not in use_case_metrics_count:
        usecase = "devops"
    log_interval = params.get("log_interval")
    if log_interval is None:
        log_interval = 10
    start_string = params.get("timestamp-start")
    if start_string is None:
        start_string = "2020-01-01T00:00:00Z"
    start = parse(start_string)
    delta_time = log_interval * expected_test_duration * qps / (
        use_case_metrics_count[usecase] * scale) + 1
    params["timestamp-end"] = generate(start + timedelta(seconds=delta_time))
    logging.info(params)
def _notAfterBefore(cert_path, method): """Internal helper function for finding notbefore/notafter. :param str cert_path: path to a cert in PEM format :param function method: one of ``OpenSSL.crypto.X509.get_notBefore`` or ``OpenSSL.crypto.X509.get_notAfter`` :returns: the notBefore or notAfter value from the cert at cert_path :rtype: :class:`datetime.datetime` """ # pylint: disable=redefined-outer-name with open(cert_path) as f: x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, f.read()) # pyopenssl always returns bytes timestamp = method(x509) reformatted_timestamp = [timestamp[0:4], b"-", timestamp[4:6], b"-", timestamp[6:8], b"T", timestamp[8:10], b":", timestamp[10:12], b":", timestamp[12:]] timestamp_str = b"".join(reformatted_timestamp) # pyrfc3339 uses "native" strings. That is, bytes on Python 2 and unicode # on Python 3 if six.PY3: timestamp_str = timestamp_str.decode('ascii') return pyrfc3339.parse(timestamp_str)
def go_to_py_cookie(go_cookie):
    '''Convert a Go-style JSON-unmarshaled cookie into a Python cookie.

    :param go_cookie: dict with Go http.Cookie field names (Name, Value,
        Domain, HostOnly, Path, Secure, and optionally Expires as RFC3339).
    :return: an http.cookiejar.Cookie.
    '''
    expires = None
    if go_cookie.get('Expires') is not None:
        t = pyrfc3339.parse(go_cookie['Expires'])
        expires = t.timestamp()
    return cookiejar.Cookie(
        version=0,
        name=go_cookie['Name'],
        value=go_cookie['Value'],
        port=None,
        port_specified=False,
        # Unfortunately Python cookies don't record the original
        # host that the cookie came from, so we'll just use Domain
        # for that purpose, and record that the domain was specified,
        # even though it probably was not. This means that
        # we won't correctly record the CanonicalHost entry
        # when writing the cookie file after reading it.
        domain=go_cookie['Domain'],
        domain_specified=not go_cookie['HostOnly'],
        domain_initial_dot=False,
        path=go_cookie['Path'],
        path_specified=True,
        secure=go_cookie['Secure'],
        expires=expires,
        discard=False,
        comment=None,
        comment_url=None,
        # BUG FIX: cookiejar.Cookie expects `rest` to be a dict; passing
        # None makes has_nonstandard_attr()/get_nonstandard_attr() raise
        # TypeError ("argument of type 'NoneType' is not iterable").
        rest={},
        rfc2109=False,
    )
def __init__(self, header, start=None, end=None):
    """Capture the span boundaries and a flattened copy of the header.

    Start/end default to the RFC3339 'Start'/'End' header values when not
    given explicitly; every header value is registered via header_add().
    """
    if start is None and 'Start' in header:
        start = rfc3339.parse(header['Start'][0])
    self.start = start
    if end is None and 'End' in header:
        end = rfc3339.parse(header['End'][0])
    self.end = end
    self.header = dict()
    for key, values in header.items():
        for value in values:
            self.header_add(key, value)
def p_expression_tagged_element(p):
    """expression : TAG expression"""
    # NOTE: the docstring above is a PLY grammar rule — do not edit it.
    # Resolves an EDN tagged element: #inst -> date/datetime, #uuid -> UUID,
    # otherwise dispatch to a registered custom serializer.
    tag = p[1]
    element = p[2]
    if tag == 'inst':
        # Partial dates (Y, Y-m, Y-m-d) become datetime.date; anything
        # longer is parsed as a full RFC3339 instant.
        length = len(element)
        hyphens_count = element.count('-')
        if length == 10 and hyphens_count == 2:
            output = datetime.datetime.strptime(element, '%Y-%m-%d').date()
        elif length == 7 and hyphens_count == 1:
            output = datetime.datetime.strptime(element, '%Y-%m').date()
        elif length == 4 and hyphens_count == 0:
            output = datetime.datetime.strptime(element, '%Y').date()
        else:
            output = pyrfc3339.parse(element)
    elif tag == 'uuid':
        output = uuid.UUID(element)
    elif tag in _serializers:
        # User-registered tag handlers take the raw element.
        output = _serializers[tag](element)
    else:
        raise NotImplementedError(
            u"Don't know how to handle tag ImmutableDict({})".format(tag))
    p[0] = output
def _onFolderReceived(self, id, response):
    """Drive files.list callback: requeue paging, fan out sub-searches,
    normalize timestamps, and hand results to the writer queue.

    Args:
        id: key into self.folderPageTokens identifying the originating query.
        response: Drive API files().list response dict.
    """
    if self.terminate == False:
        results = response.get("files", [])
        nextPage = response.get("nextPageToken", None)
        # print("received")
        # print(id, response)
        # print("pageToken", nextPage)
        if nextPage != None:
            # More pages remain: remember the token and requeue this query.
            folder_str, _a = self.folderPageTokens[id]
            self.folderPageTokens[id] = (folder_str, nextPage)
            self.unfinished_folder_req += 1
            self.searchFolderQueue.put(id, block=False)
        elif nextPage == None and id in self.folderPageTokens:
            # Last page: drop the paging bookkeeping for this query.
            self.folderPageTokens.pop(id, "")
        for f in results:
            # Fan out: search each received folder for sub-folders and files.
            folder: str = "'{}' in parents".format(f["id"])
            self.folderQueries.put(folder, block=False)
            self.fileQueries.put(folder, block=False)
            self.folderCount += 1
            # Rewrite the RFC3339 stamp as a naive string representation.
            modifiedTime: str = f["modifiedTime"]
            newTime: str = str(
                pyrfc3339.parse(modifiedTime).replace(
                    tzinfo=None))  # modified time is always in UTC
            f["modifiedTime"] = newTime
        if len(results) > 0:
            self.folderWriteQueue.put(results, block=False)
def stations(self):
    """Lazily parse the DWD point-forecast KMZ and return Station objects.

    Downloads the data file first if it is not cached on disk (refresh()),
    then caches the parsed list in self._stations for later calls.
    """
    if not self._stations:
        logger.info('Load stations...')
        if not os.path.exists(self._data_path):
            self.refresh()
        # A KMZ is a zip archive; its first entry is the KML document.
        with zipfile.ZipFile(self._data_path, 'r') as kmz_file:
            filename = kmz_file.namelist()[0]
            with kmz_file.open(filename) as kml_file:
                kml = lxml.etree.parse(kml_file)
                node = kml.getroot().find('{http://www.opengis.net/kml/2.2}Document')
                node = node.find('{http://www.opengis.net/kml/2.2}ExtendedData')
                node = node.find(
                    '{https://opendata.dwd.de/weather/lib/pointforecast_dwd_extension_V1_0.xsd}ProductDefinition'
                )
                node = node.find(
                    '{https://opendata.dwd.de/weather/lib/pointforecast_dwd_extension_V1_0.xsd}ForecastTimeSteps'
                )
                # Shared forecast time axis (RFC3339 stamps) for all stations.
                times = [pyrfc3339.parse(i.text) for i in node]
                document = kml.getroot().find('{http://www.opengis.net/kml/2.2}Document')
                nodes = document.findall('{http://www.opengis.net/kml/2.2}Placemark')
                # One Placemark per station.
                self._stations = [Station(node, times) for node in nodes]
    else:
        logger.debug('Use cached stations...')
    return self._stations
def get_events(n_events=10):
    """Shows basic usage of the Google Calendar API.

    Creates a Google Calendar API service object and returns
    (summary, formatted start date) tuples for the next n_events events
    on the user's primary calendar.
    """
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    service = discovery.build('calendar', 'v3', http=http)
    now = datetime.datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time
    eventsResult = service.events().list(
        calendarId='primary', timeMin=now, maxResults=n_events,
        singleEvents=True, orderBy='startTime').execute()
    events = eventsResult.get('items', [])
    event_list = []
    for event in events:
        raw_start = event['start'].get('dateTime', None)
        if raw_start is not None:
            start = pyrfc3339.parse(raw_start)
        else:
            # All-day event: only a plain date is available.
            start = datetime.datetime.strptime(event['start'].get('date'), '%Y-%m-%d')
        formatted = start.strftime('%a {} %b'.format(ordinal(start.day)))
        event_list.append((event['summary'], formatted))
    return event_list
def fetchPredictions(self, stopTag):
    """Fetch arrival predictions for one stop from the RoutePositionET feed.

    Args:
        stopTag: platform tag identifying the stop.

    Returns:
        dict with agency, stop tag, feed expiry (aware UTC datetime) and
        ETA predictions sorted soonest-first.

    Raises:
        requests.HTTPError: when the feed endpoint returns an error status.
    """
    r = requests.get(
        self.baseURL + "/rtt/public/utility/file.aspx?contenttype=SQLXML&Name=RoutePositionET.xml&PlatformTag=" + stopTag
    )
    r.raise_for_status()
    predictionsTree = etree.fromstring(r.content)
    predictionData = {
        "agency": self.agency,
        "tag": stopTag,
        # Feed-provided expiry of this snapshot, parsed as UTC.
        "expires": parse(
            predictionsTree.xpath("/c:RoutePositionET/c:Content/@Expires", namespaces=NSMAP)[0],
            utc=True
        ),
        "predictions": [],
    }
    for trip in predictionsTree.xpath(
        "/c:RoutePositionET/c:Platform/c:Route/c:Destination/c:Trip", namespaces=NSMAP
    ):
        # Destination/route names come from the Trip's ancestor elements.
        predictionData["predictions"].append(
            {
                "minutes": int(trip.attrib["ETA"]),
                "destination": trip.xpath("../@Name")[0],
                "route": trip.xpath("../../@RouteNo")[0],
            }
        )
    # Soonest arrivals first.
    predictionData["predictions"].sort(key=lambda x: x["minutes"])
    return predictionData
def expiry_time(ns, cavs):
    ''' Returns the minimum time of any time-before caveats found
    in the given list or None if no such caveats were found.

    The ns parameter is
    :param ns: used to determine the standard namespace prefix - if the
    standard namespace is not found, the empty prefix is assumed.
    :param cavs: a list of pymacaroons.Caveat
    :return: datetime.DateTime or None.
    '''
    prefix = ns.resolve(STD_NAMESPACE)
    time_before_cond = condition_with_prefix(
        prefix, COND_TIME_BEFORE)
    t = None
    for cav in cavs:
        # Only first-party caveats can carry a time-before condition.
        if not cav.first_party():
            continue
        cav = cav.caveat_id_bytes.decode('utf-8')
        name, rest = parse_caveat(cav)
        if name != time_before_cond:
            continue
        try:
            # NOTE(review): parsed datetimes keep their tzinfo here, so the
            # `<` comparison assumes all caveat timestamps are tz-aware —
            # confirm, since mixing naive and aware datetimes raises.
            et = pyrfc3339.parse(rest)
            if t is None or et < t:
                t = et
        except ValueError:
            # Malformed timestamps are skipped rather than failing the scan.
            continue
    return t
def commit_reply(self, filepath, reply):
    """Append a Drive comment reply to *filepath* and commit it to git.

    Resolve/reopen events are rendered as markdown headers; plain replies
    include the reply body. The git commit is authored with the reply's
    own display name and creation timestamp.
    """
    EventCommitter.prep_directory_for_file(filepath)
    with codecs.open(filepath, "a", "utf8") as fd:
        if "verb" in reply:
            if reply["verb"] == "resolve":
                fd.write(
                    u"---\n##Resolved by %s at %s:\n\n"
                    % (reply["author"]["displayName"], reply["createdDate"]))
                message = "Resolved by %s" % reply["author"]["displayName"]
            elif reply["verb"] == "reopen":
                fd.write(
                    u"---\n##Reopened by %s at %s:\n\n"
                    % (reply["author"]["displayName"], reply["createdDate"]))
                message = "Reopened by %s" % reply["author"]["displayName"]
        else:
            # NOTE(review): assuming this else pairs with `"verb" in reply`
            # (plain replies carry no verb) — confirm against original file.
            fd.write(u"---\n##Reply by %s at %s:\n%s\n\n"
                     % (reply["author"]["displayName"], reply["createdDate"],
                        reply["content"]))
            message = "Reply by %s" % reply["author"]["displayName"]
    # Commit changes to repository
    self.repo.index.add(os.path.relpath(filepath, self.repo.workdir))
    self.repo.index.write()
    tree = self.repo.index.write_tree()
    # Author the commit with the reply's timestamp and UTC offset (minutes).
    mt = rfc3339.parse(reply["createdDate"])
    author = git.Signature(reply["author"]["displayName"], "n/a",
                           calendar.timegm(mt.utctimetuple()),
                           mt.utcoffset().seconds / 60)
    parents = [] if self.last_commit is None else [self.last_commit]
    self.last_commit = self.repo.create_commit("refs/heads/master", author,
                                               author, message, tree, parents)
    logging.info("Commit reply: %s : %s", self.last_commit, message)
def _notAfterBefore(cert_path, method): """Internal helper function for finding notbefore/notafter. :param str cert_path: path to a cert in PEM format :param function method: one of ``crypto.X509.get_notBefore`` or ``crypto.X509.get_notAfter`` :returns: the notBefore or notAfter value from the cert at cert_path :rtype: :class:`datetime.datetime` """ # pylint: disable=redefined-outer-name with open(cert_path, "rb") as f: # type: IO[bytes] x509 = crypto.load_certificate(crypto.FILETYPE_PEM, f.read()) # pyopenssl always returns bytes timestamp = method(x509) reformatted_timestamp = [ timestamp[0:4], b"-", timestamp[4:6], b"-", timestamp[6:8], b"T", timestamp[8:10], b":", timestamp[10:12], b":", timestamp[12:] ] # pyrfc3339 always uses the type `str`. This means that in Python 2, it # expects str/bytes and in Python 3 it expects its str type or the Python 2 # equivalent of the type unicode. timestamp_bytes = b"".join(reformatted_timestamp) if six.PY3: timestamp_str = timestamp_bytes.decode('ascii') else: timestamp_str = timestamp_bytes return pyrfc3339.parse(timestamp_str)
def _notAfterBefore(cert_path: str,
                    method: Callable[[crypto.X509], Optional[bytes]]) -> datetime.datetime:
    """Internal helper function for finding notbefore/notafter.

    :param str cert_path: path to a cert in PEM format
    :param function method: one of ``crypto.X509.get_notBefore``
        or ``crypto.X509.get_notAfter``

    :returns: the notBefore or notAfter value from the cert at cert_path
    :rtype: :class:`datetime.datetime`
    """
    # pylint: disable=redefined-outer-name
    with open(cert_path, "rb") as f:
        x509 = crypto.load_certificate(crypto.FILETYPE_PEM, f.read())
    # pyopenssl always returns bytes
    timestamp = method(x509)
    if not timestamp:
        raise errors.Error("Error while invoking timestamp method, None has been returned.")
    # Reassemble ASN.1 time "YYYYMMDDhhmmss..." into RFC3339 form.
    pieces = (timestamp[0:4], b"-", timestamp[4:6], b"-", timestamp[6:8],
              b"T", timestamp[8:10], b":", timestamp[10:12], b":",
              timestamp[12:])
    # pyrfc3339 always uses the type `str`
    return pyrfc3339.parse(b"".join(pieces).decode('ascii'))
def _notAfterBefore(cert_path, method):
    """Internal helper function for finding notbefore/notafter.

    :param str cert_path: path to a cert in PEM format
    :param function method: one of ``crypto.X509.get_notBefore``
        or ``crypto.X509.get_notAfter``

    :returns: the notBefore or notAfter value from the cert at cert_path
    :rtype: :class:`datetime.datetime`

    """
    # pylint: disable=redefined-outer-name
    # BUG FIX: open in binary mode — crypto.load_certificate expects bytes,
    # and a text-mode read() returns str on Python 3.
    with open(cert_path, "rb") as f:
        x509 = crypto.load_certificate(crypto.FILETYPE_PEM, f.read())
    # pyopenssl always returns bytes
    timestamp = method(x509)
    # Reassemble ASN.1 time "YYYYMMDDhhmmss..." into RFC3339 form.
    reformatted_timestamp = [
        timestamp[0:4], b"-", timestamp[4:6], b"-", timestamp[6:8],
        b"T", timestamp[8:10], b":", timestamp[10:12], b":", timestamp[12:]
    ]
    timestamp_str = b"".join(reformatted_timestamp)
    # pyrfc3339 uses "native" strings. That is, bytes on Python 2 and unicode
    # on Python 3
    if six.PY3:
        timestamp_str = timestamp_str.decode('ascii')
    return pyrfc3339.parse(timestamp_str)
def _notAfterBefore(cert_path, method):
    """Internal helper function for finding notbefore/notafter.

    :param str cert_path: path to a cert in PEM format
    :param function method: one of ``OpenSSL.crypto.X509.get_notBefore``
        or ``OpenSSL.crypto.X509.get_notAfter``

    :returns: the notBefore or notAfter value from the cert at cert_path
    :rtype: :class:`datetime.datetime`

    """
    # FIX: read as bytes -- OpenSSL.crypto.load_certificate expects PEM
    # bytes on Python 3, and a close is guaranteed via the context manager.
    with open(cert_path, "rb") as f:
        x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM,
                                               f.read())
    timestamp = method(x509)
    # FIX: pyopenssl returns the ASN.1 time as bytes on Python 3; decode it
    # first so the join below does not mix bytes with the str separators
    # (which raises TypeError on Python 3).
    if isinstance(timestamp, bytes):
        timestamp = timestamp.decode('ascii')
    reformatted_timestamp = [
        timestamp[0:4], "-", timestamp[4:6], "-", timestamp[6:8], "T",
        timestamp[8:10], ":", timestamp[10:12], ":", timestamp[12:],
    ]
    return pyrfc3339.parse("".join(reformatted_timestamp))
def updateEvent(evt_id, user_str, mail_list):
    """Reserve a tutoring calendar event and e-mail a confirmation.

    Args:
        evt_id: Google Calendar event id to reserve.
        user_str: participant list to record in the event description.
        mail_list: e-mail addresses to notify; the event creator's address
            is appended to it (the list is mutated in place).

    Returns:
        'RESERVED' if the slot was already taken, 'RESERVE_OK' otherwise.
    """
    service = initService()
    # NOTE(review): `calId` is a module-level name not visible here -- confirm.
    event = service.events().get(calendarId=calId, eventId=evt_id).execute()
    creator_email = event['creator']['email']
    creator = event['summary']
    # A non-empty description marks the slot as already reserved.
    # FIX: dict.has_key() was removed in Python 3; `in` works on both.
    if 'description' in event and event['description'].strip() != '':
        return 'RESERVED'
    event['description'] = user_str
    # The returned updated event was never used; drop the dead local.
    service.events().update(calendarId=calId, eventId=event['id'],
                            body=event).execute()
    mail_list.append(creator_email)
    dtstart = parse(event['start']['dateTime']).strftime('%Y-%m-%d %H:%M:%S')
    dtend = parse(event['end']['dateTime']).strftime('%Y-%m-%d %H:%M:%S')
    for x in mail_list:
        if x.strip() != '':
            mail.send_mail(sender=creator_email, to=x,
                           subject="Confirmation of Private Tutoring Reservation",
                           body="""
Hi,

Instructor: %(instructor)s
Start Time: %(dtstart)s
End Time: %(dtend)s
Participants: %(partici)s

This mail confirms you that your request of Private Tutoring has been received by GSI. Once location is determined, GSI will contact you with details.

Best,
%(creator)s
""" % {'instructor': creator, 'dtstart': dtstart, 'dtend': dtend,
       'partici': user_str, 'creator': creator})
    return 'RESERVE_OK'
def get_execution_time(text):
    """Extract the <executiontime> element from a response body and parse it.

    Raises ValueError (with the service's own error message) when the
    element is absent.
    """
    soup = BeautifulSoup(text)
    node = soup.find("executiontime")
    if node:
        return parse(node.text)
    # No execution time present: surface the error reported in the response.
    raise ValueError(handle_error(text))
def decode(self, json_string):
    """Decode JSON, converting RFC 3339 timestamp strings into datetimes.

    Every top-level value that parses as an RFC 3339 timestamp is replaced
    with the parsed datetime; all other values are left untouched.

    Args:
        json_string: the JSON document to decode.

    Returns:
        the decoded object, with timestamp-valued entries converted.
    """
    obj = super(CustomJsonDecoder, self).decode(json_string)
    # FIX: only dicts have .items(); a top-level list/scalar used to crash.
    if isinstance(obj, dict):
        for k, v in obj.items():
            try:
                obj[k] = parse(v)
            # FIX: non-string values (ints, lists, None) make parse() raise
            # TypeError, which the original `except ValueError` missed.
            except (ValueError, TypeError):
                pass
    return obj
def test_deepcopy(self):
    '''
    Tests that deepcopy works and doesn't crash
    '''
    parsed = parse('2009-01-01T10:02:03+02:00')
    deepcopy(parsed)
def test_parse_microseconds(self):
    '''
    Test parsing timestamps with microseconds.
    '''
    # .25 seconds must become 250000 microseconds.
    result = parse('2009-01-01T10:02:03.25Z')
    eq_(result.microsecond, 250000)
def test_generate_local_parse_local(self):
    '''
    Generate a local timestamp and parse it into a local datetime.
    '''
    zone = pytz.timezone('US/Eastern')
    original = zone.localize(datetime.utcnow())
    # Round-trip through a local (non-UTC) timestamp string.
    roundtripped = parse(generate(original, utc=False, microseconds=True),
                         utc=False)
    eq_(original, roundtripped)
def test_generate_utc_parse_utc(self):
    '''
    Generate a UTC timestamp and parse it into a UTC datetime.
    '''
    original = datetime.utcnow().replace(tzinfo=pytz.utc)
    # Round-trip through a UTC timestamp string.
    roundtripped = parse(generate(original, microseconds=True))
    eq_(original, roundtripped)
def test_mixed_case(self):
    '''
    Timestamps may use either 'T' or 't' and either 'Z' or 'z'
    according to :RFC:`3339`.
    '''
    expected = datetime(2009, 1, 1, 10, 1, 2, tzinfo=pytz.utc)
    eq_(parse('2009-01-01t10:01:02z'), expected)
def _notafterbefore(self, method, version):
    """Internal helper function for finding notbefore/notafter.

    :param function method: one of ``OpenSSL.crypto.X509.get_notBefore``
        or ``OpenSSL.crypto.X509.get_notAfter``
    :param version: cert version to inspect, or ``None`` for the current
        target.

    :returns: the parsed notBefore/notAfter value
    :rtype: :class:`datetime.datetime`
    """
    if version is None:
        target = self.current_target("cert")
    else:
        target = self.version("cert", version)
    # FIX: `open(target).read()` leaked the file handle until GC; a context
    # manager closes it deterministically.
    with open(target) as f:
        pem = f.read()
    x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, pem)
    i = method(x509)
    # NOTE(review): on Python 3 `method` returns bytes, which would not
    # concatenate with these str separators -- presumably Python 2 era code;
    # verify before running under Python 3.
    return pyrfc3339.parse(i[0:4] + "-" + i[4:6] + "-" + i[6:8] + "T" +
                           i[8:10] + ":" + i[10:12] + ":" + i[12:])
def utc_roundtrip(self, tz_name):
    '''
    Generates a local datetime using the given timezone, produces a local
    timestamp from the datetime, parses the timestamp to a UTC datetime,
    and verifies that the two datetimes are equal.
    '''
    zone = pytz.timezone(tz_name)
    original = zone.localize(datetime.utcnow())
    stamp = generate(original, utc=False, microseconds=True)
    # Default parse() yields a UTC datetime; equality is still expected
    # because both represent the same instant.
    eq_(original, parse(stamp))
def _check_time_before(ctx, cond, arg):
    """Check a time-before caveat: the macaroon is valid only before the
    RFC 3339 instant in *arg*.

    Returns an error string when expired or unparseable, None when valid.
    """
    clock = ctx.get(TIME_KEY)
    # Fall back to real wall-clock UTC time when no test clock is installed.
    now = pytz.UTC.localize(datetime.utcnow()) if clock is None else clock.utcnow()
    try:
        expiry = pyrfc3339.parse(arg)
    except ValueError:
        return 'cannot parse "{}" as RFC 3339'.format(arg)
    if expiry <= now:
        return 'macaroon has expired'
    return None
def validate_expiration_time(self, original_value, value_in_question, type_=None):
    """
    Validate the expiration time value passed to Update or Create Methods.

    Args:
        original_value: The original value that needs to be compared (e.g., SLICE creation date)
        value_in_question: The value that is doubted for correctness (e.g., Expiry time update date)
        type_: optional object-type key into self.STATIC['CONFIG']; when
            given, the configured 'max_<type>_extension_time' (in days) is
            also enforced as an upper bound on the extension

    Returns:
        a boolean value to indicate whether the expiration time valid or not
    """
    parsed_original_value = pyrfc3339.parse(original_value)
    parsed_value_in_question = pyrfc3339.parse(value_in_question)
    if type_:
        # Cap the extension at the per-type configured maximum window.
        maximum_expansion_duration = self.STATIC['CONFIG'][type_]['max_%s_extension_time' % type_.lower()]
        configuration_delta = datetime.timedelta(days=maximum_expansion_duration)
        delta_time_days = parsed_value_in_question - parsed_original_value
        # FIX: the comparison already yields a bool; the redundant
        # `True if ... else False` ternary is dropped.
        return (parsed_original_value < parsed_value_in_question
                and delta_time_days < configuration_delta)
    return parsed_original_value < parsed_value_in_question
def splatfest_message(self, data):
    """Format the Splatfest announcement (teams, maps, time remaining)
    from the schedule payload in *data*."""
    now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
    schedule = data['schedule'][0]
    end = pyrfc3339.parse(schedule['datetime_end'])
    remaining = (end - now).total_seconds()
    hours = int(remaining / 3600)
    minutes = int(remaining / 60) % 60
    template = '**Splatfest Time! (Ends in {} hours and {} minutes)**\n' \
               'Teams are **{}** and **{}**\n' \
               'Maps are {}, {}, and {}'
    return template.format(hours, minutes,
                           schedule['team_alpha_name'],
                           schedule['team_bravo_name'],
                           schedule['stages'][0]['name'],
                           schedule['stages'][1]['name'],
                           schedule['stages'][2]['name'])
def cred_from_cred_id(cred_id):
    """Rebuild an OAuth2Credentials object from its stored database row."""
    row = r.table('credentials').get(cred_id).run(flask.g.db_conn)
    # token_expiry is stored as an RFC 3339 string; OAuth2Credentials wants
    # a naive datetime, so parse it and strip the tzinfo.
    expiry = pyrfc3339.parse(row['token_expiry']).replace(tzinfo=None)
    return client.OAuth2Credentials(
        row['access_token'],
        row['client_id'],
        row['client_secret'],
        row['refresh_token'],
        expiry,
        row['token_uri'],
        row['user_agent'],
        revoke_uri=row['revoke_uri'],
        id_token=row['id_token'],
        token_response=row['token_response'],
    )
def p_expression_tagged_element(p):
    """expression : TAG expression"""
    # NOTE: the docstring above is the PLY grammar rule -- do not edit it.
    # Dispatch on the tag: built-in handlers for #inst and #uuid, then any
    # user-registered serializer, otherwise reject the tag.
    tag, element = p[1], p[2]
    if tag == 'inst':
        p[0] = pyrfc3339.parse(element)
    elif tag == 'uuid':
        p[0] = uuid.UUID(element)
    elif tag in _serializers:
        p[0] = _serializers[tag](element)
    else:
        raise NotImplementedError("Don't know how to handle tag {}".format(tag))