def test_date(self):
    config = AutoRestDateTestServiceConfiguration(base_url="http://localhost:3000")
    config.log_level = log_level
    client = AutoRestDateTestService(config)

    max_date = isodate.parse_date("9999-12-31T23:59:59.999999Z")
    min_date = isodate.parse_date("0001-01-01T00:00:00Z")
    client.date_model.put_max_date(max_date)
    client.date_model.put_min_date(min_date)

    self.assertEqual(max_date, client.date_model.get_max_date())
    self.assertEqual(min_date, client.date_model.get_min_date())
    self.assertIsNone(client.date_model.get_null())

    # Python's isodate.parse_date accepts overly wild input and won't raise:
    #with self.assertRaises(DeserializationError):
    #    client.date_model.get_invalid_date()

    with self.assertRaises(DeserializationError):
        client.date_model.get_overflow_date()

    with self.assertRaises(DeserializationError):
        client.date_model.get_underflow_date()
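# Side note on why the "invalid date" assertion above is disabled: a small
# sketch (not part of the test) showing that isodate.parse_date() matches
# only the leading date portion of a string and ignores trailing characters,
# so even a full datetime string parses as a plain date.
import isodate

assert isodate.parse_date("9999-12-31T23:59:59.999999Z") == isodate.parse_date("9999-12-31")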
def parse_fullcalendar_request(request, timezone):
    """ Parses start and end from the given fullcalendar request. It is
    expected that no timezone is passed (the default).

    See `<http://fullcalendar.io/docs/timezone/timezone/>`_

    :returns: A tuple of timezone-aware datetime objects or (None, None).

    """
    start = request.params.get('start')
    end = request.params.get('end')

    if start and end:
        if 'T' in start:
            start = parse_datetime(start)
            end = parse_datetime(end)
        else:
            start = datetime.combine(parse_date(start), time(0, 0))
            end = datetime.combine(parse_date(end), time(23, 59, 59, 999999))

        start = sedate.replace_timezone(start, timezone)
        end = sedate.replace_timezone(end, timezone)

        return start, end
    else:
        return None, None
def getStats():
    today = date.today()
    back30_days = (datetime.now() - timedelta(days=30)).date()
    back7_days = (datetime.now() - timedelta(days=7)).date()

    collection = 'greekgovernmentgazette'
    query_all = f'collection:({collection})'

    res = {}
    try:
        s = get_session()
        s.mount_http_adapter()
        search_results = s.search_items(query_all, fields=['identifier', 'addeddate'])
        lst_res = list(search_results)

        docs_last30days = [i for i in lst_res
                           if isodate.parse_date(i['addeddate']) >= back30_days]
        docs_last7days = [i for i in lst_res
                          if isodate.parse_date(i['addeddate']) >= back7_days]
        docs_today = [i for i in lst_res
                      if isodate.parse_date(i['addeddate']) == today]

        res['count_all'] = len(lst_res)
        res['count_last30days'] = len(docs_last30days)
        res['count_last7days'] = len(docs_last7days)
        res['count_today'] = len(docs_today)
    finally:
        # Best effort: returning from ``finally`` swallows any exception and
        # yields whatever counts were collected (possibly an empty dict).
        return res
def test_offset_inputs(self):
    """
    Test offset inputs.

    This test uses the same graph arrangement as test_two_merged_inputs(),
    but for this test case, none of the inputs have perfect alignment.
    """
    clear_graph()

    timeseries_1 = [[isodate.parse_date('2000-01-01'), 1]]
    timeseries_2 = [[isodate.parse_date('2000-01-02'), 200],
                    [isodate.parse_date('2000-01-03'), 300]]

    emit_data_block_1 = EmitTimeSeries(timeseries_1)
    emit_data_block_2 = EmitTimeSeries(timeseries_2)
    emit_data_block_1.set_debug_name('emit_data_block_1')
    emit_data_block_2.set_debug_name('emit_data_block_2')

    # We should see the sequence start on the first day for which we have
    # data for BOTH inputs, and we should see another call for every time
    # that we have updated information for ANY input:
    expected_data = [[isodate.parse_date('2000-01-02'), 1, 200],
                     [isodate.parse_date('2000-01-03'), 1, 300]]
    confirm_sequence_block = ConfirmMergedSequence(expected_data, nose.tools.assert_equal)
    confirm_sequence_block.set_debug_name('confirm_sequence_block')

    connect(emit_data_block_1, 'value', confirm_sequence_block, 'input_a')
    connect(emit_data_block_2, 'value', confirm_sequence_block, 'input_b')

    trace = core.run()
    executed_set = executed_block_set(trace)
    nose.tools.assert_equal(executed_set,
                            set(['emit_data_block_1', 'emit_data_block_2',
                                 'confirm_sequence_block']))
def create_temporal_coverage(temporal: Mapping) -> PeriodOfTime:
    period = PeriodOfTime()
    period.start_date = parse_date(temporal["from"]).strftime("%Y-%m-%d")
    period.end_date = parse_date(temporal["to"]).strftime("%Y-%m-%d")
    return period
def db_ebay_got_seller_list_date(self):
    """
    Prevents the `get_seller_list` command from being run multiple times
    a day.

    Returns True when there is either no date, or the date was in the past.
    Returns False when there is a date that is today or in the future.
    """
    query = "SELECT value FROM ebay_internals WHERE key = 'got_seller_list_date'"
    self.__execute(query)
    last_date = self.__fetchone('value', 'no')

    if last_date != 'no' and isodate.parse_date(last_date) >= datetime.date.today():
        msg = 'We already ran get_seller_list today (or in the future). Wait until tomorrow'
        self.log.warning(msg)
        return False

    if last_date == 'no' or isodate.parse_date(last_date) < datetime.date.today():
        query = "UPDATE ebay_internals SET value = :isodate WHERE key = 'got_seller_list_date'"
        today = isodate.date_isoformat(datetime.date.today())
        self.__execute(query, {'isodate': today})
        self.db_ebay_zero_request_counter()
        return True
def parse_date_range_from_dict(cls, data):
    verify_request_contains_mandatory_fields(data, ["start_date", "end_date"])
    try:
        start_date = parse_date(data["start_date"])
        end_date = parse_date(data["end_date"])
        return (start_date, end_date)
    except ISO8601Error as exception:
        cls.raiseException(make_error_dict(str(exception)), 404)
def test_date(self, client, value_header):
    client.header.param_date("valid", isodate.parse_date("2010-01-01"))
    client.header.param_date("min", datetime.min)

    response = client.header.response_date("valid", cls=value_header)
    assert response == isodate.parse_date("2010-01-01")

    response = client.header.response_date("min", cls=value_header)
    assert response == isodate.parse_date("0001-01-01")
def test_date_valid(self, client): date1 = isodate.parse_date("2000-12-01") date2 = isodate.parse_date("1980-01-02") date3 = isodate.parse_date("1492-10-12") date_array = client.array.get_date_valid() assert date_array, [date1, date2 == date3] client.array.put_date_valid([date1, date2, date3])
def test_get_date_invalid(self, client):
    date_null_dict = {"0": isodate.parse_date("2012-01-01"),
                      "1": None,
                      "2": isodate.parse_date("1776-07-04")}
    assert date_null_dict == client.dictionary.get_date_invalid_null()

    with pytest.raises(DeserializationError):
        client.dictionary.get_date_invalid_chars()
def is_iso8601_date(datestamp: str) -> bool:
    """Check if the argument is a valid ISO 8601 date"""
    try:
        isodate.parse_date(datestamp)
        return True
    except (ISO8601Error, ValueError):
        return False
def valid_8601_date(s):
    try:
        if 'T' in s:
            parse_datetime(s)
        else:
            parse_date(s)
        return True
    except ValueError:
        # ISO8601Error subclasses ValueError, so this catches parse failures.
        return False
def datetime_is_iso(dt):
    """Attempts to parse a date formatted in ISO 8601 format"""
    try:
        if len(dt) > 10:
            isodate.parse_datetime(dt)
        else:
            isodate.parse_date(dt)
        return True, []
    except isodate.ISO8601Error:
        return False, ['Datetime provided is not in a valid ISO 8601 format']
def test_primitive_get_and_put_date(self, client):
    # GET primitive/date
    dateResult = client.primitive.get_date()
    assert isodate.parse_date("0001-01-01") == dateResult.field
    assert isodate.parse_date("2016-02-29") == dateResult.leap

    dateRequest = DateWrapper(
        field=isodate.parse_date('0001-01-01'),
        leap=isodate.parse_date('2016-02-29'))
    client.primitive.put_date(dateRequest)
def check_date(x):
    if not isinstance(x, str):
        return False
    try:
        parse_date(x)
    except Exception:
        return False
    return True
def test_date_valid(self, client): date1 = isodate.parse_date("2000-12-01T00:00:00Z") date2 = isodate.parse_date("1980-01-02T00:00:00Z") date3 = isodate.parse_date("1492-10-12T00:00:00Z") valid_date_dict = {"0":date1, "1":date2, "2":date3} date_dictionary = client.dictionary.get_date_valid() assert date_dictionary == valid_date_dict client.dictionary.put_date_valid(valid_date_dict)
def test_two_blocks_via_timeseries(self):
    timeseries = [[isodate.parse_date('2000-01-01'), 1],
                  [isodate.parse_date('2000-01-02'), 3],
                  [isodate.parse_date('2000-01-03'), 5],
                  [isodate.parse_date('2000-01-04'), 9]]
    emit_data_block = EmitTimeSeries(timeseries)
    confirm_sequence_block = ConfirmSequence([1, 3, 5, 9], nose.tools.assert_equal)
    connect(emit_data_block, 'value', confirm_sequence_block, 'value')
    graph = Graph(emit_data_block)
    graph.run()
def test_discarded_inputs(self):
    """
    This test uses the same graph arrangement as test_two_merged_inputs(),
    but for this test case, one of the input data streams has several
    entries that will be discarded before the other input is ready.
    """
    timeseries_1 = [[isodate.parse_date('2000-01-01'), 1],
                    [isodate.parse_date('2000-01-02'), 2],
                    [isodate.parse_date('2000-01-03'), 3],
                    [isodate.parse_date('2000-01-04'), 4],
                    [isodate.parse_date('2000-01-05'), 5]]
    timeseries_2 = [[isodate.parse_date('2000-01-04'), 400],
                    [isodate.parse_date('2000-01-05'), 500]]

    emit_data_block_1 = EmitTimeSeries(timeseries_1)
    emit_data_block_2 = EmitTimeSeries(timeseries_2)
    emit_data_block_1.set_debug_name('emit_data_block_1')
    emit_data_block_2.set_debug_name('emit_data_block_2')

    # We should see the sequence start on the first day for which we have
    # data for BOTH inputs, and we should see another call for every time
    # that we have updated information for ANY input:
    expected_data = [[isodate.parse_date('2000-01-04'), 4, 400],
                     [isodate.parse_date('2000-01-05'), 5, 500]]
    confirm_sequence_block = ConfirmMergedSequence(expected_data, nose.tools.assert_equal)
    confirm_sequence_block.set_debug_name('confirm_sequence_block')

    connect(emit_data_block_1, 'value', confirm_sequence_block, 'input_a')
    connect(emit_data_block_2, 'value', confirm_sequence_block, 'input_b')

    graph = Graph()
    graph.add_head(emit_data_block_1)
    graph.add_head(emit_data_block_2)
    graph.run()
def _validate_type_iso_date(self, date_value):
    try:
        isodate.parse_date(date_value)
    except isodate.ISO8601Error:
        return False
    # isodate.parse_date() tolerates trailing characters, so additionally
    # enforce a strict ISO 8601 calendar/week/ordinal date pattern.
    pattern = (r'^([0-9]{4})-?((1[0-2]|0[1-9])-?(3[01]|0[1-9]|[12][0-9])'
               r'|(W([0-4]\d|5[0-2])(-?[1-7]))'
               r'|((00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6]))))$')
    return bool(re.match(pattern, date_value))
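# A short sketch of the gap the extra regex above closes (inputs are
# illustrative): isodate.parse_date() matches only the leading date portion
# of a string, so trailing characters slip through, while the anchored
# pattern rejects them.
import isodate

s = "2020-01-01T10:00:00"
isodate.parse_date(s)   # lenient: datetime.date(2020, 1, 1)
# _validate_type_iso_date(s) returns False, because the anchored regex
# refuses the trailing "T10:00:00".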
def signature_graph_data(request, channel):
    '''Return data for the graph of crashes/ADU against build date'''
    params = get_validated_params(request)
    if isinstance(params, http.HttpResponseBadRequest):
        # There was an error in the form, let's return it.
        return params

    signature = params['signature'][0]
    product = params['product'][0]

    # Initialise start and end dates
    start_date = None
    end_date = None

    # Check for dates
    if 'date' in params:
        for date in params['date']:
            # Set the latest given start date as the start date
            if date.startswith('>'):
                d = isodate.parse_date(date.strip('>'))
                if not start_date or d < start_date:
                    start_date = d
            # Set the earliest given end date as the end date
            elif date.startswith('<'):
                d = isodate.parse_date(date.strip('<'))
                if not end_date or d > end_date:
                    end_date = d

    # If start date wasn't given, set it to 7 days before the end date;
    # if end date wasn't given either, set it to 7 days before today.
    if not start_date:
        if end_date:
            start_date = end_date - datetime.timedelta(days=7)
        else:
            start_date = datetime.datetime.utcnow() - datetime.timedelta(days=7)

    # If end date wasn't given, set it to today
    if not end_date:
        end_date = datetime.datetime.utcnow()

    # Get the graph data
    api = models.AduBySignature()
    data = api.get(
        signature=signature,
        product_name=product,
        start_date=start_date,
        end_date=end_date,
        channel=channel
    )

    return data
def parse_repeating_slot(data):
    start_date = parse_date(data["start_date"])
    end_date = parse_date(data["end_date"])
    start_time = parse_time(data["start_time"])
    end_time = parse_time(data["end_time"])
    week_day = data["week_day"]
    return RammetidSlot(week_day, start_date, end_date, start_time, end_time)
def parse_slot_request(self, data):
    start_date = parse_date(data["start_date"])
    end_date = parse_date(data["end_date"])
    start_time = parse_time(data["start_time"])
    end_time = parse_time(data["end_time"])
    week_day = data["week_day"]
    return RepeatingSlotRequest(week_day, start_date, end_date, start_time, end_time)
def validate_date(text):
    """
    Based on http://tools.ietf.org/html/rfc2425#section-5.8.4 and the fact
    that it specifies a subset of ISO 8601.

    @param text: String
    """
    if VALID_DATE.match(text) is None:
        raise VCardValueError(NOTE_INVALID_DATE, {'String': text})
    try:
        isodate.parse_date(text)
    except (isodate.ISO8601Error, ValueError):
        raise VCardValueError(NOTE_INVALID_DATE, {'String': text})
def test_two_blocks_via_timeseries(self):
    timeseries = [[isodate.parse_date('2000-01-01'), 1],
                  [isodate.parse_date('2000-01-02'), 3],
                  [isodate.parse_date('2000-01-03'), 5],
                  [isodate.parse_date('2000-01-04'), 9]]

    clear_graph()
    emit_data_block = EmitTimeSeries(timeseries)
    emit_data_block.set_debug_name('EmitTimeSeries')
    confirm_sequence_block = ConfirmSequence([1, 3, 5, 9], nose.tools.assert_equal)
    confirm_sequence_block.set_debug_name('ConfirmSequence')
    connect(emit_data_block, 'value', confirm_sequence_block, 'value')

    trace = core.run()
    executed_set = executed_block_set(trace)
    nose.tools.assert_equal(executed_set, set(['EmitTimeSeries', 'ConfirmSequence']))
def decorated_route(*args, **kwargs):
    if 'start' in request.args:
        try:
            kwargs['start'] = parse_date(request.args['start'])
        except Exception:
            return fail("Failed to parse 'start' as ISO8601 date.", 400)
    if 'end' in request.args:
        try:
            kwargs['end'] = parse_date(request.args['end'])
        except Exception:
            return fail("Failed to parse 'end' as ISO8601 date.", 400)
    return route(*args, **kwargs)
def get(self, start, end):
    start_date = parse_date(start)
    end_date = parse_date(end)
    statistics = get_statistics(start_date, end_date)

    fieldname_mapping = OrderedDict()
    fieldname_mapping['unit_type'] = 'Type enhet'
    fieldname_mapping['unit_number'] = 'Enhetskode'
    fieldname_mapping['unit_name'] = 'Navn på enhet'
    fieldname_mapping['name'] = 'Navn på lokalet'
    fieldname_mapping['hours'] = 'Antall timer utlån fra %s til %s' % (
        start_date.strftime('%d-%m-%Y'), end_date.strftime('%d-%m-%Y'))

    return output_csv(statistics, 200, fieldname_mapping=fieldname_mapping)
def parse_slot(self, data, application):
    start_date = parse_date(data["start_date"])
    end_date = parse_date(data["end_date"])
    start_time = parse_time(data["start_time"])
    end_time = parse_time(data["end_time"])
    week_day = data["week_day"]
    return RepeatingSlot(application, week_day, start_date, end_date,
                         start_time, end_time)
def import_annee(self):
    """
    Determines the school year from the files provided, creating it if
    necessary (which is only possible with the STS file).

    Raises a ValueError if the year cannot be determined because it is
    inconsistent between the files provided, or if it does not exist in
    the database and cannot be created because the STS file is missing.
    """
    if self.sts_et:
        annee_et = self.sts_et.getroot().find('PARAMETRES/ANNEE_SCOLAIRE')
        annee_fichier = annee_et.attrib['ANNEE']
        debut = isodate.parse_date(annee_et.find('DATE_DEBUT').text)
        fin = isodate.parse_date(annee_et.find('DATE_FIN').text)
        self.annee, _ = Annee.objects.update_or_create(
            nom=annee_fichier,
            defaults={'debut': debut, 'fin': fin})

    annee_erreurs = []
    for xml_siecle in (self.structures_et, self.nomenclatures_et, self.eleves_et):
        if xml_siecle is None:
            continue
        annee_fichier = xml_siecle.getroot().find('PARAMETRES/ANNEE_SCOLAIRE').text
        if self.annee:
            if annee_fichier != self.annee.nom:
                annee_erreurs.append(ValueError('annee-mismatch', xml_siecle))
        else:
            try:
                self.annee = Annee.objects.get(nom=annee_fichier)
            except Annee.DoesNotExist:
                annee_erreurs.append(ValueError('annee-inexistante', xml_siecle))

    if annee_erreurs:
        raise ValueError(annee_erreurs)

    # Try to synchronise the holidays if the academy is known via the school.
    try:
        self.annee.synchro_vacances(self.etablissement.academie)
    except Exception:
        pass
def racetime_race_json_to_dict(race, entrant):
    result = {}
    result['platform'] = 'racetime'
    result['id'] = race['name'].replace('oot/', '')
    result['goal'] = race['goal']['name']

    result['date'] = race['ended_at']
    if result['date']:
        result['date'] = isodate.parse_date(result['date'])
    else:
        result['date'] = dt.date(1970, 1, 1)

    result['num_entrants'] = race['entrants_count']
    if race['info']:
        result['goal'] += f" {race['info']}"
    result['recordable'] = race['recordable']

    result['time'] = entrant['finish_time']
    if result['time']:
        result['time'] = isodate.parse_duration(result['time'])
    else:
        result['time'] = dt.timedelta(seconds=0)

    result['forfeit'] = entrant['status']['value'] == 'dnf'
    result['dq'] = entrant['status']['value'] == 'dq'
    result['rank'] = entrant['place']
    result['points'] = entrant['score'] if entrant['score'] else 0
    result['comment'] = entrant['comment'] if entrant['comment'] else ''
    return result
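# Quick illustration of the two isodate parsers used above (values are made
# up): 'ended_at' is a full timestamp that parse_date reduces to a date, and
# 'finish_time' is an ISO 8601 duration string.
import isodate

isodate.parse_date('2021-05-01T18:30:00Z')  # -> datetime.date(2021, 5, 1)
isodate.parse_duration('PT1H23M45.6S')      # -> timedelta(seconds=5025, microseconds=600000)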
def date_from_datetime(cls, datetime):
    try:
        date = parse_date(datetime)
    except TypeError:
        # parse_date() raises TypeError for non-string input, e.g. when a
        # date object is passed in directly.
        date = datetime
    return DimensionUTCDate.objects.get(date=date)
def get_album_info(self, url):
    """Returns an AlbumInfo object for a bandcamp album page."""
    try:
        html = self._get(url)
        name_section = html.find(id='name-section')
        album = name_section.find(attrs={'itemprop': 'name'}).text.strip()
        # Even though there is an item_id in some urls in bandcamp, it's not
        # visible on the page and you can't search by the id, so we need to
        # use the url as id.
        album_id = url
        artist = name_section.find(attrs={'itemprop': 'byArtist'}).text.strip()
        release = html.find('meta', attrs={'itemprop': 'datePublished'})['content']
        release = isodate.parse_date(release)
        artist_url = url.split('/album/')[0]
        tracks = []
        for row in html.find(id='track_table').find_all(attrs={'itemprop': 'tracks'}):
            track = self._parse_album_track(row)
            track.track_id = '{0}{1}'.format(artist_url, track.track_id)
            tracks.append(track)

        return AlbumInfo(album, album_id, artist, artist_url, tracks,
                         year=release.year, month=release.month,
                         day=release.day, country='XW', media='Digital Media',
                         data_source='bandcamp', data_url=url, mediums=1)
    except requests.exceptions.RequestException as e:
        self._log.debug("Communication error while fetching album {0!r}: "
                        "{1}".format(url, e))
    except (TypeError, AttributeError) as e:
        self._log.debug("Unexpected html while scraping album {0!r}: {1}"
                        .format(url, e))
    except BandcampException as e:
        self._log.debug('Error: {0}'.format(e))
def format_output(item, show_url=False):
    """ takes a voat post and returns a formatted string """
    if not item["Title"]:
        item["Title"] = formatting.truncate(item["Linkdescription"], 70)
    else:
        item["Title"] = formatting.truncate(item["Title"], 70)
    item["link"] = voat_fill_url.format(item["Subverse"], item["Id"])

    raw_time = isodate.parse_date(item["Date"])
    item["timesince"] = timeformat.time_since(raw_time, count=1, simple=True)
    item["comments"] = formatting.pluralize(item["CommentCount"], "comment")
    item["points"] = formatting.pluralize(item["Likes"], "point")

    if item["Type"] == 2:
        item["warning"] = " \x02Link\x02"
    else:
        item["warning"] = ""

    if show_url:
        return (
            "\x02{Title} : {Subverse}\x02 - {comments}, {points}"
            " - \x02{Name}\x02 {timesince} ago - {link}{warning}".format(**item)
        )
    else:
        return (
            "\x02{Title} : {Subverse}\x02 - {comments}, {points}"
            " - \x02{Name}\x02, {timesince} ago{warning}".format(**item)
        )
def post(self):
    data = request.get_json()
    settings = current_app.db_session.query(Settings).all()
    leieform_fields = ["single_booking_allowed", "repeating_booking_allowed",
                       "strotime_booking_allowed"]
    date_fields = ["repeating_booking_deadline", "repeating_booking_enddate",
                   "single_booking_enddate"]

    for field in settings:
        if field.key in leieform_fields and field.key in data:
            if isinstance(data[field.key], bool):
                field.value = str(data[field.key])
                current_app.db_session.add(field)
            else:
                abort(400, __error__=[u'Feil format på leieform felt.'])
        elif field.key in date_fields and field.key in data and data[field.key]:
            try:
                field.value = str(parse_date(data[field.key]))
                current_app.db_session.add(field)
            except ValueError:
                abort(400, __error__=[u'Søknadsfrist for lån må være dato.'])

    current_app.db_session.commit()
    return self.convertSettingsToDict(settings), 201
def generate_test_data():
    # Set seed to generate consistent test data
    npr.seed(1)

    data_lists = []
    n_recs = 110
    fk = Factory.create()

    for i in range(n_recs):
        payband = npr.choice([1, 2, 3], p=[0.7, 0.25, 0.05])
        payband = int(payband)

        salary = 0
        if payband == 1:
            salary = fk.random_int(min=60000, max=120000)
        elif payband == 2:
            salary = fk.random_int(min=120000, max=160000)
        else:
            salary = fk.random_int(min=160000, max=200000)

        list_record = [
            i + 1,
            '{}, {}'.format(fk.last_name(), fk.first_name()),
            isodate.parse_date(fk.date()),
            payband,
            salary
        ]
        data_lists.append(list_record)

    return data_lists
def transform_date(spec):
    start_date = parse_date(spec['start_date'])
    start_time = maybe_parse_time(spec['start_time']) or time(0, 0)
    end_date = maybe_parse_date(spec['end_date']) or start_date
    end_time = maybe_parse_time(spec['end_time']) or start_time

    is_continuous = spec['is_continuous']
    is_date_based = spec['start_time'] is None

    if is_continuous:
        start = datetime.combine(start_date, start_time)
        end = datetime.combine(end_date, end_time)
    else:
        overflows_into_next_day = (
            spec['end_time'] and
            spec['start_time'] and
            spec['end_time'] <= spec['start_time']
        )
        start = datetime.combine(start_date, start_time)
        end = datetime.combine(end_date, end_time)
        if overflows_into_next_day:
            end += timedelta(days=1)

    if is_date_based:
        return {
            'start': start.date().isoformat(),
            'end': end.date().isoformat(),
        }
    else:
        return {
            'start': start.isoformat(),
            'end': end.isoformat(),
        }
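# Hedged usage sketch for transform_date() above; maybe_parse_date/_time are
# assumed to return None for None input and otherwise delegate to isodate.
# An end time at or before the start time is treated as spilling into the
# next day:
spec = {
    'is_continuous': False,
    'start_date': '2020-03-01', 'end_date': None,
    'start_time': '22:00:00', 'end_time': '01:00:00',
}
transform_date(spec)
# -> {'start': '2020-03-01T22:00:00', 'end': '2020-03-02T01:00:00'}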
def __init__(self, jsonval=None):
    self.date = None
    if jsonval is not None:
        if 'T' in jsonval:
            self.date = isodate.parse_datetime(jsonval)
        else:
            self.date = isodate.parse_date(jsonval)
def split_date(spec):
    if spec['is_continuous']:
        yield project(spec, (
            'is_continuous',
            'start_date',
            'end_date',
            'start_time',
            'end_time',
        ))
        return

    start_date = parse_date(spec['start_date'])
    end_date = maybe_parse_date(spec['end_date']) or start_date
    days = int((end_date - start_date).total_seconds() / (60 * 60 * 24))

    schedules = spec['schedules'] or [{
        'days_of_week': [0, 1, 2, 3, 4, 5, 6],
        'start_time': spec['start_time'],
        'end_time': spec['end_time'],
    }]

    for day in range(days + 1):
        this_date = start_date + timedelta(days=day)
        matching_schedules = filter(
            lambda s: this_date.isoweekday() - 1 in s['days_of_week'],
            schedules
        )
        for schedule in matching_schedules:
            yield {
                'is_continuous': False,
                'start_date': this_date.isoformat(),
                'end_date': None,
                'start_time': schedule['start_time'],
                'end_time': schedule['end_time'],
            }
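# Illustrative expansion with split_date() (spec values are made up;
# days_of_week uses 0 = Monday). A two-day range filtered to Mondays yields
# a single concrete slot:
spec = {
    'is_continuous': False,
    'start_date': '2020-03-02',   # a Monday
    'end_date': '2020-03-03',
    'start_time': None,
    'end_time': None,
    'schedules': [{'days_of_week': [0],
                   'start_time': '09:00:00', 'end_time': '17:00:00'}],
}
list(split_date(spec))
# -> [{'is_continuous': False, 'start_date': '2020-03-02', 'end_date': None,
#      'start_time': '09:00:00', 'end_time': '17:00:00'}]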
def format_output(item, show_url=False):
    """ takes a voat post and returns a formatted string """
    if not item["Title"]:
        item["Title"] = formatting.truncate(item["Linkdescription"], 70)
    else:
        item["Title"] = formatting.truncate(item["Title"], 70)
    item["link"] = voat_fill_url.format(item["Subverse"], item["Id"])

    raw_time = isodate.parse_date(item['Date'])
    item["timesince"] = timeformat.time_since(raw_time, count=1, simple=True)
    item["comments"] = formatting.pluralize(item["CommentCount"], 'comment')
    item["points"] = formatting.pluralize(item["Likes"], 'point')

    if item["Type"] == 2:
        item["warning"] = " \x02Link\x02"
    else:
        item["warning"] = ""

    if show_url:
        return "\x02{Title} : {Subverse}\x02 - {comments}, {points}" \
               " - \x02{Name}\x02 {timesince} ago - {link}{warning}".format(**item)
    else:
        return "\x02{Title} : {Subverse}\x02 - {comments}, {points}" \
               " - \x02{Name}\x02, {timesince} ago{warning}".format(**item)
def transform_date(spec):
    start_date = parse_date(spec['start_date'])
    start_time = maybe_parse_time(spec['start_time']) or time(0, 0)
    end_date = maybe_parse_date(spec['end_date']) or start_date
    end_time = maybe_parse_time(spec['end_time']) or start_time

    is_continuous = spec['is_continuous']
    is_date_based = spec['start_time'] is None

    if is_continuous:
        start = datetime.combine(start_date, start_time)
        end = datetime.combine(end_date, end_time)
    else:
        overflows_into_next_day = (spec['end_time'] and
                                   spec['start_time'] and
                                   spec['end_time'] <= spec['start_time'])
        start = datetime.combine(start_date, start_time)
        end = datetime.combine(end_date, end_time)
        if overflows_into_next_day:
            end += timedelta(days=1)

    if is_date_based:
        return {
            'start': start.date().isoformat(),
            'end': end.date().isoformat(),
        }
    else:
        return {
            'start': start.isoformat(),
            'end': end.isoformat(),
        }
def youtube_search(vid_id):
    video_response = youtube.videos().list(
        id=vid_id,
        part='contentDetails,snippet,statistics'
    ).execute()

    item = video_response['items'][0]
    duration_seconds = isodate.parse_duration(
        item['contentDetails']['duration']).total_seconds()

    result = {
        'id': vid_id,
        'duration': {
            'seconds': duration_seconds,
            'human_readable': str(timedelta(seconds=duration_seconds))
        },
        'published_at': isodate.parse_date(
            item['snippet']['publishedAt']).isoformat(),
        'video_title': item['snippet']['title'],
        'youtuber': {
            'title': item['snippet']['channelTitle'],
            'channel_Id': item['snippet']['channelId']
        },
        'thumbnails': {
            'small': item['snippet']['thumbnails']['default']['url'],
            'large': item['snippet']['thumbnails']['high']['url']
        },
        'YT_popularity': {
            'like_count': int(float(item['statistics']['likeCount'])),
            'view_count': int(float(item['statistics']['viewCount'])),
            'view_count_human_readable': intcomma(item['statistics']['viewCount'])
        }
    }
    return result
def split_date(spec):
    if spec['is_continuous']:
        yield project(spec, (
            'is_continuous',
            'start_date',
            'end_date',
            'start_time',
            'end_time',
        ))
        return

    start_date = parse_date(spec['start_date'])
    end_date = maybe_parse_date(spec['end_date']) or start_date
    days = int((end_date - start_date).total_seconds() / (60 * 60 * 24))

    schedules = spec['schedules'] or [{
        'days_of_week': [0, 1, 2, 3, 4, 5, 6],
        'start_time': spec['start_time'],
        'end_time': spec['end_time'],
    }]

    for day in range(days + 1):
        this_date = start_date + timedelta(days=day)
        matching_schedules = filter(
            lambda s: this_date.isoweekday() - 1 in s['days_of_week'],
            schedules)
        for schedule in matching_schedules:
            yield {
                'is_continuous': False,
                'start_date': this_date.isoformat(),
                'end_date': None,
                'start_time': schedule['start_time'],
                'end_time': schedule['end_time'],
            }
def prepare_query_value(self, op, value):
    # If value is a string, try to parse it as a datetime.datetime or
    # datetime.date.
    if isinstance(value, basestring):
        try:
            value = isodate.parse_datetime(value)
        except (ValueError, isodate.ISO8601Error):
            try:
                value = isodate.parse_date(value)
            except (ValueError, isodate.ISO8601Error):
                value = None

    if value is None:
        return value

    # Ensure timezone info is set; if not, default to UTC. Note that
    # datetime.replace() returns a new object, so reassign the result.
    if isinstance(value, datetime):
        if not value.tzinfo:
            value = value.replace(tzinfo=isodate.UTC)
        return value

    # Assume a default time of 00:00 and a default timezone of UTC on a
    # plain datetime.date object.
    if isinstance(value, date):
        # This will assume time is 00:00
        value = datetime.fromordinal(value.toordinal())
        # Assume UTC
        value = value.replace(tzinfo=isodate.UTC)
        return value
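# Note on the fix above: datetime.replace() does not mutate in place, it
# returns a new object, so the result must be reassigned. A minimal sketch:
from datetime import datetime
import isodate

naive = datetime(2020, 1, 1)
naive.replace(tzinfo=isodate.UTC)          # return value discarded; still naive
aware = naive.replace(tzinfo=isodate.UTC)  # keep the return value
assert naive.tzinfo is None and aware.tzinfo is isodate.UTC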
def __init__(self, jsonval=None):
    self.date = None
    if jsonval is not None:
        # Accept python datetime types
        if isinstance(jsonval, (datetime.date, datetime.datetime)):
            self.date = jsonval
            self.origval = self.isostring
            return

        # Validate strings
        isstr = isinstance(jsonval, str)
        if not isstr and sys.version_info[0] < 3:
            # Python 2.x has 'str' and 'unicode'
            isstr = isinstance(jsonval, basestring)
        if not isstr:
            raise TypeError("Expecting string when initializing {}, but got {}"
                            .format(type(self), type(jsonval)))

        # Parse the dates
        try:
            if 'T' in jsonval:
                self.date = isodate.parse_datetime(jsonval)
            else:
                self.date = isodate.parse_date(jsonval)
        except Exception as e:
            logger.warning("Failed to initialize FHIRDate from \"{}\": {}"
                           .format(jsonval, e))

    self.origval = jsonval
def get_howstheorgintent_response(intent, session):
    """ return the general sentiment """
    session_attributes = {}
    reprompt_text = None

    orgname = intent['slots']['orgname']['value']
    date_from_str = intent['slots']['datefrom']['value']
    date_from = isodate.parse_date(date_from_str)

    # Walk the date back a year at a time until it is not in the future.
    today_date = date.today()
    while date_from > today_date:
        date_from -= timedelta(days=365)

    print(orgname)
    print(date_from)

    speech_output = get_speech_output(orgname, str(date_from))
    should_end_session = True

    # Setting reprompt_text to None signifies that we do not want to reprompt
    # the user. If the user does not respond or says something that is not
    # understood, the session will end.
    return build_response(
        session_attributes,
        build_speechlet_response(intent['name'], speech_output, reprompt_text,
                                 should_end_session))
def to_python(self) -> datetime:
    self._validate()
    if "T" in self:
        return isodate.parse_datetime(self)
    else:
        return isodate.parse_date(self)
def get_binning_index(self):
    if self.time_format.lower() == "unix":
        return datetime.datetime.utcfromtimestamp(
            float(self.datestring)).strftime('%Y.%m.%d')
    if self.time_format.lower() == "iso":
        return isodate.parse_date(self.datestring).strftime('%Y.%m.%d')
    if self.time_format.lower() == "human":
        return dateparser.parse(self.datestring).strftime('%Y.%m.%d')
def parse_isodate(iso_date):
    """parse the given iso8601 date string into a python date object"""
    date = None
    try:
        date = isodate.parse_date(iso_date)
    except Exception as e:
        log.msg(str(e), level=log.WARNING)
    return date
def deserialize_date(attr):
    """Deserialize ISO-8601 formatted string into Date object.

    :param str attr: response string to be deserialized.
    :rtype: Date
    :raises: DeserializationError if string format invalid.
    """
    return isodate.parse_date(attr)
def get_casconfirmdate(action):
    value = action.get('CASConfirmDate')
    if not value:
        raise make_internal_server_error(u'Missing Attribute: CASConfirmDate')
    if 'T' in value:
        return isodate.parse_datetime(value)
    else:
        return isodate.parse_date(value)
def get_isodate(date_str):
    """convert the given date_str string into an iso 8601 date"""
    iso_date = None

    if not date_str:
        return None

    # first, is it already a valid isodate?
    try:
        isodate.parse_date(date_str)
        return date_str
    except isodate.ISO8601Error:
        # if not, try to parse it
        try:
            iso_date = isodate.date_isoformat(timelib.strtodatetime(date_str))
        except Exception as e:
            log.msg(str(e), level=log.WARNING)
            return None

    return iso_date
def test_parse(self):
    """
    Parse an ISO date string and compare it to the expected value.
    """
    # datestring, yeardigits and expectation are bound by the enclosing
    # test-factory scope.
    if expectation is None:
        self.assertRaises(ISO8601Error, parse_date, datestring, yeardigits)
    else:
        result = parse_date(datestring, yeardigits)
        self.assertEqual(result, expectation)
def _to_python(self, value, state):
    try:
        if 'T' in value:
            return isodate.parse_datetime(value)
        else:
            return isodate.parse_date(value)
    except (ValueError, isodate.ISO8601Error) as e:
        raise Invalid(
            self.message('invalidDate', state, exception=str(e)),
            value, state)
def serialize_date(attr, **kwargs):
    """Serialize Date object into ISO-8601 formatted string.

    :param Date attr: Object to be serialized.
    :rtype: str
    """
    if isinstance(attr, str):
        attr = isodate.parse_date(attr)
    t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day)
    return t
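# Round-trip sketch pairing serialize_date() with the deserialize_date()
# helper shown earlier (both assumed importable from the same module):
import datetime

d = datetime.date(2016, 2, 29)
assert serialize_date(d) == "2016-02-29"
assert deserialize_date(serialize_date(d)) == d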
def build_releases(currentversions):
    """
    currentversions service returns a very unwieldy data structure;
    make something more suitable for templates.
    """
    now = datetime.datetime.utcnow().date()

    releases = OrderedDict()
    for release in copy.deepcopy(currentversions):
        start_date = isodate.parse_date(release['start_date'])
        end_date = isodate.parse_date(release['end_date'])
        if now >= start_date and now <= end_date:
            product = release['product']
            del release['product']
            if product not in releases:
                releases[product] = [release]
            else:
                releases[product].append(release)
    return releases
def datetime_is_iso(date_str):
    """Attempts to parse a date formatted in ISO 8601 format"""
    try:
        if len(date_str) > 10:
            isodate.parse_datetime(date_str)
        else:
            isodate.parse_date(date_str)
        return True, []
    except Exception:
        # Any error qualifies as not ISO format
        return False, ['Datetime provided is not in a valid ISO 8601 format']
def _from(self, s):
    t = None
    try:
        t = isodate.parse_date(s)
    except ValueError:
        pass
    if not t:
        t = datetime.datetime.strptime(s, '%Y-%m-%d')
        t = t.date()
    return t