def munge(dct: Dict) -> AttrDict:
    """Wrap *dct* in an AttrDict, parsing 'start'/'end' values into datetimes."""
    wrapped = AttrDict(**dct)
    for field in ('start', 'end'):
        if field in wrapped:
            setattr(wrapped, field, parse_datetime(getattr(wrapped, field)))
    return wrapped
def run(self):
    """Read the course-structure JSON and write one tab-separated, Hive-ready row per course."""
    self.remove_output_on_overwrite()
    with self.input().open('r') as input_file:
        course_structure = json.load(input_file)
        with self.output().open('w') as output_file:
            courses_list = course_structure.get('results')
            if not courses_list:
                # If there are no courses, or 'results' is not a key in the json, output nothing.
                return
            for course in courses_list:
                # To maintain robustness, ignore any non-dictionary data that finds its way into the API response.
                try:
                    start_string = course.get('start')
                    end_string = course.get('end')
                    # '\N' is Hive's textual representation of NULL.
                    if start_string is None:
                        cleaned_start_string = '\N'
                    else:
                        cleaned_start_string = ciso8601.parse_datetime(start_string)
                    if end_string is None:
                        cleaned_end_string = '\N'
                    else:
                        cleaned_end_string = ciso8601.parse_datetime(end_string)
                    line = [
                        course.get('id', '\N'),
                        course.get('org', '\N'),
                        course.get('course', '\N'),
                        course.get('run', '\N'),
                        coerce_timestamp_for_hive(cleaned_start_string),
                        coerce_timestamp_for_hive(cleaned_end_string),
                        course.get('name', '\N')
                    ]
                    # NOTE(review): v.encode('utf-8') on every field implies Python 2 str
                    # semantics ('\N' is also an invalid escape on Python 3) — confirm.
                    output_file.write('\t'.join([v.encode('utf-8') for v in line]))
                    output_file.write('\n')
                except AttributeError:
                    # If the course is not a dictionary, move on to the next one.
                    continue
def graph_one(self, start_timestamp, end_timestamp, payload_list):
    # Build a single graphing request covering [start, end], merging payload_list
    # entries that share a (system, subsystem) pair into one series, then submit it.
    series = []
    payload = {"from": parse_datetime(start_timestamp).isoformat(),
               "to":parse_datetime(end_timestamp).isoformat(),
               "temporary":False,
               "averages":True,
               }
    # NOTE(review): the loop variable 'sys' shadows the stdlib module name locally.
    for sys, subsys, col, idx in payload_list:
        for s in series:
            if s['system'] == sys and s['subsystem'] == subsys:
                # Merge column/index into the existing series for this pair.
                if col:
                    if col not in s['columns']:
                        s['columns'].append(col)
                else:
                    raise ValueError('Need a column')
                if idx:
                    if idx not in s['indexes']:
                        s['indexes'].append(idx)
                break
        else:
            # No matching series yet: start a new one for this (system, subsystem).
            if idx:
                idx = [idx]
            else:
                idx = []
            series.append({'system':sys, 'subsystem':subsys, 'columns':[col], 'indexes':idx})
    payload["series"] = series
    print payload  # Python 2 print statement
    self.graph(payload)
def test_play_video(self):
    """A browser 'play_video' event maps to the expected (date, project) key and JsonEventRecord value."""
    template = self.event_templates['play_video']
    event = self.create_event_log_line(template=template)
    expected_key = (self.DEFAULT_DATE, self.task.PROJECT_NAME)
    expected_dict = {
        'input_file': '',
        'source': self.task.PROJECT_NAME,
        'event_type': 'play_video',
        'emitter_type': 'browser',
        'timestamp': ciso8601.parse_datetime('2013-12-17T15:38:32.805444+00:00'),
        'received_at': ciso8601.parse_datetime('2013-12-17T15:38:32.805444+00:00'),
        'date': datetime.date(*[int(x) for x in self.DEFAULT_DATE.split('-')]),
        'username': '******',
        'course_id': self.encoded_course_id,
        'org_id': self.encoded_org_id,
        'user_id': '10',
        'referrer': 'long meaningful url',
        'agent_type': 'desktop',
        'agent_device_name': 'Other',
        'agent_os': 'Mac OS X',
        'agent_browser': 'Safari',
        'agent_touch_capable': False,
        'raw_event': self.get_raw_event(event),
    }
    expected_value = JsonEventRecord(**expected_dict).to_separated_values()
    self.assert_single_map_output(event, expected_key, expected_value)
def departures_from_response(self, res):
    """Build departure rows from the predictions feed, sorted by live arrival time.

    Predictions without an ExpectedArrival value are skipped.
    """
    departures = []
    for prediction in res.json()['Predictions']['Prediction']:
        if not prediction['ExpectedArrival']:
            continue  # no live estimate available for this entry
        departures.append({
            'time': ciso8601.parse_datetime(prediction['ScheduledArrival']),
            'live': ciso8601.parse_datetime(prediction['ExpectedArrival']),
            'service': self.get_service(prediction['LineName']),
            'destination': prediction['DestinationName'],
        })
    departures.sort(key=lambda row: row['live'])
    return departures
def run(self):
    """Read the course-structure JSON and write one tab-separated, Hive-ready row per course."""
    self.remove_output_on_overwrite()
    with self.input().open("r") as input_file:
        course_structure = json.load(input_file)
        with self.output().open("w") as output_file:
            courses_list = course_structure.get("results")
            if not courses_list:
                # If there are no courses, or 'results' is not a key in the json, output nothing.
                return
            for course in courses_list:
                # To maintain robustness, ignore any non-dictionary data that finds its way into the API response.
                try:
                    start_string = course.get("start")
                    end_string = course.get("end")
                    # "\N" is Hive's textual representation of NULL.
                    if start_string is None:
                        cleaned_start_string = "\N"
                    else:
                        cleaned_start_string = ciso8601.parse_datetime(start_string)
                    if end_string is None:
                        cleaned_end_string = "\N"
                    else:
                        cleaned_end_string = ciso8601.parse_datetime(end_string)
                    line = [
                        course.get("id", "\N"),
                        course.get("org", "\N"),
                        course.get("course", "\N"),
                        course.get("run", "\N"),
                        coerce_timestamp_for_hive(cleaned_start_string),
                        coerce_timestamp_for_hive(cleaned_end_string),
                        course.get("name", "\N"),
                    ]
                    # NOTE(review): v.encode("utf-8") on every field implies Python 2 str
                    # semantics ("\N" is also an invalid escape on Python 3) — confirm.
                    output_file.write("\t".join([v.encode("utf-8") for v in line]))
                    output_file.write("\n")
                except AttributeError:
                    # If the course is not a dictionary, move on to the next one.
                    continue
def get_row(self, item):
    """Turn one departure item into a {time, live, service, destination} row.

    Returns None when no usable time is present, or when a Traveline
    timetable entry is for a future date.
    """
    live_time = self._get_time(item.get('expected_departure_time'))
    time = self._get_time(item['aimed_departure_time'])
    if not time:
        # Fall back to the live estimate when there is no aimed time.
        time = live_time
    if not time:
        return
    if item.get('date') is not None:
        # A date is supplied: combine it with the time strings and localize.
        time = timezone.make_aware(ciso8601.parse_datetime(item['date'] + ' ' + time))
        if live_time:
            live_time = timezone.make_aware(ciso8601.parse_datetime(item['date'] + ' ' + live_time))
        if (item['source'].startswith('Traveline timetable')
                and time.date() > self.today):
            # Timetable data for a future day is not a live departure — skip it.
            return
    else:
        # No date supplied: assume the departure is today.
        time = timezone.make_aware(datetime.datetime.combine(
            self.today, dateutil.parser.parse(time).time()
        ))
        if live_time:
            live_time = timezone.make_aware(datetime.datetime.combine(
                self.today, dateutil.parser.parse(live_time).time()
            ))
    return {
        'time': time,
        'live': live_time,
        # Line values can look like 'X1--operator' or 'X1|operator'; keep the code part.
        'service': self.get_service(item.get('line').split('--', 1)[0].split('|', 1)[0]),
        'destination': self._get_destination(item),
    }
def departures_from_response(self, res):
    """Parse a transmodel stop-visits payload into departure rows.

    Returns None implicitly when the payload has no '_embedded' section.
    """
    payload = res.json()
    if '_embedded' not in payload:
        return
    rows = []
    for visit in payload['_embedded']['timetable:visit']:
        if 'expectedDepartureTime' not in visit:
            continue  # only visits with a live estimate are shown
        rows.append({
            'time': ciso8601.parse_datetime(visit['aimedDepartureTime']),
            'live': ciso8601.parse_datetime(visit['expectedDepartureTime']),
            'service': self.get_service(visit['_links']['transmodel:line']['name']),
            'destination': visit['destinationName'],
        })
    return rows
def test_aware_offset(self):
    """Offsets such as +05:30 map onto pytz.FixedOffset minute counts."""
    cases = [
        ('2014-12-05T12:30:45.123456+05:30', 330),
        ('2014-12-05T12:30:45.123456-05:30', -330),
        ('2014-12-05T12:30:45.123456-06:00', -360),
    ]
    for text, minutes in cases:
        self.assertEqual(
            ciso8601.parse_datetime(text),
            datetime.datetime(2014, 12, 5, 12, 30, 45, 123456, pytz.FixedOffset(minutes)),
        )
def test_aware_utc(self):
    """'Z', '+00:00' and '-00:00' suffixes all denote UTC."""
    expected = datetime.datetime(2014, 12, 5, 12, 30, 45, 123456, pytz.UTC)
    for suffix in ('Z', '+00:00', '-00:00'):
        self.assertEqual(
            ciso8601.parse_datetime('2014-12-05T12:30:45.123456' + suffix),
            expected,
        )
def service_vehicles_history(request, slug):
    """Render the vehicle-journey history of a service for a chosen (or latest) date."""
    service = get_object_or_404(Service, slug=slug)
    date = request.GET.get('date')
    today = timezone.now().date()
    if date:
        try:
            date = ciso8601.parse_datetime(date).date()
        except ValueError:
            # Unparseable ?date= falls through to the latest-journey default below.
            date = None
    journeys = service.vehiclejourney_set
    if not date:
        try:
            # Default to the date of the most recent journey.
            date = journeys.values_list('datetime', flat=True).latest('datetime').date()
        except VehicleJourney.DoesNotExist:
            date = today
    locations = VehicleLocation.objects.filter(journey=OuterRef('pk'))
    # 'locations' annotation flags journeys that have at least one recorded location.
    journeys = journeys.filter(datetime__date=date).select_related('vehicle').annotate(locations=Exists(locations))
    # NOTE(review): .first() can return None for a service without operators,
    # which would make operator.region below raise AttributeError — confirm.
    operator = service.operator.select_related('region').first()
    return render(request, 'vehicles/vehicle_detail.html', {
        'breadcrumb': [operator.region, operator, service],
        'date': date,
        'today': today,
        'object': service,
        'journeys': journeys,
    })
def parse_datetime_user(string):
    """Parse datetime string from user.

    We accept the normal ISO-8601 formats, but kick through to the formats
    supported by the system's date command if parsing fails.

    Args:
        string (str): Datetime string to parse

    Returns:
        datetime.datetime: Parsed datetime object

    Raises:
        ValueError: If neither parser can make sense of the input.
    """
    try:
        datetime_ = parse_datetime(string)
    except ValueError:
        try:
            output = check_output(['date', '--utc', '--iso-8601=seconds', '-d', string])
            # check_output returns bytes on Python 3 — decode before slicing
            # off the timezone suffix (keep 'YYYY-MM-DDTHH:MM:SS', 19 chars).
            datetime_ = ciso8601.parse_datetime(output.decode('ascii', 'replace').strip()[:19])
        except subprocess.CalledProcessError:
            # `date` could not interpret the string either.
            datetime_ = None
    if not datetime_:
        raise ValueError('Unable to parse timestamp %r' % (safer_repr(string), ))
    return datetime_
def str_to_datetime(str_repr):
    """Convert an ISO-8601 string into a datetime.

    Datetime instances (including subclasses) are passed through unchanged,
    and falsy values (None, '') yield None.
    """
    # Allow the caller to be stupid.
    # isinstance (rather than type(...) ==) also accepts datetime subclasses,
    # which previously fell through and crashed in the parser.
    if isinstance(str_repr, datetime.datetime):
        return str_repr
    if not str_repr:
        return None
    return ciso8601.parse_datetime(str_repr)
def decode_datetime_objects(nested_value):
    """Recursively decode {'type': 'encoded_*', 'value': ...} wrappers.

    Lists are rebuilt; dicts are mutated in place and returned. Wrapper dicts
    are replaced by the decoded datetime/date/time/Decimal; other dicts and
    lists are walked recursively. Scalars are returned unchanged.
    """
    if isinstance(nested_value, list):
        return [decode_datetime_objects(item) for item in nested_value]
    if isinstance(nested_value, dict):
        for key, value in nested_value.items():
            if isinstance(value, dict) and 'type' in value:
                # Typed wrapper: replace with the decoded Python object.
                # (The type variants are mutually exclusive, hence elif.)
                if value['type'] == 'encoded_datetime':
                    nested_value[key] = ciso8601.parse_datetime(value['value'])
                elif value['type'] == 'encoded_date':
                    nested_value[key] = ciso8601.parse_datetime(value['value']).date()
                elif value['type'] == 'encoded_time':
                    nested_value[key] = ciso8601.parse_datetime(value['value']).time()
                elif value['type'] == 'encoded_decimal':
                    nested_value[key] = Decimal(value['value'])
            elif isinstance(value, dict):
                nested_value[key] = decode_datetime_objects(value)
            elif isinstance(value, list):
                nested_value[key] = decode_datetime_objects(value)
        return nested_value
    return nested_value
def test_problem_check(self):
    """A server 'problem_check' event maps to the expected (date, project) key and JsonEventRecord value."""
    template = self.event_templates['problem_check']
    event = self.create_event_log_line(template=template)
    expected_key = (self.DEFAULT_DATE, self.task.PROJECT_NAME)
    expected_dict = {
        'input_file': '',
        'source': self.task.PROJECT_NAME,
        'event_type': 'problem_check',
        'emitter_type': 'server',
        'timestamp': ciso8601.parse_datetime('2013-12-17T15:38:32.805444+00:00'),
        'received_at': ciso8601.parse_datetime('2013-12-17T15:38:32.805444+00:00'),
        'date': datetime.date(*[int(x) for x in self.DEFAULT_DATE.split('-')]),
        'username': '******',
        'course_id': self.encoded_course_id,
        'org_id': self.encoded_org_id,
        'user_id': '10',
        'raw_event': self.get_raw_event(event),
    }
    expected_value = JsonEventRecord(**expected_dict).to_separated_values()
    self.assert_single_map_output(event, expected_key, expected_value)
def age(cls, loan):
    """Return the age of a loan in days.

    The age of a loan is calculated based on the loan transaction date.

    :param loan: the loan to check.
    :return loan_age in number of days
    """
    transaction_date = ciso8601.parse_datetime(
        loan.get('transaction_date'))
    # NOTE(review): this computes transaction_date - now, which is negative
    # for past transactions; if callers expect a positive "age" the operands
    # look inverted — confirm against call sites before changing the sign.
    loan_age = (transaction_date.replace(tzinfo=None)
                - datetime.utcnow())
    return loan_age.days
def extend_loan_data_is_valid(end_date, renewal_duration, library_pid):
    """Checks extend loan will be valid.

    The extension is valid only when the library's first open day on/after
    (now + renewal_duration - 1 day) falls strictly after the current due date.
    """
    current_due_date = ciso8601.parse_datetime(end_date)
    library = Library.get_record_by_pid(library_pid)
    calculated_due_date = datetime.now(timezone.utc) + timedelta(days=renewal_duration)
    first_open_date = library.next_open(
        date=calculated_due_date - timedelta(days=1))
    return first_open_date.date() > current_due_date.date()
def get_context_data(self, **kwargs):
    """Vehicle detail context: breadcrumb, dates, and the selected day's journeys.

    Journeys are additionally flagged with `locations` (whether live location
    data exists in Redis) and chained via previous/next links.
    """
    context = super().get_context_data(**kwargs)
    journeys = self.object.vehiclejourney_set
    context['pending_edits'] = self.object.vehicleedit_set.filter(
        approved=None).exists()
    dates = get_dates(journeys, vehicle=self.object)
    if self.object.operator:
        context['breadcrumb'] = [
            self.object.operator,
            Vehicles(self.object.operator)
        ]
        context['previous'] = self.object.get_previous()
        context['next'] = self.object.get_next()
    if dates:
        context['dates'] = dates
        date = self.request.GET.get('date')
        if date:
            try:
                date = parse_datetime(date).date()
            except ValueError:
                date = None
        if not date:
            # Default to the most recent date that has journeys.
            date = context['dates'][-1]
        context['date'] = date
        journeys = journeys.filter(
            datetime__date=date).order_by('datetime')
        # calls = Call.objects.filter(journey=OuterRef('pk'))
        # locations = VehicleLocation.objects.filter(journey=OuterRef('pk'))
        journeys = journeys.select_related('service')
        try:
            # One pipelined EXISTS per journey to check for live location keys.
            r = redis.from_url(settings.CELERY_BROKER_URL)
            pipe = r.pipeline()
            for journey in journeys:
                pipe.exists(f'journey{journey.id}')
            locations = pipe.execute()
            previous = None
            for i, journey in enumerate(journeys):
                journey.locations = locations[i]
                if journey.locations:
                    # Doubly link journeys that have location data.
                    if previous:
                        previous.next = journey
                        journey.previous = previous
                    previous = journey
        except redis.exceptions.ConnectionError:
            # Redis being down only disables the live-location flags.
            pass
        context['journeys'] = journeys
    return context
def patch_contract_active_status(self):
    """Ban a contract, fast-forward past the ban, and verify reactivation.

    Flow: post a 'ban' milestone, extend the framework's qualificationPeriod
    beyond the ban duration, run the chronograph after next_check, then verify
    the agreement/contract become active again and that the tenderer still
    cannot activate a new submission.
    """
    response = self.app.post_json(
        f"/agreements/{self.agreement_id}/contracts/{self.contract_id}/milestones?acc_token={self.framework_token}",
        {"data": {"type": "ban"}}
    )
    self.assertEqual(response.status, "201 Created")
    response = self.app.get(f"/agreements/{self.agreement_id}/contracts/{self.contract_id}")
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "banned")
    # Keep the framework alive longer than the ban lasts.
    response = self.app.patch_json(
        f"/frameworks/{self.framework_id}?acc_token={self.framework_token}",
        {"data": {
            "qualificationPeriod": {"endDate": (get_now() + timedelta(days=CONTRACT_BAN_DURATION+1)).isoformat()}
        }}
    )
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.content_type, "application/json")
    response = self.app.get(f"/agreements/{self.agreement_id}")
    self.assertEqual(response.status, "200 OK")
    next_check = response.json["data"]["next_check"]
    self.assertEqual(response.json["data"]["contracts"][0]["status"], "banned")
    # Jump just past the scheduled check so the chronograph lifts the ban.
    with freeze_time((parse_datetime(next_check) + timedelta(hours=1)).isoformat()):
        self.check_chronograph()
    response = self.app.get(f"/agreements/{self.agreement_id}")
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(response.json["data"]["status"], "active")
    self.assertEqual(response.json["data"]["contracts"][0]["status"], "active")
    response = self.app.post_json(
        "/submissions",
        {"data": self.initial_submission_data},
    )
    self.assertEqual(response.status, "201 Created")
    self.assertEqual(response.content_type, "application/json")
    submission_id = response.json["data"]["id"]
    submission_token = response.json["access"]["token"]
    # An active contract in the agreement still blocks submission activation.
    response = self.app.patch_json(
        f"/submissions/{submission_id}?acc_token={submission_token}",
        {"data": {"status": "active"}},
        status=403,
    )
    self.assertEqual(response.status, "403 Forbidden")
    self.assertEqual(response.content_type, "application/json")
    self.assertEqual(
        response.json["errors"],
        [{'description': "Tenderer can't activate submission with active/banned contract "
                         f'in agreement for framework {self.framework_id}',
          'location': 'body', 'name': 'data'}]
    )
def parse_article(self, response):
    """Extract a NewsCrawlerItem (headline, body, metadata) from an article page."""
    article = ItemLoader(item=NewsCrawlerItem(), response=response)
    article.add_value('country', 'uk')
    article.add_xpath('language', '//html/@lang')
    article.add_xpath('headline', '//head/meta[@property="og:title"]/@content')
    article.add_value('url', response.url)
    # Function to parse published time to iso6801
    # NOTE(review): the lambda parses the same value twice; a single parse with
    # a local variable would halve the work.
    time_in = Compose(
        Join(),
        lambda v: '' if (ciso8601.parse_datetime(v) is None) else ciso8601.parse_datetime(v).isoformat(sep='T')
    )
    article.nested_css('meta[property="article:published_time"]').add_xpath(
        'published_time',
        './@content',
        time_in,
    )
    # Keywords meta holds a comma-separated list; split it into values.
    article.add_xpath('category', '//head/meta[@property="keywords"]/@content',
                      lambda v: v[0].split(',') if v else None)
    article.add_value('encoding', response.encoding)
    article.nested_css('div[itemprop="articleBody"]').add_xpath('body', './p//text()')
    return article.load_item()
def parse_date(date_str):
    """Parse elastic datetime string.

    Tries the fast ciso8601 parser first, falling back to arrow for values
    it cannot handle; returns None for empty input.
    """
    if not date_str:
        return None
    try:
        parsed = ciso8601.parse_datetime(date_str)
        if not parsed:
            parsed = arrow.get(date_str).datetime
    except TypeError:
        # Non-string input (e.g. a one-element sequence): parse its first item.
        parsed = arrow.get(date_str[0]).datetime
    return parsed
def from_json(self, serialized_value):
    """Deserialize an ISO-8601 string into a naive datetime.

    Returns None when the column is nullable and the input is None;
    otherwise parses with ciso8601 and falls back to dateutil.
    """
    if self.is_nullable and serialized_value is None:
        return None
    try:
        # ciso8601 is significantly faster than dateutil.parser for parsing
        # iso8601 strings, so we try it first.
        parsed_value = ciso8601.parse_datetime(serialized_value)
        assert parsed_value is not None  # Caveat: asserts won't run if python is run with -O.
    except Exception:
        # Anything ciso8601 rejects goes to the slower, more lenient parser.
        parsed_value = dateutil.parser.parse(serialized_value)
    return make_naive(parsed_value)
def __parse_result(self, json_obj):
    """Parse the retrieved data.

    Splits returned warning features into currently-active and expected
    warnings (with their maximum levels); any parse failure invalidates
    the whole result set.
    """
    try:
        current_maxlevel = 0
        expected_maxlevel = 0
        current_warnings = []
        expected_warnings = []
        if json_obj["timeStamp"]:
            try:
                self.last_update = ciso8601.parse_datetime(
                    json_obj["timeStamp"])
            except:  # pylint: disable=bare-except # noqa: E722
                # Unparseable timestamp: fall back to "now" in UTC.
                self.last_update = datetime.datetime.now(
                    datetime.timezone.utc)
        else:
            self.last_update = datetime.datetime.now(datetime.timezone.utc)
        if json_obj["numberReturned"]:
            for feature in json_obj["features"]:
                warning = convert_warning_data(feature["properties"])
                current_time = datetime.datetime.now(datetime.timezone.utc)
                # pylint: disable=bad-continuation
                # Skip warnings that have already ended.
                if (warning["end_time"] is not None
                        and current_time > warning["end_time"]):
                    continue
                # Started warnings are "current"; not-yet-started are "expected".
                if (warning["start_time"] is not None
                        and warning["start_time"] < current_time):
                    current_warnings.append(warning)
                    current_maxlevel = max(warning["level"], current_maxlevel)
                else:
                    expected_warnings.append(warning)
                    expected_maxlevel = max(warning["level"], expected_maxlevel)
                # pylint: enable=bad-continuation
        self.current_warning_level = current_maxlevel
        self.current_warnings = current_warnings
        self.expected_warning_level = expected_maxlevel
        self.expected_warnings = expected_warnings
        self.data_valid = True
    except:  # pylint: disable=bare-except # noqa: E722
        # Any failure leaves the object in an explicit "no valid data" state.
        self.data_valid = False
        self.last_update = None
        self.current_warning_level = None
        self.current_warnings = None
        self.expected_warning_level = None
        self.expected_warnings = None
def is_loan_overdue(self):
    """Check if the loan is overdue.

    :return: True when now is past the due date plus the circulation
        policy's grace period; False otherwise (including when the policy
        defines no grace period).
    """
    from .utils import get_circ_policy
    circ_policy = get_circ_policy(self)
    now = datetime.now(timezone.utc)
    end_date = self.get('end_date')
    due_date = ciso8601.parse_datetime(end_date)
    days_after = circ_policy.get('number_of_days_after_due_date')
    # Guard against policies without the setting — timedelta(days=None)
    # would raise TypeError. (The sibling LoanState-aware variant of this
    # method guards the same way.)
    if days_after is None:
        return False
    return now > due_date + timedelta(days=days_after)
def parse_article(self, response):
    """Extract a NewsCrawlerItem (headline, body, metadata) from an article page."""
    article = ItemLoader(item=NewsCrawlerItem(), response=response)
    article.add_value("country", 'uk')
    article.add_value("language", 'english')
    article.nested_css("div.main-content-column").add_xpath("body", './div/p//text()')
    article.add_xpath("headline", '//head/meta[@property="og:title"]/@content')
    # Function to parse published time to iso6801
    # NOTE(review): the lambda parses the same value twice; a single parse with
    # a local variable would halve the work.
    time_in = Compose(
        Join(),
        lambda v: '' if (ciso8601.parse_datetime(v) is None) else ciso8601.parse_datetime(v).isoformat(sep='T')
    )
    article.nested_css('meta[property="article:published_time"]').add_xpath(
        'published_time',
        './@content',
        time_in,
    )
    article.add_xpath("category", '//head/meta[@property="article:section"]/@content')
    article.add_xpath("keywords", '//head/meta[@name="keywords"]/@content')
    article.add_value("url", response.url)
    article.add_value("encoding", response.encoding)
    return article.load_item()
def test_get(self):
    """Create a user attribute, read it back, and verify value plus timestamp fields."""
    name = self._get_random_data()
    value = self._get_random_data()
    expiration = 900
    self.post('/zato/sso/user/attr', {
        'ust': self.ctx.super_user_ust,
        'user_id': self.ctx.super_user_id,
        'name': name,
        'value': value,
        'expiration': expiration
    })
    response = self.get('/zato/sso/user/attr', {
        'ust': self.ctx.super_user_ust,
        'user_id': self.ctx.super_user_id,
        'name': name,
    })
    self.assertTrue(response.found)
    self.assertEqual(response.name, name)
    self.assertEqual(response.value, value)
    # Will raise an exception if date parsing fails
    parse_datetime(response.creation_time)
    parse_datetime(response.last_modified)
    parse_datetime(response.expiration_time)
def test_android_screen(self):
    """An Android 'screen' event maps to the expected (date, project) key and JsonEventRecord value."""
    template = self.event_templates['android_screen']
    event = self.create_event_log_line(template=template)
    expected_key = (self.DEFAULT_DATE, self.DEFAULT_PROJECT)
    expected_dict = {
        'input_file': '',
        'source': self.DEFAULT_PROJECT,
        'event_type': 'screen',
        'emitter_type': 'server',
        'timestamp': ciso8601.parse_datetime('2013-12-17T15:38:32+00:00'),
        'received_at': ciso8601.parse_datetime('2013-12-17T15:38:32.796000+00:00'),
        'date': datetime.date(*[int(x) for x in self.DEFAULT_DATE.split('-')]),
        'agent_type': 'tablet',
        'agent_device_name': 'Samsung SM-N920A',
        'agent_os': 'Android',
        'agent_browser': 'Android',
        'agent_touch_capable': True,
        'anonymous_id': self.DEFAULT_ANONYMOUS_ID,
        'category': 'screen',
        'label': 'Launch\\0',
        'raw_event': self.get_raw_event(event),
    }
    expected_value = JsonEventRecord(**expected_dict).to_separated_values()
    self.assert_single_map_output(event, expected_key, expected_value)
def family_presence():
    """Derive per-person presence state from ioBroker host states.

    A person whose devices report any truthy state is 'home'; otherwise the
    state degrades with hours since last seen: <1h 'inactive', 1-24h 'away',
    >=24h 'gone'. Results (and updated lastseen stamps) are written back to
    ioBroker in one batch.
    """
    check_list = {
        'alexander': [
            'javascript.0.host.alex.state',
            'javascript.0.host.handyalex.state',
            'javascript.0.host.depacnglw1nb0mx.state',
            'javascript.0.host.depacnglw1nb0mx-wlan.state'
        ],
        'helga': [
            # BUG FIX: this id had a leading space (' javascript…'), which can
            # never match a real ioBroker object id.
            'javascript.0.host.helga.state',
            'javascript.0.host.handyhelga.state'
        ],
        'martin': [
            'javascript.0.host.martin2.state',
            'javascript.0.host.martin2-wlan.state',
            'javascript.0.host.handymartin.state'
        ],
        'daniel': [
            'javascript.0.host.surface.state',
            'javascript.0.host.surface-wlan.state',
            'javascript.0.host.handydaniel.state'
        ]
    }
    values = []
    for name, state_ids in check_list.items():
        objs = get_iobroker_values(args.iobroker, state_ids)
        # Present when any device state exists and is truthy.
        presence = any(obj.get('val') for obj in objs.values())
        now = datetime.datetime.now()
        duration = 0.0
        if presence:
            state = 'home'
            values.append('javascript.0.family.{0}.lastseen={1}'.format(
                name, now.strftime('%Y-%m-%d %H:%M:%S')))
        else:
            lastseen = get_iobroker_value(
                args.iobroker, 'javascript.0.family.{0}.lastseen'.format(name))
            # Default to 1h ('away') when there is no lastseen record at all.
            duration = 1.0
            if lastseen:
                duration = (now - ciso8601.parse_datetime(lastseen)
                            ).total_seconds() / 3600.0
            if duration >= 24.0:
                state = 'gone'
            elif duration >= 1.0:
                state = 'away'
            else:
                state = 'inactive'
        log.info('PRESENCE {0} = {1} {2:.1f}h'.format(name, state, duration))
        values.append('javascript.0.family.{0}.presence={1}'.format(
            name, state))
    if values:
        set_iobroker_values(args.iobroker, values)
def test_checkout_temporary_item_type(client, librarian_martigny,
                                      lib_martigny, loc_public_martigny,
                                      patron_martigny, item_lib_martigny,
                                      item_type_on_site_martigny,
                                      circ_policy_short_martigny,
                                      circ_policy_default_martigny):
    """Test checkout or item with temporary item_types"""
    login_user_via_session(client, librarian_martigny.user)
    item = item_lib_martigny
    assert item.status == ItemStatus.ON_SHELF
    # test basic behavior
    cipo_used = CircPolicy.provide_circ_policy(
        lib_martigny.organisation_pid,
        lib_martigny.pid,
        patron_martigny.patron_type_pid,
        item.item_type_circulation_category_pid)
    assert cipo_used == circ_policy_short_martigny
    # add a temporary_item_type on item
    # due to this change, the cipo used during circulation operation should
    # be different from the first cipo found.
    item['temporary_item_type'] = {
        '$ref': get_ref_for_pid('itty', item_type_on_site_martigny.pid)
    }
    item = item.update(data=item, dbcommit=True, reindex=True)
    cipo_tmp_used = CircPolicy.provide_circ_policy(
        lib_martigny.organisation_pid,
        lib_martigny.pid,
        patron_martigny.patron_type_pid,
        item.item_type_circulation_category_pid)
    assert cipo_tmp_used == circ_policy_default_martigny
    # Expected due date: now + policy duration, possibly pushed to the
    # library's next open day.
    delta = timedelta(cipo_tmp_used.get('checkout_duration'))
    expected_date = datetime.now() + delta
    expected_dates = [expected_date, lib_martigny.next_open(expected_date)]
    expected_dates = [d.strftime('%Y-%m-%d') for d in expected_dates]
    # try a checkout and check the transaction end_date is related to the cipo
    # corresponding to the temporary item_type
    params = dict(
        item_pid=item.pid,
        patron_pid=patron_martigny.pid,
        transaction_user_pid=librarian_martigny.pid,
        transaction_location_pid=loc_public_martigny.pid)
    res, data = postdata(client, 'api_item.checkout', params)
    assert res.status_code == 200
    transaction_end_date = data['action_applied']['checkout']['end_date']
    transaction_end_date = ciso8601.parse_datetime(transaction_end_date).date()
    transaction_end_date = transaction_end_date.strftime('%Y-%m-%d')
    assert transaction_end_date in expected_dates
    # reset the item to original value
    del item['temporary_item_type']
    item.update(data=item, dbcommit=True, reindex=True)
def decode_datetime_objects(nested_value):
    """Recursively decode {'type': 'encoded_*', 'value': ...} wrappers.

    Lists are rebuilt; dicts are mutated in place and returned. Wrapper dicts
    are replaced by the decoded datetime/date/time/Decimal; other dicts and
    lists are walked recursively. Scalars are returned unchanged.
    """
    if isinstance(nested_value, list):
        return [decode_datetime_objects(item) for item in nested_value]
    if isinstance(nested_value, dict):
        for key, value in nested_value.items():
            if isinstance(value, dict) and 'type' in value:
                # Typed wrapper: replace with the decoded Python object.
                # (The type variants are mutually exclusive, hence elif.)
                if value['type'] == 'encoded_datetime':
                    nested_value[key] = ciso8601.parse_datetime(value['value'])
                elif value['type'] == 'encoded_date':
                    nested_value[key] = ciso8601.parse_datetime(
                        value['value']).date()
                elif value['type'] == 'encoded_time':
                    nested_value[key] = ciso8601.parse_datetime(
                        value['value']).time()
                elif value['type'] == 'encoded_decimal':
                    nested_value[key] = Decimal(value['value'])
            elif isinstance(value, dict):
                nested_value[key] = decode_datetime_objects(value)
            elif isinstance(value, list):
                nested_value[key] = decode_datetime_objects(value)
        return nested_value
    return nested_value
def parse_article(self, response):
    """Extract a NewsCrawlerItem from a BBC article page."""
    article = ItemLoader(item=NewsCrawlerItem(), response=response)
    article.add_value('country', 'uk')
    article.add_value('language', 'english')
    # Strip the trailing " - BBC" suffix from the og:title.
    article.nested_css('meta[property="og:title"]').add_xpath('headline', './@content', re=r'(.*) - BBC')
    article.add_value('url', response.url)
    # Function to parse published time to iso6801
    # NOTE(review): the lambda parses the same value twice; a single parse with
    # a local variable would halve the work.
    time_in = Compose(
        Join(),
        lambda v: '' if (ciso8601.parse_datetime(v) is None) else ciso8601.parse_datetime(v).isoformat(sep='T')
    )
    # The publish date lives in an embedded JSON-LD script block.
    article.add_xpath(
        'published_time',
        '//*[@id="responsive-news"]/head/script[1]/text()',
        time_in,
        re=r'"datePublished": "(.*)"',
    )
    article.nested_css('meta[property="article:section"]').add_xpath('category', './@content')
    article.add_value('encoding', response.encoding)
    # Body paragraphs can live under either container depending on page type.
    article.nested_css('div.story-body__inner').add_xpath('body', './p//text()')
    article.nested_css('map-body').add_xpath('body', './p//text()')
    return article.load_item()
def poll_stats(container_name, delay_sec, out):
    """Stream docker stats for *container_name*, writing roughly one JSON sample
    per 4-second window to *out* until interrupted.
    """
    with docker.Client(base_url='unix://var/run/docker.sock') as cli:
        # NOTE(review): the `delay_sec` parameter is ignored; the sampling
        # interval is hard-coded to 4 seconds here — confirm intent.
        delta = timedelta(seconds=4)
        try:
            # Seed so that the very first sample is always written.
            last_timestamp = datetime.today() + timedelta(seconds=-4)
            for stat in cli.stats(container_name, decode=True):
                timestamp = ciso8601.parse_datetime(stat['read'])
                # NOTE(review): if stat['read'] carries a timezone, subtracting
                # the naive datetime.today() would raise TypeError — confirm
                # the daemon's timestamp format.
                if timestamp - last_timestamp > delta:
                    out.write(json.dumps(stat) + '\n')
                    last_timestamp = timestamp
            # cpu_percent = (data['cpu_stats']['cpu_usage']['total_usage'] - data['precpu_stats']['cpu_usage']['total_usage']) / (data['cpu_stats']['system_cpu_usage'] - data['precpu_stats']['system_cpu_usage']) * len(data['cpu_stats']['cpu_usage']['percpu_usage'])
        except KeyboardInterrupt:
            return
def _create_report_period(self, row, cluster_id, report_db_accessor, cluster_alias):
    """Create a report period object.

    Args:
        row (dict): A dictionary representation of a CSV file row
        cluster_id (str): cluster ID
        cluster_alias (str): cluster alias

    Returns:
        (str): The DB id of the report period object
    """
    table_name = OCPUsageReportPeriod
    # Source timestamps end in ' +0000 UTC'; rewrite that suffix into an
    # ISO-8601 offset that ciso8601 can parse.
    start = ciso8601.parse_datetime(row.get("report_period_start").replace(" +0000 UTC", "+0000"))
    end = ciso8601.parse_datetime(row.get("report_period_end").replace(" +0000 UTC", "+0000"))
    key = (cluster_id, start, self._provider_uuid)
    # Serve from the per-report cache, then the preloaded DB map, before inserting.
    if key in self.processed_report.report_periods:
        return self.processed_report.report_periods[key]
    if key in self.existing_report_periods_map:
        return self.existing_report_periods_map[key]
    data = {
        "cluster_id": cluster_id,
        "cluster_alias": cluster_alias,
        "report_period_start": start,
        "report_period_end": end,
        "provider_id": self._provider_uuid,
    }
    with transaction.atomic():
        report_period_id = report_db_accessor.insert_on_conflict_do_nothing(
            table_name, data, conflict_columns=["cluster_id", "report_period_start", "provider_id"]
        )
    self.processed_report.report_periods[key] = report_period_id
    return report_period_id
def parse_datetime(dt_str: str) -> dt.datetime | None:
    """Parse a string and return a datetime.datetime.

    This function supports time zone offsets. When the input contains one,
    the output uses a timezone with a fixed offset from UTC.

    Raises ValueError if the input is well formatted but not a valid datetime.
    Returns None if the input isn't well formatted.
    """
    # Fast path: let ciso8601 handle well-formed ISO-8601 strings; on failure,
    # fall through to the regex-based check below.
    with suppress(ValueError, IndexError):
        return ciso8601.parse_datetime(dt_str)
    if not (match := DATETIME_RE.match(dt_str)):
        return None
    # NOTE(review): `match` is never used past this point in the visible code —
    # the regex-based slow path appears to be truncated here; as written the
    # function returns None for anything ciso8601 rejects.
def attachments(id):
    # Get task attachments and store them in database with all info from task:
    # fetch the Bitrix24 task, download the first attachment of the latest
    # comment, base64-encode it and insert everything into the `tasks` table.
    url_part = 'https://icstech.bitrix24.ru'
    fp = 'temp/'
    rest = bitrix.bx24.callMethod('tasks.task.get', taskId=id)
    task_title = rest['task']['title']
    task_body = rest['task']['description']
    status = rest['task']['status']
    time_start = rest['task']['createdDate']
    time_start = ciso8601.parse_datetime(time_start)
    time_stop = rest['task']['closedDate']
    time_stop = ciso8601.parse_datetime(time_stop)
    try:
        rest = bitrix.bx24.callMethod('task.commentitem.getlist', taskId=int(id))
        # rest[-2] is assumed to be the last real comment — TODO confirm
        # against the API's list shape.
        attachments = rest[-2]['ATTACHED_OBJECTS']
        last_comment = rest[-2]['POST_MESSAGE']
        # Take only the first attached object.
        attachmnets_v = attachments.values()
        attachmnets_i = iter(attachmnets_v)
        first_value = next(attachmnets_i)
        url_part2 = first_value['DOWNLOAD_URL']
        fp = fp + first_value['NAME']
        url = url_part + url_part2
        response = requests.get(url)
        # Round-trip through PIL and a temp file before reading it back.
        img = Image.open(BytesIO(response.content))
        img = img.save(fp)
        image = open(fp, 'rb')  # NOTE(review): never closed — leaks a file handle
        image_read = image.read()
        # NOTE(review): base64.encodestring is deprecated (removed in 3.9);
        # base64.encodebytes is the modern equivalent — confirm Python version.
        image_64_encode = base64.encodestring(image_read)
        with connection.cursor() as cursor:
            sql = "INSERT INTO `tasks` (`id`, `title`, `body`, `status`, `stop_comm`, `file`, `timestmp_start`, `timestmp_stop`) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)"
            cursor.execute(
                sql, (id, task_title, task_body, status, last_comment,
                      image_64_encode, time_start, time_stop))
        connection.commit()
        os.remove(fp)
    except KeyError:
        # Comment had no ATTACHED_OBJECTS: nothing gets stored at all.
        print("No attachments bruh")
def departures_from_response(self, response):
    """Turn the ETA feed into departure rows; tracked items also get a 'live' time."""
    rows = []
    for item in response.json():
        eta = ciso8601.parse_datetime(item['ETA'])
        base = {
            'time': eta,
            'destination': item['Destination'],
            'service': self.get_service(item['ServiceNumber']),
        }
        # A tracked vehicle's ETA doubles as its live estimate.
        rows.append({**base, 'live': eta} if item['IsTracked'] else base)
    return rows
def is_loan_overdue(self):
    """Check if the loan is overdue."""
    from .utils import get_circ_policy
    # Only active checkouts can be overdue.
    if self.state != LoanState.ITEM_ON_LOAN:
        return False
    circ_policy = get_circ_policy(self)
    days_after = circ_policy.initial_overdue_days
    if not days_after:
        return False
    due_date = ciso8601.parse_datetime(self.end_date)
    deadline = due_date + timedelta(days=days_after - 1)
    return datetime.now(timezone.utc) > deadline
def test_invalid(self):
    """Malformed input yields None from the unaware parser; trailing junk after a valid date is ignored."""
    for bad in ('asdf', 'Z'):
        self.assertEqual(ciso8601.parse_datetime_unaware(bad), None)
    self.assertEqual(
        ciso8601.parse_datetime('2014-12-05asdfasdf'),
        datetime.datetime(2014, 12, 5)
    )
def convert_datetime_m(m_df):
    """Merge the 'date' and 'time' string columns into a parsed 'datetime' column.

    The combined string has its dots replaced by dashes, gets the ISO-8601 'T'
    separator inserted, and is parsed with ciso8601. The original columns are
    dropped and the new column is moved to the front.

    Args:
        m_df (pandas.DataFrame): frame with string 'date' and 'time' columns.

    Returns:
        pandas.DataFrame: copy with a leading parsed 'datetime' column.
    """
    import ciso8601
    combined = m_df["date"] + m_df["time"]
    # BUG FIX: '.' is a regex metacharacter; with the old regex=True default
    # of Series.str.replace it matches every character. regex=False makes the
    # replacement literal.
    combined = combined.str.replace('.', '-', regex=False)
    # Insert the ISO-8601 'T' between the 10-char date and the time part.
    m_df['datetime'] = combined.str[:10] + 'T' + combined.str[10:]
    m_df['datetime'] = m_df['datetime'].apply(ciso8601.parse_datetime)
    # Drop date and time string columns.
    m_df = m_df.drop(['date', 'time'], axis=1)
    # Move the (last-added) datetime column to the front.
    cols = list(m_df.columns)
    cols = [cols[-1]] + cols[:-1]
    m_df = m_df[cols]
    return m_df
def check_get_start_time(self, start_time, now, expected):
    """Run an interval-based Job with a patched clock and assert that its
    computed start_time equals *expected*, and that the wait callback keeps
    receiving (expected, now) while the job runs.
    """
    start_time = parse_datetime(start_time)
    self.now = parse_datetime(now)
    expected = parse_datetime(expected)
    interval = 1 # Days
    # Collects every (start_time, now) pair the job reports while waiting.
    data = {'start_time':[], 'now':[]}
    def wait_iter_cb(start_time, now, *ignored):
        data['start_time'].append(start_time)
        data['now'].append(now)
    # Freeze the scheduler's notion of time to self._datetime.
    with patch('zato.scheduler.backend.datetime', self._datetime):
        interval = Interval(days=interval)
        job = Job(rand_int(), rand_string(), SCHEDULER.JOB_TYPE.INTERVAL_BASED,
                  start_time=start_time, interval=interval)
        job.wait_iter_cb = wait_iter_cb
        job.wait_sleep_time = 0.1
        self.assertEquals(job.start_time, expected)
        self.assertTrue(job.keep_running)
        self.assertFalse(job.max_repeats_reached)
        self.assertIs(job.max_repeats_reached_at, None)
        # Let the job run briefly so the callback fires at least once.
        spawn(job.run)
        sleep(0.2)
        len_start_time = len(data['start_time'])
        len_now = len(data['now'])
        self.assertNotEquals(len_start_time, 0)
        self.assertNotEquals(len_now, 0)
        for item in data['start_time']:
            self.assertEquals(expected, item)
        for item in data['now']:
            self.assertEquals(self.now, item)
def date_hook(json_dict):
    """``json.loads`` object hook: replace every value that parses as an
    ISO-8601 datetime string with a ``datetime`` object.

    Values that do not parse are left untouched, except under the
    ``'Expiration'`` key, where a parse failure is considered fatal and
    re-raised.

    Args:
        json_dict (dict): decoded JSON object.

    Returns:
        dict: the same dict, with datetime-like values parsed in place.
    """
    for (key, value) in list(json_dict.items()):
        try:
            json_dict[key] = ciso8601.parse_datetime(value)
        # Narrowed from a bare except: ciso8601 raises TypeError for
        # non-strings and ValueError for badly-formatted strings.
        except (TypeError, ValueError):
            if key == 'Expiration':
                print(key)
                raise
    return json_dict
def _add_request_expiration_date(record):
    """Add the request expiration date to record if needed.

    When a request is validated, we need to set the `request_expire_date`
    field with the expiration date of this request. When this date is
    reached and item is still AT_DESK, this request should be cancelled.
    This value is consistent only if the loan is a validated request
    (loan.state == ITEM_AT_DESK). If the loan state is different this value
    could represent an other concept.

    :param record: the record metadata.
    """
    from .utils import get_circ_policy
    # Only a freshly validated request (ITEM_AT_DESK) that has no
    # expiration date yet needs the computation.
    if record.state == LoanState.ITEM_AT_DESK and \
            'request_expire_date' not in record:
        cipo = get_circ_policy(record)
        duration = cipo.get('pickup_hold_duration')
        library = record.pickup_library
        if cipo.get('allow_requests') and duration and library:
            # the expiration date should be calculated using the pickup
            # library calendar
            trans_date = ciso8601.parse_datetime(record.transaction_date)
            try:
                # End of the last hold day, then shifted to the next day
                # the library is actually open.
                expire_date = trans_date + timedelta(days=duration)
                expire_date = expire_date.replace(
                    hour=23, minute=59, second=00, microsecond=000,
                    tzinfo=None)
                expire_date = library.next_open(expire_date)
            except LibraryNeverOpen:
                # 10 days by default ... it's better than place a random
                # date value
                default_duration = current_app.config.get(
                    'RERO_ILS_DEFAULT_PICKUP_HOLD_DURATION', 10)
                expire_date = trans_date + timedelta(days=default_duration)
                expire_date = expire_date.replace(
                    hour=23, minute=59, second=00, microsecond=000,
                    tzinfo=None)
            # localize the date on the library timezone
            # NOTE: if we create a datetime using `tzinfo`, the conversion
            # to iso format return very precise timestamp (+00:34 for
            # Zurich). But using `localize` method we keep rational +01:00
            # value. This value is well interpreted by browser (+00:34) is
            # not.
            #   https://coderedirect.com/questions/421775/python-pytz-
            #   timezone-conversion-returns-values-that-differ-from-
            #   timezone-offset (Search into response with ~45 points)
            expire_date = library.get_timezone().localize(expire_date)
            record['request_expire_date'] = expire_date.isoformat()
            record['request_start_date'] = datetime.now().isoformat()
def generate_pretty_roll(roll):
    """Render a roll record as a human-readable message.

    Returns the formatted string, or None if formatting fails (the error
    is logged with a traceback).
    """
    message = None
    victim = dao.get_bakchod_by_id(roll["victim"])
    try:
        if roll["winrar"] is None:
            # No winner yet: announce what number wins and what happens.
            message = "Roll a {} to {} {}!".format(
                roll["roll_number"],
                pretty_roll_rule(roll["rule"]),
                util.extract_pretty_name_from_bakchod(victim),
            )
        else:
            winrar = dao.get_bakchod_by_id(roll["winrar"])
            # Expiry may be a string (parse it) or already a datetime.
            try:
                expiry = ciso8601.parse_datetime(roll["expiry"])
            except Exception:
                expiry = roll["expiry"]
            remaining = expiry - datetime.datetime.now()
            if roll["rule"] == "kick_user":
                message = "{} won by rolling a {}! {} has been kicked from this group!".format(
                    util.extract_pretty_name_from_bakchod(winrar),
                    roll["roll_number"],
                    util.extract_pretty_name_from_bakchod(victim),
                )
            else:
                message = "{} won by rolling a {}! {} is now {} for {}".format(
                    util.extract_pretty_name_from_bakchod(winrar),
                    roll["roll_number"],
                    util.extract_pretty_name_from_bakchod(victim),
                    pretty_roll_rule(roll["rule"]),
                    util.pretty_time_delta(remaining.total_seconds()),
                )
    except Exception as e:
        logger.error(
            "Caught Error in roll.generate_pretty_roll - {} \n {}",
            e,
            traceback.format_exc(),
        )
    return message
def _create_report(self, row, report_period_id, report_db_accessor):
    """Create a report object.

    Args:
        row (dict): A dictionary representation of a CSV file row
        report_period_id (str): report period object id

    Returns:
        (str): The DB id of the report object
    """
    # Timestamps arrive like "... +0000 UTC"; rewrite the suffix so the
    # ISO-8601 parser accepts the offset.
    def _parse(column):
        return ciso8601.parse_datetime(
            row.get(column).replace(" +0000 UTC", "+0000"))

    interval_start = _parse("interval_start")
    interval_end = _parse("interval_end")
    cache_key = (report_period_id, interval_start)

    # Reuse an id already created in this run or preloaded from the DB.
    if cache_key in self.processed_report.reports:
        return self.processed_report.reports[cache_key]
    if cache_key in self.existing_report_map:
        return self.existing_report_map[cache_key]

    data = {
        "report_period_id": report_period_id,
        "interval_start": interval_start,
        "interval_end": interval_end,
    }
    with transaction.atomic():
        report_id = report_db_accessor.insert_on_conflict_do_nothing(
            OCPUsageReport,
            data,
            conflict_columns=["report_period_id", "interval_start"],
        )
    self.processed_report.reports[cache_key] = report_id
    return report_id
def test_checkout_library_level_policy(client, lib_martigny, librarian_martigny, patron_martigny, loc_public_martigny, item_type_standard_martigny, item_lib_martigny, json_header, circ_policy_short_martigny):
    """Test circ policy parameters.

    Checkout an item as a librarian and verify that the resulting loan
    duration honours the circulation policy's checkout_duration, then
    check the item back in.
    """
    login_user_via_session(client, librarian_martigny.user)
    item = item_lib_martigny
    item_pid = item.pid
    patron_pid = patron_martigny.pid
    # checkout
    res, data = postdata(
        client,
        'api_item.checkout',
        dict(item_pid=item_pid, patron_pid=patron_pid,
             transaction_user_pid=librarian_martigny.pid,
             transaction_location_pid=loc_public_martigny.pid))
    assert res.status_code == 200
    actions = data.get('action_applied')
    loan = actions[LoanAction.CHECKOUT]
    end_date = loan.get('end_date')
    start_date = loan.get('start_date')
    # The loan must last at least the policy's checkout_duration (days).
    checkout_duration = (ciso8601.parse_datetime(end_date) -
                         ciso8601.parse_datetime(start_date)).days
    assert checkout_duration >= circ_policy_short_martigny.get(
        'checkout_duration')
    # checkin
    res, _ = postdata(
        client,
        'api_item.checkin',
        dict(item_pid=item_pid, pid=loan.get('pid'),
             transaction_user_pid=librarian_martigny.pid,
             transaction_location_pid=loc_public_martigny.pid))
    assert res.status_code == 200
def _create_cost_entry_bill(self, row, report_db_accessor):
    """Create a cost entry bill object.

    Args:
        row (dict): A dictionary representation of a CSV file row

    Returns:
        (str): A cost entry bill object id
    """
    row_date = row.get("usagedatetime")
    # Month boundaries ("start-end") covering the row's usage date.
    report_date_range = utils.month_date_range(ciso8601.parse_datetime(row_date))
    start_date, end_date = report_date_range.split("-")
    # Normalize both boundaries to midnight UTC.
    start_date_utc = ciso8601.parse_datetime(start_date).replace(hour=0, minute=0, tzinfo=pytz.UTC)
    end_date_utc = ciso8601.parse_datetime(end_date).replace(hour=0, minute=0, tzinfo=pytz.UTC)
    key = (start_date_utc, self._provider_uuid)
    # Reuse a bill id already created this run or preloaded from the DB.
    if key in self.processed_report.bills:
        return self.processed_report.bills[key]
    if key in self.existing_bill_map:
        return self.existing_bill_map[key]
    data = self._get_data_for_table(row, AzureCostEntryBill._meta.db_table)
    data["provider_id"] = self._provider_uuid
    data["billing_period_start"] = datetime.strftime(start_date_utc, "%Y-%m-%d %H:%M%z")
    data["billing_period_end"] = datetime.strftime(end_date_utc, "%Y-%m-%d %H:%M%z")
    # Insert is idempotent on (billing_period_start, provider_id).
    with transaction.atomic():
        bill_id = report_db_accessor.insert_on_conflict_do_nothing(
            AzureCostEntryBill, data, conflict_columns=["billing_period_start", "provider_id"]
        )
    self.processed_report.bills[key] = bill_id
    return bill_id
def on_message(data):
    """Handle one vehicle-position message: find or create the vehicle and
    an appropriate journey, then record the new location.

    NOTE(review): the local name ``datetime`` shadows the datetime module
    inside this function; left as-is to avoid touching the query keywords.
    """
    print(data)
    vehicle, created = Vehicle.objects.get_or_create(operator_id='MSOT', code=data['ass'])
    datetime = parse_datetime(data['date'])
    latlong = Point(data['lng'], data['lat'])
    journey = None
    service = None
    # Fix: these were unbound for a newly created vehicle, raising
    # NameError in the `if not journey` branches below.
    latest_location = None
    current = False
    if not created:
        latest_location = vehicle.latest_location
        # "Current" = previous position was under 3 minutes ago.
        current = latest_location and (datetime - latest_location.datetime).total_seconds() < 180
        # Still on the same service's route: keep the same journey.
        if current and latest_location.journey.service and latest_location.journey.service.geometry.overlaps(latlong):
            journey = latest_location.journey
    if not journey:
        try:
            # Find the unique service whose timetable window and geometry
            # both match this position.
            service = Service.objects.filter(
                operator='MSOT',
                journey__datetime__lte=datetime,
                journey__stopusageusage__datetime__gte=datetime,
                geometry__bboverlaps=latlong
            ).distinct().get()
        except (Service.DoesNotExist, Service.MultipleObjectsReturned):
            pass
        # No service matched, but the recent journey had none either:
        # treat it as a continuation of that journey.
        if current and not service and not latest_location.journey.service:
            journey = latest_location.journey
    if not journey:
        journey = VehicleJourney.objects.create(
            vehicle=vehicle,
            datetime=datetime,
            source=globalism['source'],
            service=service,
            route_name=data['extra'].get('custR', '')
        )
    with atomic():
        if not created and latest_location:
            # Older positions on the previous journey are no longer current.
            latest_location.journey.vehiclelocation_set.update(current=False)
        vehicle.latest_location = VehicleLocation.objects.create(
            journey=journey,
            datetime=datetime,
            latlong=latlong,
            heading=data['dir'],
            current=True
        )
        vehicle.save()
def test_android_screen(self):
    """An Android 'screen' event maps to exactly one JsonEventRecord keyed
    by (date, project), with the expected field values."""
    template = self.event_templates['android_screen']
    event = self.create_event_log_line(template=template)
    expected_key = (self.DEFAULT_DATE, self.DEFAULT_PROJECT)
    expected_dict = {
        'input_file': '',
        'source': self.DEFAULT_PROJECT,
        'event_type': 'screen',
        'emitter_type': 'server',
        # Timestamps fixed by the android_screen template.
        'timestamp': ciso8601.parse_datetime('2013-12-17T15:38:32.700000+00:00'),
        'received_at': ciso8601.parse_datetime('2013-12-17T15:38:32.796000+00:00'),
        # DEFAULT_DATE is "YYYY-MM-DD"; convert it to a date object.
        'date': datetime.date(*[int(x) for x in self.DEFAULT_DATE.split('-')]),
        'agent_type': 'tablet',
        'agent_device_name': 'Samsung SM-N920A',
        'agent_os': 'Android',
        'agent_browser': 'Android',
        'agent_touch_capable': True,
        'anonymous_id': self.DEFAULT_ANONYMOUS_ID,
        'category': 'screen',
        'label': 'Launch\\0',
        'raw_event': self.get_raw_event(event),
    }
    # Records are compared in their separated-values serialized form.
    expected_value = JsonEventRecord(**expected_dict).to_separated_values()
    self.assert_single_map_output(event, expected_key, expected_value)
def parse_datetime(string):
    """Parse ISO-8601 datetime string.

    Args:
        string (str): Datetime string to parse

    Returns:
        datetime.datetime: Parsed datetime object

    Raises:
        ValueError: If the string cannot be parsed.
    """
    if not string:
        # Empty input means "now", normalised to UTC.
        return datetime.datetime.utcnow().replace(tzinfo=utc)
    parsed = ciso8601.parse_datetime(string)
    if not parsed:
        raise ValueError('Unable to parse timestamp %r' % string)
    return parsed
def graph(self, payload):
    """POST *payload* to the series API and plot every returned series.

    Each series carries a list of point dicts; the "Time" key holds the
    timestamp and every other key is a data column to plot.
    """
    req = post("http://commonscontrol.harleyschool.org:8000/data/api/series/", data=dumps(payload)).json()
    for series in req:
        data = series['data']
        # Normalise each timestamp into the configured local timezone.
        dates = [self.zone.normalize(parse_datetime(point["Time"])) for point in data]
        # Column names are inferred from the first point.
        cols = [key for key in data[0] if key != "Time"]
        for col in cols:
            # Include the index in the legend label when present.
            if series["index"]:
                label = "%s %s %s" % (series['index'], series['subsystem'], col)
            else:
                label = "%s %s" % (series['subsystem'], col)
            plt.plot(dates, [point[col] for point in data], label=label)
    plt.gcf().autofmt_xdate()
    plt.legend(loc=0, fontsize=12, fancybox=True)
    plt.show()
def parse_datetime(__string: str) -> datetime.datetime:
    """Parse an ISO-8601 datetime string.

    Args:
        __string: Datetime string to parse; falsy input means "now".

    Returns:
        A timezone-aware datetime (UTC is assumed when no offset is given).
    """
    if __string:
        # pylint: disable=no-member
        result = ciso8601.parse_datetime(__string)
    else:
        result = datetime.datetime.now(datetime.timezone.utc)
    # Naive results are assumed to be in UTC.
    if result.tzinfo is None:
        result = result.replace(tzinfo=datetime.timezone.utc)
    return result
def get_items(self):
    """Yield recently-reported items from the remote API.

    Iterates over the query extents, fetching each page and yielding only
    items reported within the last fifteen minutes. When a page yields no
    fresh items its URL is printed for debugging. Network errors or a
    missing 'items' key skip to the next extent; successful requests are
    spaced one second apart.
    """
    fifteen_minutes_ago = timezone.now() - timedelta(minutes=15)
    for params in self.get_extents():
        try:
            response = self.session.get(self.url, params=params, timeout=5)
            items = response.json()['items']
            any_items = False
            if items:
                for item in items:
                    # Only surface items reported in the last 15 minutes.
                    if parse_datetime(item['reported']) > fifteen_minutes_ago:
                        any_items = True
                        yield item
            if not any_items:
                print(response.url)
        except (RequestException, KeyError):
            # Bad response or payload: move on to the next extent
            # (skipping the pause below).
            continue
        sleep(1)
def parse_datetime(string):
    """Parse datetime string.

    Args:
        string (str): Datetime string to parse

    Returns:
        datetime.datetime: Parsed datetime object

    Raises:
        ValueError: If parsing fails.
    """
    if not string:
        # Empty input falls back to the current (naive) UTC time.
        return datetime.datetime.utcnow()
    # Only the first 19 characters ("YYYY-MM-DDTHH:MM:SS") are parsed;
    # fractional seconds and offset suffixes are deliberately ignored.
    parsed = ciso8601.parse_datetime(string[:19])
    if not parsed:
        raise ValueError('Unable to parse timestamp %r' % (safer_repr(string), ))
    return parsed
def set_date(self, date):
    """Set the timetable's active date, rebuilding journey times when it
    actually changes.

    *date* may be a date object or a datetime string (parsed with ciso8601
    and reduced to its date part).
    """
    if date and not isinstance(date, datetime.date):
        date = ciso8601.parse_datetime(date).date()
    if hasattr(self, 'date'):
        # No-op when the date is unchanged.
        if date == self.date:
            return
        # Clear everything computed for the previous date.
        for grouping in self.groupings:
            for row in grouping.rows:
                row.times.clear()
            grouping.column_feet.clear()
    self.date = date
    # Rebuild times for every journey that runs on the new date.
    for grouping in self.groupings:
        for journey in grouping.journeys:
            if journey.should_show(self.date, self):
                journey.add_times()
        grouping.do_heads_and_feet()
def test_unaware(self):
    """The unaware parser drops any UTC-offset suffix entirely."""
    expected = datetime.datetime(2014, 12, 5, 12, 30, 45, 123456)
    # The aware parser gives the same naive result when no offset is given.
    self.assertEqual(
        ciso8601.parse_datetime('2014-12-05T12:30:45.123456'),
        expected
    )
    # parse_datetime_unaware ignores tz offset
    for suffix in ('Z', '+00:00', '-05:00'):
        self.assertEqual(
            ciso8601.parse_datetime_unaware('2014-12-05T12:30:45.123456' + suffix),
            expected,
        )
def get_context_data(self, **kwargs):
    """Build the template context: breadcrumb, the selected date, and the
    vehicle's journeys for that date annotated with a has-locations flag."""
    context = super().get_context_data(**kwargs)
    if self.object.operator:
        context['breadcrumb'] = [self.object.operator.region, self.object.operator]
    date = self.request.GET.get('date')
    context['today'] = timezone.now().date()
    if date:
        try:
            date = ciso8601.parse_datetime(date).date()
        except ValueError:
            # Malformed ?date= parameter: fall through to the default below.
            date = None
    journeys = self.object.vehiclejourney_set
    if not date:
        try:
            # Default to the date of the most recent journey.
            date = journeys.values_list('datetime', flat=True).latest('datetime').date()
        except VehicleJourney.DoesNotExist:
            date = context['today']
    context['date'] = date
    journeys = journeys.filter(datetime__date=date)
    # Flag journeys that actually have recorded locations.
    locations = VehicleLocation.objects.filter(journey=OuterRef('pk'))
    context['journeys'] = journeys.select_related('service').annotate(locations=Exists(locations))
    return context