def create_entry(self, project, description, hours, date=None):
    """Create a time entry of `hours` for `project` via the workspace API.

    Without `date`, the entry starts now (local tz → UTC); with `date`,
    start/end are derived from the local date string helpers.
    Returns the API response decoded from JSON.
    """
    if not date:
        # Current local time, converted to UTC for the API.
        utc_start_datetime = self.tz.localize(datetime.now()).astimezone(
            pytz.utc)
        # NOTE(review): this value is already UTC, so the "localized_" name is
        # misleading and the astimezone() below is a no-op.
        localized_end_datetime = utc_start_datetime + timedelta(
            hours=float(hours))
        utc_end_datetime = localized_end_datetime.astimezone(pytz.utc)
        start_date = isodate.datetime_isoformat(utc_start_datetime)
        end_date = isodate.datetime_isoformat(utc_end_datetime)
    else:
        start_date = self.local_date_string_to_utc_iso_8601(date)
        localized_datetime = self.local_date_string_to_localized_datetime(
            date)
        end_date = self.add_hours_to_localized_datetime_and_convert_to_iso_8601(
            localized_datetime, hours)
    data = {
        "start": start_date,
        "end": end_date,
        "billable": "false",
        "description": description,
        "projectId": project,
        "tagIds": []
    }
    url = self.url + 'workspaces/' + self.workspace + '/timeEntries/'
    response = self.post(url, data)
    return response.json()
def events():
    """Return fullcalendar-compatible events (JSON) for the requested range.

    Reads `timezone`, `start` and `end` from the query string. Availability
    >= 80% renders as 'available', otherwise > 0% as 'partly-available',
    else 'unavailable'.
    """
    timezone = request.args.get('timezone')
    start = arrow.get(request.args.get('start')).replace(tzinfo=timezone)
    end = arrow.get(request.args.get('end')).replace(tzinfo=timezone)
    events = []  # fullcalendar compatible events
    start = start.datetime
    end = end.datetime
    for allocation in scheduler.allocations_in_range(start, end):
        classes = ['allocation']
        availability = scheduler.availability(allocation.start, allocation.end)
        if 80 <= availability and availability <= 100:
            classes.append('available')
        elif 0 < availability and availability <= 80:
            classes.append('partly-available')
        else:
            classes.append('unavailable')
        events.append(
            dict(
                id=allocation.id,
                className=' '.join(classes),
                start=isodate.datetime_isoformat(allocation.display_start()),
                end=isodate.datetime_isoformat(allocation.display_end()),
                allDay=allocation.whole_day,
                title='{} Tickets left'.format(allocation.quota_left)
            )
        )
    return json.dumps(events)
def _fetch_events(service, now):
    """Collect the next 7 days of events from all selected Google calendars.

    Walks every page of the user's calendar list, expands recurring events
    into concrete instances, and de-duplicates events by id.
    """
    events = []

    def append(e):
        # Only add events whose id we have not seen yet.
        if not any(x for x in events if x['id'] == e['id']):
            events.append(e)

    page_token = None
    while True:
        calendar_list = service.calendarList().list(pageToken=page_token,
                                                    showHidden=True).execute()
        for calendar_list_entry in calendar_list['items']:
            # Skip calendars the user has not marked as selected.
            if not 'selected' in calendar_list_entry or calendar_list_entry['selected'] == False:
                continue
            calendar_id = calendar_list_entry['id']
            list_response = service.events().list(
                calendarId = calendar_id,
                timeMin = isodate.datetime_isoformat(now),
                timeMax = isodate.datetime_isoformat(now + datetime.timedelta(days=7))
            ).execute()
            for event in list_response['items']:
                if 'recurrence' in event:
                    # Expand the recurring event into its instances within
                    # the same 7-day window.
                    recurrence_events = service.events().instances(
                        calendarId = calendar_id,
                        eventId = event['id'],
                        timeMin = isodate.datetime_isoformat(now),
                        timeMax = isodate.datetime_isoformat(now + datetime.timedelta(days=7))
                    ).execute()
                    for recurrence_event in recurrence_events['items']:
                        append(recurrence_event)
                else:
                    append(event)
        page_token = calendar_list.get('nextPageToken')
        if not page_token:
            break
    return events
def get_signals(self, car_id, access_token, **options):
    """
    Fetch a list of signals for a specific car within a period.

    Parameters
    ----------
    car_id : str
        the id of the car you are looking for the signals.
    access_token : str
        the access token of the user.
    begin : datetime, optional
        The first datetime of the interval you want the signals.
        Default value is first day of month at 00:00:00.000.
    end : datetime, optional
        The last datetime of the interval you want the signals.
        Default value is current moment.
    limit : int, optional
        The maximum number of values you want back.
        Default value is no limit.
    names : list, optional
        The list of signal names you want to filter the result.
        For example ['Odometer', 'FuelLevel'].
        Default value is all the signals available.

    Returns
    -------
    tuple
        A tuple containing [Signals], Error.
        The error is None if everything went fine.
    """
    route = '{host}/vehicles/{car_id}/signals'.format(host=self.host, car_id=car_id)
    params = {}
    o_limit = options.get('limit', None)
    if o_limit is not None:
        if o_limit > 0:
            params['limit'] = o_limit
        else:
            raise ValueError("limit must be a non 0 positive integer, " + str(o_limit) + " given")
    if options.get('begin', None) is not None:
        params['begin'] = isodate.datetime_isoformat(options['begin'])
    if options.get('end', None) is not None:
        params['end'] = isodate.datetime_isoformat(options['end'])
    if options.get('names', None) is not None:
        params['name'] = ','.join(options['names'])
    if bool(params):
        route = '?'.join([route, url_parser.urlencode(params)])
    try:
        response = xee_utils.do_get_request(route, access_token)
        return [xee_entities.parse_signal(signal) for signal in response], None
    except ValueError:
        # Happens when the signals list is empty
        return [], None
    except (xee_exceptions.APIException, xee_exceptions.ParseException) as err:
        return None, err
def count_db_trade_bucketed(self, start_time, end_time):
    """Count cached trade-bucket documents for this symbol/binSize whose
    ISO 8601 timestamp string falls in [start_time, end_time).

    NOTE(review): cursor.count() is deprecated/removed in newer PyMongo —
    presumably this targets an older driver; confirm before upgrading.
    """
    return self.collection.find({
        "symbol": self.symbol,
        "binSize": self.bin_size,
        "timestamp": {
            "$gte": isodate.datetime_isoformat(start_time),
            "$lt": isodate.datetime_isoformat(end_time)
        }
    }).count()
def f(v): if isinstance(v, datetime): v = datetime_isoformat(v) else: try: v = datetime_isoformat(parse_datetime(v)) except: warnings.warn("incorrect timestamp format", LSTimestampWarning) return v
def get_signals(self, car_id, access_token, **options):
    """
    Fetch a list of signals for a specific car within a period.

    Parameters
    ----------
    car_id : str
        the id of the car you are looking for the signals.
    access_token : str
        the access token of the user.
    begin : datetime, optional
        The first datetime of the interval you want the signals.
        Default value is first day of month at 00:00:00.000.
    end : datetime, optional
        The last datetime of the interval you want the signals.
        Default value is current moment.
    limit : int, optional
        The maximum number of values you want back.
        Default value is no limit.
    names : list, optional
        The list of signal names you want to filter the result.
        For example ['Odometer', 'FuelLevel'].
        Default value is all the signals available.

    Returns
    -------
    tuple
        A tuple containing [Signals], Error.
        The error is None if everything went fine.
    """
    route = '{host}/cars/{car_id}/signals'.format(host=self.host, car_id=car_id)
    params = {}
    o_limit = options.get('limit', None)
    if o_limit is not None:
        if o_limit > 0:
            params['limit'] = o_limit
        else:
            raise ValueError(
                "limit must be a non 0 positive integer, " + str(o_limit) + " given")
    if options.get('begin', None) is not None:
        params['begin'] = isodate.datetime_isoformat(options['begin'])
    if options.get('end', None) is not None:
        params['end'] = isodate.datetime_isoformat(options['end'])
    if options.get('names', None) is not None:
        params['name'] = ','.join(options['names'])
    if bool(params):
        route = '?'.join([route, url_parser.urlencode(params)])
    try:
        response = xee_utils.do_get_request(route, access_token)
        return [xee_entities.parse_signal(signal) for signal in response], None
    except ValueError:
        # Happens when the signals list is empty
        return [], None
    except (xee_exceptions.APIException, xee_exceptions.ParseException) as err:
        return None, err
def can_fulfill_data_request(request_id, study_id, query, expiration_time,
                             cache_available=False, cache_timestamp=None,
                             force_fullfil=False):
    """
    This function is used to decide if the partner should fulfill the data request.
    @param request_id the request ID - it is supposed to be a non-None/empty str. Required.
    @param study_id the study ID - it is supposed to be a non-None/empty str. Required.
    @param query the query string - it is supposed to be a non-None/empty str. Required.
    @param expiration_time the request expiration time - it is supposed to be a non-None datetime. Required.
    @param cache_available whether cache is available - it is supposed to be a bool. Optional, default to False.
    @param cache_timestamp the cache timestamp - it is supposed to be a datetime. Optional, default to None.
    @param force_fullfil this parameter is set to True when this method is called by decision module.
    @return True if the partner client can fulfill the data request, False otherwise.
    @throws TypeError throws if any argument isn't of right type
    @throws ValueError throws if any argument isn't valid (refer to the argument documentation)
    @throws PartnerClientError throws if any other error occurred during the operation
    """
    signature = 'hfppnetwork.partner.httpservices.datafulfillment.can_fulfill_data_request'
    method_enter(
        signature, {
            "request_id": request_id,
            "study_id": study_id,
            "query": query,
            "expiration_time": expiration_time,
            "cache_available": cache_available,
            "cache_timestamp": cache_timestamp
        })
    # If the partner is not configured to fulfill immediately (and this is not
    # a forced call from the decision module), notify the decision module of
    # the pending request instead of fulfilling it now.
    if not PARTNER_IMMEDIATE_FULLFIL and not force_fullfil:
        url = DECISION_MODULE_URL
        values = {
            'request_id': request_id,
            'study_id': study_id,
            'query': query,
            'expiration_time': isodate.datetime_isoformat(expiration_time),
            'cache_available': ('true' if cache_available else 'false'),
            'cache_timestamp': ('' if cache_timestamp is None else
                                isodate.datetime_isoformat(cache_timestamp)),
            'status': 'pending'
        }
        data = urllib.parse.urlencode(values).encode('utf-8')
        urllib.request.urlopen(url, data)
    ret = PARTNER_IMMEDIATE_FULLFIL or force_fullfil
    method_exit(signature, ret)
    return ret
def element(self):
    """Returns XML element

    Builds a <ContentKeyPeriod> element carrying the id plus optional
    index and ISO 8601 start/end attributes.
    """
    el = etree.Element("ContentKeyPeriod", nsmap=NSMAP)
    el.set("id", str(self.id))
    if self.index is not None:
        el.set("index", str(self.index))
    if self.start is not None:
        el.set("start", datetime_isoformat(self.start))
    if self.end is not None:
        el.set("end", datetime_isoformat(self.end))
    return el
def get(self):
    """Session/admin status endpoint.

    Validates the session and its container, dispatches any action
    sub-request (logout, stats, config, instance info, image switch, open
    port), and otherwise renders the admin template with session details.
    """
    sessname = self.get_session_id()
    user_id = self.get_user_id()
    if (sessname is None) or (user_id is None):
        self.send_error()
        return
    user = JBoxUserV2(user_id)
    is_admin = sessname in JBoxCfg.get("admin_sessnames", [])
    manage_containers = is_admin or user.has_role(JBoxUserV2.ROLE_MANAGE_CONTAINERS)
    show_report = is_admin or user.has_role(JBoxUserV2.ROLE_ACCESS_STATS)
    cont = SessContainer.get_by_name(sessname)
    if cont is None:
        self.send_error()
        return
    # Each handle_* helper fully services the request when it applies; the
    # first one that returns True ends the handler.
    if self.handle_if_logout(cont):
        return
    if self.handle_if_stats(is_admin or show_report):
        return
    if self.handle_if_show_cfg(is_admin):
        return
    if self.handle_if_instance_info(is_admin):
        return
    if self.handle_switch_julia_img(user):
        return
    if self.handle_if_open_port(sessname, user_id):
        return
    juliaboxver, _upgrade_available = self.get_upgrade_available(cont)
    jimg_type = 0
    if user.has_resource_profile(JBoxUserV2.RES_PROF_JULIA_PKG_PRECOMP):
        jimg_type = JBoxUserV2.RES_PROF_JULIA_PKG_PRECOMP
    expire = JBoxCfg.get('interactive.expire')
    d = dict(
        manage_containers=manage_containers,
        show_report=show_report,
        sessname=sessname,
        user_id=user_id,
        created=isodate.datetime_isoformat(cont.time_created()),
        started=isodate.datetime_isoformat(cont.time_started()),
        allowed_till=isodate.datetime_isoformat((cont.time_started() + timedelta(seconds=expire))),
        mem=cont.get_memory_allocated(),
        cpu=cont.get_cpu_allocated(),
        disk=cont.get_disk_allocated(),
        expire=expire,
        juliaboxver=juliaboxver,
        jimg_type=jimg_type
    )
    self.rendertpl("ipnbadmin.tpl", d=d)
def create_entry(self, project, description, hours, date=None,
                 start_time=None, billable=False, task=None):
    """Create a time entry of `hours` for `project`, optionally at a given
    date and/or start_time, then cache the created entry.

    Returns the decoded JSON response from the API.
    """
    if not date:
        local_datetime = datetime.now()
        if start_time:
            local_datetime = self.replace_datetime_time(
                local_datetime, start_time)
        utc_start_datetime = self.tz.localize(local_datetime).astimezone(
            pytz.utc)
        # End = start + duration; value is already UTC so the astimezone
        # below is a no-op kept from the original.
        localized_end_datetime = utc_start_datetime + timedelta(
            hours=float(hours))
        utc_end_datetime = localized_end_datetime.astimezone(pytz.utc)
        start_date = isodate.datetime_isoformat(utc_start_datetime)
        end_date = isodate.datetime_isoformat(utc_end_datetime)
    else:
        if start_time:
            date = date + ' ' + start_time
        start_date = self.local_date_string_to_utc_iso_8601(date)
        localized_datetime = self.local_date_string_to_localized_datetime(
            date)
        end_date = self.add_hours_to_localized_datetime_and_convert_to_iso_8601(
            localized_datetime, hours)
    data = {
        "start": start_date,
        "end": end_date,
        "billable": billable,
        "description": description,
        "projectId": project,
        "taskId": task,
        "tagIds": []
    }
    url = "{}workspaces/{}/timeEntries/".format(self.url, self.workspace)
    response = self.post(url, data)
    # Cache entry if entry was created
    response_data = response.json()
    if 'projectId' in response_data:
        self.cache.create_from_new_entry_response(response_data)
    return response_data
def get_locations(self, car_id, access_token, **options):
    """
    Fetch a list of locations for a specific car within a period.

    Parameters
    ----------
    car_id : str
        the id of the car you are looking for the locations.
    access_token : str
        the access token of the user.
    begin : datetime, optional
        The first datetime of the interval you want the locations.
        Default value is first day of month at 00:00:00.000.
    end : datetime, optional
        The last datetime of the interval you want the locations.
        Default value is current moment.
    limit : int, optional
        The maximum number of locations you want back.
        Default value is no limit.

    Returns
    -------
    tuple
        A tuple containing [Locations], Error.
        The error is None if everything went fine.
    """
    route = '{host}/vehicles/{car_id}/locations'.format(host=self.host, car_id=car_id)
    params = {}
    if options.get('limit', None) is not None:
        params['limit'] = options['limit']
    if options.get('begin', None) is not None:
        params['begin'] = isodate.datetime_isoformat(options['begin'])
    if options.get('end', None) is not None:
        params['end'] = isodate.datetime_isoformat(options['end'])
    if bool(params):
        route = '?'.join([route, url_parser.urlencode(params)])
    try:
        response = xee_utils.do_get_request(route, access_token)
        return [
            xee_entities.parse_location(location) for location in response
        ], None
    except ValueError:
        # Happens when the locations list is empty
        return [], None
    except (xee_exceptions.APIException, xee_exceptions.ParseException) as err:
        return None, err
def get_single_entry(uuid=None, created_date=None, modified_date=None, engineering_ids=None): data = {} if not created_date: created_date = str(isodate.datetime_isoformat(datetime.now(tzutc()))) data["created_date"] = str(created_date) if not modified_date: modified_date = str(isodate.datetime_isoformat(datetime.now(tzutc()))) data["modified_date"] = str(modified_date) if engineering_ids is None: engineering_ids = ["1", "2", "3", "4", "5"] data["engineering_ids"] = engineering_ids if not uuid: uuid = str(module_uuid.uuid4()) data["uuid"] = uuid return data
def prepare_request_data(self):
    """Assemble the StopEventRequest payload into self.request_data.

    Departure time is the current UTC moment ('Z'-suffixed by hand since
    utcnow() is naive). Optional parameters and the optional line filter
    are added only when set (empty entries stripped via _clear_dict).
    """
    self.request_data = {
        "StopEventRequest": {
            "Location": {
                "LocationRef": self.location.get_dict(),
                # utcnow() is naive, so mark UTC explicitly with 'Z'.
                "DepArrTime": isodate.datetime_isoformat(datetime.utcnow()) + "Z"
            }
        }
    }
    params = {
        "PtModeFilter": self.pt_mode_filter,
        "NumberOfResults": self.number_of_results,
        "TimeWindow": self.time_window,
        "StopEventType": self.stop_event_type,
        # NOTE(review): attribute name is misspelled ("operatiing") — it must
        # match the class definition, so confirm before renaming.
        "IncludeOperatingDays": self.include_operatiing_days,
        "IncludeRealtimeData": self.include_realtime_data,
        "IncludeOnwardCalls": self.include_onward_calls,
        "IncludePreviousCalls": self.include_previous_calls
    }
    if self.line != None:
        if self.line.direction != None:
            # Unwrap the enum-like direction to its raw value.
            self.line.direction = self.line.direction.value
        params["LineFilter"] = self._clear_dict({
            "Line": {
                "LineRef": self.line.line,
                "DirectionRef": self.line.direction
            }
        })
    params = self._clear_dict(params)
    if params != {}:
        self.request_data["StopEventRequest"]["Params"] = params
def to_dict(self): return { "datetime": isodate.datetime_isoformat(self.datetime), "sensor_id": self.sensor_id, "value": self.value, "horizon": self.horizon, }
def test_timezone_z(self):
    """An xsd:dateTime literal with 'Z' suffix converts to an aware datetime
    and re-serializes with the same 'Z' notation (isoformat shows +00:00)."""
    dt = "2008-12-01T18:02:00.522630Z"
    l = Literal(dt, datatype=URIRef("http://www.w3.org/2001/XMLSchema#dateTime"))
    self.assert_(isinstance(l.toPython(), datetime))
    self.assertEquals(
        datetime_isoformat(l.toPython(), DATE_EXT_COMPLETE + "T" + "%H:%M:%S.%f" + TZ_EXT),
        dt)
    self.assertEquals(l.toPython().isoformat(), "2008-12-01T18:02:00.522630+00:00")
def action(self, record):
    """Print a human-readable summary of one WARC record."""
    print('Record:', record.record_id)
    print(' Order:', self.num_records)
    print(' File offset:', record.file_offset)
    print(' Type:', record.warc_type)
    print(' Date:', isodate.datetime_isoformat(record.date))
    print(' Size:', record.content_length)
def post_weather_data(
    host: str,
    latest_version: str,
    auth_token: str,
    start: datetime,
    num_days: int,
    host_auth_start_month: Optional[str] = None,
):
    """POST synthetic radiation sensor values to /postWeatherData and assert
    a 200 response.

    Generates 96 quarter-hour values per day following a sinusoidal pattern.
    """
    lat = 33.4843866
    # NOTE(review): int while lat is a float — presumably fine since both are
    # only string-formatted into the sensor id; confirm.
    lng = 126
    # 96 quarter-hour slots per day.
    values = [random() * 600 * (1 + sin(x / 15)) for x in range(96 * num_days)]
    message = {
        "type": "PostWeatherDataRequest",
        "sensor": "%s:%s:%s:%s"
        % (
            build_ea_scheme_and_naming_authority(host, host_auth_start_month),
            "radiation",
            lat,
            lng,
        ),
        "values": tile(values, 1).tolist(),
        "start": datetime_isoformat(start),
        "duration": duration_isoformat(timedelta(hours=24 * num_days)),
        "horizon": "R/PT0H",
        "unit": "kW/m²",
    }
    response = requests.post(
        "%s/api/%s/postWeatherData" % (host, latest_version),
        headers={"Authorization": auth_token},
        json=message,
    )
    assert response.status_code == 200
def process_bind_param(self, value, dialect): value = datetime_isoformat(value, '%Y-%m-%dT%H:%M:%S') if value is None: raise TypeError('error type') else: return value
def test_paasta_log_line_passes_filter_false_when_invalid_time():
    """A line stamped with the current time must NOT pass a filter whose
    window ended in the past (5m ago .. 3m ago)."""
    service = 'fake_service'
    levels = ['fake_level1', 'fake_level2']
    clusters = ['fake_cluster1', 'fake_cluster2']
    instance = 'fake_instance'
    instances = [instance]
    components = ['build', 'deploy']
    line = 'fake_line'
    formatted_line = format_log_line(
        levels[0],
        clusters[0],
        service,
        instance,
        components[0],
        line,
        timestamp=isodate.datetime_isoformat(datetime.datetime.utcnow()),
    )
    start_time, end_time = logs.generate_start_end_time(from_string="5m", to_string="3m")
    assert logs.paasta_log_line_passes_filter(
        formatted_line,
        levels,
        service,
        components,
        clusters,
        instances,
        start_time=start_time,
        end_time=end_time,
    ) is False
def str_encode(self, value):
    """Encode the value into string representation.

    Validates first; non-None values are normalized to UTC and formatted
    with the configured ISO format. None passes through unchanged.
    """
    self.validate(value)
    if value is not None:
        value = isodate.datetime_isoformat(self._to_utc(value), self.isoformat)
    return value
def _get_site_values(service, url_params, input_file=None):
    """downloads and parses values for a site

    returns a values dict containing variable and data values
    """
    if input_file is None:
        # Timestamp of the query itself, stored alongside the parsed values.
        query_isodate = isodate.datetime_isoformat(datetime.datetime.now())
        service_url = _get_service_url(service)
        try:
            req = requests.get(service_url, params=url_params)
        except requests.exceptions.ConnectionError:
            log.info("There was a connection error with query:\n\t%s\n\t%s" % (service_url, url_params))
            return {}
        log.info("processing data from request: %s" % req.request.url)
        if req.status_code != 200:
            return {}
        # NOTE: StringIO.StringIO — this module is Python 2 only.
        input_file = StringIO.StringIO(str(req.content))
    else:
        query_isodate = None
    with _open_input_file(input_file) as content_io:
        data_dict = wml.parse_site_values(content_io, query_isodate)
    for variable_dict in data_dict.values():
        variable_dict['site'] = _extract_site_properties(variable_dict['site'])
    return data_dict
def test_timezone_offset_to_utc(self):
    """A +03:00 xsd:dateTime literal converted to UTC serializes with the
    'Z' suffix."""
    dt = "2010-02-10T12:36:00+03:00"
    l = Literal(
        dt, datatype=URIRef('http://www.w3.org/2001/XMLSchema#dateTime'))
    utc_dt = l.toPython().astimezone(UTC)
    self.assertEqual(datetime_isoformat(utc_dt), "2010-02-10T09:36:00Z")
def default(self, o): if isinstance(o, (datetime, date)): return iso.datetime_isoformat(o) if isinstance(o, ObjectId): return str(o) else: return super().default(o)
def test_paasta_log_line_passes_filter_false_when_invalid_time():
    """A line stamped with the current time must NOT pass a filter whose
    window ended in the past (5m ago .. 3m ago)."""
    service = "fake_service"
    levels = ["fake_level1", "fake_level2"]
    clusters = ["fake_cluster1", "fake_cluster2"]
    instance = "fake_instance"
    instances = [instance]
    components = ["build", "deploy"]
    line = "fake_line"
    formatted_line = format_log_line(
        levels[0],
        clusters[0],
        service,
        instance,
        components[0],
        line,
        timestamp=isodate.datetime_isoformat(datetime.datetime.utcnow()),
    )
    start_time, end_time = logs.generate_start_end_time(from_string="5m", to_string="3m")
    assert (logs.paasta_log_line_passes_filter(
        formatted_line,
        levels,
        service,
        components,
        clusters,
        instances,
        start_time=start_time,
        end_time=end_time,
    ) is False)
def export_data():
    """Build a zip export with an identifiable marketing CSV plus a
    de-identified CSV, write it under static/export, and return its
    metadata (id, ISO creation time, row counts, download URL)."""
    export_id = str(uuid4())
    filename = export_id + '.zip'
    my_dir = os.path.dirname(os.path.realpath(__file__))
    full_path = os.path.join(my_dir, 'static', 'export', filename)
    with ZipFile(full_path, 'w') as myzip:
        auth_dataset = generate_data_set(csv_records('hc_authorization.csv'), 'marketing')
        id_rows = len(auth_dataset)
        auth_csv = csv_from_generated_data(auth_dataset)
        myzip.writestr('authorized_identifiable.csv', auth_csv)
        deid_dataset = generate_data_set(None, 'marketing_deid')
        deid_rows = len(deid_dataset)
        deid_csv = csv_from_generated_data(deid_dataset)
        myzip.writestr('deidentified_data.csv', deid_csv)
    return {
        'export_id': export_id,
        'created_at': datetime_isoformat(datetime.now(UTC)),
        'id_rows': id_rows,
        'deid_rows': deid_rows,
        'url': url_for('static', filename='export/' + filename)
    }
def json(self): json_dict = { 'beneficiary_id':self.beneficiary.username, 'timestamp':datetime_isoformat(self.timestamp), 'event':self.event.json() } return json_dict
def multipass_string(user_id, user_name, user_email):
    """Build a multipass SSO token: JSON payload (2-minute expiry), AES-CBC
    encrypted with a salted key, Base64 encoded.

    Python 2 only — relies on byte-string chr/ord arithmetic.
    NOTE(review): `random` is not cryptographically secure for IV
    generation — consider os.urandom; confirm against the consuming service.
    """
    # Create the JSON for the multipass.
    multipass_json = json.dumps({
        'uid': user_id,
        'expires': datetime_isoformat(
            datetime.utcnow() + timedelta(minutes=2)),
        'customer_email': user_email,
        'customer_name': user_name
    })
    # Create the 16 byte salted hash.
    salt = API_KEY + ACCOUNT_KEY
    salted_hash = hashlib.sha1(salt).digest()[:16]
    # Seed the random number generator with the current time, then create a
    # random Initialisation Vector (IV).
    random.seed()
    iv = ''.join(chr(random.randint(0, 0xFF)) for i in range(16))
    # Pad the JSON to a multiple of 16 bytes using PKCS#5 padding.
    padding_length = 16 - len(multipass_json) % 16
    multipass_json += ''.ljust(padding_length, chr(padding_length))
    # XOR the first 16-byte block with the IV.
    first_block = ''
    for i in range(16):
        first_block += chr(ord(multipass_json[i]) ^ ord(iv[i]))
    multipass_xored = first_block + multipass_json[16:]
    # Encrypt the multipass using AES encryption in CBC mode.
    encryptor = AES.new(salted_hash, AES.MODE_CBC, iv)
    multipass_encrypted = encryptor.encrypt(multipass_xored)
    # Encode the encrypted data using Base64 encoding
    return b64encode(multipass_encrypted)
def delete_expired_images():
    """
    Helper method to delete expired images from database and filesystem
    :return:
    """
    # NOTE: Python 2 print statement — this module is Python 2 only.
    print "Deleting expired images"
    # Get expiration day
    days = int(app.config['SOURCE_IMAGE_LIFETIME'])
    expiration = isodate.datetime_isoformat(datetime.now() - timedelta(days=days))
    storage = GoogleCloudStorage()
    # Get expired history
    history_list = History.get_expired(expiration)
    files_to_delete = []
    for history in history_list:
        # Get images to delete
        files = history.source_files
        files_to_delete += files
        # Update mongo
        history.source_files = []
        history.save()
    # Delete all files to delete
    for filename in files_to_delete:
        storage.delete_from_cloud_storage(filename)
def convert_datelikes_to_isoformat(record): for ix, value in enumerate(record): if isinstance(value, datetime): record[ix] = isodate.datetime_isoformat(value) elif isinstance(value, date): record[ix] = isodate.date_isoformat(value) elif isinstance(value, time): record[ix] = isodate.date_isoformat(value)
def format_isotime(context, request, dt): """Format datetime into a machine-readable format. :param context: A :class:`mako.runtime.Context` object. :param request: A :class:`pyramid.request.Request` object. :param dt: A :class:`datetime.datetime` object. """ return isodate.datetime_isoformat(dt.astimezone(pytz.utc))
def build_string_to_sign(self, canonical_request, signing_options): signing_string = "{1}{0}{2}Z{0}{3}".format( self.LINE_BREAK, signing_options.algorithm, isodate.datetime_isoformat(signing_options.date), canonical_request, ) return signing_string
def append_meta_attributes(self, attributes, signing_options):
    """Populate signing metadata attributes (date, expiry, identity fields)
    from signing_options; the profile image is added only when non-blank."""
    # Naive ISO timestamp with an explicit 'Z' marker appended by hand.
    attributes[SigningAttributes.DATE] = "{0}Z".format(isodate.datetime_isoformat(signing_options.date))
    attributes[SigningAttributes.EXPIRES] = str(signing_options.expires)
    attributes[SigningAttributes.ORIGINATOR] = signing_options.originator
    attributes[SigningAttributes.EMAIL_ADDRESS] = signing_options.email_address
    attributes[SigningAttributes.DISPLAY_NAME] = signing_options.display_name
    # NOTE(review): `is not None` would be the idiomatic form of these tests.
    if not signing_options.profile_image == None and not signing_options.profile_image.strip() == "":
        attributes[SigningAttributes.PROFILE_IMAGE] = signing_options.profile_image
def iso_date(date):
    """Return the ISO 8601 string for a datetime, localizing naive values to
    the current Django timezone first; None passes through (returns None)."""
    if date is not None:
        if is_naive(date):
            date = make_aware(date, get_current_timezone())
        # NOTE naive formatting generates bad non-iso strings
        # return date.strftime("%Y-%m-%dT%H:%M:%S%z")
        # replaced with real isodate
        return datetime_isoformat(date)
def _encode(obj):
    """Coerce a value to a string: datetimes become ISO 8601, lists become a
    comma-joined string, everything else is UTF-8 encoded.

    Python 2 only — uses the `unicode` builtin.
    """
    if isinstance(obj, datetime):
        string = isodate.datetime_isoformat(obj)
    elif isinstance(obj, list):
        string = _encode(u", ".join(obj))
    else:
        string = unicode(obj).encode("utf-8")
    return string
def test_timezone_offset_to_utc(self):
    """A +03:00 xsd:dateTime literal converted to UTC serializes with the
    'Z' suffix."""
    dt = "2010-02-10T12:36:00+03:00"
    l = Literal(dt, datatype=URIRef('http://www.w3.org/2001/XMLSchema#dateTime'))
    utc_dt = l.toPython().astimezone(UTC)
    self.assertEqual(datetime_isoformat(utc_dt), "2010-02-10T09:36:00Z")
def default(self, o): if type(o) == datetime.timedelta: return duration_isoformat(o) elif type(o) == datetime.datetime: return datetime_isoformat(o) elif isinstance(o, decimal.Decimal): return float(o) return super(DateTimeAwareJsonEncoder, self).default(o)
def event(evt_type, data=None):
    """Build an event envelope (type + timestamp, optional data payload)
    and hand it to _send."""
    event = {
        'type': evt_type,
        # NOTE(review): utcnow() is naive — consumers presumably interpret
        # this as UTC; confirm.
        'time': isodate.datetime_isoformat(datetime.utcnow()),
    }
    if data:
        event.update({'data': data})
    _send(event)
def get_locations(self, car_id, access_token, **options):
    """
    Fetch a list of locations for a specific car within a period.

    Parameters
    ----------
    car_id : str
        the id of the car you are looking for the locations.
    access_token : str
        the access token of the user.
    begin : datetime, optional
        The first datetime of the interval you want the locations.
        Default value is first day of month at 00:00:00.000.
    end : datetime, optional
        The last datetime of the interval you want the locations.
        Default value is current moment.
    limit : int, optional
        The maximum number of locations you want back.
        Default value is no limit.

    Returns
    -------
    tuple
        A tuple containing [Locations], Error.
        The error is None if everything went fine.
    """
    route = '{host}/cars/{car_id}/locations'.format(host=self.host, car_id=car_id)
    params = {}
    if options.get('limit', None) is not None:
        params['limit'] = options['limit']
    if options.get('begin', None) is not None:
        params['begin'] = isodate.datetime_isoformat(options['begin'])
    if options.get('end', None) is not None:
        params['end'] = isodate.datetime_isoformat(options['end'])
    if bool(params):
        route = '?'.join([route, url_parser.urlencode(params)])
    try:
        response = xee_utils.do_get_request(route, access_token)
        return [xee_entities.parse_location(location) for location in response], None
    except ValueError:
        # Happens when the locations list is empty
        return [], None
    except (xee_exceptions.APIException, xee_exceptions.ParseException) as err:
        return None, err
def _parse_datetime(datetime_str): """returns an iso 8601 datetime string; USGS returns fractions of a second which are usually all 0s. ISO 8601 does not limit the number of decimal places but we have to cut them off at some point """ #XXX: this could be sped up if need be #XXX: also, we need to document that we are throwing away fractions of # seconds return isodate.datetime_isoformat(isodate.parse_datetime(datetime_str))
def default(self, o): print(o) if isinstance(o, (datetime, date)): return iso.datetime_isoformat(o) if isinstance(o, ObjectId): #o['id'] = str(o['_id']) return str(o) else: return super().default(o)
def test_format(self):
    """
    Take datetime object and create ISO string from it.
    This is the reverse test to test_parse.
    """
    # NOTE(review): `expectation`, `format` and `output` are not defined in
    # this method — they presumably come from the enclosing scope of a
    # test-generating factory that builds one such method per case; confirm.
    if expectation is None:
        self.assertRaises(AttributeError, datetime_isoformat, expectation, format)
    else:
        self.assertEqual(datetime_isoformat(expectation, format), output)
def get_mileage(self, car_id, access_token, **options):
    """
    Fetch the mileage value for a specific car within a period.

    Parameters
    ----------
    car_id : str
        the id of the car you are looking for the mileage.
    access_token : str
        the access token of the user.
    begin : datetime, optional
        The first datetime of the interval you want the mileage.
        Default value is first day of month at 00:00:00.000.
    end : datetime, optional
        The last datetime of the interval you want the mileage.
        Default value is current moment.
    initial_value : float, optional
        An offset for the mileage (will be added to the new one).
        Default value is 0.

    Returns
    -------
    tuple
        A tuple containing MileageStat, Error.
        The error is None if everything went fine.
    """
    route = '{host}/cars/{car_id}/stats/mileage'.format(host=self.host, car_id=car_id)
    params = {}
    if options.get('begin', None) is not None:
        params['begin'] = isodate.datetime_isoformat(options.get('begin'))
    # BUG FIX: this guard previously tested 'initial_value', so 'end' was
    # only sent when 'initial_value' was also supplied — and formatting
    # crashed when 'end' was absent.
    if options.get('end', None) is not None:
        params['end'] = isodate.datetime_isoformat(options.get('end'))
    if options.get('initial_value', None) is not None:
        params['initialValue'] = float(options.get('initial_value'))
    if bool(params):
        route = '?'.join([route, url_parser.urlencode(params)])
    try:
        response = xee_utils.do_get_request(route, access_token)
        return xee_entities.parse_mileage(response), None
    except (xee_exceptions.APIException, xee_exceptions.ParseException) as err:
        return None, err
def create_backup_tree(date, storage, include_media=True):
    """Builds a complete backup in a temporary directory, return the path.

    Dumps the database, optionally copies whitelisted media files, writes a
    metadata JSON file, verifies the result, and removes the directory again
    if verification fails.
    """
    backup_dir = tempfile.mkdtemp()
    metadata = {}
    # Save databases.
    output_filename = os.path.join(backup_dir, SQL_FILENAME)
    with open(output_filename, 'w') as out_fd:
        db_impl.dump(out_fd)
    # Save stored media.
    metadata[META_NUM_MEDIA_FILES] = 0

    def add_files(storage, dirname, destdir):
        """Recursively copies all files in `dirname` to `destdir`."""
        subdirs, files = storage.listdir(dirname)
        for filename in files:
            full_filename = os.path.join(dirname, filename)
            output_filename = os.path.join(destdir, full_filename)
            output_dirname = os.path.dirname(output_filename)
            if not os.path.exists(output_dirname):
                os.makedirs(output_dirname)
            with storage.open(full_filename, 'r') as srcfile:
                with open(output_filename, 'w') as dstfile:
                    logger.debug('+++ Creating {}'.format(output_filename))
                    shutil.copyfileobj(srcfile, dstfile)
            metadata[META_NUM_MEDIA_FILES] += 1
        for subdir in subdirs:
            # BUG FIX: was os.path.join((dirname, subdir)) — passing a tuple
            # as a single argument raises TypeError on the first recursion.
            add_files(storage, os.path.join(dirname, subdir), destdir)

    if include_media:
        destdir = os.path.join(backup_dir, 'media')
        for media_dir in MEDIA_WHITELIST:
            if storage.exists(media_dir):
                add_files(storage, media_dir, destdir)
    else:
        logger.warning('Not including media.')
    # Store metadata file.
    metadata[META_SERVER_NAME] = get_title()
    metadata[META_SERVER_VERSION] = get_version()
    metadata[META_CREATED_TIME] = isodate.datetime_isoformat(date)
    metadata[META_DB_ENGINE] = db_impl.engine_name()
    metadata[META_BACKUP_FORMAT] = BACKUP_FORMAT
    metadata_filename = os.path.join(backup_dir, METADATA_FILENAME)
    with open(metadata_filename, 'w') as outfile:
        json.dump(metadata, outfile, sort_keys=True, indent=2)
    valid = False
    try:
        verify_backup_directory(backup_dir)
        valid = True
        return backup_dir
    finally:
        # Clean up the partial tree if verification raised.
        if not valid:
            shutil.rmtree(backup_dir)
def set_message(cluster, message, valid_delta):
    """Store a cluster-wide message with an expiry of now + valid_delta (UTC)
    as JSON in the JBoxDynConfig table."""
    tnow = datetime.datetime.now(pytz.utc)
    tvalid = tnow + valid_delta
    msg = {"msg": message, "valid_till": isodate.datetime_isoformat(tvalid)}
    msg = json.dumps(msg)
    record = JBoxDynConfig(JBoxDB.qual(cluster, "message"), create=True, value=msg)
    # If the record already existed, overwrite its value explicitly.
    if not record.is_new:
        record.set_value(msg)
        record.save()
def set_last_update_time(self, dt):
    """Persist the last-update timestamp as an ISO 8601 string in the
    context's annotations; dt must be a timezone-aware datetime."""
    assert self.annotation_key
    assert isinstance(dt, datetime)
    assert dt.tzinfo, "please use a timezone aware datetime"
    # use string to store date to ensure that the annotation
    # doesn't cause problems in the future
    annotations = IAnnotations(self.context)
    annotations[self.annotation_key] = isodate.datetime_isoformat(dt)
def send_to_station(self, topic, msg):
    """
    Send a message on to rootio_telephony
    """
    # NOTE(review): the `topic` argument is immediately overwritten — it is
    # presumably derived from the message on purpose, leaving the parameter
    # effectively unused; confirm with callers.
    topic = "station.{}.db".format(msg['station_id'])
    # reserialize any datetime elements for zmq -- unpack again at ts
    for key, value in msg.items():
        if isinstance(value, datetime):
            msg[key] = isodate.datetime_isoformat(value)
    msg = json.dumps(msg)
    self.logger.debug("fwd %s: %s" % (topic, msg))
    self.broadcast_socket.send_multipart((topic, msg))
def as_dict(self): return dict( id=self.id, proposition_id=self.proposition_id, author_id=self.author_id, author_name=self.author_name, text=self.text, date_created=datetime_isoformat(self.date_created, '%Y-%m-%dT%H:%M:%S'), author=self.author.as_dict() )
def can_fulfill_data_request(request_id, study_id, query, expiration_time,
                             cache_available=False, cache_timestamp=None,
                             force_fullfil=False):
    """
    This function is used to decide if the partner should fulfill the data request.
    @param request_id the request ID - it is supposed to be a non-None/empty str. Required.
    @param study_id the study ID - it is supposed to be a non-None/empty str. Required.
    @param query the query string - it is supposed to be a non-None/empty str. Required.
    @param expiration_time the request expiration time - it is supposed to be a non-None datetime. Required.
    @param cache_available whether cache is available - it is supposed to be a bool. Optional, default to False.
    @param cache_timestamp the cache timestamp - it is supposed to be a datetime. Optional, default to None.
    @param force_fullfil this parameter is set to True when this method is called by decision module.
    @return True if the partner client can fulfill the data request, False otherwise.
    @throws TypeError throws if any argument isn't of right type
    @throws ValueError throws if any argument isn't valid (refer to the argument documentation)
    @throws PartnerClientError throws if any other error occurred during the operation
    """
    signature = 'hfppnetwork.partner.httpservices.datafulfillment.can_fulfill_data_request'
    method_enter(signature,{
        "request_id":request_id,
        "study_id":study_id,
        "query":query,
        "expiration_time":expiration_time,
        "cache_available":cache_available,
        "cache_timestamp":cache_timestamp
    })
    # If the partner is not configured to fulfill immediately (and this is not
    # a forced call from the decision module), notify the decision module of
    # the pending request instead of fulfilling it now.
    if not PARTNER_IMMEDIATE_FULLFIL and not force_fullfil:
        url = DECISION_MODULE_URL
        values = {'request_id':request_id,
                  'study_id':study_id,
                  'query':query,
                  'expiration_time':isodate.datetime_isoformat(expiration_time),
                  'cache_available':('true' if cache_available else 'false'),
                  'cache_timestamp':('' if cache_timestamp is None else isodate.datetime_isoformat(cache_timestamp)),
                  'status':'pending'}
        data = urllib.parse.urlencode(values).encode('utf-8')
        urllib.request.urlopen(url, data)
    ret = PARTNER_IMMEDIATE_FULLFIL or force_fullfil
    method_exit(signature,ret)
    return ret
def _dump_default(self, obj): if isinstance(obj, datetime.datetime): return isodate.datetime_isoformat(obj) elif isinstance(obj, datetime.date): return isodate.date_isoformat(obj) elif isinstance(obj, datetime.time): return isodate.time_isoformat(obj) elif isinstance(obj, datetime.timedelta): return obj.total_seconds() else: raise TypeError(obj)
def to_utc_iso8601(datetime_to_convert, strip_fractional_seconds=False): # Strip fractional seconds, if requested. if strip_fractional_seconds: datetime_to_convert.replace(microsecond=0) if (datetime_to_convert.tzinfo is None) or (datetime_to_convert.utcoffset is None): datetime_to_convert = datetime_to_convert.replace(tzinfo=isodate.UTC) else: datetime_to_convert = datetime_to_convert.astimezone(isodate.UTC) return isodate.datetime_isoformat(datetime_to_convert)
def to_raw(self, value, context=None):
    """Serialize a datetime to its ISO 8601 string; strings are validated
    and passed through unchanged.

    Raises ValidationError for any other type.
    """
    if isinstance(value, datetime.datetime):
        return isodate.datetime_isoformat(value)
    if isinstance(value, six.string_types):
        # Validation only — the parsed result is intentionally discarded and
        # the original string returned as the raw value.
        self.to_python(value, context=context)
        return value
    raise exceptions.ValidationError(
        "Invalid datetime value '%s' and type %s" % (value, type(value)),
        value=value,
        invalid='type',
        against='datetime'
    )