def _get_default_converter_single(self, expected_type): # this method converts an item that was already parsed to a supported type, # for example in JSON can be int, float, bool if expected_type in {str, int, float, bool} or str(expected_type) == "~T": return lambda value: value if expected_type is date: return lambda value: dateutil_parser(value).date() if value else None if expected_type is datetime: return lambda value: dateutil_parser(value) if value else None if expected_type is bytes: # note: the code is optimized for strings here, not bytes # since most of times the user will want to handle strings return ( lambda value: urlsafe_b64decode(value.encode("utf8")).decode("utf8") if value else None ) if expected_type is UUID: return lambda value: UUID(value) return lambda value: expected_type(**value)
def _get_default_converter_single(self, expected_type): if expected_type is str or str(expected_type) == "~T": return lambda value: unquote(value) if value else None if expected_type is bool: return _default_bool_converter if expected_type is bytes: # note: the code is optimized for strings here, not bytes # since most of times the user will want to handle strings return lambda value: value.encode("utf8") if value else None if expected_type in self._simple_types: return lambda value: expected_type(value) if value else None if expected_type is UUID: return lambda value: UUID(value) if expected_type is datetime: return lambda value: dateutil_parser(unquote(value)) if value else None if expected_type is date: return ( lambda value: dateutil_parser(unquote(value)).date() if value else None ) raise MissingConverterError(expected_type, self.__class__)
def exclusive_maximum_check(self, val: any, _filter: Filter) -> bool:
    """
    Exclusive-maximum check.

    Returns True if value is strictly less than the filter's
    ``exclusive_max`` bound.

    Args:
        val: value to check, extracted from match
        _filter: Filter

    Return:
        bool
    """
    try:
        if not _filter.fmt:
            # Unformatted filters are plain numeric comparisons.
            if self.is_numeric(val):
                return val < _filter.exclusive_max
            return False
        if _filter.fmt == "date" or _filter.fmt == "date-time":
            # Compare as timezone-aware datetimes, both pinned to UTC.
            upper = dateutil_parser(_filter.exclusive_max).replace(tzinfo=pytz.UTC)
            candidate = dateutil_parser(str(val)).replace(tzinfo=pytz.UTC)
            return candidate < upper
        return False
    except (TypeError, ValueError, ParserError):
        return False
def _get_default_converter(self, expected_type): if expected_type is str or str(expected_type) == "~T": return lambda value: unquote(value[0]) if value else None if expected_type is bool: return _default_bool_list_converter if expected_type is bytes: return lambda value: value[0].encode("utf8") if value else None if expected_type in self._simple_types: return lambda value: expected_type(value[0]) if value else None if expected_type is UUID: return lambda value: UUID(value[0]) if value else None if self.is_generic_iterable_annotation(expected_type) or expected_type in { list, set, tuple, }: return self._get_default_converter_for_iterable(expected_type) if expected_type is datetime: return lambda value: dateutil_parser(unquote(value[0])) if value else None if expected_type is date: return ( lambda value: dateutil_parser(unquote(value[0])).date() if value else None ) raise MissingConverterError(expected_type, self.__class__)
def minimum_check(self, val: any, _filter: Filter) -> bool:
    """
    Minimum check.

    Returns True if value is greater than or equal to the filter's
    ``minimum`` bound.

    Args:
        val: value to check, extracted from match
        _filter: Filter

    Return:
        bool
    """
    try:
        if not _filter.fmt:
            # Unformatted filters are plain numeric comparisons.
            if self.is_numeric(val):
                return val >= _filter.minimum
            return False
        if _filter.fmt == "date" or _filter.fmt == "date-time":
            # Compare as timezone-aware datetimes, both pinned to UTC.
            lower = dateutil_parser(_filter.minimum).replace(tzinfo=pytz.UTC)
            candidate = dateutil_parser(str(val)).replace(tzinfo=pytz.UTC)
            return candidate >= lower
        return False
    except (TypeError, ValueError, ParserError):
        return False
def kitaichi(game_name):
    """Estimate expected gacha hits for *game_name*.

    Counts recent tweets (last month) matching the game's "gacha hit"
    keyword and scales the count by the game's gacha rate.

    Args:
        game_name: key into ``gacha_percent_dic``.

    Returns:
        Number of matching tweets times the gacha percentage.
    """
    tweets = []
    # Fetch parameters for the search query.
    gacha_percent = gacha_percent_dic[game_name]
    keyword = game_name + " ガチャ" + " 当たった"
    today = datetime.datetime.today()
    since_dt = today - relativedelta(months=1)
    # BUG FIX: this line previously referenced the undefined name
    # `one_month_ago` (NameError at runtime); the cutoff computed above
    # is `since_dt` itself. Drop sub-second precision, then re-parse as
    # an aware UTC datetime.
    since_dt = str(since_dt).split(".")[0]
    since_dt = dateutil_parser(since_dt + "+00:00")
    params = {'q': keyword, 'count': 100}
    req = twitter.get(url, params=params)
    if req.status_code == 200:
        search_timeline = json.loads(req.text)
        for tweet in search_timeline['statuses']:
            # Twitter timestamps are UTC; shift to JST (+9h) before comparing.
            dt = dateutil_parser(tweet.get("created_at")) + datetime.timedelta(hours=9)
            if dt >= since_dt:
                tweets.append(tweet['id'])
    else:
        print("ERROR: %d" % req.status_code)
    kitaichi = len(tweets) * gacha_percent
    return kitaichi
def _parse_datetime(self, dt_string):
    """Try to return the ISO 8601 datetime string as a datetime instance."""
    parser = dateutil_parser()
    try:
        result = parser.parse(dt_string)
    except (AttributeError, ValueError):
        # Unparsable or non-string input: signal failure with None.
        return None
    return result
def get_app_signal(api_client, query):
    """Search Datadog security-monitoring signals matching *query*.

    The search window is deliberately wide (2021..2050) so matching is
    effectively driven by the query alone.

    Raises:
        LookupError: when the search returns no data.
    """
    api = security_monitoring_api.SecurityMonitoringApi(api_client)
    window = SecurityMonitoringSignalListRequestFilter(
        _from=dateutil_parser("2021-01-01T00:00:00.00Z"),
        query=query,
        to=dateutil_parser("2050-01-01T00:00:00.00Z"),
    )
    request_body = SecurityMonitoringSignalListRequest(
        filter=window,
        page=SecurityMonitoringSignalListRequestPage(limit=25),
        sort=SecurityMonitoringSignalsSort("timestamp"),
    )
    response = api.search_security_monitoring_signals(body=request_body)
    if not response["data"]:
        raise LookupError(query)
    return response
def _parse_sourcex_logs(path):
    """
    Low level parsing of the logs

    :param path: SourceX logs
    :return: A dictionary with the log entries, with their timestamp,
        logger, level and message, plus 'Time' (seconds since first entry)
    """
    ts_parser = dateutil_parser()
    timestamps, loggers, levels, messages = [], [], [], []
    with open(path) as fd:
        for line in fd:
            try:
                when, who, level, _, message = line.split(maxsplit=4)
                parsed = ts_parser.parse(when)
            except ValueError as e:
                # Malformed line or unparsable timestamp: skip, but trace it.
                logger.warning(e)
                continue
            timestamps.append(parsed)
            loggers.append(who)
            levels.append(level)
            messages.append(message.strip())
    log = dict(timestamp=timestamps, logger=loggers, level=levels,
               message=messages)
    start = timestamps[0]
    log['Time'] = np.array([(t - start).total_seconds() for t in timestamps])
    return log
def assertCloseToNow(self, dt, now=None):
    """
    Make sure the datetime is within a minute from `now`.
    """
    # Accept ISO-ish strings by parsing them into datetimes first.
    if isinstance(dt, str):
        try:
            dt = dateutil_parser(dt)
        except ValueError as e:
            raise AssertionError('Expected valid date; got %s\n%s' % (dt, e))

    if not dt:
        raise AssertionError('Expected datetime; got %s' % dt)

    window = timedelta(minutes=1)
    upper_ts = time.mktime((dt + window).timetuple())
    lower_ts = time.mktime((dt - window).timetuple())
    if not now:
        now = datetime.now()
    now_ts = time.mktime(now.timetuple())

    assert lower_ts < now_ts < upper_ts, (
        'Expected datetime to be within a minute of %s. Got %r.' % (now, dt))
def assertCloseToNow(self, dt, now=None):
    """
    Make sure the datetime is within a minute from `now`.
    """
    # Accept date strings by parsing them into datetimes first
    # (Python 2 file: basestring covers str and unicode).
    if isinstance(dt, basestring):
        try:
            dt = dateutil_parser(dt)
        except ValueError as e:
            raise AssertionError(
                'Expected valid date; got %s\n%s' % (dt, e))

    if not dt:
        raise AssertionError('Expected datetime; got %s' % dt)

    window = timedelta(minutes=1)
    dt_later_ts = time.mktime((dt + window).timetuple())
    dt_earlier_ts = time.mktime((dt - window).timetuple())
    if not now:
        now = datetime.now()
    now_ts = time.mktime(now.timetuple())

    assert dt_earlier_ts < now_ts < dt_later_ts, (
        'Expected datetime to be within a minute of %s. Got %r.' % (now, dt))
def update_global_totals(job, date, **kw):
    """Recompute one global-stats counter and upsert it into the DB.

    Args:
        job: name of the stats job to run (key into the daily/metrics
            jobs mapping).
        date: date to compute stats for; may arrive as a string after
            celery serialization, in which case it is parsed back.
    """
    log.info('Updating global statistics totals (%s) for (%s)' % (job, date))

    if isinstance(date, basestring):
        # Because of celery serialization, date is not date object, it has been
        # transformed into a string, we need the date object back.
        date = dateutil_parser(date).date()

    jobs = _get_daily_jobs(date)
    jobs.update(_get_metrics_jobs(date))

    num = jobs[job]()

    q = """REPLACE INTO global_stats (`name`, `count`, `date`) VALUES (%s, %s, %s)"""
    p = [job, num or 0, date]

    # BUG FIX: acquire the cursor *before* the try block; previously a
    # failure in connection.cursor() left `cursor` unbound and the
    # finally clause raised NameError, masking the real error.
    cursor = connection.cursor()
    try:
        cursor.execute(q, p)
    except Exception as e:
        log.critical('Failed to update global stats: (%s): %s' % (p, e))
    else:
        log.debug('Committed global stats details: (%s) has (%s) for (%s)'
                  % tuple(p))
    finally:
        cursor.close()
def from_raw_logline(cls, raw_line: str) -> LogRecord:
    """Build a LogRecord from one space-separated raw log line.

    The line may or may not carry an operating-mode column; its presence
    is detected by checking whether the second column parses as a float
    (a temperature) or not (an opmode name). When absent, every later
    column shifts left by one and the mode defaults to 'schedule'.
    """
    fields: List[str] = raw_line.rstrip('\r\n').split(' ')
    timestamp: Optional[datetime] = dateutil_parser(
        fields[0]) if fields[0] else None

    # Probe the second column to decide the layout.
    try:
        float(fields[1])
    except ValueError:
        mode_present = True
        shift = 0
    else:
        mode_present = False
        shift = -1

    return cls(
        timestamp=timestamp,
        timestamp_missing=timestamp is None,
        operating_mode=fields[1] if mode_present else 'schedule',
        temp_current=float(fields[2 + shift]) if fields[2 + shift] else None,
        temp_average=float(fields[3 + shift]) if fields[3 + shift] else None,
        temp_set=float(fields[4 + shift]) if fields[4 + shift] else None,
        therm_state=fields[5 + shift] == 'on',
        wlan_active=fields[6 + shift] == 'wlan',
        tampered=fields[7 + shift] == 'tampered',
        grace_given=fields[8 + shift] == 'gracious',
        battery_voltage=None,
    )
def _parse_date(date): if date is None: return None try: return dateutil_parser().parse(date) except: return None
def plot_ajax():
    """Ajax route endpoint function for fetching plot data"""
    args = request.args
    raw_from: Optional[str] = args.get('from')
    raw_to: Optional[str] = args.get('to')
    raw_exclusive: Optional[str] = args.get('exclusive')
    plot_mode: Optional[str] = args.get('mode')

    # Absent query parameters map to None; present ones are parsed.
    timestamp_min: Optional[datetime] = dateutil_parser(raw_from) if raw_from else None
    timestamp_max: Optional[datetime] = dateutil_parser(raw_to) if raw_to else None
    bound_exclusive: Optional[bool] = (
        raw_exclusive.lower() in ('1', 'true', 'yes') if raw_exclusive else None
    )

    json_data = create_plot(timestamp_min=timestamp_min,
                            timestamp_max=timestamp_max,
                            bound_exclusive=bound_exclusive,
                            plot_mode=plot_mode)
    return Response(json_data, mimetype=app.config["JSONIFY_MIMETYPE"])
def minimum_check(val: any, _filter: Filter) -> bool:
    """Return True if *val* satisfies the filter's ``minimum`` bound.

    Date/date-time formatted filters compare parsed, UTC-pinned
    datetimes; "number"-typed filters compare numerically. Anything
    else (or any parse/compare failure) fails closed.
    """
    try:
        if _filter.fmt:
            if _filter.fmt == "date" or _filter.fmt == "date-time":
                # pytz access moved into this branch: only the date
                # comparison needs it.
                utc = pytz.UTC
                tmp_date = dateutil_parser(str(_filter.minimum)).replace(tzinfo=utc)
                val = dateutil_parser(str(val)).replace(tzinfo=utc)
                return val >= tmp_date
            elif _filter._type == "number":
                # BUG FIX: the bound was previously truncated with int(),
                # so e.g. minimum=2.5 wrongly accepted val=2; compare both
                # sides as floats instead.
                if type(val) is str:
                    if val.isnumeric():
                        return float(val) >= float(_filter.minimum)
                else:
                    return val >= float(_filter.minimum)
        return False
    except (TypeError, ValueError):
        # TypeError added so non-comparable values fail closed too.
        return False
def nearby_search(self, lat: float, lon: float, radius: int = 500,
                  service_area=None, service_provider=None) -> [VehicleTrack]:
    """Query the Lime map API for vehicles around (lat, lon).

    Builds a bounding box of roughly *radius* metres around the point,
    scrapes the web map endpoint with stored session credentials, and
    returns one VehicleTrack per bike in the response.
    """
    # Rough metres->degrees conversion for the bounding box. Original
    # author's note: "i know its not exact but the best i wanted to
    # build right now".
    # NOTE(review): cos() is taken of the *longitude* here; a
    # latitude-dependent degree width would use cos(lat) — confirm.
    buffer = radius / 40000000. * 360. / math.cos(lon / 360. * math.pi)
    point = Point(lat, lon).buffer(buffer)
    polygon = point.exterior.bounds
    # Session cookie + auth header are scraped credentials from settings.
    cookies = {
        '_limebike-web_session': self.settings.get("WEB_SESSION", None)
    }
    headers = {'authorization': self.settings.get('AUTHORIZATION', None)}
    params = (
        ('ne_lat', str(polygon[0])),
        ('ne_lng', str(polygon[1])),
        ('sw_lat', str(polygon[2])),
        ('sw_lng', str(polygon[3])),
        ('user_latitude', str(lat)),
        ('user_longitude', str(lon)),
        ('zoom', '16'),
    )
    response = requests.get(
        'https://web-production.lime.bike/api/rider/v1/views/map',
        headers=headers, params=params, cookies=cookies, timeout=5)
    vehicle_tracks = []
    try:
        if not "data" in response.json():
            return []
        for item in response.json()["data"]["attributes"]["bikes"]:
            # battery_level: meter_range appears to be metres of range;
            # 40233 m (~25 miles) is treated as 100%.
            vehicle_tracks.append(VehicleTrack(
                vehicle_id=f'{item["attributes"]["type_name"]}-{item["attributes"]["last_three"]}',
                provider="lime",
                last_seen=dateutil_parser(item["attributes"]["last_activity_at"]),
                lat=item["attributes"]["latitude"],
                lon=item["attributes"]["longitude"],
                battery_level=int(item["attributes"]["meter_range"] / (40233 / 100))
                if "meter_range" in item["attributes"] and item["attributes"]["meter_range"] else 0,
                raw_data=json.dumps(item)))
    except json.decoder.JSONDecodeError:
        # Non-JSON body usually means the scraper was blocked.
        print("I guess we have been caught")
        return vehicle_tracks
    # NOTE(review): original formatting was lost; this sleep is read as a
    # rate-limiting pause on the success path — confirm it is not dead
    # code after an earlier return.
    time.sleep(5)
    return vehicle_tracks
def date_parser(input_str):
    """DateTime Factory.

    Parameter:
        input_str (string): date string.

    Returns:
        DateTime: Datetime object parsed from input string.
    """
    # Ambiguous dates resolve day-first (e.g. 01/02/03 -> 1 Feb 2003).
    parsed = dateutil_parser(input_str, dayfirst=True, yearfirst=False)
    return parsed
def assertCloseToNow(self, dt, now=None):
    """
    Make sure the datetime is within a minute from `now`.
    """
    # Try parsing the string if it's not a datetime.
    if isinstance(dt, basestring):
        try:
            dt = dateutil_parser(dt)
        # BUG FIX: `except ValueError, e` is Python-2-only syntax removed
        # in Python 3; the `as` form works on Python 2.6+ and 3.
        except ValueError as e:
            raise AssertionError("Expected valid date; got %s\n%s" % (dt, e))
def assertCloseToNow(self, dt, now=None):
    """
    Make sure the datetime is within a minute from `now`.
    """
    # Try parsing the string if it's not a datetime.
    if isinstance(dt, basestring):
        try:
            dt = dateutil_parser(dt)
        # BUG FIX: `except ValueError, e` is Python-2-only syntax removed
        # in Python 3; the `as` form works on Python 2.6+ and 3.
        except ValueError as e:
            raise AssertionError('Expected valid date; got %s\n%s' % (dt, e))
def read(self, request, location_slug=None, start_dt=None, end_dt=None):
    """Return information about the activity at a location.

    The generated JSON response exposes:

    total_users - The total number of users that logged a session

    :param location_slug: a slug identifying a location
    :type location_slug: str
    :key start_dt: an ISO 8601 datetime after which any counted session
                   must be logged
    :type start_dt: str
    :key end_dt: an ISO 8601 datetime before which any counted session
                 must be logged
    :type end_dt: str

    Returns an HTTP 400 bad request response if the slug does not map to
    a valid location or either optional datetime is malformed.
    """
    # Resolve the location; an unknown slug is a client error.
    try:
        location = Location.objects.get(slug=location_slug)
    except Location.DoesNotExist:
        return rc.BAD_REQUEST

    filter_args = {'location': location}
    parser = dateutil_parser()
    # Parse each optional datetime bound, rejecting malformed input.
    for key, raw in (('start_date', start_dt), ('end_date', end_dt)):
        if raw:
            try:
                filter_args[key] = parser.parse(raw)
            except ValueError:
                return rc.BAD_REQUEST

    # Return counts for the given sessions
    sessions = Session.objects.filter_sessions(**filter_args)
    return {
        'total_users': sessions.count(),
    }
def timestring_to_datetime(timestring):
    """
    Convert an ISO formated date and time string to a datetime object.

    :param str timestring: String with date and time in ISO format.
    :rtype: datetime
    :return: datetime object
    """
    # dateutil could emit a UnicodeWarning when comparing str/unicode
    # internally; keep it suppressed for the duration of the parse.
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", category=UnicodeWarning)
        parsed = dateutil_parser(timestring)
    return parsed
def request(context, flow):
    """mitmproxy inflight hook: rewrite requests to the Wayback Machine.

    Redirects every request to web.archive.org's snapshot nearest the
    client-supplied Accept-Datetime header (Memento style), defaulting
    to a fixed 1998 timestamp.
    """
    #print "handle request: %s - %s" % (flow.request.host, flow.request.path)
    # Default snapshot, e.g. http://web.archive.org/web/19981212031357
    fluxcapacitor_setting = "19981201052808"

    if flow.request.headers["accept-datetime"]:
        # eg Accept-Datetime: Thu, 31 May 2007 20:35:00 GMT
        parsed_date = dateutil_parser(flow.request.headers["accept-datetime"])
        fluxcapacitor_setting = parsed_date.strftime("%Y%m%d%H%M%S")

    # Skip requests that were already rewritten (they carry the id_/http
    # marker in their path).
    if not "id_/http" in flow.request.path:
        flow.request.path = "/web/%sid_/http://%s%s" % (fluxcapacitor_setting,
                                                        flow.request.host,
                                                        flow.request.path)
        flow.request.scheme = "http"
        flow.request.host = "web.archive.org"
        # NOTE(review): list-valued header assignment is the old mitmproxy
        # ODict API — confirm against the mitmproxy version in use.
        flow.request.headers["Host"] = ["web.archive.org"]
def from_raw_logline(cls, raw_line: str) -> LogRecord:
    """Build a LogRecord from one space-separated raw log line.

    Empty columns (and a missing trailing battery column) map to None.
    """
    fields: List[str] = raw_line.rstrip('\r\n').split(' ')
    timestamp: Optional[datetime] = dateutil_parser(
        fields[0]) if fields[0] else None

    def opt_float(raw: str):
        # Empty columns mean "no reading".
        return float(raw) if raw else None

    return cls(
        timestamp=timestamp,
        timestamp_missing=timestamp is None,
        temp_current=opt_float(fields[1]),
        temp_average=opt_float(fields[2]),
        humidity_current=opt_float(fields[3]),
        humidity_average=opt_float(fields[4]),
        temp_set=opt_float(fields[5]),
        wlan_active=fields[6] == 'wlan',
        battery_voltage=opt_float(fields[7]) if len(fields) > 7 else None,
    )
def validate_patch(self, to_check: any, _filter: Filter) -> bool:
    """
    Apply filter on match_value.

    Utility function used in applying filtering to a cred
    by triggering checks according to filter specification

    Args:
        to_check: value to check, extracted from match
        _filter: Filter

    Return:
        bool
    """
    return_val = False
    if _filter._type:
        if self.check_filter_only_type_enforced(_filter):
            # Type-only filters: just validate the value's runtime type
            # (plus date format for strings).
            # NOTE: these branches return *directly*, deliberately
            # bypassing the `_not` inversion at the bottom; only the
            # fall-through cases below are subject to `_not`.
            if _filter._type == "number":
                if isinstance(to_check, (int, float)):
                    return True
            elif _filter._type == "string":
                if isinstance(to_check, str):
                    if _filter.fmt == "date" or _filter.fmt == "date-time":
                        try:
                            to_compare_date = dateutil_parser(to_check)
                            if isinstance(to_compare_date, datetime):
                                return True
                        except (ParserError, TypeError):
                            return False
                    else:
                        return True
        else:
            # Full filters: delegate to the per-type processors.
            if _filter._type == "number":
                return_val = self.process_numeric_val(to_check, _filter)
            elif _filter._type == "string":
                return_val = self.process_string_val(to_check, _filter)
    else:
        # No declared type: only enum/const constraints can apply.
        # NOTE: a `const` check overwrites the `enum` result when both
        # are present.
        if _filter.enums:
            return_val = self.enum_check(val=to_check, _filter=_filter)
        if _filter.const:
            return_val = self.const_check(val=to_check, _filter=_filter)

    # `not` filters invert the (fall-through) outcome.
    if _filter._not:
        return not return_val
    return return_val
def parse_datetime(s, **kwargs):
    """
    Converts a time-string into a valid
    :py:class:`~datetime.datetime.DateTime` object.

    Args:
        s (str): string to be formatted.

    ``**kwargs`` is passed directly to :func:`.dateutil_parser`.

    Returns:
        :py:class:`~datetime.datetime.DateTime`
    """
    # Empty/None input is not an error: it simply maps to None.
    if not s:
        return None
    try:
        parsed = dateutil_parser(s, **kwargs)
    except (ValueError, TypeError, OverflowError) as exc:
        # Delegate to the shared reraise helper so callers get a uniform
        # error with the offending input attached.
        reraise('datetime parsing error from %s' % s, exc)
    return parsed
def test_create(self):
    """Test case for create

    Create a video  # noqa: E501
    """
    # Each fixture row provides an HTTP status plus a canned JSON body.
    for status, json in self.load_json('videos', 'create'):
        responses.reset()
        kwargs = {
            'video_creation_payload': VideoCreationPayload(
                title="Maths video",
                description="A video about string theory.",
                source="https://www.myvideo.url.com/video.mp4",
                public=True,
                panoramic=False,
                mp4_support=True,
                player_id="pl45KFKdlddgk654dspkze",
                tags=["maths", "string theory", "video"],
                metadata=[
                    Metadata(
                        key="Color",
                        value="Green",
                    ),
                ],
                published_at=dateutil_parser('2020-07-14T23:36:18.598Z'),
            ),
        }
        url = '/videos'.format(**kwargs)
        # Register the mocked endpoint response for this iteration.
        responses.add('POST', url, body=json, status=int(status),
                      content_type='application/json')
        if status[0] == '4':
            # 4xx fixtures must surface as ApiException
            # (NotFoundException specifically for 404).
            with self.assertRaises(ApiException) as context:
                self.api.create(**kwargs)
            if status == '404':
                self.assertIsInstance(context.exception, NotFoundException)
        else:
            self.api.create(**kwargs)
def parse_timestamp(
        timestamp_raw: str,
        timestamp_override: Optional[datetime] = None) -> datetime:
    """
    Helper static method for parsing a raw timestamp string

    :param timestamp_raw: the raw timestamp string
    :param timestamp_override: if specified, return this directly
    :raise ValueError: if the raw timestamp couldn't be parsed
    :return: the parsed timestamp as a datetime object
    """
    # An explicit override short-circuits parsing entirely.
    if timestamp_override is not None:
        return timestamp_override

    parsed: Optional[datetime] = dateutil_parser(
        timestamp_raw) if timestamp_raw else None
    if parsed is None:
        raise ValueError('Missing timestamp')
    return parsed
def handle_event(self, calendar, event):
    """Add the event to the list.

    Arguments:
    calendar -- a LocationCalendar instance
    event -- a raw Google Calendar event
    """
    event_id = event.id.text
    # De-duplicate: each event is registered at most once.
    if event_id in self._added_events:
        return
    try:
        when = event.when[0]
    except IndexError:
        # Events without a concrete time window are ignored.
        return
    parser = dateutil_parser()
    self.add_event(calendar, event.title.text, event.recurrence,
                   parser.parse(when.start), parser.parse(when.end))
    self._added_events.append(event_id)
def nearby_search(self, lat: float, lon: float, radius: int = 500,
                  service_area=None, service_provider=None) -> [VehicleTrack]:
    """Fetch vehicles inside the service area's bounding box from the
    Circ (goflash) API.

    Args:
        lat, lon: query position (kept for interface parity; the API
            call is bounded by service_area instead).
        radius: nominal search radius in metres (unused here).
        service_area: area whose WKT bounds define the query box.
        service_provider: provider record carrying API credentials;
            its token is refreshed before the request.

    Returns:
        A list of VehicleTrack, one per device reported.
    """
    service_provider = self.refresh_token(service_provider)
    headers = {
        'Authorization': service_provider.settings["accessToken"],
    }
    # Bounding box from the stored WKT service area (minx, miny, maxx, maxy).
    shp = Polygon(wkt.loads(str(service_area.area).split(";")[1])).bounds
    params = (
        ('latitudeTopLeft', shp[3]),
        ('longitudeTopLeft', shp[2]),
        ('latitudeBottomRight', shp[1]),
        ('longitudeBottomRight', shp[0]),
    )
    # FIX: added a timeout so a stalled API call cannot hang the crawler
    # (the sibling Lime scraper already uses timeout=5).
    result = requests.get('https://node.goflash.com/devices',
                          headers=headers, params=params, timeout=5)
    data = result.json()
    # FIX: removed a leftover debug print(data) that dumped the full API
    # payload to stdout on every call.
    vehicle_tracks = []
    for item in data["devices"]:
        vehicle_tracks.append(
            VehicleTrack(vehicle_id=item["name"],
                         provider="circ",
                         last_seen=dateutil_parser(item["timestamp"]),
                         lat=item["latitude"],
                         lon=item["longitude"],
                         battery_level=item["energyLevel"],
                         raw_data=json.dumps(item)))
    return vehicle_tracks
def get_formatted_value(value):
    """
    Returns the formatted value for the signature calculation

    Uses rules specified in Systempay documentation :

    - bool is interpreted as an integer (0 or 1)
    - empty values (except integers) are considered as an empty string
    - datetime values are formatted like 'YYYYMMDD' using UTC timezone

    :param value: The input value to format
    :type value: any
    :returns: The formatted value
    :rtype: str
    """
    # Boolean format (checked before int: bool is a subclass of int).
    if isinstance(value, bool):
        return str(int(value))

    # Integer value
    if isinstance(value, int):
        return str(value)

    # Empty value
    if not value:
        return ''

    # Datetime format
    if isinstance(value, datetime):
        return value.astimezone(pytz.UTC).strftime('%Y%m%d')

    # String datetime format
    try:
        d = dateutil_parser(value).astimezone(pytz.UTC)
        return d.strftime('%Y%m%d')
    except (TypeError, ValueError, OverflowError):
        # BUG FIX: previously a bare `except:` swallowed every exception
        # (even SystemExit); catch only the parse/convert errors that
        # mean "not a date string" and fall through to the raw value.
        pass

    return value
async def fetch(self, max_count: int = None) -> Sequence[VCRecord]:
    """
    Fetch the next list of VC records from the store.

    Args:
        max_count: Max number of records to return. If not provided,
            defaults to the backend's preferred page size

    Returns:
        A list of `VCRecord` instances, newest issuanceDate first when
        every record's date parses; storage order otherwise
    """
    rows = await self._search.fetch(max_count)
    records = [storage_to_vc_record(r) for r in rows]
    try:
        records.sort(
            key=lambda v: dateutil_parser(v.cred_value.get("issuanceDate")),
            reverse=True,
        )
    except (ParserError, TypeError):
        # BUG FIX: a record without an issuanceDate makes dateutil raise
        # TypeError (None input), which previously escaped the handler;
        # treat it like an unparsable date and keep the unsorted list.
        pass
    return records
configuration.api_key['appKeyAuth'] = '**************************' # Enter a context with an instance of the API client with ApiClient(configuration) as api_client: # Create an instance of the API class api_instance = logs_archives_api.LogsArchivesApi(api_client) archive_id = "paxez2aSQs6IF7x8MjTF1A" # str | The ID of the archive. body = LogsListRequest( index="*", limit=1000, query="*", sort=LogsSort("desc"), # Hash identifier of the first log to return in the list, available in a log id attribute. This would be the next log id to start with, enable by removing hashtag before start_at and replace *********** with the next log id. For example: start_at="AQAAAXcHof6osxEV8gAAAABBWGNIb2hmNkFBQ25SSXlpc1RLeTl3QU8" #start_at="***********", time=LogsListRequestTime( _from=dateutil_parser('2021-01-14T20:00:00Z'), timezone="timezone_example", to=dateutil_parser('2021-01-15T20:00:00Z'), ), ) # LogsListRequest | Logs filter # example passing only required values which don't have defaults set try: # Get an archive api_response = api_instance.get_logs_archive( archive_id), api_instance.list_logs(body) pprint(api_response) except ApiException as e: print( "Exception when calling LogsArchivesApi->get_logs_archive: %s\n" % e)
# --------------------------------------------------------------------------------- feedRSS = "" dbName = "feeds" urlfeedrss = list() urlCatFeedrss = list() fluxRSS = list() compteurfeedRSS = 0 cptfeedRSSrestant = 1 # --------------------------------------------------------------------------------- def get_href(links, _type="text/html"): filtered = filter(lambda ln: ln.get("type") == _type, links) return len(filtered) and filtered[0].get("href") or None parse_date = dateutil_parser().parse def coerce_date_str(date_str): parsed = parse_date(date_str) return parsed.strftime("%Y-%m-%dT%H:%M:%SZ") # --------------------------------------------------------------------------------- # server object server = Server() # create database db = server.get_or_create_db(dbName) # #--------------------------------------------------------------------------------- # Lecture fichier OPML # #---------------------------------------------------------------------------------
def update(self):
    """Poll this feed's URL and sync its entries into the database.

    Uses HTTP conditional requests (ETag / Last-Modified) when available
    and reacts to permanent-redirect / gone / not-modified / not-found
    statuses before importing entries.
    """
    ## Check that the feed is enabled
    if self.enabled == False:
        logger.error("Feed not enabled: %s" % (self.name,))
        return

    ## Parse the feed, preferring conditional requests when possible
    if self.etag:
        logger.debug('Scraping %s using etag' % (self.name,))
        response = feedparser.parse(self.feed_url, etag=self.etag)
    elif self.last_modified:
        logger.debug('Scraping %s using last_modified' % (self.name,))
        response = feedparser.parse(self.feed_url, modified=self.last_modified)
    else:
        logger.debug('Scraping %s (full parse)' % (self.name,))
        response = feedparser.parse(self.feed_url)

    ## Update timestamps
    self.last_checked = datetime.now().replace(tzinfo=get_current_timezone())
    if response.has_key('updated'):
        self.last_modified = dateutil_parser(response.get('updated'))

    ## Update feed response fields
    self.bozo = response.get('bozo', False)
    self.bozo_exception = response.get('bozo_exception', '')
    self.encoding = response.get('encoding', '')
    self.etag = response.get('etag', '')
    if response.has_key('href'):
        self.href = response.get('href')
    self.status = response.get('status', 0)
    self.version = response.get('version', '')
    self.save()

    ## Status Checks
    if self.status == 301:
        # The feed was permanently redirected to a new URL.
        # Updating feed_url to request the new URL from now on.
        logger.info("Status 301 - Feed permanantly redirected: %s" % (self.name,))
        logger.info("%s -> %s" % (self.feed_url, response.get('href')))
        self.feed_url = response.get('href')
        self.save()
    elif self.status == 410:
        # The feed is gone.
        # Disabling the feed.
        logger.error("Status 410 - Feed no longer exists: %s" % (self.name,))
        self.enabled = False
        self.save()
        return
    elif self.status == 304:
        # The feed has not changed since the last time it was requested.
        # Stop processing.
        logger.debug("Status 304 - Feed has not changed: %s" % (self.name,))
        return
    elif self.status == 404:
        # The feed_url is incorrect or inaccessible.
        logger.error("Status 404 - Feed inaccessible: %s" % (self.name,))
        return
    else:
        pass

    ## Process entries list
    self.last_cached = datetime.now().replace(tzinfo=get_current_timezone())
    self.save()
    for entry in response.get('entries', []):
        # Some feeds omit ids; fall back to the entry link as uid.
        if not entry.has_key('id'):
            entry['id'] = entry.get('link')
        obj, created = Entry.objects.get_or_create(uid = entry['id'], defaults={
            'author'    : entry.get('author', None),
            'comments'  : entry.get('comments', None),
            'content'   : striptags(entry.get('content', [{'value':''}])[0]['value']),
            'license'   : entry.get('license', None),
            'link'      : entry.get('link'),
            'published' : dateutil_parser(entry.get('published')),
            'publisher' : entry.get('publisher', None),
            'summary'   : striptags(entry.get('summary')),
            'title'     : entry.get('title'),
            'feed'      : self,
        })

        # Additional Processing
        if entry.has_key('created'):
            obj.created = dateutil_parser(entry.get('created'))
        if entry.has_key('expired'):
            obj.expired = dateutil_parser(entry.get('expired'))
        if entry.has_key('updated'):
            # NOTE(review): this stores the 'updated' timestamp into
            # obj.expired — it looks like it should be obj.updated;
            # confirm against the Entry model.
            obj.expired = dateutil_parser(entry.get('updated'))
        obj.save()
# NOTE(review): loop-body fragment — the enclosing websocket/read loop's
# header is outside this chunk; each `continue` targets that loop.
try:
    message_dict = json.loads(data)
except json.decoder.JSONDecodeError as e:
    print(f'JSON error for killmail - [{e}]')
    continue

# Extract the data we received from the server
killmail = message_dict['killmail']

# Only keep kills that happened in a watched region.
region_id = lookup.get_solarsystem_region(killmail['solar_system_id'])
if region_id not in WATCH_REGIONS:
    continue

# Time: discard killmails older than 45 minutes (compare as aware UTC).
killmail_time = dateutil_parser(killmail['killmail_time'])
oldest_date = datetime.now() - timedelta(minutes=45)
oldest_date = pytz.utc.localize(oldest_date)
if killmail_time < oldest_date:
    print(f'Too old {killmail_time} < {oldest_date}')
    continue
time_string = f'{killmail_time.year}-{killmail_time.month:02}-{killmail_time.day:02} {killmail_time.hour:02}:{killmail_time.minute:02}'

# Normalize entity-name maps: JSON object keys arrive as strings,
# convert them back to integer ids.
names = message_dict['names']
names['character_ids'] = dict_string_keys_to_int(names['character_ids'])
names['corporation_ids'] = dict_string_keys_to_int(
    names['corporation_ids'])
names['alliance_ids'] = dict_string_keys_to_int(names['alliance_ids'])

# Eve static data dump lookups
killmail_id = killmail['killmail_id']
def iso2human(value):
    """Render an ISO date/time string as e.g. 'Jan 05, 2021'."""
    parsed = dateutil_parser(value)
    return parsed.strftime('%b %d, %Y')
# Suppress TLS certificate warnings: the PSM endpoint is typically
# reached with a self-signed certificate in this example.
urllib3.disable_warnings(
    requests.packages.urllib3.exceptions.InsecureRequestWarning)

# Enter a context with an instance of the API client
with pensando_dss.psm.ApiClient(configuration) as api_client:
    # Create an instance of the API class
    api_instance = network_v1_api.NetworkV1Api(api_client)
    # str | Name of the object, unique within a Namespace for scoped
    # objects. Must start and end with alpha numeric and can have
    # alphanumeric, -, _, . Length 2-64. (optional)
    o_name = "O.name_example"
    # str | Tenant the object belongs to; often auto-filled from the
    # creating user's tenant. Alpha-numerics, length 1-48. (optional)
    o_tenant = "O.tenant_example"
    # str | Namespace of the object, for scoped objects. Length 2-64.
    # (optional)
    o_namespace = "O.namespace_example"
    # str | GenerationID, incremented on any update to user intent.
    # System generated, not updatable by user. (optional)
    o_generation_id = "O.generation-id_example"
    # str | Resource version in the object store, updated on any change.
    # System generated, not updatable by user. (optional)
    o_resource_version = "O.resource-version_example"
    # str | UUID, generated on creation. System generated, not updatable
    # by user. (optional)
    o_uuid = "O.uuid_example"
    # datetime | CreationTime of the object. System generated and
    # updated, not updatable by user. (optional)
    o_creation_time = dateutil_parser(
        '1970-01-01T00:00:00.00Z'
    )
    # datetime | ModTime, last modification time of the object. System
    # generated and updated, not updatable by user. (optional)
    o_mod_time = dateutil_parser(
        '1970-01-01T00:00:00.00Z'
    )
    # str | SelfLink for accessing this object; the URI path when served
    # from the API-GW, e.g. "/v1/tenants/tenants/tenant2". System
    # generated and updated, not updatable by user. (optional)
    o_self_link = "O.self-link_example"
    # str | LabelSelector to select on labels in list or watch results.
    # (optional)
    label_selector = "label-selector_example"
    # str | FieldSelector to select on field values in list or watch
    # results. (optional)
    field_selector = "field-selector_example"
    # [str] | FieldChangeSelector: generate a watch notification on
    # change in the specified field(s). (optional)
    field_change_selector = [
        "field-change-selector_example",
    ]
    # int | From: start index (1 based) of the results list; the range
    # returned is [from ... (from + (max-results - 1))]. From = 0 means
    # return all results without pagination. (optional)
    _from = 1
    # int | MaxResults: maximum number of results per page; above the
    # server maximum is an error, 0 means return everything. (optional)
    max_results = 1
    # str | order to sort List results in. (optional)
    sort_order = "sort-order_example"
    # bool | If MetaOnly is true, watch notifications carry only the
    # object identity and what changed, not the full object. Unset by
    # default. (optional)
    meta_only = True