def parse_query_value(query_str):
    """Parse *query_str* into a Delorean datetime.

    Supports the keywords ``localnow``/``localtoday`` (local clock),
    ``now``/``today`` (UTC), relative offsets beginning with ``+``/``-``,
    numeric epoch timestamps (seconds, falling back to milliseconds),
    and free-form datetime strings.  Returns ``None`` when the input
    cannot be parsed.
    """
    try:
        query_str = str(query_str).strip('"\' ')
        if query_str.startswith('localnow'):
            d = get_offsetted_time(datetime.now(), query_str)
        elif query_str.startswith('localtoday'):
            d = get_offsetted_time(
                datetime.now().replace(hour=0, minute=0, second=0,
                                       microsecond=0),
                query_str)
        elif query_str.startswith('now'):
            d = get_offsetted_time(datetime.utcnow(), query_str)
        elif query_str.startswith('today'):
            d = get_offsetted_time(
                datetime.utcnow().replace(hour=0, minute=0, second=0,
                                          microsecond=0),
                query_str)
        elif query_str and query_str[0] in ("+", "-"):
            # Bare offset such as "+2d3h" is relative to UTC now.  The
            # emptiness guard prevents an IndexError on "" that the
            # outer (TypeError, ValueError) handler would not catch.
            d = get_offsetted_time(datetime.utcnow(), query_str)
        else:
            # Parse datetime string or timestamp
            try:
                ts = float(query_str)
                try:
                    d = epoch(ts)
                except (ValueError, OverflowError, OSError):
                    # Out-of-range seconds value: retry as milliseconds.
                    # Narrow exception types replace the old bare
                    # ``except`` so genuine bugs still propagate.
                    d = epoch(ts / 1000)
            except ValueError:
                d = parse(str(query_str))
    except (TypeError, ValueError):
        d = None
    return d
def get_offsetted_time(dt, query_str):
    """Apply a +/- offset suffix (e.g. ``+1d2h30m``) found in *query_str* to *dt*.

    Returns ``epoch(dt)`` unchanged when no offset marker is present.
    """
    offset_match = re.compile(r".*?(?P<action>[\+\-])(?P<policy>.+)").match(query_str)
    if offset_match is None:
        return epoch(dt)
    sign = offset_match.group("action")
    spec = offset_match.group("policy")
    # One optional group per unit: days, hours, minutes, seconds.
    unit_re = re.compile(
        r"(?:(?P<days>\d+)d)?"
        r"(?:(?P<hours>\d+)h)?"
        r"(?:(?P<minutes>\d+)m)?"
        r"(?:(?P<seconds>\d+)s)?",
        re.I,
    )
    parts = unit_re.match(spec)
    if parts:
        kwargs = {name: int(value)
                  for name, value in parts.groupdict().items() if value}
        delta = timedelta(**kwargs)
        return epoch(dt - delta if sign == "-" else dt + delta)
    return epoch(dt)
def parse_query_value(query_str):
    """Return a Delorean value for *query_str*.

    Understands 'now' (UTC), strings starting with 'tz' (current local
    time with DST correction), numeric epoch timestamps, and datetime
    strings.  Returns None on unparseable input.
    """
    try:
        query_str = str(query_str).strip('"\' ')
        if query_str == 'now':
            return utcnow()
        if query_str.startswith('tz'):
            global _offset
            # Correction for the local timezone and DST.
            _offset = local_time_offset()
            # Delorean built from the locally adjusted current epoch.
            return epoch(float(time.time() + _offset))
        # Fall back to a numeric timestamp, then a datetime string.
        try:
            return epoch(float(query_str))
        except ValueError:
            return parse(str(query_str))
    except (TypeError, ValueError):
        return None
def checkin_time_extractor_hard(checkin_entry):
    """Bucket a checkin's local hour into a coarse time-of-day label.

    Returns the ("timeOfDay", [label]) feature pair.
    """
    # Shift the timestamp by the entry's minute offset to get local time.
    local = (epoch(checkin_entry["timestamp"])
             + timedelta(minutes=checkin_entry["timeZoneOffset"]))
    hour = local.datetime.hour
    # Ordered (upper bound, label) buckets; hours >= 22 wrap to NIGHT.
    buckets = (
        (2, "NIGHT"),
        (6, "LATENIGHT"),
        (10, "MORNING"),
        (14, "NOON"),
        (18, "AFTERNOON"),
        (22, "EVENING"),
    )
    label = "NIGHT"
    for upper, name in buckets:
        if hour < upper:
            label = name
            break
    return "timeOfDay", [label]
def add_track_rss_entry(
    self,
    feed: FeedGenerator,
    track: dict,
    username: str,
    tz: str = 'America/New_York',
):
    """
    Add a new RSS entry for the track to the feed.

    track is the Last.fm response to
    user.getRecentTracks(...)['recenttracks']['track'][i].

    The entry title is "<artist> - <track>", optionally suffixed with
    the user's play count.  The publish time converts the Last.fm UTC
    epoch ('date.uts') into *tz*.  If the track carries images, the
    largest (last) image is attached as an enclosure; this issues a
    HEAD request to learn its size and content type.
    """
    entry = feed.add_entry()
    title = f"{track['artist']['#text']} - {track['name']}"
    playcount = self.get_playcount(username, track['name'], track['artist']['#text'])
    if playcount:
        # e.g. " (3 plays)" / " (1 play)"
        title += f" ({playcount} play{'s' if playcount > 1 else ''})"
    entry.title(title)
    entry.guid(mkguid(username, track))
    entry.link(href=track['url'])
    # 'uts' is epoch seconds (UTC); shift into the feed's timezone.
    entry.published(
        delorean.epoch(int(track['date']['uts'])).shift(tz).datetime)
    if 'image' in track and len(track['image']) >= 1:
        # Last image is presumably the largest size — confirm against
        # the Last.fm API ordering.
        url = track['image'][-1]['#text'].strip()
        if url:
            r = head(url)
            entry.enclosure(url, r.headers['Content-Length'],
                            r.headers['Content-Type'])
def parse_query_value(query_str):
    """Return a Delorean for *query_str*.

    'now' is the current time; 'y', 'yy', ... step back one day per 'y'
    from today's midnight; 't' is today's midnight, 'tt' tomorrow's, and
    so on.  Numeric values are epoch seconds (milliseconds when >= 1e12);
    anything else is parsed as a datetime string.  None on failure.
    """
    try:
        query_str = str(query_str).strip('"\' ')
        if query_str == 'now':
            return Delorean(timezone=tz)
        if query_str.startswith('y'):
            # One day back per repeated 'y', from today's midnight.
            midnight = Delorean(Delorean(timezone=tz).midnight)
            return midnight - timedelta(days=len(query_str))
        if query_str.startswith('t'):
            # 't' = today, 'tt' = tomorrow, ...
            midnight = Delorean(Delorean(timezone=tz).midnight)
            return midnight + timedelta(days=len(query_str) - 1)
        # Parse datetime string or timestamp.
        try:
            ts = float(query_str)
        except ValueError:
            return parse(str(query_str), tz, dayfirst=False)
        if ts >= 1000000000000:
            # Millisecond timestamps detected by magnitude.
            ts /= 1000
        d = epoch(float(ts))
        d.shift(tz)
        return d
    except (TypeError, ValueError):
        return None
def parse(self, response):
    """Parse a JSON article listing into feed entry items.

    Each article in the response body becomes one FeedEntryItemLoader
    item carrying title, link, HTML content, update time, authors,
    categories, and the locale path from the request meta.
    """
    articles = json.loads(response.text)
    for article in articles:
        il = FeedEntryItemLoader()
        il.add_value('title', article['title'])
        il.add_value('link', article['url'])
        # Prepend the square thumbnail (if present) to the body HTML.
        if 'thumbnail_url_1_1' in article:
            il.add_value(
                'content_html',
                '<img src="{}">'.format(article['thumbnail_url_1_1']))
        il.add_value('content_html', article['body'])
        # publish_date is in milliseconds since the epoch.
        il.add_value('updated',
                     delorean.epoch(article['publish_date'] / 1000))
        il.add_value('author_name', [
            contribution['contributor']['full_name']
            for contribution in article['contributions']
        ])
        il.add_value('category', article['channel']['name'])
        # Topics plus the primary topic; either may be None / lack 'name'.
        for topic in article['topics'] + [article['primary_topic']]:
            if topic and 'name' in topic:
                il.add_value('category', topic['name'].title())
        # Content flags become extra categories.
        if article['nsfw']:
            il.add_value('category', 'nsfw')
        if article['nsfb']:
            il.add_value('category', 'nsfb')
        il.add_value('path', response.meta['locale'])
        yield il.load_item()
def trades_consumer_function(message, db):
    """Persist one trade message from the queue into the database.

    *message* is a JSON string with price/volume/exchange/timestamp
    fields; *db* is the active DB session.  Duplicate trades are
    silently accepted (see the IntegrityError handling below).
    """
    # Touch the heartbeat file so monit knows this consumer is alive.
    subprocess.call(['touch', 'monit/heartbeat/trades_consumer.txt'])
    trade_json = json.loads(message)
    # Timestamp arrives as epoch seconds; store as a datetime.
    timestamp = epoch(trade_json['timestamp']).datetime
    # Currencies default to USD/BTC when the message omits them.
    price_currency = trade_json.get('price_currency', 'USD')
    volume_currency = trade_json.get('volume_currency', 'BTC')
    t = Trade(
        price=Money(trade_json['price'], price_currency),
        volume=Money(trade_json['volume'], volume_currency),
        exchange=unicode(trade_json['exchange']),
        timestamp=timestamp,
        exchange_trade_id=unicode(trade_json['trade_id']),
    )
    db.add(t)
    try:
        session.commit_mysql_session(db)
    except exc.IntegrityError as e:
        # We will get a duplicate entry error from the database if we happen to stop
        # the bot after we write the entry but before we acknowlege it to the queue.
        # This will cause an infinite loop of errors where we keep failing to write the
        # same entry. In this case we can successfully ack the message because we
        # already have it stored.
        if 'Duplicate entry' in str(e):
            return
        else:
            raise e
def get_timestamp(self, item):
    '''
    Get timestamp, convert to x days, y hours, z minutes ago format
    to avoid fancy footwork on timezones. (Screw timezones.)
    '''
    # Delorean for the item's epoch timestamp.
    unix_timestamp = epoch(float(item['ts']))
    # Current time as an epoch float, re-wrapped as a Delorean so both
    # sides of the subtraction are the same type.
    datetime_now = Delorean().epoch
    unix_now = epoch(datetime_now)
    # NOTE(review): relies on Delorean subtraction producing a
    # timedelta-like object with .days/.seconds — confirm.
    time_since = unix_now - unix_timestamp
    days = time_since.days
    seconds = time_since.seconds
    hours, remainder = divmod(seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    time_since = '{0} days, {1} hours, {2} minutes ago'.format(days, hours, minutes)
    return time_since
def period(self, step='1d', sid=''):
    """Fetch price points and map naive datetimes to BTC Money values.

    Twisted inlineCallbacks-style generator: yields the request
    deferred and delivers an OrderedDict via defer.returnValue.
    ``sid`` is accepted for interface compatibility but unused here.
    """
    from twisted.internet import defer
    points = yield self.req(self.create_url(step))
    if not points:
        defer.returnValue(OrderedDict())
    point_hash = OrderedDict()
    for p in points:
        # p[0] is an epoch timestamp; p[7] is presumably the BTC
        # amount field — confirm against the upstream API schema.
        point_hash[epoch(p[0]).naive] = Money(str(p[7]), 'BTC')
    defer.returnValue(point_hash)
def checkin_day_extractor(checkin_entry):
    """Return the local weekday name for a checkin as ("dayOfWeek", [name])."""
    # Apply the entry's minute offset so the weekday is venue-local.
    local_time = epoch(checkin_entry["timestamp"]) + timedelta(
        minutes=checkin_entry["timeZoneOffset"])
    return "dayOfWeek", [local_time.datetime.strftime("%A")]
def get_waf_metrics():
    """Collect ~60 seconds of Cloudflare WAF events and export metrics.

    Pages through the firewall events API until an event older than the
    60-second window is seen, then hands the records to wafexporter.
    Aborts collection (still exporting what was gathered) if it takes
    longer than ~55 seconds.  Returns '' on API failure.
    """
    path_format = '%szones/%s/firewall/events?per_page=50%s'
    zone_id = get_zone_id()
    # Window runs backwards: start is "now", end is 60s in the past.
    window_start_time = delorean.now().epoch
    window_end_time = window_start_time - 60
    records = []
    # '' fetches the first page; a non-None value keeps the loop going.
    next_page_id = ''
    logging.info('Fetching WAF event data starting at %s, going back 60s'
                 % delorean.epoch(window_start_time).format_datetime())
    while next_page_id is not None:
        url = path_format % (ENDPOINT, zone_id, next_page_id)
        r = get_data_from_cf(url=url)
        if 'success' not in r or not r['success']:
            logging.error('Failed to get information from Cloudflare')
            for error in r['errors']:
                logging.error('[%s] %s' % (error['code'], error['message']))
            return ''
        if r['result_info']['next_page_id']:
            next_id = r['result_info']['next_page_id']
            logging.debug('Set next_page_id to %s' % next_id)
            next_page_id = ('&next_page_id=%s' % next_id)
        else:
            # Last page: stop after processing its events.
            next_page_id = None
        for event in r['result']:
            occurred_at = event['occurred_at']
            occurrence_time = delorean.parse(occurred_at).epoch
            logging.debug('Occurred at: %s (%s)' % (occurred_at, occurrence_time))
            if occurrence_time <= window_end_time:
                # Presumably events arrive newest-first, so crossing the
                # window boundary means nothing newer remains — confirm
                # the API's ordering.
                logging.debug('Window end time reached, breaking')
                next_page_id = None
                break
            logging.debug('Adding WAF event')
            records.append(event)
        now = delorean.now().epoch
        logging.info('%d WAF events found (took %g seconds so far)'
                     % (len(records), now - window_start_time))
        if now - window_start_time > 55:
            # Safety valve: don't let a burst of events starve the exporter.
            logging.warn('Too many WAF events, skipping (metrics affected)')
            next_page_id = None
    return wafexporter.process(records)
def extract_rate_limit(self, response):
    """Extract rate limit info from response/headers.

    The rate-limit request's payload is more trustworthy than its
    headers for /search/tweets, so the payload is preferred and the
    headers serve as the fallback for other request types.
    """
    try:
        limits = response.data['resources']['search']['/search/tweets']
        self.rate_limit_remaining = limits['remaining']
        self.rate_limit_limit = limits['limit']
        self.rate_limit_reset = epoch(limits['reset']).datetime
    except (KeyError, TypeError):
        headers = response.headers
        self.rate_limit_remaining = int(headers['x-rate-limit-remaining'])
        self.rate_limit_limit = int(headers['x-rate-limit-limit'])
        self.rate_limit_reset = epoch(int(headers['x-rate-limit-reset'])).datetime
    # Server's own clock, used alongside the reset time downstream.
    self.twitter_date = parse(response.headers['date']).datetime
    logging.debug(
        'Twitter rate limit info:: rate-limit: %s, remaining: %s, '\
        'reset: %s, current-time: %s' % (self.rate_limit_limit,
        self.rate_limit_remaining, self.rate_limit_reset, self.twitter_date))
def period(self, start_date, end_date, step='1d', sid=''):
    """Return an OrderedDict of naive datetime -> BTC Money for the period.

    Uses the caching request path when the date range qualifies.
    ``sid`` is accepted for interface compatibility but unused here.
    """
    url = self.create_url(step)
    if self.should_cache_this_result(start_date, end_date):
        points = self.req_with_cache(url)
    else:
        points = self.req(url)
    result = OrderedDict()
    # Empty/None responses yield an empty mapping.
    for point in points or []:
        result[epoch(point[0]).naive] = Money(str(point[7]), 'BTC')
    return result
def _get_orderbook_from_api_resp(self, req):
    """Parse an orderbook API response, rejecting stale books.

    Raises ExchangeAPIErrorException when the book's timestamp is more
    than ten minutes in the past.
    """
    order_book = self.resp(req)
    book_time = epoch(int(order_book['timestamp']))
    # Refuse to act on stale market data.
    if book_time < Delorean().last_minute(10):
        raise exceptions.ExchangeAPIErrorException(
            self,
            'Orderbook is more than 10 minutes old',
        )
    return order_book
def convert_to_datetime_desc(timestamp):
    """Render a millisecond epoch timestamp as a Korean relative-time string.

    Falls back to a 'YYYY.MM.DD' date for anything a week old or older.
    """
    from delorean import epoch
    date = epoch(timestamp / 1000.0).date
    result = '%04d.%02d.%02d' % (date.year, date.month, date.day)
    elapsed_ms = get_timestamp() - timestamp
    minute_ms = 60 * 1000
    hour_ms = 60 * minute_ms
    day_ms = 24 * hour_ms
    if elapsed_ms < minute_ms:
        result = '방금'  # "just now"
    elif elapsed_ms < hour_ms:
        result = '%d분전' % int(elapsed_ms / minute_ms)  # N minutes ago
    elif elapsed_ms < day_ms:
        result = '%d시간전' % int(elapsed_ms / hour_ms)  # N hours ago
    elif elapsed_ms < 7 * day_ms:
        result = '%d일전' % int(elapsed_ms / day_ms)  # N days ago
    return result
def parse_query_value(query_str):
    """Return a Delorean (shifted to the local timezone) for *query_str*.

    Accepts 'now', an epoch timestamp, or a datetime string; returns
    None when the value cannot be interpreted.
    """
    try:
        cleaned = str(query_str).strip('"\' ')
        if cleaned == 'now':
            return now().shift(local_tz)
        # Numeric timestamp first, then free-form datetime string.
        try:
            return epoch(float(cleaned)).shift(local_tz)
        except ValueError:
            return parse(str(cleaned), timezone=local_tz)
    except (TypeError, ValueError):
        return None
def parse_query_value(query_str):
    """Return a UTC Delorean for *query_str*.

    'now' means the current time; otherwise the value is tried as an
    epoch timestamp, then as a datetime string.  None on failure.
    """
    try:
        text = str(query_str).strip('"\' ')
        if text == 'now':
            return utcnow()
        try:
            return epoch(float(text))
        except ValueError:
            return parse(str(text))
    except (TypeError, ValueError):
        return None
def extract_rate_limit(self, response):
    """Extract rate limit info from response/headers.

    Reads only the headers, so the values always correspond to the type
    of request just made.  Responses without rate-limit headers are
    ignored.
    """
    headers = response.headers
    try:
        self.rate_limit_remaining = int(headers['x-rate-limit-remaining'])
        self.rate_limit_limit = int(headers['x-rate-limit-limit'])
        self.rate_limit_reset = epoch(int(headers['x-rate-limit-reset'])).datetime
        self.twitter_date = parse(headers['date']).datetime
        logging.debug(
            'Twitter rate limit info:: rate-limit: %s, remaining: %s'
            % (self.rate_limit_limit, self.rate_limit_remaining))
    except KeyError:
        # No rate-limit headers on this response; leave state unchanged.
        pass
def parse_query_value(query_str):
    """Return a UTC Delorean for *query_str*.

    'now' means the current time.  A 13-digit value is treated as epoch
    milliseconds (a decimal point is inserted so the fraction is kept);
    other numerics are epoch seconds; anything else is parsed as a
    datetime string.  None on failure.
    """
    try:
        text = str(query_str).strip('"\' ')
        if text == 'now':
            return utcnow()
        try:
            if text.isdigit() and len(text) == 13:
                # 13 digits = milliseconds; keep the sub-second part.
                text = '%s.%s' % (text[:10], text[10:])
            return epoch(float(text))
        except ValueError:
            return parse(str(text))
    except (TypeError, ValueError):
        return None
def parse_query_value(query_str, tf):
    """Return a Delorean shifted into timezone *tf* for *query_str*.

    'now' gives the current time; otherwise epoch timestamps and then
    datetime strings are tried.  None on failure.
    """
    try:
        text = str(query_str).strip('"\' ')
        if text == 'now':
            return Delorean(timezone=tf)
        try:
            d = epoch(float(text))
        except ValueError:
            d = parse(str(text))
        # Move the value into the requested timezone (return value of
        # shift intentionally unused, as at the original call sites).
        d.shift(tf)
        return d
    except (TypeError, ValueError):
        return None
def parse_query_value(query_str):
    """Return a UTC Delorean for *query_str*.

    'now' means the current time.  A 13-digit numeric value is treated
    as a millisecond epoch timestamp and scaled to seconds; other
    numerics are epoch seconds; anything else is parsed as a datetime
    string.  Returns None on unparseable input.
    """
    try:
        query_str = str(query_str).strip('"\' ')
        if query_str == 'now':
            d = utcnow()
        else:
            # Parse datetime string or timestamp
            try:
                value = float(query_str)
                if query_str.isdigit() and len(query_str) == 13:
                    # Millisecond timestamp: float division keeps the
                    # sub-second part (the old ``int(...) / int('1000')``
                    # truncated it) and the isdigit() guard keeps 13-char
                    # date strings flowing to the parse() fallback
                    # instead of raising outside the inner try.
                    value /= 1000.0
                d = epoch(value)
            except ValueError:
                d = parse(str(query_str))
    except (TypeError, ValueError):
        d = None
    return d
def parse_query_value(query_str):
    """Return a Delorean shifted to Asia/Shanghai for *query_str*.

    'now' means the current time; numeric values are epoch seconds;
    other strings are parsed as datetimes.  None on failure.
    """
    d = None
    try:
        text = str(query_str).strip('"\' ')
        if text == 'now':
            d = utcnow()
        else:
            try:
                d = epoch(float(text))
            except ValueError:
                d = parse(str(text))
    except (TypeError, ValueError):
        d = None
    # Present everything in local (Shanghai) time.
    return d.shift('Asia/Shanghai') if d else d
def parse_query_value(query_str):
    """Return a UTC Delorean for *query_str*.

    'now' means the current time.  A 13-digit value is treated as epoch
    milliseconds; other numerics are epoch seconds; anything else is
    parsed as a datetime string.  Returns None on failure.
    """
    try:
        query_str = str(query_str).strip('"\' ')
        if query_str == 'now':
            d = utcnow()
        else:
            # Parse datetime string or timestamp
            try:
                if query_str.isdigit() and len(query_str) == 13:
                    # Millisecond timestamp: insert the decimal point so
                    # the sub-second part is preserved instead of being
                    # truncated by the old ``query_str[:10]`` slice.
                    query_str = query_str[:10] + '.' + query_str[10:]
                d = epoch(float(query_str))
            except ValueError:
                d = parse(str(query_str))
    except (TypeError, ValueError):
        d = None
    return d
def exchange_volumes_consumer_function(message, db):
    """Persist one exchange-volume message from the queue into the DB."""
    # Touch the heartbeat file so monit knows this consumer is alive.
    subprocess.call(['touch', 'monit/heartbeat/exchange_volumes_consumer.txt'])
    payload = json.loads(message)
    row = ExchangeVolume(
        exchange_volume=Money(payload['volume'], 'BTC'),
        exchange=payload['exchange_name'],
        timestamp=epoch(payload['timestamp']).datetime,
    )
    db.add(row)
    session.commit_mysql_session(db)
def orderbook_consumer_function(message, db):
    """Persist one orderbook snapshot message from the queue into the DB."""
    # Heartbeat for monit.
    subprocess.call(["touch", "monit/heartbeat/orderbook_consumer.txt"])
    payload = json.loads(message)
    # Expected shape: {'timestamp': ..., '<exchange_name>': <orderbook>}.
    assert len(payload.keys()) == 2
    exchange_name = list(set(payload.keys()) - set(['timestamp'])).pop()
    record = Orderbook(
        exchange_name,
        orderbook=payload[exchange_name],
        timestamp=epoch(payload['timestamp']).datetime,
    )
    db.add(record)
    session.commit_mysql_session(db)
def get_historical_trades(exchange, price_currency, volume_currency='BTC'):
    """Download and parse gzipped CSV trade history for an exchange.

    Returns a list of [datetime, price Money, volume Money] rows.
    CSV columns are: epoch timestamp, price, volume.
    """
    r = requests.get(historical_data_url % (exchange, price_currency))
    # The payload is gzip-compressed; wrap it in a file-like object so
    # GzipFile can decompress it in memory.
    compressedFile = StringIO.StringIO()
    compressedFile.write(r.content)
    compressedFile.seek(0)
    decompressedFile = gzip.GzipFile(fileobj=compressedFile, mode='rb')
    try:
        reader = csv.reader(decompressedFile)
        trades = []
        for row in reader:
            # Drop tzinfo — presumably downstream storage expects naive
            # datetimes; confirm against the callers.
            timestamp = epoch(int(row[0])).datetime.replace(tzinfo=None)
            price = Money(row[1], price_currency)
            volume = Money(row[2], volume_currency)
            trades.append([timestamp, price, volume])
        return trades
    finally:
        decompressedFile.close()
def parse_query_value(query_str):
    """Return a UTC Delorean for *query_str*.

    Understands 'now', relative phrases such as 'yesterday', 'N days
    ago', 'last week/month/year' (and their N-units-ago forms), epoch
    timestamps, and datetime strings.  None on unparseable input.
    """
    try:
        text = str(query_str).strip('"\' ')
        if text == 'now':
            return utcnow()
        if text in ('yesterday', '1 day ago'):
            return utcnow().last_day()
        if ' days ago' in text:
            return utcnow().last_day(count_from_query(text))
        if text in ('last week', '1 week ago'):
            return utcnow().last_week()
        if ' weeks ago' in text:
            return utcnow().last_week(count_from_query(text))
        if text in ('last month', '1 month ago'):
            return utcnow().last_month()
        if ' months ago' in text:
            return utcnow().last_month(count_from_query(text))
        if text in ('last year', '1 year ago'):
            return utcnow().last_year()
        if ' years ago' in text:
            return utcnow().last_year(count_from_query(text))
        # Parse datetime string or timestamp.
        try:
            return epoch(float(text))
        except ValueError:
            return parse(str(text))
    except (TypeError, ValueError):
        return None
def parse_query_value(query_str):
    """Return a datetime for *query_str*.

    Supports 'now', relative shifts like '+5m' / '-2d' (via
    shift_time), 13-digit millisecond timestamps, plain epoch
    timestamps, and datetime strings.  None on failure.
    """
    try:
        text = str(query_str).strip('"\' ')
        m = re.match(r'(\+|\-)(\d+)([smhdwMy])|now', text)
        if m is not None:
            if m.group(0) == 'now':
                return datetime.now(tzlocal())
            return shift_time(m.group(1), m.group(2), m.group(3))
        try:
            if text.isdigit() and len(text) == 13:
                # Millisecond timestamp: insert the decimal point.
                text = text[:10] + '.' + text[10:]
            return epoch(float(text)).datetime
        except ValueError:
            return parse(str(text))
    except (TypeError, ValueError):
        return None
def shift_time(op, value, measure):
    """Shift the current UTC epoch by *value* units of *measure*.

    *op* is '+' or '-'; *measure* is one of s/m/h/d/w/M/y.  Months are
    approximated as 30 days and years as 365.  Unknown measures fall
    back to seconds.
    """
    # Dispatch maps instead of if/else chains.
    apply_op = {'+': operator.add, '-': operator.sub}[op]
    seconds_per_unit = {
        'm': 60,
        'h': 60 * 60,
        'd': 60 * 60 * 24,
        'w': 60 * 60 * 24 * 7,
        'M': 60 * 60 * 24 * 30,  # egh.. months are approximate
        'y': 60 * 60 * 24 * 365,
    }
    # Convert our value + measure to seconds ('s' and anything else -> 1).
    offset_seconds = seconds_per_unit.get(measure, 1) * int(value)
    now_ts = calendar.timegm(datetime.now().timetuple())
    return epoch(apply_op(now_ts, offset_seconds))
def parse_query_value(query_str):
    """Return a Delorean shifted into the module-global timezone.

    'now' means the current time; 'now @ <tz>' and '<value> @ <tz>'
    override and remember the timezone for subsequent calls.  Numeric
    values are epoch seconds; other strings are parsed as datetimes.
    None on failure.
    """
    global tz
    try:
        text = str(query_str).strip('"\' ')
        if text == 'now':
            return utcnow().shift(tz)
        if 'now' in text and '@' in text:
            # e.g. "now @ Europe/Paris" — remember the timezone.
            tz = text.split('@')[1].strip()
            return utcnow().shift(tz)
        if '@' in text:
            # "<timestamp or date> @ <tz>" — split value from timezone.
            pieces = text.split('@')
            text = pieces[0].strip()
            tz = pieces[1].strip()
        try:
            return epoch(float(text)).shift(tz)
        except ValueError:
            return parse(str(text)).shift(tz)
    except (TypeError, ValueError):
        return None
def wait(e):
    """Return seconds to sleep until the Twitter rate-limit window resets.

    Computed from the x-rate-limit-reset header relative to the
    server's own 'date' header, plus one second of slack.
    """
    reset_at = epoch(int(e.headers['x-rate-limit-reset'])).datetime
    server_now = parse(e.headers['date']).datetime
    sleep_time = (reset_at - server_now).seconds + 1
    return sleep_time
def _delorean():
    """Return a Delorean built from the module-level ``TIME`` epoch value."""
    return delorean.epoch(TIME)
def wait(e):
    # sleeps until the given time in case of a TwitterRateLimitError
    # The reset moment comes from the x-rate-limit-reset header and
    # "now" from the server's own 'date' header, so local clock skew
    # does not matter; +1 second of slack.
    sleep_time = (epoch(int(e.headers['x-rate-limit-reset'])).datetime -
                  parse(e.headers['date']).datetime).seconds + 1
    print 'sleeping for %s secs' % sleep_time
    sleep(sleep_time)
def test_epoch_creation(self):
    # delorean.epoch should reproduce the reference Delorean
    # (self.do, presumably built in setUp — confirm) from its
    # epoch-seconds representation.
    do = delorean.epoch(1357187474.148546)
    self.assertEqual(self.do, do)
def retry_after_secs(e):
    """Return seconds until the Twitter rate limit resets, plus 1s slack.

    Computed against the server's own 'date' header so local clock skew
    does not matter.
    """
    reset_time = epoch(int(e.headers['x-rate-limit-reset'])).datetime
    server_time = parse(e.headers['date']).datetime
    return (reset_time - server_time).seconds + 1
def test_epoch_creation(self):
    # Building a Delorean from a known epoch value must equal the
    # reference object (self.do, presumably created in setUp — confirm).
    do = delorean.epoch(1357187474.148540)
    self.assertEqual(self.do, do)
def get_time_from_polyline(int_representation):
    ''' int representation to iso time string '''
    # Epoch -> Delorean in UTC -> ISO-8601 string.
    return delorean.epoch(int_representation).shift('UTC').datetime.isoformat()
def get_submissions_between_epochs(start, end, subreddit):
    """Search a subreddit for submissions between two epoch timestamps.

    Uses Reddit's cloudsearch timestamp syntax; returns up to the 1000
    newest matching submissions.
    """
    start_str = epoch(start).datetime.strftime("%x")
    end_str = epoch(end).datetime.strftime("%x")
    logger.info("Running search for range %s->%s" % (start_str, end_str))
    query = "timestamp:%d..%d" % (start, end)
    return pr.search(query, subreddit=subreddit, sort="new", limit=1000,
                     syntax="cloudsearch")