def search(cls, rowkey, when):
    """Fetch the column(s) stored at a single point in time for `rowkey`.

    `when` may be a time-based UUID (converted to its embedded timestamp)
    or any value directly usable as a column bound.  Returns an empty
    dict when the row or column does not exist.
    """
    point = convert_uuid_to_time(when) if isinstance(when, uuid.UUID) else when
    try:
        found = cls._cf.get(rowkey, column_start=point, column_finish=point)
    except tdb_cassandra.NotFoundException:
        found = {}
    return found
def process_ready_items(cls, rowkey, ready_fn):
    """Hand every due item in `rowkey` to `ready_fn`, then clean up.

    Columns are keyed by time-UUIDs encoding when each item becomes
    "ripe"; items whose time is at or before now are collected (in
    column order) and passed to `ready_fn` as an OrderedDict.  Stats
    counters are emitted for both ready and still-pending items.
    Failures in `ready_fn` are logged and counted but do not abort
    cleanup.
    """
    cutoff = datetime.now(g.tz)
    columns = cls._cf.xget(rowkey, include_timestamp=True)

    ready_items = OrderedDict()
    ready_timestamps = []
    unripe_timestamps = []
    for ready_time_uuid, (data, timestamp) in columns:
        ready_time = convert_uuid_to_time(ready_time_uuid)
        ready_datetime = datetime.fromtimestamp(ready_time, tz=g.tz)
        if ready_datetime <= cutoff:
            ready_items[ready_time_uuid] = data
            ready_timestamps.append(timestamp)
        else:
            unripe_timestamps.append(timestamp)

    g.stats.simple_event("trylater.{system}.ready".format(system=rowkey),
                         delta=len(ready_items))
    g.stats.simple_event("trylater.{system}.pending".format(system=rowkey),
                         delta=len(unripe_timestamps))

    if not ready_items:
        return

    try:
        ready_fn(ready_items)
    # BUG FIX: was a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit; narrowed to Exception.
    except Exception:
        error = traceback.format_exc()
        g.log.info(error)
        g.stats.simple_event("trylater.{system}.failed".format(system=rowkey))

    cls.cleanup(rowkey, ready_items, ready_timestamps, unripe_timestamps)
def test_auto_time_to_uuid1(self):
    """A float time key inserted into the TIME_UUID CF should round-trip
    (to millisecond precision) through the stored UUID column name."""
    column_family = TestTimeUUIDs.cf_time
    row = 'key1'
    now = time.time()
    column_family.insert(row, {now: 'foo'})
    stored_uuid = column_family.get(row).keys()[0]
    assert_almost_equal(convert_uuid_to_time(stored_uuid), now, places=3)
    column_family.remove(row)
def get_last_pixel_datetime(cls, user):
    """Return the datetime of `user`'s most recent pixel, or None.

    Reads a single column in reversed order, i.e. the newest time-UUID
    column for the user's row.
    """
    rowkey = cls._rowkey(user)
    try:
        columns = cls._cf.get(rowkey, column_count=1, column_reversed=True)
    except tdb_cassandra.NotFoundException:
        return None
    newest_uuid = columns.keys()[0]
    seconds = convert_uuid_to_time(newest_uuid)
    # NOTE(review): utcfromtimestamp + replace(tzinfo=g.tz) is only
    # correct if g.tz is UTC -- presumably it is; confirm.
    return datetime.utcfromtimestamp(seconds).replace(tzinfo=g.tz)
def insert_pixel(cls, pixel):
    """Persist one pixel as a JSON blob keyed by its (x, y) coordinate."""
    payload = json.dumps({
        "color": pixel.color,
        "timestamp": convert_uuid_to_time(pixel._id),
        "user_name": pixel.user_name,
        "user_fullname": pixel.user_fullname,
    })
    cls._cf.insert(cls._rowkey(), {(pixel.x, pixel.y): payload})
def ymdhm_from_uuid1(uuid1_value):
    """Return 'YYYYMMDDHHMM' for the UTC time embedded in a v1 UUID.

    `datetime.date.fromtimestamp()` is deliberately avoided here
    because it interprets the timestamp in local time rather than UTC.
    """
    moment = utc_timestamp2datetime(convert_uuid_to_time(uuid1_value))
    return "{0:04d}{1:02d}{2:02d}{3:02d}{4:02d}".format(
        moment.year, moment.month, moment.day, moment.hour, moment.minute)
def get_by_keyword(self, keyword, column_start='', column_finish='',
                   count=30, action_next=None, action_prev=None):
    """Fetch up to `count` entries for `keyword` with cursor pagination.

    `action_next` / `action_prev` are mutually exclusive microsecond
    cursors (converted to seconds for the column query).  Returns
    (entries, return_prev, return_next) where the cursors are
    microsecond ints or None when there is no further page.
    Raises AttributeError if both cursors are supplied.
    """
    if action_next and action_prev:
        raise AttributeError(
            'action_next and action_prev is mutually exclusive')

    # Default ordering: newest-first unless an explicit start is given.
    if column_start:
        column_reversed = False
    else:
        column_reversed = True

    if action_next:
        # Cursors are microsecond ints; the CF is keyed in seconds.
        action_next = action_next / 1000000
        column_start = action_next
        column_reversed = False
    elif action_prev:
        action_prev = action_prev / 1000000
        if column_start:
            column_finish = column_start / 1000000
        column_start = action_prev
        column_reversed = True

    try:
        result = self.by_keyword.get(keyword,
                                     column_reversed=column_reversed,
                                     column_count=count,
                                     column_start=column_start,
                                     column_finish=column_finish)
    except NotFoundException:
        return [], None, None

    keys = map(lambda x: int(convert_uuid_to_time(x) * 1000000),
               result.keys())
    entries = map(json.loads, result.values())
    if column_reversed:
        # Present entries oldest-first regardless of query direction.
        keys.reverse()
        entries.reverse()

    # BUG FIX: an empty (but found) result previously raised IndexError
    # on keys[-1]/keys[0] in the cursor branches below.
    if not entries:
        return entries, None, None

    if len(entries) == count:
        # Full page: there may be more in both directions.
        return_next = keys[-1] + 1
        return_prev = keys[0] - 1
    elif action_prev:
        return_next = keys[-1] + 1
        return_prev = None
    elif action_next:
        return_next = None
        return_prev = keys[0] - 1
    else:
        # BUG FIX: a short first page (no cursors) previously fell
        # through with return_next/return_prev unbound, raising
        # NameError at the return below.
        return_next = None
        return_prev = None

    return entries, return_prev, return_next
def _deserialize_date(cls, val):
    """Deserialize a stored date value into a UTC-aware datetime.

    Accepts several historical on-disk encodings:
    - datetime: used as-is;
    - UUID: converted via convert_uuid_to_time;
    - 8-byte packed value: cassandra's integer date format;
    - anything else: legacy stringified seconds-since-epoch.
    """
    if isinstance(val, datetime):
        date = val
    elif isinstance(val, UUID):
        # NOTE(review): this branch returns a raw float timestamp, not a
        # tz-aware datetime like every other branch -- confirm whether
        # callers depend on this inconsistency.
        return convert_uuid_to_time(val)
    elif len(val) == 8:
        # cassandra uses 8-byte integer format for this
        date = date_serializer.unpack(val)
    else:
        # it's probably the old-style stringified seconds since epoch
        as_float = float(val)
        date = datetime.utcfromtimestamp(as_float)
    return date.replace(tzinfo=pytz.utc)
def get_gildings(cls, date):
    """Return the gilding records stored under `date`'s row.

    Each record is the stored JSON blob augmented with a tz-aware
    "date" (from the column's time-UUID) and "user" decoded from its
    base-36 representation.
    """
    columns = cls.get_time_sorted_columns(cls._rowkey(date))
    gildings = []
    for column_uuid, json_blob in columns.iteritems():
        gilding = json.loads(json_blob)
        seconds = convert_uuid_to_time(column_uuid)
        gilding["date"] = datetime.utcfromtimestamp(seconds).replace(
            tzinfo=g.tz)
        gilding["user"] = int(gilding["user"], 36)
        gildings.append(gilding)
    return gildings
def in_display_order(cls, system_name, subject):
    """Return payloads for (system_name, subject) in reversed column order.

    Each payload is the stored JSON blob with a tz-aware 'when' added,
    derived from the column's time-UUID.  An absent row yields [].
    """
    key = cls._rowkey(system_name, subject)
    try:
        query = cls._cf.get(key, column_reversed=True)
    except tdb_cassandra.NotFoundException:
        return []

    entries = []
    for column_uuid, json_blob in query.iteritems():
        entry = json.loads(json_blob)
        entry['when'] = datetime.fromtimestamp(
            convert_uuid_to_time(column_uuid), tz=g.tz)
        entries.append(entry)
    return entries
def get_by_keyword(self, keyword, column_start='', column_finish='',
                   count=30, action_next=None, action_prev=None):
    """Fetch up to `count` entries for `keyword` with cursor pagination.

    `action_next` / `action_prev` are mutually exclusive microsecond
    cursors (converted to seconds for the column query).  Returns
    (entries, return_prev, return_next) where the cursors are
    microsecond ints or None when there is no further page.
    Raises AttributeError if both cursors are supplied.
    """
    if action_next and action_prev:
        raise AttributeError(
            'action_next and action_prev is mutually exclusive')

    # Default ordering: newest-first unless an explicit start is given.
    if column_start:
        column_reversed = False
    else:
        column_reversed = True

    if action_next:
        # Cursors are microsecond ints; the CF is keyed in seconds.
        action_next = action_next / 1000000
        column_start = action_next
        column_reversed = False
    elif action_prev:
        action_prev = action_prev / 1000000
        if column_start:
            column_finish = column_start / 1000000
        column_start = action_prev
        column_reversed = True

    try:
        result = self.by_keyword.get(keyword,
                                     column_reversed=column_reversed,
                                     column_count=count,
                                     column_start=column_start,
                                     column_finish=column_finish)
    except NotFoundException:
        return [], None, None

    keys = map(lambda x: int(convert_uuid_to_time(x) * 1000000),
               result.keys())
    entries = map(json.loads, result.values())
    if column_reversed:
        # Present entries oldest-first regardless of query direction.
        keys.reverse()
        entries.reverse()

    # BUG FIX: an empty (but found) result previously raised IndexError
    # on keys[-1]/keys[0] in the cursor branches below.
    if not entries:
        return entries, None, None

    if len(entries) == count:
        # Full page: there may be more in both directions.
        return_next = keys[-1] + 1
        return_prev = keys[0] - 1
    elif action_prev:
        return_next = keys[-1] + 1
        return_prev = None
    elif action_next:
        return_next = None
        return_prev = keys[0] - 1
    else:
        # BUG FIX: a short first page (no cursors) previously fell
        # through with return_next/return_prev unbound, raising
        # NameError at the return below.
        return_next = None
        return_prev = None

    return entries, return_prev, return_next
def get_date_count(self, keyword, end_date='', count=100):
    """Bucket up to `count` entries for `keyword` into per-hour counts.

    `end_date` is a microsecond cursor (converted to seconds).  Returns
    (counts, next_cursor): counts is a list of (hour string, count)
    pairs newest-first; next_cursor is a microsecond int just before the
    oldest scanned column, or None when nothing was found.
    """
    if end_date:
        # Microsecond cursor -> seconds, matching the CF's column keys.
        end_date = end_date / 1000000
    try:
        result = self.by_keyword.get(keyword, column_start=end_date,
                                     column_count=count,
                                     column_reversed=True)
    except NotFoundException:
        return [], None

    hour_buckets = defaultdict(int)
    timestamp = None
    for u, value in result.items():
        timestamp = convert_uuid_to_time(u)
        date = datetime.datetime.fromtimestamp(timestamp)
        # Truncate to the containing hour.
        hours = datetime.datetime(date.year, date.month, date.day,
                                  date.hour, 0, 0)
        hour_buckets[hours] += 1

    counts = []
    for hour in sorted(hour_buckets.keys(), reverse=True):
        counts.append((str(hour), hour_buckets[hour]))

    # BUG FIX: an empty (but found) result left `timestamp` as None and
    # previously raised TypeError on `int(None * 1000000)`.
    if timestamp is None:
        return counts, None
    return counts, int(timestamp * 1000000) - 1
def process_ready_items(cls, rowkey, ready_fn):
    """Hand every due item in `rowkey` to `ready_fn`, then clean up.

    Columns are keyed by time-UUIDs encoding when each item becomes
    "ripe"; items whose time is at or before now are collected (in
    column order) and passed to `ready_fn` as an OrderedDict.  Stats
    counters are emitted for both ready and still-pending items.
    Failures in `ready_fn` are logged and counted but do not abort
    cleanup.
    """
    cutoff = datetime.now(g.tz)
    columns = cls._cf.xget(rowkey, include_timestamp=True)

    ready_items = OrderedDict()
    ready_timestamps = []
    unripe_timestamps = []
    for ready_time_uuid, (data, timestamp) in columns:
        ready_time = convert_uuid_to_time(ready_time_uuid)
        ready_datetime = datetime.fromtimestamp(ready_time, tz=g.tz)
        if ready_datetime <= cutoff:
            ready_items[ready_time_uuid] = data
            ready_timestamps.append(timestamp)
        else:
            unripe_timestamps.append(timestamp)

    g.stats.simple_event(
        "trylater.{system}.ready".format(system=rowkey),
        delta=len(ready_items),
    )
    g.stats.simple_event(
        "trylater.{system}.pending".format(system=rowkey),
        delta=len(unripe_timestamps),
    )

    if not ready_items:
        return

    try:
        ready_fn(ready_items)
    # BUG FIX: was a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit; narrowed to Exception.
    except Exception:
        error = traceback.format_exc()
        g.log.info(error)
        g.stats.simple_event(
            "trylater.{system}.failed".format(system=rowkey),
        )

    cls.cleanup(rowkey, ready_items, ready_timestamps, unripe_timestamps)
def convert_uuid_to_datetime(uu):
    """Return the tz-aware datetime (in g.tz) embedded in the v1 UUID `uu`."""
    seconds = convert_uuid_to_time(uu)
    return datetime.fromtimestamp(seconds, g.tz)
def _date(self):
    """UTC-aware creation time derived from this object's time-UUID id."""
    seconds = convert_uuid_to_time(self._id)
    return datetime.datetime.fromtimestamp(seconds, pytz.UTC)