def _pack_name(self, value, is_supercol_name=False, slice_end=_NON_SLICE):
    """Serialize a column (or supercolumn) name for the wire.

    With ``autopack_names`` disabled the name is only type-checked and
    returned untouched; otherwise it is converted according to the
    configured comparator type and packed via ``util.pack``.
    """
    if not self.autopack_names:
        # Pass-through mode: enforce the str/unicode contract, nothing else.
        if value is not None and not isinstance(value, (str, unicode)):
            raise TypeError("A str or unicode column name was expected, but %s was received instead (%s)" % (value.__class__.__name__, str(value)))
        return value

    if value is None:
        return None

    comparator = (self.supercol_name_data_type if is_supercol_name
                  else self.col_name_data_type)

    if comparator == 'TimeUUIDType':
        if slice_end:
            # Slice endpoints must be deterministic (no randomization),
            # otherwise range queries would be unstable.
            value = util.convert_time_to_uuid(
                value, lowest_val=(slice_end == _SLICE_START), randomize=False)
        else:
            value = util.convert_time_to_uuid(value, randomize=True)
    elif comparator == 'BytesType' and not isinstance(value, (str, unicode)):
        raise TypeError("A str or unicode column name was expected, but %s was received instead (%s)" % (value.__class__.__name__, str(value)))

    return util.pack(value, comparator)
def _pack_name(self, value, is_supercol_name=False, slice_end=_NON_SLICE):
    """Serialize a column (or supercolumn) name for Cassandra.

    When ``autopack_names`` is off, the value is only type-checked and
    returned unchanged.  Otherwise it is converted according to the
    comparator type of the (super)column family and packed to bytes.

    :param value: the column name; ``None`` is passed through as ``None``.
    :param is_supercol_name: use the supercolumn comparator instead of
        the column comparator.
    :param slice_end: when truthy, treated as a slice-endpoint marker;
        TimeUUID endpoints are then converted deterministically
        (lowest value when it equals ``_SLICE_START``).
    :raises TypeError: for non-str/unicode names where raw names are
        required (autopacking off, or BytesType comparator).
    """
    if not self.autopack_names:
        if value is not None and not (isinstance(value, str)
                                      or isinstance(value, unicode)):
            raise TypeError(
                "A str or unicode column name was expected, but %s was received instead (%s)"
                % (value.__class__.__name__, str(value)))
        return value
    if value is None:
        return
    if is_supercol_name:
        d_type = self.supercol_name_data_type
    else:
        d_type = self.col_name_data_type
    if d_type == 'TimeUUIDType':
        if slice_end:
            # Slice endpoints must not be randomized, or range queries
            # would be unstable.
            value = util.convert_time_to_uuid(
                value, lowest_val=(slice_end == _SLICE_START), randomize=False)
        else:
            value = util.convert_time_to_uuid(value, randomize=True)
    elif d_type == 'BytesType' and not (isinstance(value, str)
                                        or isinstance(value, unicode)):
        raise TypeError(
            "A str or unicode column name was expected, but %s was received instead (%s)"
            % (value.__class__.__name__, str(value)))
    return util.pack(value, d_type)
def test_insert_get_tuuids(self):
    """Insert columns keyed by TimeUUIDs and check get() returns them in order."""
    row_key = 'TestColumnFamily.test_insert_get'
    pairs = (
        (convert_time_to_uuid(time.time() - 1000, randomize=True), 'val1'),
        (convert_time_to_uuid(time.time(), randomize=True), 'val2'),
    )
    expected_names = [name for name, _ in pairs]
    for family in (cf, cf_stub):
        # The row must not exist before the insert.
        assert_raises(NotFoundException, family.get, row_key)
        write_ts = family.insert(row_key, dict(pairs))
        # insert() returns an integral write timestamp.
        assert_true(isinstance(write_ts, (int, long)))
        assert_equal(family.get(row_key).keys(), expected_names)
def pack_uuid(value, slice_start=None):
    """Pack *value* into the raw 16 bytes Cassandra stores for UUIDType.

    ``slice_start`` selects deterministic conversion for slice endpoints
    (it is forwarded as ``lowest_val``); when it is ``None`` the
    conversion is randomized so repeated packs yield distinct UUIDs.
    Raises ``TypeError`` when conversion did not produce a UUID-like
    object (anything without a ``bytes`` attribute).
    """
    uuid_val = (util.convert_time_to_uuid(value, randomize=True)
                if slice_start is None
                else util.convert_time_to_uuid(value, lowest_val=slice_start,
                                               randomize=False))
    if not hasattr(uuid_val, 'bytes'):
        raise TypeError("%s is not valid for UUIDType" % uuid_val)
    return uuid_val.bytes
def pack_uuid(value, slice_start=None):
    """Pack *value* into the raw 16 bytes Cassandra stores for UUIDType.

    :param value: a UUID, or a time value convertible by
        ``util.convert_time_to_uuid``.
    :param slice_start: when ``None``, conversion is randomized;
        otherwise forwarded as ``lowest_val`` for a deterministic
        slice-endpoint UUID.
    :raises TypeError: if conversion did not yield a UUID-like object
        (anything without a ``bytes`` attribute).
    """
    if slice_start is None:
        value = util.convert_time_to_uuid(value, randomize=True)
    else:
        value = util.convert_time_to_uuid(value, lowest_val=slice_start,
                                          randomize=False)
    if not hasattr(value, 'bytes'):
        raise TypeError("%s is not valid for UUIDType" % value)
    return value.bytes
def _compute_count(a_range):
    """Count log columns whose TimeUUIDs fall inside ``a_range``.

    ``a_range`` is a (start, end) pair of epoch timestamps.  Used as the
    computation callback for ``_json_cache()``.
    """
    # Deterministic endpoints: lowest UUID at the start timestamp,
    # highest at the end, so the whole interval is covered.
    start_col = convert_time_to_uuid(a_range[0], lowest_val=True)
    finish_col = convert_time_to_uuid(a_range[1], lowest_val=False)
    # Row selected by the day of the range start — assumes the range
    # never spans a day boundary (TODO confirm).
    row_key = ymd_from_epoch(float(a_range[0]))
    return self._get_cf_logs().get_count(row_key,
                                         column_start=start_col,
                                         column_finish=finish_col)
def _compute_count(a_range): column_start = convert_time_to_uuid(a_range[0], lowest_val=True) # start_datetime = utc_timestamp2datetime(a_range[0]) column_finish = convert_time_to_uuid(a_range[1], lowest_val=False) # end_datetime = utc_timestamp2datetime(a_range[1]) row_key = ymd_from_epoch(float(a_range[0])) count = self._get_cf_logs().get_count( row_key, column_start=column_start, column_finish=column_finish) return count # to be used by `_json_cache()`
def save_log(self, application, host, severity, timestamp, message):
    """Saves a log message.

    The event is stored in CF_LOGS keyed by day, and indexed (with
    empty values) in CF_LOGS_BY_APP / _HOST / _SEVERITY keyed by the
    respective attribute, all in one batch mutation.

    Raises:
    - DaedalusException if any parameter isn't valid.
    """
    _check_application(application)
    _check_severity(severity)
    _check_host(host)
    _check_message(message)
    try:
        timestamp = float(timestamp)
    except (TypeError, ValueError):
        # BUGFIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; float() raises only
        # TypeError/ValueError on bad input.
        raise DaedalusException(
            "The timestamp '{0}' couldn't be transformed to a float".format(timestamp))
    # Randomized TimeUUID: two events with the same timestamp still get
    # distinct column keys.
    event_uuid = convert_time_to_uuid(timestamp, randomize=True)
    _id = event_uuid.get_hex()
    json_message = json.dumps({
        'application': application,
        'host': host,
        'severity': severity,
        'timestamp': timestamp,
        '_id': _id,
        'message': message,
    })
    pool = self._get_pool()
    with Mutator(pool) as batch:
        # Main store: one row per day, one column per event.
        row_key = ymd_from_uuid1(event_uuid)
        batch.insert(self._get_cf_logs(), str(row_key), {
            event_uuid: json_message,
        })
        # Secondary indexes: the column name is the event UUID, the
        # value is irrelevant (EMPTY_VALUE).
        batch.insert(self._get_cf_logs_by_app(), application, {
            event_uuid: EMPTY_VALUE,
        })
        batch.insert(self._get_cf_logs_by_host(), host, {
            event_uuid: EMPTY_VALUE,
        })
        batch.insert(self._get_cf_logs_by_severity(), severity, {
            event_uuid: EMPTY_VALUE,
        })
def insert(self, time, data, batch=None):
    """Store *data* under a randomized TimeUUID column for *time*.

    Writes go through *batch* when one is supplied, otherwise directly
    to this column family.  Returns the generated column key.
    """
    row = self.row_key(time)
    col = convert_time_to_uuid(time, randomize=True)
    target = self._cf if batch is None else batch
    target.insert(row, {col: data})
    return col
def insert(self, obj, *args, **kwargs):
    """Insert a new object into the Column family.

    This method is responsible for serializing the object. If `args`
    exists, all objects in `args` will be associated with the newly
    created object through their many-to-many column families.

    Returns an instance of this class wrapping the new TimeUUID row key
    and its first version tuple.
    """
    col_fam = ColumnFamily(self.pool, self.__column_family__)
    # TimeUUID row key: rows sort by creation time.
    key = convert_time_to_uuid(datetime.utcnow())
    serialized = json.dumps(obj)
    # CLEANUP: insert()'s return value (a write timestamp) was bound to
    # an unused local; drop it.
    col_fam.insert(key, {key: serialized}, **kwargs)
    versions = ((key, obj),)
    for remote in args:
        # NOTE(review): assert is stripped under `python -O`; raise an
        # explicit TypeError here if this validation must always run.
        assert(hasattr(remote, '__column_family__'))
        # As we are the timestamped object, we are the "target" in the
        # many-to-many table.
        cf = "%s_%s" % (remote.__column_family__, self.__column_family__)
        col_fam_mtm = ColumnFamily(self.pool, cf)
        col_fam_mtm.insert(remote.rowkey,
                           {convert_time_to_uuid(datetime.utcnow()): key})
    return self(key, versions)
def insert(self, columns, **kwargs):
    """Insert a new row in the column family.

    Several things are checked before inserting:

    - Verify that inputs exists in class, and resolve aliases.
    - As we are handling manually uniqueness, we must ensure that all
      unique fields are present in the `columns` parameter.
    - For all unique fields, we use :meth:`get_by` to ensure given
      value is actually.. unique.
    - We need to create a TimeUUID compatible object using pycassa
      helper.

    Fields that refers to relationships cannot be assigned directly at
    insert. Maybe this will be implemented later.

    :param columns: dict of column name (or alias) -> value; mutated in
        place when aliases are resolved.
    :raises ModelException: unknown column, missing unique field, or a
        unique value that already exists.

    TODO: maybe it will need to have some consistency level adjusted to
    avoid possible race conditions.
    """
    col_fam = ColumnFamily(self.pool, self.__column_family__)
    reg = self.registry[self.__column_family__]
    # Verify inputs and resolve aliases.  Iterate over a copy
    # (dict(columns)) because `columns` itself is mutated below.
    for k, v in dict(columns).items():
        if k not in reg:
            raise ModelException('%s: no column "%s" found'
                                 % (self.__column_family__, k))
        if hasattr(reg[k], 'alias') and reg[k].alias:
            # Re-key the value under its canonical (alias) name.
            columns[reg[k].alias] = v
            del columns[k]
    # Collect the fields declared unique and require them all.
    unique = [k for k, v in reg.items()
              if hasattr(v, 'unique') and v.unique]
    missing = set(unique) - set(columns.keys())
    if missing:
        raise ModelException(
            "%s: cannot insert without following fields: %s"
            % (self.__column_family__, ','.join(missing)))
    # Ensure uniqueness: probe each unique (field, value) pair.
    verif_unique = [(k, v) for k, v in columns.items() if k in unique]
    # for/else: the `else` branch runs only when the loop finishes
    # WITHOUT hitting `break`, i.e. every value proved unique.
    for k, v in verif_unique:
        exists = self.get_by(k, v)
        if exists:
            # we have a hit, so this value is not unique
            break
    else:
        # generate a TimeUUID object for the rowkey
        key = convert_time_to_uuid(datetime.utcnow())
        ret = col_fam.insert(key, columns, **kwargs)
        return self(key, **columns)
    # Reached only via `break` above: some key is not unique.
    raise ModelException("%s: cannot create, a value is not unique"
                         % self.__column_family__)
def save_log(self, application, host, severity, timestamp, message):
    """Saves a log message.

    The event is stored in CF_LOGS keyed by day, and indexed (with
    empty values) in CF_LOGS_BY_APP / _HOST / _SEVERITY keyed by the
    respective attribute, all in one batch mutation.

    Raises:
    - DaedalusException if any parameter isn't valid.
    """
    _check_application(application)
    _check_severity(severity)
    _check_host(host)
    _check_message(message)
    try:
        timestamp = float(timestamp)
    except (TypeError, ValueError):
        # BUGFIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; float() raises only
        # TypeError/ValueError on bad input.
        raise DaedalusException(
            "The timestamp '{0}' couldn't be transformed to a float".
            format(timestamp))
    # Randomized TimeUUID: two events with the same timestamp still get
    # distinct column keys.
    event_uuid = convert_time_to_uuid(timestamp, randomize=True)
    _id = event_uuid.get_hex()
    json_message = json.dumps({
        'application': application,
        'host': host,
        'severity': severity,
        'timestamp': timestamp,
        '_id': _id,
        'message': message,
    })
    pool = self._get_pool()
    with Mutator(pool) as batch:
        # Main store: one row per day, one column per event.
        row_key = ymd_from_uuid1(event_uuid)
        batch.insert(self._get_cf_logs(), str(row_key), {
            event_uuid: json_message,
        })
        # Secondary indexes: the column name is the event UUID, the
        # value is irrelevant (EMPTY_VALUE).
        batch.insert(self._get_cf_logs_by_app(), application, {
            event_uuid: EMPTY_VALUE,
        })
        batch.insert(self._get_cf_logs_by_host(), host, {
            event_uuid: EMPTY_VALUE,
        })
        batch.insert(self._get_cf_logs_by_severity(), severity, {
            event_uuid: EMPTY_VALUE,
        })
def insert(self, user, time, batch=None):
    """Store *user* (a dict with an "id" key) in its own row, under a
    randomized TimeUUID column derived from *time*.

    *batch* is forwarded to the parent insert for batched writes.
    """
    user_id = user["id"]
    column_key = convert_time_to_uuid(time, randomize=True)
    super(Users, self).insert(user_id, {column_key: user}, batch)
def col_key(self, _time):
    """Return the column key for *_time*: a randomized TimeUUID."""
    uuid_key = convert_time_to_uuid(_time, randomize=True)
    return uuid_key
def insert(self, user, time, batch=None):
    # Store *user* (a dict with at least an 'id' key) under a randomized
    # TimeUUID column named from *time*; the row key is the user's id.
    # *batch* is forwarded to the parent insert for batched writes.
    id = user['id']
    ckey = convert_time_to_uuid(time, randomize=True)
    super(Users, self).insert(id, {ckey: user}, batch)
def save_log(self, application, host, severity, timestamp, message,
             multi_message=False, multimessage_id=None):
    """Saves a log message.

    Raises:
    - DaedalusException if any parameter isn't valid.

    Returns:
    - tuple with (row_key, column_key, multimessage_id)
    """
    _check_application(application)
    _check_severity(severity)
    _check_host(host)
    _check_message(message)
    try:
        timestamp = float(timestamp)
    except (TypeError, ValueError):
        # BUGFIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; float() raises only
        # TypeError/ValueError on bad input.
        raise DaedalusException("The timestamp '{0}' couldn't be "
                                "transformed to a float".format(timestamp))
    # NOTE(review): assert is stripped under `python -O`; raise explicitly
    # if this check must always run.
    assert multi_message in (True, False,)
    # Randomized TimeUUID: identical timestamps still yield unique keys.
    event_uuid = convert_time_to_uuid(timestamp, randomize=True)
    # Composite column key; _id is its serialized (string) form.
    column_key = (event_uuid, host, application, severity)
    _id = ','.join((event_uuid.get_hex(), host, application, severity, ))
    # Process-local caches: metadata rows are written only the first
    # time an application/host is seen by this instance.
    if application not in self._app_cache:  # was: `not application in`
        self._app_cache[application] = True
        self._get_cf_metadata().insert('applications', {application: ''})
    if host not in self._host_cache:
        self._host_cache[host] = True
        self._get_cf_metadata().insert('hosts', {host: ''})
    # Row key derived from the event UUID1's timestamp (ymdhm --
    # presumably year/month/day/hour/minute granularity).
    row_key = ymdhm_from_uuid1(event_uuid)
    key_for_bitmap = int(row_key)
    if key_for_bitmap not in self._timestamp_bitmap_cache:
        self._timestamp_bitmap_cache[key_for_bitmap] = True
        self._get_cf_timestamp_bitmap().insert('timestamp_bitmap',
                                               {key_for_bitmap: ''})
    message_dict = {
        'application': application,
        'host': host,
        'severity': severity,
        'timestamp': timestamp,
        '_id': _id,
        'message': message,
    }
    if multi_message:
        # This message is part of a multi-message.
        if multimessage_id:
            # The multi-message key was passed as parameter.
            message_dict['multimessage_id'] = multimessage_id
        else:
            # First part: derive a fresh multi-message id from this event.
            message_dict['multimessage_id'] = ','.join([row_key, _id])
    self._get_cf_logs().insert(row_key, {
        column_key: json.dumps(message_dict),
    })
    return (row_key, column_key, message_dict.get('multimessage_id', None))
def save_log(self, application, host, severity, timestamp, message,
             multi_message=False, multimessage_id=None):
    """
    Saves a log message.

    Raises:
    - DaedalusException if any parameter isn't valid.

    Returns:
    - tuple with (row_key, column_key, multimessage_id)
    """
    _check_application(application)
    _check_severity(severity)
    _check_host(host)
    _check_message(message)
    try:
        timestamp = float(timestamp)
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt /
        # SystemExit; consider narrowing to (TypeError, ValueError).
        raise (DaedalusException(
            "The timestamp '{0}' couldn't be "
            "transformed to a float".format(timestamp)))
    # NOTE(review): assert is stripped under `python -O`.
    assert multi_message in (
        True,
        False,
    )
    # Randomized TimeUUID: identical timestamps still yield unique keys.
    event_uuid = convert_time_to_uuid(timestamp, randomize=True)
    # Composite column key; _id is its serialized (string) form.
    column_key = (event_uuid, host, application, severity)
    _id = ','.join((
        event_uuid.get_hex(),
        host,
        application,
        severity,
    ))
    # Process-local caches: metadata rows are written only the first
    # time an application/host is seen by this instance.
    if not application in self._app_cache:
        self._app_cache[application] = True
        self._get_cf_metadata().insert('applications', {application: ''})
    if not host in self._host_cache:
        self._host_cache[host] = True
        self._get_cf_metadata().insert('hosts', {host: ''})
    # Row key derived from the event UUID1's timestamp (ymdhm --
    # presumably year/month/day/hour/minute granularity).
    row_key = ymdhm_from_uuid1(event_uuid)
    key_for_bitmap = int(row_key)
    if not key_for_bitmap in self._timestamp_bitmap_cache:
        self._timestamp_bitmap_cache[key_for_bitmap] = True
        self._get_cf_timestamp_bitmap().insert('timestamp_bitmap',
                                               {key_for_bitmap: ''})
    message_dict = {
        'application': application,
        'host': host,
        'severity': severity,
        'timestamp': timestamp,
        '_id': _id,
        'message': message,
    }
    if multi_message:
        # this message is part of a multi-message
        if multimessage_id:
            # The multi-message key was passed as parameter
            message_dict['multimessage_id'] = multimessage_id
        else:
            # First part: derive a fresh multi-message id from this event.
            message_dict['multimessage_id'] = ','.join([row_key, _id])
    self._get_cf_logs().insert(row_key, {
        column_key: json.dumps(message_dict),
    })
    return (row_key, column_key, message_dict.get('multimessage_id', None))