Example #1
    def _prune_multi(cls, queries):
        cls._fetch_multi(queries)

        with Mutator(CONNECTION_POOL) as m:
            for q in queries:
                q._sort_data()
                q._prune(m)
Example #2
def set_account_ip(account_id, ip, date=None):
    """Set an IP address as having accessed an account.

    Updates all underlying datastores.
    """
    if date is None:
        date = datetime.datetime.now(g.tz)
    m = Mutator(CONNECTION_POOL)
    m.insert(IPsByAccount._cf, str(account_id), {date: ip}, ttl=CF_TTL)
    m.insert(AccountsByIP._cf, ip, {date: str(account_id)}, ttl=CF_TTL)
    m.send()
Example #3
    def save_log(self, application, host, severity, timestamp, message):
        """
        Saves a log message.
        Raises:
        - DaedalusException if any parameter isn't valid.
        """
        _check_application(application)
        _check_severity(severity)
        _check_host(host)
        _check_message(message)
        try:
            timestamp = float(timestamp)
        except (TypeError, ValueError):
            raise DaedalusException(
                "The timestamp '{0}' couldn't be converted to a float".format(
                    timestamp))

        event_uuid = convert_time_to_uuid(timestamp, randomize=True)
        _id = event_uuid.get_hex()

        json_message = json.dumps({
            'application': application,
            'host': host,
            'severity': severity,
            'timestamp': timestamp,
            '_id': _id,
            'message': message,
        })

        pool = self._get_pool()
        with Mutator(pool) as batch:
            # Save on column family CF_LOGS
            row_key = ymd_from_uuid1(event_uuid)
            batch.insert(self._get_cf_logs(), str(row_key), {
                event_uuid: json_message,
            })

            # Save on column family CF_LOGS_BY_APP
            batch.insert(self._get_cf_logs_by_app(), application, {
                event_uuid: EMPTY_VALUE,
            })

            # Save on column family CF_LOGS_BY_HOST
            batch.insert(self._get_cf_logs_by_host(), host, {
                event_uuid: EMPTY_VALUE,
            })

            # Save on column family CF_LOGS_BY_SEVERITY
            batch.insert(self._get_cf_logs_by_severity(), severity, {
                event_uuid: EMPTY_VALUE,
            })
Example #4
def set_account_ip(account_id, ip, date=None):
    """Set an IP address as having accessed an account.

    Updates all underlying datastores.
    """
    # don't store private IPs, send a graphite event so we can alert on this
    if ip_address(ip).is_private:
        g.stats.simple_event('ip.private_ip_storage_prevented')
        return

    if date is None:
        date = datetime.datetime.now(g.tz)
    m = Mutator(CONNECTION_POOL)
    m.insert(IPsByAccount._cf, str(account_id), {date: ip}, ttl=CF_TTL)
    m.insert(AccountsByIP._cf, ip, {date: str(account_id)}, ttl=CF_TTL)
    m.send()
Example #5
    def __init__(self):
        self.mutator = Mutator(CONNECTION_POOL)
        self.to_prune = set()
Example #6
    def update(self):
        things = list(self.query)

        with Mutator(CONNECTION_POOL) as m:
            self.model.remove(m, self.key, None)  # empty the whole row
            self._insert(m, things)
Example #7
    def start_batch(self, queue_size=0):
        if self._batch is None:
            self.in_batch = True
            self._batch = Mutator(self._pool, queue_size)
        self.batch_count += 1
Example #8
    def insert(self, column_family, key, columns):
        if self._batch is not None:
            self._batch.insert(column_family, key, columns)
        else:
            with Mutator(self._pool) as b:
                b.insert(column_family, key, columns)
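
The examples above all follow the same pycassa batching pattern: queue inserts and removes on a Mutator, then flush them with a single send(), either explicitly or through the context-manager form, which calls send() on exit. Below is a minimal self-contained sketch of that pattern; the keyspace MyKeyspace and column family Events are hypothetical names, and a Cassandra node is assumed to be listening on localhost:9160.

import datetime

from pycassa.pool import ConnectionPool
from pycassa.columnfamily import ColumnFamily
from pycassa.batch import Mutator

# Hypothetical keyspace and column family names, used only for illustration.
pool = ConnectionPool('MyKeyspace', server_list=['localhost:9160'])
events = ColumnFamily(pool, 'Events')

# Explicit form: queue several writes, then flush them in one batch call.
m = Mutator(pool)
m.insert(events, 'row-1', {'status': 'ok'}, ttl=3600)
m.insert(events, 'row-2', {'status': 'error'})
m.send()

# Context-manager form: send() is called automatically when the block exits.
with Mutator(pool) as m:
    m.remove(events, 'row-2')  # no columns given, so the whole row is removed
    m.insert(events, 'row-1', {'seen': datetime.datetime.utcnow().isoformat()})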