Example #1
File: ip.py Project: AjaxGb/reddit
def set_account_ip(account_id, ip, date=None):
    """Set an IP address as having accessed an account.

    Updates all underlying datastores.
    """
    if date is None:
        date = datetime.datetime.now(g.tz)
    m = Mutator(CONNECTION_POOL)
    m.insert(IPsByAccount._cf, str(account_id), {date: ip}, ttl=CF_TTL)
    m.insert(AccountsByIP._cf, ip, {date: str(account_id)}, ttl=CF_TTL)
    m.send()
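The snippet above depends on module-level names defined elsewhere in ip.py: CONNECTION_POOL, CF_TTL, and the IPsByAccount / AccountsByIP models whose _cf attributes are pycassa ColumnFamily objects. The sketch below shows one way that context could be wired up with pycassa and how the function might be called; the keyspace name, TTL value, and sample arguments are illustrative placeholders, not reddit's actual configuration.

import datetime

import pycassa
from pycassa.batch import Mutator  # the bare Mutator name used in the snippet above

# Hypothetical stand-ins for configuration the real ip.py gets from the app environment.
CONNECTION_POOL = pycassa.ConnectionPool('ip_keyspace')  # placeholder keyspace
CF_TTL = 60 * 60 * 24 * 90                               # e.g. expire entries after 90 days


# reddit defines these as model classes elsewhere; plain classes with a _cf attribute
# are enough for the function above. Both column families are assumed to use a
# DateType comparator so the datetime column names are valid.
class IPsByAccount(object):
    _cf = pycassa.ColumnFamily(CONNECTION_POOL, 'IPsByAccount')


class AccountsByIP(object):
    _cf = pycassa.ColumnFamily(CONNECTION_POOL, 'AccountsByIP')


# Passing an explicit date sidesteps the g.tz (pylons app-globals) dependency.
set_account_ip(1234, '8.8.8.8', date=datetime.datetime.utcnow())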
Example #2
File: ip.py Project: zeantsoi/reddit
def set_account_ip(account_id, ip, date=None):
    """Set an IP address as having accessed an account.

    Updates all underlying datastores.
    """
    # don't store private IPs, send a graphite event so we can alert on this
    if ip_address(ip).is_private:
        g.stats.simple_event('ip.private_ip_storage_prevented')
        return

    if date is None:
        date = datetime.datetime.now(g.tz)
    m = Mutator(CONNECTION_POOL)
    m.insert(IPsByAccount._cf, str(account_id), {date: ip}, ttl=CF_TTL)
    m.insert(AccountsByIP._cf, ip, {date: str(account_id)}, ttl=CF_TTL)
    m.send()
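The only difference from the first example is the guard at the top: ip_address() comes from the ipaddress module (standard library on Python 3, available as a backport package on Python 2), and is_private is True for RFC 1918 space and other reserved ranges, so such addresses only produce the graphite event and are never written to Cassandra. A quick look at the check in isolation, with arbitrary sample addresses:

from ipaddress import ip_address

ip_address(u'10.1.2.3').is_private      # True  (RFC 1918)
ip_address(u'192.168.0.5').is_private   # True  (RFC 1918)
ip_address(u'8.8.8.8').is_private       # False (publicly routable)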
Example #3
    def parse_logs(self, build_ids):
        """Parse the logs for the specified build IDs into storage."""
        # TODO hook up parallel processing.

        OUR_VERSION = '1'
        mut = Mutator(self._pool)
        cf = ColumnFamily(self._pool, 'build_timelines')
        i_cf = ColumnFamily(self._pool, 'indices')
        builds_cf = ColumnFamily(self._pool, 'builds')
        counters = ColumnFamily(self._pool, 'counters')
        super_counters = ColumnFamily(self._pool, 'super_counters')

        for build_id in build_ids:
            info = self._connection.build_from_id(build_id)
            if not info:
                continue

            existing_version = info.get('log_parsing_version')
            if existing_version and existing_version >= OUR_VERSION:
                continue

            if info['log_fetch_status'] != 'fetched':
                continue

            log = self._connection.file_data(info['log_url'])
            if not log:
                continue

            parsed = parse_build_log(log)
            cat = info['builder_category']

            cols = {}
            indices = {}

            for step in parsed.steps:
                start = calendar.timegm(step.start.utctimetuple())
                end = calendar.timegm(step.end.utctimetuple())

                elapsed = end - start
                name = step.name

                cols[start] = {
                    'name': name,
                    'state': step.state,
                    'results': step.results,
                    'start': unicode(start),
                    'end': unicode(end),
                    'elapsed': unicode(elapsed)
                }

                start_date = step.start.date().isoformat()

                indices[name] = {build_id: ''}
                counters.add('build_step_number', name)
                counters.add('build_step_duration', name, elapsed)
                super_counters.add('build_step_number_by_category', name,
                    1, cat)
                super_counters.add('build_step_duration_by_category', name,
                    elapsed, cat)
                super_counters.add('build_step_number_by_day', name, 1,
                    start_date)
                super_counters.add('build_step_duration_by_day', name,
                    elapsed, start_date)

                day_cat = '%s.%s' % (start_date, cat)
                super_counters.add('build_step_number_by_day_and_category',
                    name, 1, day_cat)
                super_counters.add('build_step_duration_by_day_and_category',
                    name, elapsed, day_cat)

            mut.insert(cf, build_id, cols)
            mut.insert(i_cf, 'build_step_name_to_build_ids', indices)
            mut.insert(builds_cf, build_id, {'log_parsing_version': OUR_VERSION})

            yield 'Parsed build %s into %d steps.' % (build_id,
                len(parsed.steps))

        mut.send()
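parse_logs mixes two write paths: regular columns are queued on the shared Mutator and flushed with mut.send(), while the per-step totals go straight to Cassandra counter columns via ColumnFamily.add(key, column, value=1, super_column=None); the super_counters calls use the super_column argument to bucket the same totals by category, day, or both. A rough sketch of reading those totals back with the same column family names; the keyspace name and the 'debug' category are illustrative assumptions:

from pycassa import ColumnFamily, ConnectionPool

pool = ConnectionPool('build_data')  # placeholder keyspace name
counters = ColumnFamily(pool, 'counters')
super_counters = ColumnFamily(pool, 'super_counters')

# Total number of times each step name has run, across all parsed builds.
step_counts = counters.get('build_step_number')  # {step_name: count, ...}

# Total seconds spent per step for one builder category, via the super column.
debug_durations = super_counters.get('build_step_duration_by_category',
                                     super_column='debug')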