def _filter_records_by_days(start_date, end_date, memory_storage_inst):
    """Resolve a [start, end] date interval and fetch record ids for it.

    Either bound may be empty/None: the start defaults to the first stored
    record day, the end defaults to 'now'. Bounds arrive as one-element
    sequences of date strings (request parameters).
    """
    start_ts = (utils.date_to_timestamp_ext(start_date[0]) if start_date
                else memory_storage_inst.get_first_record_day())
    end_ts = (utils.date_to_timestamp_ext(end_date[0]) if end_date
              else utils.date_to_timestamp_ext('now'))

    first_day = utils.timestamp_to_day(start_ts)
    last_day = utils.timestamp_to_day(end_ts)
    # inclusive range of day numbers, hence the +1
    days = six.moves.range(first_day, last_day + 1)
    return memory_storage_inst.get_record_ids_by_days(days)
def _remove_record_from_index(self, record):
    """Drop *record* from every secondary index that references it."""
    rid = record.record_id
    # generic per-attribute indexes
    for field, index in six.iteritems(self.indexes):
        index[getattr(record, field)].remove(rid)
    # per-day index
    self.day_index[utils.timestamp_to_day(record.date)].remove(rid)
    # (module, release) index
    self.module_release_index[(record.module, record.release)].remove(rid)
def _remove_record_from_index(self, record):
    """Remove a record (stored as a dict) from all secondary indexes."""
    rid = record['record_id']

    # generic per-key indexes
    for key, index in six.iteritems(self.indexes):
        index[record[key]].remove(rid)

    # day index keyed by day number
    day = utils.timestamp_to_day(record['date'])
    self.day_index[day].remove(rid)

    # index keyed by (module, release) pairs
    module_release = (record['module'], record['release'])
    self.module_release_index[module_release].remove(rid)
def _remove_record_from_index(self, record):
    """Unregister *record* from each index that currently lists it."""
    record_id = record.record_id

    for attr_name, index in six.iteritems(self.indexes):
        attr_value = getattr(record, attr_name)
        index[attr_value].remove(record_id)

    record_day = utils.timestamp_to_day(record.date)
    self.day_index[record_day].remove(record_id)

    key = (record.module, record.release)
    self.module_release_index[key].remove(record_id)
def person_day_filter(result, record, param_id, context):
    """Accumulate the person-day metric, counting a user once per day.

    Relies on records arriving grouped by day (in some order), so only the
    set of users seen on the current day has to be tracked in *context*.
    """
    day = utils.timestamp_to_day(record.date)
    # a new day started: reset the per-day dedup set
    if day != context.get('last_processed_day'):
        context['last_processed_day'] = day
        context['counted_user_ids'] = set()

    seen = context['counted_user_ids']
    user_id = record.user_id
    value = getattr(record, param_id)
    if user_id not in seen:
        seen.add(user_id)
        result[value]['metric'] += 1
def _save_record(self, record):
    """Store a record (dict) and register it in all secondary indexes.

    Records attributed to the '*robots' company are dropped entirely —
    they are automated accounts, not contributors.
    """
    if record.get('company_name') == '*robots':
        return

    record_id = record['record_id']
    self.records[record_id] = record

    for key, index in six.iteritems(self.indexes):
        self._add_to_index(index, record, key)

    # a record may reference several blueprints (or none)
    for bp_id in (record.get('blueprint_id') or []):
        # setdefault replaces the hand-rolled "if key in dict" branching
        self.blueprint_id_index.setdefault(bp_id, set()).add(record_id)

    record_day = utils.timestamp_to_day(record['date'])
    self.day_index.setdefault(record_day, set()).add(record_id)
def person_day_filter(result, record, param_id, context):
    """Accumulate the person-day effort metric, one count per user per day.

    Commit and member records are skipped:
      1. a commit carries the merge date, which is not the author's effort
         (that effort is already represented by patch records);
      2. registration on openstack.org is not an effort either.
    """
    if record.record_type in ('commit', 'member'):
        return

    day = utils.timestamp_to_day(record.date)
    # records are grouped by day, so the dedup set only needs to cover
    # the day currently being processed
    if context.get('last_processed_day') != day:
        context['last_processed_day'] = day
        context['counted_user_ids'] = set()

    counted = context['counted_user_ids']
    user_id = record.user_id
    value = getattr(record, param_id)
    if user_id in counted:
        return
    counted.add(user_id)
    result[value]['metric'] += 1
def _save_record(self, record):
    """Store *record* and register it in all secondary indexes.

    Robot-authored records are dropped, except patches and reviews —
    those record types are kept even for '*robots' accounts.
    """
    if (record.company_name == '*robots'
            and record.record_type not in ('patch', 'review')):
        return

    record_id = record.record_id
    self.records[record_id] = record

    for key, index in six.iteritems(self.indexes):
        self._add_to_index(index, record, key)

    # setdefault replaces the duplicated "if key in dict" add/create branches
    record_day = utils.timestamp_to_day(record.date)
    self.day_index.setdefault(record_day, set()).add(record_id)

    mr = (record.module, record.release)
    self.module_release_index.setdefault(mr, set()).add(record_id)
def _save_record(self, record):
    """Store *record* and register it in every secondary index.

    Records from the '*robots' company (automated accounts) are ignored.
    """
    if record.company_name == '*robots':
        return

    record_id = record.record_id
    self.records[record_id] = record

    for key, index in six.iteritems(self.indexes):
        self._add_to_index(index, record, key)

    # a record may reference several blueprints (or none);
    # setdefault replaces the hand-rolled "if key in dict" branching
    for bp_id in (record.blueprint_id or []):
        self.blueprint_id_index.setdefault(bp_id, set()).add(record_id)

    record_day = utils.timestamp_to_day(record.date)
    self.day_index.setdefault(record_day, set()).add(record_id)

    mr = (record.module, record.release)
    self.module_release_index.setdefault(mr, set()).add(record_id)
def timeline(records, **kwargs):
    """Build weekly series for the timeline plugin.

    Returns [array_commits, array_commits_hl, array_loc], each a list of
    [week-date-string, value] pairs covering the resolved week interval.
    """
    metric = parameters.get_parameter(kwargs, 'metric')
    start_date = int(
        parameters.get_single_parameter(kwargs, 'start_date') or 0)
    release_name = parameters.get_single_parameter(kwargs, 'release') or 'all'
    releases = vault.get_vault()['releases']

    # resolve the week interval from either explicit dates or the release
    if 'all' in release_name:
        start_week = release_start_week = _get_week(kwargs, 'start_date')
        end_week = release_end_week = _get_week(kwargs, 'end_date')
    else:
        release = releases[release_name]
        start_week = release_start_week = utils.timestamp_to_week(
            release['start_date'])
        end_week = release_end_week = utils.timestamp_to_week(
            release['end_date'])
    now = utils.timestamp_to_week(int(time.time())) + 1

    # widen the interval to a full year when the release span is shorter
    span = release_end_week - release_start_week
    if span < 52:
        expansion = (52 - span) // 2
        if release_end_week + expansion < now:
            end_week += expansion
        else:
            end_week = now
        start_week = end_week - 52

    # zero-initialized stats for every week in range
    weeks = range(start_week, end_week)
    week_stat_loc = dict.fromkeys(weeks, 0)
    week_stat_commits = dict.fromkeys(weeks, 0)
    week_stat_commits_hl = dict.fromkeys(weeks, 0)

    if ('commits' in metric) or ('loc' in metric):
        loc_of = lambda rec: rec.loc
    else:
        loc_of = lambda rec: 0

    if 'person-day' in metric:
        # man-day effort: collect distinct users per day, fold into weeks
        users_by_day_release = collections.defaultdict(set)
        users_by_day_all = collections.defaultdict(set)
        for record in records:
            if not (start_week <= record.week < end_week):
                continue
            day = utils.timestamp_to_day(record.date)
            user_id = record.user_id
            if record.release == release_name:
                users_by_day_release[day].add(user_id)
            users_by_day_all[day].add(user_id)
        for day, users in six.iteritems(users_by_day_release):
            week = utils.timestamp_to_week(day * 24 * 3600)
            week_stat_commits_hl[week] += len(users)
        for day, users in six.iteritems(users_by_day_all):
            week = utils.timestamp_to_week(day * 24 * 3600)
            week_stat_commits[week] += len(users)
    else:
        members_metric = 'members' in metric
        for record in records:
            week = record.week
            if not (start_week <= week < end_week):
                continue
            week_stat_loc[week] += loc_of(record)
            week_stat_commits[week] += 1
            if members_metric:
                if record.date >= start_date:
                    week_stat_commits_hl[week] += 1
            elif record.release == release_name:
                week_stat_commits_hl[week] += 1

    # for the "all" pseudo-release the highlight equals the total
    if release_name == 'all' and 'members' not in metric:
        week_stat_commits_hl = week_stat_commits

    # serialize into [week-string, value] pairs for the timeline plugin
    array_loc = []
    array_commits = []
    array_commits_hl = []
    for week in weeks:
        label = utils.week_to_date(week)
        array_loc.append([label, week_stat_loc[week]])
        array_commits.append([label, week_stat_commits[week]])
        array_commits_hl.append([label, week_stat_commits_hl[week]])
    return [array_commits, array_commits_hl, array_loc]
def timeline(records, **kwargs):
    """Build weekly series for the timeline plugin.

    Returns [array_commits, array_commits_hl, array_loc], each a list of
    [week-date-string, value] pairs covering the resolved week interval.
    """
    metric = parameters.get_parameter(kwargs, 'metric')
    start_date = int(
        parameters.get_single_parameter(kwargs, 'start_date') or 0)
    release_name = parameters.get_single_parameter(kwargs, 'release') or 'all'
    releases = vault.get_vault()['releases']

    # find start and end dates
    if 'all' in release_name:
        start_week = release_start_week = _get_week(kwargs, 'start_date')
        end_week = release_end_week = _get_week(kwargs, 'end_date')
    else:
        release = releases[release_name]
        start_week = release_start_week = utils.timestamp_to_week(
            release['start_date'])
        end_week = release_end_week = utils.timestamp_to_week(
            release['end_date'])
    now = utils.timestamp_to_week(int(time.time())) + 1

    # expand start-end to year if needed
    if release_end_week - release_start_week < 52:
        expansion = (52 - (release_end_week - release_start_week)) // 2
        if release_end_week + expansion < now:
            end_week += expansion
        else:
            end_week = now
        start_week = end_week - 52

    # empty stats for all weeks in range
    weeks = range(start_week, end_week)
    week_stat_loc = dict((c, 0) for c in weeks)
    week_stat_commits = dict((c, 0) for c in weeks)
    week_stat_commits_hl = dict((c, 0) for c in weeks)

    if ('commits' in metric) or ('loc' in metric):
        handler = lambda record: record['loc']
    else:
        handler = lambda record: 0

    # fill stats with the data
    if 'person-day' in metric:
        # special case for man-day effort metric
        release_stat = collections.defaultdict(set)
        all_stat = collections.defaultdict(set)
        for record in records:
            # 1. commit is attributed with the date of the merge, which is
            #    not an effort of the author (covered by patch records)
            # 2. registration on openstack.org is not an effort
            if record['record_type'] in ['commit', 'member']:
                continue
            # bounds check is O(1); "record['week'] not in weeks" scanned
            # the materialized week sequence linearly for every record
            if not (start_week <= record['week'] < end_week):
                continue
            day = utils.timestamp_to_day(record['date'])
            user = vault.get_user_from_runtime_storage(record['user_id'])
            if record['release'] == release_name:
                release_stat[day].add(user['seq'])
            all_stat[day].add(user['seq'])
        for day, users in six.iteritems(release_stat):
            week = utils.timestamp_to_week(day * 24 * 3600)
            week_stat_commits_hl[week] += len(users)
        for day, users in six.iteritems(all_stat):
            week = utils.timestamp_to_week(day * 24 * 3600)
            week_stat_commits[week] += len(users)
    else:
        for record in records:
            week = record['week']
            # same O(1) interval check instead of "week in weeks"
            if start_week <= week < end_week:
                week_stat_loc[week] += handler(record)
                week_stat_commits[week] += 1
                if 'members' in metric:
                    if record['date'] >= start_date:
                        week_stat_commits_hl[week] += 1
                else:
                    if record['release'] == release_name:
                        week_stat_commits_hl[week] += 1

    # for the "all" pseudo-release the highlight equals the total
    if 'all' == release_name and 'members' not in metric:
        week_stat_commits_hl = week_stat_commits

    # form arrays in format acceptable to timeline plugin
    array_loc = []
    array_commits = []
    array_commits_hl = []
    for week in weeks:
        week_str = utils.week_to_date(week)
        array_loc.append([week_str, week_stat_loc[week]])
        array_commits.append([week_str, week_stat_commits[week]])
        array_commits_hl.append([week_str, week_stat_commits_hl[week]])
    return [array_commits, array_commits_hl, array_loc]