def load_stats_cache(self):
        """Load previously calculated day/week/remaining stats from the JSONL cache files.

        Returns:
            tuple: (stats_days, stats_weeks, stats_remaining) as loaded by
            Files.jsonl_load (None/empty handling is whatever that helper does).
        """
        self.log.info('Loading previously calculated stats from cache')
        cache_paths = (
            self.config.filepath_stats_days,
            self.config.filepath_stats_weeks,
            self.config.filepath_stats_remaining,
        )
        # One Files(...) helper per load, mirroring the original call pattern
        stats_days, stats_weeks, stats_remaining = (
            Files(self.log).jsonl_load(path) for path in cache_paths
        )
        return stats_days, stats_weeks, stats_remaining
    def main(self):
        """Guesstimate efforts to completion, add result to a log file.

        Uses the most recent week's rolling averages (and the current week's
        velocity) to project how many working days remain, then merges the
        projection into the stats_remaining JSONL cache (replacing today's
        entry if present) and rewrites that file.

        Returns:
            collections.OrderedDict: per-date remaining-work projections,
            keyed by 'YYYY-MM-DD', most recent first.
        """
        self.log.info('Guesstimating days to completion')
        # Hoist the metric name: it is used many times below
        metric = self.config.get_config_value('stats_metric')

        # Most recent week = first entry of the (ordered) weekly stats dict
        current_week = next(iter(self.__stats_weeks.values()))

        date_current = self.time.get_current_date()

        remaining = {
            metric: self.remaining_work[metric],
            'datetime': date_current,
            'types': self.remaining_work['types'],
            'assignees': self.remaining_work['assignees'],
            'days_to_completion': {},
        }
        self.log.info('Remaining number of story ' + metric + ': ' + str(remaining[metric]))

        # Project a completion estimate from each rolling-average window
        # (averages are weekly; /5 converts to a per-working-day rate)
        for week_stat in current_week['stats']:
            avg_elements_per_day = float(current_week['stats'][week_stat]['avg']) / 5
            if avg_elements_per_day > 0:
                remaining_days = round(float(remaining[metric]) / float(avg_elements_per_day), 1)
            else:
                remaining_days = 0
            self.log.info('Over period: ' + str(week_stat) + ', average ' + metric + ' per day was: ' + str(
                avg_elements_per_day) + ' should completed in: ' + str(remaining_days) + ' days')
            remaining['days_to_completion'][week_stat] = remaining_days

        # Current-week velocity. Guard against a zero rate (e.g. nothing
        # completed yet this week) the same way the loop above does; the
        # original divided unconditionally and could raise ZeroDivisionError.
        current_element_per_day = current_week[metric] / current_week['days']
        if current_element_per_day > 0:
            remaining_days = round(remaining[metric] / current_element_per_day, 1)
        else:
            remaining_days = 0
        remaining['days_to_completion']['current'] = remaining_days
        self.log.info('This week, average ' + metric + ' per day is: ' + str(
            current_element_per_day) + ' should completed in: ' + str(remaining_days) + ' days')

        # Load previous data and add current day (if already there, replace)
        daily_data = collections.OrderedDict()
        dict_idx = date_current.strftime('%Y-%m-%d')
        daily_data[dict_idx] = remaining

        current_data = Files(self.log).jsonl_load(self.config.filepath_stats_remaining)

        # Keep every cached day except today's (today was just replaced above)
        for current_day_data in current_data:
            if date_current.strftime('%Y-%m-%d') != current_data[current_day_data]['datetime'].strftime('%Y-%m-%d'):
                dict_idx = current_data[current_day_data]['datetime'].strftime('%Y-%m-%d')
                daily_data[dict_idx] = current_data[current_day_data]

        # Rewrite the cache file from scratch
        if os.path.isfile(self.config.filepath_stats_remaining):
            os.remove(self.config.filepath_stats_remaining)

        for currentdata in daily_data:
            # Deep-copy so the ISO-serialized datetime does not leak into the
            # in-memory structure we return
            daily_obj = copy.deepcopy(daily_data[currentdata])
            daily_obj['datetime'] = daily_data[currentdata]['datetime'].isoformat()
            self.log.info('StatsRemaining.main(): Writing stats for date: ' + daily_obj['datetime'])
            Files(self.log).jsonl_append(self.config.filepath_stats_remaining, daily_obj)

        return daily_data
# Exemple #3
# 0
    def load_jira_cache(self):
        """Read the cached daily completion data and remaining work from disk.

        Logs an error and exits the process when either cache is missing,
        directing the user to run 'jav load' first.

        Returns:
            tuple: (daily_data, remaining_work) loaded from the cache files.
        """
        self.log.info('Load daily data and remaining work from cache')
        # Pull previously cached data (if any) back into Python objects
        daily_data = Files(self.log).jsonl_load(self.config.filepath_data_completion)
        remaining_work = Files(self.log).json_load(self.config.filepath_data_remaining)

        # Either cache missing means 'jav load' was never run: bail out
        if daily_data is None or remaining_work is None:
            self.log.error(
                'Unable to load cached data, please run \'jav load\' first')
            exit()

        return daily_data, remaining_work
# Exemple #4
# 0
    def refresh_jira_cache(self):
        """Refresh the local data cache from Jira.

        Loads previously cached daily data, asks the importer to top it up
        for the configured date window, persists the merged result, then
        fetches the remaining work from Jira.

        Returns:
            tuple: (daily_data, remaining_work) after the refresh.
        """
        self.log.info('Load updated data from Jira into Cache')

        date_start = self.time.get_current_date()
        date_end = self.time.get_end_date()
        self.log.info('Load.main(): Start Date: ' +
                      date_start.strftime('%Y-%m-%d'))
        self.log.info('Load.main(): End Date: ' +
                      date_end.strftime('%Y-%m-%d'))

        importer = ImportData(self.log, self.config)

        # Merge any previously cached days with freshly pulled ones
        cached_days = Files(self.log).jsonl_load(self.config.filepath_data_completion)
        daily_data = importer.refresh_dailydata_cache(cached_days, date_start, date_end)

        # Persist the merged data, replacing the existing cache file
        importer.write_dailydata_cache(daily_data)

        # Finally ask Jira how much work is left
        remaining_work = importer.get_remaining_work()

        return daily_data, remaining_work
# Exemple #5
# 0
    def main(self):
        """This hacky function publishes chart to github pages.

        Sequence: normalize the local git path, clone or update the local
        checkout, make sure the configured branch exists and is selected,
        copy the generated chart's index.html into the repo, then commit
        and push. Exits the process if the chart file cannot be found.
        """
        self.log.info('Publishing Website')
        self.log.info('Git local path: ' +
                      self.config.get_config_value('git_localpath'))
        # Normalize the configured local path before any git operation
        self.config.set_config_value(
            'git_localpath',
            Files.prep_path(self.config.get_config_value('git_localpath')))

        # Ensure a usable local clone: clone when absent, otherwise pull
        # (stashing local changes when the pull does not succeed)
        if not self.github.is_folder_git_repo():
            self.log.info('Cloning Git Repository: ' +
                          self.config.get_config_value('git_repo'))
            self.github.git_clone()
        else:
            self.log.info(
                'Repository already available on local filesystem: ' +
                self.config.get_config_value('git_repo'))
            if self.github.git_pull() is not True:
                self.log.info('Git Stash')
                self.github.git_stash()

        # Test if requested branch exists, if not, create
        # Then switch to that branch
        remote_branches = self.github.get_branches()
        if not any(
                self.config.get_config_value('git_branch') in s
                for s in remote_branches):
            # Init branch
            self.log.warning(
                'The remote repository does not contain branch: ' +
                self.config.get_config_value('git_branch'))
            self.github.git_checkout_create(
                self.config.get_config_value('git_branch'))
            self.github.git_pull_branch(
                self.config.get_config_value('git_branch'), True)
            self.github.git_push_branch(
                self.config.get_config_value('git_branch'))
        elif any('* ' + self.config.get_config_value('git_branch') in s
                 for s in remote_branches):
            # A '* ' prefix in `git branch` output marks the checked-out branch
            self.log.info('Local repository already selected branch: ' +
                          self.config.get_config_value('git_branch'))
        else:
            self.log.info('Switching to branch: ' +
                          self.config.get_config_value('git_branch'))
            self.github.git_checkout_f(
                self.config.get_config_value('git_branch'))

        # Next do one last pull and copy content
        # NOTE(review): this calls git_stash, not git_pull, despite the
        # comment above — confirm which was intended
        self.github.git_stash()

        # Then copy chart to requested directory
        if not os.path.isfile(self.config.filepath_charts + 'index.html'):
            self.log.error('Unable to locate source chart file at: ' +
                           self.config.filepath_charts + 'index.html')
            exit()

        dst_path = Files.prep_path(
            self.config.get_config_value('git_localpath') +
            self.config.get_config_value('git_pathdirectory'))
        self.log.info('Preparing to copy chart file to: ' + dst_path)
        shutil.copyfile(self.config.filepath_charts + 'index.html',
                        dst_path + 'index.html')
        self.log.info('Chart file copied')
        # Stage everything, commit with a timestamped message, and publish
        self.github.git_add('--all')
        self.github.git_commit('Copied chart file - ' +
                               self.time.get_current_date().isoformat())
        self.github.git_push()
        self.log.info('Chart pushed to: ' +
                      self.config.get_config_value('git_pageurl'))
    def main(self):
        """Aggregate the daily data into per-week stats and cache them as JSONL.

        Pass 1 buckets each day's metric value into its '%Y.%W' week and
        records the week's day count and metric total. Pass 2 derives, for
        every week, avg/min/max of the weekly totals of the weeks that come
        *after* it in iteration order ('all' plus each configured rolling
        window). The result is written to filepath_stats_weeks with
        datetimes serialized to ISO strings.

        Returns:
            dict: self.weeks_data keyed by week string ('%Y.%W').
        """
        self.log.info('Calculate weekly stats throughout the captured period')

        # First pass, calculate weekly values
        for current_day in self.daily_data:
            week_txt = self.daily_data[current_day]['datetime'].strftime(
                '%Y.%W')
            if week_txt not in self.weeks_data:
                # First day seen for this week: create the bucket
                self.weeks_data[week_txt] = {
                    'values': [],
                    'datetime': self.daily_data[current_day]['datetime'],
                    'weektxt': week_txt,
                    'stats': {}
                }
            self.weeks_data[week_txt]['values'].append(
                self.daily_data[current_day][self.config.get_config_value(
                    'stats_metric')])
            # 'days' = number of captured days in the week;
            # the metric key holds the week's running total
            self.weeks_data[week_txt]['days'] = len(
                self.weeks_data[week_txt]['values'])
            self.weeks_data[week_txt][self.config.get_config_value(
                'stats_metric')] = sum(self.weeks_data[week_txt]['values'])

        # Second pass, get min and max since 'beginning of time'
        for week_txt in self.weeks_data:
            # Daily values are no longer needed once the total is computed
            del self.weeks_data[week_txt]['values']
            week_found = False
            # week_found flips once the scan reaches week_txt, so stats
            # aggregate the weeks AFTER it in dict iteration order —
            # presumably chronological via insertion order; TODO confirm
            # daily_data ordering upstream
            for scan_week in self.weeks_data:
                if week_found is True:
                    if 'all' not in self.weeks_data[week_txt]['stats']:
                        self.weeks_data[week_txt]['stats']['all'] = {
                            'values': []
                        }
                    self.weeks_data[week_txt]['stats']['all']['values'].append(
                        self.weeks_data[scan_week][
                            self.config.get_config_value('stats_metric')])
                    # Stats recomputed after each append; final iteration wins
                    self.weeks_data[week_txt]['stats']['all']['avg'] = int(
                        numpy.mean(self.weeks_data[week_txt]['stats']['all']
                                   ['values']))
                    self.weeks_data[week_txt]['stats']['all']['max'] = max(
                        self.weeks_data[week_txt]['stats']['all']['values'])
                    self.weeks_data[week_txt]['stats']['all']['min'] = min(
                        self.weeks_data[week_txt]['stats']['all']['values'])
                    for week_idx in self.config.get_config_value(
                            'rolling_stats'):
                        # A rolling window only absorbs values while the
                        # 'all' list is still within the window's size
                        if week_idx == 'all' or len(
                                self.weeks_data[week_txt]['stats']['all']
                            ['values']) <= week_idx:
                            if week_idx not in self.weeks_data[week_txt][
                                    'stats']:
                                self.weeks_data[week_txt]['stats'][
                                    week_idx] = {
                                        'values': []
                                    }
                            self.weeks_data[week_txt]['stats'][week_idx][
                                'values'].append(self.weeks_data[scan_week][
                                    self.config.get_config_value(
                                        'stats_metric')])
                            self.weeks_data[week_txt]['stats'][week_idx][
                                'avg'] = int(
                                    numpy.mean(self.weeks_data[week_txt]
                                               ['stats'][week_idx]['values']))
                            self.weeks_data[week_txt]['stats'][week_idx][
                                'max'] = max(self.weeks_data[week_txt]['stats']
                                             [week_idx]['values'])
                            self.weeks_data[week_txt]['stats'][week_idx][
                                'min'] = min(self.weeks_data[week_txt]['stats']
                                             [week_idx]['values'])

                if scan_week == week_txt:
                    week_found = True

        # Then write content to a JSONL file
        if os.path.isfile(self.config.filepath_stats_weeks):
            os.remove(self.config.filepath_stats_weeks)

        for week_txt in self.weeks_data:
            # Drop the raw value lists before serializing
            if 'all' in self.weeks_data[week_txt]['stats']:
                del self.weeks_data[week_txt]['stats']['all']['values']
            for week_idx in self.config.get_config_value('rolling_stats'):
                if week_idx in self.weeks_data[week_txt]['stats']:
                    del self.weeks_data[week_txt]['stats'][week_idx]['values']
            # Deep-copy so the ISO string does not replace the in-memory datetime
            week_obj = copy.deepcopy(self.weeks_data[week_txt])
            week_obj['datetime'] = self.weeks_data[week_txt][
                'datetime'].isoformat()

            Files(self.log).jsonl_append(self.config.filepath_stats_weeks,
                                         week_obj)

        return self.weeks_data
    def main(self):
        """Aggregate the daily data into per-day stats and cache them as JSONL.

        For every captured day, collects the metric values of the days that
        follow it in iteration order, both across any weekday ('anyday') and
        restricted to the same weekday ('sameday'), over the full history
        ('all') and each configured rolling window. Results are written to
        filepath_stats_days with datetimes serialized to ISO strings.

        Fix: per-window avg/min/max are now computed from the window's own
        'values' list; they previously read the 'all' list by mistake
        (the analogous weekly-stats pass uses the window list).

        Returns:
            dict: self.days keyed by day string ('%Y%m%d').
        """
        self.log.info('Calculate daily stats throughout the captured period')

        for current_day in self.daily_data:
            day_txt = self.daily_data[current_day]['datetime'].strftime('%Y%m%d')
            self.days[day_txt] = {
                'datetime': self.daily_data[current_day]['datetime']
                , 'daytxt': self.daily_data[current_day]['datetime'].strftime('%A')
                , 'points': self.daily_data[current_day]['points']
                , 'tickets': self.daily_data[current_day]['tickets']
                , 'anyday': {'all': {'values': []}}
                , 'sameday': {'all': {'values': []}}
            }
            # Pre-create the rolling-window buckets for both stat types
            for week_idx in self.config.get_config_value('rolling_stats'):
                if week_idx not in self.days[day_txt]['sameday']:
                    self.days[day_txt]['anyday'][week_idx] = {'values': []}
                    self.days[day_txt]['sameday'][week_idx] = {'values': []}

            day_found = False
            # day_found flips once the scan reaches current_day, so only
            # later days (in iteration order) feed this day's stats
            for scan_day in self.daily_data:
                if day_found:
                    for stats_type in ['anyday', 'sameday']:
                        same_weekday = False
                        if stats_type == 'anyday':
                            self.days[day_txt][stats_type]['all']['values'].append(
                                self.daily_data[scan_day][self.config.get_config_value('stats_metric')])
                        elif self.daily_data[current_day]['datetime'].strftime('%A') == self.daily_data[scan_day][
                            'datetime'].strftime('%A') and stats_type == 'sameday':
                            same_weekday = True
                            self.days[day_txt][stats_type]['all']['values'].append(
                                self.daily_data[scan_day][self.config.get_config_value('stats_metric')])

                        # Stats recomputed after each append; final scan wins
                        if self.days[day_txt][stats_type]['all']['values']:
                            self.days[day_txt][stats_type]['all']['avg'] = int(
                                numpy.mean(self.days[day_txt][stats_type]['all']['values']))
                            self.days[day_txt][stats_type]['all']['min'] = min(
                                self.days[day_txt][stats_type]['all']['values'])
                            self.days[day_txt][stats_type]['all']['max'] = max(
                                self.days[day_txt][stats_type]['all']['values'])

                        for week_idx in self.config.get_config_value('rolling_stats'):
                            # Window membership is bounded by the size of the
                            # 'all' list: week_idx weeks ~ week_idx*5 weekdays
                            in_range = False
                            if stats_type == 'anyday' and len(
                                    self.days[day_txt][stats_type]['all']['values']) <= week_idx * 5:
                                in_range = True
                            elif stats_type == 'sameday' and len(
                                    self.days[day_txt][stats_type]['all']['values']) <= week_idx:
                                in_range = True
                            if (stats_type == 'anyday' and in_range is True) or (
                                            in_range is True and same_weekday is True):
                                self.days[day_txt][stats_type][week_idx]['values'].append(
                                    self.daily_data[scan_day][self.config.get_config_value('stats_metric')])
                                # Bug fix: derive window stats from the
                                # window's own values (was 'all' by mistake)
                                self.days[day_txt][stats_type][week_idx]['avg'] = int(
                                    numpy.mean(self.days[day_txt][stats_type][week_idx]['values']))
                                self.days[day_txt][stats_type][week_idx]['min'] = min(
                                    self.days[day_txt][stats_type][week_idx]['values'])
                                self.days[day_txt][stats_type][week_idx]['max'] = max(
                                    self.days[day_txt][stats_type][week_idx]['values'])

                if self.daily_data[current_day]['datetime'].date() == self.daily_data[scan_day]['datetime'].date():
                    day_found = True

        # Then write content to a JSONL file
        if os.path.isfile(self.config.filepath_stats_days):
            os.remove(self.config.filepath_stats_days)

        # Clear un-necessary array values, and write output to a JSONL file
        for current_day in self.days:
            del self.days[current_day]['anyday']['all']['values']
            del self.days[current_day]['sameday']['all']['values']
            for week_idx in self.config.get_config_value('rolling_stats'):
                for stats_type in ['anyday', 'sameday']:
                    if stats_type in self.days[current_day] and week_idx in self.days[current_day][
                        stats_type] and 'values' in self.days[current_day][stats_type][week_idx]:
                        del self.days[current_day][stats_type][week_idx]['values']

            # Deep-copy so the ISO string does not replace the in-memory datetime
            day_obj = copy.deepcopy(self.days[current_day])
            day_obj['datetime'] = self.days[current_day]['datetime'].isoformat()
            Files(self.log).jsonl_append(self.config.filepath_stats_days, day_obj)

        return self.days
 def __init__(self, log, config):
     """Store the shared logger and config, and build the helper objects.

     Args:
         log: application logger shared across the tool.
         config: configuration accessor used for settings and file paths.
     """
     self.log = log
     self.config = config
     # Private helpers: Jira client and file I/O wrapper
     self.__jira = Jira(self.log, self.config)
     self.__files = Files(self.log)