Esempio n. 1
0
def task_sync_packages_with_platform():
    """
    Sync all packages with the translation platform.

    Selects every package whose platform stats are older than six hours,
    refreshes each one in a worker thread (joined immediately, so syncs
    run one at a time), then rebuilds the releases and packages reports.
    """
    pkg_manager = PackagesManager()
    rpt_manager = ReportsManager()

    def _refresh_stats(package_name):
        # Worker body: pull fresh stats for a single package.
        pkg_manager.sync_update_package_stats(package_name)

    stale_packages = pkg_manager.get_packages().filter(
        platform_last_updated__lte=timezone.now() -
        timedelta(hours=6)).order_by('platform_url')

    for pkg in stale_packages:
        worker = threading.Thread(target=_refresh_stats,
                                  args=(pkg.package_name, ))
        worker.start()
        # Join right away; the sleep throttles calls to the remote platform.
        worker.join()
        time.sleep(4)

    logger.info("%s Packages sync'd with Translation Platform" %
                len(stale_packages))
    if rpt_manager.analyse_releases_status():
        logger.info("Releases Summary Updated")
    if rpt_manager.analyse_packages_status():
        logger.info("Packages Summary Updated")
Esempio n. 2
0
def tag_job_form(template_type):
    """
    Build the template context for the job-creation form.

    :param template_type: job template type used to filter templates;
        applied only when it is one of TS_JOB_TYPES
    :return: OrderedDict with job templates, build systems, packages
        and releases available to the form
    """
    return_value = OrderedDict()
    job_template_manager = JobTemplateManager()
    filter_kwargs = {}
    if template_type in TS_JOB_TYPES:
        filter_kwargs['job_template_type'] = template_type
    templates = job_template_manager.get_job_templates(**filter_kwargs)
    # An empty queryset is falsy, so the extra `len(templates) > 0`
    # check was redundant.
    if templates:
        if len(templates) == 1:
            # Exactly one match: pre-fill the form with that template.
            return_value['job_template'] = templates[0]
            return_value['yml_file'] = yaml.dump(
                templates[0].job_template_json,
                default_flow_style=False).replace("\'", "")
            return_value['job_params'] = templates[0].job_template_params
        return_value['job_templates'] = templates.values()
    package_manager = PackagesManager()
    release_streams = \
        package_manager.get_release_streams(
            only_active=True, fields=('product_build_system',)
        )
    # Map product slug -> its build system for the form's dropdown.
    available_build_systems = {}
    for relstream in release_streams:
        available_build_systems.update(
            {relstream.product_slug: relstream.product_build_system})
    if available_build_systems:
        return_value['build_systems'] = available_build_systems
    packages = package_manager.get_package_name_tuple(check_mapping=True)
    if packages:
        return_value['packages'] = packages
    relbranch_manager = ReleaseBranchManager()
    release_branches = relbranch_manager.get_relbranch_name_slug_tuple()
    if release_branches:
        return_value['releases'] = release_branches
    return return_value
Esempio n. 3
0
def tag_build_tags(buildsys, product):
    """
    Return an ordered context dict holding the build tags known for the
    given build system and product.
    """
    pkg_manager = PackagesManager()
    build_tags = pkg_manager.get_build_tags(buildsys=buildsys,
                                            product_slug=product)
    context = OrderedDict()
    context['build_tags'] = build_tags
    return context
Esempio n. 4
0
def tag_sync_from_coverage(stats, package, release, tag):
    """
    Build the context for triggering a sync from a coverage view.

    Only acts when *stats* is the 'Not Synced with ...' sentinel string;
    any other value (real stats, non-string) yields an empty context.

    :param stats: stats value shown in the coverage table
    :param package: package name
    :param release: release slug used to pick the branch mapping
    :param tag: build tag
    :return: OrderedDict (empty unless the sentinel matched)
    """
    return_value = OrderedDict()
    # The original used two guards (`not isinstance` then `isinstance and
    # not startswith`); one combined guard is equivalent.
    if not isinstance(stats, str) or not stats.startswith('Not Synced with'):
        return return_value
    package_manager = PackagesManager()
    release_manager = ReleaseBranchManager()
    try:
        package_details = package_manager.get_packages([package]).get()
    except Exception:
        # log event, passing for now; narrowed from a bare except so
        # KeyboardInterrupt/SystemExit are not swallowed
        pass
    else:
        branch_mapping = {}
        if package_details.release_branch_mapping_json:
            branch_mapping = package_details.release_branch_mapping_json.copy()
            if release in branch_mapping:
                # Narrow to the requested release and annotate its product.
                branch_mapping = branch_mapping.get(release)
                branch_mapping['product'] = \
                    release_manager.get_product_by_release(release).product_slug
        return_value.update(
            dict(
                mapping=branch_mapping,
                package=package,
                tag=tag,
            ))
    return return_value
Esempio n. 5
0
def tag_branch_mapping(package):
    """
    Build the context showing a package's release-branch mapping, with
    each mapped release annotated with its product slug.

    :param package: package name
    :return: OrderedDict (empty if the package lookup failed)
    """
    package_manager = PackagesManager()
    release_manager = ReleaseBranchManager()
    return_value = OrderedDict()
    try:
        package_details = package_manager.get_packages([package]).get()
    except Exception:
        # log event, passing for now; narrowed from a bare except
        pass
    else:
        branch_mapping = {}
        if package_details.release_branch_mapping_json:
            # NOTE(review): .copy() is shallow, so setting 'product' below
            # also mutates the dicts inside the model's JSON field in
            # memory — matches the original behavior; confirm intended.
            branch_mapping = package_details.release_branch_mapping_json.copy()
            # Iterate keys directly; the values were unused in the loop.
            for release in package_details.release_branch_mapping_json:
                branch_mapping[release]['product'] = \
                    release_manager.get_product_by_release(release).product_slug

        return_value.update({
            'package_name':
            package_details.package_name,
            'branch_mapping':
            branch_mapping
            if branch_mapping else package_details.release_branch_mapping_json,
            'mapping_lastupdated':
            package_details.release_branch_map_last_updated,
            'mapping_keys':
            BRANCH_MAPPING_KEYS
        })
    return return_value
Esempio n. 6
0
 def clean(self):
     """
     Validate the chosen package against the selected translation platform.

     Normalises ``platform_url`` to the URL the platform reports for the
     project, attaches a form error to ``package_name`` when the package
     is not found on the platform, and otherwise migrates sync-stats
     visibility from the previously saved platform engine to the newly
     selected one.
     """
     cleaned_data = super().clean()
     # NOTE(review): direct indexing assumes these fields passed
     # field-level validation; a missing key would raise KeyError —
     # confirm against the form's field definitions.
     package_name = cleaned_data['package_name']
     platform_slug = getattr(cleaned_data['platform_slug'], 'platform_slug',
                             None)
     platform_url = cleaned_data['platform_url']
     packages_manager = PackagesManager()
     # Ask the platform for the project's canonical URL and prefer it
     # over the user-supplied value when they differ.
     pkg_platform_url, _ = packages_manager.get_project_details(
         cleaned_data['platform_slug'], package_name)
     if not platform_url == pkg_platform_url:
         cleaned_data['platform_url'] = pkg_platform_url
     validate_package = packages_manager.validate_package(
         package_name=package_name, transplatform_slug=platform_slug)
     if not validate_package:
         self.add_error('package_name',
                        "Not found at selected translation platform")
     else:
         # Hide stats sourced from the previously saved platform engine
         # (if this form edits an existing instance) ...
         old_platform_engine = getattr(
             getattr(getattr(self, 'instance', None), 'platform_slug',
                     None), 'engine_name', None)
         if old_platform_engine:
             packages_manager.syncstats_manager.toggle_visibility(
                 package_name, stats_source=old_platform_engine)
         # ... and expose stats for the newly selected engine.
         new_platform_engine = packages_manager.get_engine_from_slug(
             platform_slug)
         if new_platform_engine:
             packages_manager.syncstats_manager.toggle_visibility(
                 package_name,
                 visibility=True,
                 stats_source=new_platform_engine)
Esempio n. 7
0
def tag_outofsync_packages():
    """
    Return counts of in-sync, out-of-sync and total packages, based on
    each package's stats_diff_health flag.
    """
    context = OrderedDict()
    pkg_manager = PackagesManager()
    packages = pkg_manager.get_packages()
    out_of_sync = [pkg.package_name for pkg in packages
                   if not pkg.stats_diff_health]
    if packages and out_of_sync:
        context["insync_packages"] = \
            (packages.count() - len(out_of_sync)) or 0
    context["outofsync_packages"] = len(out_of_sync) or 0
    context["total_packages"] = packages.count() or 0
    return context
Esempio n. 8
0
def tag_packages_summary():
    """
    Build the context for the packages-summary report: the parsed report
    JSON, its last-updated timestamp, and the total package count.

    :return: OrderedDict (empty when no 'packages' report exists)
    """
    return_value = OrderedDict()
    package_manager = PackagesManager()
    reports_manager = ReportsManager()
    packages_summary = reports_manager.get_reports('packages')
    if packages_summary:
        # Resolve the queryset once; the original called .get() twice,
        # hitting the database twice for the same row.
        summary = packages_summary.get()
        return_value.update(
            dict(
                pkgsummary=json.loads(summary.report_json_str),
                last_updated=summary.report_updated,
                package_count=package_manager.count_packages(),
            ))
    return return_value
Esempio n. 9
0
def tag_package_details(package_name, user):
    """
    Build the context for a package-details view.

    :param package_name: package to look up
    :param user: requesting user, passed through to the template
    :return: OrderedDict; empty when the package lookup failed
    """
    package_manager = PackagesManager()
    return_value = OrderedDict()
    try:
        package = package_manager.get_packages([package_name]).get()
        pkg_details = package.package_details_json
        if pkg_details and pkg_details.get('description'):
            return_value.update({'package_desc': pkg_details['description']})
    except Exception:
        # log event, passing for now; narrowed from a bare except so
        # KeyboardInterrupt/SystemExit are not swallowed
        pass
    else:
        return_value.update({'package': package, 'user': user})
    return return_value
Esempio n. 10
0
def tag_latest_builds(package):
    """
    Build the context showing a package's latest build details.

    :param package: package name
    :return: OrderedDict; empty when the package lookup failed
    """
    package_manager = PackagesManager()
    return_value = OrderedDict()
    try:
        package_details = package_manager.get_packages([package]).get()
    except Exception:
        # log event, passing for now; narrowed from a bare except
        pass
    else:
        # NOTE(review): assumes package_latest_builds_json is a dict —
        # .copy() on None would raise AttributeError here; confirm the
        # field default.
        return_value.update({
            'package_name':
            package,
            'latest_builds':
            package_details.package_latest_builds_json.copy(),
            'builds_lastupdated':
            package_details.package_latest_builds_last_updated
        })
    return return_value
Esempio n. 11
0
def tag_stats_diff(package):
    """
    Build the context of per-branch, per-language stats differences for
    a package.

    :param package: package name
    :return: OrderedDict; empty when the package lookup failed
    """
    package_manager = PackagesManager()
    return_value = OrderedDict()
    try:
        package_details = package_manager.get_packages([package]).get()
    except Exception:
        # log event, passing for now; narrowed from a bare except
        pass
    else:
        stats_diff = package_details.stats_diff_json or {}
        # The original copied each branch's {language: diff-percent} map
        # entry by entry; dict(diff) is the same shallow copy.
        langs_out_of_sync = {
            branch: dict(diff) for branch, diff in stats_diff.items()
        }
        return_value.update({
            'package_name': package_details.package_name,
            'stats_diff': langs_out_of_sync
        })
    return return_value
Esempio n. 12
0
def task_sync_packages_with_platform():
    """
    Sync all packages with the translation platform.

    Packages on the Fedora Weblate instance are refreshed on a slower
    cadence (older than 60h) than packages on every other platform
    (older than 12h). Each package syncs in its own worker thread,
    joined immediately so syncs run one at a time, with a randomized
    pause between them.
    """
    pkg_manager = PackagesManager()
    rpt_manager = ReportsManager()

    def _refresh(package_name):
        # Worker body: refresh platform stats, then latest build info.
        pkg_manager.sync_update_package_stats(package_name)
        pkg_manager.fetch_latest_builds(package_name)

    non_weblate_fedora = pkg_manager.get_packages().filter(
        platform_last_updated__lte=timezone.now() -
        timedelta(hours=12)).order_by('platform_url').exclude(
            platform_slug_id=WEBLATE_SLUGS[1])

    weblate_fedora = pkg_manager.get_packages().filter(
        platform_last_updated__lte=timezone.now() - timedelta(hours=60),
        platform_slug_id=WEBLATE_SLUGS[1]).order_by('platform_url')

    sync_queue = []
    if non_weblate_fedora:
        sync_queue.extend(non_weblate_fedora)
    if weblate_fedora:
        sync_queue.extend(weblate_fedora)

    for pkg in sync_queue:
        worker = threading.Thread(target=_refresh,
                                  args=(pkg.package_name, ))
        worker.start()
        worker.join()
        # Randomized throttle between consecutive platform calls.
        time.sleep(randrange(5, 10))

    logger.info("%s Packages sync'd with Translation Platform" %
                len(sync_queue))
    if rpt_manager.analyse_releases_status():
        logger.info("Releases Summary Updated")
    if rpt_manager.analyse_packages_status():
        logger.info("Packages Summary Updated")
Esempio n. 13
0
def tag_trending_languages():
    """
    Build the context listing up to nine trending languages for the
    latest release, plus a language -> locale lookup table.
    """
    context = OrderedDict()
    reports_manager = ReportsManager()
    releases_summary = reports_manager.get_reports('releases')
    release_manager = ReleaseBranchManager()
    latest_release = release_manager.get_latest_release()
    pkg_manager = PackagesManager()
    # Invert the (locale, language) tuples into language -> locale.
    lang_locale_dict = {}
    for locale, lang in pkg_manager.get_locale_lang_tuple():
        lang_locale_dict[lang] = locale

    if releases_summary and latest_release:
        summary_record = releases_summary.get()
        trending_languages = reports_manager.get_trending_languages(
            summary_record.report_json, *latest_release)
        if isinstance(trending_languages,
                      (list, tuple)) and trending_languages:
            context["trending_languages"] = trending_languages[:9]
            context["lang_locale_dict"] = lang_locale_dict
            context["latest_release"] = latest_release
    return context
Esempio n. 14
0
def tag_releases_summary():
    """
    Build the context for the releases-summary report: the parsed report
    keyed by release (languages sorted alphabetically), its last-updated
    timestamp, and a language -> locale lookup.

    :return: OrderedDict (empty when no 'releases' report exists)
    """
    return_value = OrderedDict()
    reports_manager = ReportsManager()
    releases_summary = reports_manager.get_reports('releases')
    if releases_summary:
        # Resolve the queryset once; the original called .get() twice,
        # hitting the database twice for the same row.
        summary = releases_summary.get()
        report = summary.report_json_str
        release_report_json = json.loads(report) if isinstance(report,
                                                               str) else {}
        pkg_manager = PackagesManager()
        lang_locale_dict = {
            lang: locale
            for locale, lang in pkg_manager.get_locale_lang_tuple()
        }
        # Sort each release's language table alphabetically for display.
        for release, release_data in release_report_json.items():
            if release_data.get('languages'):
                release_report_json[release]['languages'] = \
                    OrderedDict(sorted(release_data['languages'].items()))
        return_value.update(
            dict(relsummary=release_report_json,
                 last_updated=summary.report_updated,
                 lang_locale=lang_locale_dict))
    return return_value
Esempio n. 15
0
class PackagesManagerTest(FixtureTestCase):
    """
    Fixture-backed tests for PackagesManager: package listing, existence
    checks, adding packages, and platform-side validation.
    """

    # Shared manager under test; fixture data comes from PackageData.
    packages_manager = PackagesManager()
    fixture = db_fixture
    datasets = [PackageData]

    def test_get_packages(self):
        """
        Test get_packages
        """
        # Unfiltered call returns all fixture packages.
        packages = self.packages_manager.get_packages()
        self.assertEqual(len(packages), 4)
        package_names = [
            PackageData.package_anaconda.package_name,
            PackageData.package_ibus.package_name
        ]
        # Filtering by name list narrows the queryset to those packages,
        # in the same order as the fixture names.
        packages = self.packages_manager.get_packages(
            pkgs=package_names).values()
        self.assertEqual(len(packages), 2)
        self.assertEqual(packages[0]['package_name'],
                         PackageData.package_anaconda.package_name)
        self.assertEqual(packages[1]['package_name'],
                         PackageData.package_ibus.package_name)
        # todo: test the filtering according to params
        # params = ['package_name', 'upstream_url']
        # packages = self.packages_manager.get_packages(pkgs=['ibus'], pkg_params=params)
        # self.assertTrue(set(params).issubset(vars(packages.get()).keys()))

    def test_is_package_exist(self):
        """
        Test is_package_exist
        """
        self.assertTrue(
            self.packages_manager.is_package_exist(
                PackageData.package_anaconda.package_name))
        self.assertFalse(
            self.packages_manager.is_package_exist('otherpackage'))

    @patch('requests.get', new=mock_requests_get_add_package)
    def test_add_package(self):
        """
        Test add_package
        """
        transplatform = PlatformData.platform_zanata_fedora.platform_slug
        kwargs = {
            'package_name': 'authconfig',
            'upstream_url': 'https://github.com/jcam/authconfig',
            'transplatform_slug': transplatform,
            'release_streams': ['fedora']
        }
        # First add succeeds; a duplicate add must be rejected.
        package_added = self.packages_manager.add_package(**kwargs)
        self.assertTrue(package_added)
        self.assertTrue(self.packages_manager.is_package_exist('authconfig'))
        package_added = self.packages_manager.add_package(**kwargs)
        self.assertFalse(package_added)

    @patch('requests.get', new=mock_requests_get_validate_package)
    def test_validate_package(self):
        """
        Test validate_package
        """
        transplatform = PlatformData.platform_zanata_public.platform_slug
        package_candlepin_name = PackageData.package_candlepin.package_name
        # A known package validates to its own name; an unknown one is falsy.
        package_validated = self.packages_manager.validate_package(
            package_name=package_candlepin_name,
            transplatform_slug=transplatform)
        self.assertEqual(package_validated, package_candlepin_name)
        package_validated = self.packages_manager.validate_package(
            package_name='otherpackage', transplatform_slug=transplatform)
        self.assertFalse(package_validated)
Esempio n. 16
0
def task_sync_packages_with_build_system():
    """
    Sync all packages with the build system.

    For every package that has a release-branch mapping, runs the
    TS_JOB_TYPES[3] YML job template once per mapped
    (package, build-system, tag) candidate, recalculates the stats diff,
    then refreshes the packages, location and territory summaries.
    """

    package_manager = PackagesManager()
    graph_manager = GraphManager()
    reports_manager = ReportsManager()
    job_template_manager = JobTemplateManager()
    location_manager = GeoLocationManager()

    def _update_diff(package):
        # Best-effort recalculation of a package's translation-stats diff.
        try:
            package_stats = graph_manager.get_trans_stats_by_package(
                package.package_name)
            graph_manager.package_manager.calculate_stats_diff(
                package.package_name, package_stats,
                package.release_branch_mapping_json)
        except Exception:
            # pass for now
            pass

    def _sync_build_system(template, params):
        # Run the YML job for one candidate, unless already up to date.
        if package_manager.is_package_build_latest(params):
            return

        t_params = template.job_template_params
        if len(t_params) == len(params):
            job_data = {
                field.upper(): param
                for field, param in zip(t_params, params)
            }
            job_data.update({
                'YML_FILE':
                yaml.dump(template.job_template_json,
                          default_flow_style=False).replace("\'", "")
            })
            job_data.update({'SCRATCH': True})

            # Per-candidate sandbox directory for the job run.
            temp_path = 'false/{0}/'.format('-'.join(params))
            job_manager = YMLBasedJobManager(
                **job_data, **{
                    'params': [p.upper() for p in t_params],
                    'type': TS_JOB_TYPES[3]
                }, **{'active_user_email':
                      '*****@*****.**'}, **{'sandbox_path': temp_path},
                **{'job_log_file': temp_path + '.log'})

            try:
                if os.path.isdir(temp_path):
                    shutil.rmtree(temp_path)
                os.mkdir(temp_path)
                job_manager.execute_job()
            except Exception:
                # pass for now (the unused `as e` binding was dropped)
                pass
            finally:
                # ignore_errors guards against os.mkdir having failed
                # above, which would otherwise raise a second error here.
                shutil.rmtree(temp_path, ignore_errors=True)

    job_template = None
    all_packages = package_manager.get_packages().filter(
        release_branch_mapping__isnull=False)
    job_templates = job_template_manager.get_job_templates(
        job_template_type=TS_JOB_TYPES[3])
    if job_templates:
        job_template = job_templates.first()

    if all_packages and job_template:
        for package in all_packages:
            candidates = []
            mapping = package.release_branch_mapping_json or {}

            for release, map_dict in mapping.items():
                candidates.append((package.package_name,
                                   map_dict.get(BRANCH_MAPPING_KEYS[1]),
                                   map_dict.get(BRANCH_MAPPING_KEYS[2])))

            for candidate in candidates:
                th = threading.Thread(target=_sync_build_system,
                                      args=(
                                          job_template,
                                          candidate,
                                      ))
                th.start()
                # Joined immediately: jobs run one at a time, throttled.
                th.join()
                time.sleep(6)

            _update_diff(package)

    logger.info("%s Packages sync'd with Build System" % len(all_packages))
    if reports_manager.analyse_packages_status():
        logger.info("Packages Summary Updated")
    time.sleep(2)
    if reports_manager.refresh_stats_required_by_territory():
        logger.info("Location Summary Updated")
    time.sleep(2)
    if location_manager.save_territory_build_system_stats():
        logger.info("Territory Summary Updated")
Esempio n. 17
0
class Command(BaseCommand):
    """
    Management command: sync packages with their translation platform
    and/or the build system, selectable via --platform / --build-system
    (default: both).
    """

    help = 'Sync packages with their respective translation platform.'

    graph_manager = GraphManager()
    package_manager = PackagesManager()
    reports_manager = ReportsManager()
    job_template_manager = JobTemplateManager()
    location_manager = GeoLocationManager()

    def _sync_package(self, pkg):
        # Worker body: refresh platform stats for one package.
        self.package_manager.sync_update_package_stats(pkg)

    def sync_with_platform(self):
        """Sync packages stale for >6h with the translation platform."""
        all_packages = self.package_manager.get_packages().filter(
            platform_last_updated__lte=timezone.now() -
            timedelta(hours=6)).order_by('platform_url')
        for package in all_packages:
            th = threading.Thread(target=self._sync_package,
                                  args=(package.package_name, ))
            th.start()
            # Joined immediately: syncs run one at a time, throttled.
            th.join()
            time.sleep(2)

        self.reports_manager.analyse_releases_status()
        self.reports_manager.analyse_packages_status()

    def _update_diff(self, package):
        # Best-effort recalculation of a package's stats diff.
        try:
            package_stats = self.graph_manager.get_trans_stats_by_package(
                package.package_name)
            self.graph_manager.package_manager.calculate_stats_diff(
                package.package_name, package_stats,
                package.release_branch_mapping_json)
        except Exception:
            # pass for now
            pass

    def _sync_build_system(self, template, params):
        """Run the YML sync job for one (package, buildsys, tag) tuple."""
        if self.package_manager.is_package_build_latest(params):
            return

        t_params = template.job_template_params
        if len(t_params) == len(params):
            job_data = {
                field.upper(): param
                for field, param in zip(t_params, params)
            }
            job_data.update({
                'YML_FILE':
                yaml.dump(template.job_template_json,
                          default_flow_style=False).replace("\'", "")
            })
            job_data.update({'SCRATCH': True})

            # Per-candidate sandbox directory for the job run.
            temp_path = 'false/{0}/'.format('-'.join(params))
            job_manager = YMLBasedJobManager(
                **job_data, **{
                    'params': [p.upper() for p in t_params],
                    'type': TS_JOB_TYPES[3]
                }, **{'active_user_email':
                      '*****@*****.**'}, **{'sandbox_path': temp_path},
                **{'job_log_file': temp_path + '.log'})

            try:
                if os.path.isdir(temp_path):
                    shutil.rmtree(temp_path)
                os.mkdir(temp_path)
                job_manager.execute_job()
            except Exception:
                # pass for now (the unused `as e` binding was dropped)
                pass
            finally:
                # BUGFIX: os.rmdir() raises on a non-empty sandbox (the
                # job writes a log file inside). Use shutil.rmtree, as
                # the celery-task variant of this code already does, and
                # ignore errors in case os.mkdir itself failed above.
                shutil.rmtree(temp_path, ignore_errors=True)

    def sync_with_build_system(self):
        """Sync every mapped package with the build system via YML jobs."""
        job_template = None
        all_packages = self.package_manager.get_packages().filter(
            release_branch_mapping__isnull=False)
        job_templates = self.job_template_manager.get_job_templates(
            job_template_type=TS_JOB_TYPES[3])
        if job_templates:
            job_template = job_templates.first()

        if all_packages and job_template:
            for package in all_packages:
                candidates = []
                mapping = package.release_branch_mapping_json or {}

                for release, map_dict in mapping.items():
                    candidates.append((package.package_name,
                                       map_dict.get(BRANCH_MAPPING_KEYS[1]),
                                       map_dict.get(BRANCH_MAPPING_KEYS[2])))

                for candidate in candidates:
                    th = threading.Thread(target=self._sync_build_system,
                                          args=(
                                              job_template,
                                              candidate,
                                          ))
                    th.start()
                    th.join()
                    time.sleep(5)

                self._update_diff(package)

        self.reports_manager.analyse_packages_status()
        self.reports_manager.refresh_stats_required_by_territory()
        self.location_manager.save_territory_build_system_stats()

    def add_arguments(self, parser):
        """Register the optional --platform / --build-system flags."""
        # Named (optional) arguments
        parser.add_argument(
            '--platform',
            action='store_true',
            help='Sync packages with translation platform only.',
        )

        parser.add_argument(
            '--build-system',
            action='store_true',
            help='Sync packages with build system only.',
        )

    def handle(self, *args, **options):
        """Dispatch to platform sync, build-system sync, or both."""
        cmd_combinations_options = ['platform', 'build_system', 'default_both']

        cmd_combinations = {
            cmd_combinations_options[0]:
            self.sync_with_platform,
            cmd_combinations_options[1]:
            self.sync_with_build_system,
            cmd_combinations_options[2]:
            [self.sync_with_platform, self.sync_with_build_system]
        }

        if options.get(cmd_combinations_options[0]):
            cmd_combinations.get(cmd_combinations_options[0])()
        elif options.get(cmd_combinations_options[1]):
            cmd_combinations.get(cmd_combinations_options[1])()
        else:
            [m() for m in cmd_combinations.get(cmd_combinations_options[2])]
Esempio n. 18
0
class JobsLogManager(BaseManager):
    """
    Maintains Job Logs
    """

    package_manager = PackagesManager()

    def get_job_logs(self, remarks=None, result=None):
        """
        Fetch all job logs from the db

        :param remarks: optional job_remarks filter
        :param result: when truthy, restrict to successful jobs
        :return: queryset ordered by newest first, or None if the
            query itself failed
        """
        job_logs = None
        filters = {}
        if remarks:
            filters.update(dict(job_remarks=remarks))
        if result:
            filters.update(dict(job_result=True))
        try:
            job_logs = Job.objects.filter(
                **filters).order_by('-job_start_time')
        except Exception:
            # log event, passing for now; narrowed from a bare except
            pass
        return job_logs

    def get_job_detail(self, job_id):
        """
        Fetch just one job
        :param job_id: Job ID: uuid
        :return: Job object
        """
        job_log = None
        if not job_id:
            return job_log
        try:
            job_log = Job.objects.filter(job_uuid=job_id).first()
        except Exception:
            # log event, passing for now; narrowed from a bare except
            pass
        return job_log

    def get_joblog_stats(self):
        """
        Stats about jobs log
        :return: (jobs_count, last_ran_on, last_ran_type)
        """
        last_ran_on = None
        last_ran_type = None
        jobs_logs = self.get_job_logs()
        # get_job_logs() returns None when its query failed; the original
        # would then crash on .count() with AttributeError.
        if jobs_logs is None:
            return 0, last_ran_on, last_ran_type
        jobs_count = jobs_logs.count()
        successful_jobs = jobs_logs.filter(**{'job_result': True})
        if successful_jobs.count() > 0:
            last_ran_on = successful_jobs[0].job_end_time
            last_ran_type = successful_jobs[0].job_type
        return jobs_count, last_ran_on, last_ran_type

    def analyse_job_data(self, job_data):
        """
        Analyses job output (of a package) to emit meaningful data
        :param job_data: dict
        :return: dict or None
        """
        if not job_data:
            return
        analysable_fields = [
            'Latest Build Details', 'Calculate Translation Stats'
        ]
        fields_to_analyse = [
            field for field in job_data.keys() if field in analysable_fields
        ]

        field_heading = [
            'Language', 'Total', 'Translated', 'Fuzzy', 'Untranslated',
            'Complete'
        ]

        # The original tested `not fields_to_analyse and not
        # len(fields_to_analyse) > 0` — both halves are equivalent, so a
        # single emptiness check (with an early return) suffices.
        if not fields_to_analyse:
            return
        try:
            analysed_data = {}
            total_no_of_messages = []

            for field in fields_to_analyse:
                if field == analysable_fields[0]:
                    # 'Latest Build Details': YAML blob describing the build.
                    build_details = list(
                        job_data.get(analysable_fields[0], {}).values())
                    if build_details:
                        build_details = load(build_details[0],
                                             Loader=FullLoader)
                    if isinstance(build_details, dict):
                        analysed_data.update({
                            'meta_data': {
                                'package':
                                build_details.get('package_name', ''),
                                'nvr':
                                build_details.get('nvr', ''),
                                'built_on':
                                build_details.get('completion_time', '')
                            }
                        })
                elif field == analysable_fields[1]:
                    # 'Calculate Translation Stats': JSON embedded in the
                    # job output; parse from the first '{' onward.
                    stats_json_data = list(
                        job_data.get(analysable_fields[1], {}).values())
                    if stats_json_data:
                        data = {}
                        for json_data in stats_json_data:
                            if '{' in json_data:
                                data = load(
                                    json_data[json_data.find('{'):],
                                    Loader=FullLoader)
                        stats_json = data.get('stats', {})
                        if stats_json and isinstance(stats_json, list):

                            # NOTE(review): the `or []` fallback would
                            # break .keys() below; assumed to be a dict
                            # keyed by (locale, alias) tuples — confirm.
                            lang_id_name = self.package_manager.get_lang_id_name_dict(
                            ) or []
                            locale_lang_dict = dict(
                                self.package_manager.get_locale_lang_tuple(
                                ))
                            locale_key = 'locale'

                            processed_stats = []
                            analysed_data['stats'] = []
                            for locale, l_alias in list(
                                    lang_id_name.keys()):
                                filter_stat = []
                                # One row per language:
                                # [lang, total, translated, fuzzy,
                                #  untranslated, completion %]
                                stats_chunk = []

                                try:
                                    filter_stat = self.package_manager.filter_n_reduce_stats(
                                        locale_key, locale, l_alias,
                                        stats_json)
                                except Exception as e:
                                    self.app_logger(
                                        'ERROR',
                                        "Error while filtering stats, details: "
                                        + str(e))
                                else:
                                    filter_stat = filter_stat[0] \
                                        if isinstance(filter_stat, list) and len(filter_stat) > 0 else {}

                                stats_chunk.append(
                                    locale_lang_dict.get(locale, locale))
                                if filter_stat.get(
                                        'total'
                                ) and filter_stat.get('total') > 0:
                                    total_no_of_messages.append(
                                        filter_stat.get('total'))
                                stats_chunk.append(
                                    filter_stat.get('total', 0))
                                stats_chunk.append(
                                    filter_stat.get('translated', 0))
                                stats_chunk.append(
                                    filter_stat.get('fuzzy', 0))
                                stats_chunk.append(
                                    filter_stat.get('untranslated', 0))
                                completion_percentage = 0
                                try:
                                    completion_percentage = int(
                                        (filter_stat.get('translated', 0) *
                                         100 /
                                         filter_stat.get('total', 0)))
                                except ZeroDivisionError:
                                    pass
                                stats_chunk.append(completion_percentage)
                                if stats_chunk:
                                    # Keep only rows with at least one
                                    # non-zero numeric column.
                                    non_zero_stats = [
                                        i for i in stats_chunk[1:] if i > 0
                                    ]
                                    if non_zero_stats:
                                        processed_stats.append(stats_chunk)

                            analysed_data['stats'].extend(processed_stats)
                        analysed_data.update(dict(headings=field_heading))
        except Exception as e:
            self.app_logger(
                'ERROR',
                "Error while analysing job data, details: " + str(e))
            return
        # Differing totals across languages usually means the POT differs.
        if len(set(total_no_of_messages)) > 1:
            analysed_data[
                'pot_differ'] = "Total number of messages differ across languages."
        return analysed_data
Esempio n. 19
0
class YMLBasedJobManager(BaseManager):
    """
    Single Entry Point for
        - all YML based Jobs: syncupstream, syncdownstream, stringchange
        - this should be the base class to handle YML Jobs
    """

    # workspace directory where job artifacts (clones, downloads, logs) live
    sandbox_path = 'dashboard/sandbox/'
    job_log_file = sandbox_path + '.log'

    package_manager = PackagesManager()

    @staticmethod
    def job_suffix(args):
        """Join job param values into a suffix used to namespace log files."""
        return "-".join(args)

    def __init__(self, *args, **kwargs):
        """
        Set Job Environment here
        """
        super(YMLBasedJobManager, self).__init__(**kwargs)
        # self.params is expected to be provided via kwargs/subclass;
        # missing params contribute an empty string to the suffix
        self.suffix = self.job_suffix(
            [getattr(self, param, '') for param in self.params])
        self.job_log_file = self.sandbox_path + '.log'

    def _get_package(self):
        """Return the single Package object matching ``self.package``."""
        package_details = \
            self.package_manager.get_packages([self.package])
        return package_details.get()

    def _bootstrap(self, package=None, build_system=None):
        """
        Resolve environment details the job tasks depend on.

        :param build_system: build system name; sets ``self.hub_url``
        :param package: package name; sets upstream/platform attributes
        :raises Exception: when the release stream or package lookup fails
        """
        if build_system:
            try:
                release_streams = \
                    self.package_manager.get_release_streams(built=build_system)
                release_stream = release_streams.first()
            except Exception as e:
                self.app_logger(
                    'ERROR',
                    "Release stream could not be found, details: " + str(e))
                raise Exception(
                    'Build Server URL could NOT be located for %s.' %
                    build_system)
            else:
                self.hub_url = release_stream.product_server
        if package:
            try:
                package_detail = self._get_package()
            except Exception as e:
                self.app_logger(
                    'ERROR', "Package could not be found, details: " + str(e))
                raise Exception(
                    'Upstream URL could NOT be located for %s package.' %
                    package)
            else:
                # normalise the upstream repo URL to always end with .git
                self.upstream_repo_url = package_detail.upstream_url \
                    if package_detail.upstream_url.endswith('.git') \
                    else package_detail.upstream_url + ".git"
                # normalise the translation file extension: leading dot, lower
                t_ext = package_detail.translation_file_ext
                file_ext = t_ext if t_ext.startswith('.') else '.' + t_ext
                self.trans_file_ext = file_ext.lower()
                self.pkg_upstream_name = package_detail.upstream_name
                self.pkg_tp_engine = package_detail.platform_slug.engine_name
                self.pkg_tp_url = package_detail.platform_slug.api_url
                self.pkg_tp_auth_usr = package_detail.platform_slug.auth_login_id
                self.pkg_tp_auth_token = package_detail.platform_slug.auth_token_key
                self.pkg_branch_map = package_detail.release_branch_mapping_json

    def _save_result_in_db(self, stats_dict, build_details):
        """
        Save derived stats in db from YML Job
        :param stats_dict: translation stats calculated
        :param build_details: build details returned by the job tasks
        :raises Exception: when the package is missing or stats cannot be saved
        """
        if not self.package_manager.is_package_exist(
                package_name=self.package):
            raise Exception("Stats NOT saved. Package does not exist.")
        stats_version, stats_source = '', ''
        if self.type == TS_JOB_TYPES[2]:
            stats_version, stats_source = 'Upstream', 'upstream'
        elif self.type == TS_JOB_TYPES[3]:
            stats_version, stats_source = self.buildsys + ' - ' + self.tag, self.buildsys

        try:
            self.package_manager.syncstats_manager.save_version_stats(
                self._get_package(), stats_version, stats_dict, stats_source)
            if stats_source == 'upstream':
                self.package_manager.update_package(
                    self.package, {'upstream_last_updated': timezone.now()})
            # If its for rawhide, update downstream sync time for the package
            if getattr(self, 'tag', ''):
                self.package_manager.update_package(
                    self.package, {'downstream_last_updated': timezone.now()})
            # If invoked by system user, cache build details
            if self.active_user_email == '*****@*****.**' and \
                    self.type == TS_JOB_TYPES[3]:
                cache_params = {}
                match_params = {
                    'package_name': self._get_package(),
                    'build_system': self.buildsys,
                    'build_tag': self.tag
                }
                cache_params.update(match_params)
                latest_build = {}
                if isinstance(
                        build_details,
                        list) and build_details and len(build_details) > 0:
                    latest_build = build_details[0]
                cache_params['build_details_json_str'] = json.dumps(
                    latest_build)
                try:
                    CacheBuildDetails.objects.update_or_create(
                        package_name=self._get_package(),
                        build_system=self.buildsys,
                        build_tag=self.tag,
                        defaults=cache_params)
                except Exception as e:
                    # caching build details is best-effort; log and move on
                    # (was a silent pass, which hid cache write failures)
                    self.app_logger(
                        'ERROR',
                        "Build details could not be cached, details: " + str(e))

        except Exception as e:
            self.app_logger('ERROR',
                            "Package could not be updated, details: " + str(e))
            raise Exception('Stats could NOT be saved in db.')

    def _wipe_workspace(self):
        """
        This makes sandbox clean for a new job to run
        """
        # remove log file if exists
        if os.path.exists(self.job_log_file):
            os.remove(self.job_log_file)
        # clear everything else except python sources and rotated logs
        for file in os.listdir(self.sandbox_path):
            file_path = os.path.join(self.sandbox_path, file)
            try:
                if os.path.isdir(file_path):
                    shutil.rmtree(file_path)
                elif os.path.isfile(file_path) and not file_path.endswith('.py') \
                        and '.log.' not in file_path:
                    os.unlink(file_path)
            except Exception:
                # best-effort cleanup: a locked/missing file must not
                # prevent the job from starting
                pass

    def execute_job(self):
        """
        1. PreProcess YML and replace variables with input_values
            - Example: %PACKAGE_NAME%
        2. Parse processed YML and build Job object
        3. Select data structure for tasks and instantiate one
            - Example: Linked list should be used for sequential tasks
        3. Discover namespace and method for each task and fill in TaskNode
        4. Perform actions (execute tasks) and return responses

        :return: job UUID
        :raises Exception: on YML parse failure, job type mismatch,
            unsupported execution type, or task execution failure
        """
        self._wipe_workspace()

        # substitute %PARAM% placeholders in the YML with job params
        yml_preprocessed = YMLPreProcessor(
            self.YML_FILE,
            **{param: getattr(self, param, '')
               for param in self.params}).output

        self.job_base_dir = os.path.dirname(settings.BASE_DIR)
        yml_job = YMLJobParser(yml_stream=io.StringIO(yml_preprocessed))

        try:
            self.yml_job_name = yml_job.job_name
        except AttributeError:
            raise Exception('Input YML could not be parsed.')

        self.package = yml_job.package
        self.buildsys = yml_job.buildsys
        # tags may come as a list (take first) or a plain string
        if isinstance(yml_job.tags, list) and len(yml_job.tags) > 0:
            self.tag = yml_job.tags[0]
        elif isinstance(yml_job.tags, str):
            self.tag = yml_job.tags

        if self.type != yml_job.job_type:
            raise Exception('Selected job type differs to that of YML.')

        if (self.type == TS_JOB_TYPES[2]
                or self.type == TS_JOB_TYPES[5]) and self.package:
            self._bootstrap(package=self.package)
            self.release = yml_job.release
        elif self.type == TS_JOB_TYPES[3] and self.buildsys:
            self._bootstrap(build_system=self.buildsys)
        # for sequential jobs, tasks should be pushed to linked list
        # and output of previous task should be input for next task
        if JOB_EXEC_TYPES[0] in yml_job.execution:
            self.tasks_ds = TaskList()
        else:
            raise Exception('%s exec type is NOT supported yet.' %
                            yml_job.execution)

        # lets create a job
        job_manager = JobManager(self.type)
        if job_manager.create_job(user_email=self.active_user_email):
            self.job_id = job_manager.uuid
            job_manager.job_remarks = self.package
        # and set tasks
        tasks = yml_job.tasks
        for task in tasks:
            self.tasks_ds.add_task(task)
        # per-job log file, namespaced by suffix and job type
        log_file = self.job_log_file + ".%s.%s" % (self.suffix, self.type)
        if os.path.exists(log_file):
            os.unlink(log_file)
        action_mapper = ActionMapper(self.tasks_ds, self.job_base_dir,
                                     getattr(self, 'tag', ''),
                                     getattr(self, 'package', ''),
                                     getattr(self, 'hub_url', ''),
                                     getattr(self, 'buildsys', ''),
                                     getattr(self, 'release', ''),
                                     getattr(self, 'upstream_repo_url', ''),
                                     getattr(self, 'trans_file_ext', ''),
                                     getattr(self, 'pkg_upstream_name', ''),
                                     getattr(self, 'pkg_branch_map', {}),
                                     getattr(self, 'pkg_tp_engine', ''),
                                     getattr(self, 'pkg_tp_auth_usr', ''),
                                     getattr(self, 'pkg_tp_auth_token', ''),
                                     getattr(self, 'pkg_tp_url', ''), log_file)
        action_mapper.set_actions()
        # lets execute collected tasks
        try:
            action_mapper.execute_tasks()
        except Exception as e:
            job_manager.job_result = False
            # chain the original error so the traceback is preserved
            raise Exception(e) from e
        else:
            job_manager.output_json = action_mapper.result
            job_manager.job_params.update(
                {param: getattr(self, param, '')
                 for param in self.params})
            job_template_manager = JobTemplateManager()
            templates = job_template_manager.get_job_templates(
                **{'job_template_type': self.type})
            if templates and len(templates) > 0:
                template_obj = templates.first()
                job_manager.job_template = template_obj
            job_manager.visible_on_url = True
            job_manager.job_result = True
        finally:
            # always persist the YML and logs, clean up, and close the job;
            # SCRATCH jobs are removed instead of being kept
            job_manager.job_yml = yml_preprocessed
            job_manager.log_json = action_mapper.log
            action_mapper.clean_workspace()
            if not getattr(self, 'SCRATCH', None):
                job_manager.mark_job_finish()
            else:
                job_manager.mark_job_finish(remove=True)
            time.sleep(4)
        # if not a dry run, save results is db
        if action_mapper.result and not getattr(self, 'DRY_RUN', None):
            self._save_result_in_db(action_mapper.result, action_mapper.build)
        if os.path.exists(log_file):
            os.unlink(log_file)
        return self.job_id
Esempio n. 20
0
class GraphManager(BaseManager):
    """
    Manage graph representations

    Prepares graph-ready data (ticks, line series, pie-chart slices and
    workload tables) from translation statistics collected through the
    package and release-branch managers.
    """

    package_manager = PackagesManager()
    branch_manager = ReleaseBranchManager()

    def get_graph_rules(self, graph_rule=None, only_active=None):
        """
        Fetch graph rules from db
        :param graph_rule: rule name to filter on (optional)
        :param only_active: when truthy, restrict to rules with rule_status=True
        :return: resultset, or None if the query fails
        """
        filter_kwargs = {}
        if graph_rule:
            filter_kwargs.update(dict(rule_name=graph_rule))
        if only_active:
            filter_kwargs.update(dict(rule_status=True))

        rules = None
        try:
            rules = GraphRule.objects.filter(
                **filter_kwargs).order_by('rule_name')
        except Exception as e:
            # was a bare except that silently passed; log the failure
            self.app_logger(
                'ERROR',
                "Graph rules could not be fetched, details: " + str(e))
        return rules

    def slugify_graph_rule_name(self, suggested_name):
        """
        Slugify a suggested graph rule name
        :param suggested_name: str
        :return: slug string, or False when slugify fails
        """
        try:
            return slugify(suggested_name)
        except Exception:
            # was a bare except; keep the False sentinel for callers
            return False

    def validate_package_branch_participation(self, relbranch, packages):
        """
        This validates that packages belong to relbranch or not
        :param relbranch: release branch
        :param packages: list of packages
        :return: list of packages that DO NOT belong
        """
        if not relbranch and not packages:
            return
        # a package participates when the branch appears in its mapping
        return [
            package for package in packages
            if relbranch not in PackageBranchMapping(package).release_branches
        ]

    def validate_tags_product_participation(self, release_slug, tags):
        """
        This validates that tag belongs to release
        :param release_slug: Release Slug
        :param tags: Build Tags
        :return: List of tags that do not belong
        """
        if not release_slug and not tags:
            return
        tags_not_participate = []

        q_release = self.branch_manager.get_release_branches(
            relbranch=release_slug)
        if q_release:
            release = q_release.get()
            release_build_tags = release.product_slug.product_build_tags
            tags_not_participate = [
                tag for tag in tags if tag not in release_build_tags
            ]
        return tags_not_participate

    def add_graph_rule(self, **kwargs):
        """
        Save graph rule in db
        :param kwargs: dict of form fields describing the rule
        :return: True on success, False on failure, None on invalid input
        """
        if not kwargs.get('rule_name'):
            return

        if not kwargs.get('rule_relbranch'):
            return

        relbranch_slug = kwargs.get('rule_relbranch')
        release_branch = \
            self.branch_manager.get_release_branches(relbranch=relbranch_slug)

        if kwargs.get('tags_selection'
                      ) == "pick" and not kwargs.get('rule_build_tags'):
            # derive build tags from the branch mapping of release packages
            release_packages = \
                self.package_manager.get_relbranch_specific_pkgs(
                    relbranch_slug, fields=['release_branch_mapping']
                )

            package_tags = [
                package.release_branch_mapping_json[relbranch_slug][
                    BRANCH_MAPPING_KEYS[2]] for package in release_packages
                if package.release_branch_mapping_json
                and package.release_branch_mapping_json.get(
                    relbranch_slug, {}).get(BRANCH_MAPPING_KEYS[2])
            ]
            if package_tags:
                kwargs['rule_build_tags'] = list(set(package_tags))
            else:
                return False

        if kwargs.get(
                'lang_selection') == "pick" and not kwargs.get('rule_langs'):
            # fall back to every locale of the branch's language set
            relbranch_lang_set = self.branch_manager.get_release_branches(
                relbranch=relbranch_slug,
                fields=['language_set_slug']).first()
            locales = relbranch_lang_set.language_set_slug.locale_ids
            if locales:
                kwargs['rule_languages'] = locales
            else:
                return False
        elif kwargs.get('rule_langs'):
            kwargs['rule_languages'] = kwargs.pop('rule_langs')

        # strip form-only keys before handing kwargs to the model
        # (was a side-effect list comprehension)
        form_only_fields = [
            'tags_selection', 'lang_selection', 'rule_langs', 'rule_relbranch'
        ]
        for field in form_only_fields:
            kwargs.pop(field, None)

        try:
            kwargs['rule_release_slug'] = release_branch.get()
            kwargs['created_on'] = timezone.now()
            kwargs['rule_status'] = True
            new_rule = GraphRule(**kwargs)
            new_rule.save()
        except Exception as e:
            # was a bare except; log instead of failing silently
            # todo implement error msg handling
            self.app_logger(
                'ERROR', "Graph rule could not be saved, details: " + str(e))
            return False
        else:
            return True

    def _normalize_stats(self, stats_nested_list, index_list):
        """
        Normalize stats for a locale index and picks higher value
        :param stats_nested_list: sorted list of [index, stat] pairs,
            possibly with duplicate indexes
        :param index_list: full list of expected locale indexes
        :return: one entry per index, padded with 0.0 for missing locales
        """
        temp_index_list = []
        temp_stat_list = []
        for index, stat in stats_nested_list:
            if index not in temp_index_list:
                temp_index_list.append(index)
                temp_stat_list.append(stat)
            else:
                # duplicate index (input is sorted, so duplicates are
                # adjacent): keep the higher of the two stats
                # (was pop-then-append with a ternary expression statement)
                if stat > temp_stat_list[-1]:
                    temp_stat_list[-1] = stat
        expected_stats_list = list(zip(temp_index_list, temp_stat_list))
        if len(index_list) > len(expected_stats_list):
            # pad missing locales with 0.0 so graph axes stay aligned
            expected_stats_dict = dict(
                (k[0], k[1:]) for k in expected_stats_list)
            temp_patched_stats_list = []
            for index in index_list:
                temp_patched_stats_list.append(
                    [index, expected_stats_dict.get(index, [0.0])[0]])
            expected_stats_list = temp_patched_stats_list
        return expected_stats_list

    def _format_stats_for_default_graphs(self,
                                         locale_sequence,
                                         stats_dict,
                                         desc,
                                         prepend_source=False):
        """
        Formats stats dict for graph-ready material
        - sorting and normalization
        :param locale_sequence: ordered mapping {locale tuple: lang name}
        :param stats_dict: {version: [(locale, stat), ...]}
        :param desc: package description
        :param prepend_source: prefix the version label with the source stat
        :return: dict with 'pkg_desc', 'ticks' and 'graph_data'
        """
        stats_for_graphs_dict = OrderedDict()
        stats_for_graphs_dict['pkg_desc'] = desc
        stats_for_graphs_dict['ticks'] = \
            [[i, lang] for i, lang in enumerate(locale_sequence.values(), 0)]
        indexes = [index for index, lang in stats_for_graphs_dict['ticks']]

        graph_data_dict = {}
        for version, stats_lists in stats_dict.items():
            new_stats_list = []
            for stats_tuple in stats_lists:
                # locate the locale in the sequence, tolerating '-' vs '_'
                # separator differences between aliases
                index = [
                    i
                    for i, locale_tuple in enumerate(list(locale_sequence), 0)
                    if (stats_tuple[0] in locale_tuple) or (
                        stats_tuple[0].replace('-', '_') in locale_tuple) or (
                            stats_tuple[0].replace('_', '-') in locale_tuple)
                ]
                if index:
                    index.append(stats_tuple[1] or 0.0)
                    new_stats_list.append(index)
                if prepend_source:
                    if stats_tuple[0] == 'source' and stats_tuple[
                            1] not in version.lower():
                        version = "{0} - {1}".format(stats_tuple[1], version)
            normalized_stats = self._normalize_stats(sorted(new_stats_list),
                                                     indexes)
            # drop versions whose stats are all zero
            if len(list(filter(lambda x: x[1] > 0.0, normalized_stats))) > 0:
                graph_data_dict[version] = normalized_stats
        stats_for_graphs_dict['graph_data'] = OrderedDict(
            sorted(graph_data_dict.items()))
        return stats_for_graphs_dict

    def get_trans_stats_by_package(self, package, prepend_source=False):
        """
        formats stats of a package for all enabled languages
        :param package: str
        :param prepend_source: boolean
        :return: Graph data for "Package-wise" view: dict
        """
        if not package:
            return {}
        lang_id_name, stats_dict, pkg_desc = self.package_manager.get_trans_stats(
            package)
        # format trans_stats_list for graphs
        return self._format_stats_for_default_graphs(lang_id_name, stats_dict,
                                                     pkg_desc, prepend_source)

    def _format_stats_for_lang_wise_graphs(self, input_locale, locale_sequence,
                                           stats_dict, desc):
        """
        Formats stats dict for bar graph-ready material
        :param input_locale: locale the graph is restricted to
        :param locale_sequence: ordered mapping {locale tuple: lang name}
        :param stats_dict: {version: [(lang, stat), ...]}
        :param desc: package description
        :return: dict with 'pkg_desc', 'ticks' and 'graph_data'
        """
        stats_for_graphs_dict = OrderedDict()
        stats_branches = list(stats_dict.keys())
        stats_for_graphs_dict['pkg_desc'] = desc

        stats_for_graphs_dict['ticks'] = \
            [[i, version] for i, version in enumerate(stats_branches, 0)]

        stats_for_graphs_dict['graph_data'] = []
        for version, stats_lists in stats_dict.items():
            index = stats_branches.index(version)
            required_stat = 0.0
            for locale_tuple in list(locale_sequence.keys()):
                if input_locale in locale_tuple:
                    locale, alias = locale_tuple
                    # match either the locale or its alias ('-' vs '_')
                    expected_stat = [
                        stat for lang, stat in stats_lists
                        if lang.replace('-', '_') == locale
                        or lang.replace('-', '_') == alias
                    ]
                    required_stat = expected_stat[0] if expected_stat and len(
                        expected_stat) > 0 else 0.0
            stats_for_graphs_dict['graph_data'].append([required_stat, index])

        return stats_for_graphs_dict

    def get_stats_by_pkg_per_lang(self, package, locale):
        """
        formats stats of a package for given locale
        :param package: str
        :param locale: str
        :return: Graph data for "Language-wise" view: dict
        """
        if not package and not locale:
            return {}
        lang_id_name, stats_dict, pkg_desc = self.package_manager.get_trans_stats(
            package)
        # format stats for lang-wise graph
        return self._format_stats_for_lang_wise_graphs(locale, lang_id_name,
                                                       stats_dict, pkg_desc)

    def get_trans_stats_by_rule(self, graph_rule):
        """
        formats stats of a graph rule
        :param: graph_rule: str
        :return: (rule stats dict, no. of packages, no. of locales,
                  no. of tags, release slug id)
        """
        rule_data = {}
        # .get() raises if the rule does not exist
        rule = self.get_graph_rules(graph_rule=graph_rule).get()

        packages = rule.rule_packages
        locales = rule.rule_languages
        tags = rule.rule_build_tags
        release = rule.rule_release_slug_id

        if rule:
            rule_data = self.package_manager.get_trans_stats_by_rule(rule)

        return rule_data, len(packages), len(locales), len(tags), release

    def _consolidate_branch_specific_stats(self, packages_stats_dict):
        """
        Sum up stats per language
        :param packages_stats_dict: {package: [(lang, stat), ...]}
        :return: sorted list of (lang, remaining %age) tuples
        """
        temp_stats_dict = {}
        pkgs_stats_list = list(packages_stats_dict.values())
        pkgs_length = len(pkgs_stats_list)
        for pkg_stats in pkgs_stats_list:
            for pkg_stat in pkg_stats:
                if pkg_stat[0] not in temp_stats_dict:
                    temp_stats_dict[pkg_stat[0]] = pkg_stat[1]
                else:
                    temp_stats_dict[pkg_stat[0]] += pkg_stat[1]
        # Reverse stats to depict how much is left
        return sorted([(i, 100 - int(j / pkgs_length))
                       for i, j in temp_stats_dict.items()])

    def _format_data_for_pie_chart(self, consolidated_stats, lang_options):
        """
        Takes consolidated stats and formats for pie chart
        :param consolidated_stats: list of (lang, stat) tuples
        :param lang_options: list of (locale, language) tuples
        :return: dict with 'graph_data' and 'select_options'
        """
        formatted_stats = []
        formatted_langs = []
        for lang, stat in consolidated_stats:
            formatted_stats.append({'label': lang, 'data': stat})
        for locale, language in lang_options:
            formatted_langs.append({'value': locale, 'text': language})
        return {
            'graph_data': formatted_stats,
            'select_options': sorted(formatted_langs, key=itemgetter('text'))
        }

    def _get_branch_specific_pkgs_stats(self, relbranch):
        """
        Generates translation stats of all packages in all langs of attached lang-set
        :param relbranch: release branch slug
        :return: OrderedDict {package: [(lang, stat), ...]}
        """
        specific_pkgs = [
            package.package_name
            for package in self.package_manager.get_relbranch_specific_pkgs(
                relbranch, ['package_name'])
        ]
        all_pkgs_stats_dict = OrderedDict()
        for pkg in specific_pkgs:
            pkg_lang_stats = []
            locale_seq, trans_stats_dict, pkg_desc = \
                self.package_manager.get_trans_stats(pkg, apply_branch_mapping=True, specify_branch=relbranch)
            t_stats = self._format_stats_for_default_graphs(
                locale_seq, trans_stats_dict, pkg_desc)
            branch_stats = t_stats.get('graph_data').get(relbranch)
            langs = t_stats.get('ticks')
            if branch_stats and langs:
                # pair language labels with their stats values
                pkg_lang_stats.extend(
                    list(
                        zip([lang[1] for lang in langs],
                            [stat[1] for stat in branch_stats])))
            all_pkgs_stats_dict[pkg] = pkg_lang_stats
        return all_pkgs_stats_dict

    def get_workload_graph_data(self, release_branch):
        """
        Build or generates workload graph data
        :param release_branch: str
        :return: pie-chart-ready dict
        """
        consolidated_stats = self._consolidate_branch_specific_stats(
            self._get_branch_specific_pkgs_stats(release_branch))
        # get branch specific languages for select option
        locale_lang_tuple = self.package_manager.get_locale_lang_tuple(
            locales=self.package_manager.get_relbranch_locales(release_branch))
        return self._format_data_for_pie_chart(consolidated_stats,
                                               locale_lang_tuple)

    def _process_workload_combined_view(self, packages_stats_dict, headers):
        """
        Reduce per-locale stats of each package into one summary row
        :param packages_stats_dict: {package: {locale: stats dict}}
        :param headers: workload table headers
        :return: OrderedDict {package: reduced stats dict}
        """
        stats_summary_dict = OrderedDict()
        for package, locale_stats in packages_stats_dict.items():
            reduced_stats = {}
            try:
                # element-wise sum of all locale stat dicts
                reduced_stats = functools.reduce(
                    lambda x, y: dict(Counter(x) + Counter(y)),
                    list(locale_stats.values()))
            except Exception as e:
                # log error, pass for now
                pass
            if not reduced_stats:
                for field in headers:
                    reduced_stats[field] = 0
            try:
                if not reduced_stats.get('Untranslated'):
                    reduced_stats['Untranslated'] = 0
                # .get default of 0 deliberately routes a missing 'Total'
                # into the ZeroDivisionError branch below
                reduced_stats[headers[3]] = \
                    (reduced_stats['Untranslated'] /
                     reduced_stats.get('Total', 0) * 100)
            except ZeroDivisionError:
                # log error, pass for now
                pass
            stats_summary_dict[package] = reduced_stats
        return stats_summary_dict

    def get_workload_estimate(self, release_branch, locale=None):
        """
        Build list of packages with translation workload for a given branch
        :param release_branch: str
        :param locale: optional locale to restrict the estimate to
        :return: (headers, OrderedDict sorted by remaining work desc)
        """
        headers = WORKLOAD_HEADERS
        pkg_stats = self.package_manager.get_release_specific_package_stats(
            release_branch=release_branch)

        required_stats_dict = {}
        if not locale:
            required_stats_dict = self._process_workload_combined_view(
                pkg_stats, headers)
        elif isinstance(locale, str):
            for pkg, locale_stat in pkg_stats.items():
                required_stats_dict[pkg] = locale_stat.get(locale) or {
                    header: 0
                    for header in headers
                }
        return headers, OrderedDict(
            sorted(required_stats_dict.items(),
                   key=lambda x: x[1]['Remaining'],
                   reverse=True))

    def get_workload_combined(self, release_branch):
        """
        Build list of packages with translation workload for a given branch in all languages
        """
        return self.get_workload_estimate(release_branch)

    def get_workload_detailed(self, release_branch):
        """
        Build translation workload percentage for a given branch in all languages
        :param release_branch: str
        :return: (sorted language headers, OrderedDict {package: stats list})
        """
        relbranch_packages_stats = self._get_branch_specific_pkgs_stats(
            release_branch)
        locale_lang_tuple = self.package_manager.get_locale_lang_tuple(
            locales=self.package_manager.get_relbranch_locales(release_branch))
        headers = sorted([lang for locale, lang in locale_lang_tuple])
        # Format data to fill table
        workload_combined = OrderedDict()
        for package, lang_stats in relbranch_packages_stats.items():
            temp_stat_list = []
            for lang_stat_tuple in lang_stats:
                temp_stat_list.insert(headers.index(lang_stat_tuple[0]),
                                      lang_stat_tuple[1])
            # flag incorrect branch mapping
            if len([i for i in temp_stat_list
                    if i == 0]) == len(temp_stat_list):
                package += "*"
            if temp_stat_list:
                workload_combined[package] = temp_stat_list
        return headers, OrderedDict(sorted(workload_combined.items()))

    def get_workload_combined_detailed(self, release_branch):
        """
        Build translation workload for a given branch in all langs for all pkgs
        :param release_branch: str
        :return: dict
        """
        if not isinstance(release_branch, str):
            return {}
        locale_lang_tuple = self.package_manager.get_locale_lang_tuple(
            locales=self.package_manager.get_relbranch_locales(release_branch))
        workload_combined_detailed = {}
        for locale, lang in locale_lang_tuple:
            workload_combined_detailed[lang] = \
                self.get_workload_estimate(release_branch, locale=locale)[1]
        return workload_combined_detailed

    def get_threshold_based(self, release_branch, threshold=70):
        """
        Build language list those have fulfilled given threshold
        :param release_branch: str
        :param threshold: translation %age margin: int
        :return: (headers, filtered stats, {language: locale} dict)
        """

        consolidated_stats = self._consolidate_branch_specific_stats(
            self._get_branch_specific_pkgs_stats(release_branch))
        # Reverse the stats to have - what has been covered
        consolidated_stats_reversed = [(lang, 100 - stat)
                                       for lang, stat in consolidated_stats]
        filtered_stats = list(
            filter(lambda elem: elem[1] > threshold,
                   consolidated_stats_reversed))
        headers = ['Languages', 'Translation Complete %age']

        locale_lang_tuple = self.package_manager.get_locale_lang_tuple(
            locales=self.package_manager.get_relbranch_locales(release_branch))
        lang_locale_dict = {v: k for k, v in dict(locale_lang_tuple).items()}

        return headers, filtered_stats, lang_locale_dict
Esempio n. 21
0
class ReportsManager(GraphManager):
    """
    Manage Reports Generations
    """

    package_manager = PackagesManager()

    def get_reports(self, report_subject):
        """
        Fetch reports from db
        :param report_subject: str filter value (all reports when falsy)
        :return: Queryset, or None when the query fails
        """
        query_params = {}
        if report_subject:
            query_params['report_subject'] = report_subject

        fetched_reports = None
        try:
            fetched_reports = Report.objects.filter(**query_params)
        except Exception as e:
            self.app_logger('ERROR',
                            "Reports could not be fetched, details: " + str(e))
        return fetched_reports

    def create_or_update_report(self, **kwargs):
        """
        Creates or Updates a report
        :param kwargs: dict — requires both 'subject' and 'report_json'
        :return: True on success, False on save failure, None on bad input
        """
        # Both keys are mandatory. The previous `and` only bailed out when
        # BOTH were missing, letting a single missing key raise KeyError below.
        if not kwargs.get('subject') or not kwargs.get('report_json'):
            return
        default_params = {}
        match_params = {'report_subject': kwargs['subject']}
        default_params.update(match_params)
        default_params['report_json_str'] = json.dumps(kwargs['report_json'])
        default_params['report_updated'] = timezone.now()
        try:
            Report.objects.update_or_create(report_subject=kwargs['subject'],
                                            defaults=default_params)
        except Exception as e:
            # Log the failure (was a silent `# log error` placeholder);
            # same logger channel used by get_reports().
            self.app_logger('ERROR',
                            "Report could not be saved, details: " + str(e))
            return False
        else:
            return True

    def _filter_disabled_languages(self, lang_stats_dict):
        """Drop entries whose language does not belong to an active locale."""
        enabled_languages = {
            locale.lang_name
            for locale in self.package_manager.get_locales(only_active=True)
        }
        filtered_stats = {}
        for language, stats in lang_stats_dict.items():
            if language in enabled_languages:
                filtered_stats[language] = stats
        return filtered_stats

    def analyse_releases_status(self):
        """
        Summarize Releases Status
        Builds a per-release report: packages needing attention, total
        untranslated messages, and per-language (untranslated, translated,
        total) tuples; persists it under subject 'releases'.
        :return: OrderedDict (releases, newest first) or False on save failure
        """

        def _stat_total(pkg_stats, key):
            # sum() tolerates an empty package dict — the previous
            # functools.reduce() raised TypeError on empty input.
            # Missing/None stats count as zero.
            return sum(stats.get(key) or 0 for stats in pkg_stats.values())

        relbranches = self.branch_manager.get_relbranch_name_slug_tuple()
        relbranch_report = {}
        for branch_slug, branch_name in relbranches:
            relbranch_report[branch_name] = {'slug': branch_slug}
            stats_estimate = self.get_workload_estimate(branch_slug)
            untranslated_messages = \
                [stats.get('Untranslated') for pkg, stats in stats_estimate[1].items()]
            if untranslated_messages:
                # A package "needs attention" unless its untranslated count
                # is exactly 0 (None also counts as needing attention).
                packages_need_attention = (len(untranslated_messages) -
                                           untranslated_messages.count(0)) or 0
                relbranch_report[branch_name][
                    'packages_need_attention'] = packages_need_attention
                relbranch_report[branch_name]['total_untranslated_msgs'] = \
                    sum(count or 0 for count in untranslated_messages)
                lang_stats_report = self._filter_disabled_languages(
                    self.get_workload_combined_detailed(branch_slug))
                relbranch_report[branch_name]['languages'] = {}
                for lang, pkg_stats in lang_stats_report.items():
                    # 0: untranslated, 1: translated, 2: total
                    relbranch_report[branch_name]['languages'][lang] = (
                        _stat_total(pkg_stats, 'Untranslated'),
                        _stat_total(pkg_stats, 'Translated'),
                        _stat_total(pkg_stats, 'Total'))
        if self.create_or_update_report(**{
                'subject': 'releases',
                'report_json': relbranch_report
        }):
            return OrderedDict(sorted(relbranch_report.items(), reverse=True))
        return False

    def analyse_packages_status(self):
        """
        Summarize Packages Status
        Counts tracked packages per product, flags stale syncs (older than a
        week), and lists packages with branch-mapping / stats-diff problems;
        persists the report under subject 'packages'.
        :return: dict report on success, False on save failure
        """
        all_packages = self.package_manager.get_packages(pkg_params=[
            'package_name', 'products', 'details_json_last_updated',
            'stats_diff', 'release_branch_mapping', 'platform_last_updated',
            'upstream_last_updated'
        ])
        pkg_tracking_for_RHEL = all_packages.filter(
            products__icontains=RELSTREAM_SLUGS[0]).count()
        pkg_tracking_for_fedora = all_packages.filter(
            products__icontains=RELSTREAM_SLUGS[1]).count()
        # Evaluate the cutoff once so all three staleness checks agree
        # (previously timezone.now() was re-read per filter).
        week_ago = timezone.now() - timezone.timedelta(days=7)
        pkg_details_week_old = all_packages.filter(
            details_json_last_updated__lte=week_ago).count()
        pkg_transtats_week_old = all_packages.filter(
            platform_last_updated__lte=week_ago).count()
        pkg_upstream_week_old = all_packages.filter(
            upstream_last_updated__lte=week_ago).count()
        relbranches = self.branch_manager.get_relbranch_name_slug_tuple()
        pkg_improper_branch_mapping = 0
        pkg_with_stats_diff = []
        if relbranches and len(relbranches) > 0:
            # (Dropped an unused sorted list of release slugs here; the
            # health flags on the package model are sufficient.)
            pkg_improper_branch_mapping = len([
                i.package_name for i in all_packages
                if not i.release_branch_mapping_health
            ])
            pkg_with_stats_diff = [
                i.package_name for i in all_packages if not i.stats_diff_health
            ]

        package_report = {
            RELSTREAM_SLUGS[0]: pkg_tracking_for_RHEL or 0,
            RELSTREAM_SLUGS[1]: pkg_tracking_for_fedora or 0,
            'pkg_details_week_old': pkg_details_week_old or 0,
            'pkg_transtats_week_old': pkg_transtats_week_old or 0,
            'pkg_upstream_week_old': pkg_upstream_week_old or 0,
            'pkg_improper_branch_mapping': pkg_improper_branch_mapping or 0,
            'pkg_with_stats_diff': pkg_with_stats_diff,
            'pkg_having_stats_diff': len(pkg_with_stats_diff) or 0
        }
        if self.create_or_update_report(**{
                'subject': 'packages',
                'report_json': package_report
        }):
            return package_report
        return False

    def refresh_stats_required_by_territory(self):
        """
        This refreshes statistics which is required by territory
            - this includes:
                - all languages (both disabled and enabled)
                - build system stats where sync_visibility is True
                - both for translation platform and build system
        :return: master_statistics or False
        """
        locales = self.package_manager.get_locales()
        releases = self.branch_manager.get_release_branches()
        release_reports = self.get_reports(report_subject='releases')
        if not release_reports:
            return
        release_report = release_reports.get()

        # Skeleton: locale_id -> language name plus an empty slot per
        # release that tracks translation.
        master_statistics = {}
        for locale in locales:
            release_slots = {
                release.release_slug: {
                    'Release Name': release.release_name,
                    'Translation Platform': [],
                    'Build System': []
                }
                for release in releases if release.track_trans_flag
            }
            master_statistics[locale.locale_id] = dict(
                language=locale.lang_name, **release_slots)

        lang_locale_dict = {
            locale.lang_name: locale.locale_id for locale in locales
        }

        # Translation-platform numbers come from the saved 'releases' report.
        for release, data in release_report.report_json.items():
            release_slug = data.get('slug')
            language_stats = data.get('languages')
            if not (release_slug and language_stats):
                continue
            for language, stats in language_stats.items():
                locale = lang_locale_dict.get(language)
                slot = master_statistics.get(locale, {}).get(release_slug, {})
                if 'Translation Platform' in slot:
                    slot['Translation Platform'] = stats

        # Build-system numbers come straight from the package manager.
        build_stats = self.package_manager.get_build_system_stats_by_release()
        for b_release, locale_stats in build_stats.items():
            for b_locale, b_stats in locale_stats.items():
                slot = master_statistics.get(b_locale, {}).get(b_release, {})
                if 'Build System' in slot:
                    slot['Build System'] = [
                        b_stats.get('Untranslated') or 0,
                        b_stats.get('Translated') or 0,
                        b_stats.get('Total') or 0
                    ]
        if self.create_or_update_report(**{
                'subject': 'location',
                'report_json': master_statistics
        }):
            return master_statistics
        return False

    @staticmethod
    def get_trending_languages(release_summary_data, *release_tuple):
        """
        Get Trending Languages
            Based on statistics of the latest releases.
        :param release_summary_data: dict
        :param release_tuple: tuple
        :return: dict: {language: average percentage}
        """
        if not release_summary_data:
            return {}

        trending_languages = []
        lang_stats_data = release_summary_data.get(release_tuple[1],
                                                   {}).get('languages')
        if not lang_stats_data:
            return {}

        try:
            for lang, stats in lang_stats_data.items():
                try:
                    percent = round((stats[1] * 100) / stats[2], 2)
                except (ZeroDivisionError, FloatingPointError):
                    percent = 0
                trending_languages.append((lang, percent, stats))
        except Exception as e:
            # log for now
            return {}
        if trending_languages:
            return sorted(trending_languages, key=lambda x: x[1], reverse=True)