Example 1
    def process(self, pkg, counter_msg):
        """Processes the download/install of packages."""
        if not config.HTTP_DMG:
            try:
                self._download(pkg=pkg, counter_msg=counter_msg)
            except Exception as e:
                LOG.info('Exception downloading: {}'.format(e))

        if config.DEPLOY_PKGS or config.FORCED_DEPLOYMENT:
            try:
                self._install(pkg=pkg, counter_msg=counter_msg)
            except Exception as e:
                LOG.info('Exception installing: {}'.format(e))

            # Installer can hang on the 'Preparing for install'
            # in macOS 11.0.1, so delay the install for a few seconds
            # to allow things to settle.
            # if StrictVersion(OS_VER) > StrictVersion('10.15.99') or config.INST_SLEEP:
            if not config.DRY_RUN and config.INST_SLEEP:
                sleep(int(config.INST_SLEEP))

            if not config.DRY_RUN:
                # Don't try and delete from DMG.
                if not config.HTTP_DMG:
                    misc.clean_up(file_path=pkg.DownloadPath)
Example 2
    def get(self, url, output=None, counter_msg=None):
        """Retrieves the specified URL. Saves it to path specified in 'output' if present."""
        # NOTE: Must ignore 'dry run' state for any '.plist' file downloads.
        _fetching_plist = url.endswith('.plist')

        # Now the command.
        cmd = [self._curl_path,
               '--user-agent',
               config.USERAGENT,
               '-L',
               '-C',
               '-',
               url]

        if config.FORCE_DOWNLOAD and output and os.path.exists(output):
            if not config.DRY_RUN:
                LOG.debug('Forced download - removing: {}'.format(output))
                misc.clean_up(file_path=output)

        if config.PROXY:
            cmd.extend(['--proxy', config.PROXY])

        if config.ALLOW_INSECURE_CURL:
            cmd.extend(['--insecure'])

        if not (config.QUIET or config.SILENT or self._silent_override or _fetching_plist):
            cmd.extend(['--progress-bar'])
        else:
            cmd.extend(['--silent'])

        if output:
            cmd.extend(['--create-dirs', '-o', output])

        LOG.debug('CURL get: {}'.format(' '.join(cmd)))

        if not config.DRY_RUN or _fetching_plist:
            if counter_msg:
                _msg = 'Downloading {} - {}'.format(counter_msg, url)
            else:
                _msg = 'Downloading {}'.format(url)

            if config.FORCE_DOWNLOAD:
                _msg = _msg.replace('Downloading', 'Re-downloading')

            try:
                LOG.info(_msg)

                if not (config.SILENT or self._silent_override or _fetching_plist):
                    print(_msg)

                subprocess.check_call(cmd)
            except subprocess.CalledProcessError as _e:
                LOG.debug('{}: {}'.format(' '.join(cmd), _e))
                raise _e
        elif config.DRY_RUN:
            if not config.SILENT:
                if counter_msg:
                    _msg = 'Download {} - {}'.format(counter_msg, url)
                else:
                    _msg = 'Download {}'.format(url)

                print(_msg)
                LOG.info(_msg)
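
For reference, the command-assembly pattern in get() boils down to the following standalone sketch; the fetch() helper, the hard-coded '/usr/bin/curl' path, and the keyword arguments are illustrative stand-ins for the example's config values, not part of the original module.

import subprocess


def fetch(url, output=None, silent=False, insecure=False, proxy=None):
    """Build and run a curl command along the same lines as get() above."""
    cmd = ['/usr/bin/curl', '--location', '--continue-at', '-', url]

    if proxy:
        cmd.extend(['--proxy', proxy])

    if insecure:
        cmd.append('--insecure')

    cmd.append('--silent' if silent else '--progress-bar')

    if output:
        cmd.extend(['--create-dirs', '-o', output])

    subprocess.check_call(cmd)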
Example 3
    def _read_remote_plist(self):
        """Gets the property list."""
        result = None

        _basename = os.path.basename(self._plist_url_path)
        _tmp_file = os.path.join(self._tmp_dir, _basename)

        _bad_wolf_fixes = bad_wolf.BAD_WOLF_PKGS.get(_basename, None)
        _bwd = None

        _req = curl_requests.CURL(url=self._plist_url_path)

        # NOTE 2019-11-04: Seems that using the 'resume' capability in cURL does not
        # work here now for some reason, so don't resume.
        if _req.status in config.HTTP_OK_STATUS:
            _req.get(url=self._plist_url_path, output=_tmp_file, resume=False)
        else:
            _req.get(url=self._plist_failover_url_path,
                     output=_tmp_file,
                     resume=False)

        _root = plist.readPlist(_tmp_file)

        if _root:
            result = set()

            # Apply 'Bad Wolf' patches
            for _pkg in _root['Packages']:
                _new_pkg = _root['Packages'][_pkg].copy()  # Work on copy

                # Create a new key called 'PackageName' that
                # contains the value '_pkg' for use with content packs.
                _new_pkg['PackageName'] = _pkg

                if _bad_wolf_fixes:
                    # A dictionary of per-package fixes from '_bad_wolf_fixes'
                    _bwd = _bad_wolf_fixes.get(_pkg, None)

                # Merge new/existing keys from matching '_bwd'
                if _bwd:
                    _new_pkg.update(_bwd)

                _pkg_obj = package.LoopPackage(**_new_pkg)

                # pylint: disable=no-member
                # Only add/process packages that are _not_ 'BadWolfIgnore = True'
                if not _pkg_obj.BadWolfIgnore:
                    result.add(_pkg_obj)
                # pylint: enable=no-member

            # Now process option packs
            _opt_packs = option_packs.OptionPack(source=_root,
                                                 release=_basename)
            self.option_packs = _opt_packs.option_packs

        misc.clean_up(file_path=_tmp_file)

        return result
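
The per-package override step in the loop above is a plain copy-then-update on dictionaries; a minimal standalone sketch follows, with made-up package names and override keys used purely for illustration.

# Hypothetical package entries and 'Bad Wolf' style overrides, for illustration only.
packages = {
    'MAContent10_AssetPack_0001.pkg': {'DownloadSize': 1024, 'IsMandatory': True},
    'MAContent10_AssetPack_0002.pkg': {'DownloadSize': 2048, 'IsMandatory': False},
}
overrides = {
    'MAContent10_AssetPack_0001.pkg': {'IsMandatory': False},
}

merged = []

for _name, _attrs in packages.items():
    _new = _attrs.copy()                   # work on a copy, as in the loop above
    _new['PackageName'] = _name            # keep the plist key alongside the attributes
    _new.update(overrides.get(_name, {}))  # apply any per-package fixes
    merged.append(_new)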
Example 4
    def process(self, pkg, counter_msg):
        """Processes the download/install of packages."""
        if not config.HTTP_DMG:
            self._download(pkg=pkg, counter_msg=counter_msg)

        if config.DEPLOY_PKGS or config.FORCED_DEPLOYMENT:
            self._install(pkg=pkg, counter_msg=counter_msg)

            if not config.DRY_RUN:
                # Don't try and delete from DMG.
                if not config.HTTP_DMG:
                    misc.clean_up(file_path=pkg.DownloadPath)
Example 5
    def get(self, url, output=None, counter_msg=None, resume=True):
        """Retrieves the specified URL. Saves it to path specified in 'output' if present."""
        # NOTE: Must ignore 'dry run' state for any '.plist' file downloads.
        _headers = self._get_headers(obj=url)

        # Check if we're fetching a property list file
        _fetching_plist = url.endswith('.plist')

        # Now the command.
        cmd = [self._curl_path,
               # Retry failed downloads n times (default 5); curl waits 1 second, then
               # doubles the wait on each subsequent retry.
               '--retry', config.CURL_RETRIES,
               '--retry-max-time', '10',  # Max of 10 seconds between each retry
               config.CURL_HTTP_ARG,
               '--user-agent',
               config.USERAGENT]

        if resume:
            cmd.extend(['-L', '-C', '-'])

        cmd.extend([url])

        # If there is a content header indicating gzipped content, pass the '--compressed'
        # flag so curl automatically decompresses it.
        _gzipped = _headers.get('Content-Encoding', False) == 'gzip'

        if _gzipped:
            cmd.extend(['--compressed'])

        if config.FORCE_DOWNLOAD and output and os.path.exists(output):
            if not config.DRY_RUN:
                LOG.debug('Forced download - removing: {}'.format(output))
                misc.clean_up(file_path=output)

        if config.PROXY:
            cmd.extend(['--proxy', config.PROXY])

        if config.ALLOW_INSECURE_CURL:
            cmd.extend(['--insecure'])

        if not (config.QUIET or config.SILENT or self._silent_override or _fetching_plist):
            cmd.extend(['--progress-bar'])
        else:
            cmd.extend(['--silent'])

        if output:
            cmd.extend(['--create-dirs', '-o', output])

        LOG.debug('CURL get: {}'.format(' '.join(cmd)))

        if not config.DRY_RUN or _fetching_plist:
            if counter_msg:
                _msg = 'Downloading file {} - {}'.format(counter_msg, url)
            else:
                _msg = 'Downloading {}'.format(url)

            if config.FORCE_DOWNLOAD:
                _msg = _msg.replace('Downloading', 'Re-downloading')

            try:
                if not os.path.exists(output):
                    LOG.info(_msg)

                    if not (config.SILENT or self._silent_override or _fetching_plist):
                        print(_msg)

                    subprocess.check_call(cmd)
                else:
                    _local_len = os.path.getsize(output)
                    _content_len = _headers.get('Content-Length',
                                                _headers.get('content-length', None))

                    if _content_len and _local_len == int(_content_len):
                        _msg = _msg.replace('Re-downloading', 'Downloading')
                        _msg = _msg.replace('Downloading', 'Skipping existing file')
                        LOG.info(_msg)

                        if not (config.SILENT or self._silent_override or _fetching_plist):
                            print(_msg)
                    else:
                        _msg = _msg.replace('Re-downloading', 'Downloading')
                        _msg = _msg.replace('Downloading', 'Resuming')
                        LOG.info(_msg)

                        if not (config.SILENT or self._silent_override or _fetching_plist):
                            print(_msg)

                        subprocess.check_call(cmd)
            except subprocess.CalledProcessError as _e:
                LOG.debug('{}: {}'.format(' '.join(cmd), _e))
                raise _e
        elif config.DRY_RUN:
            if not config.SILENT:
                if counter_msg:
                    _msg = 'Download {} - {}'.format(counter_msg, url)
                else:
                    _msg = 'Download {}'.format(url)

                print(_msg)
                LOG.info(_msg)
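
The decision made in the second branch above, skip when the local file already matches the reported Content-Length, otherwise let curl resume, can be summarised in a small sketch; download_action() and its arguments are illustrative only and not part of the example's class.

import os


def download_action(output, content_length):
    """Return which action the size check above would take.

    'content_length' stands in for the value taken from the response headers
    and may be None when the server does not report it."""
    if not os.path.exists(output):
        return 'download'

    if content_length and os.path.getsize(output) == int(content_length):
        return 'skip'

    # No usable length, or the local file is incomplete: hand back to curl with '-C -'.
    return 'resume'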
Example 6
def differences(file_a, file_b, detailed_info=False):
    """Compares the package details in 'file_a' against 'file_b' to determine
    what files exist in 'file_b' but not in 'file_a'. This will also display
    packages _removed_.
    This function sorts the files into smallest to largest order. So if
    'garageband1021.plist' is compared to 'garageband1011.plist', the
    output will be based on packages that are in 'garageband1021.plist' but
    not in 'garageband1011.plist'. In other words, '1021' is the _right_
    file, '1011' is the _left_ file."""
    sorted_files = sorted([file_a, file_b])
    file_a = sorted_files[0]
    base_a = os.path.basename(file_a)
    file_b = sorted_files[1]
    base_b = os.path.basename(file_b)

    _supported = sorted(config.SUPPORTED_PLISTS.values())

    if not all(_file.endswith('.plist') for _file in [file_a, file_b]):
        print('Files must both be property list files.')
        sys.exit(1)

    if not all(_file in _supported for _file in [base_a, base_b]):
        print('Files must be from {}'.format(_supported))
        sys.exit(1)

    # Temporary working directory used if either file needs to be fetched.
    _tmp_dir = os.path.join(tempfile.gettempdir(), config.BUNDLE_ID)

    if not os.path.exists(file_a):
        _fa_fallback = os.path.join(config.AUDIOCONTENT_FAILOVER_URL,
                                    'lp10_ms3_content_2016', base_a)
        _fa_url = misc.plist_url_path(base_a)
        file_a = os.path.join(_tmp_dir, base_a)

        _req = curl_requests.CURL(url=_fa_url)

        if _req.status in config.HTTP_OK_STATUS:
            _req.get(url=_fa_url, output=file_a)
        else:
            _req.get(url=_fa_fallback, output=file_a)

        file_a_plist = plist.readPlist(plist_path=file_a)['Packages']
        misc.clean_up(file_a)
    else:
        file_a_plist = plist.readPlist(plist_path=file_a)['Packages']

    if not os.path.exists(file_b):
        _fb_fallback = os.path.join(config.AUDIOCONTENT_FAILOVER_URL,
                                    'lp10_ms3_content_2016', base_b)
        _fb_url = misc.plist_url_path(base_b)
        file_b = os.path.join(_tmp_dir, base_b)

        _req = curl_requests.CURL(url=_fb_url)

        if _req.status in config.HTTP_OK_STATUS:
            _req.get(url=_fb_url, output=file_b)
        else:
            _req.get(url=_fb_fallback, output=file_b)

        file_b_plist = plist.readPlist(plist_path=file_b)['Packages']
        misc.clean_up(file_b)
    else:
        file_b_plist = plist.readPlist(plist_path=file_b)['Packages']

    # Build a set of package names.
    file_a_packages = set([
        os.path.basename(file_a_plist[_pkg]['DownloadName'])
        for _pkg in file_a_plist
    ])
    file_b_packages = set([
        os.path.basename(file_b_plist[_pkg]['DownloadName'])
        for _pkg in file_b_plist
    ])

    # Get the new/removed files by using 'set_b.difference(set_a)'
    new_files = file_b_packages.difference(file_a_packages)
    rem_files = file_a_packages.difference(file_b_packages)
    cmn_files = file_b_packages.intersection(file_a_packages)

    if not detailed_info:
        if new_files:
            print('{} new packages in {} when compared to {}'.format(
                len(new_files), base_b, base_a))

        if rem_files:
            print('{} packages removed from {} compared to {}'.format(
                len(rem_files), base_a, base_b))

        if cmn_files:
            print('{} packages common between {} and {}'.format(
                len(cmn_files), base_a, base_b))

    # Exit success because nothing else to do.
    sys.exit(0)
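
The new/removed/common counts reported above come straight from set operations on the package names; a small sketch with made-up download names standing in for the real 'DownloadName' values.

# Hypothetical download names; the real sets are built from the 'DownloadName' values.
old_packages = {'pkg_a.pkg', 'pkg_b.pkg', 'pkg_c.pkg'}
new_packages = {'pkg_b.pkg', 'pkg_c.pkg', 'pkg_d.pkg'}

added = new_packages.difference(old_packages)      # {'pkg_d.pkg'}
removed = old_packages.difference(new_packages)    # {'pkg_a.pkg'}
common = new_packages.intersection(old_packages)   # {'pkg_b.pkg', 'pkg_c.pkg'}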