Example #1
0
File: keys.py — Project: smazoyer/PyUpdater
class KeyImporter(object):
    """Imports a keypack file from the current working directory into
    the PyUpdater config database."""

    def __init__(self):
        # json-backed key/value store for the repo config
        self.db = Storage()

    def _look_for_keypack(self):
        # True when the keypack file is present in the current directory
        return settings.KEYPACK_FILENAME in os.listdir(os.getcwd())

    def _load_keypack(self):
        # Read & parse the keypack file. Best-effort: any read or parse
        # failure is logged and None is returned.
        json_data = None
        try:
            with io.open(settings.KEYPACK_FILENAME, 'r',
                         encoding='utf-8') as f:
                data = f.read()
        except Exception as err:
            log.debug(err, exc_info=True)
        else:
            try:
                json_data = json.loads(data)
            except Exception as err:
                log.debug(err, exc_info=True)
        return json_data

    def start(self):
        """Import the keypack into the config database.

        Returns (bool): True on success, False when the keypack file is
        missing or cannot be parsed.
        """
        if not self._look_for_keypack():
            return False
        keypack = self._load_keypack()
        if keypack is None:
            return False
        self.db.save(settings.CONFIG_DB_KEY_KEYPACK, keypack)
        return True
Example #2
0
class KeyImporter(object):
    """Loads a keypack file from the repo root and stores it in the
    config database."""

    def __init__(self):
        # Key/value store backing the repo config
        self.db = Storage()

    def _look_for_keypack(self):
        # Is the keypack file sitting in the current directory?
        return settings.KEYPACK_FILENAME in os.listdir(os.getcwd())

    def _load_keypack(self):
        # Read and parse the keypack file. Failures are logged and
        # surface to the caller as a None return.
        try:
            with io.open(settings.KEYPACK_FILENAME, 'r',
                         encoding='utf-8') as fp:
                raw = fp.read()
        except Exception as err:
            log.debug(err, exc_info=True)
            return None
        try:
            return json.loads(raw)
        except Exception as err:
            log.debug(err, exc_info=True)
            return None

    def start(self):
        """Import the keypack; True on success, False otherwise."""
        if not self._look_for_keypack():
            return False
        keypack = self._load_keypack()
        if keypack is None:
            return False
        self.db.save(settings.CONFIG_DB_KEY_KEYPACK, keypack)
        return True
Example #3
0
class KeyHandler(object):
    """KeyHandler object is used to manage keys used for signing updates

    Kwargs:

        app (obj): Config object to get config values from
    """
    def __init__(self):
        self.db = Storage()

        # Encoding used for both the raw signing key and the signature
        self.key_encoding = 'base64'
        data_dir = os.getcwd()
        self.data_dir = os.path.join(data_dir, settings.USER_DATA_FOLDER)
        self.deploy_dir = os.path.join(self.data_dir, 'deploy')

        # Name of the keypack to import. It should be placed
        # in the root of the repo
        self.keypack_filename = os.path.join(data_dir,
                                             settings.CONFIG_DATA_FOLDER,
                                             settings.KEYPACK_FILENAME)

        # The name of the gzipped version file in
        # the pyu-data/deploy directory
        self.version_file = os.path.join(self.deploy_dir,
                                         settings.VERSION_FILE_FILENAME)

        # The name of the gzipped key file in
        # the pyu-data/deploy directory
        self.key_file = os.path.join(self.deploy_dir,
                                     settings.KEY_FILE_FILENAME)

    def sign_update(self):
        """Signs version file with private key

        Proxy method for :meth:`_add_sig`
        """
        # Loads private key, signs the version manifest, and writes the
        # manifest and key file back to disk.
        self._add_sig()

    def _load_private_keys(self):
        """Return the raw private key from the imported keypack, or
        None when no keypack (or no key inside it) is available."""
        log.debug('Loading private key')

        # Loading keypack data from .pyupdater/config.pyu
        keypack_data = self.db.load(settings.CONFIG_DB_KEY_KEYPACK)
        private_key = None
        if keypack_data is not None:
            try:
                private_key = keypack_data['repo']['app_private']
            except KeyError:
                # We will exit in _add_sig if private_key is None
                pass
        return private_key

    def _add_sig(self):
        """Sign the version manifest and write the gzipped manifest and
        key file to the deploy directory."""
        # Raw private key will need to be converted into
        # a signing key object
        private_key_raw = self._load_private_keys()
        if private_key_raw is None:
            log.error('Private Key not found. Please '
                      'import a keypack & try again')
            return

        # Load update manifest
        update_data = self._load_update_data()

        # A stale signature must not be part of the signed payload
        if 'signature' in update_data:
            log.debug('Removing signatures from version file')
            del update_data['signature']

        # sort_keys makes the serialization deterministic, so the
        # signature is reproducible for identical manifests
        update_data_str = json.dumps(update_data, sort_keys=True)

        private_key_raw = private_key_raw.encode('utf-8')

        # Creating signing key object
        private_key = ed25519.SigningKey(private_key_raw,
                                         encoding=self.key_encoding)
        log.debug("Signing update data")
        # Signs update data with private key
        signature = private_key.sign(six.b(update_data_str),
                                     encoding=self.key_encoding).decode()
        log.debug('Sig: %s', signature)

        # Create new dict from json string
        update_data = json.loads(update_data_str)

        # Add signatures to update data
        update_data['signature'] = signature
        log.debug('Adding signature to update data')

        # Write updated version file to .pyupdater/config.pyu
        self._write_update_data(update_data)

        # Write gzipped key file
        self._write_key_file()

    def _write_gzipped_json(self, path, data):
        # Shared helper: dump *data* as JSON and write it gzipped to
        # *path* (str payload on py2, bytes on py3).
        with gzip.open(path, 'wb') as f:
            new_data = json.dumps(data)
            if six.PY2:
                f.write(new_data)
            else:
                f.write(bytes(new_data, 'utf-8'))

    def _write_update_data(self, data):
        """Save version meta-data to the repo db and write the gzipped
        version manifest to the deploy dir."""
        self.db.save(settings.CONFIG_DB_KEY_VERSION_META, data)
        log.debug('Saved version meta data')
        log.debug('Upload manifest: \n%s', data)
        self._write_gzipped_json(self.version_file, data)
        log.debug('Created gzipped version manifest in deploy dir')

    def _write_key_file(self):
        """Write the gzipped key file to the deploy dir."""
        keypack_data = self.db.load(settings.CONFIG_DB_KEY_KEYPACK)
        if keypack_data is None:
            log.error('Private Key not found. Please '
                      'import a keypack & try again')
            return

        # Only the 'upload' section of the keypack is published
        self._write_gzipped_json(self.key_file, keypack_data['upload'])
        log.debug('Created gzipped key file in deploy dir')

    def _load_update_data(self):
        """Load the version meta-data, creating (and persisting) an
        empty manifest when none exists yet."""
        log.debug("Loading version data")
        update_data = self.db.load(settings.CONFIG_DB_KEY_VERSION_META)
        # If update_data is None, create a new one
        if update_data is None:
            update_data = {}
            log.error('Version meta data not found')
            self.db.save(settings.CONFIG_DB_KEY_VERSION_META, update_data)
            log.debug('Created new version meta data')
        log.debug('Version file loaded')
        return update_data
Example #4
0
class ConfigManager(object):
    """Loads and saves the repo application config, and generates
    client_config.py for the client."""

    def __init__(self):
        self.cwd = os.getcwd()
        self.db = Storage()
        self.config_key = settings.CONFIG_DB_KEY_APP_CONFIG

    def load_config(self):
        # Loads config from database (json file); missing data yields
        # an empty Config with only DATA_DIR set.
        stored = self.db.load(self.config_key)
        if stored is None:
            stored = {}
        config = Config()
        for key, value in stored.items():
            config[key] = value
        config.DATA_DIR = os.getcwd()
        return config

    def get_app_name(self):
        # Convenience accessor for the configured application name
        return self.load_config().APP_NAME

    def save_config(self, obj):
        # Saves config to database (json file), then regenerates the
        # client-side config module.
        log.info('Saving Config')
        self.db.save(self.config_key, obj)
        log.info('Config saved')
        self.write_config_py(obj)
        log.info('Wrote client config')

    def write_config_py(self, obj):
        # Writes client config to client_config.py
        keypack_data = self.db.load(settings.CONFIG_DB_KEY_KEYPACK)
        if keypack_data is None:
            log.debug('*** Keypack data is None ***')
            public_key = None
        else:
            public_key = keypack_data['client']['offline_public']

        target = os.path.join(self.cwd, *obj.CLIENT_CONFIG_PATH)
        # Templates: quoted for string attributes, raw for literals
        quoted = "    {} = '{}'\n"
        raw = "    {} = {}\n"

        log.debug('Writing client_config.py')
        with open(target, 'w') as fp:
            fp.write('class ClientConfig(object):\n')

            log.debug('Adding PUBLIC_KEY to client_config.py')
            fp.write(quoted.format('PUBLIC_KEY', public_key))

            # Optional attributes are only emitted when present on obj
            for attr, template in (('APP_NAME', quoted),
                                   ('COMPANY_NAME', quoted),
                                   ('UPDATE_URLS', raw),
                                   ('MAX_DOWNLOAD_RETRIES', raw)):
                if hasattr(obj, attr):
                    log.debug('Adding {} to client_config.py'.format(attr))
                    fp.write(template.format(attr, getattr(obj, attr)))
Example #5
0
class Loader(object):
    """Loads & saves the application config file (json-backed db)."""
    def __init__(self):
        # Repo root; client_config.py is written relative to this
        self.cwd = os.getcwd()
        # Key/value store backing the repo config
        self.db = Storage()
        # Optional user password taken from the environment
        self.password = os.environ.get(settings.USER_PASS_ENV)
        self.config_key = settings.CONFIG_DB_KEY_APP_CONFIG

    def load_config(self):
        """Loads config from database (json file)

            Returns (obj): Config object; empty (except DATA_DIR) when
            no config has been saved yet.
        """
        config_data = self.db.load(self.config_key)
        if config_data is None:
            config_data = {}
        config = Config()
        for k, v in config_data.items():
            config[k] = v
        # DATA_DIR always reflects the current working directory
        config.DATA_DIR = os.getcwd()
        return config

    def get_app_name(self):
        # Convenience accessor for the configured application name
        config = self.load_config()
        return config.APP_NAME

    def save_config(self, obj):
        """Saves config file to pyupdater database, then regenerates
        the client-side config module.

        Args:

            obj (obj): config object
        """
        log.info('Saving Config')
        self.db.save(self.config_key, obj)
        log.info('Config saved')
        self._write_config_py(obj)
        log.info('Wrote client config')

    def _write_config_py(self, obj):
        """Writes client config to client_config.py

        Args:

            obj (obj): config object
        """
        keypack_data = self.db.load(settings.CONFIG_DB_KEY_KEYPACK)
        if keypack_data is None:
            # No keypack imported yet; PUBLIC_KEY is written as None
            public_key = None
        else:
            public_key = keypack_data['client']['offline_public']

        filename = os.path.join(self.cwd, *obj.CLIENT_CONFIG_PATH)
        # Templates: quoted for string attributes, raw for literals
        attr_str_format = "    {} = '{}'\n"
        attr_format = "    {} = {}\n"
        with open(filename, 'w') as f:
            f.write('class ClientConfig(object):\n')
            if hasattr(obj, 'APP_NAME') and obj.APP_NAME is not None:
                f.write(attr_str_format.format('APP_NAME', obj.APP_NAME))
                log.debug('Wrote APP_NAME to client config')
            if hasattr(obj, 'COMPANY_NAME') and obj.COMPANY_NAME is not None:
                f.write(
                    attr_str_format.format('COMPANY_NAME', obj.COMPANY_NAME))
                log.debug('Wrote COMPANY_NAME to client config')
            if hasattr(obj, 'UPDATE_URLS') and obj.UPDATE_URLS is not None:
                # UPDATE_URLS is written unquoted (a list literal)
                f.write(attr_format.format('UPDATE_URLS', obj.UPDATE_URLS))
                log.debug('Wrote UPDATE_URLS to client config')
            f.write(attr_str_format.format('PUBLIC_KEY', public_key))
            log.debug('Wrote PUBLIC_KEY to client config')
Example #6
0
class KeyHandler(object):
    """KeyHandler object is used to manage keys used for signing updates

    Kwargs:

        app (obj): Config object to get config values from
    """

    def __init__(self):
        self.db = Storage()

        # Encoding used for the raw signing key and the signature
        self.key_encoding = 'base64'
        data_dir = os.getcwd()
        self.data_dir = os.path.join(data_dir, settings.USER_DATA_FOLDER)
        self.deploy_dir = os.path.join(self.data_dir, 'deploy')
        # Keypack to import; expected in the repo's config data folder
        self.keypack_filename = os.path.join(data_dir,
                                             settings.CONFIG_DATA_FOLDER,
                                             settings.KEYPACK_FILENAME)
        # Gzipped version manifest written to the deploy dir
        self.version_file = os.path.join(self.deploy_dir,
                                         settings.VERSION_FILE)
        # Gzipped key file written to the deploy dir
        self.key_file = os.path.join(self.deploy_dir,
                                     settings.KEY_FILE)

    def sign_update(self):
        """Signs version file with private key

        Proxy method for :meth:`_add_sig`
        """
        # Loads private key
        # Loads version file to memory
        # Signs Version file
        # Writes version file back to disk
        self._add_sig()

    def _load_private_keys(self):
        # Loads private key from the imported keypack; returns None
        # when no keypack (or no key inside it) is available.
        log.debug('Loading private key')
        keypack_data = self.db.load(settings.CONFIG_DB_KEY_KEYPACK)
        private_key = None
        if keypack_data is not None:
            try:
                private_key = keypack_data['repo']['app_private']
            except KeyError:
                # We will exit in _add_sig if private_key is None
                pass
        return private_key

    def _add_sig(self):
        # Adding new signature to version file
        # Raw private key will need to be converted into
        # a signing key object
        private_key_raw = self._load_private_keys()
        if private_key_raw is None:
            log.error('Private Key not found. Please '
                      'import a keypack & try again')
            return

        update_data = self._load_update_data()
        # A stale signature must not be part of the signed payload
        if 'signature' in update_data:
            log.debug('Removing signatures from version file')
            del update_data['signature']
        # sort_keys makes the serialization deterministic, so the
        # signature is reproducible for identical manifests
        update_data_str = json.dumps(update_data, sort_keys=True)

        log.debug('Key type before: %s', type(private_key_raw))
        private_key_raw = private_key_raw.encode('utf-8')
        log.debug('Key type after: %s', type(private_key_raw))

        # Creating signing key object
        private_key = ed25519.SigningKey(private_key_raw,
                                         encoding=self.key_encoding)
        # Signs update data with private key
        signature = private_key.sign(six.b(update_data_str),
                                     encoding=self.key_encoding).decode()
        log.debug('Sig: %s', signature)

        update_data = json.loads(update_data_str)
        # Add signatures to update data
        update_data['signature'] = signature
        log.info('Adding sig to update data')
        # Write updated version file to filesystem
        self._write_update_data(update_data)
        self._write_key_file()

    def _write_update_data(self, data):
        # Save update data to repo database
        self.db.save(settings.CONFIG_DB_KEY_VERSION_META, data)
        log.debug('Saved version meta data')
        log.debug('Upload manifest: \n%s', data)
        # Gzip update date
        with gzip.open(self.version_file, 'wb') as f:
            new_data = json.dumps(data)
            if six.PY2:
                f.write(new_data)
            else:
                f.write(bytes(new_data, 'utf-8'))
        log.info('Created gzipped version manifest in deploy dir')

    def _write_key_file(self):
        # Publish the 'upload' section of the keypack as a gzipped
        # key file in the deploy dir.
        keypack_data = self.db.load(settings.CONFIG_DB_KEY_KEYPACK)
        if keypack_data is None:
            log.error('Private Key not found. Please '
                      'import a keypack & try again')
            return

        upload_data = keypack_data['upload']
        with gzip.open(self.key_file, 'wb') as f:
            new_data = json.dumps(upload_data)
            if six.PY2:
                f.write(new_data)
            else:
                f.write(bytes(new_data, 'utf-8'))
        log.info('Created gzipped key file in deploy dir')

    def _load_update_data(self):
        # Load the version meta-data, creating (and persisting) an
        # empty manifest when none exists yet.
        log.debug("Loading version data")
        update_data = self.db.load(settings.CONFIG_DB_KEY_VERSION_META)
        # If update_data is None, create a new one
        if update_data is None:
            update_data = {}
            log.error('Version meta data not found')
            self.db.save(settings.CONFIG_DB_KEY_VERSION_META, update_data)
            log.info('Created new version meta data')
        log.debug('Version file loaded')
        return update_data
Example #7
0
class PackageHandler(object):
    """Handles finding, sorting, getting meta-data, moving packages.

    Kwargs:

        app (instance): Config object
    """
    def __init__(self, config=None):
        """Create the handler; when a config object is supplied the
        working directories are initialized immediately."""
        # pyu-data folder in the repo root (populated by init_app)
        self.data_dir = None
        # Whether the repo config / version file have been loaded yet
        self.config_loaded = False
        # json-backed store for repo config information
        self.db = Storage()
        if config is not None:
            self.init_app(config)

    def init_app(self, obj):
        """Sets up client with config values from obj

        Args:

            obj (instance): config object
        """
        # Patch support defaults to enabled
        self.patches = obj.get('UPDATE_PATCHES', True)
        self.patch_support = bool(self.patches)
        if self.patch_support:
            log.debug('Patch support enabled')
        else:
            log.info('Patch support disabled')

        cwd = os.getcwd()
        self.data_dir = os.path.join(cwd, settings.USER_DATA_FOLDER)
        self.files_dir = os.path.join(self.data_dir, 'files')
        self.deploy_dir = os.path.join(self.data_dir, 'deploy')
        self.new_dir = os.path.join(self.data_dir, 'new')
        self.config_dir = os.path.join(cwd, settings.CONFIG_DATA_FOLDER)
        self.config = None
        self.json_data = None

        self.setup()

    def setup(self):
        "Creates working directories & loads json files."
        if self.data_dir is not None:
            self._setup()

    def _setup(self):
        self._setup_work_dirs()
        if self.config_loaded is False:
            self.json_data = self._load_version_file()
            self.config = self._load_config()
            self.config_loaded = True

    def process_packages(self, report_errors=False):
        """Gets a list of updates to process.  Adds the name of an
        update to the version file if not already present.  Processes
        all packages.  Updates the version file meta-data. Then writes
        version file back to disk.

        Args:

            report_errors (bool): when True, log rejected packages and
                why they were rejected.

        Raises:

            PackageHandlerError: when init_app has not been called yet.
        """
        if self.data_dir is None:
            raise PackageHandlerError('Must init first.', expected=True)
        # Getting a list of meta data from all packages in the
        # pyu-data/new directory. Also create a patch manifest
        # to create patches.
        pkg_manifest, patch_manifest = self._get_package_list(report_errors)
        patches = self._make_patches(patch_manifest)
        self._cleanup(patch_manifest)
        pkg_manifest = self._add_patches_to_packages(pkg_manifest, patches)
        self.json_data = self._update_version_file(self.json_data,
                                                   pkg_manifest)
        self._write_json_to_file(self.json_data)
        self._write_config_to_file(self.config)
        self._move_packages(pkg_manifest)

    def _setup_work_dirs(self):
        # Sets up work dirs on dev machine.  Creates the following folder
        #    - Data dir
        # Then inside the data folder it creates 3 more folders
        #    - New - for new updates that need to be signed
        #    - Deploy - All files ready to upload are placed here.
        #    - Files - All updates are placed here for future reference
        #
        # This is non destructive
        dirs = [
            self.data_dir, self.new_dir, self.deploy_dir, self.files_dir,
            self.config_dir
        ]
        for d in dirs:
            if not os.path.exists(d):
                log.info('Creating dir: %s', d)
                os.mkdir(d)

    def _load_version_file(self):
        # Pull the version meta-data from the repo db; fall back to a
        # fresh, empty manifest when none has been saved yet.
        meta = self.db.load(settings.CONFIG_DB_KEY_VERSION_META)
        if meta is not None:
            return meta
        log.warning('Version file not found')  # pragma: no cover
        log.info('Created new version file')
        return {'updates': {}}

    def _load_config(self):
        # Load the repo config from the db; create a default config
        # when none exists yet.
        cfg = self.db.load(settings.CONFIG_DB_KEY_PY_REPO_CONFIG)
        if cfg is None:  # pragma: no cover
            log.info('Creating new config file')
            cfg = {'patches': {}}
        return cfg

    def _get_package_list(self, report_errors):
        """Collect meta-data for every archive in pyu-data/new.

        Args:

            report_errors (bool): when True, log rejected packages and
                the reason they were rejected.

        Returns:

            tuple: (package_manifest, patch_manifest)
        """
        # Adds compatible packages to internal package manifest
        # for futher processing
        # Process all packages in new folder and gets
        # url, hash and some outer info.
        log.info('Getting package list')
        # Clears manifest if sign updates runs more the once without
        # app being restarted
        package_manifest = []
        patch_manifest = []
        bad_packages = []
        with ChDir(self.new_dir):
            # Getting a list of all files in the new dir
            packages = os.listdir(os.getcwd())
            for p in packages:
                # On package initialization we do the following
                # 1. Check for a supported archive
                # 2. get required info: version, platform, hash
                # If any check fails package.info['status'] will be False
                # You can query package.info['reason'] for the reason
                package = Package(p)
                if package.info['status'] is False:
                    # Package failed at something
                    # package.info['reason'] will tell why
                    bad_packages.append(package)
                    continue

                # Add package hash
                package.file_hash = gph(package.filename)
                package.file_size = in_bytes(package.filename)
                self.json_data = self._update_file_list(
                    self.json_data, package)

                package_manifest.append(package)
                self.config = self._add_package_to_config(package, self.config)

                if self.patch_support:
                    # If channel is not stable skip patch creation
                    if package.channel != 'stable':
                        log.debug(
                            'Package %s not on stable channel: '
                            'Skipping', p)
                        continue
                    # Will check if source file for patch exists
                    # if so will return the path and number of patch
                    # to create. If missing source file None returned
                    path = self._check_make_patch(
                        self.json_data,
                        package.name,
                        package.platform,
                    )
                    if path is not None:
                        log.info('Found source file to create patch')
                        patch_name = package.name + '-' + package.platform
                        src_path = path[0]
                        patch_number = path[1]
                        patch_info = dict(src=src_path,
                                          dst=os.path.abspath(p),
                                          patch_name=os.path.join(
                                              self.new_dir, patch_name),
                                          patch_num=patch_number,
                                          package=package.filename)
                        # ready for patching
                        patch_manifest.append(patch_info)
                    else:
                        log.warning('No source file to patch from')

        # ToDo: Expose this & remove "pragma: no cover" once done
        if report_errors is True:  # pragma: no cover
            log.warning('Bad package & reason for being naughty:')
            for b in bad_packages:
                log.warning(b.name, b.info['reason'])
        # End ToDo

        return package_manifest, patch_manifest

    def _add_package_to_config(self, p, data):
        if 'package' not in data.keys():
            data['package'] = {}
            log.info('Initilizing config for packages')
        # First package with current name so add platform and version
        if p.name not in data['package'].keys():
            data['package'][p.name] = {p.platform: p.version}
            log.info('Adding new package to config')
        else:
            # Adding platform and version
            if p.platform not in data['package'][p.name].keys():
                data['package'][p.name][p.platform] = p.version
                log.info('Adding new arch to package-config: %s', p.platform)
            else:
                # Getting current version for platform
                value = data['package'][p.name][p.platform]
                # Updating version if applicable
                if p.version > value:
                    log.info('Adding new version to package-config')
                    data['package'][p.name][p.platform] = p.version
        return data

    def _cleanup(self, patch_manifest):
        # Remove old archives that were previously used to create patches
        if len(patch_manifest) < 1:
            return
        log.info('Cleaning up files directory')
        for p in patch_manifest:
            filename = os.path.basename(p['src'])
            directory = os.path.dirname(p['src'])
            remove_previous_versions(directory, filename)

    def _make_patches(self, patch_manifest):
        pool_output = []
        if len(patch_manifest) < 1:
            return pool_output
        log.info('Starting patch creation')
        if sys.platform != 'win32':
            try:
                cpu_count = multiprocessing.cpu_count() * 2
            except Exception as err:
                log.debug(err, exc_info=True)
                log.warning('Cannot get cpu count from os. Using default 2')
                cpu_count = 2

            pool = multiprocessing.Pool(processes=cpu_count)
            pool_output = pool.map(_make_patch, patch_manifest)
        else:
            pool_output = []
            for p in patch_manifest:
                pool_output.append(_make_patch(p))
        return pool_output

    def _add_patches_to_packages(self, package_manifest, patches):
        """Attach patch name/hash/size info to the packages the patches
        were created for.

        Args:

            package_manifest (list): processed package objects
            patches (list): patch results from _make_patches

        Returns:

            list: the (mutated) package manifest
        """
        if patches is not None and len(patches) >= 1:
            log.info('Adding patches to package list')
            for p in patches:
                # We'll skip if patch meta data is incomplete
                if hasattr(p, 'ready') is False:
                    continue
                if hasattr(p, 'ready') and p.ready is False:
                    continue
                for pm in package_manifest:
                    # Match the patch to the package it was built from
                    if p.dst_filename == pm.filename:
                        pm.patch_info['patch_name'] = \
                            os.path.basename(p.patch_name)
                        # Don't try to get hash on a ghost file
                        if not os.path.exists(p.patch_name):
                            p_name = ''
                            p_size = 0
                        else:
                            p_name = gph(p.patch_name)
                            p_size = in_bytes(p.patch_name)
                        pm.patch_info['patch_hash'] = p_name
                        pm.patch_info['patch_size'] = p_size
                        # No need to keep searching
                        # We have the info we need for this patch
                        break
                    else:
                        log.debug('No patch match found')
        else:
            if self.patch_support is True:
                log.warning('No patches found')
        return package_manifest

    def _update_file_list(self, json_data, package_info):
        # Ensure the version manifest has nested entries for this
        # package's name and release channel; missing dicts are created.
        updates = json_data[settings.UPDATES_KEY]
        if json_data.get('latest') is None:
            json_data['latest'] = {}
        if updates.get(package_info.name) is None:
            log.debug('Adding %s to file list', package_info.name)
            json_data[settings.UPDATES_KEY][package_info.name] = {}

        if json_data['latest'].get(package_info.name) is None:
            json_data['latest'][package_info.name] = {}

        channels = json_data['latest'][package_info.name]
        if channels.get(package_info.channel) is None:
            json_data['latest'][package_info.name][package_info.channel] = {}
        return json_data

    def _manifest_to_version_file_compat(self, package_info):
        # Checking for patch info. Patch info maybe be none
        patch_name = package_info.patch_info.get('patch_name')
        patch_hash = package_info.patch_info.get('patch_hash')
        patch_size = package_info.patch_info.get('patch_size')

        # Converting info to version file format
        info = {
            'file_hash': package_info.file_hash,
            'file_size': package_info.file_size,
            'filename': package_info.filename
        }

        # Adding patch info if available
        if patch_name and patch_hash:
            info['patch_name'] = patch_name
            info['patch_hash'] = patch_hash
            info['patch_size'] = patch_size

        return info

    def _update_version_file(self, json_data, package_manifest):
        """Merge each processed package's meta-data into the version
        manifest and update the per-channel 'latest' pointers.

        Returns:

            dict: the updated version manifest
        """
        # Adding version metadata from scanned packages to our
        # version manifest
        log.info('Adding package meta-data to version manifest')
        easy_dict = EasyAccessDict(json_data)
        for p in package_manifest:
            info = self._manifest_to_version_file_compat(p)

            version_key = '{}*{}*{}'.format(settings.UPDATES_KEY, p.name,
                                            p.version)
            version = easy_dict.get(version_key)
            log.debug('Package Info: %s', version)

            # If we cannot get a version number this must be the first version
            # of its kind.
            if version is None:
                log.debug('Adding new version to file')

                # First version with this package name
                json_data[settings.UPDATES_KEY][p.name][p.version] = {}
                platform_key = '{}*{}*{}*{}'.format(settings.UPDATES_KEY,
                                                    p.name, p.version,
                                                    'platform')

                platform = easy_dict.get(platform_key)
                if platform is None:
                    _name = json_data[settings.UPDATES_KEY][p.name]
                    _name[p.version][p.platform] = info

            else:
                # package already present, adding another version to it
                log.debug('Appending info data to version file')
                _updates = json_data[settings.UPDATES_KEY]
                _updates[p.name][p.version][p.platform] = info

            # Add each package to latests section separated by release channel
            json_data['latest'][p.name][p.channel][p.platform] = p.version
        return json_data

    def _write_json_to_file(self, json_data):
        """Persist the version manifest through the config db."""
        log.debug('Saving version meta-data')
        key = settings.CONFIG_DB_KEY_VERSION_META
        self.db.save(key, json_data)

    def _write_config_to_file(self, json_data):
        """Persist the repo config through the config db."""
        log.debug('Saving config data')
        key = settings.CONFIG_DB_KEY_PY_REPO_CONFIG
        self.db.save(key, json_data)

    def _move_packages(self, package_manifest):
        """Relocate processed packages out of the new dir.

        For each package: its patch (if one was created) is moved to the
        deploy dir, the archive itself is copied to the deploy dir, and
        the archive is then moved into the files dir for future patching.
        """
        if not package_manifest:
            return
        log.info('Moving packages to deploy folder')
        for pkg in package_manifest:
            patch_name = pkg.patch_info.get('patch_name')
            with ChDir(self.new_dir):
                if patch_name:
                    # Replace any stale patch already sitting in deploy
                    deployed_patch = os.path.join(self.deploy_dir, patch_name)
                    if os.path.exists(deployed_patch):
                        os.remove(deployed_patch)
                    log.debug('Moving %s to %s', patch_name, self.deploy_dir)
                    if os.path.exists(patch_name):
                        shutil.move(patch_name, self.deploy_dir)

                shutil.copy(pkg.filename, self.deploy_dir)
                log.debug('Copying %s to %s', pkg.filename, self.deploy_dir)

                # Replace any stale archive in the files dir
                archived = os.path.join(self.files_dir, pkg.filename)
                if os.path.exists(archived):
                    os.remove(archived)
                shutil.move(pkg.filename, self.files_dir)
                log.debug('Moving %s to %s', pkg.filename, self.files_dir)

    def _check_make_patch(self, json_data, name, platform):
        """Decide whether a patch can be created for (name, platform).

        Returns a tuple ``(src_file_path, patch_number)`` when a previous
        release archive exists to diff against, otherwise ``None``.
        Side effect: increments / initializes ``self.config['patches']``
        bookkeeping for *name*.
        """
        # Check to see if previous version is available to
        # make patch updates. Also calculates patch number
        log.debug(json.dumps(json_data['latest'], indent=2))
        log.info('Checking if patch creation is possible')
        # bsdiff4 is an optional dependency; without it no patches
        if bsdiff4 is None:
            log.warning('Bsdiff is missing. Cannot create patches')
            return None
        src_file_path = None
        if os.path.exists(self.files_dir):
            with ChDir(self.files_dir):
                files = os.listdir(os.getcwd())
                log.debug('Found %s files in files dir', len(files))

            files = remove_dot_files(files)
            # No src files to patch from. Exit quickly
            if len(files) == 0:
                log.debug('No src file to patch from')
                return None
            # If latest not available in version file. Exit
            # NOTE(review): only the 'stable' channel is consulted here —
            # confirm patches are intentionally stable-only.
            try:
                log.debug('Looking for %s on %s', name, platform)
                latest = json_data['latest'][name]['stable'][platform]
                log.debug('Found latest version for patches')
            except KeyError:
                log.debug('Cannot find latest version in version meta')
                return None
            try:
                latest_platform = json_data[settings.UPDATES_KEY][name][latest]
                log.debug('Found latest platform for patches')
                try:
                    # Old (pre-upgrade) version files lack the per-platform
                    # 'filename' key; bail out with guidance.
                    filename = latest_platform[platform]['filename']
                    log.debug('Found filename for patches')
                except KeyError:
                    log.error('Found old version file. Please read '
                              'the upgrade section in the docs.')
                    log.debug('Found old verison file')
                    return None
            except Exception as err:
                log.debug(err, exc_info=True)
                return None
            log.debug('Generating src file path')
            src_file_path = os.path.join(self.files_dir, filename)

            try:
                patch_num = self.config['patches'][name]
                log.debug('Found patch number')
                self.config['patches'][name] += 1
            except KeyError:
                log.debug('Cannot find patch number')
                # If no patch number we will start at 1
                patch_num = 1
                # NOTE(review): this branch stores patch_num + 1 (= 2) while
                # the success branch stores the pre-increment value — the two
                # paths leave config in different states; verify intended.
                if 'patches' not in self.config.keys():
                    log.debug('Adding patches to version meta')
                    self.config['patches'] = {}
                if name not in self.config['patches'].keys():
                    log.debug('Adding %s to patches version meta', name)
                    self.config['patches'][name] = patch_num + 1
            num = patch_num + 1
            log.debug('Patch Number: %s', num)
            return src_file_path, num
        return None
예제 #8
0
class KeyHandler(object):
    """KeyHandler object is used to manage keys used for signing updates

    Kwargs:

        app (obj): Config object to get config values from
    """

    def __init__(self):
        # Key/value store backed by .pyupdater/config.pyu
        self.db = Storage()

        # Encoding used for ed25519 keys and signatures
        self.key_encoding = "base64"
        data_dir = os.getcwd()
        self.data_dir = os.path.join(data_dir, settings.USER_DATA_FOLDER)
        self.deploy_dir = os.path.join(self.data_dir, "deploy")

        # Name of the keypack to import. It should be placed
        # in the root of the repo
        self.keypack_filename = os.path.join(
            data_dir, settings.CONFIG_DATA_FOLDER, settings.KEYPACK_FILENAME
        )

        # The name of the gzipped version file in
        # the pyu-data/deploy directory
        self.version_file = os.path.join(
            self.deploy_dir, settings.VERSION_FILE_FILENAME
        )

        # Legacy (non-split) version file name, same directory
        self.version_file_compat = os.path.join(
            self.deploy_dir, settings.VERSION_FILE_FILENAME_COMPAT
        )

        # The name of the gzipped key file in
        # the pyu-data/deploy directory
        self.key_file = os.path.join(self.deploy_dir, settings.KEY_FILE_FILENAME)

    def sign_update(self, split_version):
        """Signs version file with private key

        Proxy method for :meth:`_add_sig`
        """
        # Loads private key
        # Loads version file to memory
        # Signs Version file
        # Writes version file back to disk
        self._add_sig(split_version)

    def _load_private_keys(self):
        """Return the app's raw private key from the keypack, or None."""
        # Loads private key
        log.debug("Loading private key")

        # Loading keypack data from .pyupdater/config.pyu
        keypack_data = self.db.load(settings.CONFIG_DB_KEY_KEYPACK)
        private_key = None
        if keypack_data is not None:
            try:
                private_key = keypack_data["repo"]["app_private"]
            except KeyError:
                # We will exit in _add_sig if private_key is None
                pass
        return private_key

    def _add_sig(self, split_version):
        """Sign the update manifest and write it (plus the key file) to
        the deploy dir. No-op (with an error log) if no private key."""
        # Adding new signature to version file
        # Raw private key will need to be converted into
        # a signing key object
        private_key_raw = self._load_private_keys()
        if private_key_raw is None:
            log.error("Private Key not found. Please " "import a keypack & try again")
            return

        # Load update manifest
        update_data = self._load_update_data()

        # We don't want to verify the signature
        if "signature" in update_data:
            log.debug("Removing signatures from version file")
            del update_data["signature"]

        # We create a signature from the string
        # sort_keys=True makes the serialization (and thus the signature)
        # deterministic
        update_data_str = json.dumps(update_data, sort_keys=True)

        private_key_raw = private_key_raw.encode("utf-8")

        # Creating signing key object
        private_key = ed25519.SigningKey(private_key_raw, encoding=self.key_encoding)
        log.debug("Signing update data")
        # Signs update data with private key
        # NOTE(review): latin-1 here presumably matches the client's
        # verification encoding — confirm before changing.
        signature = private_key.sign(
            bytes(update_data_str, 'latin-1'), encoding=self.key_encoding
        ).decode()
        log.debug("Sig: %s", signature)

        # Create new dict from json string
        update_data = json.loads(update_data_str)

        # Add signatures to update data
        update_data["signature"] = signature
        log.debug("Adding signature to update data")

        # Write updated version file to .pyupdater/config.pyu
        self._write_update_data(update_data, split_version)

        # Write gzipped key file
        self._write_key_file()

    def _write_update_data(self, data, split_version):
        """Gzip *data* (the signed manifest) into the deploy dir.

        ``split_version`` selects the new-style file name; otherwise the
        legacy compat name is used.
        """
        log.debug("Saved version meta data")

        if split_version:
            version_file = self.version_file
        else:
            version_file = self.version_file_compat

        # Gzip update date
        with gzip.open(version_file, "wb") as f:
            new_data = json.dumps(data)
            f.write(bytes(new_data, "utf-8"))

        log.debug("Created gzipped version manifest in deploy dir")

    def _write_key_file(self):
        """Gzip the keypack's 'upload' section into the deploy dir."""
        keypack_data = self.db.load(settings.CONFIG_DB_KEY_KEYPACK)
        if keypack_data is None:
            log.error("Private Key not found. Please import a keypack & try again")
            return

        upload_data = keypack_data["upload"]
        with gzip.open(self.key_file, "wb") as f:
            new_data = json.dumps(upload_data)
            f.write(bytes(new_data, "utf-8"))
        log.debug("Created gzipped key file in deploy dir")

    def _load_update_data(self):
        """Return a deep copy of the stored version meta-data,
        initializing (and persisting) an empty dict if none exists."""
        log.debug("Loading version data")
        update_data = self.db.load(settings.CONFIG_DB_KEY_VERSION_META)
        # If update_data is None, create a new one
        if update_data is None:
            update_data = {}
            log.error("Version meta data not found")
            self.db.save(settings.CONFIG_DB_KEY_VERSION_META, update_data)
            log.debug("Created new version meta data")
        log.debug("Version file loaded")

        # Deep copy so callers can mutate freely without touching the db cache
        return copy.deepcopy(update_data)
예제 #9
0
class Loader(object):
    """Loads & saves the repo config file and writes the client config."""

    def __init__(self):
        self.cwd = os.getcwd()
        self.db = Storage()
        self.password = os.environ.get(settings.USER_PASS_ENV)
        self.config_key = settings.CONFIG_DB_KEY_APP_CONFIG

    def load_config(self):
        """Loads config from database (json file)

            Returns (obj): Config object
        """
        stored = self.db.load(self.config_key) or {}
        config = Config()
        for key, value in stored.items():
            config[key] = value
        config.DATA_DIR = os.getcwd()
        return config

    def get_app_name(self):
        """Return APP_NAME from the stored configuration."""
        return self.load_config().APP_NAME

    def save_config(self, obj):
        """Saves config file to pyupdater database

        Args:

            obj (obj): config object
        """
        log.info('Saving Config')
        self.db.save(self.config_key, obj)
        log.info('Config saved')
        self._write_config_py(obj)
        log.info('Wrote client config')

    def _write_config_py(self, obj):
        """Writes client config to client_config.py

        Args:

            obj (obj): config object
        """
        keypack_data = self.db.load(settings.CONFIG_DB_KEY_KEYPACK)
        if keypack_data is None:
            public_key = None
        else:
            public_key = keypack_data['client']['offline_public']

        filename = os.path.join(self.cwd, *obj.CLIENT_CONFIG_PATH)
        quoted_attr = "    {} = '{}'\n"
        literal_attr = "    {} = {}\n"
        with open(filename, 'w') as f:
            f.write('class ClientConfig(object):\n')

            app_name = getattr(obj, 'APP_NAME', None)
            if app_name is not None:
                f.write(quoted_attr.format('APP_NAME', app_name))
                log.debug('Wrote APP_NAME to client config')

            company_name = getattr(obj, 'COMPANY_NAME', None)
            if company_name is not None:
                f.write(quoted_attr.format('COMPANY_NAME', company_name))
                log.debug('Wrote COMPANY_NAME to client config')

            update_urls = getattr(obj, 'UPDATE_URLS', None)
            if update_urls is not None:
                f.write(literal_attr.format('UPDATE_URLS', update_urls))
                log.debug('Wrote UPDATE_URLS to client config')

            f.write(quoted_attr.format('PUBLIC_KEY', public_key))
            log.debug('Wrote PUBLIC_KEY to client config')
예제 #10
0
class ConfigManager(object):
    """Loads & saves the repo config and generates client_config.py."""

    def __init__(self):
        self.cwd = os.getcwd()
        self.db = Storage()
        self.config_key = settings.CONFIG_DB_KEY_APP_CONFIG

    # Loads config from database (json file)
    def load_config(self):
        """Return a Config object populated with stored values.

        DATA_DIR is always reset to the current working directory.
        """
        config_data = self.db.load(self.config_key)
        if config_data is None:
            config_data = {}
        config = Config()
        for k, v in config_data.items():
            config[k] = v
        config.DATA_DIR = os.getcwd()
        return config

    def get_app_name(self):
        """Return APP_NAME from the stored configuration."""
        config = self.load_config()
        return config.APP_NAME

    # Saves config to database (json file)
    def save_config(self, obj):
        """Persist *obj* to the config db, then regenerate the client
        config module so the two stay in sync.

        Args:

            obj (obj): config object
        """
        log.debug("Saving Config")
        self.db.save(self.config_key, obj)
        log.debug("Config saved")
        self.write_config_py(obj)
        log.debug("Wrote client config")

    # Writes client config to client_config.py
    def write_config_py(self, obj):
        """Write the client-facing settings from *obj* (plus the offline
        public key from the keypack, if imported) to client_config.py.

        Args:

            obj (obj): config object
        """
        keypack_data = self.db.load(settings.CONFIG_DB_KEY_KEYPACK)
        if keypack_data is None:
            log.debug("*** Keypack data is None ***")
            public_key = None
        else:
            public_key = keypack_data["client"]["offline_public"]

        filename = os.path.join(self.cwd, *obj.CLIENT_CONFIG_PATH)
        # Template for values emitted as quoted strings
        attr_str_format = "    {} = '{}'\n"
        # Template for values emitted as Python literals (lists, ints)
        attr_format = "    {} = {}\n"

        log.debug("Writing client_config.py")
        with open(filename, "w") as f:
            f.write("class ClientConfig(object):\n")

            log.debug("Adding PUBLIC_KEY to client_config.py")
            f.write(attr_str_format.format("PUBLIC_KEY", public_key))

            if hasattr(obj, "APP_NAME"):
                log.debug("Adding APP_NAME to client_config.py")
                f.write(attr_str_format.format("APP_NAME", obj.APP_NAME))

            if hasattr(obj, "COMPANY_NAME"):
                log.debug("Adding COMPANY_NAME to client_config.py")
                f.write(
                    attr_str_format.format("COMPANY_NAME", obj.COMPANY_NAME))

            if hasattr(obj, "HTTP_TIMEOUT"):
                # Fixed typo in log message ("cilent" -> "client")
                log.debug("Adding HTTP_TIMEOUT to client_config.py")
                f.write(attr_format.format("HTTP_TIMEOUT", obj.HTTP_TIMEOUT))

            if hasattr(obj, "MAX_DOWNLOAD_RETRIES"):
                log.debug("Adding MAX_DOWNLOAD_RETRIES to client_config.py")
                f.write(
                    attr_format.format("MAX_DOWNLOAD_RETRIES",
                                       obj.MAX_DOWNLOAD_RETRIES))

            if hasattr(obj, "UPDATE_URLS"):
                log.debug("Adding UPDATE_URLS to client_config.py")
                f.write(attr_format.format("UPDATE_URLS", obj.UPDATE_URLS))
예제 #11
0
class PackageHandler(object):
    """Handles finding, sorting, getting meta-data, moving packages.

    Kwargs:

        app (instance): Config object
    """
    def __init__(self, config=None):
        # Configuration data
        self.config = None

        # Version manifest file
        self.version_data = None

        # Specifies if the config file needs to be loaded
        self.config_loaded = False

        # Used to store config information
        self.db = Storage()

        if config:
            # Support for creating patches
            self.patch_support = config.get("UPDATE_PATCHES", True) is True
        else:
            self.patch_support = False

        # References the pyu-data folder in the root of repo
        self.data_dir = os.path.join(os.getcwd(), settings.USER_DATA_FOLDER)
        self.files_dir = os.path.join(self.data_dir, "files")
        self.deploy_dir = os.path.join(self.data_dir, "deploy")
        self.new_dir = os.path.join(self.data_dir, "new")
        self.config_dir = os.path.join(os.getcwd(),
                                       settings.CONFIG_DATA_FOLDER)
        self.setup()

    def setup(self):
        """Creates working directories & loads json files."""
        if self.data_dir is not None:
            self._setup()

    def _setup(self):
        # Ensure work dirs exist, then load version/config data exactly once
        self._setup_work_dirs()
        if self.config_loaded is False:
            self.version_data = self._load_version_file()
            self.config = self._load_config()
            self.config_loaded = True

    def process_packages(self, report_errors=False):
        """Gets a list of updates to process.  Adds the name of an
        update to the version file if not already present.  Processes
        all packages.  Updates the version file meta-data. Then writes
        version file back to disk.
        """
        if self.patch_support:
            log.info("Patch support enabled")
        else:
            log.info("Patch support disabled")

        # Getting a list of meta data from all packages in the
        # pyu-data/new directory. Also create a patch manifest
        # to create patches.
        pkg_manifest, patch_manifest = self._get_package_list(report_errors)

        patches = PackageHandler._make_patches(patch_manifest)
        PackageHandler._cleanup(patch_manifest)
        PackageHandler._add_patches_to_packages(pkg_manifest, patches,
                                                self.patch_support)
        PackageHandler._update_version_file(self.version_data, pkg_manifest)

        self._write_json_to_file(self.version_data)
        self._write_config_to_file(self.config)
        self._move_packages(pkg_manifest)

    def _setup_work_dirs(self):
        # Sets up work dirs on dev machine.  Creates the following folder
        #    - Data dir
        # Then inside the data folder it creates 3 more folders
        #    - New - for new updates that need to be signed
        #    - Deploy - All files ready to upload are placed here.
        #    - Files - All updates are placed here for future reference
        #
        # This is non destructive
        dirs = [
            self.data_dir,
            self.new_dir,
            self.deploy_dir,
            self.files_dir,
            self.config_dir,
        ]
        for d in dirs:
            if not os.path.exists(d):
                log.info("Creating dir: %s", d)
                os.mkdir(d)

    def _load_version_file(self):
        # If version file is found its loaded to memory
        # If no version file is found then one is created.
        json_data = self.db.load(settings.CONFIG_DB_KEY_VERSION_META)
        if json_data is None:  # pragma: no cover
            log.warning("Version file not found")
            # NOTE(review): literal "updates" — presumably equal to
            # settings.UPDATES_KEY used everywhere else; confirm.
            json_data = {"updates": {}}
            log.debug("Created new version file")
        return json_data

    def _load_config(self):
        # Loads config from db if exists.
        # If config doesn't exists create new one
        config = self.db.load(settings.CONFIG_DB_KEY_PY_REPO_CONFIG)
        if config is None:  # pragma: no cover
            log.debug("Creating new config file")
            config = {"patches": {}}
        return config

    def _get_package_list(self, report_errors):
        """Scan pyu-data/new and return (package_manifest, patch_manifest).

        Side effects: updates self.version_data's name/channel structure
        and self.config's per-package version bookkeeping.
        """
        # Adds compatible packages to internal package manifest
        # for further processing
        # Process all packages in new folder and gets
        # url, hash and some outer info.
        log.info("Generating package list")
        # Clears manifest if sign updates runs more the once without
        # app being restarted
        package_manifest = []
        patch_manifest = []
        bad_packages = []
        with ChDir(self.new_dir):
            # Getting a list of all files in the new dir
            packages = os.listdir(os.getcwd())
            for p in packages:
                # On package initialization we do the following
                # 1. Check for a supported archive
                # 2. get required info: version, platform, hash
                # If any check fails new_pkg.info['status'] will be False
                # You can query new_pkg.info['reason'] for the reason
                new_pkg = Package(p)
                if new_pkg.info["status"] is False:
                    # Package failed at something
                    # new_pkg.info['reason'] will tell why
                    bad_packages.append(new_pkg)
                    continue

                # Add package hash
                new_pkg.file_hash = gph(new_pkg.filename)
                new_pkg.file_size = in_bytes(new_pkg.filename)

                PackageHandler._update_file_list(self.version_data, new_pkg)

                package_manifest.append(new_pkg)
                PackageHandler._add_package_to_config(new_pkg, self.config)

                if self.patch_support:
                    data = {
                        "filename": p,
                        "files_dir": self.files_dir,
                        "new_dir": self.new_dir,
                        "json_data": self.version_data,
                        "pkg_info": new_pkg,
                        "config": self.config,
                    }
                    _patch = Patch(**data)

                    if _patch.ok:
                        patch_manifest.append(_patch)

        if report_errors is True:  # pragma: no cover
            log.warning("Bad package & reason for being naughty:")
            for b in bad_packages:
                # NOTE(review): b.name is passed as the log format string
                # here; likely intended log.warning('%s %s', b.name, ...)
                log.warning(b.name, b.info["reason"])

        return package_manifest, patch_manifest

    @staticmethod
    def _add_package_to_config(p, data):
        """Record the highest seen version of package *p* per platform in
        the repo config dict *data* (mutated in place)."""
        if "package" not in data.keys():
            data["package"] = {}
            log.debug("Initializing config for packages")
        # First package with current name so add platform and version
        if p.name not in data["package"].keys():
            data["package"][p.name] = {p.platform: p.version}
            log.debug("Adding new package to config")
        else:
            # Adding platform and version
            if p.platform not in data["package"][p.name].keys():
                data["package"][p.name][p.platform] = p.version
                log.debug("Adding new arch to package-config: %s", p.platform)
            else:
                # Getting current version for platform
                value = data["package"][p.name][p.platform]
                # Updating version if applicable
                if p.version > value:
                    log.debug("Adding new version to package-config")
                    data["package"][p.name][p.platform] = p.version

    @staticmethod
    def _cleanup(patch_manifest):
        # Remove old archives that were previously used to create patches
        if len(patch_manifest) < 1:
            return
        log.info("Cleaning up stale files")
        for p in patch_manifest:
            remove_previous_versions(os.path.dirname(p.src), p.dst)

    @staticmethod
    def _make_patches(patch_manifest):
        """Create binary patches for every entry in *patch_manifest*.

        Uses a process pool on non-Windows platforms; returns the list of
        per-patch results from make_patch.
        """
        pool_output = []
        if len(patch_manifest) < 1:
            return pool_output
        log.info("Starting patch creation")
        if sys.platform != "win32":
            try:
                cpu_count = multiprocessing.cpu_count() * 2
            except Exception as err:
                log.debug(err, exc_info=True)
                log.warning("Cannot get cpu count from os. Using default 2")
                cpu_count = 2

            pool = multiprocessing.Pool(processes=cpu_count)
            pool_output = pool.map(make_patch, patch_manifest)
        else:
            # Windows: run serially (spawn-based multiprocessing avoided)
            pool_output = []
            for p in patch_manifest:
                pool_output.append(make_patch(p))
        return pool_output

    @staticmethod
    def _add_patches_to_packages(package_manifest, patches, patch_support):
        """Attach each successfully built patch (with its hash and size)
        to the matching package in *package_manifest*."""
        if patches is not None and len(patches) >= 1:
            log.debug("Adding patches to package list")
            for p in patches:
                if not p.ok or not os.path.exists(p.patch_name):
                    continue

                log.debug("We have a good patch: %s", p)
                for pm in package_manifest:
                    if p.dst_filename == pm.filename:
                        pm.patch = p
                        pm.patch.hash = gph(pm.patch.patch_name)
                        pm.patch.size = in_bytes(pm.patch.patch_name)
                        break
                    else:
                        log.debug("No patch match found")
        else:
            if patch_support is True:
                log.debug("No patches found: %s", patches)

    @staticmethod
    def _update_file_list(json_data, package_info):
        """Ensure the nested dicts for package_info's name and channel
        exist in *json_data* (mutated in place) and return it."""
        files = json_data[settings.UPDATES_KEY]
        latest = json_data.get("latest")
        if latest is None:
            json_data["latest"] = {}
        filename = files.get(package_info.name)
        if filename is None:
            log.debug("Adding %s to file list", package_info.name)
            json_data[settings.UPDATES_KEY][package_info.name] = {}

        latest_package = json_data["latest"].get(package_info.name)
        if latest_package is None:
            json_data["latest"][package_info.name] = {}

        latest_package = json_data["latest"][package_info.name]
        latest_channel = latest_package.get(package_info.channel)
        if latest_channel is None:
            json_data["latest"][package_info.name][package_info.channel] = {}
        return json_data

    @staticmethod
    def _manifest_to_version_file_compat(package_info):
        """Convert a package's meta-data to the version-file dict format,
        including patch fields when a patch was attached."""
        # Converting info to version file format
        info = {
            "file_hash": package_info.file_hash,
            "file_size": package_info.file_size,
            "filename": package_info.filename,
        }

        # Adding patch info if available
        if package_info.patch is not None:
            info["patch_name"] = package_info.patch.basename
            info["patch_hash"] = package_info.patch.hash
            info["patch_size"] = package_info.patch.size

        return info

    @staticmethod
    def _update_version_file(json_data, package_manifest):
        """Merge meta-data for every package into the version manifest
        *json_data* (mutated in place) and return it."""
        # Adding version metadata from scanned packages to our
        # version manifest
        log.info("Adding package meta-data to version manifest")
        easy_dict = EasyAccessDict(json_data)
        for p in package_manifest:
            info = PackageHandler._manifest_to_version_file_compat(p)

            # '*'-separated key path for EasyAccessDict lookups
            version_key = "{}*{}*{}".format(settings.UPDATES_KEY, p.name,
                                            p.version)
            version = easy_dict.get(version_key)
            log.debug("Package Info: %s", version)

            # If we cannot get a version number this must be the first version
            # of its kind.
            if version is None:
                log.debug("Adding new version to file")

                # First version with this package name
                json_data[settings.UPDATES_KEY][p.name][p.version] = {}
                platform_key = "{}*{}*{}*{}".format(settings.UPDATES_KEY,
                                                    p.name, p.version,
                                                    "platform")

                platform = easy_dict.get(platform_key)
                if platform is None:
                    _name = json_data[settings.UPDATES_KEY][p.name]
                    _name[p.version][p.platform] = info

            else:
                # package already present, adding another version to it
                log.debug("Appending info data to version file")
                _updates = json_data[settings.UPDATES_KEY]
                _updates[p.name][p.version][p.platform] = info

            # Add each package to latest section separated by release channel
            json_data["latest"][p.name][p.channel][p.platform] = p.version
        return json_data

    def _write_json_to_file(self, json_data):
        # Writes json data to disk
        log.debug("Saving version meta-data")
        self.db.save(settings.CONFIG_DB_KEY_VERSION_META, json_data)

    def _write_config_to_file(self, json_data):
        # Writes repo config to disk
        log.debug("Saving config data")
        self.db.save(settings.CONFIG_DB_KEY_PY_REPO_CONFIG, json_data)

    def _move_packages(self, package_manifest):
        """Move patches and archives from the new dir to the deploy and
        files dirs respectively."""
        if len(package_manifest) < 1:
            return
        log.info("Moving packages to deploy folder")
        for p in package_manifest:
            with ChDir(self.new_dir):
                if p.patch is not None:
                    if os.path.exists(
                            os.path.join(self.deploy_dir, p.patch.basename)):
                        os.remove(
                            os.path.join(self.deploy_dir, p.patch.basename))
                    log.debug("Moving %s to %s", p.patch.basename,
                              self.deploy_dir)
                    if os.path.exists(p.patch.basename):
                        shutil.move(p.patch.basename, self.deploy_dir)

                shutil.copy(p.filename, self.deploy_dir)
                log.debug("Copying %s to %s", p.filename, self.deploy_dir)

                if os.path.exists(os.path.join(self.files_dir, p.filename)):
                    os.remove(os.path.join(self.files_dir, p.filename))
                shutil.move(p.filename, self.files_dir)
                log.debug("Moving %s to %s", p.filename, self.files_dir)
예제 #12
0
class PackageHandler(object):
    """Handles finding, sorting, getting meta-data, moving packages.

    Kwargs:

        app (instance): Config object
    """
    def __init__(self, config=None):
        # Configuration data
        self.config = None

        # Version manifest file
        self.version_data = None

        # Specifies if the config file needs to be loaded
        self.config_loaded = False

        # Used to store config information
        self.db = Storage()

        if config:
            # Support for creating patches
            self.patch_support = config.get('UPDATE_PATCHES', True) is True
        else:
            self.patch_support = False

        # References the pyu-data folder in the root of repo
        self.data_dir = os.path.join(os.getcwd(), settings.USER_DATA_FOLDER)
        self.files_dir = os.path.join(self.data_dir, 'files')
        self.deploy_dir = os.path.join(self.data_dir, 'deploy')
        self.new_dir = os.path.join(self.data_dir, 'new')
        self.config_dir = os.path.join(os.getcwd(),
                                       settings.CONFIG_DATA_FOLDER)
        self.setup()

    def setup(self):
        """Creates working directories & loads json files."""
        if self.data_dir is not None:
            self._setup()

    def _setup(self):
        # Create work dirs, then load version meta-data & repo config
        # exactly once per instance (guarded by config_loaded).
        self._setup_work_dirs()
        if self.config_loaded is False:
            self.version_data = self._load_version_file()
            self.config = self._load_config()
            self.config_loaded = True

    def process_packages(self, report_errors=False):
        """Gets a list of updates to process.  Adds the name of an
        update to the version file if not already present.  Processes
        all packages.  Updates the version file meta-data. Then writes
        version file back to disk.

        Args:

            report_errors (bool): when True, log each package that
                failed initialization along with the reason.
        """
        if self.patch_support:
            log.info('Patch support enabled')
        else:
            log.info('Patch support disabled')

        # Getting a list of meta data from all packages in the
        # pyu-data/new directory. Also create a patch manifest
        # to create patches.
        pkg_manifest, patch_manifest = self._get_package_list(report_errors)
        patches = PackageHandler._make_patches(patch_manifest)
        PackageHandler._cleanup(patch_manifest)
        pkg_manifest = self._add_patches_to_packages(pkg_manifest,
                                                     patches)
        # PEP8
        json_data = PackageHandler._update_version_file(self.version_data,
                                                        pkg_manifest)
        self.version_data = json_data
        self._write_json_to_file(self.version_data)
        self._write_config_to_file(self.config)
        self._move_packages(pkg_manifest)

    def _setup_work_dirs(self):
        # Sets up work dirs on dev machine.  Creates the following folder
        #    - Data dir
        # Then inside the data folder it creates 3 more folders
        #    - New - for new updates that need to be signed
        #    - Deploy - All files ready to upload are placed here.
        #    - Files - All updates are placed here for future reference
        #
        # This is non destructive
        dirs = [self.data_dir, self.new_dir,
                self.deploy_dir, self.files_dir,
                self.config_dir]
        for d in dirs:
            if not os.path.exists(d):
                log.info('Creating dir: %s', d)
                os.mkdir(d)

    def _load_version_file(self):
        # If version file is found its loaded to memory
        # If no version file is found then one is created.
        json_data = self.db.load(settings.CONFIG_DB_KEY_VERSION_META)
        if json_data is None:  # pragma: no cover
            log.warning('Version file not found')
            json_data = {'updates': {}}
            log.debug('Created new version file')
        return json_data

    def _load_config(self):
        # Loads config from db if exists.
        # If config doesn't exists create new one
        config = self.db.load(settings.CONFIG_DB_KEY_PY_REPO_CONFIG)
        if config is None:  # pragma: no cover
            log.debug('Creating new config file')
            config = {
                'patches': {}
                }
        return config

    def _get_package_list(self, report_errors):
        # Adds compatible packages to internal package manifest
        # for futher processing
        # Process all packages in new folder and gets
        # url, hash and some outer info.
        # Returns a (package_manifest, patch_manifest) tuple.
        log.info('Generating package list')
        # Clears manifest if sign updates runs more the once without
        # app being restarted
        package_manifest = []
        patch_manifest = []
        bad_packages = []
        with ChDir(self.new_dir):
            # Getting a list of all files in the new dir
            packages = os.listdir(os.getcwd())
            for p in packages:
                # On package initialization we do the following
                # 1. Check for a supported archive
                # 2. get required info: version, platform, hash
                # If any check fails package.info['status'] will be False
                # You can query package.info['reason'] for the reason
                package = Package(p)
                if package.info['status'] is False:
                    # Package failed at something
                    # package.info['reason'] will tell why
                    bad_packages.append(package)
                    continue

                # Add package hash
                package.file_hash = gph(package.filename)
                package.file_size = in_bytes(package.filename)
                self.version_data = PackageHandler._update_file_list(self.version_data,
                                                                  package)

                package_manifest.append(package)
                self.config = PackageHandler._add_package_to_config(package,
                                                                    self.config)

                if self.patch_support:
                    # If channel is not stable skip patch creation
                    if package.channel != 'stable':
                        log.debug('Package %s not on stable channel: '
                                  'Skipping', p)
                        continue
                    # Will check if source file for patch exists
                    # if so will return the path and number of patch
                    # to create. If missing source file None returned
                    path = self._check_make_patch(self.version_data,
                                                  package.name,
                                                  package.platform,
                                                  )
                    if path is not None:
                        log.debug('Found source file to create patch')
                        patch_name = package.name + '-' + package.platform
                        src_path = path[0]
                        patch_number = path[1]
                        patch_info = dict(src=src_path,
                                          dst=os.path.abspath(p),
                                          patch_name=os.path.join(self.new_dir,
                                                                  patch_name),
                                          patch_num=patch_number,
                                          package=package.filename)
                        # ready for patching
                        patch_manifest.append(patch_info)
                    else:
                        log.warning('No source file to patch from')

        if report_errors is True:  # pragma: no cover
            log.warning('Bad package & reason for being naughty:')
            for b in bad_packages:
                log.warning(b.name, b.info['reason'])

        return package_manifest, patch_manifest

    @staticmethod
    def _add_package_to_config(p, data):
        # Record the highest seen version of package p, per platform,
        # in the repo config dict. Returns the (mutated) dict.
        if 'package' not in data.keys():
            data['package'] = {}
            log.debug('Initilizing config for packages')
        # First package with current name so add platform and version
        if p.name not in data['package'].keys():
            data['package'][p.name] = {p.platform: p.version}
            log.debug('Adding new package to config')
        else:
            # Adding platform and version
            if p.platform not in data['package'][p.name].keys():
                data['package'][p.name][p.platform] = p.version
                log.debug('Adding new arch to package-config: %s', p.platform)
            else:
                # Getting current version for platform
                value = data['package'][p.name][p.platform]
                # Updating version if applicable
                if p.version > value:
                    log.debug('Adding new version to package-config')
                    data['package'][p.name][p.platform] = p.version
        return data

    @staticmethod
    def _cleanup(patch_manifest):
        # Remove old archives that were previously used to create patches
        if len(patch_manifest) < 1:
            return
        log.info('Cleaning up stale files')
        for p in patch_manifest:
            filename = os.path.basename(p['src'])
            directory = os.path.dirname(p['src'])
            remove_previous_versions(directory, filename)

    @staticmethod
    def _make_patches(patch_manifest):
        # Create one binary patch per manifest entry. Uses a process
        # pool except on Windows, where patches are created serially
        # (multiprocessing's spawn start method re-imports the caller).
        pool_output = []
        if len(patch_manifest) < 1:
            return pool_output
        log.info('Starting patch creation')
        if sys.platform != 'win32':
            try:
                cpu_count = multiprocessing.cpu_count() * 2
            except Exception as err:
                log.debug(err, exc_info=True)
                log.warning('Cannot get cpu count from os. Using default 2')
                cpu_count = 2

            pool = multiprocessing.Pool(processes=cpu_count)
            pool_output = pool.map(_make_patch,
                                   patch_manifest)
        else:
            pool_output = []
            for p in patch_manifest:
                pool_output.append(_make_patch(p))
        return pool_output

    def _add_patches_to_packages(self, package_manifest, patches):
        # Attach patch name/hash/size to the patch_info dict of each
        # package whose archive a patch targets.
        if patches is not None and len(patches) >= 1:
            log.debug('Adding patches to package list')
            for p in patches:
                # We'll skip if patch meta data is incomplete
                if hasattr(p, 'ready') is False:
                    continue
                if hasattr(p, 'ready') and p.ready is False:
                    continue
                for pm in package_manifest:
                    #
                    if p.dst_filename == pm.filename:
                        pm.patch_info['patch_name'] = \
                            os.path.basename(p.patch_name)
                        # Don't try to get hash on a ghost file
                        if not os.path.exists(p.patch_name):
                            p_name = ''
                            p_size = 0
                        else:
                            p_name = gph(p.patch_name)
                            p_size = in_bytes(p.patch_name)
                        pm.patch_info['patch_hash'] = p_name
                        pm.patch_info['patch_size'] = p_size
                        # No need to keep searching
                        # We have the info we need for this patch
                        break
                    else:
                        log.debug('No patch match found')
        else:
            if self.patch_support is True:
                log.debug('No patches found')
        return package_manifest

    @staticmethod
    def _update_file_list(json_data, package_info):
        # Ensure the version manifest has empty dict slots for this
        # package's name (under the updates key) and its name/channel
        # (under 'latest') before meta-data is written into them.
        files = json_data[settings.UPDATES_KEY]
        latest = json_data.get('latest')
        if latest is None:
            json_data['latest'] = {}
        filename = files.get(package_info.name)
        if filename is None:
            log.debug('Adding %s to file list', package_info.name)
            json_data[settings.UPDATES_KEY][package_info.name] = {}

        latest_package = json_data['latest'].get(package_info.name)
        if latest_package is None:
            json_data['latest'][package_info.name] = {}

        latest_package = json_data['latest'][package_info.name]
        latest_channel = latest_package.get(package_info.channel)
        if latest_channel is None:
            json_data['latest'][package_info.name][package_info.channel] = {}
        return json_data

    @staticmethod
    def _manifest_to_version_file_compat(package_info):
        # Checking for patch info. Patch info maybe be none
        patch_name = package_info.patch_info.get('patch_name')
        patch_hash = package_info.patch_info.get('patch_hash')
        patch_size = package_info.patch_info.get('patch_size')

        # Converting info to version file format
        info = {
            'file_hash': package_info.file_hash,
            'file_size': package_info.file_size,
            'filename': package_info.filename
            }

        # Adding patch info if available
        if patch_name and patch_hash:
            info['patch_name'] = patch_name
            info['patch_hash'] = patch_hash
            info['patch_size'] = patch_size

        return info

    @staticmethod
    def _update_version_file(json_data, package_manifest):
        # Adding version metadata from scanned packages to our
        # version manifest
        log.info('Adding package meta-data to version manifest')
        easy_dict = EasyAccessDict(json_data)
        for p in package_manifest:
            info = PackageHandler._manifest_to_version_file_compat(p)

            version_key = '{}*{}*{}'.format(settings.UPDATES_KEY,
                                            p.name, p.version)
            version = easy_dict.get(version_key)
            log.debug('Package Info: %s', version)

            # If we cannot get a version number this must be the first version
            # of its kind.
            if version is None:
                log.debug('Adding new version to file')

                # First version with this package name
                json_data[settings.UPDATES_KEY][p.name][p.version] = {}
                platform_key = '{}*{}*{}*{}'.format(settings.UPDATES_KEY,
                                                    p.name, p.version,
                                                    'platform')

                platform = easy_dict.get(platform_key)
                if platform is None:
                    _name = json_data[settings.UPDATES_KEY][p.name]
                    _name[p.version][p.platform] = info

            else:
                # package already present, adding another version to it
                log.debug('Appending info data to version file')
                _updates = json_data[settings.UPDATES_KEY]
                _updates[p.name][p.version][p.platform] = info

            # Add each package to latest section separated by release channel
            # NOTE(review): assumes json_data['latest'][p.name][p.channel]
            # exists -- prepared by _update_file_list during the scan.
            json_data['latest'][p.name][p.channel][p.platform] = p.version
        return json_data

    def _write_json_to_file(self, json_data):
        # Writes json data to disk
        log.debug('Saving version meta-data')
        self.db.save(settings.CONFIG_DB_KEY_VERSION_META, json_data)

    def _write_config_to_file(self, json_data):
        # Writes repo config data to disk
        log.debug('Saving config data')
        self.db.save(settings.CONFIG_DB_KEY_PY_REPO_CONFIG, json_data)

    def _move_packages(self, package_manifest):
        # Copy each archive to the deploy dir, archive it under the
        # files dir, and move any created patch to the deploy dir.
        if len(package_manifest) < 1:
            return
        log.info('Moving packages to deploy folder')
        for p in package_manifest:
            patch = p.patch_info.get('patch_name')
            with ChDir(self.new_dir):
                if patch:
                    if os.path.exists(os.path.join(self.deploy_dir, patch)):
                        os.remove(os.path.join(self.deploy_dir, patch))
                    log.debug('Moving %s to %s', patch, self.deploy_dir)
                    if os.path.exists(patch):
                        shutil.move(patch, self.deploy_dir)

                shutil.copy(p.filename, self.deploy_dir)
                log.debug('Copying %s to %s', p.filename, self.deploy_dir)

                if os.path.exists(os.path.join(self.files_dir, p.filename)):
                    os.remove(os.path.join(self.files_dir, p.filename))
                shutil.move(p.filename, self.files_dir)
                log.debug('Moving %s to %s', p.filename, self.files_dir)

    def _check_make_patch(self, json_data, name, platform):
        # Check to see if previous version is available to
        # make patch updates. Also calculates patch number
        # Returns (src_file_path, patch_number) or None when no
        # patch can be created.
        log.debug(json.dumps(json_data['latest'], indent=2))
        log.debug('Checking if patch creation is possible')
        if bsdiff4 is None:
            log.warning('Bsdiff is missing. Cannot create patches')
            return None

        if os.path.exists(self.files_dir):
            with ChDir(self.files_dir):
                files = os.listdir(os.getcwd())
                log.debug('Found %s files in files dir', len(files))

            files = remove_dot_files(files)
            # No src files to patch from. Exit quickly
            if len(files) == 0:
                log.debug('No src file to patch from')
                return None
            # If latest not available in version file. Exit
            try:
                log.debug('Looking for %s on %s', name, platform)
                latest = json_data['latest'][name]['stable'][platform]
                log.debug('Found latest version for patches')
            except KeyError:
                log.debug('Cannot find latest version in version meta')
                return None
            try:
                latest_platform = json_data[settings.UPDATES_KEY][name][latest]
                log.debug('Found latest platform for patches')
                try:
                    filename = latest_platform[platform]['filename']
                    log.debug('Found filename for patches')
                except KeyError:
                    log.error('Found old version file. Please read '
                              'the upgrade section in the docs.')
                    log.debug('Found old verison file')
                    return None
            except Exception as err:
                log.debug(err, exc_info=True)
                return None
            log.debug('Generating src file path')
            src_file_path = os.path.join(self.files_dir, filename)

            try:
                patch_num = self.config['patches'][name]
                log.debug('Found patch number')
                self.config['patches'][name] += 1
            except KeyError:
                log.debug('Cannot find patch number')
                # If no patch number we will start at 1
                patch_num = 1
                if 'patches' not in self.config.keys():
                    log.debug('Adding patches to version meta')
                    self.config['patches'] = {}
                if name not in self.config['patches'].keys():
                    log.debug('Adding %s to patches version meta', name)
                    self.config['patches'][name] = patch_num + 1
            num = patch_num + 1
            log.debug('Patch Number: %s', num)
            return src_file_path, num
        return None
# 예제 #13 (Example #13), score: 0
class PackageHandler(object):
    """Handles finding, sorting, getting meta-data, moving packages.

    Kwargs:

        app (instance): Config object
    """

    data_dir = None

    def __init__(self, app=None):
        """Create the handler; wire it to *app* via init_app when given."""
        self.db = Storage()
        self.config_loaded = False
        if app is not None:
            self.init_app(app)

    def init_app(self, obj):
        """Sets up client with config values from obj

        Args:

            obj (instance): config object

        """
        self.patches = obj.get('UPDATE_PATCHES', True)
        self.patch_support = bool(self.patches)
        if self.patch_support:
            log.debug('Patch support enabled')
        else:
            log.info('Patch support disabled')

        # Folder layout rooted at <cwd>/pyu-data
        self.data_dir = os.path.join(os.getcwd(), settings.USER_DATA_FOLDER)
        self.files_dir = os.path.join(self.data_dir, 'files')
        self.deploy_dir = os.path.join(self.data_dir, 'deploy')
        self.new_dir = os.path.join(self.data_dir, 'new')
        self.config_dir = os.path.join(os.path.dirname(self.data_dir),
                                       settings.CONFIG_DATA_FOLDER)
        self.config = None
        self.json_data = None

        self.setup()

    def setup(self):
        "Creates working directories & loads json files."
        if self.data_dir is not None:
            self._setup()

    def _setup(self):
        """Create work dirs and, on first call, load version & config data."""
        self._setup_work_dirs()
        if not self.config_loaded:
            self.json_data = self._load_version_file()
            self.config = self._load_config()
            self.config_loaded = True

    def process_packages(self):
        """Gets a list of updates to process.  Adds the name of an
        update to the version file if not already present.  Processes
        all packages.  Updates the version file meta-data. Then writes
        version file back to disk.
        """
        if self.data_dir is None:
            raise PackageHandlerError('Must init first.', expected=True)
        # Scan pyu-data/new for packages and patch candidates
        pkg_list, patch_list = self._get_package_list()
        created_patches = self._make_patches(patch_list)
        self._cleanup(patch_list)
        pkg_list = self._add_patches_to_packages(pkg_list, created_patches)
        self.json_data = self._update_version_file(self.json_data, pkg_list)
        self._write_json_to_file(self.json_data)
        self._write_config_to_file(self.config)
        self._move_packages(pkg_list)

    def _setup_work_dirs(self):
        # Non-destructively creates the pyu-data folder layout:
        #    new    - updates waiting to be signed
        #    deploy - files ready to upload
        #    files  - archive of past updates (for future reference)
        # plus the separate config data folder.
        for directory in (self.data_dir, self.new_dir,
                          self.deploy_dir, self.files_dir,
                          self.config_dir):
            if os.path.exists(directory):
                continue
            log.info('Creating dir: {}'.format(directory))
            os.mkdir(directory)

    def _load_version_file(self):
        """Return version meta-data from the db, creating a skeleton if absent."""
        data = self.db.load(settings.CONFIG_DB_KEY_VERSION_META)
        if data is None:  # pragma: no cover
            log.warning('Version file not found')
            data = {'updates': {}}
            log.info('Created new version file')
        return data

    def _load_config(self):
        """Return repo config from the db, creating a default if absent."""
        config = self.db.load(settings.CONFIG_DB_KEY_PY_REPO_CONFIG)
        if config is None:  # pragma: no cover
            log.info('Creating new config file')
            config = {'patches': {}}
        return config

    def _get_package_list(self, ignore_errors=True):
        # Adds compatible packages to internal package manifest
        # for futher processing
        # Process all packages in new folder and gets
        # url, hash and some outer info.
        # Returns a (package_manifest, patch_manifest) tuple.
        log.info('Getting package list')
        # Clears manifest if sign updates runs more the once without
        # app being restarted
        package_manifest = list()
        patch_manifest = list()
        bad_packages = list()
        with jms_utils.paths.ChDir(self.new_dir):
            # Getting a list of all files in the new dir
            packages = os.listdir(os.getcwd())
            for p in packages:
                # On package initialization we do the following
                # 1. Check for a supported archive
                # 2. get required info: version, platform, hash
                # If any check fails package.info['status'] will be False
                # You can query package.info['reason'] for the reason
                package = Package(p)
                if package.info['status'] is False:
                    # Package failed at something
                    # package.info['reason'] will tell why
                    bad_packages.append(package)
                    continue

                # Add package hash
                package.file_hash = gph(package.filename)
                package.file_size = in_bytes(package.filename)
                self.json_data = self._update_file_list(self.json_data,
                                                        package)

                package_manifest.append(package)
                self.config = self._add_package_to_config(package,
                                                          self.config)

                if self.patch_support:
                    # If channel is not stable skip patch creation
                    if package.channel != 'stable':
                        log.debug('Package {} not on stable channel: '
                                  'Skipping'.format(p))
                        continue
                    # Will check if source file for patch exists
                    # if so will return the path and number of patch
                    # to create. If missing source file None returned
                    path = self._check_make_patch(self.json_data,
                                                  package.name,
                                                  package.platform,
                                                  )
                    if path is not None:
                        log.info('Found source file to create patch')
                        patch_name = package.name + '-' + package.platform
                        src_path = path[0]
                        patch_number = path[1]
                        patch_info = dict(src=src_path,
                                          dst=os.path.abspath(p),
                                          patch_name=os.path.join(self.new_dir,
                                                                  patch_name),
                                          patch_num=patch_number,
                                          package=package.filename)
                        # ready for patching
                        patch_manifest.append(patch_info)
                    else:
                        log.warning('No source file to patch from')

        # ToDo: Expose this & remove "pragma: no cover" once done
        if ignore_errors is False:  # pragma: no cover
            log.warning('Bad package & reason for being naughty:')
            for b in bad_packages:
                log.warning(b.name, b.info['reason'])
        # End ToDo

        return package_manifest, patch_manifest

    def _add_package_to_config(self, p, data):
        if 'package' not in data.keys():
            data['package'] = {}
            log.info('Initilizing config for packages')
        # First package with current name so add platform and version
        if p.name not in data['package'].keys():
            data['package'][p.name] = {p.platform: p.version}
            log.info('Adding new package to config')
        else:
            # Adding platform and version
            if p.platform not in data['package'][p.name].keys():
                data['package'][p.name][p.platform] = p.version
                log.info('Adding new arch to package-config: '
                         '{}'.format(p.platform))
            else:
                # Getting current version for platform
                value = data['package'][p.name][p.platform]
                # Updating version if applicable
                if p.version > value:
                    log.info('Adding new version to package-config')
                    data['package'][p.name][p.platform] = p.version
        return data

    def _cleanup(self, patch_manifest):
        # Remove old archives
        # were previously used to create patches
        if len(patch_manifest) < 1:
            return
        log.info('Cleaning up files directory')
        for p in patch_manifest:
            if os.path.exists(p['src']):
                basename = os.path.basename(p['src'])
                log.info('Removing {}'.format(basename))
                os.remove(p['src'])

    def _make_patches(self, patch_manifest):
        pool_output = list()
        if len(patch_manifest) < 1:
            return pool_output
        log.info('Starting patch creation')
        try:
            cpu_count = multiprocessing.cpu_count() * 2
        except Exception as err:
            log.debug(err, exc_info=True)
            log.warning('Cannot get cpu count from os. Using default 2')
            cpu_count = 2

        pool = multiprocessing.Pool(processes=cpu_count)
        pool_output = pool.map(_make_patch, patch_manifest)
        return pool_output

    def _add_patches_to_packages(self, package_manifest, patches):
        # ToDo: Increase the efficiency of this double for
        #       loop. Not sure if it can be done though
        if patches is not None and len(patches) >= 1:
            log.info('Adding patches to package list')
            for p in patches:
                # Not adding patch that are not complete
                if hasattr(p, 'ready') is False:
                    continue
                # Not adding patch that are not complete
                if hasattr(p, 'ready') and p.ready is False:
                    continue
                for pm in package_manifest:
                    #
                    if p.dst_filename == pm.filename:
                        pm.patch_info['patch_name'] = \
                            os.path.basename(p.patch_name)
                        # Don't try to get hash on a ghost file
                        if not os.path.exists(p.patch_name):
                            p_name = ''
                        else:
                            p_name = gph(p.patch_name)
                            p_size = in_bytes(p.patch_name)
                        pm.patch_info['patch_hash'] = p_name
                        pm.patch_info['patch_size'] = p_size
                        # No need to keep searching
                        # We have the info we need for this patch
                        break
                    else:
                        log.debug('No patch match found')
        else:
            if self.patch_support is True:
                log.warning('No patches found')
        return package_manifest

    def _update_version_file(self, json_data, package_manifest):
        # Updates version file with package meta-data
        # Returns the mutated json_data dict.
        log.info('Adding package meta-data to version manifest')
        easy_dict = EasyAccessDict(json_data)
        for p in package_manifest:
            # Patch info may be absent when no patch was created
            patch_name = p.patch_info.get('patch_name')
            patch_hash = p.patch_info.get('patch_hash')
            patch_size = p.patch_info.get('patch_size')

            # Converting info to format compatible for version file
            # ToDo: Remove filename in version 2.0
            info = {'file_hash': p.file_hash,
                    'file_size': p.file_size,
                    'file_name': p.filename,
                    'filename': p.filename}
            if patch_name and patch_hash:
                info['patch_name'] = patch_name
                info['patch_hash'] = patch_hash
                info['patch_size'] = patch_size

            version_key = '{}*{}*{}'.format(settings.UPDATES_KEY,
                                            p.name, p.version)
            version = easy_dict.get(version_key)
            log.debug('Package Info: {}'.format(version))

            if version is None:
                log.debug('Adding new version to file')

                # First version this package name
                json_data[settings.UPDATES_KEY][p.name][p.version] = {}
                platform_key = '{}*{}*{}*{}'.format(settings.UPDATES_KEY,
                                                    p.name, p.version,
                                                    'platform')

                platform = easy_dict.get(platform_key)
                if platform is None:
                    name_ = json_data[settings.UPDATES_KEY][p.name]
                    name_[p.version][p.platform] = info

            else:
                # package already present, adding another version to it
                log.debug('Appending info data to version file')
                # Used to keep within 80 characters
                n = p.name
                v = p.version
                json_data[settings.UPDATES_KEY][n][v][p.platform] = info

            # ToDo: Remove in future version. Maybe 2.0
            #       Backwards compat when implementing release channels
            # NOTE(review): assumes json_data['latest'][p.name] already
            # exists -- presumably prepared by _update_file_list during
            # the package scan; verify against that helper.
            if p.channel == 'stable':
                json_data['latest'][p.name][p.platform] = p.version
            # End ToDo

            # Add each package to latests section separated by release channel
            json_data['latest'][p.name][p.channel][p.platform] = p.version
        return json_data

    def _write_json_to_file(self, json_data):
        """Persist the version meta-data mapping to the config database.

        Despite the name, the data goes through ``self.db.save`` rather
        than a raw file write.
        """
        log.debug('Saving version meta-data')
        self.db.save(settings.CONFIG_DB_KEY_VERSION_META, json_data)

    def _write_config_to_file(self, json_data):
        """Persist the repo config mapping to the config database."""
        log.debug('Saving config data')
        self.db.save(settings.CONFIG_DB_KEY_PY_REPO_CONFIG, json_data)

    def _move_packages(self, package_manifest):
        """Relocate built artifacts out of the work directory.

        Patches and a copy of each archive go to the deploy folder; the
        archive itself is moved into the files dir so it can serve as the
        source for future patches.
        """
        if len(package_manifest) < 1:
            return
        log.info('Moving packages to deploy folder')
        for pkg in package_manifest:
            patch = pkg.patch_info.get('patch_name')
            with jms_utils.paths.ChDir(self.new_dir):
                if patch:
                    # Replace any previously deployed copy of this patch
                    deployed_patch = os.path.join(self.deploy_dir, patch)
                    if os.path.exists(deployed_patch):
                        os.remove(deployed_patch)
                    log.debug('Moving {} to {}'.format(patch,
                              self.deploy_dir))
                    if os.path.exists(patch):
                        shutil.move(patch, self.deploy_dir)

                shutil.copy(pkg.filename, self.deploy_dir)
                log.debug('Copying {} to {}'.format(pkg.filename,
                          self.deploy_dir))

                # Replace any stale archive kept for patch creation
                archived = os.path.join(self.files_dir, pkg.filename)
                if os.path.exists(archived):
                    os.remove(archived)
                shutil.move(pkg.filename, self.files_dir)
                log.debug('Moving {} to {}'.format(pkg.filename,
                          self.files_dir))

    def _update_file_list(self, json_data, package_info):
        """Ensure the nested dicts for *package_info* exist in *json_data*.

        Creates, without clobbering existing data, the package's entry
        under the updates key and under 'latest', including the sub-dict
        for the package's release channel. Returns the updated mapping.
        """
        files = json_data[settings.UPDATES_KEY]
        if json_data.get('latest') is None:
            json_data['latest'] = {}
        if files.get(package_info.name) is None:
            log.debug('Adding {} to file list'.format(package_info.name))
            files[package_info.name] = {}

        if json_data['latest'].get(package_info.name) is None:
            json_data['latest'][package_info.name] = {}

        # Bug fix: this previously looked up the literal key 'channel'
        # (never present), so the channel dict was reset to {} on every
        # run, wiping the latest-version entries of other platforms.
        latest_channel = \
            json_data['latest'][package_info.name].get(package_info.channel)
        if latest_channel is None:
            json_data['latest'][package_info.name][package_info.channel] = {}
        return json_data

    def _check_make_patch(self, json_data, name, platform):
        """Return ``(src_file_path, patch_number)`` when a patch can be
        made from the previous stable release, else ``None``.

        Patch creation requires bsdiff4, at least one archived source
        file, and a 'stable' entry for this platform in the version
        meta-data. Also increments the stored patch counter for *name*.
        """
        log.info('Checking if patch creation is possible')
        if bsdiff4 is None:
            log.warning('Bsdiff is missing. Cannot create patches')
            return None
        if not os.path.exists(self.files_dir):
            return None

        with jms_utils.paths.ChDir(self.files_dir):
            files = os.listdir(os.getcwd())
        files = remove_dot_files(files)
        # No src files to patch from. Exit quickly
        if len(files) == 0:
            return None

        # If latest not available in version file. Exit
        # Bug fix: the meta-data is laid out as
        # latest[name][channel][platform] (see _update_version_file);
        # the old lookup used the literal key 'channel' and always
        # raised KeyError, silently disabling patch creation.
        try:
            latest = json_data['latest'][name]['stable'][platform]
        except KeyError:
            return None

        try:
            latest_platform = json_data[settings.UPDATES_KEY][name][latest]
            try:
                filename = latest_platform[platform]['file_name']
            except KeyError:
                # ToDo: Remove in version 2.0
                filename = latest_platform[platform]['filename']
                # End ToDo
        except (KeyError, TypeError):
            # Narrowed from a bare except: a missing key or malformed
            # entry means there is nothing to patch from.
            return None
        src_file_path = os.path.join(self.files_dir, filename)

        # Per-package patch counter; unseen packages start at 1 so the
        # first emitted patch number is 2 (matches historical behavior).
        patches = self.config.setdefault('patches', {})
        patch_num = patches.get(name, 1)
        patches[name] = patch_num + 1
        num = patch_num + 1
        log.debug('Patch Number: {}'.format(num))
        return src_file_path, num
# Example #14
# Score: 0
# File: config.py  Project: JMSwag/PyUpdater
class ConfigManager(object):

    def __init__(self):
        """Bind the manager to the current working directory and open
        the config database."""
        self.cwd = os.getcwd()
        # Storage is the json-file-backed key/value database
        self.db = Storage()
        self.config_key = settings.CONFIG_DB_KEY_APP_CONFIG

    # Loads config from database (json file)
    # Loads config from database (json file)
    def load_config(self):
        """Return a Config populated from the database.

        Missing data yields an empty Config; DATA_DIR is always pinned
        to the current working directory.
        """
        stored = self.db.load(self.config_key)
        if stored is None:
            stored = {}
        config = Config()
        for key, value in stored.items():
            config[key] = value
        config.DATA_DIR = os.getcwd()
        return config

    def get_app_name(self):
        """Return the APP_NAME from the stored configuration."""
        return self.load_config().APP_NAME

    # Saves config to database (json file)
    # Saves config to database (json file)
    def save_config(self, obj):
        """Persist *obj* to the config database, then regenerate the
        client_config.py module so the client stays in sync."""
        log.debug('Saving Config')
        self.db.save(self.config_key, obj)
        log.debug('Config saved')
        self.write_config_py(obj)
        log.debug('Wrote client config')

    # Writes client config to client_config.py
    def write_config_py(self, obj):
        keypack_data = self.db.load(settings.CONFIG_DB_KEY_KEYPACK)
        if keypack_data is None:
            log.debug('*** Keypack data is None ***')
            public_key = None
        else:
            public_key = keypack_data['client']['offline_public']

        filename = os.path.join(self.cwd, *obj.CLIENT_CONFIG_PATH)
        attr_str_format = "    {} = '{}'\n"
        attr_format = "    {} = {}\n"

        log.debug('Writing client_config.py')
        with open(filename, 'w') as f:
            f.write('class ClientConfig(object):\n')

            log.debug('Adding PUBLIC_KEY to client_config.py')
            f.write(attr_str_format.format('PUBLIC_KEY', public_key))

            if hasattr(obj, 'APP_NAME'):
                log.debug('Adding APP_NAME to client_config.py')
                f.write(attr_str_format.format('APP_NAME', obj.APP_NAME))
            if hasattr(obj, 'COMPANY_NAME'):
                log.debug('Adding COMPANY_NAME to client_config.py')
                f.write(attr_str_format.format('COMPANY_NAME',
                                               obj.COMPANY_NAME))
            if hasattr(obj, 'UPDATE_URLS'):
                log.debug('Adding UPDATE_URLS to client_config.py')
                f.write(attr_format.format('UPDATE_URLS', obj.UPDATE_URLS))

            if hasattr(obj, 'MAX_DOWNLOAD_RETRIES'):
                log.debug('Adding MAX_DOWNLOAD_RETRIES to client_config.py')
                f.write(attr_format.format('MAX_DOWNLOAD_RETRIES',
                                           obj.MAX_DOWNLOAD_RETRIES))