def _generate_gpg_key_if_does_not_exist(config):
    """Generate and export the repository's GPG signing key unless one exists.

    A key is considered present when all of GPG_FILENAMES exist in ~/.gnupg
    AND ~/gpg.key exists. The exported public key is also copied into the
    artifacts directory if it is not there yet.

    :returns: True if a new key was generated (i.e. no key existed before).
        NOTE: the previous docstring claimed the opposite of what the code
        returns (`not already_generated`); callers rely on this polarity.
    """
    home = path.expanduser("~")
    gnupg_directory = path.join(home, ".gnupg")
    already_generated = all([path.exists(path.join(gnupg_directory, filename))
                             for filename in GPG_FILENAMES])
    home_key_path = path.join(home, 'gpg.key')
    already_generated = already_generated and path.exists(home_key_path)
    if not already_generated:
        # start from a clean keyring so batch generation cannot collide
        rmtree(gnupg_directory, ignore_errors=True)
        log_execute_assert_success(['gpg', '--batch', '--gen-key',
                                    resource_filename(__name__, 'gpg_batch_file')])
        pid = log_execute_assert_success(['gpg', '--export', '--armor'])
        with fopen(path.join(home, ".rpmmacros"), 'w') as fd:
            fd.write(GPG_TEMPLATE)
        with fopen(home_key_path, 'w') as fd:
            # get_stdout() returns bytes; decode before writing text-mode
            fd.write(pid.get_stdout().decode())
    data_key_path = path.join(config.artifacts_directory, 'packages', 'gpg.key')
    if not path.exists(data_key_path):
        copy(home_key_path, data_key_path)
    return not already_generated
def write_to_packages_file(dirpath, contents, mode):
    """Write `contents` to the APT 'Packages' index in `dirpath` and regenerate 'Packages.gz'.

    :param dirpath: directory containing the Packages index
    :param contents: text to write
    :param mode: file mode for the Packages file ('w' to overwrite, 'a' to append)
    """
    import gzip
    packages_filepath = path.join(dirpath, 'Packages')
    with fopen(packages_filepath, mode) as fd:
        fd.write(contents)
    with fopen(packages_filepath, 'rb') as fd:
        all_contents = fd.read()
    # original leaked the gzip handle on error; a context manager guarantees
    # the stream is flushed and closed
    with gzip.open(packages_filepath + '.gz', 'wb') as gz:
        gz.write(all_contents)
def _fix_dpkg_sig():
    """Patch dpkg-sig in place so it accepts xz-compressed data members.

    See https://launchpadlibrarian.net/180099595/dpkg-sig-xz.patch and
    http://osdir.com/ml/ubuntu-bugs/2014-07/msg09103.html
    """
    dpkg_sig = '/usr/bin/dpkg-sig'
    if not path.exists(dpkg_sig):
        return
    with fopen(dpkg_sig) as fd:
        contents = fd.read()
    patched = contents.replace(
        '$seen_files{"data.tar.gz"} &&',
        '($seen_files{"data.tar.gz"} || $seen_files{"data.tar.xz"}) &&')
    with fopen(dpkg_sig, 'w') as fd:
        fd.write(patched)
def ensure_packages_json_file_exists_in_directory(dirpath):
    """Make sure `dirpath` holds a 'packages.json' containing a JSON list.

    An existing, valid list file is left untouched; a missing, unreadable or
    non-list file is (re)written as an empty JSON list.
    """
    filepath = path.join(dirpath, 'packages.json')
    if path.exists(filepath):
        try:
            with fopen(filepath) as fd:
                if isinstance(decode(fd.read()), list):
                    return
        except Exception:  # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            pass
    with fopen(filepath, 'w') as fd:
        fd.write('[]')
def _fix_entropy_generator():
    """Point rng-tools at /dev/urandom so GPG key generation does not block on entropy.

    Only runs when the rng-tools init script exists and we are root (can edit /etc).
    """
    from os import getuid
    rng_tools_script = '/etc/init.d/rng-tools'
    if not path.exists(rng_tools_script) or getuid() != 0:
        return
    with fopen("/etc/default/rng-tools") as fd:
        already_configured = fd.read().endswith("HRNGDEVICE=/dev/urandom\n")
    if already_configured:
        return
    log_execute_assert_success([rng_tools_script, 'stop'], True)
    with fopen("/etc/default/rng-tools", 'a') as fd:
        fd.write("HRNGDEVICE=/dev/urandom\n")
    log_execute_assert_success([rng_tools_script, 'start'], True)
def _fix_dpkg_sig():
    """Patch /usr/bin/dpkg-sig to recognize data.tar.xz archive members.

    Upstream fix: https://launchpadlibrarian.net/180099595/dpkg-sig-xz.patch
    Discussion:   http://osdir.com/ml/ubuntu-bugs/2014-07/msg09103.html
    """
    script = '/usr/bin/dpkg-sig'
    if path.exists(script):
        with fopen(script) as fd:
            original = fd.read()
        updated = original.replace(
            '$seen_files{"data.tar.gz"} &&',
            '($seen_files{"data.tar.gz"} || $seen_files{"data.tar.xz"}) &&')
        with fopen(script, 'w') as fd:
            fd.write(updated)
def reload_configuration_from_disk(self):
    """Re-read the serialized configuration at self.filepath onto this instance.

    Every key in the decoded mapping becomes an attribute; `filepath` is
    preserved.

    :returns: self, to allow chaining
    """
    with fopen(self.filepath) as fd:
        kwargs = decode(fd.read())
    kwargs['filepath'] = self.filepath
    # dict.iteritems() does not exist on Python 3; items() works everywhere
    for key, value in kwargs.items():
        setattr(self, key, value)
    return self
def reload_configuration_from_disk(self):
    """Refresh this configuration object from the file it was loaded from.

    :returns: self, to allow chaining
    """
    with fopen(self.filepath) as fd:
        loaded = decode(fd.read())
    loaded['filepath'] = self.filepath
    for attribute, value in loaded.items():
        setattr(self, attribute, value)
    return self
def generate_release_file_for_specific_distribution_and_version(
        self, distribution, codename, force=True):
    """Write and GPG-sign the APT 'Release' metadata for one distribution/codename.

    Produces Release, a clear-signed InRelease and a detached Release.gpg
    under <base_directory>/<distribution>/dists/<codename>. When `force` is
    False and a Release file already exists, nothing is done.
    """
    dirpath = path.join(self.base_directory, distribution, 'dists', codename)
    in_release = path.join(dirpath, 'InRelease')
    release = path.join(dirpath, 'Release')
    release_gpg = release + '.gpg'
    if path.exists(release) and not force:
        return
    # write release file: header (codename + architectures) followed by
    # the checksum listing emitted by apt-ftparchive
    contents = apt_ftparchive(['release', dirpath])
    with fopen(release, 'w') as fd:
        available_archs = sorted(
            KNOWN_DISTRIBUTIONS[distribution][codename])
        fd.write(
            RELEASE_FILE_HEADER.format(codename, " ".join(available_archs),
                                       contents))
    # delete old release signature files so stale signatures never survive
    for filepath in [in_release, release_gpg]:
        if path.exists(filepath):
            remove(filepath)
    # sign release file
    if codename == "trusty":  # trusty doesn't support SHA256 for InRelease
        gpg([
            '--clearsign', '--digest-algo', 'SHA1', '-o', in_release, release
        ])
    else:
        gpg([
            '--clearsign', '--digest-algo', 'SHA256', '-o', in_release,
            release
        ])
    # detached ascii-armored signature alongside the clear-signed InRelease
    gpg(['-abs', '-o', release_gpg, release])
def gpg_key():
    """Flask view: serve the repository's public GPG key as a binary download."""
    from infi.gevent_utils.os import fopen
    key_path = path.join(flask.current_app.app_repo_config.packages_directory,
                         'gpg.key')
    with fopen(key_path) as fd:
        key_contents = fd.read()
    return flask.Response(key_contents, content_type='application/octet-stream')
def _deduce_produce_name(self, dirpath): try: with fopen(path.join(dirpath, 'product_name')) as fd: return fd.read().strip() except: return ' '.join( word.capitalize() for word in path.basename(dirpath).split('-')).strip()
def _read_release_date_from_file(self, dirpath):
    """Return the release date parsed from `dirpath`/release_date, or None.

    :returns: a datetime.date, or None when the file is missing, unreadable,
        or its contents do not parse as a date
    """
    from dateutil.parser import parse
    try:
        with fopen(path.join(dirpath, 'release_date')) as fd:
            release_date = fd.read().strip()
        return parse(release_date).date()
    except Exception:  # narrowed from bare except; absence of a date is expected
        return None
def _generate_gpg_key_if_does_not_exist(config):
    """Generate and export the repository's GPG signing key unless one exists.

    A key is considered present when all of GPG_FILENAMES exist in ~/.gnupg
    AND ~/gpg.key exists. The exported public key is also copied into the
    artifacts directory if it is not there yet.

    :returns: True if a new key was generated (i.e. no key existed before).
        NOTE: the previous docstring claimed the opposite of what
        `return not already_generated` computes.
    """
    gnupg_directory = path.join(path.expanduser("~"), ".gnupg")
    already_generated = all([path.exists(path.join(gnupg_directory, filename))
                             for filename in GPG_FILENAMES])
    home_key_path = path.join(path.expanduser("~"), 'gpg.key')
    already_generated = already_generated and path.exists(home_key_path)
    if not already_generated:
        rmtree(gnupg_directory, ignore_errors=True)
        log_execute_assert_success(['gpg', '--batch', '--gen-key',
                                    resource_filename(__name__, 'gpg_batch_file')])
        pid = log_execute_assert_success(['gpg', '--export', '--armor'])
        with fopen(path.join(path.expanduser("~"), ".rpmmacros"), 'w') as fd:
            fd.write(GPG_TEMPLATE)
        with fopen(home_key_path, 'w') as fd:
            # get_stdout() returns bytes; writing them to a text-mode file
            # raises TypeError on Python 3 — decode first (matches the other
            # variant of this function in this file)
            fd.write(pid.get_stdout().decode())
    data_key_path = path.join(config.artifacts_directory, 'packages', 'gpg.key')
    if not path.exists(data_key_path):
        copy(home_key_path, data_key_path)
    return not already_generated
def _get_custom_installation_instructions(self, package): filepath = path.join(package['abspath'], 'installation_instructions.json') try: if not path.exists(filepath): return dict() with fopen(filepath) as fd: result = decode(fd.read()) return result if isinstance(result, dict) else dict() except: logger.exception("failed to read custom installation instructions from {0}".format(filepath)) return dict()
def _upload_file(address, port, username, password, index, filepath):
    """Upload `filepath` into the `index` directory of an app-repo FTP server.

    :param address: FTP server host
    :param port: FTP server port
    :param index: remote directory to change into before storing
    """
    from ftplib import FTP
    from infi.gevent_utils.os import path, fopen
    from infi.app_repo.ftpserver import make_ftplib_gevent_friendly
    make_ftplib_gevent_friendly()
    ftp = FTP()
    ftp.connect(address, port)
    ftp.login(username, password)
    try:
        ftp.cwd(index)
        with fopen(filepath) as fd:
            ftp.storbinary("STOR %s" % path.basename(filepath), fd)
    finally:
        # original leaked the control connection; close() never raises,
        # so it cannot mask an exception from storbinary
        ftp.close()
def upload_file(config, index, filepath):
    """Upload `filepath` to the local app-repo FTP server into `index`.

    Uses the host/credentials from `config.ftpserver`.
    """
    from ftplib import FTP
    from infi.gevent_utils.os import path, fopen
    from infi.app_repo.ftpserver import make_ftplib_gevent_friendly
    # removed unused import of create_threadpool_executed_func
    make_ftplib_gevent_friendly()
    ftp = FTP()
    ftp.connect('127.0.0.1', config.ftpserver.port)
    ftp.login(config.ftpserver.username, config.ftpserver.password)
    try:
        ftp.cwd(index)
        with fopen(filepath) as fd:
            ftp.storbinary("STOR %s" % path.basename(filepath), fd)
    finally:
        # original leaked the control connection; always close it
        ftp.close()
def from_disk(cls, filepath):
    """Load a configuration object from `filepath` (or the default location).

    A fresh default configuration bound to `filepath` is returned when the
    file does not exist. The webserver default index, if set, must be one of
    the configured indexes.
    """
    filepath = filepath or cls.get_default_config_file()
    if not path.exists(filepath):
        self = cls()
        self.filepath = filepath
    else:
        with fopen(filepath) as fd:
            attributes = decode(fd.read())
        attributes['filepath'] = filepath
        self = cls()
        for name, value in attributes.items():
            setattr(self, name, value)
    assert self.webserver.default_index is None or self.webserver.default_index in self.indexes
    return self
def from_disk(cls, filepath):
    """Load a configuration object from `filepath` (or the default config path).

    Returns a default instance bound to `filepath` when the file does not
    exist. The webserver default index, if set, must be one of the
    configured indexes.
    """
    filepath = filepath or cls.get_default_config_file()
    if not path.exists(filepath):
        self = cls()
        self.filepath = filepath
    else:
        with fopen(filepath) as fd:
            kwargs = decode(fd.read())
        kwargs['filepath'] = filepath
        self = cls()
        # iteritems() was removed in Python 3; items() is correct on both
        # (and matches the other variant of this function in this file)
        for key, value in kwargs.items():
            setattr(self, key, value)
    assert self.webserver.default_index is None or self.webserver.default_index in self.indexes
    return self
def generate_release_file_for_specific_distribution_and_version(self, distribution, codename, force=True):
    """Write and GPG-sign the APT 'Release' metadata for one distribution/codename.

    Produces Release, a clear-signed InRelease and a detached Release.gpg
    under <base_directory>/<distribution>/dists/<codename>. When `force` is
    False and a Release file already exists, nothing is done.
    """
    dirpath = path.join(self.base_directory, distribution, 'dists', codename)
    in_release = path.join(dirpath, 'InRelease')
    release = path.join(dirpath, 'Release')
    release_gpg = release + '.gpg'
    if path.exists(release) and not force:
        return
    # write release file: header (codename + architectures) followed by
    # the checksum listing emitted by apt-ftparchive
    contents = apt_ftparchive(['release', dirpath])
    with fopen(release, 'w') as fd:
        available_archs = sorted(KNOWN_DISTRIBUTIONS[distribution][codename])
        fd.write(RELEASE_FILE_HEADER.format(codename, " ".join(available_archs), contents))
    # delete old release signature files so stale signatures never survive
    for filepath in [in_release, release_gpg]:
        if path.exists(filepath):
            remove(filepath)
    # sign release file
    if codename == "trusty":  # trusty doesn't support SHA256 for InRelease
        gpg(['--clearsign', '--digest-algo', 'SHA1', '-o', in_release, release])
    else:
        gpg(['--clearsign', '--digest-algo', 'SHA256', '-o', in_release, release])
    # detached ascii-armored signature alongside the clear-signed InRelease
    gpg(['-abs', '-o', release_gpg, release])
def _deduce_release_notes_url(self, dirpath): try: with fopen(path.join(dirpath, 'release_notes_url')) as fd: return fd.read().strip() except: return None
def _deduce_produce_name(self, dirpath): try: with fopen(path.join(dirpath, 'product_name')) as fd: return fd.read().strip() except: return ' '.join(word.capitalize() for word in path.basename(dirpath).split('-')).strip()
def gpg_key():
    """Serve the repository's public GPG key as an octet-stream response."""
    from infi.gevent_utils.os import fopen
    packages_dir = flask.current_app.app_repo_config.packages_directory
    with fopen(path.join(packages_dir, 'gpg.key')) as fd:
        key_contents = fd.read()
    return flask.Response(key_contents, content_type='application/octet-stream')
def write_file(filepath, contents):
    """Write `contents` to `filepath` in a gevent-friendly way, replacing any existing file."""
    with fopen(filepath, 'w') as output:
        output.write(contents)
def read_file(filepath):
    """Return the full contents of `filepath`, read in a gevent-friendly way."""
    with fopen(filepath) as source:
        return source.read()
def _read(filepath):
    """Deserialize and return the msgpack payload stored at `filepath`."""
    with fopen(filepath, 'rb') as source:
        raw = source.read()
    return unpackb(raw)
def _write(filepath, contents):
    """Serialize `contents` with msgpack and write it to `filepath`."""
    serialized = packb(contents)
    with fopen(filepath, 'wb') as target:
        target.write(serialized)
def to_disk(self):
    """Persist this configuration as JSON to self.filepath, creating parent directories as needed."""
    parent = path.dirname(self.filepath)
    if not path.exists(parent):
        makedirs(parent)
    with fopen(self.filepath, 'w') as fd:
        fd.write(self.to_json())
def _write():
    """Write the Release header plus apt-ftparchive output (closes over enclosing-scope names)."""
    with fopen(release, 'w') as fd:
        architectures = sorted(KNOWN_DISTRIBUTIONS[distribution][codename])
        header = RELEASE_FILE_HEADER.format(codename, " ".join(architectures), contents)
        fd.write(header)