def generate_index_json(cls, meta_inf_dir, repo_dir, ucs_version, appcenter_host):
    """Build the App Center index for one UCS version.

    Walks *meta_inf_dir* for app .ini files, collects each app's index entry
    into ``index.json.gz``, bundles the apps' files into ``all.tar`` and
    finally creates an ``all.tar.zsync`` control file pointing at
    *appcenter_host*.

    :param meta_inf_dir: directory containing the apps' meta-inf (.ini) files
    :param repo_dir: repository directory handed through to ``AppcenterApp``
    :param ucs_version: UCS version string used in paths and the zsync URL
    :param appcenter_host: host name/URL of the App Center server
    """
    archive_name = os.path.join(meta_inf_dir, 'all.tar')
    with tarfile.open(archive_name, 'w') as archive:
        with gzip.open(os.path.join(meta_inf_dir, 'index.json.gz'), 'wb') as index_json:
            apps = {}
            for root, dirs, files in os.walk(meta_inf_dir):
                for filename in files:
                    path = os.path.join(root, filename)
                    # check_ini_file() yields an app name only for relevant .ini files
                    appname = check_ini_file(filename)
                    if not appname:
                        continue
                    parser = read_ini_file(path)
                    try:
                        appid = parser.get('Application', 'ID')
                    except (NoSectionError, NoOptionError):
                        # not a valid app ini file; skip silently
                        continue
                    if not appid:
                        continue
                    app = AppcenterApp(appname, appid, ucs_version, meta_inf_dir, repo_dir, appcenter_host)
                    apps[app.name] = app.to_index()
                    # add all of the app's payload files to the combined archive
                    for filename_in_directory, filename_in_archive in app.tar_files():
                        archive.add(filename_in_directory, filename_in_archive)
            index_json.write(dumps(apps, sort_keys=True, indent=4))
    # normalize the host to a plain http:// URL for zsyncmake
    # NOTE(review): the https branch slices off 8 chars, i.e. assumes the
    # prefix is exactly 'https://' — confirm callers never pass bare 'https'
    if appcenter_host.startswith('https'):
        appcenter_host = 'http://%s' % appcenter_host[8:]
    if not appcenter_host.startswith('http://'):
        appcenter_host = 'http://%s' % appcenter_host
    # NOTE(review): the -u URL advertises all.tar.gz while the local archive is
    # all.tar — presumably the server serves a gzipped variant; verify layout
    call_process(['zsyncmake', '-u', '%s/meta-inf/%s/all.tar.gz' % (appcenter_host, ucs_version), '-q', '-z', '-o', archive_name + '.zsync', archive_name])
def create(self, hostname, env):
    """Bring up the app's docker-compose services and record the main container.

    Writes the compose yml with *env*, runs ``docker-compose up -d``, then
    matches the running containers against the main service's image to find
    the container id, which is stored in UCR and on ``self``.

    :param hostname: unused here — presumably kept for interface parity; verify callers
    :param env: mapping of environment variables for the compose file
    :returns: the container id on success, ``False`` if ``ps`` fails,
        ``None`` if no container matches the main service's image
    """
    # double-quote all values so they survive the YAML round-trip unchanged
    env = {k: yaml.scalarstring.DoubleQuotedScalarString(v) for k, v in env.iteritems()}
    # additionally expose every variable under a shell-safe upper-cased key
    env.update({shell_safe(k).upper(): v for k, v in env.iteritems()})
    self._setup_yml(recreate=True, env=env)
    call_process(['docker-compose', '-p', self.app.id, 'up', '-d', '--no-build', '--no-recreate'], cwd=self.app.get_compose_dir())
    try:
        out = ps(only_running=True)
    except CalledProcessError:
        return False
    else:
        yml_file = self.app.get_compose_file('docker-compose.yml')
        # RoundTripLoader + preserve_quotes keeps the quoting written above intact
        content = yaml.load(open(yml_file), yaml.RoundTripLoader, preserve_quotes=True)
        docker_image = content['services'][self.app.docker_main_service]['image']
        for line in out.splitlines():
            try:
                container, image = line.split()[:2]
            except ValueError:
                # header/blank lines do not split into two fields; skip them
                pass
            else:
                if image == docker_image:
                    ucr_save({self.app.ucr_container_key: container})
                    self.container = container
                    return container
def _test_for_docker_service(self):
    """Check that docker can be used; try to start it if it is not running.

    :raises umcm.UMC_Error: if the docker bridge network conflicts with the
        system network configuration
    :returns: True if docker is (now) running, False otherwise
    """
    if docker_bridge_network_conflict():
        msg = _('A conflict between the system network settings and the docker bridge default network has been detected.') + '\n\n'
        msg += _('Please either configure a different network for the docker bridge by setting the UCR variable docker/daemon/default/opts/bip to a different network and restart the system,') + ' '
        msg += _('or disable the docker support in the AppCenter by setting appcenter/docker to false.')
        raise umcm.UMC_Error(msg)
    if docker_is_running():
        return True
    MODULE.warn('Docker is not running! Trying to start it now...')
    call_process(['invoke-rc.d', 'docker', 'start'])
    # report whether the start attempt actually brought docker up
    return docker_is_running()
def start(self, attempts=2):
    """Start the database service, retrying once before giving up.

    :param attempts: remaining start attempts (retries on non-zero exit)
    :raises DatabaseCreationFailed: when the service will not start; the
        exception carries the ``service status`` output as details
    """
    service_name = self._get_service_name()
    if not service_name:
        return
    failed = call_process(['service', service_name, 'start'], database_logger).returncode
    if not failed:
        return
    if attempts > 1:
        # sometimes, under heavy load, mysql seems to fail to start
        # although it is just slow — so give it another chance
        database_logger.info('Starting %s failed. Retrying...' % service_name)
        return self.start(attempts=attempts - 1)
    catcher = LogCatcher(database_logger)
    call_process(['service', service_name, 'status'], catcher)
    raise DatabaseCreationFailed('Could not start %s' % service_name, details='\n'.join(catcher.stdstream()))
def start(self):
    """Start the database service once.

    :raises DatabaseCreationFailed: if the service start command fails
    """
    service_name = self._get_service_name()
    if not service_name:
        return
    returncode = call_process(['service', service_name, 'start'], database_logger).returncode
    if returncode:
        raise DatabaseCreationFailed('Could not start %s' % service_name)
def run_script(self, app, script):
    """Download and run the app's ``test_<script>`` helper from the test App Center.

    Fetches the script over HTTP, saves it executable to the temp dir and runs
    it with ``--binddn``/``--bindpwdfile`` taken from UCR (falling back to the
    Administrator account with a temporary password file).

    :param app: app identifier resolvable by ``FindApps().find``
    :param script: suffix of the test script to fetch (``test_<script>``)
    :returns: the script's exit code, or None if no such script exists
    """
    app = FindApps().find(app)
    url = os.path.join('http://appcenter-test.software-univention.de', 'univention-repository', app.get_ucs_version(), 'maintained', 'component', app.component_id, 'test_%s' % script)
    print(url)
    response = get(url)
    if response.ok is not True:
        print(' no %s script found for app %s: %s' % (script, app.id, response.content))
        return
    fname = os.path.join(gettempdir(), '%s.%s' % (app.id, script))
    with open(fname, 'wb') as f:
        f.write(response.content)
    os.chmod(fname, 0o755)
    bind_dn = self.ucr.get('tests/domainadmin/account')
    if bind_dn is None:
        bind_dn = 'uid=Administrator,%s' % self.ucr.get('ldap/base')
    pwd_file = self.ucr.get('tests/domainadmin/pwdfile')
    unlink_pwd_file = False
    if pwd_file is None:
        pwd_file = '/tmp/app-installation.pwd'
        # create the password file with mode 0600 so other local users cannot
        # read the credentials (plain open() would leave it world-readable)
        fd_num = os.open(pwd_file, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
        with os.fdopen(fd_num, 'w') as fd:
            fd.write('univention')
        unlink_pwd_file = True
    try:
        cmd = [fname, '--binddn', bind_dn, '--bindpwdfile', pwd_file]
        print('running ', cmd)
        return call_process(cmd).returncode
    finally:
        # never leave the temporary credentials lying around
        if unlink_pwd_file:
            os.unlink(pwd_file)
def rm(self):
    """Stop the app, then tear down its compose project including orphans.

    :returns: True if both stop and ``docker-compose down`` succeeded
    """
    ret = self.stop()
    if ret:
        proc = call_process(
            ['docker-compose', '-p', self.app.id, 'down', '--remove-orphans'],
            logger=self.logger,
            cwd=self.app.get_compose_dir())
        ret = proc.returncode == 0
    return ret
def rm(self):
    """Stop the app, then force-remove its stopped compose containers.

    :returns: True if both stop and ``docker-compose rm`` succeeded
    """
    ret = self.stop()
    if ret:
        proc = call_process(
            ['docker-compose', '-p', self.app.id, 'rm', '--force'],
            logger=self.logger,
            cwd=self.app.get_compose_dir())
        ret = proc.returncode == 0
    return ret
def _apt_get_dry_run(action, pkgs):
    """Simulate an apt-get *action* on *pkgs* and classify the outcome.

    Runs ``apt-get <action> -s`` and parses the simulated transaction lines
    (``Inst``/``Remv``). If apt fails, every requested package missing from
    the simulated transaction is reported as broken.

    :param action: apt-get action, e.g. 'install' or 'remove'
    :param pkgs: list of package names
    :returns: dict with keys 'install', 'remove' and 'broken', each a list
        of package names
    """
    apt_args = _apt_args()
    logger = LogCatcher(package_logger)
    success = call_process(['/usr/bin/apt-get'] + apt_args + [action, '-s'] + pkgs, logger=logger).returncode == 0
    install, remove, broken = [], [], []
    # raw strings: '\(' and '\[' in plain literals are invalid escape
    # sequences (DeprecationWarning since Python 3.6)
    install_regex = re.compile(r'^(Inst) ([^ ]*?) \((.*?) ')
    upgrade_remove_regex = re.compile(r'^(Remv|Inst) ([^ ]*?) \[(.*?)\]')
    for line in logger.stdout():
        for regex in (install_regex, upgrade_remove_regex):
            match = regex.match(line)
            if match:
                # group 3 (the version) is matched but not needed here
                operation, pkg_name = match.group(1), match.group(2)
                if operation == 'Inst':
                    install.append(pkg_name)
                elif operation == 'Remv':
                    remove.append(pkg_name)
                break
    if not success:
        for pkg in pkgs:
            if action == 'install' and pkg not in install:
                broken.append(pkg)
            if action == 'remove' and pkg not in remove:
                broken.append(pkg)
    return {'install': install, 'remove': remove, 'broken': broken}
def wait_for_dpkg_lock(timeout=120):
    """Wait until no other process holds the dpkg/apt lock files.

    Polls ``fuser`` on the lock files every few seconds until they are free
    or *timeout* seconds have elapsed.

    :param timeout: maximum number of seconds to wait
    :returns: True once the locks are free, False if the timeout expired
    """
    lock_files = ['/var/lib/dpkg/lock', '/var/lib/apt/lists/lock']
    lock_file_string = ' or '.join(lock_files)
    package_logger.debug('Trying to get a lock for %s...' % lock_file_string)
    sleep_duration = 3
    announced = False
    # always poll at least once; afterwards keep going while time remains
    while not announced or timeout > 0:
        locked = call_process(['fuser'] + lock_files).returncode == 0
        if not locked:
            if announced:
                package_logger.info('Finally got the lock. Continuing...')
            return True
        if not announced:
            package_logger.info('Could not lock %s. Is another process using it? Waiting up to %s seconds' % (lock_file_string, timeout))
            announced = True
        # there seems to be a timing issue with the fuser approach
        # in which the second (the apt) process releases its lock before
        # re-grabbing it once again
        # we hope to minimize this error by having a relatively high sleep duration
        time.sleep(sleep_duration)
        timeout -= sleep_duration
    package_logger.info('Unable to get a lock. Giving up...')
    return False
def _apt_get(action, pkgs):
    """Run an apt-get *action* on *pkgs* non-interactively.

    :param action: apt-get action, e.g. 'install' or 'remove'
    :param pkgs: list of package names
    :returns: True if apt-get exited with code 0
    """
    environment = os.environ.copy()
    environment['DEBIAN_FRONTEND'] = 'noninteractive'
    command = ['/usr/bin/apt-get'] + _apt_args() + [action] + pkgs
    success = call_process(command, logger=package_logger, env=environment).returncode == 0
    # the package state changed on disk; refresh the cached package manager
    reload_package_manager()
    return success
def execute_with_process(container, args, logger=None, tty=None):
    """Run *args* inside *container* via ``docker exec``.

    :param container: container id or name
    :param args: command and arguments to run inside the container
    :param logger: logger for the process output; defaults to the module logger
    :param tty: force (True)/forbid (False) a pseudo-TTY; auto-detect if None
    :returns: the result of call_process for the docker exec invocation
    """
    effective_logger = _logger if logger is None else logger
    # allocate an interactive pseudo-TTY only when stdin is a terminal
    use_tty = sys.stdin.isatty() if tty is None else tty
    command = ['docker', 'exec']
    if use_tty:
        command.append('-it')
    command += [container] + args
    return call_process(command, effective_logger)
def mark_packages_as_manually_installed(pkgs):
    """Flag *pkgs* as manually installed so apt will not auto-remove them.

    :param pkgs: list of package names
    """
    apt_mark_command = ['/usr/bin/apt-mark', 'manual'] + pkgs
    call_process(apt_mark_command, logger=package_logger)
    # package metadata changed; refresh the cached package manager
    reload_package_manager()
def update_packages():
    """Refresh the apt package lists and reload the package manager."""
    update_command = ['/usr/bin/apt-get', 'update']
    call_process(update_command, logger=package_logger)
    # the lists on disk changed; refresh the cached package manager
    reload_package_manager()
def _subprocess(self, args, logger=None, env=None):
    """Run *args* via call_process, defaulting to this object's logger.

    :param args: command and arguments
    :param logger: None (use self.logger), a string (use a named child of
        self.logger) or a logger object
    :param env: optional environment mapping for the process
    :returns: the result of call_process
    """
    if logger is None:
        target_logger = self.logger
    elif isinstance(logger, string_types):
        target_logger = self.logger.getChild(logger)
    else:
        target_logger = logger
    return call_process(args, target_logger, env)
def _run_file(self, app, fname, args):
    """Execute the script *fname* with *args* if it exists.

    :param app: app the script belongs to (not used directly here)
    :param fname: path of the executable to run
    :param args: extra command line arguments
    :returns: the script's exit code, or None when *fname* does not exist
    """
    if not os.path.exists(fname):
        return None
    self.log('Running %s with additional args %r' % (fname, args))
    return call_process([fname] + args, self.logger).returncode
def restart(self):
    """Refresh the compose yml (without recreating it) and restart the services.

    :returns: True if ``docker-compose restart`` exited with code 0
    """
    self._setup_yml(recreate=False)
    proc = call_process(
        ['docker-compose', '-p', self.app.id, 'restart'],
        logger=self.logger,
        cwd=self.app.get_compose_dir())
    return proc.returncode == 0
def manage(self, login, pwdfile, logger, *args):
    """Run ``univention-app`` on a remote host via univention-ssh.

    :param login: remote login (e.g. ``user@host``)
    :param pwdfile: file containing the password for univention-ssh
    :param logger: logger capturing the remote command's output
    :param args: arguments passed on to ``univention-app``
    :returns: the logger, so callers can inspect the captured output
    """
    remote_command = ['/usr/sbin/univention-ssh', pwdfile, login, 'univention-app'] + list(args)
    call_process(remote_command, logger=logger)
    return logger