def disable(cls):
    """Disable openresty by swapping the enabled conf for the disabled one.

    Renames ``enable_conf`` to ``disable_conf`` when present, removes the
    openresty process, then verifies the disabled conf is in place.

    Raises:
        MessageError: if the disabled conf file is still missing afterwards.
    """
    if nfs.exists(cls.enable_conf):
        nfs.rename(cls.enable_conf, cls.disable_conf)
    yield cls.rm_openresty()
    if nfs.exists(cls.disable_conf):
        return
    raise MessageError('Disable openresty failed !')
def copy_to(self, dst, plugin_parent='plugins'):
    """Copy this project's files into *dst*.

    For plugins, the destination is extended to
    ``dst/<plugin_parent>/<name>@<version>`` using metadata read from
    ``module.yml`` (or ``plugin.yml``); exits the process if neither exists.

    Args:
        dst: base destination directory.
        plugin_parent: sub-directory used for plugin projects.

    Returns:
        The (possibly extended) destination path.
    """
    if self.is_plugin:
        plugin_yml_path = fs.join(self.path, 'module.yml')
        if not fs.exists(plugin_yml_path):
            plugin_yml_path = fs.join(self.path, 'plugin.yml')
        if fs.exists(plugin_yml_path):
            import yaml
            # BUG FIX: the file handle was previously opened without being
            # closed; use a context manager.
            # NOTE(review): yaml.load without an explicit Loader is unsafe on
            # untrusted input; this reads a local project file, but consider
            # yaml.safe_load.
            with open(plugin_yml_path) as yml:
                info = yaml.load(yml)
            fullname = '{}@{}'.format(info['name'], info['version'])
            dst = fs.join(dst, plugin_parent, fullname)
            fs.makedirs(dst)
        else:
            logger.error('module.yml or plugin.yml not exists')
            sys.exit(1)
    logger.info('Copy project: {!r} from {!r} to {!r}'.format(
        self.name, self.path, dst))
    for dirname in fs.listdir(self.path):
        dirpath = fs.join(self.path, dirname)
        # Skip excluded entries and hidden files/directories.
        if dirname in (EXCLUDE_DIRS + EXCLUDE_FILES) \
                or dirname.startswith('.'):
            continue
        # Exclude foreign-platform launchers from the copy.
        fs.copy(dirpath, dst, exclude_dirs=EXCLUDE_DIRS,
                exclude_files=['*.exe', '*.bat'] if not IS_WINDOWS
                else ['*.sh'])
    return dst
def download_pkg(self, filename):
    """Download *filename* from the file service into PKG_CACHE_DIR.

    Coroutine: reports progress through ``self.reporter`` and raises
    MessageError on any HTTP failure (non-200 response or HTTPError).
    """
    yield self.reporter.log_ok('Begin to download package '
                               '{} ...'.format(filename))
    down_url = self.download_url + '?filename=' + filename
    try:
        response = yield self.client.fetch(
            down_url,
            connect_timeout=config.get('file_service_connect_timeout',
                                       3600.0),
            request_timeout=config.get('file_service_request_timeout',
                                       3600.0),
            validate_cert=False)
        if response.code == 200:
            if not nfs.exists(PKG_CACHE_DIR):
                os.makedirs(PKG_CACHE_DIR)
            # NOTE(review): response.body is raw bytes, not a path —
            # presumably nfs.copy accepts content as its source; confirm.
            nfs.copy(response.body, nfs.join(PKG_CACHE_DIR, filename))
            yield self.reporter.log_ok(
                'Download package {} success'.format(filename))
        else:
            raise MessageError(
                'Download package {} failed, reason: {}!'.format(
                    filename, response.body))
    except HTTPError as e:
        # e.message is Python 2-only; kept as-is for this codebase.
        raise MessageError(
            'Download package {} failed, reason: {}, {}!'.format(
                filename, e, e.message))
def uncompress(cls, fullname, parent_dir, overwrite=True):
    """Extract the cached package tarball *fullname* under *parent_dir*.

    The archive is extracted into PKG_UNCOMPRESS_DIR (cleared first), its
    package metadata is read, and the extracted tree is moved to
    ``parent_dir/<pkg name>``.

    Args:
        fullname: archive file name inside PKG_CACHE_DIR.
        parent_dir: target parent directory (falls back to PKG_DIR).
        overwrite: when False and the target already exists, skip the move.

    Returns:
        Tuple ``(dst, pkg_info)`` — destination path and parsed metadata.

    Raises:
        NotExistsError: when the cached archive is missing.
    """
    parent_dir = parent_dir if parent_dir else PKG_DIR
    src = os.path.join(PKG_CACHE_DIR, fullname)
    if not os.path.exists(src):
        # BUG FIX: the original message was 'The cache of '.format(fullname)
        # — no placeholder, so the package name was silently dropped.
        raise NotExistsError('The cache of {} not exists'.format(fullname))
    # Ensure target directories exist with restrictive permissions.
    for dir_name in [parent_dir, PKG_UNCOMPRESS_DIR]:
        if not nfs.exists(dir_name):
            nfs.makedirs(dir_name, 0o750)
    # Clear PKG_UNCOMPRESS_DIR from any previous extraction.
    for name in os.listdir(PKG_UNCOMPRESS_DIR):
        nfs.remove(os.path.join(PKG_UNCOMPRESS_DIR, name))
    # Extract the archive; assumes it contains a single top-level directory.
    with tarfile.open(src) as tar:
        tar.extractall(PKG_UNCOMPRESS_DIR)
    extract_path = os.path.join(PKG_UNCOMPRESS_DIR,
                                os.listdir(PKG_UNCOMPRESS_DIR)[0])
    pkg_yml_path = os.path.join(extract_path, PKG_YAML_NAME)
    pkg_info = cls.get_info(pkg_yml_path, fullname)
    dst = os.path.join(parent_dir, pkg_info['name'])
    if not overwrite and os.path.exists(dst):
        # BUG FIX: this branch used to return bare ``dst`` while the normal
        # path returns a 2-tuple; callers unpacking (dst, pkg_info) would
        # break. Return the same shape on both paths.
        return dst, pkg_info
    nfs.rename(extract_path, dst, overwrite)
    return dst, pkg_info
def execute(self):
    """Upgrade every module in ``self.modules``.

    Coroutine: for each module it validates the payload, downloads/prepares
    the package, uninstalls the old version, installs the new one and
    removes the temporary package. Reports through ``self.reporter`` and
    exits the process with status 1 on any unexpected error.
    """
    try:
        for module in self.modules:
            module_name = module['module_name']
            module_env = self.deal_env(module.get('env'))
            # Skip modules without a package filename in the task message.
            if not module.get('filename'):
                yield self.reporter.log_error('Filename for {} is None'
                                              ''.format(module_name))
                continue
            # Only upgrade modules that are already installed.
            if not nfs.exists(nfs.join(PKG_DIR, module_name)):
                yield self.reporter.log_error('{} is not installed!'
                                              ''.format(module_name))
                continue
            yield self.reporter.log_ok('Begin to Upgrade {}'
                                       ''.format(module_name))
            pkg_path = yield self.do_pre(module['filename'], module_env)
            # Remove the currently installed version before reinstalling.
            uninstall = Uninstall([module], self.io_loop, False)
            uninstall.reporter = self.reporter
            yield uninstall.remove(module_name)
            yield self.do_install(module)
            nfs.remove(pkg_path)
        yield self.reporter.log_ok('Finish upgrade modules!', True)
        yield self.circle_cmd('reloadconfig')
    except Exception as e:
        yield self.reporter.log_error(str(e), True)
        sys.exit(1)
def get_agent_config():
    """Load and return the agent configuration as parsed YAML.

    Returns an empty dict when the configuration file does not exist.
    """
    from constants import CONF_PATH
    if not nfs.exists(CONF_PATH):
        return {}
    with open(CONF_PATH) as temp:
        return yaml.load(temp.read())
def wait_for_file_complete(self):
    """Poll until another request finishes caching the file, then serve it.

    Coroutine: while a lock file exists, another handler is downloading the
    same file; wake up every ``lock_watch_interval`` seconds until either
    the file appears (lock gone) or the overall request timeout elapses.
    """
    logger.info('#%d File lock exists, waiting for complete: %s',
                id(self.request), self.file_path)
    lock_watch_interval = config.get('file_service_lock_watch_interval',
                                     5.0)
    current_timeout = 0.0
    request_timeout = config.get('file_service_request_timeout', 3600.0)
    while current_timeout < request_timeout:
        yield gen.sleep(lock_watch_interval)
        current_timeout += lock_watch_interval
        if not nfs.exists(self.lock_file) and nfs.exists(self.file_path):
            # File caching finished; return the local cached file's path.
            self.write(self.file_path)
            return
        else:
            logger.info('#%d Waiting for file complete: %s',
                        id(self.request), self.file_path)
    # Timed out waiting for the file cache to complete.
    self.send_error(504, message='Waiting for file complete timeout')
def _check(project_name, project_url, project_branch='master'):
    """Ensure a local checkout of *project_url* exists on *project_branch*.

    Clones the repository when missing; otherwise checks out the branch and
    pulls the latest changes.
    """
    already_cloned = nfs.exists(project_name)
    if not already_cloned:
        execute('git clone {} {}'.format(project_url, project_name))
    with cd(project_name):
        execute('git checkout {}'.format(project_branch))
        # Only pull for pre-existing clones; a fresh clone is up to date.
        if already_cloned:
            execute('git pull')
def get(self): self.file_name = self.get_argument('filename') # type: str self.space_dir = nfs.join(settings.REPO_DIR, settings.REPO_ANT_SPACENAME) if not nfs.exists(self.space_dir): nfs.makedirs(self.space_dir) self.file_path = nfs.join(self.space_dir, self.file_name) lock_file_name = nfs.extsep + self.file_name + nfs.extsep + 'lock' self.lock_file = nfs.join(self.space_dir, lock_file_name) logger.info('#%d Request file: %s', id(self.request), self.file_name) if nfs.exists(self.lock_file): yield self.wait_for_file_complete() else: is_cache_hit = yield self.try_to_return_file_cache() if is_cache_hit: return logger.info('#%d File cache missed: %s', id(self.request), self.file_path) nfs.touch(self.lock_file) yield self.request_file_from_upstream()
def check(self, dst=PROJECT_ROOT):
    """Refresh this project under *dst*: remove any old copy, download the
    archive, and uncompress it into PROJECT_ROOT."""
    logger.info('----------------------' * 3)
    logger.info('Check project: {!r}'.format(self.name))
    if not fs.exists(dst):
        fs.makedirs(dst)
    fs.chdir(dst)
    # Drop the previous checkout before downloading a fresh archive.
    if self.exists():
        self.remove()
    archive = self.download(dst)
    logger.info('Uncompress from {!r} to {!r}'.format(archive, dst))
    fs.uncompress(archive, dst=PROJECT_ROOT, temp_dir=PROJECT_ROOT)
def backup_files(self):
    """Back up the agent installation into AGENT_BACK_DIR.

    Empties (or creates) the backup directory, then copies every entry of
    ROOT_DIR that is not listed in EXCLUDE_BACK_DIRS.
    """
    if nfs.exists(AGENT_BACK_DIR):
        # Wipe the previous backup contents.
        nfs.remove(nfs.join(AGENT_BACK_DIR, '*'))
    else:
        nfs.makedirs(AGENT_BACK_DIR)
    self.http_handler.log_ok('Backup files')
    for entry in nfs.listdir(ROOT_DIR):
        if entry not in EXCLUDE_BACK_DIRS:
            nfs.copy(nfs.join(ROOT_DIR, entry), AGENT_BACK_DIR)
    self.http_handler.log_ok('Backup done')
def close_file_resource(self):
    """Release this handler's file resources: close the temp file and drop
    the lock file. Errors are logged and reported as a 500."""
    try:
        temp = self.temp_file
        if temp and not temp.closed:
            temp.close()
        if nfs.exists(self.lock_file):
            nfs.remove(self.lock_file)
    except Exception as exc:
        logger.error('#%d Error while closing resource (%s): %s',
                     id(self.request), self.file_path, exc, exc_info=True)
        # FIXME: this may run after the request has already finished.
        self.send_error(500, message=exc)
def init_openresty(baseurl, upstream, runner):
    """Render the nginx conf template in place and prepare the log dir.

    Substitutes the UPSTREAM / BASEURL / RUNNER / BASEDIR placeholders in
    NGINX_CONF (no-op when the file is absent), then ensures the openresty
    logs directory exists with mode 0o750.
    """
    if not nfs.exists(NGINX_CONF):
        return
    with open(NGINX_CONF) as temp:
        content = temp.read()
    substitutions = [('UPSTREAM', upstream), ('BASEURL', baseurl)]
    if not IS_WINDOWS and runner:
        substitutions.append(('RUNNER', runner))
    if IS_WINDOWS:
        # Double the backslashes so the path survives nginx conf escaping.
        basedir = os.path.normpath(ROOT_DIR).replace('\\', '\\\\')
    else:
        basedir = ROOT_DIR
    substitutions.append(('BASEDIR', basedir))
    for placeholder, value in substitutions:
        content = content.replace(placeholder, value)
    with open(NGINX_CONF, 'w') as conf:
        conf.write(content)
    log_dir = nfs.join(ROOT_DIR, 'openresty', 'logs')
    if not nfs.exists(log_dir):
        nfs.makedirs(log_dir, 0o750)
def _deliver_to_aix(self, compress_name):
    """Locate (or download) the agent package and scp it to the AIX host.

    Coroutine: looks for *compress_name* in REPO_DIR, then in the ant
    space sub-directory; if still missing, asks the local file service to
    cache it. Finally creates the remote directory and copies the file via
    ssh/scp. Any failure is wrapped in MessageError.
    """
    try:
        file_path = nfs.join(REPO_DIR, compress_name)
        if not nfs.exists(file_path):
            file_path = nfs.join(REPO_DIR, REPO_ANT_SPACENAME,
                                 compress_name)
            if not nfs.exists(file_path):
                # Trigger the local file service to fetch and cache it;
                # it stores the file at file_path as a side effect.
                down_url = 'http://127.0.0.1:16600/file?filename={}'\
                    .format(compress_name)
                client = AsyncHTTPClient(io_loop=ioloop.IOLoop.current())
                response = yield client.fetch(down_url,
                                              connect_timeout=3600.0,
                                              request_timeout=3600.0,
                                              validate_cert=False)
                if response.code != 200:
                    raise MessageError("Can't download pkg by http")
        yield self.do_ssh_cmd('umask 0027 && mkdir -p "{}"'.format(
            self.dst))
        yield self.ssh_client.scp(os.path.realpath(file_path),
                                  self.dst_name)
    except Exception as e:
        raise MessageError('Download agent pkg failed. {}'.format(e))
def try_to_return_file_cache(self):
    """Serve the locally cached file if it exists and is fresh enough.

    Coroutine: returns (via gen.Return) True when the cache was served,
    False otherwise. A stale cache file is deleted so the caller can
    re-download it.
    """
    is_cache_hit = False
    if nfs.exists(self.file_path):
        flag = yield self.check_file_mtime()
        if flag:
            logger.info('#%d File cache hit: %s',
                        id(self.request), self.file_path)
            # Return the local cached file's path directly.
            self.write(self.file_path)
            is_cache_hit = True
        else:
            logger.info('#{} The cache file is too old and need to '
                        'download the new file'.format(id(self.request)))
            nfs.remove(self.file_path)
    raise gen.Return(is_cache_hit)
def umcompress(self, compress_agent_path):
    """Recreate the agent uncompress directory and extract the package.

    NOTE(review): the method name looks like a typo of ``uncompress`` but
    is kept unchanged for caller compatibility.

    Returns:
        The path of the uncompressed agent tree.
    """
    self.http_handler.log_ok('Removing {!r}...'
                             ''.format(AGENT_UNCOMPRESS_DIRNAME))
    if nfs.exists(AGENT_UNCOMPRESS_DIR):
        nfs.remove(AGENT_UNCOMPRESS_DIR)
    self.http_handler.log_ok('Remove {!r} done'
                             ''.format(AGENT_UNCOMPRESS_DIRNAME))
    self.http_handler.log_ok('Uncompressing {}...'
                             ''.format(self.task_message['filename']))
    os.makedirs(AGENT_UNCOMPRESS_DIR)
    extracted_path = nfs.uncompress(compress_agent_path,
                                    AGENT_UNCOMPRESS_DIR,
                                    temp_dir=AGENT_UNCOMPRESS_DIR)
    self.http_handler.log_ok('Uncompress done')
    return extracted_path
def _load_command_and_env(self, action):
    """Resolve the command line for *action* and populate ``self.env``.

    ``core.*`` actions come from settings.CORE_ACTIONS; module actions are
    read from the module's ``manifest.yaml`` (returns None when missing),
    which also supplies environment variables merged into ``self.env``.

    Returns:
        The normalized command line, or None if no manifest was found.
    """
    if action.startswith('core.'):
        self.module_name = 'core'
        command = settings.CORE_ACTIONS.get(action)
    else:
        # assumes the action id is "<module>.<name>" with a single dot —
        # more dots would make this split raise; TODO confirm.
        self.module_name, _ = action.split('.')
        module_path = nfs.join(settings.MODULES_DIR, self.module_name)
        self.cwd = module_path
        yaml_file = nfs.join(module_path, 'manifest.yaml')
        if not nfs.exists(yaml_file):
            return None
        with open(yaml_file) as f:
            data = yaml.load(f)
        command = data['actions'].get(action)
        env = utils.normalize_env(data.get('env', {}),
                                  relpath_prefix=module_path)
        logger.debug('Action task {!r} env: {}'.format(self.action, env))
        # Collect env vars whose name starts with PATH and merge them into
        # the PATH environment variable; everything else goes straight
        # into self.env.
        paths = [self.env.get('PATH', '')]
        for name, value in env.iteritems():
            if isinstance(value, int):
                value = str(value)
            else:
                value = value.encode('utf-8')
            if name.startswith('PATH'):
                paths.append(value)
            else:
                self.env[name] = value
        self.env['PATH'] = ';'.join(paths) if IS_WINDOWS else \
            ':'.join(paths)
        logger.debug('Action task {!r} env PATH: {}'.format(
            self.action, self.env['PATH']))
    command = utils.normalize_cmdline(command)
    return command
def maybe_download_python():
    """Return (python, pip) paths of the embedded runtime, downloading and
    unpacking it next to the cwd when the ``embedded`` dir is missing.
    """
    # NOTE(review): if ``platform`` here is the *stdlib* module, this line
    # raises AttributeError (platform.system is a function — needs ``()``),
    # and ``platform.cpu`` below does not exist either. Both usages suggest
    # a project-local ``platform`` module exposing string attributes —
    # confirm before changing.
    system = platform.system.lower()
    cwd = os.getcwd()
    if WINDOWS:
        python = fs.join(cwd, 'embedded/python.exe')
        pip = fs.join(cwd, 'embedded/Scripts/pip.exe')
    else:
        python = fs.join(cwd, 'embedded/bin/python')
        pip = fs.join(cwd, 'embedded/bin/pip')
    if not fs.exists(fs.join(cwd, 'embedded')):
        # e.g. "python-linux-x86_64.tgz"; archive extension per platform.
        python_name = 'python-{}-{}.{}'.format(system, platform.cpu,
                                               POSTFIX.get(system, 'tgz'))
        python_url = PYTHON_TEMPLATE_URL.format(python_name)
        download(python_url, python_name)
        fs.uncompress(python_name)
        # The archive unpacks to "python-<system>-<cpu>"; rename it.
        fs.rename('python-{}-{}'.format(system, platform.cpu), 'embedded')
    return python, pip
def init_uuid():
    """Ensure the agent config file carries a stable ``id``.

    Derives the id as md5("<tenant>:<network_domain>:<ip>") and writes it
    back to CONF_PATH. No-op when the file is absent or an id is already
    set. Prints the error and exits with status 1 on failure.
    """
    try:
        if not nfs.exists(CONF_PATH):
            return
        with open(CONF_PATH) as f:
            conf_dict = yaml.load(f.read()) or {}
        if conf_dict.get('id'):
            return
        id_str = '{}:{}:{}'.format(conf_dict['tenant'],
                                   conf_dict['network_domain'],
                                   conf_dict['ip'])
        conf_dict.update({'id': hashlib.md5(id_str).hexdigest()})
        dumped = yaml.dump(conf_dict, default_flow_style=False)
        with open(CONF_PATH, 'w') as f:
            f.write(dumped)
    except Exception as e:
        print(e)
        sys.exit(1)
def deploy_dispatcher(self):
    """Build the dispatcher front-end, upload it and restart it via pm2."""
    with cd(self.project_name):
        execute(['../node/bin/node', '../node/bin/yarn'])
        execute(['../node/bin/node',
                 '../node/lib/node_modules/npm/bin/npm-cli.js',
                 'run', 'build'])
        # Replace the previous build output with the fresh dist.
        if nfs.exists('dispatcher'):
            nfs.remove('dispatcher')
        nfs.rename('dist', 'dispatcher')
    excutor_cmd('su uyun -c "pm2 delete all"')
    server_path = INSTALL_DIR[self.project_name]
    for entry in ('node_modules', 'bin', 'scripts', 'install.sh',
                  'uninstall.sh', 'check_status.sh', 'dispatcher'):
        scp_upload_file(os.path.join(self.project_name, entry), server_path)
    excutor_cmd('cd {} && su uyun -c "pm2 start process.json"'.format(
        INSTALL_DIR[self.project_name]))
def run_hooks(self):
    """Run every ``*.py`` hook script under the new agent's hooks dir.

    Each hook is executed with the current interpreter from ROOT_DIR; its
    combined stdout/stderr is logged.

    Returns:
        bool: True when all hooks exit 0 (or the hooks dir is absent),
        False as soon as one hook fails.
    """
    hooks_dir = nfs.join(self.new_agent_dir, 'hooks')
    if not nfs.exists(hooks_dir):
        return True
    for dir_name in nfs.listdir(hooks_dir):
        hooks_file = nfs.join(hooks_dir, dir_name)
        if not (nfs.isfile(hooks_file) and hooks_file.endswith('.py')):
            continue
        # BUG FIX: the original passed an argument *list* with shell=True —
        # on POSIX the extra list items become shell positional parameters,
        # so the hook path was never handed to the interpreter. Execute the
        # list directly (no shell).
        p = Popen([sys.executable, hooks_file], stdout=PIPE,
                  stderr=STDOUT, cwd=ROOT_DIR)
        # BUG FIX: communicate() drains the pipe while waiting, avoiding
        # the deadlock the poll()/sleep loop could hit once a verbose hook
        # filled the pipe buffer.
        output, _ = p.communicate()
        if output:
            logger.info(output)
        if p.returncode != 0:
            self.http_handler.log_ok('Run hooks {} failed!'
                                     ''.format(hooks_file))
            return False
    return True
def enable(cls):
    """Enable openresty by restoring the enabled conf and reloading.

    Renames ``disable_conf`` back to ``enable_conf`` when present and
    reloads openresty.

    Raises:
        MessageError: if the enabled conf file is still missing.
    """
    disabled_conf = cls.disable_conf
    if nfs.exists(disabled_conf):
        nfs.rename(disabled_conf, cls.enable_conf)
    if not nfs.exists(cls.enable_conf):
        raise MessageError('Enable openresty failed !')
    yield cls.reload()
# coding: utf-8 import os import sys import logging import logging.config import nfs from framework.actions.constants import LOG_DIR os.umask(0027) if not nfs.exists(LOG_DIR): nfs.makedirs(LOG_DIR) LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'verbose': { 'format': '[%(levelname)s][%(asctime)s][%(module)s][%(process)d] %(message)s' }, 'module': { 'format': '[%(levelname)s][%(asctime)s][%(process)d] %(message)s' }, 'simple': { 'format': '%(message)s' }, }, 'handlers': { 'console': {
def _roll_back_files(self):
    """Restore agent files from AGENT_BACK_DIR when a backup exists."""
    self.http_handler.log_ok('Rolling back files...')
    backup_present = nfs.exists(AGENT_BACK_DIR)
    if backup_present:
        self.copy(AGENT_BACK_DIR, ROOT_DIR)
    self.http_handler.log_ok('Roll back files done')
def clear_old_dir(dst):
    """Delete *dst* (with a log line) when it exists; otherwise no-op."""
    if not fs.exists(dst):
        return
    logger.info('Clear {!r}'.format(dst))
    fs.remove(dst)
def exists(self):
    """Return True when this project's path exists on disk."""
    return fs.exists(self.path)
def create_lock_file(cls, pkg_dict):
    """Write *pkg_dict* to the package lock file as YAML, creating PKG_DIR
    first when needed."""
    if not nfs.exists(PKG_DIR):
        nfs.makedirs(PKG_DIR)
    content = yaml.dump(pkg_dict, default_flow_style=False)
    with open(LOCK_FILE, 'w') as fp:
        fp.write(content)
def handle_cli():
    """Entry point of the agent installer CLI.

    Parses docopt arguments, resolves the upstream/base URLs and the agent
    IP, writes the initial configuration, renders the openresty conf and
    registers/starts the services. Logs the error and exits with status 1
    on any failure.
    """
    try:
        cli_args = docopt(__doc__)
        if IS_WINDOWS and not check_win_agent():
            return
        if not nfs.exists(UPGRADE_PYTHON_DIR):
            nfs.copy(nfs.join(PYTHON_DIR, '*'), UPGRADE_PYTHON_DIR)
        # Get upstream message: explicit --upstream wins, otherwise derive
        # it from the SSH client's address.
        if cli_args['--upstream']:
            baseurl = cli_args['--upstream']
            upstream = urlparse.urljoin(cli_args['--upstream'],
                                        UPSTREAM_SUFFIX)
            upstream_mes = upstream_validate(cli_args['--upstream'])
            if not upstream_mes:
                logger.error('The upstream: {} is wrong!'
                             ''.format(cli_args['--upstream']))
                return
        else:
            # assumes SSH_CLIENT is set (run over ssh) — TODO confirm;
            # a plain local run would raise KeyError here.
            upstream_ip = os.environ['SSH_CLIENT'].split()[0]
            baseurl = 'http://{}:{}/'.format(upstream_ip, NGINX_PORT)
            upstream = '{}{}'.format(baseurl, UPSTREAM_SUFFIX)
            upstream_mes = [upstream_ip, NGINX_PORT]
        if cli_args['--ip']:
            if not ip_validate(cli_args['--ip']):
                raise ValueError('Ant agent ip: {} is invalid'
                                 ''.format(cli_args['--ip']))
            else:
                agent_ip = cli_args['--ip']
        else:
            agent_ip = get_agent_ip(upstream_mes[0], int(upstream_mes[1]))
        runner = cli_args['--user'] if cli_args['--user'] else \
            os.environ.get('USER')
        # NOTE(review): when $USER is unset, runner is None and su_cmd
        # becomes 'su - None -c ' — looks unintended; confirm.
        su_cmd = '' if runner == 'root' else 'su - {} -c '.format(runner)
        conf_dict = {
            'tenant': cli_args['--tenant'],
            'ip': agent_ip,
            'upstream': upstream,
            'network_domain': cli_args['--network-domain']
        }
        init_conf(conf_dict)
        init_bootstrap()
        init_openresty(baseurl, upstream, runner)
        register_service(su_cmd)
        # Hand the install tree over to the non-root runner on POSIX.
        if runner and not IS_WINDOWS and runner != 'root':
            status, result = execute('chown -R {user}:{user} {path}'.format(
                user=runner, path=ROOT_DIR))
            if status != 0:
                raise MessageError(
                    'Change log path owen failed! Error[{}]: {}'.format(
                        status, result))
        register_upgrade_service(su_cmd)
        start_circled(su_cmd)
    except Exception as e:
        logger.error(e)
        sys.exit(1)