def copy_to(self, dst, plugin_parent='plugins'):
    """Copy this project's files into *dst* and return the final destination.

    For plugin projects, metadata is read from ``module.yml`` (falling back
    to ``plugin.yml``) under ``self.path``, and the destination becomes
    ``<dst>/<plugin_parent>/<name>@<version>``.  The process exits with
    status 1 when a plugin has neither metadata file.

    :param dst: base destination directory.
    :param plugin_parent: subdirectory under *dst* used for plugins.
    :returns: the directory the project was copied into.
    """
    if self.is_plugin:
        plugin_yml_path = fs.join(self.path, 'module.yml')
        if not fs.exists(plugin_yml_path):
            plugin_yml_path = fs.join(self.path, 'plugin.yml')
        if fs.exists(plugin_yml_path):
            import yaml
            # Fix: the original `yaml.load(open(path))` leaked the file
            # handle and used the unsafe full loader; safe_load only
            # constructs basic Python types.
            with open(plugin_yml_path) as yml_file:
                info = yaml.safe_load(yml_file)
            fullname = '{}@{}'.format(info['name'], info['version'])
            dst = fs.join(dst, plugin_parent, fullname)
            fs.makedirs(dst)
        else:
            logger.error('module.yml or plugin.yml not exists')
            sys.exit(1)
    logger.info('Copy project: {!r} from {!r} to {!r}'.format(
        self.name, self.path, dst))
    for dirname in fs.listdir(self.path):
        dirpath = fs.join(self.path, dirname)
        # Skip excluded entries and hidden files/directories.
        if dirname in (EXCLUDE_DIRS + EXCLUDE_FILES) \
                or dirname.startswith('.'):
            continue
        fs.copy(dirpath, dst,
                exclude_dirs=EXCLUDE_DIRS,
                # Skip the other platform's launcher scripts.
                exclude_files=['*.exe', '*.bat'] if not IS_WINDOWS else ['*.sh'])
    return dst
def backup_files(self):
    """Back up the agent's top-level files into AGENT_BACK_DIR.

    The backup directory is created if missing, or emptied if it already
    exists, then every entry of ROOT_DIR not listed in EXCLUDE_BACK_DIRS
    is copied over.  Progress is reported through ``self.http_handler``.
    """
    # Ensure an empty backup directory before copying.
    if not nfs.exists(AGENT_BACK_DIR):
        nfs.makedirs(AGENT_BACK_DIR)
    else:
        nfs.remove(nfs.join(AGENT_BACK_DIR, '*'))
    self.http_handler.log_ok('Backup files')
    for entry in nfs.listdir(ROOT_DIR):
        if entry in EXCLUDE_BACK_DIRS:
            continue
        nfs.copy(nfs.join(ROOT_DIR, entry), AGENT_BACK_DIR)
    self.http_handler.log_ok('Backup done')
def check(self, dst=PROJECT_ROOT):
    """Fetch this project's archive and unpack it under PROJECT_ROOT.

    Creates *dst* if needed, removes any previous checkout, downloads a
    fresh compressed archive, and uncompresses it in place.

    :param dst: working directory for the download (defaults to
        PROJECT_ROOT).
    """
    logger.info('----------------------' * 3)
    logger.info('Check project: {!r}'.format(self.name))
    if not fs.exists(dst):
        fs.makedirs(dst)
    fs.chdir(dst)
    # Replace any stale copy before downloading the new archive.
    if self.exists():
        self.remove()
    archive_path = self.download(dst)
    logger.info('Uncompress from {!r} to {!r}'.format(archive_path, dst))
    fs.uncompress(archive_path, dst=PROJECT_ROOT, temp_dir=PROJECT_ROOT)
def execute(self):
    """Run the full agent-upgrade pipeline.

    Validates the task, prepares the HTTP handler, downloads and unpacks
    the new agent, backs up the current files, and hands control to the
    ``DoUpgrade`` runner.  Any failure is reported to the HTTP handler
    and logged with a traceback.
    """
    try:
        if not self.validate():
            return
        if not self._init_http_handler():
            return
        nfs.makedirs(AGENT_DOWNLOAD_DIR)
        self.check_version(self.task_message['version'])
        archive = self.download()
        extracted = self.umcompress(archive)
        # Preserve the current installation before replacing it.
        self.backup_files()
        # Hand off to the dedicated upgrader module (imported lazily so
        # the new code is picked up at upgrade time).
        from doupgrade import DoUpgrade
        DoUpgrade(extracted, self.http_handler).do_upgrade()
    except Exception as e:
        # Boundary handler: report failure upstream, then log locally.
        self.http_handler.log_error(str(e), done=True)
        logger.error(str(e), exc_info=True)
def get(self):
    """Serve the requested file, coordinating concurrent downloads.

    If a lock file for the requested name exists, another request is
    already fetching it, so this one waits for completion.  Otherwise the
    local cache is tried first; on a miss the lock file is created and
    the file is fetched from upstream.
    """
    self.file_name = self.get_argument('filename')  # type: str
    self.space_dir = nfs.join(settings.REPO_DIR, settings.REPO_ANT_SPACENAME)
    if not nfs.exists(self.space_dir):
        nfs.makedirs(self.space_dir)
    self.file_path = nfs.join(self.space_dir, self.file_name)
    # Hidden ".<name>.lock" marker signalling an in-flight download.
    self.lock_file = nfs.join(
        self.space_dir, nfs.extsep + self.file_name + nfs.extsep + 'lock')
    logger.info('#%d Request file: %s', id(self.request), self.file_name)
    if nfs.exists(self.lock_file):
        # Another request owns the download; just wait for it.
        yield self.wait_for_file_complete()
        return
    is_cache_hit = yield self.try_to_return_file_cache()
    if is_cache_hit:
        return
    logger.info('#%d File cache missed: %s', id(self.request), self.file_path)
    nfs.touch(self.lock_file)
    yield self.request_file_from_upstream()
def clone(self, dst=PROJECT_ROOT):
    """Clone this project's git repository into ``<dst>/<self.name>``.

    Changes the process working directory to *dst* for the clone, then
    steps back up one level.

    :param dst: parent directory for the clone (defaults to PROJECT_ROOT).
    :raises CalledProcessError: if the ``git clone`` command fails.
    """
    fs.makedirs(dst)
    fs.chdir(dst)
    # List form: url/name are passed as literal argv entries (no shell).
    check_call(['git', 'clone', self.url, self.name])
    # NOTE(review): '../' lands in dst's parent, which is the original
    # cwd only when dst is a single relative path component — and it is
    # skipped entirely if check_call raises.  Confirm callers don't rely
    # on the pre-call cwd being restored.
    fs.chdir('../')
# coding: utf-8 import os import sys import logging import logging.config import nfs from framework.actions.constants import LOG_DIR os.umask(0027) if not nfs.exists(LOG_DIR): nfs.makedirs(LOG_DIR) LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'verbose': { 'format': '[%(levelname)s][%(asctime)s][%(module)s][%(process)d] %(message)s' }, 'module': { 'format': '[%(levelname)s][%(asctime)s][%(process)d] %(message)s' }, 'simple': { 'format': '%(message)s' }, }, 'handlers': { 'console': {
def create_lock_file(cls, pkg_dict):
    """Persist *pkg_dict* to LOCK_FILE as block-style YAML.

    Creates PKG_DIR first when it does not exist, then overwrites the
    lock file with the serialized package mapping.

    :param pkg_dict: mapping of package information to record.
    """
    if not nfs.exists(PKG_DIR):
        nfs.makedirs(PKG_DIR)
    # default_flow_style=False forces readable block-style output.
    with open(LOCK_FILE, 'w') as fp:
        yaml.dump(pkg_dict, fp, default_flow_style=False)