def copy_module(ip, src, dest):
    """Push files to a remote server using ansible.sh's copy action.

    Arguments:
        ip {string} -- corresponding server host ip
        src {string} -- local path of the files to push
        dest {string} -- destination dir path on the server

    Returns:
        bool -- True on success, False otherwise.
    """
    shell_cmd = 'bash %s/scripts/ansible.sh copy %s %s %s' % (path.get_path(), ip, src, dest)
    (status, result) = utils.getstatusoutput(shell_cmd)
    logger.debug('copy action , status %s, output %s' % (status, result))
    # non-zero exit status means the ansible wrapper itself failed
    if status:
        logger.warn('copy action failed, status %s' % (status))
        consoler.warn(' ansible copy failed, host is %s, src is %s, dst is %s, status is %s, output is %s.', ip, src, dest, status, result)
        return False
    # ansible reports per-task outcome inside its output text
    if result.find('SUCCESS') == -1 and result.find('CHANGED') == -1:
        consoler.warn(' ansible copy failed, host is %s, src is %s, dst is %s, status is %s, output is %s.', ip, src, dest, status, result)
        logger.warn('copy action failed, output %s' % (result))
        return False
    consoler.info(' ansible copy success, host is %s, src is %s, dst is %s.', ip, src, dest)
    return True
def push_package(dir, host, chain_id, version, meta, force = True): """push install package of one server Arguments: dir {string} -- package install dir host {string} -- server host chain_id {string} -- chain id version {string} -- chain version force {string} -- is push all node dir or not published Returns: [bool] -- success return True, if not False will return. """ # check if common dir exist. if not os.path.exists(dir + '/common'): logger.warn(' common dir is not exist, dir is %s, host is %s', dir, host) return False # check if host dir exist. if not os.path.exists(dir + '/' + host): logger.warn(' host dir is not exist, dir is %s, host is %s', dir, host) return False try: if meta.get_host_nodes(host): pass except MCError as me: # create dir on the target server ret = ansible.mkdir_module(host, ansible.get_dir() + '/' + chain_id) if not ret: return ret # push common package ret = ansible.copy_module(host, dir + '/common/', ansible.get_dir() + '/' + chain_id) if not ret: return ret if force: logger.debug(' force is set, push all package, chain_id is %s, chain_version is %s, host is %s',chain_id, version, host) # push host dir ret = ansible.copy_module(host, dir + '/' + host + '/', ansible.get_dir() + '/' + chain_id) if not ret: return ret else: # push node${index} dir in host dir not published hnd = HostNodeDirs(chain_id, version, host) for node_dir in hnd.get_node_dirs(): if meta.host_node_exist(host, node_dir): logger.info(' %s already published, skip', node_dir) continue logger.info(' publish nodedir, chain_id is %s, chain_version is %s, node is %s', chain_id, version, node_dir) # push host dir ret = ansible.copy_module(host, dir + '/' + host + '/' + node_dir, ansible.get_dir() + '/' + chain_id) if not ret: return ret logger.info('push package success, dir is %s, host is %s, chain_id is %s, chain_version is %s', dir, host, chain_id, version) return True
def publish_chain(chains, force = False):
    """Publish a chain to designated servers using publish_server.

    Arguments:
        chains {list} -- chains to publish, each item in chain_id:chain_version format.
        force {bool} -- passed through to publish_server.

    Returns:
        null
    """
    pchains = []
    for item in chains:
        chain = item.split(':')
        if len(chain) != 2:
            logger.error('not chain_id:chain_version format, str is %s', item)
            consoler.error(' \033[1;31m skip, invalid publish format, chain_id:chain_version should require, chain is %s \033[0m', chain)
            continue
        chain_id = chain[0]
        chain_version = chain[1]
        pchains.append(Chain(chain_id, chain_version))
        # consoler.info('\t append publish chain, chain_id %s:chain_version %s', chain_id, chain_version)
        logger.debug('chain_id is %s, chain_version is %s', chain_id, chain_version)
    for chain in pchains:
        # bug fix: log this chain's own id/version -- the original logged
        # the leftover chain_id/chain_version from the parsing loop above.
        logger.info('publish, chain_id is %s, chain_version is %s', chain.get_id(), chain.get_version())
        publish_server(chain.get_id(), chain.get_version(), force)
def stop(self):
    """Stop the node by running its stop shell script (best-effort, no return)."""
    stop_shell = self.stop_shell_file()
    if not os.path.exists(stop_shell):
        # bug fix: log the script path itself -- the original logged the
        # full command string ('bash ...') as the missing file.
        logger.debug('stop file not exist, file is %s. ', stop_shell)
        return
    stop_command = 'bash ' + stop_shell
    status, output = utils.getstatusoutput(stop_command)
    logger.debug('stop status, status is %d, output is %s', status, output)
def unarchive_module(ip, src, dest):
    """Push an archive to a remote server and extract it there,
    using ansible.sh's unarchive action.

    Arguments:
        ip {[string]} -- [corresponding server host ip]
        src {[string]} -- [files dir path]
        dest {[string]} -- [corresponding server dir path]

    Returns:
        [bool] -- [true or false]
    """
    shell_cmd = 'bash %s/scripts/ansible.sh unarchive %s %s %s' % (path.get_path(), ip, src, dest)
    (status, result) = utils.getstatusoutput(shell_cmd)
    logger.debug('unarchive action , status %s, output %s' % (status, result))
    # non-zero exit status: the ansible wrapper itself failed
    if status:
        logger.warn('unarchive action failed, status %s' % (status))
        consoler.warn(' ansible unarchive failed, host is %s, src is %s, dst is %s, status is %s, output is %s.', ip, src, dest, status, result)
        return False
    # ansible reports task outcome inside its output text
    if result.find('SUCCESS') == -1 and result.find('CHANGED') == -1:
        logger.warn('unarchive action failed, output %s' % (result))
        consoler.warn(' ansible unarchive failed, host is %s, src is %s, dst is %s, status is %s, output is %s.', ip, src, dest, status, result)
        return False
    consoler.info(' ansible unarchive success, host is %s, src is %s, dst is %s.', ip, src, dest)
    return True
def get_name(self, chain_id):
    """Return the registered name for a chain id, or the id itself when unknown."""
    name = self.names.get(chain_id, str(chain_id))
    logger.debug(' get name chain id is %s, chain name is %s.', chain_id, name)
    return name
def register(chain_id, host, node):
    """Register one node of a published chain on its host via ansible."""
    logger.debug(' chain_id is %s, host is %s, node is %s', chain_id, host, node)
    meta = Meta(chain_id)
    if not meta.exist():
        consoler.error(' \033[1;31m register failed, chain not published, chain id is %s \033[0m', chain_id)
        return
    try:
        meta.get_host_node(host, node)
    except MCError as me:
        consoler.error(' \033[1;31m register failed, %s \033[0m', me)
        return
    # node names look like 'nodeN'; the suffix after 'node' is the index
    node_index = int(node[4:])
    if ansible.register_module(host, ansible.get_dir() + '/' + chain_id, node_index):
        consoler.info(' register success, chain_id is %s, host is %s, node is %s. \033[0m', chain_id, host, node)
    else:
        consoler.error(' \033[1;31m register failed, chain_id is %s, host is %s, node is %s. \033[0m', chain_id, host, node)
def load(self):
    """Load chain-id -> chain-name mappings from names.json.

    Returns:
        bool -- True when the file parsed (even with no 'names' key),
        False on a parse error; None when the file does not exist.
    """
    self.clear()
    namesjson = data.package_names_dir() + '/names.json'
    if not os.path.exists(namesjson):
        logger.info(' names.json is not exist, path is %s', namesjson)
        return
    logger.debug(' load begin, path is %s', namesjson)
    try:
        with open(namesjson) as f:
            jsondata = json.load(f)
            if 'names' in jsondata:
                for chain_id, chain_name in jsondata['names'].items():
                    self.append(chain_id, chain_name)
                    logger.debug(' load one name, chain id is %s, chain name is %s', chain_id, chain_name)
            # bug fix: this summary log sat after the returns in the
            # original and was unreachable dead code.
            logger.info(' load names end, info %s', self.names)
            return True
    except Exception as e:
        logger.error(' parser namesjson failed, namesjson is %s, exception is %s', namesjson, e)
        return False
def do_cmd(dst, cmd):
    """do cmd on remote server, dst can be one server, one chain or all server

    Arguments:
        dst {string} -- host ip or chain id or 'all'
        cmd {string} -- shell cmd or shell file
    """
    if dst == 'all':
        ansible.cmd_module('all', cmd)
        return
    if utils.valid_chain_id(dst):
        mm = Meta(dst)
        if not mm.exist():
            consoler.error(' \033[1;31m chain is not published, can not cmd action, chain_id is %s \033[0m', dst)
            return
        consoler.info(' => do cmd, chain id is %s', dst)
        # run the command on every host that has nodes of this chain
        for host_ip in mm.get_nodes().keys():
            logger.debug('host ip is ' + host_ip)
            ansible.cmd_module(host_ip, cmd)
        return
    if utils.valid_ip(dst):
        ansible.cmd_module(dst, cmd)
        return
    consoler.error(' \033[1;31m invalid docmd dst, dst is %s, dst should be invalid chain_id or invali host ip or \'all\'. \033[0m', dst)
def cmd_module(ip, cmd):
    """Execute a command on the corresponding server using ansible.sh's cmd action.

    Arguments:
        ip {string} -- server host ip
        cmd {string} -- command to execute

    Returns:
        [bool] -- true or false
    """
    # quote so the whole command survives the shell invocation
    cmd = '"' + cmd + '"'
    shell_cmd = 'bash %s/scripts/ansible.sh cmd %s %s' % (path.get_path(), ip, cmd)
    (status, result) = utils.getstatusoutput(shell_cmd)
    logger.debug(' cmd action , status %s, output %s' % (status, result))
    # success requires a zero exit status AND an ansible SUCCESS/CHANGED marker;
    # both failure branches of the original printed the identical message.
    ok = (not status) and (result.find('SUCCESS') != -1 or result.find('CHANGED') != -1)
    if not ok:
        consoler.error(' \033[1;31m ansible cmd failed, host is %s, output is %s \033[0m', ip, result)
        return False
    consoler.info(' ansible cmd success, host is %s, cmd is %s.', ip, cmd)
    return True
def check_module(ip, dest):
    """Check chain status on a server using ansible.sh's check action.

    Arguments:
        ip {string} -- corresponding server host ip
        dest {string} -- corresponding server dir path

    Returns:
        bool -- true or false
    """
    shell_cmd = 'bash %s/scripts/ansible.sh check %s %s' % (path.get_path(), ip, dest)
    (status, result) = utils.getstatusoutput(shell_cmd)
    logger.debug('check action , status %s, output %s' % (status, result))
    # non-zero exit status: the ansible wrapper itself failed
    if status:
        logger.warn('check action failed, status %s' % (status))
        consoler.warn(' ansible check failed, host is %s, dst is %s, status is %s, output is %s.', ip, dest, status, result)
        return False
    # ansible reports task outcome inside its output text
    if result.find('SUCCESS') == -1 and result.find('CHANGED') == -1:
        logger.warn('check action failed, output %s' % (result))
        consoler.warn(' ansible check failed, host is %s, dst is %s, status is %s, output is %s.', ip, dest, status, result)
        return False
    consoler.info(' ansible check success, host is %s, output is %s.', ip, result)
    return True
def unregister_module(ip, dest, index):
    """Unregister a node on the corresponding server using ansible.sh.

    Arguments:
        ip {string} -- corresponding server host ip
        dest {string} -- corresponding server dir path
        index {int} -- node index to be unregistered

    Returns:
        bool -- true or false
    """
    (status, result) = utils.getstatusoutput('bash ' + path.get_path() + '/scripts/ansible.sh unregister ' + ip + ' ' + dest + ' ' + str(index))
    logger.debug(' unregister action, status %s, output %s' % (status, result))
    if status:
        consoler.warn(' ansible unregister failed, host is %s, index is %s, dst is %s, status is %s, output is %s.', ip, str(index), dest, status, result)
    elif result.find('SUCCESS') == -1 and result.find('CHANGED') == -1:
        consoler.warn(' ansible unregister failed, host is %s, index is %s, dst is %s, status is %s, output is %s.', ip, str(index), dest, status, result)
    elif result.find('success') == -1:
        # bug fix: this branch said 'ansible register failed' in an
        # unregister routine; also simplified `not (find(...) + 1)`.
        consoler.warn(' ansible unregister failed, host is %s, index is %s, dst is %s, status is %s, output is %s.', ip, str(index), dest, status, result)
    else:
        consoler.info(' ansible unregister success, host is %s, index is %s, output is %s.', ip, str(index), result)
        return True
    return False
def parser(self):
    """Parse chain config(s) from self.cfg.

    self.cfg may be a single config file or a directory of config files.
    For a directory, invalid or duplicate configs are logged and skipped;
    for a single file, a parse failure raises.

    Raises:
        MCError -- when self.cfg does not exist, or a single config file
        fails to parse.
    """
    self.clear()
    if os.path.exists(self.cfg) and os.path.isfile(self.cfg):
        logger.info(' single config is %s', self.cfg)
        # resolve one config.json
        try:
            self.append(ConfigConf(self.cfg))
        except Exception as e:
            logger.warn('parser cfg %s end exception, e is %s ', self.cfg, e)
            raise MCError(' parser config failed, invalid format, config is %s, exception is %s' % (self.cfg, e))
    elif os.path.isdir(self.cfg):
        logger.info(' config dir is %s', self.cfg)
        # resolve dir, if not config.json goto next
        for c in os.listdir(self.cfg):
            try:
                logger.debug(' config dir is %s, config file is %s', self.cfg, c)
                cc = ConfigConf(self.cfg + '/' + c)
                chain = cc.get_chain()
                if not self.append(cc):
                    # duplicate chain_id:version -- fetch the config that
                    # was registered first so the error names both files
                    cc = self.get_cc(chain)
                    logger.error(' chain_id: %s and chain_version: %s duplicate, config is %s:%s', chain.get_id(), chain.get_version(), cc.get_cfg(), c)
                    # this raise is caught by the except below, so the
                    # duplicate is logged and skipped, not fatal
                    raise MCError(' chain_id: %s and chain_version: %s duplicate, config is %s:%s' % (chain.get_id(), chain.get_version(), cc.get_cfg(), c))
                logger.debug(' append cc, cc is %s', cc)
                consoler.info(' parser config %s success, chain_id is %s, chain_version is %s', c, chain.get_id(), chain.get_version())
            except Exception as e:
                consoler.error(' \033[1;31m skip config %s, invalid config format parser failed, exception is %s \033[0m', c, e)
                logger.warn(' parser cfg %s end exception, e %s ', c, e)
    else:
        raise MCError(' invalid config, %s not exist' % self.cfg)
def check_fisco_version(self):
    """Verify the fisco-bcos binary exists and parse its reported version.

    Populates self.gm, self.major, self.minor and self.revision from the
    output of `fisco-bcos --version`; only 1.3.x versions are accepted.

    Raises:
        MCError -- when the binary is missing, --version fails, or the
        reported version is malformed or unsupported.
    """
    # check if fisco-bcos exists
    if not (os.path.exists(self.fisco_path) and os.path.isfile(self.fisco_path)):
        logger.error(' fisco-bcos not exist, fisco-bcos is %s', self.fisco_path)
        raise MCError(' fisco-bcos not exist, fisco-bcos is %s' % self.fisco_path)
    cmd = self.fisco_path + ' --version'
    status, output = utils.getstatusoutput(cmd)
    if status != 0:
        logger.error(
            ' fisco-bcos --version failed, fisco-bcos is %s, status is %d, output is %s',
            self.fisco_path, status, output)
        raise MCError('fisco-bcos --version failed , fisco-bcos is %s.' % self.fisco_path)
    logger.debug(' fisco-bcos --version, status is %d, output is %s', status, output)
    version_info = output.split()
    # expected shape: ['FISCO-BCOS', <label>, '<major>.<minor>.<revision>[-gm]', ...]
    # robustness: check the length before indexing so empty output
    # raises MCError below instead of IndexError.
    if len(version_info) > 2 and version_info[0] == 'FISCO-BCOS':
        version = version_info[2].split('.')
        if not len(version) == 3:
            logger.error(
                ' fisco-bcos --version invalid format, 00 status is %d, output is %s', status, output)
            raise MCError(
                ' fisco-bcos --version invalid format , fisco-bcos is %s, version is %s.' % (self.fisco_path, version_info[2]))
        # a '-gm' suffix on the revision marks the guomi (GM crypto) build
        if version[2].endswith('-gm'):
            self.gm = True
        self.major = str(int(version[0]))
        self.minor = str(int(version[1]))
        if self.gm:
            self.revision = str(int(version[2][:-3]))
        else:
            self.revision = str(int(version[2]))
        logger.info(' fisco-bcos is %s', self)
        # parser fisco-bcos version and check it.
        if not self.is_13_version():
            # bug fix: the original referenced an undefined name `fisco`
            # here, raising NameError instead of the intended MCError.
            logger.error(
                ' fisco-bcos is not 1.3.x version, not support now, %s', self)
            raise MCError(
                ' fisco-bcos is not 1.3.x version, not support now, %s' % self)
    else:
        logger.error(
            ' fisco-bcos --version invalid format, fisco-bcos is %s, status is %d, output is %s',
            self.fisco_path, status, output)
        raise MCError(
            ' fisco-bcos --version invalid format , fisco-bcos is %s, status is %d, output is %s.' % (self.fisco_path, status, output))
def append(self, chain_id, name):
    """Record a chain-id -> name mapping.

    Returns:
        bool -- False when the chain id is already registered, True otherwise.
    """
    if chain_id not in self.names:
        logger.debug(' append one name chain id is %s, chain name is %s', chain_id, name)
        self.names[chain_id] = name
        # mark the collection dirty so it gets persisted later
        self.changed = True
        return True
    return False
def to_p2p_nodes(self):
    """Build the P2pHosts collection covering every node on every host."""
    p2p_hosts = P2pHosts()
    for node in self.nodes:
        # one p2p endpoint per node, ports allocated consecutively
        for offset in range(node.get_node_num()):
            host = P2pHost(node.get_p2p_ip(), self.port.get_p2p_port() + offset)
            logger.debug(' add P2pHost : %s', host)
            p2p_hosts.add_p2p_host(host)
    return p2p_hosts
def expand_on_nonexist_chain(cc, dir):
    """Build the install packages for a chain that is not yet built locally.

    Validates the required artifacts in `dir`, checks for port conflicts,
    then creates the chain data dir, the common package and one install
    dir per server. On any failure the chain data dir is removed again
    (rollback) and MCError is raised.

    Arguments:
        cc -- parsed chain config (provides chain, port and node list)
        dir -- directory holding fisco-bcos, genesis.json and bootstrapnodes.json

    Raises:
        MCError -- when a required artifact is missing or the build fails.
    """
    fisco_path = dir + '/' + 'fisco-bcos'
    genesisjson = dir + '/' + 'genesis.json'
    bootstrapnodesjson = dir + '/' + 'bootstrapnodes.json'
    # check if fisco-bcos, genesis.json, bootstrapnodes.json exist.
    if not os.path.exists(fisco_path):
        raise MCError(
            ' fisco bcos not exist, dir path is %s' % dir)
    if not os.path.exists(genesisjson):
        raise MCError(
            ' genesis.json not exist, dir path is %s' % dir)
    if not os.path.exists(bootstrapnodesjson):
        raise MCError(
            ' bootstrapnodes.json not exist, dir path is %s' % dir)
    chain = cc.get_chain()
    port = cc.get_port()
    chain_id = chain.get_id()
    chain_version = chain.get_version()
    # parser fisco-bcos version and check it.
    fisco = Fisco(fisco_path)
    logger.debug(' fisco is %s', fisco)
    acp = AllChainPort()
    # port check: every node's ports must not clash with other chains
    # or with other versions of this chain
    for node in cc.get_nodes():
        for index in range(node.get_node_num()):
            acp.port_conflicts_outside_chain(chain.get_id(), node.get_host_ip(), port.to_port(index))
            acp.port_conflicts_inside_chain(node.get_host_ip(), port.to_port(index) ,chain.get_id(), chain.get_version())
    try:
        # create host dir
        os.makedirs(chain.data_dir())
        # copy genesis.json bootstrapnodes.json to chain dir.
        shutil.copy(genesisjson, chain.data_dir() + '/')
        shutil.copy(bootstrapnodesjson, chain.data_dir() + '/')
        # create common dir
        build_pkg.build_common_dir(chain, fisco)
        # build install dir for every server
        for node in cc.get_nodes():
            build_pkg.expand_host_dir(chain, node, port, fisco)
        # update web3sdk config
        web3_conf_by_chain(chain, fisco.is_gm())
    except Exception as e:
        # rollback: remove the partially-built chain data dir
        if os.path.exists(chain.data_dir()):
            shutil.rmtree(chain.data_dir())
        logger.error(' expand failed, chain id is %s, chain version is %s, exception is %s.', chain_id, chain_version, e)
        raise MCError(' expand failed, chain id is %s, chain version is %s, exception is %s' % (
            chain_id, chain_version, e))
def writeFile(self, file):
    """Serialize this object as JSON into the given file; re-raise on failure."""
    try:
        with open(file, "w+") as out:
            out.write(self.toJson())
            logger.debug(' write file success, file is %s', file)
    except Exception as e:
        logger.error(' write file failed, file is %s, exception is %s.', file, e)
        raise e
def export(self):
    """Stop the node, then export genesis.json via the export shell script.

    Raises:
        MCError -- when the expected genesis.json was not produced.
    """
    self.stop()
    target = self.chain.data_dir() + '/genesis.json'
    export_command = 'bash %s %s' % (self.export_shell_file(), target)
    status, output = utils.getstatusoutput(export_command)
    # success is judged by the exported file actually existing
    if os.path.exists(self.dir() + '/../genesis.json'):
        logger.debug(
            'export status, status is %d, output is %s', status, output)
    else:
        logger.error('export genesis.json failed, output is %s', output)
        raise MCError(
            ' export genesis.json failed, output is %s.' % output)
def get_all_ports_by_host(self, host):
    """Collect every known port record for `host` across all chains and versions."""
    host_ports = []
    for chain_port in self.get_ports().values():
        for version_port in chain_port.get_ports().values():
            try:
                hp = version_port.get_by_host(host)
                logger.debug(' host is %s, hp is %s', host, hp)
                host_ports.append(hp)
            except Exception:
                # best-effort: versions with no record for this host are skipped
                continue
    logger.debug('host is %s, len is %d', host, len(host_ports))
    return host_ports
def publish_server(chain_id, chain_version, force=False):
    """Publish one built chain version to all of its servers.

    Pushes the install package to every host found in the build dir and
    records the published nodes in meta.json. Without force, publishing a
    different version over an already-published chain is refused.

    Arguments:
        chain_id {string} -- chain id.
        chain_version {string} -- chain version.
        force {bool} -- republish everything, discarding existing meta.
    """
    chain = Chain(chain_id, chain_version)
    dir = chain.data_dir()
    if not os.path.isdir(dir):
        consoler.info(
            ' No build version exist for chain_id:%s chain_version:%s, do nothing.', chain_id, chain_version)
        logger.warn(
            ' No build version exist for chain_id:%s chain_version:%s, do nothing.', chain_id, chain_version)
        return
    mm = meta.Meta(chain_id)
    if force:
        # force is set, publish this chain again.
        mm.clear()
        mm.set_chain_version(chain_version)
    else:
        if mm.exist():
            # refuse to publish a different version without --force
            if chain_version != mm.get_chain_version():
                consoler.error(
                    ' \033[1;31m chain %s already publish %s version, if you want publish annother version, --force/-f need to be set.\033[0m ', chain_id, mm.get_chain_version())
                return
        else:
            mm.set_chain_version(chain_version)
    consoler.info(' publish package for chain %s version %s begin.', chain_id, chain_version)
    # every sub-dir named like an ip is one server's package
    for host in os.listdir(dir):
        if not utils.valid_ip(host):
            logger.debug(' skip, not invalid host_ip ' + host)
            continue
        ret = push_package(dir, host, chain_id, chain_version, mm, force)
        if ret:
            # record each newly-published node's ports in the meta
            hp = HostPort(chain_id, chain_version, host)
            for node_dir, p in hp.get_ports().items():
                logger.debug(' node_dir is %s, port is %s', node_dir, p)
                if not mm.host_node_exist(host, node_dir):
                    mm.append(meta.MetaNode(host, p.get_rpc_port(), p.get_p2p_port(), p.get_channel_port(), node_dir))
            consoler.info(' \t push package : %s success.', host)
        else:
            consoler.error(' \033[1;31m \t push package : %s failed. \033[0m', host)
    # record meta info, write meta.json file
    mm.write_to_file()
    consoler.info(' publish package for chain %s version %s end.', chain_id, chain_version)
def load(self):
    """Scan the chain directory and record every version found in it."""
    self.clear()
    if not self.exist():
        logger.info(' dir not exist, chain_id is %s', self.chain_id)
        return
    chain_dir = self.get_chain_dir()
    logger.debug(' load begin, chain_id is %s ', self.chain_id)
    # each sub-entry of the chain dir is one version
    for version in os.listdir(chain_dir):
        self.append(version)
        logger.debug(' chain id %s, ver is %s', self.chain_id, version)
    logger.info(' load end, ver list is %s', self.get_ver_list())
def port_in_use(port):
    """Check whether a local TCP port is occupied, using `nc -z`.

    Arguments:
        port {int} -- port number (formatted with %d)

    Returns:
        bool -- True when the port is in use.
    """
    cmd = 'nc -z 127.0.0.1 %d' % port
    status, output = getstatusoutput(cmd)
    logger.debug('port is %s, status is %s, output is %s', port, status, output)
    # nc exits 0 when the connection succeeds, i.e. something is listening
    return status == 0
def get_meta_ports_by_host(host, am = None):
    """Return the metas that have at least one node on the given host.

    Arguments:
        host {string} -- host ip
        am {AllMeta} -- optional pre-loaded AllMeta; created on demand

    Returns:
        list -- metas containing nodes on `host`.
    """
    # bug fix: the original guard was inverted (`if not am is None`),
    # which replaced a caller-supplied AllMeta and left am as None
    # (crashing on am.get_metas()) when no argument was passed.
    if am is None:
        am = AllMeta()
    metas = []
    for meta in am.get_metas().values():
        try:
            host_nodes = meta.get_host_nodes(host)
        except Exception:
            # best-effort: metas without this host raise and are skipped
            continue
        if len(host_nodes) == 0:
            continue
        logger.debug(' host is %s, meta is %s', host, meta)
        metas.append(meta)
    return metas
def diagnose_module(ip, dest):
    """Check node status on a server via ansible.sh's diagnose action
    (which invokes monitor.sh remotely).

    Arguments:
        ip {string} -- corresponding server host ip
        dest {string} -- corresponding server dir path

    Returns:
        bool -- True on success, False otherwise.
    """
    shell_cmd = 'bash %s/scripts/ansible.sh diagnose %s %s' % (path.get_path(), ip, dest)
    (status, result) = utils.getstatusoutput(shell_cmd)
    logger.debug('diagnose action , status %s, output %s' % (status, result))
    # success requires a zero exit status AND an ansible SUCCESS/CHANGED marker;
    # both failure branches of the original printed the identical message.
    ok = (not status) and (result.find('SUCCESS') != -1 or result.find('CHANGED') != -1)
    if not ok:
        consoler.error(' \033[1;31m ansible diagnose failed, host is %s, output is %s \033[0m', ip, result)
        return False
    consoler.info(' ansible diagnose success, host is %s, result is %s.', ip, result)
    return True
def check_server(chain_id):
    """Check every node of a published chain using ansible's check action.

    Arguments:
        chain_id {[string]} -- chain id
    """
    mm = Meta(chain_id)
    if not mm.exist():
        logger.warn('chain meta is not exist, maybe the chain is not published, chain_id is %s', chain_id)
        consoler.warn('chain is not published, can not check action, chain_id is %s', chain_id)
        return
    logger.info('check action, chain_id is ' + chain_id)
    consoler.info(' => check all node of chain %s', chain_id)
    # run the check on every host that has nodes of this chain
    for host_ip in mm.get_nodes().keys():
        logger.debug('host ip is ' + host_ip)
        ansible.check_module(host_ip, ansible.get_dir() + '/' + chain_id)
def load(self):
    """Read every node's config.json under this host dir and record its ports."""
    self.clear()
    node_dirs = HostNodeDirs(self.chain_id, self.chain_version, self.host)
    host_dir = Chain(self.chain_id, self.chain_version).data_dir() + '/' + self.host + '/'
    for node in node_dirs.get_node_dirs():
        cfg_json = host_dir + '/' + node + '/config.json'
        if not os.path.exists(cfg_json):
            logger.error(' config not exist, config is %s .', cfg_json)
            continue
        cf = Config(self.chain_id)
        if not cf.fromJson(cfg_json):
            # unparsable config: skip this node
            continue
        port = Port(int(cf.get_rpc_port()), int(cf.get_p2p_port()), int(cf.get_channel_port()))
        self.ports[node] = port
        logger.debug(' append node, node is %s, port is %s', node, port)
    logger.info(' load end, hp ports is %s', self)
def pkg_list(chains):
    """List every version and package of the given chains.

    Arguments:
        chains {list} -- chain ids to list, or ['all'] for every built chain
    """
    logger.info(' chains is %s', chains)
    consoler.info(' chains is %s', chains)
    ns = Names()
    # robustness: an empty list would crash on chains[0] below
    if not chains:
        consoler.info(' No chain specified, do nothing.')
        return
    if chains[0] == 'all':
        ac = AllChain()
        chains = ac.get_chains()
        if len(chains) == 0:
            consoler.info(' No build chain exist, do nothing.')
    for chain in chains:
        logger.debug(' chain id is %s, chain name is %s', chain, ns.get_name(chain))
        consoler.info(' ==> chain id : %s ,chain name is %s', chain, ns.get_name(chain))
        cv = ChainVers(chain)
        if len(cv.get_ver_list()) == 0:
            consoler.info(' No build version exist for chain %s, do nothing.', chain)
        for version in cv.get_ver_list():
            consoler.info('\t\t => chain version : %s', version)
            logger.debug(' chain id is %s, chain version is %s', chain, version)
            vh = VerHosts(chain, version)
            for pkg in vh.get_pkg_list():
                consoler.info('\t\t\t => package :%s', pkg)
                hn = HostNodeDirs(chain, version, pkg)
                hp = HostPort(chain, version, pkg)
                for node_dir in hn.get_node_dirs():
                    consoler.info('\t\t\t\t => %s %s ', node_dir, hp.get_by_index(node_dir))
    logger.info('load end')
def fromJson(self, sjson):
    """Load node ports and proxy address from a config json file.

    Returns:
        bool -- True on success, False when the file cannot be parsed.
    """
    try:
        with open(sjson) as f:
            js = json.load(f)
        self.systemproxyaddress = js['systemproxyaddress']
        self.rpcport = js['rpcport']
        self.p2pport = js['p2pport']
        self.channelPort = js['channelPort']
        logger.debug(
            ' parser config success, cfg is %s, rpc : %s, p2p : %s, channel : %s',
            sjson, str(self.rpcport), str(self.p2pport), str(self.channelPort))
        return True
    except Exception as e:
        logger.error(' parser config failed, cfg is %s, exception is %s', sjson, e)
        return False
def push_file(host, src, dst):
    """Push a local file or directory to remote servers.

    Arguments:
        host {string} -- host ip or chain id or 'all'
        src {string} -- local file or dir
        dst {string} -- destination dir on the server(s)
    """
    if not os.path.exists(src):
        consoler.error(' \033[1;31m src is not exist, src is %s. \033[0m', src)
        return
    logger.info(' host is %s, src is %s, dst is %s', host, src, dst)
    if host == 'all':
        if mkdir_and_push(host, src, dst):
            consoler.info(' push %s to %s of all server success.', src, dst)
        return
    if utils.valid_chain_id(host):
        consoler.info(' => push %s to %s of chain %s.', src, dst, host)
        mm = Meta(host)
        if not mm.exist():
            consoler.error(' \033[1;31m chain is not published, can not push file action, chain_id is %s \033[0m', host)
            return
        consoler.info(' => do cmd, chain id is %s', host)
        # push to every host that has nodes of this chain
        for node_host in mm.get_nodes().keys():
            logger.debug(' host is %s', node_host)
            if mkdir_and_push(node_host, src, dst):
                consoler.info(' \t\t push %s to %s of %s server success.', src, dst, node_host)
        consoler.info(' => push %s to %s of chain %s end.', src, dst, host)
        return
    if utils.valid_ip(host):
        if mkdir_and_push(host, src, dst):
            consoler.info(' push %s to %s of %s server success.', src, dst, host)
        return
    consoler.error(' \033[1;31m invalid push file host, host is %s, dst should be invalid chain_id or invali host ip or \'all\'. \033[0m', host)