def __run_sshcmd(self, int_cmd):
    """ Run an internally-used ssh command; abort the installer on failure. """
    self.__run_ssh(int_cmd)
    # success path: nothing more to do
    if self.rc == 0:
        return
    errmsg = 'Host [%s]: Failed to run ssh commands, check SSH password or connectivity' % self.host
    self.logger.error(errmsg)
    err_m(errmsg)
def main(): options = get_options() cfgs = defaultdict(str) if options.cfgfile: if not os.path.exists(options.cfgfile): err_m('Cannot find config file \'%s\'' % options.cfgfile) config_file = options.cfgfile else: config_file = DBCFG_FILE if options.pwd: pwd = getpass.getpass('Input remote host SSH Password: '******'' if os.path.exists(config_file): cfgs = ParseInI(config_file).load() else: node_lists = expNumRe( raw_input( 'Enter list of Nodes separated by comma, support numeric RE, i.e. n[01-12]: ' )) # check if node list is expanded successfully if len([1 for node in node_lists if '[' in node]): err('Failed to expand node list, please check your input.') cfgs['node_list'] = ','.join(node_lists) results = wrapper.run(cfgs, options, mode='discover', pwd=pwd) format_output('Discover results') if len(results) > 4: output = output_row(results) else: output = output_column(results) print output with open('discover_result', 'w') as f: f.write('Discover Date: %s\n' % time.strftime('%Y-%m-%d %H:%M')) f.write(output)
def run(dbcfgs, options, mode='install', pwd=''): """ main entry mode: install/discover """ STAT_FILE = mode + '.status' LOG_FILE = '%s/logs/%s_%s.log' % (INSTALLER_LOC, mode, time.strftime('%Y%m%d_%H%M')) logger = get_logger(LOG_FILE) verbose = True if hasattr(options, 'verbose') and options.verbose else False upgrade = True if hasattr(options, 'upgrade') and options.upgrade else False user = options.user if hasattr(options, 'user') and options.user else '' threshold = options.fork if hasattr(options, 'fork') and options.fork else 10 script_output = [] # script output array conf = ParseJson(SCRCFG_FILE).load() script_cfgs = conf[mode] dbcfgs_json = json.dumps(dbcfgs) hosts = dbcfgs['node_list'].split(',') # handle skipped scripts, skip them if no need to run skipped_scripts = [] if upgrade: skipped_scripts += [ 'hadoop_mods', 'apache_mods', 'apache_restart', 'traf_dep', 'traf_kerberos' ] if dbcfgs['secure_hadoop'] == 'N': skipped_scripts += ['traf_kerberos'] if dbcfgs['traf_start'].upper() == 'N': skipped_scripts += ['traf_start'] if dbcfgs['ldap_security'].upper() == 'N': skipped_scripts += ['traf_ldap'] if 'APACHE' in dbcfgs['distro']: skipped_scripts += ['hadoop_mods'] else: skipped_scripts += ['apache_mods', 'apache_restart'] # set ssh config file to avoid known hosts verify on current installer node SSH_CFG_FILE = os.environ['HOME'] + '/.ssh/config' ssh_cfg = 'StrictHostKeyChecking=no\nNoHostAuthenticationForLocalhost=yes\n' with open(SSH_CFG_FILE, 'w') as f: f.write(ssh_cfg) run_cmd('chmod 600 %s' % SSH_CFG_FILE) def run_local_script(script, json_string, req_pwd): cmd = '%s/%s \'%s\'' % (INSTALLER_LOC, script, json_string) # pass the ssh password to sub scripts which need SSH password if req_pwd: cmd += ' ' + pwd if verbose: print cmd # stdout on screen p = subprocess.Popen(cmd, stderr=subprocess.PIPE, shell=True) stdout, stderr = p.communicate() rc = p.returncode if rc != 0: msg = 'Failed to run \'%s\'' % script if stderr: msg += ': ' + stderr print stderr 
logger.error(msg) state_fail('localhost: Script [%s]' % script) exit(rc) else: state_ok('Script [%s]' % script) logger.info('Script [%s] ran successfully!' % script) return stdout # run sub scripts try: remote_instances = [] if mode == 'discover': remote_instances = [ RemoteRun(host, logger, user=user, pwd=pwd, quiet=True) for host in hosts ] else: remote_instances = [ RemoteRun(host, logger, user=user, pwd=pwd) for host in hosts ] first_instance = remote_instances[0] for instance in remote_instances: if instance.host == dbcfgs['first_rsnode']: first_rs_instance = instance break logger.info(' ***** %s Start *****' % mode) for cfg in script_cfgs: script = cfg['script'] node = cfg['node'] desc = cfg['desc'] run_user = '' if not 'run_as_traf' in cfg.keys(): pass elif cfg['run_as_traf'] == 'yes': run_user = dbcfgs['traf_user'] if not 'req_pwd' in cfg.keys(): req_pwd = False elif cfg['req_pwd'] == 'yes': req_pwd = True status = Status(STAT_FILE, script) if status.get_status(): msg = 'Script [%s] had already been executed' % script state_skip(msg) logger.info(msg) continue if script.split('.')[0] in skipped_scripts: continue else: print '\nTASK: %s %s' % (desc, (83 - len(desc)) * '*') #TODO: timeout exit if node == 'local': run_local_script(script, dbcfgs_json, req_pwd) elif node == 'first': first_instance.run_script(script, run_user, dbcfgs_json, verbose=verbose) elif node == 'first_rs': first_rs_instance.run_script(script, run_user, dbcfgs_json, verbose=verbose) elif node == 'all': l = len(remote_instances) if l > threshold: piece = (l - (l % threshold)) / threshold parted_remote_instances = [ remote_instances[threshold * i:threshold * (i + 1)] for i in range(piece) ] parted_remote_instances.append(remote_instances[threshold * piece:]) else: parted_remote_instances = [remote_instances] for parted_remote_inst in parted_remote_instances: threads = [ Thread(target=r.run_script, args=(script, run_user, dbcfgs_json, verbose)) for r in parted_remote_inst ] for t in threads: 
t.start() for t in threads: t.join() if sum([r.rc for r in parted_remote_inst]) != 0: err_m( 'Script failed to run on one or more nodes, exiting ...\nCheck log file %s for details.' % LOG_FILE) script_output += [{ r.host: r.stdout.strip() } for r in parted_remote_inst] else: # should not go to here err_m('Invalid configuration for %s' % SCRCFG_FILE) status.set_status() except KeyboardInterrupt: err_m('User quit') # remove status file if all scripts run successfully os.remove(STAT_FILE) return script_output
def run(dbcfgs, options, mode='install', pwd=''): """ main entry mode: install/discover """ STAT_FILE = mode + '.status' LOG_FILE = '%s/logs/%s_%s.log' % (INSTALLER_LOC, mode, time.strftime('%Y%m%d_%H%M')) logger = get_logger(LOG_FILE) verbose = True if hasattr(options, 'verbose') and options.verbose else False upgrade = True if hasattr(options, 'upgrade') and options.upgrade else False user = options.user if hasattr(options, 'user') and options.user else '' threshold = options.fork if hasattr(options, 'fork') and options.fork else 10 script_output = [] # script output array conf = ParseJson(SCRCFG_FILE).load() script_cfgs = conf[mode] dbcfgs_json = json.dumps(dbcfgs) hosts = dbcfgs['node_list'].split(',') # handle skipped scripts, skip them if no need to run skipped_scripts = [] if upgrade: skipped_scripts += ['hadoop_mods', 'apache_mods', 'apache_restart', 'traf_dep', 'traf_kerberos'] if dbcfgs['secure_hadoop'] == 'N': skipped_scripts += ['traf_kerberos'] if dbcfgs['traf_start'].upper() == 'N': skipped_scripts += ['traf_start'] if dbcfgs['ldap_security'].upper() == 'N': skipped_scripts += ['traf_ldap'] if 'APACHE' in dbcfgs['distro']: skipped_scripts += ['hadoop_mods'] else: skipped_scripts += ['apache_mods', 'apache_restart'] # set ssh config file to avoid known hosts verify on current installer node SSH_CFG_FILE = os.environ['HOME'] + '/.ssh/config' ssh_cfg = 'StrictHostKeyChecking=no\nNoHostAuthenticationForLocalhost=yes\n' with open(SSH_CFG_FILE, 'w') as f: f.write(ssh_cfg) run_cmd('chmod 600 %s' % SSH_CFG_FILE) def run_local_script(script, json_string, req_pwd): cmd = '%s/%s \'%s\'' % (INSTALLER_LOC, script, json_string) # pass the ssh password to sub scripts which need SSH password if req_pwd: cmd += ' ' + pwd if verbose: print cmd # stdout on screen p = subprocess.Popen(cmd, stderr=subprocess.PIPE, shell=True) stdout, stderr = p.communicate() rc = p.returncode if rc != 0: msg = 'Failed to run \'%s\'' % script if stderr: msg += ': ' + stderr print stderr 
logger.error(msg) state_fail('localhost: Script [%s]' % script) exit(rc) else: state_ok('Script [%s]' % script) logger.info('Script [%s] ran successfully!' % script) return stdout # run sub scripts try: remote_instances = [] if mode == 'discover': remote_instances = [RemoteRun(host, logger, user=user, pwd=pwd, quiet=True) for host in hosts] else: remote_instances = [RemoteRun(host, logger, user=user, pwd=pwd) for host in hosts] first_instance = remote_instances[0] for instance in remote_instances: if instance.host == dbcfgs['first_rsnode']: first_rs_instance = instance break logger.info(' ***** %s Start *****' % mode) for cfg in script_cfgs: script = cfg['script'] node = cfg['node'] desc = cfg['desc'] run_user = '' if not 'run_as_traf' in cfg.keys(): pass elif cfg['run_as_traf'] == 'yes': run_user = dbcfgs['traf_user'] if not 'req_pwd' in cfg.keys(): req_pwd = False elif cfg['req_pwd'] == 'yes': req_pwd = True status = Status(STAT_FILE, script) if status.get_status(): msg = 'Script [%s] had already been executed' % script state_skip(msg) logger.info(msg) continue if script.split('.')[0] in skipped_scripts: continue else: print '\nTASK: %s %s' % (desc, (83 - len(desc))*'*') #TODO: timeout exit if node == 'local': run_local_script(script, dbcfgs_json, req_pwd) elif node == 'first': first_instance.run_script(script, run_user, dbcfgs_json, verbose=verbose) elif node == 'first_rs': first_rs_instance.run_script(script, run_user, dbcfgs_json, verbose=verbose) elif node == 'all': l = len(remote_instances) if l > threshold: piece = (l - (l % threshold)) / threshold parted_remote_instances = [remote_instances[threshold*i:threshold*(i+1)] for i in range(piece)] parted_remote_instances.append(remote_instances[threshold*piece:]) else: parted_remote_instances = [remote_instances] for parted_remote_inst in parted_remote_instances: threads = [Thread(target=r.run_script, args=(script, run_user, dbcfgs_json, verbose)) for r in parted_remote_inst] for t in threads: t.start() for t in 
threads: t.join() if sum([r.rc for r in parted_remote_inst]) != 0: err_m('Script failed to run on one or more nodes, exiting ...\nCheck log file %s for details.' % LOG_FILE) script_output += [{r.host:r.stdout.strip()} for r in parted_remote_inst] else: # should not go to here err_m('Invalid configuration for %s' % SCRCFG_FILE) status.set_status() except KeyboardInterrupt: err_m('User quit') # remove status file if all scripts run successfully os.remove(STAT_FILE) return script_output