def add_peers2cfg(_peers, _node):
    """add peers from a peers file into every node's config.ini

    Arguments:
        _peers {path} -- peers file, one ip:port per line
        _node {path} -- directory containing the node packages
    """
    data_path = _peers
    p2p_list = []
    node_send = []
    utils.file_must_exists(data_path)
    try:
        for line in open(data_path):
            peer = line.strip('\n')
            utils.valid_peer(peer)
            p2p_list.append(peer)
    except Exception as ini_exp:
        LOGGER.error(
            ' add peers %s file failed, exception is %s', data_path, ini_exp)
        raise MCError(
            ' add peers %s file failed, exception is %s' % (data_path, ini_exp))
    LOGGER.info('merge peers is %s', p2p_list)
    p2p_list = list(set(p2p_list))
    node_send = utils.get_all_nodes_dir(_node)
    for node_file in node_send:
        utils.file_must_exists('{}/config.ini'.format(node_file))
        merge_cfg(p2p_list, '{}/config.ini'.format(node_file))
def generate_root_ca(_dir):
    """generate root cert

    Arguments:
        _dir {path} -- root cert path
    """
    try:
        ca_dir = os.path.abspath(_dir)
        if utils.Status.gm_option:
            os.chdir('{}/scripts/gm/'.format(path.get_path()))
        else:
            os.chdir('{}/scripts'.format(path.get_path()))
        (status, result) = utils.getstatusoutput(
            './cts.sh gen_chain_cert {}'.format(ca_dir))
        os.chdir('{}'.format(path.get_path()))
        if bool(status):
            LOGGER.error(
                ' cts.sh failed! status is %d, output is %s, dir is %s.',
                status, result, ca_dir)
            raise MCError('cts.sh failed! status is %d, output is %s, dir is %s.'
                          % (status, result, ca_dir))
        LOGGER.info(
            ' cts.sh success! status is %d, output is %s, dir is %s.',
            status, result, ca_dir)
        LOGGER.info(' Generate root cert success, dir is %s', ca_dir)
        CONSOLER.info(' Generate root cert success, dir is %s', ca_dir)
    except MCError as cert_exp:
        console_error(' %s ' % cert_exp)
    except Exception as gen_cert_exp:
        console_error(
            ' Generate root cert failed! exception is %s.' % gen_cert_exp)
        LOGGER.error(' Generate root cert failed! exception is %s', gen_cert_exp)
        raise MCError(
            'Generate root cert failed! exception is %s' % gen_cert_exp)
def generator_node_ca(_dir, agent, node):
    """generate node cert

    Arguments:
        _dir {path} -- node cert path
        agent {path} -- agency cert path
        node {string} -- node name
    """
    node_dir = os.path.abspath(_dir)
    agent = os.path.abspath(agent)
    try:
        if utils.Status.gm_option:
            os.chdir('{}/scripts/gm/'.format(path.get_path()))
        else:
            os.chdir('{}/scripts/'.format(path.get_path()))
        (status, result) = utils.getstatusoutput(
            './cts.sh gen_node_cert {} {}/{}'.format(agent, node_dir, node))
        os.chdir('{}'.format(path.get_path()))
        if not bool(status):
            LOGGER.info(' Generate %s cert successful! dir is %s/%s.',
                        node, node_dir, node)
            if utils.Status.gm_option:
                # append the agency cert to the node cert, then clean up
                (status, result) = utils.getstatusoutput(
                    'cat {}/{}/gmagency.crt '
                    '>> {}/{}/gmnode.crt'.format(_dir, node, _dir, node))
                os.remove('{}/{}/gmagency.crt'.format(_dir, node))
                os.remove('{}/{}/gmnode.serial'.format(_dir, node))
            else:
                (status, result) = utils.getstatusoutput(
                    'cat {}/{}/agency.crt '
                    '>> {}/{}/node.crt'.format(_dir, node, _dir, node))
                os.remove('{}/{}/agency.crt'.format(_dir, node))
                os.remove('{}/{}/node.ca'.format(_dir, node))
                os.remove('{}/{}/node.json'.format(_dir, node))
                os.remove('{}/{}/node.private'.format(_dir, node))
                os.remove('{}/{}/node.serial'.format(_dir, node))
                os.remove('{}/{}/node.param'.format(_dir, node))
                os.remove('{}/{}/node.pubkey'.format(_dir, node))
        else:
            LOGGER.error(' Generate %s cert failed! Result is %s', node, result)
            raise MCError(' Generate %s cert failed! Result is %s' % (node, result))
    except MCError as cert_exp:
        console_error(' %s ' % cert_exp)
    except Exception as gen_cert_exp:
        console_error(' Generate node cert failed! exception is %s.' % gen_cert_exp)
        LOGGER.error(' Generate node cert failed! exception is %s', gen_cert_exp)
        raise MCError('Generate node cert failed! exception is %s' % gen_cert_exp)
def merge_cfg(p2p_list, cfg_file):
    """merge a peer list into an existing config.ini

    Arguments:
        p2p_list {list} -- list of ip:port peers
        cfg_file {path} -- config.ini file

    Raises:
        MCError -- config.ini missing or malformed
    """
    LOGGER.info("merge peers to config.ini now!")
    data = cfg_file
    utils.file_must_exists(data)
    p2p_get = p2p_list
    p2p_send = []
    p2p_cfg = configparser.ConfigParser(allow_no_value=True)
    try:
        with codecs.open(data, 'r', encoding='utf-8') as config_file:
            p2p_cfg.readfp(config_file)
    except Exception as build_exp:
        LOGGER.error(' open config.ini file failed, exception is %s', build_exp)
        raise MCError(' open config.ini file failed, exception is %s' % build_exp)
    if p2p_cfg.has_section('p2p'):
        p2p_send_opt = p2p_cfg.options('p2p')
    else:
        LOGGER.error(' config.ini has no [p2p] section, file is %s', data)
        raise MCError(' config.ini has no [p2p] section, file is %s' % data)
    for node in p2p_send_opt:
        p2p_section = p2p_cfg.get('p2p', node)
        p2p_send.append(p2p_section)
    # drop listen_ip and listen_port, keep only the node.N entries
    p2p_send.pop(0)
    p2p_send.pop(0)
    LOGGER.info("send node is %s!", p2p_send)
    LOGGER.info("get node ip is %s!", p2p_get)
    p2p_send = list(set(p2p_send + p2p_get))
    LOGGER.info("final node ip is %s!", p2p_send)
    for ip_idx, p2p_ip in enumerate(p2p_send):
        p2p_cfg.set("p2p", "node.{}".format(ip_idx), p2p_ip)
    p2p_cfg.set('certificate_whitelist',
                '; cal.0 should be nodeid, nodeid\'s length is 128')
    p2p_cfg.set('certificate_whitelist', ';cal.0=')
    p2p_cfg.set('certificate_blacklist',
                '; crl.0 should be nodeid, nodeid\'s length is 128')
    p2p_cfg.set('certificate_blacklist', ';crl.0=')
    with open(data, 'w') as config_file:
        p2p_cfg.write(config_file)
    LOGGER.info("merge config.ini done! output => %s", data)
    return True
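# Hedged usage sketch (illustrative only, not part of the module): merge_cfg()
# unions the node.N peers already in a node's [p2p] section with a new peer list
# and rewrites them; listen_ip/listen_port are left alone. Paths and peers below
# are made-up examples, and the node.N ordering after the set() union may differ.
#
#   [p2p]                           [p2p]
#   listen_ip=0.0.0.0               listen_ip=0.0.0.0
#   listen_port=30300      ==>      listen_port=30300
#   node.0=127.0.0.1:30300          node.0=127.0.0.1:30300
#                                   node.1=127.0.0.1:30301
#
# merge_cfg(['127.0.0.1:30301'], './node_127.0.0.1_30300/config.ini')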
def parser(mgroup):
    """resolve group_genesis.ini

    Arguments:
        mgroup {string} -- path of group_genesis.ini

    Raises:
        MCError -- invalid path or malformed file
    """
    LOGGER.info('group_genesis.ini is %s', mgroup)
    # resolve configuration
    if not utils.valid_string(mgroup):
        LOGGER.error(
            ' group_genesis.ini is not a valid path, group_genesis.ini is %s', mgroup)
        raise MCError(
            ' group_genesis.ini is not a valid path, group_genesis.ini is %s' % mgroup)
    # read and parse the config file
    config_parser = configparser.ConfigParser(allow_no_value=True)
    try:
        with codecs.open(mgroup, 'r', encoding='utf-8') as file_mchain:
            config_parser.readfp(file_mchain)
    except Exception as ini_exp:
        LOGGER.error(
            ' open group_genesis.ini file failed, exception is %s', ini_exp)
        raise MCError(
            ' open group_genesis.ini file failed, exception is %s' % ini_exp)
    if config_parser.has_section('group'):
        MgroupConf.group_id = config_parser.get('group', 'group_id')
    else:
        LOGGER.error(
            ' invalid group_genesis.ini format, group id is %s', MgroupConf.group_id)
        raise MCError(
            ' invalid group_genesis.ini format, group id is %s' % MgroupConf.group_id)
    if not config_parser.has_section('nodes'):
        LOGGER.error(' invalid group_genesis.ini format, section nodes not existed!')
        raise MCError(' invalid group_genesis.ini format, section nodes not existed!')
    group_nodes = config_parser.options('nodes')
    for node in group_nodes:
        p2p_section = config_parser.get('nodes', node)
        utils.valid_package(p2p_section)
        MgroupConf.p2p_ip.append(p2p_section.split(':')[0])
        MgroupConf.p2p_listen_port.append(p2p_section.split(':')[1])
    LOGGER.info('group_id is %s', MgroupConf.group_id)
    LOGGER.info('p2p_ip is %s', MgroupConf.p2p_ip)
    LOGGER.info('p2p_listen_port is %s', MgroupConf.p2p_listen_port)
    LOGGER.info('group_genesis.ini end, result is %s', MgroupConf())
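# Hedged example (illustrative only): the minimal group_genesis.ini layout this
# parser expects -- a [group] section with group_id and a [nodes] section whose
# values are ip:port pairs. The addresses below are placeholders.
#
#   [group]
#   group_id=1
#
#   [nodes]
#   node0=127.0.0.1:30300
#   node1=127.0.0.1:30301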
def download_console(_dir):
    """download and unpack the console package

    Arguments:
        _dir {path} -- directory to install the console into

    Raises:
        MCError -- version lookup, download or decompress failed
    """
    dir_must_exists(_dir)
    bin_path = _dir
    meta = '{}/meta'.format(path.get_path())
    file_must_exists('{}/ca.crt'.format(meta))
    file_must_exists('{}/agency.crt'.format(meta))
    file_must_exists('{}/agency.key'.format(meta))
    package_name = "console.tar.gz"
    dir_must_not_exists('{}/console'.format(bin_path))
    (status, version) = getstatusoutput(
        'curl -s https://api.github.com/repos/FISCO-BCOS/'
        'console/releases | grep "tag_name" '
        '| sort -u | tail -n 1 | cut -d \\" -f 4 | sed "s/^[vV]//"')
    if bool(status):
        LOGGER.error(' get console version failed, result is %s.', version)
        raise MCError(' get console version failed, result is %s.' % version)
    download_link = 'https://github.com/FISCO-BCOS/console/releases/download/v{}/{}'.format(
        version.strip('\n'), package_name.strip('\n'))
    cnd_link = 'https://www.fisco.com.cn/cdn/console/releases/download/v{}/{}'.format(
        version.strip('\n'), package_name.strip('\n'))
    if valid_url(cnd_link):
        LOGGER.info("Downloading console binary from %s", cnd_link)
        CONSOLER.info("Downloading console binary from %s", cnd_link)
        download_bin(cnd_link, package_name)
    elif valid_url(download_link):
        LOGGER.info("Downloading console binary from %s", download_link)
        CONSOLER.info("Downloading console binary from %s", download_link)
        download_bin(download_link, package_name)
    else:
        LOGGER.error(' Download console failed, please check your network!')
        raise MCError(' Download console failed, please check your network!')
    (status, result) = getstatusoutput(
        'tar -zxf {} -C {} && rm {}'.format(package_name, bin_path, package_name))
    if bool(status):
        LOGGER.error(' Decompress console failed, result is %s.', result)
        raise MCError(' Decompress console failed, result is %s.' % result)
    (status, result) = getstatusoutput(
        'chmod a+x {}/console/start.sh'.format(bin_path))
    if bool(status):
        LOGGER.error('chmod console failed! status is %d,'
                     ' output is %s.', status, result)
        raise MCError('chmod console failed!'
                      ' status is %d, output is %s.' % (status, result))
def valid_genesis(_file):
    """check a group genesis file name

    Arguments:
        _file {string} -- file name, expected form is group.<id>.genesis

    Returns:
        int -- the group id, or 0 if the name is not a genesis file
    """
    group_genesis = _file
    LOGGER.info("group genesis file is %s", group_genesis)
    pack = group_genesis.split('.')
    if len(pack) == 3:
        if pack[0] == 'group' and int(pack[1]) and pack[2] == 'genesis':
            LOGGER.info("valid_genesis is %s", pack)
            return int(pack[1])
    return 0
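# Hedged usage sketch (illustrative, not part of the module): valid_genesis()
# returns the numeric group id for a well-formed name and 0 otherwise. Callers are
# assumed to pass names matching group.<number>.genesis, since a non-numeric middle
# part such as "group.x.genesis" would raise ValueError from int().
#
# valid_genesis('group.2.genesis')   # -> 2
# valid_genesis('group.2.ini')       # -> 0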
def download_fisco(_dir):
    """download fisco-bcos

    Arguments:
        _dir {path} -- output path of the fisco-bcos binary
    """
    bin_path = _dir
    if Status.gm_option:
        package_name = "fisco-bcos-gm.tar.gz"
    else:
        package_name = "fisco-bcos.tar.gz"
    (status, version) = getstatusoutput(
        'curl -s https://raw.githubusercontent.com/'
        'FISCO-BCOS/FISCO-BCOS/master/release_note.txt | sed "s/^[vV]//"')
    if bool(status):
        LOGGER.error(' get fisco-bcos version failed, result is %s.', version)
        raise MCError(' get fisco-bcos version failed, result is %s.' % version)
    download_link = 'https://github.com/FISCO-BCOS/FISCO-BCOS/releases/download/v{}/{}'.format(
        version.strip('\n'), package_name.strip('\n'))
    LOGGER.info("Downloading fisco-bcos binary from %s", download_link)
    CONSOLER.info("Downloading fisco-bcos binary from %s", download_link)
    download_bin(download_link, package_name)
    (status, result) = getstatusoutput(
        'tar -zxf {} && mv fisco-bcos {} && rm {}'.format(package_name,
                                                          bin_path, package_name))
    if bool(status):
        LOGGER.error(' Decompress fisco-bcos failed, result is %s.', result)
        raise MCError(' Decompress fisco-bcos failed, result is %s.' % result)
    (status, result) = getstatusoutput('chmod a+x {}'.format(bin_path))
    if bool(status):
        LOGGER.error(' chmod fisco-bcos failed, result is %s.', result)
        raise MCError(' chmod fisco-bcos failed, result is %s.' % result)
    LOGGER.info("Downloading fisco-bcos successful, fisco-bcos at %s", bin_path)
    CONSOLER.info("Downloading fisco-bcos successful, fisco-bcos at %s", bin_path)
def get_console_cert(_dir):
    """copy console (sdk) certs into a target directory

    Arguments:
        _dir {path} -- directory that receives ca.crt, node.key and node.crt
    """
    LOGGER.info("get console in %s!", _dir)
    CONSOLER.info("get console in %s!", _dir)
    meta = '{}/meta'.format(path.get_path())
    data = _dir
    get_sdk_cert()
    utils.dir_must_exists(data)
    shutil.copyfile('{}/ca.crt'.format(meta), '{}/ca.crt'.format(data))
    shutil.copyfile('{}/sdk/node.key'.format(meta), '{}/node.key'.format(data))
    shutil.copyfile('{}/sdk/node.crt'.format(meta), '{}/node.crt'.format(data))
def get_all_nodes_dir(_dir):
    """collect all node package directories under a data directory

    Arguments:
        _dir {path} -- data directory that contains node packages

    Returns:
        list -- paths of the node directories
    """
    data_path = _dir
    node_dir_list = []
    dir_must_exists(data_path)
    LOGGER.info("get all nodes_dir from %s", data_path)
    for node_file in os.listdir(data_path):
        file_path = os.path.join(data_path, node_file)
        if os.path.isdir(file_path) and valid_node_dir(node_file):
            node_dir_list.append(file_path)
    LOGGER.info("all nodes_dir is %s", node_dir_list)
    return node_dir_list
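# Hedged usage sketch (illustrative only): with a layout produced by the build
# functions in this generator, e.g. ./data/node_127.0.0.1_30300 and
# ./data/node_127.0.0.1_30301, the call below is expected to return both node_*
# paths while skipping non-node entries such as ./data/scripts or ./data/monitor.
#
# node_dirs = get_all_nodes_dir('./data')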
def generator_agent_ca(_dir, _ca, agent):
    """generate agency cert

    Arguments:
        _dir {path} -- agency cert path
        _ca {path} -- root cert path
        agent {string} -- agency name
    """
    try:
        ca_dir = os.path.abspath(_ca)
        agency_dir = os.path.abspath(_dir)
        if utils.Status.gm_option:
            os.chdir('{}/scripts/gm/'.format(path.get_path()))
        else:
            os.chdir('{}/scripts'.format(path.get_path()))
        (status, result) = utils.getstatusoutput(
            './cts.sh gen_agency_cert {} {}/{}'.format(ca_dir, agency_dir, agent))
        os.chdir('{}'.format(path.get_path()))
        if not bool(status):
            LOGGER.info(' Generate %s cert successful! dir is %s/%s.',
                        agent, agency_dir, agent)
        else:
            LOGGER.error(' Generate %s cert failed! Result is %s', agent, result)
            raise MCError(' Generate %s cert failed! Result is %s' % (agent, result))
    except MCError as cert_exp:
        console_error(' %s ' % cert_exp)
    except Exception as gen_cert_exp:
        console_error(
            ' Generate agency cert failed! exception is %s.' % gen_cert_exp)
        LOGGER.error(' Generate agency cert failed! exception is %s', gen_cert_exp)
        raise MCError(
            'Generate agency cert failed! exception is %s' % gen_cert_exp)
def get_nodeid(get_path, send_path):
    """extract the node id from a node cert and write it to a file

    Arguments:
        get_path {file} -- node cert (node.crt / gmnode.crt)
        send_path {file} -- output file for the node id

    Raises:
        MCError -- cert missing or node id extraction failed
    """
    LOGGER.info("get_nodeid start! get path is %s", get_path)
    LOGGER.info("get_nodeid start! send path is %s", send_path)
    if not os.path.isfile(get_path):
        LOGGER.error(' node cert doesn\'t existed! Need %s', get_path)
        raise MCError(' node cert doesn\'t existed! Need %s' % get_path)
    try:
        if utils.Status.gm_option:
            (status, result) = utils.getstatusoutput('~/.tassl x509 -text -in {}'
                                                     ' | sed -n "15,20p" | sed '
                                                     '"s/://g" | sed "s/pub//g" |'
                                                     ' tr "\n" " " | sed "s/ //g" |'
                                                     ' cut -c 3-130 | cat >{}'
                                                     .format(get_path, send_path))
        else:
            (status, result) = utils.getstatusoutput('openssl x509 -text -in {}'
                                                     ' | sed -n "15,20p" | sed "s/://g"'
                                                     ' | tr "\n" " " | sed "s/ //g" |'
                                                     ' cut -c 3-130 | cat >{}'
                                                     .format(get_path, send_path))
        if status != 0:
            LOGGER.error(
                ' create nodeid failed! status is %d, output is %s, dir is %s.',
                status, result, get_path)
        else:
            LOGGER.info(
                ' create nodeid success! status is %d, output is %s, dir is %s.',
                status, result, get_path)
    except Exception as node_id_exp:
        LOGGER.error(' create nodeid failed! exception is %s, dir is %s.',
                     node_id_exp, get_path)
        raise MCError(' create nodeid failed! exception is %s.' % node_id_exp)
    LOGGER.info("get_nodeid success! get path is %s", get_path)
    LOGGER.info("get_nodeid success! send path is %s", send_path)
def read_peers(data_path):
    """read peers from a peers file

    Arguments:
        data_path {file} -- peers file, one ip:port per line
    """
    # read and parse the peers file
    try:
        for line in open(data_path):
            peer = line.strip('\n')
            if utils.valid_peer(peer):
                MchainConf.peers.append(peer)
    except Exception as ini_exp:
        LOGGER.error(' open %s file failed, exception is %s', data_path, ini_exp)
        raise MCError(' open %s file failed, exception is %s' % (data_path, ini_exp))
    MchainConf.peers = list(set(MchainConf.peers))
    LOGGER.info('peers is %s', MchainConf.peers)
def check_java(self):
    """check the local java installation

    Raises:
        MCError -- java not installed or version not suitable
    """
    cmd = 'java -version'
    status, output = utils.getstatusoutput(cmd)
    if status != 0:
        LOGGER.error(' java -version failed, status is %d, output is %s',
                     status, output)
        raise MCError(' java -version failed, java not installed.')
    version_str = output.split("\"")
    if not len(version_str) > 1:
        LOGGER.error(
            ' cannot get java version, status is %d, output is %s', status, output)
        raise MCError(
            ' cannot get java version, oracle jdk needs to be >=1.8,'
            ' please try \'java -version\'.')
    version_arr = version_str[1].split('.')
    if not len(version_arr) > 2:
        LOGGER.error(
            ' cannot get java version, status is %d, output is %s', status, output)
        raise MCError(
            ' cannot get java version, oracle jdk needs to be >=1.8,'
            ' please try \'java -version\'.')
    self.major = version_arr[0]
    self.minor = version_arr[1]
    self.openjdk = bool(output.lower().find('openjdk') != -1)
    if not self.is_suitable():
        raise MCError(
            ' invalid java version, oracle jdk needs to be >=1.8, now %s' % self)
    LOGGER.info(' java version is %s ', self)
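# Hedged parsing sketch (illustrative only, not part of the module): for a typical
# `java -version` output line such as
#
#   java version "1.8.0_281"
#
# splitting on the double quote yields version_str[1] == '1.8.0_281', and splitting
# that on '.' gives major='1', minor='8'; is_suitable() is assumed to accept that pair.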
def check_fisco(_file):
    """check the fisco-bcos binary

    Arguments:
        _file {path} -- fisco-bcos binary
    """
    bin_fisco = _file
    CONSOLER.info(" Checking fisco-bcos binary...")
    LOGGER.info(" Checking fisco-bcos binary...")
    (status, bin_version) = getstatusoutput('{} -v'.format(bin_fisco))
    if bool(status):
        LOGGER.error(
            'Checking fisco-bcos failed! status is %d,'
            ' output is %s, dir is %s.', status, bin_version, bin_fisco)
        raise MCError('Checking fisco-bcos failed!'
                      ' status is %d, output is %s, dir is %s.'
                      % (status, bin_version, bin_fisco))
    if 'FISCO-BCOS' not in bin_version:
        LOGGER.error("%s is wrong. Please correct it and try again.", bin_fisco)
        raise Exception("%s is wrong. Please correct it and try again." % bin_fisco)
    if Status.gm_option:
        if 'gm' not in bin_version:
            LOGGER.error(
                'Checking fisco-bcos failed! %s isn\'t the'
                ' gm version. Please correct it and try again.', bin_fisco)
            raise MCError('Checking fisco-bcos failed! %s isn\'t the'
                          ' gm version. Please correct it and try again.'
                          % bin_version)
    else:
        if 'gm' in bin_version:
            LOGGER.error(
                'Checking fisco-bcos failed! %s isn\'t the'
                ' standard version. Please correct it and try again.', bin_fisco)
            raise MCError(
                'Checking fisco-bcos failed! %s isn\'t the'
                ' standard version. Please correct it and try again.' % bin_version)
    CONSOLER.info(' Binary check passed.')
    LOGGER.info(' Binary check passed.')
def download_console(_dir):
    """download and unpack the console package

    Arguments:
        _dir {path} -- directory to install the console into

    Raises:
        MCError -- version lookup or decompress failed
    """
    bin_path = _dir
    package_name = "console.tar.gz"
    dir_must_not_exists('{}/console'.format(bin_path))
    (status, version) = getstatusoutput('curl -s https://raw.githubusercontent.com/'
                                        'FISCO-BCOS/console/master/release_note.txt'
                                        ' | sed "s/^[vV]//"')
    if bool(status):
        LOGGER.error(' get console version failed, result is %s.', version)
        raise MCError(' get console version failed, result is %s.' % version)
    download_link = 'https://github.com/FISCO-BCOS/console/releases/download/v{}/{}'.format(
        version.strip('\n'), package_name.strip('\n'))
    LOGGER.info("Downloading console binary %s", download_link)
    CONSOLER.info("Downloading console binary %s", download_link)
    download_bin(download_link, package_name)
    (status, result) = getstatusoutput('tar -zxf {} -C {} && '
                                       'rm {}'.format(package_name, bin_path,
                                                      package_name))
    if bool(status):
        LOGGER.error(' Decompress console failed, result is %s.', result)
        raise MCError(' Decompress console failed, result is %s.' % result)
    (status, result) = getstatusoutput(
        'chmod a+x {}/console/start.sh'.format(bin_path))
def valid_url(_url):
    """check whether a url is reachable
    """
    baseURL = _url
    try:
        if sys.version > '3':
            try:
                urllib.request.urlopen(baseURL)
                return True
            except urllib.error.HTTPError as e:
                # return code error (e.g. 404, 501, ...)
                LOGGER.warning('HTTPError: {}'.format(e.code))
                return False
            except urllib.error.URLError as e:
                # not an HTTP-specific error (e.g. connection refused)
                LOGGER.warning('URLError: {}'.format(e.reason))
                return False
            LOGGER.warning('Maybe some other error')
            return False
        else:
            req = urllib2.Request(baseURL)
            resp = urllib2.urlopen(req)
            if resp.getcode() == 404:
                LOGGER.warning("404 Found!")
                return False
            LOGGER.info("URL: {0} Response: {1}".format(baseURL, resp.getcode()))
            return True
    except Exception as download_err:
        LOGGER.error("Could not connect to URL: %s, err is %s",
                     baseURL, download_err)
        return False
def download_fisco(_dir):
    """download fisco-bcos

    Arguments:
        _dir {path} -- directory to place the fisco-bcos binary in
    """
    dir_must_exists(_dir)
    bin_path = _dir
    if Status.gm_option:
        package_name = "fisco-bcos-gm.tar.gz"
    else:
        package_name = "fisco-bcos.tar.gz"
    (status, version) = getstatusoutput(
        r'curl -s https://api.github.com/repos/FISCO-BCOS/FISCO-BCOS/releases '
        r'| grep "tag_name" | grep "\"v2\.[0-9]\.[0-9]\""'
        r' | sort -u | tail -n 1 | cut -d \" -f 4 | sed "s/^[vV]//"')
    if bool(status):
        LOGGER.error(' get fisco-bcos version failed, result is %s.', version)
        raise MCError(' get fisco-bcos version failed, result is %s.' % version)
    download_link = 'https://github.com/FISCO-BCOS/FISCO-BCOS/releases/download/v{}/{}'.format(
        version.strip('\n'), package_name.strip('\n'))
    cnd_link = 'https://www.fisco.com.cn/cdn/fisco-bcos/releases/download/v{}/{}'.format(
        version.strip('\n'), package_name.strip('\n'))
    if valid_url(cnd_link):
        LOGGER.info("Downloading fisco-bcos binary from %s", cnd_link)
        CONSOLER.info("Downloading fisco-bcos binary from %s", cnd_link)
        download_bin(cnd_link, package_name)
    elif valid_url(download_link):
        LOGGER.info("Downloading fisco-bcos binary from %s", download_link)
        CONSOLER.info("Downloading fisco-bcos binary from %s", download_link)
        download_bin(download_link, package_name)
    else:
        LOGGER.error(' Download fisco-bcos failed, please check your network!')
        raise MCError(' Download fisco-bcos failed, please check your network!')
    (status, result) = getstatusoutput(
        'tar -zxf {} -C {} && rm {}'.format(package_name, bin_path, package_name))
    if bool(status):
        LOGGER.error(' Decompress fisco-bcos failed, result is %s.', result)
        raise MCError(' Decompress fisco-bcos failed, result is %s.' % result)
    (status, result) = getstatusoutput('chmod a+x {}'.format(bin_path))
    if bool(status):
        LOGGER.error(' chmod fisco-bcos failed, result is %s.', result)
        raise MCError(' chmod fisco-bcos failed, result is %s.' % result)
    LOGGER.info("Downloading fisco-bcos successful, fisco-bcos at %s", bin_path)
    CONSOLER.info("Downloading fisco-bcos successful, fisco-bcos at %s", bin_path)
def get_nodeid_str(get_path):
    """extract the node id string from a node cert

    Arguments:
        get_path {file} -- node cert (node.crt / gmnode.crt)

    Raises:
        MCError -- cert missing or node id extraction failed

    Returns:
        string -- nodeid
    """
    # openssl x509 -text -in ./node.crt | sed -n '15,20p' | sed 's/://g' |
    # tr "\n" " " | sed 's/ //g' | sed 's/pub//g' | cut -c 3-130
    LOGGER.info("get_nodeid start! get path is %s", get_path)
    if not os.path.isfile(get_path):
        LOGGER.error(' node cert doesn\'t existed! Need %s', get_path)
        raise MCError(' node cert doesn\'t existed! Need %s' % get_path)
    try:
        if utils.Status.gm_option:
            (status, result) = utils.getstatusoutput(
                '~/.tassl x509 -text -in {}'
                ' | sed -n "15,20p" | sed '
                '"s/://g" | sed "s/pub//g" |'
                ' tr "\n" " " | sed "s/ //g"'
                ' | cut -c 3-130'.format(get_path))
            result = result.split('\n')[0]
        else:
            (status, result) = utils.getstatusoutput(
                'openssl x509 -text -in {}'
                ' | sed -n "15,20p" | sed '
                '"s/://g" | sed "s/pub//g" |'
                ' tr "\n" " " | sed "s/ //g"'
                ' | cut -c 3-130'.format(get_path))
        if status != 0:
            LOGGER.error(
                ' create nodeid failed! status is %d, output is %s, dir is %s.',
                status, result, get_path)
        else:
            LOGGER.info(
                ' create nodeid success! status is %d, output is %s, dir is %s.',
                status, result, get_path)
    except Exception as node_id_exp:
        LOGGER.error(' create nodeid failed! exception is %s, dir is %s.',
                     node_id_exp, get_path)
        raise MCError(' create nodeid failed! exception is %s.' % node_id_exp)
    LOGGER.info("get_nodeid success! get path is %s", get_path)
    return result
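# Hedged explanation (an assumption based on the shell pipeline above): the node id
# is the node's public key rendered as 128 hex characters. The pipeline prints the
# cert as text, keeps the lines holding the public key, strips colons, the "pub"
# label and whitespace, then cuts characters 3-130 to drop the leading "04"
# uncompressed-point prefix. Equivalent one-liner for a standard cert:
#
#   openssl x509 -text -in node.crt | sed -n '15,20p' | sed 's/://g' \
#     | sed 's/pub//g' | tr "\n" " " | sed 's/ //g' | cut -c 3-130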
def get_node_cert(get_path, send_path):
    """copy a node cert chain into conf/

    Arguments:
        get_path {PATH} -- input cert file
        send_path {PATH} -- output cert file

    Raises:
        MCError -- input missing, output already present, or bad cert format
    """
    LOGGER.info("get node.crt in %s", get_path)
    LOGGER.info("send node.crt in %s", send_path)
    if not os.path.isfile(get_path):
        LOGGER.error(' node cert doesn\'t existed! Need %s', get_path)
        raise MCError(' node cert doesn\'t existed! Need %s' % get_path)
    if os.path.isfile(send_path):
        LOGGER.error(' node.crt existed! path is %s', send_path)
        raise MCError(' node.crt existed! path is %s' % send_path)
    with open(get_path) as cert_file:
        node_crt = cert_file.read()
        # a valid node cert chain holds exactly two certificates (node + agency)
        cert_begin = node_crt.count('-----BEGIN CERTIFICATE-----', 0, len(node_crt))
        cert_end = node_crt.count('-----END CERTIFICATE-----', 0, len(node_crt))
        if (cert_begin != 2) or (cert_end != 2):
            LOGGER.error(' node cert format checked failed! path is %s', get_path)
            raise MCError(' node cert format checked failed! path is %s' % get_path)
    shutil.copy(get_path, send_path)
    LOGGER.info("get_node_cert success! get path is %s", get_path)
    LOGGER.info("get_node_cert success! send path is %s", send_path)
def get_sdk_cert():
    """make sure sdk certs exist under meta/sdk, generating them if needed
    """
    LOGGER.info("get sdk cert in meta!")
    CONSOLER.info("get sdk cert in meta!")
    meta = '{}/meta'.format(path.get_path())
    utils.file_must_exists('{}/ca.crt'.format(meta))
    utils.file_must_exists('{}/agency.crt'.format(meta))
    utils.file_must_exists('{}/agency.key'.format(meta))
    if os.path.isdir('{}/sdk'.format(meta)):
        utils.file_must_exists('{}/sdk/ca.crt'.format(meta))
        utils.file_must_exists('{}/sdk/node.crt'.format(meta))
        utils.file_must_exists('{}/sdk/node.key'.format(meta))
        LOGGER.info("sdk cert existed!")
        CONSOLER.info("sdk cert existed!")
    else:
        LOGGER.info("generate console cert!")
        CONSOLER.info("generate console cert!")
        ca.generator_node_ca(meta, meta, 'sdk')
def default_peers():
    MchainConf.peers = list()
    LOGGER.info('default peers is %s', MchainConf.peers)
def create_group_genesis(data_dir='{}/meta'.format(path.get_path())):
    '''
    create the group genesis file in meta
    '''
    LOGGER.info('create_group_genesis start')
    package_dir = data_dir
    gm_opr = utils.Status.gm_option
    group_id = mgroup.MgroupConf.group_id
    p2p_ip = mgroup.MgroupConf.p2p_ip
    p2p_listen_port = mgroup.MgroupConf.p2p_listen_port
    utils.file_must_not_exists('{}/group.{}.genesis'.format(data_dir, group_id))
    if not os.path.exists(package_dir):
        LOGGER.warning(' %s not existed!', package_dir)
        os.mkdir(data_dir)
    shutil.copy('{}/tpl/group.i.genesis'.format(path.get_path()),
                '{}/group.{}.genesis'.format(package_dir, group_id))
    shutil.copy('{}/tpl/group.i.ini'.format(path.get_path()),
                '{}/group.{}.ini'.format(package_dir, group_id))
    # timestamp in milliseconds (seconds from `date +%s` with "000" appended)
    (status, time_stamp) = utils.getstatusoutput('echo $(date +%s"000")')
    if not bool(status):
        CONSOLER.info('generate %s/group.%s.genesis, successful',
                      package_dir, group_id)
    else:
        LOGGER.error(
            ' Generate %s/group.%s.genesis failed! Please check your environment.',
            package_dir, group_id)
        raise MCError(
            ' Generate %s/group.%s.genesis failed, timestamp is %s!'
            % (package_dir, group_id, time_stamp))
    group_cfg = configparser.ConfigParser()
    with open('{}/group.{}.genesis'.format(package_dir, group_id), 'r') as config_file:
        group_cfg.readfp(config_file)
    for node_idx, _in in enumerate(p2p_ip):
        try:
            if gm_opr:
                node_id = config.get_nodeid_str(
                    '{}/meta/gmcert_{}_{}.crt'.format(
                        path.get_path(), p2p_ip[node_idx], p2p_listen_port[node_idx]))
                # drop the trailing newline and the TASSL warning that may precede the id
                node_id = node_id.strip('\n').replace(
                    'WARNING: can\'t open config file: '
                    '/home/asherli/TASSL/ssl/openssl.cnf', '')
                LOGGER.info('resolve %s/meta/gmcert_%s_%s.crt', path.get_path(),
                            p2p_ip[node_idx], p2p_listen_port[node_idx])
                LOGGER.info("nodeid -> %s", node_id)
                group_cfg.set("consensus", "node.{}".format(node_idx), node_id)
            else:
                node_id = config.get_nodeid_str(
                    '{}/meta/cert_{}_{}.crt'.format(path.get_path(),
                                                    p2p_ip[node_idx],
                                                    p2p_listen_port[node_idx]))
                LOGGER.info('resolve %s/meta/cert_%s_%s.crt', path.get_path(),
                            p2p_ip[node_idx], p2p_listen_port[node_idx])
                LOGGER.info("nodeid -> %s", node_id)
                group_cfg.set("consensus", "node.{}".format(node_idx), node_id)
        except Exception as group_exp:
            LOGGER.error('create group genesis failed! exception is %s', group_exp)
            raise MCError('create group genesis failed! exception is %s' % group_exp)
    group_cfg.set("group", "id", group_id)
    group_cfg.set("group", "timestamp", time_stamp)
    with open('{}/group.{}.genesis'.format(package_dir, group_id), 'w') as config_file:
        group_cfg.write(config_file)
    shutil.copy('{}/group.{}.genesis'.format(package_dir, group_id),
                '{}/meta/group.{}.genesis'.format(path.get_path(), group_id))
    LOGGER.info('create_group_genesis end')
def build_package_only(_data_dir):
    """build node packages and their config.ini (no group genesis)

    Keyword Arguments:
        _data_dir {PATH} -- output dir (default: {data})

    Raises:
        MCError -- output dir already exists or config.ini handling failed
    """
    LOGGER.info("build_package_only start ")
    p2p_listen_port = mconf.MchainConf.p2p_listen_port
    jsonrpc_listen_port = mconf.MchainConf.jsonrpc_listen_port
    channel_listen_port = mconf.MchainConf.channel_listen_port
    p2p_ip = mconf.MchainConf.p2p_ip
    rpc_ip = mconf.MchainConf.rpc_ip
    channel_ip = mconf.MchainConf.channel_ip
    peers = mconf.MchainConf.peers
    meta_dir = '{}/meta'.format(path.get_path())
    conf_dir = meta_dir
    package_dir = _data_dir
    gm_opr = utils.Status.gm_option
    if os.path.exists(package_dir):
        LOGGER.error(' %s existed, maybe u had created it!', package_dir)
        raise MCError(' %s existed, maybe u had created it!' % package_dir)
    os.mkdir(package_dir)
    if gm_opr:
        shutil.copy('{}/tpl/config.ini.gm'.format(path.get_path()),
                    '{}/.config.ini'.format(conf_dir))
    else:
        shutil.copy('{}/tpl/config.ini'.format(path.get_path()),
                    '{}/.config.ini'.format(conf_dir))
    fin_p2p_ip = []
    if not peers:
        LOGGER.warning('section peers not existed!')
        CONSOLER.warn('section peers not existed!')
    else:
        for _, peer in enumerate(peers):
            fin_p2p_ip.append(peer)
    # init config.ini & node package
    for my_node_index, node_ip in enumerate(p2p_ip):
        LOGGER.info("p2p_ip -> %s", node_ip)
        CONSOLER.info(' Generate %s/node_%s_%s ', package_dir,
                      node_ip, p2p_listen_port[my_node_index])
        node_dir = '{}/node_{}_{}'.format(package_dir, node_ip,
                                          p2p_listen_port[my_node_index])
        os.mkdir(node_dir)
        os.mkdir('{}/scripts'.format(node_dir))
        shutil.copy('{}/tpl/start.sh'.format(path.get_path()),
                    '{}/start.sh'.format(node_dir))
        shutil.copy('{}/tpl/stop.sh'.format(path.get_path()),
                    '{}/stop.sh'.format(node_dir))
        shutil.copy('{}/tpl/load_new_groups.sh'.format(path.get_path()),
                    '{}/scripts/load_new_groups.sh'.format(node_dir))
        shutil.copy('{}/tpl/reload_whitelist.sh'.format(path.get_path()),
                    '{}/scripts/reload_whitelist.sh'.format(node_dir))
        shutil.copy('{}/fisco-bcos'.format(meta_dir),
                    '{}/fisco-bcos'.format(node_dir))
        os.mkdir('{}/conf'.format(node_dir))
        try:
            # cp config.ini
            shutil.copy('{}/.config.ini'.format(conf_dir),
                        '{}/config.ini'.format(node_dir))
        except Exception as build_exp:
            LOGGER.error(' exception is %s', build_exp)
            utils.delete_data(package_dir)
            raise MCError(' exception is %s' % build_exp)
        node_cfg = configparser.ConfigParser(allow_no_value=True)
        try:
            with codecs.open('{}/config.ini'.format(node_dir), 'r',
                             encoding='utf-8') as config_file:
                node_cfg.readfp(config_file)
        except Exception as build_exp:
            LOGGER.error(' open config.ini file failed, exception is %s', build_exp)
            utils.delete_data(package_dir)
            raise MCError(' open config.ini file failed, exception is %s' % build_exp)
        if len(rpc_ip) > my_node_index:
            node_cfg.set("rpc", "jsonrpc_listen_ip", rpc_ip[my_node_index])
        else:
            node_cfg.set("rpc", "jsonrpc_listen_ip", "127.0.0.1")
        if len(channel_ip) > my_node_index:
            node_cfg.set("rpc", "channel_listen_ip", channel_ip[my_node_index])
        else:
            node_cfg.set("rpc", "channel_listen_ip", "0.0.0.0")
        node_cfg.set("rpc", "channel_listen_port",
                     channel_listen_port[my_node_index])
        node_cfg.set("rpc", "jsonrpc_listen_port",
                     jsonrpc_listen_port[my_node_index])
        node_cfg.set("p2p", "listen_port", p2p_listen_port[my_node_index])
        with open('{}/config.ini'.format(node_dir), 'w') as config_file:
            node_cfg.write(config_file)
    # set p2p ip in config.ini
    for my_node_index, ip_item in enumerate(p2p_ip):
        node_cfg = configparser.ConfigParser(allow_no_value=True)
        if not utils.valid_ip(ip_item):
            LOGGER.error(' init config.ini file failed, found ip => %s', ip_item)
            utils.delete_data(package_dir)
            raise MCError(' init config.ini file failed, found ip => %s' % ip_item)
        node_dir = '{}/node_{}_{}'.format(package_dir, ip_item,
                                          p2p_listen_port[my_node_index])
        try:
            with codecs.open('{}/config.ini'.format(node_dir), 'r',
                             encoding='utf-8') as config_file:
                node_cfg.readfp(config_file)
        except Exception as build_exp:
            LOGGER.error(' open config.ini file failed, exception is %s', build_exp)
            utils.delete_data(package_dir)
            raise MCError(' open config.ini file failed, exception is %s' % build_exp)
        # write p2p ip:port into config.ini
        for ip_idx, set_item in enumerate(p2p_ip):
            fin_p2p_ip.append("{}:{}".format(set_item, p2p_listen_port[ip_idx]))
        fin_p2p_ip = list(set(fin_p2p_ip))
        for index, p2p_section in enumerate(fin_p2p_ip):
            node_cfg.set("p2p", "node.{}".format(index), '{}'.format(p2p_section))
        node_cfg.set('certificate_whitelist',
                     '; cal.0 should be nodeid, nodeid\'s length is 128')
        node_cfg.set('certificate_whitelist', ';cal.0=')
        node_cfg.set('certificate_blacklist',
                     '; crl.0 should be nodeid, nodeid\'s length is 128')
        node_cfg.set('certificate_blacklist', ';crl.0=')
        with open('{}/config.ini'.format(node_dir), 'w') as config_file:
            node_cfg.write(config_file)
    os.mkdir(package_dir + '/scripts/')
    shutil.copy('{}/scripts/install.sh'.format(path.get_path()),
                package_dir + '/scripts/')
    shutil.copy('{}/scripts/pack.sh'.format(path.get_path()),
                package_dir + '/scripts/')
    shutil.copy('{}/tpl/start_all.sh'.format(path.get_path()), package_dir)
    shutil.copy('{}/tpl/stop_all.sh'.format(path.get_path()), package_dir)
    shutil.copytree('{}/scripts/monitor'.format((path.get_path())),
                    '{}/monitor'.format(package_dir))
    LOGGER.info("build_package_only end!")
def deploy_key(_get_dir, _send_dir):
    """copy node keys and nodeids from generated certs into node packages

    Arguments:
        _get_dir {PATH} -- directory holding the generated node cert dirs
        _send_dir {PATH} -- directory holding the node packages (with conf/)
    """
    utils.dir_must_exists(_get_dir)
    utils.dir_must_exists(_send_dir)
    meta_path = _get_dir
    data_path = _send_dir
    get_node_list = []
    send_node_list = []
    for _, dirs, _ in os.walk(meta_path, topdown=True,
                              onerror=None, followlinks=False):
        for name in dirs:
            get_node_list.append(name)
    for _, dirs, _ in os.walk(data_path, topdown=True,
                              onerror=None, followlinks=False):
        for name in dirs:
            send_node_list.append(name)
    LOGGER.info("get cert in %s!", get_node_list)
    LOGGER.info("send cert to %s!", send_node_list)
    for node_dir in get_node_list:
        if not utils.valid_node_dir(node_dir):
            continue
        if utils.Status.gm_option:
            utils.file_must_exists('{}/{}/gmnode.key'.format(meta_path, node_dir))
            utils.file_must_exists('{}/{}/gmnode.nodeid'.format(meta_path, node_dir))
            if os.path.exists('{}/{}/conf'.format(data_path, node_dir)):
                LOGGER.info("send cert from %s to %s", data_path, node_dir)
                shutil.copyfile(
                    '{}/{}/gmnode.key'.format(meta_path, node_dir),
                    '{}/{}/conf/gmnode.key'.format(data_path, node_dir))
                shutil.copyfile(
                    '{}/{}/gmnode.nodeid'.format(meta_path, node_dir),
                    '{}/{}/conf/gmnode.nodeid'.format(data_path, node_dir))
                shutil.copyfile(
                    '{}/{}/gmennode.key'.format(meta_path, node_dir),
                    '{}/{}/conf/gmennode.key'.format(data_path, node_dir))
                shutil.copyfile(
                    '{}/{}/gmennode.crt'.format(meta_path, node_dir),
                    '{}/{}/conf/gmennode.crt'.format(data_path, node_dir))
                shutil.copytree(
                    '{}/{}/origin_cert'.format(meta_path, node_dir),
                    '{}/{}/conf/origin_cert'.format(data_path, node_dir))
        else:
            utils.file_must_exists('{}/{}/node.key'.format(meta_path, node_dir))
            utils.file_must_exists('{}/{}/node.nodeid'.format(meta_path, node_dir))
            if os.path.exists('{}/{}/conf'.format(data_path, node_dir)):
                LOGGER.info("send cert from %s to %s", data_path, node_dir)
                shutil.copyfile(
                    '{}/{}/node.key'.format(meta_path, node_dir),
                    '{}/{}/conf/node.key'.format(data_path, node_dir))
                shutil.copyfile(
                    '{}/{}/node.nodeid'.format(meta_path, node_dir),
                    '{}/{}/conf/node.nodeid'.format(data_path, node_dir))
def concatenate_cfg(cfg_file, cfg_file_get):
    """combine the [p2p] peer lists of two config.ini files

    Arguments:
        cfg_file {path} -- config.ini to read peers from
        cfg_file_get {path} -- config.ini to merge into and rewrite

    Raises:
        MCError -- config.ini missing or unreadable
    """
    LOGGER.info("concatenate two config.ini now!")
    meta = cfg_file
    data = cfg_file_get
    utils.file_must_exists(meta)
    utils.file_must_exists(data)
    p2p_get = []
    p2p_get_ip = []
    p2p_send = []
    p2p_send_ip = []
    p2p_cfg = configparser.ConfigParser()
    try:
        with codecs.open(meta, 'r', encoding='utf-8') as config_file:
            p2p_cfg.readfp(config_file)
    except Exception as build_exp:
        LOGGER.error(' open config.ini file failed, exception is %s', build_exp)
        raise MCError(' open config.ini file failed, exception is %s' % build_exp)
    p2p_get = p2p_cfg.items('p2p')
    # drop listen_ip and listen_port, keep only the node.N entries
    p2p_get.pop(0)
    p2p_get.pop(0)
    LOGGER.info("get node is %s!", p2p_get)
    for node_tuple in p2p_get:
        p2p_get_ip.append(node_tuple[1])
    LOGGER.info("get node ip is %s!", p2p_get_ip)
    try:
        with codecs.open(data, 'r', encoding='utf-8') as config_file:
            p2p_cfg.readfp(config_file)
    except Exception as build_exp:
        LOGGER.error(' open config.ini file failed, exception is %s', build_exp)
        raise MCError(' open config.ini file failed, exception is %s' % build_exp)
    p2p_send = p2p_cfg.items('p2p')
    p2p_send.pop(0)
    p2p_send.pop(0)
    LOGGER.info("send node is %s!", p2p_send)
    for node_tuple in p2p_send:
        p2p_send_ip.append(node_tuple[1])
    LOGGER.info("send node ip is %s!", p2p_send_ip)
    p2p_send_ip = list(set(p2p_get_ip + p2p_send_ip))
    LOGGER.info("final node ip is %s!", p2p_send_ip)
    for ip_idx, p2p_ip in enumerate(p2p_send_ip):
        p2p_cfg.set("p2p", "node.{}".format(ip_idx), p2p_ip)
    with open(data, 'w') as config_file:
        p2p_cfg.write(config_file)
    LOGGER.info("concatenate two config.ini done! output => %s", data)
def build_config_ini(_data_dir):
    """build node packages with config.ini, group genesis and certs

    Keyword Arguments:
        _data_dir {PATH} -- output dir (default: {data})

    Raises:
        MCError -- output dir exists, certs missing, or config.ini handling failed
    """
    LOGGER.info("build_config_ini start ")
    p2p_listen_port = mconf.MchainConf.p2p_listen_port
    jsonrpc_listen_port = mconf.MchainConf.jsonrpc_listen_port
    channel_listen_port = mconf.MchainConf.channel_listen_port
    p2p_ip = mconf.MchainConf.p2p_ip
    rpc_ip = mconf.MchainConf.rpc_ip
    peers = mconf.MchainConf.peers
    meta_dir = '{}/meta'.format(path.get_path())
    conf_dir = meta_dir
    package_dir = _data_dir
    gm_opr = utils.Status.gm_option
    group_id = mconf.MchainConf.group_id
    utils.file_must_exists('{}/group.{}.genesis'.format(meta_dir, group_id))
    if os.path.exists(package_dir):
        LOGGER.error(' %s existed, maybe u had created it!', package_dir)
        raise MCError(' %s existed, maybe u had created it!' % package_dir)
    os.mkdir(package_dir)
    default_cfg = configparser.ConfigParser()
    if gm_opr:
        shutil.copy('{}/tpl/config.ini.gm'.format(path.get_path()),
                    '{}/.config.ini'.format(conf_dir))
    else:
        shutil.copy('{}/tpl/config.ini'.format(path.get_path()),
                    '{}/.config.ini'.format(conf_dir))
    try:
        with codecs.open('{}/.config.ini'.format(conf_dir), 'r',
                         encoding='utf-8') as config_file:
            default_cfg.readfp(config_file)
    except Exception as build_exp:
        LOGGER.error(' open config.ini file failed, exception is %s', build_exp)
        raise MCError(' open config.ini file failed, exception is %s' % build_exp)
    fin_p2p_ip = []
    if not peers:
        LOGGER.warning('section peers not existed!')
        CONSOLER.warn('section peers not existed!')
    else:
        for _, peer in enumerate(peers):
            fin_p2p_ip.append(peer)
    # init config.ini & node package
    for my_node_index, node_ip in enumerate(p2p_ip):
        LOGGER.info("p2p_ip -> %s", node_ip)
        try:
            if utils.Status.gm_option:
                utils.file_must_exists('{}/gmcert_{}_{}.crt'.format(
                    conf_dir, node_ip, p2p_listen_port[my_node_index]))
            else:
                utils.file_must_exists('{}/cert_{}_{}.crt'.format(
                    conf_dir, node_ip, p2p_listen_port[my_node_index]))
        except Exception as build_exp:
            LOGGER.error('%s', build_exp)
            raise MCError('%s' % build_exp)
        CONSOLER.info(' Generate %s/node_%s_%s ', package_dir,
                      node_ip, p2p_listen_port[my_node_index])
        node_dir = '{}/node_{}_{}'.format(package_dir, node_ip,
                                          p2p_listen_port[my_node_index])
        os.mkdir(node_dir)
        shutil.copy('{}/tpl/start.sh'.format(path.get_path()),
                    '{}/start.sh'.format(node_dir))
        shutil.copy('{}/tpl/stop.sh'.format(path.get_path()),
                    '{}/stop.sh'.format(node_dir))
        shutil.copy('{}/fisco-bcos'.format(meta_dir),
                    '{}/fisco-bcos'.format(node_dir))
        os.mkdir('{}/conf'.format(node_dir))
        try:
            # get node cert and group files
            shutil.copy('{}/.config.ini'.format(conf_dir),
                        '{}/config.ini'.format(node_dir))
            shutil.copy('{}/group.{}.genesis'.format(conf_dir, group_id),
                        '{}/conf/group.{}.genesis'.format(node_dir, group_id))
            shutil.copy('{}/tpl/group.i.ini'.format(path.get_path()),
                        '{}/conf/group.{}.ini'.format(node_dir, group_id))
            if gm_opr:
                get_node_cert('{}/gmcert_{}_{}.crt'.format(
                    meta_dir, node_ip, p2p_listen_port[my_node_index]),
                    '{}/conf/gmnode.crt'.format(node_dir))
                shutil.copyfile('{}/gmca.crt'.format(meta_dir),
                                '{}/conf/gmca.crt'.format(node_dir))
            else:
                get_node_cert('{}/cert_{}_{}.crt'.format(
                    meta_dir, node_ip, p2p_listen_port[my_node_index]),
                    '{}/conf/node.crt'.format(node_dir))
                shutil.copyfile('{}/ca.crt'.format(meta_dir),
                                '{}/conf/ca.crt'.format(node_dir))
        except Exception as build_exp:
            LOGGER.error(' get node.crt failed! exception is %s', build_exp)
            utils.delete_data(package_dir)
            raise MCError(' get node.crt failed! exception is %s' % build_exp)
        node_cfg = configparser.ConfigParser()
        try:
            with codecs.open('{}/config.ini'.format(node_dir), 'r',
                             encoding='utf-8') as config_file:
                node_cfg.readfp(config_file)
        except Exception as build_exp:
            LOGGER.error(' open config.ini file failed, exception is %s', build_exp)
            utils.delete_data(package_dir)
            raise MCError(' open config.ini file failed, exception is %s' % build_exp)
        node_cfg.set("rpc", "listen_ip", rpc_ip[my_node_index])
        node_cfg.set("rpc", "channel_listen_port",
                     channel_listen_port[my_node_index])
        node_cfg.set("rpc", "jsonrpc_listen_port",
                     jsonrpc_listen_port[my_node_index])
        node_cfg.set("p2p", "listen_port", p2p_listen_port[my_node_index])
        with open('{}/config.ini'.format(node_dir), 'w') as config_file:
            node_cfg.write(config_file)
    # set p2p ip in config.ini
    for my_node_index, ip_item in enumerate(p2p_ip):
        node_cfg = configparser.ConfigParser()
        if not utils.valid_ip(ip_item):
            LOGGER.error(' init config.ini file failed, found ip => %s', ip_item)
            utils.delete_data(package_dir)
            raise MCError(' init config.ini file failed, found ip => %s' % ip_item)
        node_dir = '{}/node_{}_{}'.format(package_dir, ip_item,
                                          p2p_listen_port[my_node_index])
        try:
            with codecs.open('{}/config.ini'.format(node_dir), 'r',
                             encoding='utf-8') as config_file:
                node_cfg.readfp(config_file)
        except Exception as build_exp:
            LOGGER.error(' open config.ini file failed, exception is %s', build_exp)
            utils.delete_data(package_dir)
            raise MCError(' open config.ini file failed, exception is %s' % build_exp)
        # write p2p ip:port into config.ini
        for ip_idx, set_item in enumerate(p2p_ip):
            fin_p2p_ip.append("{}:{}".format(set_item, p2p_listen_port[ip_idx]))
        fin_p2p_ip = list(set(fin_p2p_ip))
        for index, p2p_section in enumerate(fin_p2p_ip):
            node_cfg.set("p2p", "node.{}".format(index), '{}'.format(p2p_section))
        with open('{}/config.ini'.format(node_dir), 'w') as config_file:
            node_cfg.write(config_file)
    os.mkdir(package_dir + '/scripts/')
    shutil.copy('{}/scripts/install.sh'.format(path.get_path()),
                package_dir + '/scripts/')
    shutil.copy('{}/scripts/pack.sh'.format(path.get_path()),
                package_dir + '/scripts/')
    shutil.copy('{}/tpl/start_all.sh'.format(path.get_path()), package_dir)
    shutil.copy('{}/tpl/stop_all.sh'.format(path.get_path()), package_dir)
    shutil.copytree('{}/scripts/monitor'.format((path.get_path())),
                    '{}/monitor'.format(package_dir))
    LOGGER.info("build_config_ini end!")
def gen_build_cert(_dir):
    """generate node certs from node_installation.ini and collect a peers.txt

    Arguments:
        _dir {PATH} -- cert output dir

    Raises:
        MCError -- required ca/agency certs missing
    """
    meta_path = '{}/meta'.format(path.get_path())
    cert_path = _dir
    data_path = meta_path
    p2p_listen_port = mconf.MchainConf.p2p_listen_port
    p2p_ip = mconf.MchainConf.p2p_ip
    utils.file_must_not_exists('{}/peers.txt'.format(cert_path))
    if not os.path.exists(cert_path):
        os.mkdir(cert_path)
    if utils.Status.gm_option:
        if not os.path.exists('{}/gmca.crt'.format(meta_path)):
            CONSOLER.error(" gmca.crt not existed")
            utils.delete_data(cert_path)
            raise MCError(' gmca.crt not found!')
    else:
        if not os.path.exists('{}/ca.crt'.format(meta_path)):
            CONSOLER.error(" ca.crt not existed")
            utils.delete_data(cert_path)
            raise MCError(' ca.crt not found!')
    if utils.Status.gm_option:
        if not (os.path.exists('{}/gmagency.key'.format(meta_path))
                and os.path.exists('{}/gmagency.crt'.format(meta_path))):
            CONSOLER.error(" gmagency.crt or gmagency.key not existed")
            utils.delete_data(cert_path)
            raise MCError(' gmagency.crt or gmagency.key not found in %s!' % meta_path)
    else:
        if not (os.path.exists('{}/agency.key'.format(meta_path))
                and os.path.exists('{}/agency.crt'.format(meta_path))):
            CONSOLER.error(" agency.crt or agency.key not existed")
            utils.delete_data(cert_path)
            raise MCError(' agency.crt or agency.key not found in %s!' % meta_path)
    for my_node_index, node_ip in enumerate(p2p_ip):
        LOGGER.info("p2p_ip -> %s", node_ip)
        CONSOLER.info(' Generate %s/node_%s_%s ', data_path,
                      node_ip, p2p_listen_port[my_node_index])
        if utils.Status.gm_option:
            utils.file_must_not_exists('{}/gmcert_{}_{}.crt'.format(
                meta_path, node_ip, p2p_listen_port[my_node_index]))
        else:
            utils.file_must_not_exists('{}/cert_{}_{}.crt'.format(
                meta_path, node_ip, p2p_listen_port[my_node_index]))
        ca.generator_node_ca(
            data_path, '{}/'.format(meta_path),
            'node_{}_{}'.format(node_ip, p2p_listen_port[my_node_index]))
        if utils.Status.gm_option:
            utils.off_gm()
            if os.path.isdir('./.origin_cert'):
                shutil.rmtree('./.origin_cert')
            ca.generator_node_ca('./', meta_path, '.origin_cert')
            shutil.copytree(
                './.origin_cert',
                '{}/node_{}_{}/origin_cert'.format(
                    data_path, node_ip, p2p_listen_port[my_node_index]))
            shutil.rmtree('./.origin_cert')
            utils.set_gm()
            shutil.copyfile(
                '{}/node_{}_{}/gmnode.crt'.format(
                    data_path, node_ip, p2p_listen_port[my_node_index]),
                '{}/gmcert_{}_{}.crt'.format(meta_path, node_ip,
                                             p2p_listen_port[my_node_index]))
            shutil.copyfile(
                '{}/gmcert_{}_{}.crt'.format(meta_path, node_ip,
                                             p2p_listen_port[my_node_index]),
                '{}/gmcert_{}_{}.crt'.format(cert_path, node_ip,
                                             p2p_listen_port[my_node_index]))
        else:
            shutil.copyfile(
                '{}/node_{}_{}/node.crt'.format(
                    data_path, node_ip, p2p_listen_port[my_node_index]),
                '{}/cert_{}_{}.crt'.format(meta_path, node_ip,
                                           p2p_listen_port[my_node_index]))
            shutil.copyfile(
                '{}/cert_{}_{}.crt'.format(meta_path, node_ip,
                                           p2p_listen_port[my_node_index]),
                '{}/cert_{}_{}.crt'.format(cert_path, node_ip,
                                           p2p_listen_port[my_node_index]))
        (status, result) = utils.getstatusoutput(
            'echo {}:{} >> {}/peers.txt'.format(
                node_ip, p2p_listen_port[my_node_index], cert_path))
        LOGGER.info(" status is %s, result is %s", status, result)
    CONSOLER.info(" Generate cert by node_installation.ini successful!")
def parser(mchain):
    """resolve node_installation.ini

    Arguments:
        mchain {string} -- path of node_installation.ini

    Raises:
        MCError -- invalid path or malformed file
    """
    LOGGER.info('node_installation.ini is %s', mchain)
    # resolve configuration
    if not utils.valid_string(mchain):
        LOGGER.error(
            ' node_installation.ini is not a valid path, node_installation.ini is %s',
            mchain)
        raise MCError(
            ' node_installation.ini is not a valid path, node_installation.ini is %s'
            % mchain)
    # read and parse the config file
    config_parser = configparser.ConfigParser(allow_no_value=True)
    try:
        with codecs.open(mchain, 'r', encoding='utf-8') as file_mchain:
            config_parser.readfp(file_mchain)
    except Exception as ini_exp:
        LOGGER.error(
            ' open node_installation.ini file failed, exception is %s', ini_exp)
        raise MCError(
            ' open node_installation.ini file failed, exception is %s' % ini_exp)
    for idx in range(0, 128):
        node_index = ('node{}'.format(idx))
        if config_parser.has_section('group'):
            MchainConf.group_id = config_parser.get('group', 'group_id')
        else:
            LOGGER.error(
                ' invalid node_installation.ini format, group id is %s',
                MchainConf.group_id)
            raise MCError(
                ' invalid node_installation.ini format, group id is %s'
                % MchainConf.group_id)
        if config_parser.has_section(node_index):
            p2p_ip = config_parser.get(node_index, 'p2p_ip')
            rpc_ip = config_parser.get(node_index, 'rpc_ip')
            if not utils.valid_ip(p2p_ip):
                LOGGER.error(
                    ' invalid node_installation.ini format, p2p_ip is %s', p2p_ip)
                raise MCError(
                    ' invalid node_installation.ini format, p2p_ip is %s' % p2p_ip)
            if rpc_ip == "0.0.0.0":
                LOGGER.warning('Your rpc_ip is %s, this is insecure', rpc_ip)
                CONSOLER.warning(
                    ' \033[1;31m Your rpc_ip is %s, this is insecure \033[0m', rpc_ip)
            elif not utils.valid_ip(rpc_ip):
                LOGGER.error(
                    ' invalid node_installation.ini format, rpc_ip is %s', rpc_ip)
                raise MCError(
                    ' invalid node_installation.ini format, rpc_ip is %s' % rpc_ip)
            p2p_listen_port = config_parser.get(node_index, 'p2p_listen_port')
            jsonrpc_listen_port = config_parser.get(node_index, 'jsonrpc_listen_port')
            channel_listen_port = config_parser.get(node_index, 'channel_listen_port')
            if not (utils.valid_string(p2p_listen_port)
                    and utils.valid_string(jsonrpc_listen_port)
                    and utils.valid_string(channel_listen_port)):
                LOGGER.error(
                    'mchain bad format, p2p_listen_port is %s, '
                    'jsonrpc_port is %s, channel_port is %s',
                    p2p_listen_port, jsonrpc_listen_port, channel_listen_port)
                raise MCError('mchain bad format, p2p_listen_port is %s, '
                              'jsonrpc_port is %s, channel_port is %s'
                              % (p2p_listen_port, jsonrpc_listen_port,
                                 channel_listen_port))
            MchainConf.p2p_ip.append(p2p_ip)
            MchainConf.rpc_ip.append(rpc_ip)
            MchainConf.p2p_listen_port.append(p2p_listen_port)
            MchainConf.jsonrpc_listen_port.append(jsonrpc_listen_port)
            MchainConf.channel_listen_port.append(channel_listen_port)
        else:
            LOGGER.warning(' node%s not existed, break!', idx)
            break
    LOGGER.info('group_id is %s', MchainConf.group_id)
    LOGGER.info('p2p_ip is %s', MchainConf.p2p_ip)
    LOGGER.info('rpc_ip is %s', MchainConf.rpc_ip)
    LOGGER.info('p2p_listen_port is %s', MchainConf.p2p_listen_port)
    LOGGER.info('jsonrpc_listen_port is %s', MchainConf.jsonrpc_listen_port)
    LOGGER.info('channel_listen_port is %s', MchainConf.channel_listen_port)
    LOGGER.info('peers is %s', MchainConf.peers)
    LOGGER.info('node_installation.ini end, result is %s', MchainConf())
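# Hedged example (illustrative only): the node_installation.ini layout this parser
# expects -- a [group] section plus one [nodeN] section per node, numbered from
# node0. The addresses and ports below are placeholders, not project defaults.
#
#   [group]
#   group_id=1
#
#   [node0]
#   p2p_ip=127.0.0.1
#   rpc_ip=127.0.0.1
#   p2p_listen_port=30300
#   channel_listen_port=20200
#   jsonrpc_listen_port=8545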