def write_common(transforms, init, values):
    """Create the transform package: __init__, the shared 'common'
    sub-package and its entities module.

    When ``values['example']`` is truthy, a ``helloworld`` example
    transform is generated alongside the common package.
    """
    if values['example']:
        targets = [
            (('__init__.py',), init + generate_all('common', 'helloworld')),
            (('helloworld.py',), read_template('transform', values)),
        ]
    else:
        targets = [(('__init__.py',), init + generate_all('common'))]
    # The 'common' sub-package is generated in both cases.
    targets += [
        (('common', '__init__.py'), init + generate_all('entities')),
        (('common', 'entities.py'), read_template('entities', values)),
    ]
    for parts, content in targets:
        write_template(path.join(transforms, *parts), content)
def create_k8s_dashboard(self):
    """Render and apply the kubernetes dashboard service and deployment.

    The service creation is retried (up to 5 kubectl attempts); the
    deployment is applied once.  Rendered manifests are removed afterwards.
    """
    cluster_cfg = self.cluster_object_model
    self.logger.info(
        "Create kubernetes dashboard deployment for kuberentes cluster.")
    template_map = {"cluster_cfg": cluster_cfg}

    # Dashboard service: retried apply.
    self.logger.info("Create dashboard service.")
    service_template = common.read_template(
        "deployment/k8sPaiLibrary/template/dashboard-service.yaml.template")
    common.write_generated_file(
        common.generate_from_template_dict(service_template, template_map),
        "dashboard-service.yaml")
    common.execute_shell_retry(
        "kubectl apply --overwrite=true -f dashboard-service.yaml",
        "Failed to create dashboard-service",
        5)
    os.remove("dashboard-service.yaml")

    # Dashboard deployment: single apply.
    self.logger.info("Create dashboard deployment.")
    deployment_template = common.read_template(
        "deployment/k8sPaiLibrary/template/dashboard-deployment.yaml.template")
    common.write_generated_file(
        common.generate_from_template_dict(deployment_template, template_map),
        "dashboard-deployment.yaml")
    common.execute_shell(
        "kubectl apply --overwrite=true -f dashboard-deployment.yaml",
        "Failed to create dashboard-deployment")
    os.remove("dashboard-deployment.yaml")
def create_k8s_dashboard(self):
    """Render the dashboard service and deployment manifests and create
    them with kubectl, deleting each rendered file afterwards."""
    self.logger.info(
        "Create kubernetes dashboard deployment for kuberentes cluster.")
    render_context = {"clusterconfig": self.cluster_config['clusterinfo']}
    # The two manifests are handled identically: render, create, clean up.
    for component in ("service", "deployment"):
        self.logger.info("Create dashboard %s." % component)
        yaml_name = "dashboard-%s.yaml" % component
        template_data = common.read_template(
            "k8sPaiLibrary/template/%s.template" % yaml_name)
        generated_data = common.generate_from_template_dict(
            template_data, render_context)
        common.write_generated_file(generated_data, yaml_name)
        common.execute_shell("kubectl create -f %s" % yaml_name,
                             "Failed to create dashboard-%s" % component)
        os.remove(yaml_name)
def write_setup(base_dir, values):
    """Write the plume deployment files into *base_dir*.

    Generates canari.conf, the plume WSGI entry point (plume.py) and the
    plume.sh launcher, then marks plume.sh executable (rwxr-xr-x).
    """
    plume_sh = os.path.join(base_dir, 'plume.sh')
    write_template(os.path.join(base_dir, 'canari.conf'),
                   read_template('canari', values))
    write_template(os.path.join(base_dir, 'plume.py'),
                   read_template('plume_wsgi', values))
    write_template(plume_sh, read_template('plume_sh', values))
    # 0o755: the old `0755` octal literal is Python-2-only syntax (invalid
    # in Python 3); 0o755 is the same value and valid on 2.6+ and 3.
    os.chmod(plume_sh, 0o755)
def write_setup(package_name, values):
    """Render the packaging boilerplate (.canari, setup.py, README.md,
    MANIFEST.in) into the new package directory."""
    for filename, template in (('.canari', '_canari'),
                               ('setup.py', 'setup'),
                               ('README.md', 'README'),
                               ('MANIFEST.in', 'MANIFEST')):
        write_template(path.join(package_name, filename),
                       read_template(template, values))
def write_common(transforms, init, values):
    """Populate the transforms package, including the optional helloworld
    example transform and the always-present common sub-package."""
    join = path.join
    if values["example"]:
        # Example requested: register both 'common' and 'helloworld'.
        write_template(join(transforms, "__init__.py"),
                       init + generate_all("common", "helloworld"))
        write_template(join(transforms, "helloworld.py"),
                       read_template("transform", values))
    else:
        write_template(join(transforms, "__init__.py"),
                       init + generate_all("common"))
    write_template(join(transforms, "common", "__init__.py"),
                   init + generate_all("entities"))
    write_template(join(transforms, "common", "entities.py"),
                   read_template("entities", values))
def write_resources(package_name, resources, init, values):
    """Create the resources package: top-level __init__, one __init__ per
    sub-package, and the package configuration file under etc/."""
    write_template(path.join(resources, '__init__.py'),
                   init + generate_all('etc', 'images', 'maltego', 'external'))
    # Each sub-package only needs the bare init content.
    for subpackage in ('etc', 'images', 'external', 'maltego'):
        write_template(path.join(resources, subpackage, '__init__.py'), init)
    write_template(path.join(resources, 'etc', '%s.conf' % package_name),
                   read_template('conf', values))
def kubectl_configuration_generate(self):
    """Generate the kubectl config file (~/.kube/config) for the dev-box.

    Uses the loaded cluster configuration when available; otherwise prompts
    the operator for the api-server (or load-balancer) address.
    """
    com = self.cluster_config
    self.logger.info("Generate the configuation file of kubectl.")
    # BUGFIX: use identity comparison for None instead of `!= None` (PEP 8).
    if com is not None:
        self.logger.info("Cluster configuration is detected.")
        self.logger.info(
            "Generate the KUBECONIFG based on the cluster configuration.")
        dict_map = {"cluster_cfg": com}
    else:
        self.logger.warning("Unable to find the cluster configuration.")
        self.logger.warning(
            "Please enter the required infomation, when prompted.")
        # NOTE: raw_input is Python 2 — consistent with the rest of this
        # code base.
        user_input = raw_input(
            "Please input the api-server (or the api servers' load-balancer) address in your cluster: "
        )
        # Minimal synthetic configuration with just the api-server address.
        dict_map = {
            "cluster_cfg": {
                "kubernetes": {
                    "api-servers-ip": user_input
                }
            }
        }
    file_path = "deployment/k8sPaiLibrary/template/config.template"
    template_data = common.read_template(file_path)
    generated_data = common.generate_from_template_dict(
        template_data, dict_map)
    kube_config_path = os.path.expanduser("~/.kube")
    common.write_generated_file(generated_data,
                                "{0}/config".format(kube_config_path))
    self.logger.info("Successfully configure kubeconfig in the dev-box.")
def run(args):
    """Entry point for package creation: build a fresh canari transform
    package skeleton and write all its boilerplate files."""
    opts = parser.parse_args(args)
    package_name = opts.package
    project = package_name.capitalize()
    user = getuser()
    values = {
        'package': package_name,
        'entity': 'My%sEntity' % project,
        'base_entity': '%sEntity' % project,
        'project': project,
        'author': user,
        'year': datetime.now().year,
        'namespace': package_name,
        'email': '',
        'maintainer': user,
        'example': True,
        'description': '',
        'canari_version': canari.__version__
    }
    ask_user(values)

    base = path.join(package_name, 'src', package_name)
    transforms = path.join(base, 'transforms')
    resources = path.join(base, 'resources')

    # Guard clause: refuse to clobber an existing directory.
    if path.exists(package_name):
        print('A directory with the name %s already exists... exiting' % package_name)
        exit(-1)
    print('creating skeleton in %s' % package_name)
    build_skeleton(
        package_name,
        [package_name, 'src'],
        [package_name, 'maltego'],
        base,
        transforms,
        [transforms, 'common'],
        resources,
        [resources, 'etc'],
        [resources, 'images'],
        [resources, 'external'],
        [resources, 'maltego']
    )

    init = read_template('__init__', values)
    write_setup(package_name, values)
    write_root(base, init)
    write_resources(package_name, resources, init, values)
    write_common(transforms, init, values)
    print('done!')
def run(args):
    """Create a brand new canari transform package skeleton on disk."""
    opts = parser.parse_args(args)
    package_name = opts.package
    title = package_name.capitalize()
    # Template context shared by every generated file.
    context = {
        "package": package_name,
        "entity": "My%sEntity" % title,
        "base_entity": "%sEntity" % title,
        "project": title,
        "author": getuser(),
        "year": datetime.now().year,
        "namespace": package_name,
        "email": "",
        "maintainer": getuser(),
        "example": True,
        "description": "",
        "canari_version": __version__,
    }
    ask_user(context)

    base = path.join(package_name, "src", package_name)
    transforms = path.join(base, "transforms")
    resources = path.join(base, "resources")

    if not path.exists(package_name):
        print("creating skeleton in %s" % package_name)
        skeleton_layout = (
            package_name,
            [package_name, "src"],
            [package_name, "maltego"],
            base,
            transforms,
            [transforms, "common"],
            resources,
            [resources, "etc"],
            [resources, "images"],
            [resources, "external"],
            [resources, "maltego"],
        )
        build_skeleton(*skeleton_layout)
    else:
        print("A directory with the name %s already exists... exiting" % package_name)
        exit(-1)

    init = read_template("__init__", context)
    write_setup(package_name, context)
    write_root(base, init)
    write_resources(package_name, resources, init, context)
    write_common(transforms, init, context)
    print("done!")
def run(args):
    """Build the directory skeleton and boilerplate for a new transform
    package named after ``opts.package``."""
    opts = parser.parse_args(args)
    package_name = opts.package
    cap = package_name.capitalize()
    author = getuser()
    values = dict(
        package=package_name,
        entity='My%sEntity' % cap,
        base_entity='%sEntity' % cap,
        project=cap,
        author=author,
        year=datetime.now().year,
        namespace=package_name,
        email='',
        maintainer=author,
        example=True,
        description='',
        canari_version=canari.__version__,
    )
    ask_user(values)

    base = path.join(package_name, 'src', package_name)
    transforms = path.join(base, 'transforms')
    resources = path.join(base, 'resources')

    if not path.exists(package_name):
        print('creating skeleton in %s' % package_name)
        build_skeleton(
            package_name,
            [package_name, 'src'],
            [package_name, 'maltego'],
            base,
            transforms,
            [transforms, 'common'],
            resources,
            [resources, 'etc'],
            [resources, 'images'],
            [resources, 'external'],
            [resources, 'maltego'],
        )
    else:
        print('A directory with the name %s already exists... exiting' % package_name)
        exit(-1)

    init = read_template('__init__', values)
    write_setup(package_name, values)
    write_root(base, init)
    write_resources(package_name, resources, init, values)
    write_common(transforms, init, values)
    print('done!')
def write_resources(package_name, resources, init, values):
    """Write the resources package __init__ files and the package conf."""
    write_template(path.join(resources, "__init__.py"),
                   init + generate_all("etc", "images", "maltego", "external"))
    # Bare __init__ for every sub-package, in the original write order.
    for sub in ("etc", "images", "external", "maltego"):
        write_template(path.join(resources, sub, "__init__.py"), init)
    write_template(path.join(resources, "etc", "%s.conf" % package_name),
                   read_template("conf", values))
def kubectl_configuration_generate(self):
    """Render the kubectl config template into ~/.kube/config."""
    self.logger.info("Generate the configuation file of kubectl.")
    template_data = common.read_template(
        "k8sPaiLibrary/template/config.template")
    rendered = common.generate_from_template_dict(
        template_data,
        {"clusterconfig": self.cluster_config['clusterinfo']})
    target_dir = os.path.expanduser("~/.kube")
    common.write_generated_file(rendered, "{0}/config".format(target_dir))
def create_kube_proxy(self):
    """Render the kube-proxy manifest and create it with kubectl."""
    self.logger.info("Create kube-proxy daemon for kuberentes cluster.")
    manifest = "kube-proxy.yaml"
    rendered = common.generate_from_template_dict(
        common.read_template(
            "k8sPaiLibrary/template/kube-proxy.yaml.template"),
        {"clusterconfig": self.cluster_config['clusterinfo']})
    common.write_generated_file(rendered, manifest)
    common.execute_shell("kubectl create -f kube-proxy.yaml",
                         "Failed to create kube-proxy")
    # The rendered manifest is only needed for the kubectl call.
    os.remove(manifest)
def create_kube_proxy(self):
    """Apply the kube-proxy manifest, retrying kubectl up to 5 times."""
    self.logger.info("Create kube-proxy daemon for kuberentes cluster.")
    template_path = "deployment/k8sPaiLibrary/template/kube-proxy.yaml.template"
    rendered = common.generate_from_template_dict(
        common.read_template(template_path),
        {"cluster_cfg": self.cluster_object_model})
    common.write_generated_file(rendered, "kube-proxy.yaml")
    common.execute_shell_retry(
        "kubectl apply --overwrite=true -f kube-proxy.yaml",
        "Failed to create kube-proxy",
        5)
    os.remove("kube-proxy.yaml")
def run(args):
    """Add a new transform module to an existing transform package.

    Validates the transform name, writes the module from the
    'newtransform' template, and registers it in the package __init__'s
    __all__ list.
    """
    opts = parse_args(args)
    initf = path.join(opts.transform_dir, '__init__.py')
    transform = opts.transform if not opts.transform.endswith('.py') else opts.transform[:-3]

    # A dot would break the generated import entry in __all__.
    if '.' in transform:
        print("Transform name (%s) cannot have a dot ('.')." % repr(transform))
        exit(-1)
    elif not transform:
        print("You must specify a valid transform name.")
        exit(-1)

    directory = opts.transform_dir
    transformf = path.join(
        directory,
        opts.transform if opts.transform.endswith('.py') else '%s.py' % opts.transform
    )
    if not path.exists(initf):
        print('Directory %s does not appear to be a python package directory... quitting!' % repr(opts.transform_dir))
        exit(-1)
    if path.exists(transformf):
        print('Transform %s already exists... quitting' % repr(transformf))
        exit(-1)

    values = init_pkg()
    write_template(transformf, read_template('newtransform', values))

    print('updating %s' % initf)
    # BUGFIX: the original used the py2-only file() builtin and never closed
    # the read handle; use open() with context managers instead.
    with open(initf) as r:
        init = r.read()
    with open(initf, mode='wb') as w:
        w.write(
            sub(
                r'__all__\s*\=\s*\[',
                '__all__ = [\n %s,' % repr(transform),
                init
            )
        )
    print('done!')
def create_transform(args):
    """Create a new transform module in an existing package and register it.

    Writes <transform>.py from the 'newtransform' template into
    ``args.transform_dir`` and prepends the module name to __all__ in the
    package __init__.
    """
    opts = parse_args(args)
    initf = os.path.join(opts.transform_dir, '__init__.py')
    transform = opts.transform if not opts.transform.endswith('.py') else opts.transform[:-3]

    # A dotted name would produce an invalid __all__ entry.
    if '.' in transform:
        print("Transform name (%s) cannot have a dot ('.')." % repr(transform))
        exit(-1)
    elif not transform:
        print("You must specify a valid transform name.")
        exit(-1)

    directory = opts.transform_dir
    transformf = os.path.join(
        directory,
        opts.transform if opts.transform.endswith('.py') else '%s.py' % opts.transform)
    if not os.path.exists(initf):
        print('Directory %s does not appear to be a python package directory... quitting!' % repr(opts.transform_dir))
        exit(-1)
    if os.path.exists(transformf):
        print('Transform %s already exists... quitting' % repr(transformf))
        exit(-1)

    values = init_pkg()
    write_template(transformf, read_template('newtransform', values))

    print('updating %s' % initf)
    # BUGFIX: replace the removed-in-py3 file() builtin with open(); the
    # original also leaked the read handle (no close).
    with open(initf) as r:
        init = r.read()
    with open(initf, mode='wb') as w:
        w.write(
            re.sub(
                r'__all__\s*\=\s*\[',
                '__all__ = [\n %s,' % repr(transform),
                init
            )
        )
    print('done!')
def run(args):
    """Create a new transform module and add it to the package __all__ list.

    Unlike the name-validating variant, this entry point takes the
    transform name as-is.
    """
    opts = parse_args(args)
    initf = path.join(opts.transform_dir, '__init__.py')
    transform = opts.transform
    directory = opts.transform_dir
    transformf = path.join(
        directory,
        transform if transform.endswith('.py') else '%s.py' % transform
    )
    if not path.exists(initf):
        print('Directory %s does not appear to be a python package directory... quitting!' % repr(opts.transform_dir))
        exit(-1)
    if path.exists(transformf):
        print('Transform %s already exists... quitting' % repr(transformf))
        exit(-1)

    values = init_pkg()
    write_template(transformf, read_template('newtransform', values))

    print('updating %s' % initf)
    # BUGFIX: open() with context managers instead of the deprecated file()
    # builtin, which also leaked the read handle in the original.
    with open(initf) as r:
        init = r.read()
    with open(initf, mode='wb') as w:
        w.write(
            sub(
                r'__all__\s*\=\s*\[',
                '__all__ = [\n %s,' % repr(transform),
                init
            )
        )
    print('done!')
def gen_spec(args):
    """Generate an RPM .spec for a Go project and print a shell build script.

    ``args`` is a docopt-style dict carrying ``<git_hub_url>``,
    ``<version>``, ``--rpmbuild_root``, ``TAG`` and the
    ``--bin``/``--dev``/``--docker`` flags.  The rendered spec is written
    under <rpmbuild_root>/SPECS and the shell commands needed to fetch the
    sources and run rpmbuild are printed to stdout.  Returns None.
    """
    # BUGFIX: removed `go_dev_only = Ture if ...` — `Ture` was an undefined
    # name (NameError on every call) and the variable was never used.
    spec_template = 'go-template.spec'
    spec_script = []
    package_dict = {}
    package_url = args['<git_hub_url>']
    package_dict['_version'] = args['<version>']
    package_dict['package_ver'] = args['<version>'].replace('-', '_')
    rpmbuild_root = args['--rpmbuild_root']
    git_tag = args['TAG']
    # gopkg.in URLs encode the branch in the suffix (repo.vN) and mirror a
    # GitHub repository; rewrite the URL and remember the branch mapping.
    gopkg = ('gopkg.in' in package_url)
    if gopkg:
        pattern = re.compile(r'https://(.*?)/(.*)\.(.*)')
        match = re.match(pattern, package_url)
        if '/' in match.group(2):
            (pkg_project, pkg_repo) = match.group(2).split('/')
        else:
            pkg_project = 'go-' + match.group(2)
            pkg_repo = match.group(2)
        branch = match.group(3)
        branch_git_version = common.get_gopkg_version_dict(package_url)[branch]
        package_url = 'https://github.com/' + pkg_project + '/' + pkg_repo
        (provider, provider_tld) = match.group(1).split('.')
        import_path = (match.group(1) + '/' + match.group(2) + '.' +
                       match.group(3))
    else:
        import_path = '%{provider}.%{provider_tld}/%{project}/%{repo}'
    pattern = re.compile(r'https://(.*?)/(.*?)/(.*)')
    match = re.match(pattern, package_url)
    # BUGFIX: the original tested `'/' in match.groups(3)` — groups() returns
    # a TUPLE, so the membership test could never see a slash and the method
    # was always 'git'.  group(3) is the URL tail that decides wget vs git.
    download_method = 'wget' if '/' in match.group(3) else 'git'
    (package_dict['provider'],
     package_dict['provider_tld']) = match.group(1).split('.')
    if not gopkg:
        provider = package_dict['provider']
    package_dict['project'] = match.group(2)
    package_dict['repo'] = match.group(3).split('/')[0].replace('.git', '')
    if gopkg:
        package_dict['repo_name'] = ('golang-' + provider + '-' +
                                     package_dict['repo'] + '-' + branch)
    else:
        package_dict['repo_name'] = ('golang-' + provider + '-' +
                                     package_dict['project'] + '-' +
                                     package_dict['repo'])
    package_dict['import_path'] = import_path
    # Merge sub-contexts for binaries, the -devel subpackage and systemd.
    package_dict.update(gen_bin_context(package_dict['repo'], args))
    package_dict.update(gen_devel_context(args))
    package_dict.update(gen_systemd_context(args))
    package_dict['today'] = datetime.datetime.now().strftime("%a %b %d %Y")
    # Shell variable holding the version, e.g. repo 'foo-bar' -> FOOBARVER.
    package_ver_var = (package_dict['repo'].replace('-', '').replace(
        '_', '').replace('.', '').upper() + 'VER')
    repo_name = package_dict['repo_name']
    spec_filename = repo_name + '.spec'
    if gopkg:
        repo_filename_prefix = (package_dict['project'] + '-' +
                                package_dict['repo'] + '-' + branch + '-$' +
                                package_ver_var)
    else:
        repo_filename_prefix = (package_dict['project'] + '-' +
                                package_dict['repo'] + '-$' + package_ver_var)
    repo_filename = repo_filename_prefix + '.tar.gz'
    package_dict['source_filename'] = repo_filename_prefix.replace(
        '$' + package_ver_var, '%{_version}')
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, spec_template))),
        package_dict, os.path.join(rpmbuild_root, 'SPECS', spec_filename))
    # In docker mode, specs are staged in ./specs for a later manual build.
    spec_dir = './specs' if args['--docker'] else '$SPECSDIR'
    spec_script.append('')
    spec_script.append('export %s' % package_ver_var + '=' +
                       package_dict['_version'])
    if download_method == 'git':
        spec_script.append('cd /usr/local/src')
        spec_script.append('rm -rf /usr/local/src/' + repo_filename_prefix)
        spec_script.append('git clone --depth=10 -b ' + git_tag + '$' +
                           package_ver_var + ' ' + package_url + '.git ' +
                           repo_filename_prefix)
        if gopkg:
            # Pin the checkout to the exact version tag (create if missing).
            spec_script.append('cd /usr/local/src/' + repo_filename_prefix)
            spec_script.append('(git tag -l |grep ' + branch_git_version +
                               ') && git checkout ' + branch_git_version +
                               ' || git checkout -b ' + branch_git_version)
            spec_script.append('cd ..')
        spec_script.append('tar -zcf $RPMBUILDROOT/SOURCES/' + repo_filename +
                           ' ' + repo_filename_prefix)
        if args['--bin']:
            # Locate the directory containing main.go inside the checkout.
            spec_script.append('export GOBINDIR=`find ' +
                               repo_filename_prefix +
                               '/ -name "main.go"|awk -F"' +
                               repo_filename_prefix +
                               '" {\'$2\'}|awk -F"/main.go" {\'$1\'}`')
        spec_script.append('cd /usr/local/src/' + repo_filename_prefix)
        spec_script.append('export GITCOMMIT=`git rev-parse HEAD`')
        spec_script.append('cd ..')
        if args['--bin']:
            spec_script.append(
                'sed -i -e "s#GOBINDIR#$GOBINDIR#g" $RPMBUILDROOT/SPECS/' +
                spec_filename)
        spec_script.append(
            'sed -i -e "/^%global/s#%global commit.*#%global commit $GITCOMMIT#g" $RPMBUILDROOT/SPECS/'
            + spec_filename)
    else:
        spec_script.append('wget -O $SRCDIR/' + repo_filename + ' ' +
                           package_url)
        spec_script.append('export GITCOMMIT=' + sys.argv[3])
    spec_script.append('')
    spec_script.append('/bin/cp -f $RPMBUILDROOT/SPECS/' + spec_filename +
                       ' ' + spec_dir + '/')
    if args['--docker']:
        # Docker mode: only emit a readme with the commands to run later.
        spec_script.append('echo "" >> readme.txt')
        spec_script.append('echo "rpmbuild -bb \\$RPMBUILDROOT/SPECS/' +
                           spec_filename + '" >> readme.txt')
        spec_script.append(
            'echo "rpm -U \\$(find \\$RPMBUILDROOT/RPMS -iname \\"' +
            repo_name + '-*.rpm\\" -a ! -iname \\"' + repo_name +
            '-*debug*.rpm\\"| tr \\"\\n\\" \\" \\")" >> readme.txt')
    else:
        spec_script.append('/bin/cp -f $SRCDIR/' + repo_filename +
                           ' $RPMBUILDROOT/SOURCES/')
        spec_script.append('/bin/cp -f $SPECSDIR/' + spec_filename +
                           ' $RPMBUILDROOT/SPECS/')
        spec_script.append('rpmbuild -bb $RPMBUILDROOT/SPECS/' + spec_filename)
        spec_script.append('rm -f $RPMDIR/' + repo_name + '-*')
        if args['--bin']:
            spec_script.append('mv -f $RPMBUILDROOT/RPMS/x86_64/' +
                               repo_name + '-* $RPMDIR')
        if args['--dev']:
            spec_script.append('mv -f $RPMBUILDROOT/RPMS/noarch/' +
                               repo_name + '-devel-* $RPMDIR')
            spec_script.append('rpm -U $RPMDIR/' + repo_name + '-devel-*')
        if not args['--bin']:
            spec_script.append('rm -f $RPMBUILDROOT/RPMS/x86_64/' +
                               repo_name + '-*')
    print('\n'.join(spec_script))
    return None
def gen_inv(args):
    """Generate the ansible inventory, playbook and pillar settings for a
    Percona XtraDB Cluster (PXC) deployment and, unless ``template_only``
    is set, run the playbook with pyansible.

    ``args`` is a docopt-style dict (keys like '--data_host').  Returns
    None.
    """
    start_time = time.time()
    playbook_template = 'pxc-playbook.j2'
    setting_template = 'pxc-setting.j2'
    ip_list = []
    hosts_script = []
    pxc_group_script = []
    hosts_script.append('[pxc]')
    # Strip the leading '--' from every docopt option to get plain keys.
    pxc_dict = {k[2:]: v for k, v in args.items()}
    pxc_dict['data_hosts'] = args['--data_host'].split(",")
    if args['--monitor_host']:
        pxc_dict['monitor_hosts'] = args['--monitor_host'].split(",")
    else:
        pxc_dict['monitor_hosts'] = []
    pxc_dict['ssh_try_limit'] = int(args['--ssh_try_limit'])
    if args['--taskid']:
        # Reuse the caller-supplied task id as the working uuid.
        pxc_dict['task_id'] = ' external_task_id: {}\n'.format(
            args['--taskid'])
        pxc_dict['uuid'] = args['--taskid']
    else:
        pxc_dict['task_id'] = ''
        pxc_dict['uuid'] = str(uuid.uuid4())
    pxc_dict['parted'] = '' if args['--without_parted'] else '\n - parted'
    pxc_dict['enable_backup'] = not args['--without_backup']
    # Password auth takes precedence over key auth when both are given.
    ansible_auth = ''
    if pxc_dict['sshpass']:
        ansible_auth = 'ansible_ssh_pass={}'.format(pxc_dict['sshpass'])
    elif pxc_dict['sshkey']:
        ansible_auth = 'ansible_ssh_private_key_file={}'.format(
            pxc_dict['sshkey'])
    log_filename = os.path.join(pxc_dict['workdir'],
                                'pxc_' + pxc_dict['uuid'] + '.log')
    logger = common.MyLogger('pxc', log_filename).default_logger.logger
    logger.info('args:' + str(args))
    # All generated artifacts are keyed by the uuid under workdir.
    playbook_filename = os.path.join(pxc_dict['workdir'],
                                     'pxc_' + pxc_dict['uuid'] + '.yml')
    host_filename = os.path.join(pxc_dict['workdir'], 'inventory',
                                 pxc_dict['uuid'], 'hosts')
    setting_filename = os.path.join(pxc_dict['workdir'], 'inventory',
                                    pxc_dict['uuid'], 'pillar', 'pxc.yml')
    pxc_dict['data_hostlist'] = []
    pxc_dict['data_iplist'] = []
    pxc_dict['mon_hostlist'] = []
    # Monitor hosts join the pxc group as (garbd) arbitrators.
    for i, host_info in enumerate(pxc_dict['monitor_hosts']):
        (k, v) = host_info.split(":")  # host_info is "hostname:ip"
        k = k.lower()
        pxc_dict['mon_hostlist'].append(k)
        ip_list.append(v)
        hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
            k, 'ansible_ssh_host=' + v, str(pxc_dict['sshport']),
            ansible_auth, pxc_dict['hostarg']))
        pxc_group_script.append(' - hostname: {}'.format(k))
        pxc_group_script.append(' role: arbitrator')
        pxc_group_script.append(' bootstrap: False')
        if pxc_dict['db_vip']:
            pxc_group_script.append(' vip: {}'.format(
                pxc_dict['db_vip']))
    # Data hosts: the first one bootstraps the cluster.
    for i, host_info in enumerate(pxc_dict['data_hosts']):
        (k, v) = host_info.split(":")
        k = k.lower()
        pxc_dict['data_hostlist'].append(k)
        pxc_dict['data_iplist'].append(v)
        hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
            k, 'ansible_ssh_host=' + v, str(pxc_dict['sshport']),
            ansible_auth, pxc_dict['hostarg']))
        ip_list.append(v)
        pxc_group_script.append(' - hostname: {}'.format(k))
        pxc_group_script.append(' role: data')
        if i == 0:
            pxc_group_script.append(' bootstrap: True')
        else:
            pxc_group_script.append(' bootstrap: False')
    if pxc_dict['db_vip']:
        # A database VIP implies the pacemaker + lvs HA roles.
        pxc_dict['ha_setting'] = "\n - pacemaker\n - lvs"
    pxc_dict['pxc_group'] = '\n'.join(pxc_group_script)
    logger.info('create ansible hosts: {}'.format(host_filename))
    common.render_template('\n'.join(hosts_script), {}, host_filename)
    logger.info('craete ansible playbooks: {}'.format(playbook_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, playbook_template))),
        pxc_dict, playbook_filename)
    logger.info('create mysql with pxc setting: {}'.format(setting_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, setting_template))),
        pxc_dict, setting_filename)
    if pxc_dict['template_only']:
        # Dry run: leave the rendered files and tell the user how to run.
        print('You can run ansible-playbook -i {} {}'.format(
            host_filename, playbook_filename))
    else:
        logger.info('check ssh availability')
        # NOTE(review): `i` is a single retry budget shared across all IPs,
        # so the total wait is bounded by ssh_try_limit seconds overall,
        # not per host — confirm this is intended.
        i = 1
        for check_ip in ip_list:
            while (not common.check_server(check_ip, int(
                    pxc_dict['sshport']))) and (i < pxc_dict['ssh_try_limit']):
                time.sleep(1)
                i += 1
        # Report any host that is still unreachable after the wait loop.
        for check_ip in ip_list:
            if (not common.check_server(check_ip, int(pxc_dict['sshport']))):
                logger.info('ssh check limit exceed ({} sec): ip {}'.format(
                    str(pxc_dict['ssh_try_limit']), check_ip))
        logger.info('run ansible from python')
        runner = pyansible.playbooks.Runner(hosts_file=host_filename,
                                            playbook_file=playbook_filename,
                                            verbosity=3)
        runner.run()
    print("--- Total Excution time: %s ---" %
          str(timedelta(seconds=(time.time() - start_time))))
    print('You can connect db with:\n mysql -uroot -p{} -h{} {}'.format(
        pxc_dict['password'], pxc_dict['ip'], pxc_dict['database']))
    return None
def gen_spec(args):
    """Generate a python-<repo> RPM .spec and print the shell build script.

    ``args`` is a docopt-style dict with ``<git_hub_url>``, ``<version>``
    and ``--rpmbuild_root``.  The spec is rendered into
    <rpmbuild_root>/SPECS and the fetch/build commands are printed to
    stdout.  Returns None.
    """
    spec_template = 'python-template.spec'
    spec_script = []
    package_dict = {}
    package_url = args['<git_hub_url>']
    package_dict['package_ver'] = args['<version>']
    rpmbuild_root = args['--rpmbuild_root']
    pattern = re.compile(r'https://(.*?)/(.*?)/(.*)')
    match = re.match(pattern, package_url)
    # BUGFIX: the original tested `'/' in match.groups(3)` — groups() returns
    # a TUPLE, so the membership test never matched and the method was always
    # 'git'.  group(3) is the URL path tail: a slash there means a direct
    # file download (wget), otherwise a git clone.
    download_method = 'wget' if '/' in match.group(3) else 'git'
    (package_dict['provider'],
     package_dict['provider_tld']) = match.group(1).split('.')
    package_dict['project'] = match.group(2)
    package_dict['repo'] = match.group(3).split('/')[0].replace('.git', '')
    package_dict['today'] = datetime.datetime.now().strftime("%a %b %d %Y")
    # Shell variable holding the version, e.g. repo 'foo-bar' -> FOOBARVER.
    package_ver_var = (package_dict['repo'].replace('-', '').replace(
        '_', '').replace('.', '').upper() + 'VER')
    repo_name = package_dict['repo']
    spec_filename = 'python-' + repo_name + '.spec'
    repo_filename = package_dict['repo'] + '-$' + package_ver_var + '.tar.gz'
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, spec_template))),
        package_dict, os.path.join(rpmbuild_root, 'SPECS', spec_filename))
    spec_script.append('')
    spec_script.append('export %s' % package_ver_var + '=' +
                       package_dict['package_ver'])
    if download_method == 'git':
        spec_script.append('cd /usr/local/src')
        spec_script.append('rm -rf /usr/local/src/' + package_dict['repo'] +
                           '-$' + package_ver_var)
        spec_script.append('git clone --depth=10 ' + package_url + ' ' +
                           package_dict['repo'] + '-$' + package_ver_var)
        spec_script.append('tar -zcf $SRCDIR/' + repo_filename + ' ' +
                           package_dict['repo'] + '-$' + package_ver_var)
        spec_script.append('cd /usr/local/src/' + package_dict['repo'] +
                           '-$' + package_ver_var)
        spec_script.append('export GITCOMMIT=`git rev-parse HEAD`')
        spec_script.append('cd ..')
        # Patch the commit hash into the rendered spec.
        spec_script.append(
            'sed -i -e "/^%global/s#%global commit.*#%global commit $GITCOMMIT#g" $RPMBUILDROOT/SPECS/'
            + spec_filename)
    else:
        spec_script.append('wget -O $SRCDIR/' + repo_filename + ' ' +
                           package_url)
        spec_script.append('export GITCOMMIT=' + sys.argv[3])
    spec_script.append('')
    spec_script.append('/bin/cp -f $RPMBUILDROOT/SPECS/' + spec_filename +
                       ' $SPECSDIR/')
    spec_script.append('/bin/cp -f $SRCDIR/' + repo_filename +
                       ' $RPMBUILDROOT/SOURCES/')
    spec_script.append('/bin/cp -f $SPECSDIR/' + spec_filename +
                       ' $RPMBUILDROOT/SPECS/')
    spec_script.append('rpmbuild -bb $RPMBUILDROOT/SPECS/' + spec_filename)
    spec_script.append('rm -f $RPMDIR/python-' + repo_name + '-*')
    spec_script.append('mv -f $RPMBUILDROOT/RPMS/x86_64/python-' + repo_name +
                       '-* $RPMDIR')
    spec_script.append('mv -f $RPMBUILDROOT/RPMS/noarch/python-' + repo_name +
                       '-* $RPMDIR')
    print('\n'.join(spec_script))
    return None
def gen_spec(args):
    """Generate a <project>-<repo> RPM .spec for a nodejs/static web app.

    Renders the spec and an nginx site configuration into the rpmbuild
    tree, then prints the shell commands that fetch the sources and run
    rpmbuild.  ``args`` is a docopt-style dict with ``<git_hub_url>``,
    ``<version>`` and ``--rpmbuild_root``.  Returns None.
    """
    spec_template = 'nodejs-template.spec'
    spec_script = []
    package_dict = {}
    package_url = args['<git_hub_url>']
    package_dict['package_ver'] = args['<version>']
    rpmbuild_root = args['--rpmbuild_root']
    import_path = '%{provider}.%{provider_tld}/%{project}/%{repo}'
    pattern = re.compile(r'https://(.*?)/(.*?)/(.*)')
    match = re.match(pattern, package_url)
    # BUGFIX: was `'/' in match.groups(3)` — groups() is a tuple, so the test
    # was never true and the method was always 'git'; group(3) is the URL
    # tail that decides between a direct file download (wget) and git.
    download_method = 'wget' if '/' in match.group(3) else 'git'
    (package_dict['provider'],
     package_dict['provider_tld']) = match.group(1).split('.')
    package_dict['project'] = match.group(2)
    package_dict['repo'] = match.group(3).split('/')[0].replace('.git', '')
    package_dict['repo_name'] = (package_dict['project'] + '-' +
                                 package_dict['repo'])
    package_dict['import_path'] = import_path
    package_dict['today'] = datetime.datetime.now().strftime("%a %b %d %Y")
    # Shell variable holding the version, e.g. repo 'foo-bar' -> FOOBARVER.
    package_ver_var = (package_dict['repo'].replace('-', '').replace(
        '_', '').replace('.', '').upper() + 'VER')
    repo_name = package_dict['repo_name']
    spec_filename = repo_name + '.spec'
    repo_filename_prefix = (package_dict['project'] + '-' +
                            package_dict['repo'] + '-$' + package_ver_var)
    repo_filename = repo_filename_prefix + '.tar.gz'
    package_dict['source_filename'] = repo_filename_prefix.replace(
        '$' + package_ver_var, '%{version}')
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, spec_template))),
        package_dict, os.path.join(rpmbuild_root, 'SPECS', spec_filename))
    # nginx site config serving the app from /opt/<project>/<repo>; written
    # into SOURCES so the spec can package it.
    nginx_conf = []
    nginx_conf.append('server {')
    nginx_conf.append(' listen 80;')
    nginx_conf.append(' server_name _;')
    nginx_conf.append(' index index.html;')
    nginx_conf.append(' root /opt/' + package_dict['project'] + '/' +
                      package_dict['repo'] + ';')
    nginx_conf.append('}')
    common.render_template(
        '{{ context }}', {'context': '\n'.join(nginx_conf)},
        os.path.join(rpmbuild_root, 'SOURCES',
                     package_dict['repo_name'] + '.nginx'))
    spec_script.append('')
    spec_script.append('export %s' % package_ver_var + '=' +
                       package_dict['package_ver'])
    if download_method == 'git':
        spec_script.append('cd /usr/local/src')
        spec_script.append('rm -rf /usr/local/src/' + repo_filename_prefix)
        spec_script.append('git clone --depth=10 ' + package_url + '.git ' +
                           repo_filename_prefix)
        spec_script.append('tar -zcf $SRCDIR/' + repo_filename + ' ' +
                           repo_filename_prefix)
        spec_script.append('cd /usr/local/src/' + repo_filename_prefix)
        spec_script.append('export GITCOMMIT=`git rev-parse HEAD`')
        spec_script.append('cd ..')
        # Patch the commit hash into the rendered spec.
        spec_script.append(
            'sed -i -e "/^%global/s#%global commit.*#%global commit $GITCOMMIT#g" $RPMBUILDROOT/SPECS/'
            + spec_filename)
    else:
        spec_script.append('wget -O $SRCDIR/' + repo_filename + ' ' +
                           package_url)
        spec_script.append('export GITCOMMIT=' + sys.argv[3])
    spec_script.append('')
    spec_script.append('/bin/cp -f $RPMBUILDROOT/SPECS/' + spec_filename +
                       ' $SPECSDIR/')
    spec_script.append('/bin/cp -f $SRCDIR/' + repo_filename +
                       ' $RPMBUILDROOT/SOURCES/')
    spec_script.append('/bin/cp -f $SPECSDIR/' + spec_filename +
                       ' $RPMBUILDROOT/SPECS/')
    spec_script.append('rpmbuild -bb $RPMBUILDROOT/SPECS/' + spec_filename)
    spec_script.append('rm -f $RPMDIR/' + repo_name + '-*')
    spec_script.append('mv -f $RPMBUILDROOT/RPMS/noarch/' + repo_name +
                       '-* $RPMDIR')
    print('\n'.join(spec_script))
    return None
def write_setup(package_name, values):
    """Emit the project packaging files from their templates."""
    file_map = [
        (".canari", "_canari"),
        ("setup.py", "setup"),
        ("README.md", "README"),
        ("MANIFEST.in", "MANIFEST"),
    ]
    for target, template in file_map:
        write_template(path.join(package_name, target),
                       read_template(template, values))
def gen_inv(args):
    """Generate the ansible inventory, playbook and pillar settings for a
    MySQL-MMM (multi-master replication manager) cluster deployment.

    :param args: docopt-style dict ('--monitor_host', '--data_host',
                 '--writer_vip', '--reader_vip', ... as 'host:ip' CSV lists).
    :returns: None; prints the ansible-playbook command line to run.
    """
    start_time = time.time()
    log_filename = 'mmm.log'
    logger = common.MyLogger('mmm', log_filename).default_logger.logger
    logger.info('args:' + str(args))
    playbook_template = 'mmm-playbook.j2'
    setting_template = 'mmm-setting.j2'
    hosts_script = []
    hosts_script.append('[mmm]')
    # Strip the leading '--' from every docopt option key.
    mmm_dict = {k[2:]: v for k, v in args.items()}
    mmm_dict['mon_fqdn'] = 'monitor_vip'
    # OS version decides the HA stack: pacemaker on EL7+, heartbeat on EL6.
    # The sentinel VIP 192.168.10.1 disables the HA package entirely.
    if int(mmm_dict['osver']) > 6:
        mmm_dict['heartbeat'] = ''
        mmm_dict['pacemaker'] = '' if mmm_dict[
            'monitor_vip'] == '192.168.10.1' else '\n - pacemaker'
    else:
        mmm_dict['pacemaker'] = ''
        mmm_dict['heartbeat'] = '' if mmm_dict[
            'monitor_vip'] == '192.168.10.1' else '\n - heartbeat'
    mmm_dict['writer_fqdn'] = 'writer_vip'
    mmm_dict['writer_vips'] = args['--writer_vip'].split(",")
    mmm_dict['reader_fqdn'] = 'reader_vip'
    mmm_dict['reader_vips'] = args['--reader_vip'].split(",")
    mmm_dict['monitor_hosts'] = args['--monitor_host'].split(",")
    mmm_dict['enable_backup'] = not args['--without_backup']
    # MMM needs two monitors; pad with a placeholder if only one was given.
    if len(mmm_dict['monitor_hosts']) == 1:
        mmm_dict['monitor_hosts'].append('fakehost.domain:192.168.98.98')
    mmm_dict['data_hosts'] = args['--data_host'].split(",")
    mmm_dict['ssh_try_limit'] = int(args['--ssh_try_limit'])
    # Reuse an external task id as the run uuid when supplied.
    if args['--taskid']:
        mmm_dict['task_id'] = ' external_task_id: {}\n'.format(
            args['--taskid'])
        mmm_dict['uuid'] = args['--taskid']
    else:
        mmm_dict['task_id'] = ''
        mmm_dict['uuid'] = str(uuid.uuid4())
    # Password auth takes precedence over key auth.
    ansible_auth = ''
    if mmm_dict['sshpass']:
        ansible_auth = 'ansible_ssh_pass={}'.format(mmm_dict['sshpass'])
    elif mmm_dict['sshkey']:
        ansible_auth = 'ansible_ssh_private_key_file={}'.format(
            mmm_dict['sshkey'])
    playbook_filename = os.path.join(mmm_dict['workdir'],
                                     'mmm_' + mmm_dict['uuid'] + '.yml')
    host_filename = os.path.join(mmm_dict['workdir'], 'inventory',
                                 mmm_dict['uuid'], 'hosts')
    setting_filename = os.path.join(mmm_dict['workdir'], 'inventory',
                                    mmm_dict['uuid'], 'pillar', 'mmm.yml')
    mmm_dict['data_hostlist'] = []
    mmm_dict['mmm_hostlist'] = []
    # Monitor hosts: inventory line per host plus mon_hostN template keys.
    for i, host_info in enumerate(mmm_dict['monitor_hosts']):
        (k, v) = host_info.split(":")
        k = k.lower()
        mmm_dict['mmm_hostlist'].append(k)
        hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
            k, 'ansible_ssh_host=' + v, str(mmm_dict['sshport']),
            ansible_auth, mmm_dict['hostarg']))
        mmm_dict['mon_host' + str(i + 1)] = k
    # Data hosts: inventory line per host plus data_hostN template keys.
    for i, host_info in enumerate(mmm_dict['data_hosts']):
        (k, v) = host_info.split(":")
        k = k.lower()
        mmm_dict['data_hostlist'].append(k)
        hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
            k, 'ansible_ssh_host=' + v, str(mmm_dict['sshport']),
            ansible_auth, mmm_dict['hostarg']))
        mmm_dict['data_host' + str(i + 1)] = k
    logger.info('create ansible hosts: {}'.format(host_filename))
    common.render_template('\n'.join(hosts_script), {}, host_filename)
    # FIX: corrected 'craete' typo in the log message.
    logger.info('create ansible playbooks: {}'.format(playbook_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, playbook_template))),
        mmm_dict, playbook_filename)
    logger.info('create mysql with mmm setting: {}'.format(setting_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, setting_template))),
        mmm_dict, setting_filename)
    print('You can run ansible-playbook -i {} {}'.format(
        host_filename, playbook_filename))
    # FIX: corrected 'Excution' typo in the timing message.
    print("--- Total Execution time: %s ---" %
          str(timedelta(seconds=(time.time() - start_time))))
    print('You can connect db with:\n mysql -uroot -p{} -h{}'.format(
        mmm_dict['password'], mmm_dict['writer_vips'][0]))
    return None
def dbbackup(args, func_type, fields):
    """Run (or only template) an ansible-driven database backup/restore over
    a list of data hosts, then collect per-host results via the ara API.

    :param args: docopt-style dict ('--data_host' as 'host:ip' CSV, ...).
    :param func_type: backup operation name, used to pick the playbook
                      template and in file names.
    :param fields: optional whitelist of result keys to keep per host.
    :returns: dict mapping hostname -> result dict (with 'ssh_check' flag),
              or None when only templates were generated.
    """
    start_time = time.time()
    hosts_script = []
    hosts_script.append('[backup]')
    # Strip the leading '--' from every docopt option key.
    backup_dict = {k[2:]: v for k, v in args.items()}
    # Reuse an external task id as the run uuid when supplied; otherwise a
    # fresh uuid is also exported as the external task id.
    if args['--taskid']:
        backup_dict['task_id'] = ' external_task_id: {}\n'.format(
            args['--taskid'])
        backup_dict['uuid'] = args['--taskid']
    else:
        backup_dict['uuid'] = str(uuid.uuid4())
        backup_dict['task_id'] = ' external_task_id: {}\n'.format(
            backup_dict['uuid'])
    log_filename = os.path.join(
        backup_dict['workdir'],
        'dbbackup_%s_%s_%s.log' % (backup_dict['dbtype'], func_type,
                                   backup_dict['uuid']))
    logger = common.MyLogger(
        'dbbackup_%s_%s' % (backup_dict['dbtype'], backup_dict['uuid']),
        log_filename).default_logger.logger
    logger.info('args:' + str(args))
    backup_dict['data_hosts'] = args['--data_host'].split(",")
    backup_dict['ssh_try_limit'] = int(args['--ssh_try_limit'])
    playbook_template = 'dbbackup-%s-%s-playbook.j2' % (backup_dict['dbtype'],
                                                        func_type)
    playbook_filename = os.path.join(
        backup_dict['workdir'],
        'dbbackup_%s_%s_%s.yml' % (backup_dict['dbtype'], func_type,
                                   backup_dict['uuid']))
    host_filename = os.path.join(backup_dict['workdir'], 'inventory',
                                 backup_dict['uuid'], 'hosts')
    # Password auth takes precedence over key auth.
    ansible_auth = ''
    if backup_dict['sshpass']:
        ansible_auth = 'ansible_ssh_pass={}'.format(backup_dict['sshpass'])
    elif backup_dict['sshkey']:
        ansible_auth = 'ansible_ssh_private_key_file={}'.format(
            backup_dict['sshkey'])
    host_list = []
    ip_list = []
    for i, host_info in enumerate(backup_dict['data_hosts']):
        (k, v) = host_info.split(":")
        k = k.lower()
        host_list.append(k)
        ip_list.append(v)
        hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
            k, 'ansible_ssh_host=' + v, str(backup_dict['sshport']),
            ansible_auth, backup_dict['hostarg']))
    logger.info('create ansible hosts: {}'.format(host_filename))
    common.render_template('\n'.join(hosts_script), {}, host_filename)
    # FIX: corrected 'craete' typo in the log message.
    logger.info('create ansible playbooks: {}'.format(playbook_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, playbook_template))),
        backup_dict, playbook_filename)
    if backup_dict['template_only']:
        print('You can run ansible-playbook -i {} {}'.format(
            host_filename, playbook_filename))
        # FIX: corrected 'Excution' typo in the timing message.
        print("--- Total Execution time: %s ---" %
              str(timedelta(seconds=(time.time() - start_time))))
    else:
        logger.info('check ssh availability')
        # NOTE: `i` is a single retry budget shared across ALL hosts, so the
        # limit bounds the total wait, not the per-host wait.
        i = 1
        check_list = []
        for check_ip in ip_list:
            while (not common.check_server(check_ip, int(
                    backup_dict['sshport']))) and (
                        i < backup_dict['ssh_try_limit']):
                time.sleep(1)
                i += 1
        for check_ip in ip_list:
            check_result = common.check_server(check_ip,
                                               int(backup_dict['sshport']))
            check_list.append(check_result)
            if (not check_result):
                logger.info('ssh check limit exceed ({} sec): ip {}'.format(
                    str(backup_dict['ssh_try_limit']), check_ip))
        logger.info('run ansible from python')
        runner = pyansible.playbooks.Runner(hosts_file=host_filename,
                                            playbook_file=playbook_filename,
                                            verbosity=3)
        runner.run()
        print("--- Total Execution time: %s ---" %
              str(timedelta(seconds=(time.time() - start_time))))
        result_dict = {}
        araapi = common.AraApi()
        for i, host_info in enumerate(host_list):
            cmd = 'data show --playbook %s mybak_%s -f json' % (
                backup_dict['uuid'], host_info)
            result = araapi.run_result(cmd)
            if result['stderr']:
                logger.error('ara cmd error: {}: {}'.format(
                    host_info, result['stderr']))
            if result['stdout']:
                tmp_dict = json.loads(result['stdout'])['Value']
                if isinstance(tmp_dict, list):
                    tmp_dict = {'backup_list': tmp_dict}
            else:
                tmp_dict = {}
            if fields:
                # BUG FIX: the original iterated tmp_dict.keys() while
                # popping from tmp_dict, which raises RuntimeError on
                # Python 3 (dict changed size during iteration). Snapshot
                # the keys first.
                for k in list(tmp_dict.keys()):
                    if k not in fields:
                        tmp_dict.pop(k)
            tmp_dict['ssh_check'] = check_list[i]
            result_dict[host_info] = tmp_dict
        logger.info('return value: %s' % (result_dict))
        # Restore the real stdio streams (presumably redirected by the
        # logger/runner machinery) before returning to the caller.
        sys.stdout = sys.__stdout__
        sys.stderr = sys.__stderr__
        return result_dict
def gen_inv(args):
    """Generate inventory, playbook and pillar settings for a single MySQL
    instance and (unless '--template_only') run the ansible playbook after
    waiting for SSH to come up.

    :param args: docopt-style dict ('--hostname', '--ip', '--database', ...).
    :returns: None; prints the resulting mysql connection command.
    """
    start_time = time.time()
    playbook_template = 'mysql-playbook.j2'
    setting_template = 'mysql-setting.j2'
    hosts_script = []
    hosts_script.append('[mysql]')
    # Strip the leading '--' from every docopt option key.
    mysql_dict = {k[2:]: v for k, v in args.items()}
    # Reuse an external task id as the run uuid when supplied.
    if args['--taskid']:
        mysql_dict['task_id'] = ' external_task_id: {}\n'.format(
            args['--taskid'])
        mysql_dict['uuid'] = args['--taskid']
    else:
        mysql_dict['task_id'] = ''
        mysql_dict['uuid'] = str(uuid.uuid4())
    log_filename = os.path.join(mysql_dict['workdir'],
                                'mysql_' + mysql_dict['uuid'] + '.log')
    logger = common.MyLogger('mysql', log_filename).default_logger.logger
    logger.info('args:' + str(args))
    mysql_dict['hostname'] = args['--hostname'].lower()
    mysql_dict['ip'] = args['--ip']
    mysql_dict['ssh_try_limit'] = int(args['--ssh_try_limit'])
    mysql_dict['enable_backup'] = not args['--without_backup']
    playbook_filename = os.path.join(mysql_dict['workdir'],
                                     'mysql_' + mysql_dict['uuid'] + '.yml')
    host_filename = os.path.join(mysql_dict['workdir'], 'inventory',
                                 mysql_dict['uuid'], 'hosts')
    setting_filename = os.path.join(mysql_dict['workdir'], 'inventory',
                                    mysql_dict['uuid'], 'pillar', 'mysql.yml')
    # Password auth takes precedence over key auth.
    ansible_auth = ''
    if mysql_dict['sshpass']:
        ansible_auth = 'ansible_ssh_pass={}'.format(mysql_dict['sshpass'])
    elif mysql_dict['sshkey']:
        ansible_auth = 'ansible_ssh_private_key_file={}'.format(
            mysql_dict['sshkey'])
    hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
        mysql_dict['hostname'], 'ansible_ssh_host=' + mysql_dict['ip'],
        str(mysql_dict['sshport']), ansible_auth, mysql_dict['hostarg']))
    logger.info('create ansible hosts: {}'.format(host_filename))
    common.render_template('\n'.join(hosts_script), {}, host_filename)
    # FIX: corrected 'craete' typo in the log message.
    logger.info('create ansible playbooks: {}'.format(playbook_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, playbook_template))),
        mysql_dict, playbook_filename)
    logger.info(
        'create mysql single instance setting: {}'.format(setting_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, setting_template))),
        mysql_dict, setting_filename)
    if mysql_dict['template_only']:
        print('You can run ansible-playbook -i {} {}'.format(
            host_filename, playbook_filename))
    else:
        # Poll SSH once per second until reachable or the limit is hit.
        logger.info('check ssh availability')
        i = 1
        while (not common.check_server(mysql_dict['ip'],
                                       int(mysql_dict['sshport']))) and (
                                           i < mysql_dict['ssh_try_limit']):
            time.sleep(1)
            i += 1
        if (not common.check_server(mysql_dict['ip'],
                                    int(mysql_dict['sshport']))):
            logger.info('ssh check limit exceed ({} sec): ip {}'.format(
                str(mysql_dict['ssh_try_limit']), mysql_dict['ip']))
        logger.info('run ansible from python')
        runner = pyansible.playbooks.Runner(hosts_file=host_filename,
                                            playbook_file=playbook_filename,
                                            verbosity=3)
        runner.run()
        print("--- wait time for ssh reachable %s sec ---" % str(i))
        # FIX: corrected 'Excution' typo in the timing message.
        print("--- Total Execution time: %s ---" %
              str(timedelta(seconds=(time.time() - start_time))))
        print('You can connect db with:\n mysql -uroot -p{} -h{} {}'.format(
            mysql_dict['password'], mysql_dict['ip'], mysql_dict['database']))
    return None
def gen_inv(args):
    """Generate inventory, playbook and pillar settings for a MySQL MHA
    (master high availability) cluster and (unless '--template_only') run the
    ansible playbook after waiting for SSH on every host.

    :param args: docopt-style dict ('--data_host', '--monitor_host' as
                 'host:ip' CSV lists, ...). The first data host becomes the
                 replication master.
    :returns: None.
    """
    start_time = time.time()
    playbook_template = 'mha-playbook.j2'
    setting_template = 'mha-setting.j2'
    ip_list = []
    hosts_script = []
    mha_group_script = []
    replication_script = []
    hosts_script.append('[mha]')
    # Strip the leading '--' from every docopt option key.
    mha_dict = {k[2:]: v for k, v in args.items()}
    mha_dict['data_hosts'] = args['--data_host'].split(",")
    mha_dict['monitor_hosts'] = args['--monitor_host'].split(",")
    # Pad with a placeholder so there are always two monitor entries.
    if len(mha_dict['monitor_hosts']) == 1:
        mha_dict['monitor_hosts'].append('fakehost.domain:192.168.98.98')
    mha_dict['ssh_try_limit'] = int(args['--ssh_try_limit'])
    # Reuse an external task id as the run uuid when supplied.
    if args['--taskid']:
        mha_dict['task_id'] = ' external_task_id: {}\n'.format(
            args['--taskid'])
        mha_dict['uuid'] = args['--taskid']
    else:
        mha_dict['task_id'] = ''
        mha_dict['uuid'] = str(uuid.uuid4())
    mha_dict['parted'] = '' if args['--without_parted'] else '\n - parted'
    mha_dict['enable_backup'] = not args['--without_backup']
    # Password auth takes precedence over key auth.
    ansible_auth = ''
    if mha_dict['sshpass']:
        ansible_auth = 'ansible_ssh_pass={}'.format(mha_dict['sshpass'])
    elif mha_dict['sshkey']:
        ansible_auth = 'ansible_ssh_private_key_file={}'.format(
            mha_dict['sshkey'])
    log_filename = os.path.join(mha_dict['workdir'],
                                'mha_' + mha_dict['uuid'] + '.log')
    logger = common.MyLogger('mha', log_filename).default_logger.logger
    logger.info('args:' + str(args))
    playbook_filename = os.path.join(mha_dict['workdir'],
                                     'mha_' + mha_dict['uuid'] + '.yml')
    host_filename = os.path.join(mha_dict['workdir'], 'inventory',
                                 mha_dict['uuid'], 'hosts')
    setting_filename = os.path.join(mha_dict['workdir'], 'inventory',
                                    mha_dict['uuid'], 'pillar', 'mha.yml')
    mha_dict['data_hostlist'] = []
    mha_dict['mha_hostlist'] = []
    # Monitor hosts: inventory lines plus 'monitor' role entries in the
    # mha_group YAML fragment.
    for i, host_info in enumerate(mha_dict['monitor_hosts']):
        (k, v) = host_info.split(":")
        k = k.lower()
        mha_dict['mha_hostlist'].append(k)
        ip_list.append(v)
        hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
            k, 'ansible_ssh_host=' + v, str(mha_dict['sshport']),
            ansible_auth, mha_dict['hostarg']))
        mha_group_script.append(' - hostname: {}'.format(k))
        mha_group_script.append(' role: monitor')
    # Data hosts: the first one is the master, the rest are candidate-master
    # slaves replicating from it (GTID auto-positioning).
    master_host = ''
    for i, host_info in enumerate(mha_dict['data_hosts']):
        (k, v) = host_info.split(":")
        k = k.lower()
        mha_dict['data_hostlist'].append(k)
        hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
            k, 'ansible_ssh_host=' + v, str(mha_dict['sshport']),
            ansible_auth, mha_dict['hostarg']))
        ip_list.append(v)
        mha_group_script.append(' - hostname: {}'.format(k))
        if i == 0:
            master_host = k
            mha_group_script.append(' role: master')
        else:
            mha_group_script.append(' role: slave')
            mha_group_script.append(' mha_args:')
            mha_group_script.append(' - candidate_master: "1"')
        if i > 0:
            replication_script.append(' {}:'.format(k))
            replication_script.append(
                ' master_host: {}'.format(master_host))
            replication_script.append(' master_auto_position: 1')
    mha_dict['mha_group'] = '\n'.join(mha_group_script)
    mha_dict['mysql_replication'] = '\n'.join(replication_script)
    logger.info('create ansible hosts: {}'.format(host_filename))
    common.render_template('\n'.join(hosts_script), {}, host_filename)
    # FIX: corrected 'craete' typo in the log message.
    logger.info('create ansible playbooks: {}'.format(playbook_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, playbook_template))),
        mha_dict, playbook_filename)
    logger.info('create mysql with mha setting: {}'.format(setting_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, setting_template))),
        mha_dict, setting_filename)
    if mha_dict['template_only']:
        print('You can run ansible-playbook -i {} {}'.format(
            host_filename, playbook_filename))
    else:
        logger.info('check ssh availability')
        # NOTE: `i` is a single retry budget shared across ALL hosts, so the
        # limit bounds the total wait, not the per-host wait.
        i = 1
        for check_ip in ip_list:
            while (not common.check_server(check_ip, int(
                    mha_dict['sshport']))) and (i < mha_dict['ssh_try_limit']):
                time.sleep(1)
                i += 1
        for check_ip in ip_list:
            if (not common.check_server(check_ip, int(mha_dict['sshport']))):
                logger.info('ssh check limit exceed ({} sec): ip {}'.format(
                    str(mha_dict['ssh_try_limit']), check_ip))
        logger.info('run ansible from python')
        runner = pyansible.playbooks.Runner(hosts_file=host_filename,
                                            playbook_file=playbook_filename,
                                            verbosity=3)
        runner.run()
        # FIX: corrected 'Excution' typo in the timing message.
        print("--- Total Execution time: %s ---" %
              str(timedelta(seconds=(time.time() - start_time))))
        # NOTE(review): mha_dict['ip'] is never set in this function and no
        # '--ip' option is consumed here (looks copy-pasted from the
        # single-instance variant) -- if the CLI has no --ip option this line
        # raises KeyError; verify against the docopt usage string.
        print('You can connect db with:\n mysql -uroot -p{} -h{} {}'.format(
            mha_dict['password'], mha_dict['ip'], mha_dict['database']))
    return None
def _run_terraform(cmd, cwd, logger, step):
    """Run one terraform command in *cwd*; on failure log and exit the
    process, on success return its stdout as text.

    :param cmd: argv list for Popen.
    :param step: terraform sub-command name used in the error log message.
    """
    # universal_newlines=True makes stdout/stderr text (str) on Python 3;
    # without it Popen yields bytes and the str concatenation / '\n' split
    # below would raise TypeError. Harmless on Python 2.
    p = Popen(cmd, cwd=cwd, stdout=PIPE, stderr=PIPE, stdin=PIPE,
              universal_newlines=True)
    stdout, stderr = p.communicate()
    if p.returncode > 0:
        logger.error('terraform ' + step + ' error: ' + stderr)
        sys.exit()
    return stdout


def gen_inv(args):
    """Provision an EC2 instance with terraform, then configure a single
    MySQL instance on it with ansible (inventory + playbook + pillar).

    :param args: docopt-style dict ('--database', '--sshpass'/'--sshkey', ...).
    :returns: None; prints the resulting mysql connection command.
    """
    start_time = time.time()
    playbook_template = 'mysql-playbook.j2'
    setting_template = 'mysql-setting-aws.j2'
    ec2_template = 'ec2-instance.j2'
    hosts_script = []
    hosts_script.append('[mysql]')
    # Strip the leading '--' from every docopt option key.
    mysql_dict = {k[2:]: v for k, v in args.items()}
    # Reuse an external task id as the run uuid when supplied.
    if args['--taskid']:
        mysql_dict['task_id'] = ' external_task_id: {}\n'.format(
            args['--taskid'])
        mysql_dict['uuid'] = args['--taskid']
    else:
        mysql_dict['task_id'] = ''
        mysql_dict['uuid'] = str(uuid.uuid4())
    log_filename = os.path.join(mysql_dict['workdir'],
                                'mysql_' + mysql_dict['uuid'] + '.log')
    logger = common.MyLogger('aws', log_filename).default_logger.logger
    logger.info('args:' + str(args))
    # e.g. mysql-mydb-1a2b3c4 (uuid prefix keeps the name unique).
    mysql_dict['hostname'] = 'mysql-' + mysql_dict['database'].lower(
    ) + '-' + mysql_dict['uuid'][:7]
    mysql_dict['ssh_try_limit'] = int(args['--ssh_try_limit'])
    # --- provision ec2 instance ---------------------------------------
    terraform_cwd = '/opt/terraform/inventory/aws/us-east-1'
    terraform_filename = os.path.join(
        terraform_cwd, 'ec2-' + mysql_dict['hostname'] + '.tf')
    logger.info('create terraform file: {}'.format(terraform_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, ec2_template))),
        mysql_dict, terraform_filename)
    print(_run_terraform(['terraform', 'plan'], terraform_cwd, logger,
                         'plan'))
    print(_run_terraform(['terraform', 'apply', '-auto-approve'],
                         terraform_cwd, logger, 'apply'))
    stdout = _run_terraform([
        'terraform', 'output',
        'aws_instance_' + mysql_dict['hostname'].replace("-", "_") +
        '_private_ip'
    ], terraform_cwd, logger, 'output')
    # First output line is the instance's private IP.
    mysql_dict['ip'] = stdout.split('\n')[0]
    print(stdout)
    # --- post configuration -------------------------------------------
    playbook_filename = os.path.join(mysql_dict['workdir'],
                                     'mysql_' + mysql_dict['uuid'] + '.yml')
    host_filename = os.path.join(mysql_dict['workdir'], 'inventory',
                                 mysql_dict['uuid'], 'hosts')
    setting_filename = os.path.join(mysql_dict['workdir'], 'inventory',
                                    mysql_dict['uuid'], 'pillar', 'mysql.yml')
    # Password auth takes precedence over key auth.
    ansible_auth = ''
    if mysql_dict['sshpass']:
        ansible_auth = 'ansible_ssh_pass={}'.format(mysql_dict['sshpass'])
    elif mysql_dict['sshkey']:
        ansible_auth = 'ansible_ssh_private_key_file={}'.format(
            mysql_dict['sshkey'])
    # AWS instances are reached as user 'centos' with sudo escalation.
    hosts_script.append(
        '{:<40}{:<40}{:<50} ansible_ssh_port={:<7} ansible_ssh_user=centos '
        'ansible_become=true ansible_become_user=root '
        'ansible_become_method=sudo {}'
        .format(mysql_dict['hostname'] + '.useast1.aws',
                'ansible_ssh_host=' + mysql_dict['ip'], ansible_auth,
                str(mysql_dict['sshport']), mysql_dict['hostarg']))
    logger.info('create ansible hosts: {}'.format(host_filename))
    common.render_template('\n'.join(hosts_script), {}, host_filename)
    # FIX: corrected 'craete' typo in the log message.
    logger.info('create ansible playbooks: {}'.format(playbook_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, playbook_template))),
        mysql_dict, playbook_filename)
    logger.info(
        'create mysql single instance setting: {}'.format(setting_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, setting_template))),
        mysql_dict, setting_filename)
    # Poll SSH once per second until reachable or the limit is hit.
    logger.info('check ssh availability')
    i = 1
    while (not common.check_server(mysql_dict['ip'], int(
            mysql_dict['sshport']))) and (i < mysql_dict['ssh_try_limit']):
        time.sleep(1)
        i += 1
    if (not common.check_server(mysql_dict['ip'],
                                int(mysql_dict['sshport']))):
        logger.info('ssh check limit exceed ({} sec): ip {}'.format(
            mysql_dict['ssh_try_limit'], mysql_dict['ip']))
    logger.info('run ansible from python')
    runner = pyansible.playbooks.Runner(hosts_file=host_filename,
                                        playbook_file=playbook_filename,
                                        verbosity=3)
    runner.run()
    # FIX: corrected 'Excution' typo in the timing message.
    print("--- Total Execution time: %s ---" %
          str(timedelta(seconds=(time.time() - start_time))))
    print('You can connect db with:\n mysql -uroot -p{} -h{} {}'.format(
        mysql_dict['password'], mysql_dict['ip'], mysql_dict['database']))
    return None