def chapter_route_index(api_key):
    """Serve the chapter index page for the manga named in the query string.

    Requires a ``manga_id`` query argument; responds 400 when it is missing.
    """
    logger.info('Responding to chapter::index')
    if 'manga_id' not in request.args:
        logger.warning('Insufficient parameters')
        return '', 400
    requested_id = request.args.get('manga_id')
    return render_template(
        'chapter_index',
        manga=backend.manga.read(requested_id),
        chapters=backend.chapter.index(requested_id),
    )
def page_route_display(api_key):
    """Serve the page-display view for the chapter named in the query string.

    Requires a ``chapter_id`` query argument; responds 400 when it is
    missing, matching the other routes in this module.
    """
    logger.info('Responding to page::index')
    if 'chapter_id' not in request.args:
        # BUG FIX: previously returned the literal string 'none' with HTTP
        # 200, which callers could not distinguish from a rendered page.
        # Signal a client error like the sibling routes do.
        logger.warning('Insufficient parameters')
        return '', 400
    chapter_id = request.args.get('chapter_id')
    chapter = backend.chapter.read(chapter_id)
    pages = backend.page.index(chapter_id)
    return render_template('page_display', pages=pages, chapter=chapter,
                           api_hostname=config.api_hostname,
                           api_public_port=config.api_public_port)
def generate_cloud_configs(target, override_params=None):
    """Render the etcd/master/node cloud-config templates for *target*.

    Parameters:
        target: name of the platform to deploy (e.g. ``"aws"``); selects
            ``<target>.yaml`` and decides the etcd cluster size.
        override_params: optional dict of parameters merged on top of the
            defaults and the target-specific values.

    Returns the final parameter dict, or ``None`` when no etcd discovery
    URL could be obtained.
    """
    # BUG FIX: the default used to be a shared mutable dict ({}), which any
    # caller mutation would leak into later calls; use a None sentinel.
    if override_params is None:
        override_params = {}
    conf_env = common.jinja2_env(common.script_path('/'))
    # Patch jinja2 with our custom filter
    conf_env.filters['kube_token'] = generate_kubernetes_system_service_token
    # Number of nodes to reach consensus in cluster:
    etcd_size = 3 if target == "aws" else 1
    cloud_config_params = common.read_global_config()
    cloud_config_params['groups'] = {}
    defaults = common.read_yaml(conf_env, 'defaults.yaml', cloud_config_params)
    add_authorized_keys_dict(defaults)
    p = common.read_yaml(conf_env, target + '.yaml', cloud_config_params)
    url = get_etcd_discovery_url(etcd_size)
    if not url:
        return None
    p['etcd_discovery_url'] = url
    # Layer: defaults < target-specific < caller overrides.
    params = common.deepupdate(dict(defaults.items()), p)
    params = common.deepupdate(params, override_params)
    env = common.jinja2_env(common.script_path('/templates'))
    common.render_template(env, "etcd.yaml.j2", "/tmp", params)
    common.render_template(env, "master.yaml.j2", "/tmp", params)
    common.render_template(env, "node.yaml.j2", "/tmp", params)
    return params
def get(self):
    """Serve the main page; signed-in users also get a channel token and
    the current online list."""
    current = users.get_current_user()
    if current:
        common.user_bootstrap(current)
        context = {
            "user": current,
            "token": channel.create_channel(current.email()),
            "online_list": UserOnline.get_online_list(),
        }
    else:
        context = {
            "user": current,
            "login_url": users.create_login_url("/"),
        }
    self.response.out.write(common.render_template("mainpage.html", context))
def manga_route_rss(api_key):
    """Serve an RSS (XML) feed of the chapters of one manga.

    Requires a ``manga_id`` query argument; responds 400 when it is missing.
    """
    logger.info('Responding to manga::rss')
    if 'manga_id' not in request.args:
        logger.warning('manga_id not in parameters, cannot generate rss')
        return '', 400
    requested_id = request.args.get('manga_id')
    feed_meta = {
        'date': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
    }
    body = render_template('manga_rss',
                           manga=backend.manga.read(requested_id),
                           chapters=backend.chapter.index(requested_id),
                           meta=feed_meta)
    return body, 200, {'Content-Type': 'application/xml'}
def get(self):
    """Serve the chatbox page; signed-in users also receive a channel
    token, a logout URL and recent chat history as JSON."""
    user = users.get_current_user()
    config = {
        "height": 500,
        # The number of chat messages in history displayed to the client at first
        "nmessage": 30,
    }
    values = {
        "user": user,
        "online_list": UserOnline.get_online_list(),
        "config": config,
    }
    if user:
        common.user_bootstrap(user)
        token = channel.create_channel(user.email())
        query = db.GqlQuery(
            "SELECT * FROM ChatMessage ORDER BY when_created DESC LIMIT %s"
            % config["nmessage"])
        history = [{
            "time": self.simple_time_str(msg.when_created),
            "nickname": msg.nickname,
            "content": msg.content,
        } for msg in query]
        extra = {
            "token": token,
            "logout_url": users.create_logout_url("/chatbox"),
            "messages": json.dumps(history),
            "online_list": UserOnline.get_online_list(),
        }
    else:
        extra = {"login_url": users.create_login_url("/chatbox")}
    values.update(extra)
    self.response.out.write(common.render_template("chatbox.html", values))
def gen_inv(args):
    """Generate the Ansible inventory/playbook/pillar files for a single
    MySQL instance and, unless --template_only is set, run the playbook.

    args: docopt-style option dict ('--taskid', '--hostname', '--ip',
    '--ssh_try_limit', '--without_backup', ...).
    Returns None; all results are files on disk plus console output.
    """
    start_time = time.time()
    playbook_template = 'mysql-playbook.j2'
    setting_template = 'mysql-setting.j2'
    hosts_script = []
    hosts_script.append('[mysql]')
    # Strip the leading '--' from every docopt option name.
    mysql_dict = {k[2:]: v for k, v in args.items()}
    # An externally supplied task id doubles as the run uuid; otherwise a
    # fresh uuid4 is generated.
    if args['--taskid']:
        mysql_dict['task_id'] = ' external_task_id: {}\n'.format(
            args['--taskid'])
        mysql_dict['uuid'] = args['--taskid']
    else:
        mysql_dict['task_id'] = ''
        mysql_dict['uuid'] = str(uuid.uuid4())
    log_filename = os.path.join(mysql_dict['workdir'],
                                'mysql_' + mysql_dict['uuid'] + '.log')
    logger = common.MyLogger('mysql', log_filename).default_logger.logger
    logger.info('args:' + str(args))
    mysql_dict['hostname'] = args['--hostname'].lower()
    mysql_dict['ip'] = args['--ip']
    mysql_dict['ssh_try_limit'] = int(args['--ssh_try_limit'])
    mysql_dict['enable_backup'] = not args['--without_backup']
    # Per-run output paths, all keyed by the uuid.
    playbook_filename = os.path.join(mysql_dict['workdir'],
                                     'mysql_' + mysql_dict['uuid'] + '.yml')
    host_filename = os.path.join(mysql_dict['workdir'], 'inventory',
                                 mysql_dict['uuid'], 'hosts')
    setting_filename = os.path.join(mysql_dict['workdir'], 'inventory',
                                    mysql_dict['uuid'], 'pillar', 'mysql.yml')
    # Prefer password auth over key auth when both are given.
    ansible_auth = ''
    if mysql_dict['sshpass']:
        ansible_auth = 'ansible_ssh_pass={}'.format(mysql_dict['sshpass'])
    elif mysql_dict['sshkey']:
        ansible_auth = 'ansible_ssh_private_key_file={}'.format(
            mysql_dict['sshkey'])
    hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
        mysql_dict['hostname'], 'ansible_ssh_host=' + mysql_dict['ip'],
        str(mysql_dict['sshport']), ansible_auth, mysql_dict['hostarg']))
    logger.info('create ansible hosts: {}'.format(host_filename))
    common.render_template('\n'.join(hosts_script), {}, host_filename)
    logger.info('craete ansible playbooks: {}'.format(playbook_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, playbook_template))),
        mysql_dict, playbook_filename)
    logger.info(
        'create mysql single instance setting: {}'.format(setting_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, setting_template))),
        mysql_dict, setting_filename)
    if mysql_dict['template_only']:
        # Dry run: only emit the files and tell the operator what to run.
        print('You can run ansible-playbook -i {} {}'.format(
            host_filename, playbook_filename))
    else:
        # Wait (up to ssh_try_limit seconds) for the target to become
        # reachable over ssh before launching ansible.
        logger.info('check ssh availability')
        i = 1
        while (not common.check_server(mysql_dict['ip'],
                                       int(mysql_dict['sshport']))) and (
                                           i < mysql_dict['ssh_try_limit']):
            time.sleep(1)
            i += 1
        if (not common.check_server(mysql_dict['ip'], int(
                mysql_dict['sshport']))):
            # Proceed anyway; ansible itself will surface the failure.
            logger.info('ssh check limit exceed ({} sec): ip {}'.format(
                str(mysql_dict['ssh_try_limit']), mysql_dict['ip']))
        logger.info('run ansible from python')
        runner = pyansible.playbooks.Runner(hosts_file=host_filename,
                                            playbook_file=playbook_filename,
                                            verbosity=3)
        runner.run()
        print("--- wait time for ssh reachable %s sec ---" % str(i))
        print("--- Total Excution time: %s ---" %
              str(timedelta(seconds=(time.time() - start_time))))
        print('You can connect db with:\n mysql -uroot -p{} -h{} {}'.format(
            mysql_dict['password'], mysql_dict['ip'], mysql_dict['database']))
    return None
def login():
    """Serve the login view."""
    logger.info('Responding to login')
    page = render_template('login')
    return page
def root():
    """Serve the site root view."""
    page = render_template('root')
    return page
def gen_inv(args):
    """Generate the Ansible inventory/playbook/pillar files for a MySQL
    MHA cluster and, unless --template_only is set, run the playbook.

    args: docopt-style option dict.  '--data_host' and '--monitor_host'
    are comma-separated lists of "hostname:ip" pairs; the first data host
    becomes the replication master, the rest become candidate slaves.
    Returns None.
    """
    start_time = time.time()
    playbook_template = 'mha-playbook.j2'
    setting_template = 'mha-setting.j2'
    ip_list = []
    hosts_script = []
    mha_group_script = []
    replication_script = []
    hosts_script.append('[mha]')
    # Strip the leading '--' from every docopt option name.
    mha_dict = {k[2:]: v for k, v in args.items()}
    mha_dict['data_hosts'] = args['--data_host'].split(",")
    mha_dict['monitor_hosts'] = args['--monitor_host'].split(",")
    # Pad a lone monitor with a placeholder entry -- presumably the
    # downstream template expects at least two monitors; TODO confirm.
    if len(mha_dict['monitor_hosts']) == 1:
        mha_dict['monitor_hosts'].append('fakehost.domain:192.168.98.98')
    mha_dict['ssh_try_limit'] = int(args['--ssh_try_limit'])
    # An externally supplied task id doubles as the run uuid.
    if args['--taskid']:
        mha_dict['task_id'] = ' external_task_id: {}\n'.format(
            args['--taskid'])
        mha_dict['uuid'] = args['--taskid']
    else:
        mha_dict['task_id'] = ''
        mha_dict['uuid'] = str(uuid.uuid4())
    mha_dict['parted'] = '' if args['--without_parted'] else '\n - parted'
    mha_dict['enable_backup'] = not args['--without_backup']
    # Prefer password auth over key auth when both are given.
    ansible_auth = ''
    if mha_dict['sshpass']:
        ansible_auth = 'ansible_ssh_pass={}'.format(mha_dict['sshpass'])
    elif mha_dict['sshkey']:
        ansible_auth = 'ansible_ssh_private_key_file={}'.format(
            mha_dict['sshkey'])
    log_filename = os.path.join(mha_dict['workdir'],
                                'mha_' + mha_dict['uuid'] + '.log')
    logger = common.MyLogger('mha', log_filename).default_logger.logger
    logger.info('args:' + str(args))
    # Per-run output paths, all keyed by the uuid.
    playbook_filename = os.path.join(mha_dict['workdir'],
                                     'mha_' + mha_dict['uuid'] + '.yml')
    host_filename = os.path.join(mha_dict['workdir'], 'inventory',
                                 mha_dict['uuid'], 'hosts')
    setting_filename = os.path.join(mha_dict['workdir'], 'inventory',
                                    mha_dict['uuid'], 'pillar', 'mha.yml')
    mha_dict['data_hostlist'] = []
    mha_dict['mha_hostlist'] = []
    # Monitor hosts: inventory line + YAML group entry with role "monitor".
    for i, host_info in enumerate(mha_dict['monitor_hosts']):
        (k, v) = host_info.split(":")
        k = k.lower()
        mha_dict['mha_hostlist'].append(k)
        ip_list.append(v)
        hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
            k, 'ansible_ssh_host=' + v, str(mha_dict['sshport']),
            ansible_auth, mha_dict['hostarg']))
        mha_group_script.append(' - hostname: {}'.format(k))
        mha_group_script.append(' role: monitor')
    # Data hosts: the first entry is the master, the others are candidate
    # slaves replicating from it (GTID auto-positioning).
    master_host = ''
    for i, host_info in enumerate(mha_dict['data_hosts']):
        (k, v) = host_info.split(":")
        k = k.lower()
        mha_dict['data_hostlist'].append(k)
        hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
            k, 'ansible_ssh_host=' + v, str(mha_dict['sshport']),
            ansible_auth, mha_dict['hostarg']))
        ip_list.append(v)
        mha_group_script.append(' - hostname: {}'.format(k))
        if i == 0:
            master_host = k
            mha_group_script.append(' role: master')
        else:
            mha_group_script.append(' role: slave')
            mha_group_script.append(' mha_args:')
            mha_group_script.append(' - candidate_master: "1"')
        if i > 0:
            replication_script.append(' {}:'.format(k))
            replication_script.append(
                ' master_host: {}'.format(master_host))
            replication_script.append(' master_auto_position: 1')
    mha_dict['mha_group'] = '\n'.join(mha_group_script)
    mha_dict['mysql_replication'] = '\n'.join(replication_script)
    logger.info('create ansible hosts: {}'.format(host_filename))
    common.render_template('\n'.join(hosts_script), {}, host_filename)
    logger.info('craete ansible playbooks: {}'.format(playbook_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, playbook_template))),
        mha_dict, playbook_filename)
    logger.info('create mysql with mha setting: {}'.format(setting_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, setting_template))),
        mha_dict, setting_filename)
    if mha_dict['template_only']:
        # Dry run: only emit the files and tell the operator what to run.
        print('You can run ansible-playbook -i {} {}'.format(
            host_filename, playbook_filename))
    else:
        # Wait (shared budget of ssh_try_limit seconds across all hosts)
        # for every node to become reachable over ssh.
        logger.info('check ssh availability')
        i = 1
        for check_ip in ip_list:
            while (not common.check_server(check_ip, int(
                    mha_dict['sshport']))) and (i < mha_dict['ssh_try_limit']):
                time.sleep(1)
                i += 1
        for check_ip in ip_list:
            if (not common.check_server(check_ip, int(mha_dict['sshport']))):
                # Proceed anyway; ansible itself will surface the failure.
                logger.info('ssh check limit exceed ({} sec): ip {}'.format(
                    str(mha_dict['ssh_try_limit']), check_ip))
        logger.info('run ansible from python')
        runner = pyansible.playbooks.Runner(hosts_file=host_filename,
                                            playbook_file=playbook_filename,
                                            verbosity=3)
        runner.run()
        print("--- Total Excution time: %s ---" %
              str(timedelta(seconds=(time.time() - start_time))))
        # NOTE(review): unlike the single-instance variant there is no
        # '--ip' option visible here, so mha_dict['ip'] may raise KeyError
        # at this point -- TODO confirm against the docopt usage string.
        print('You can connect db with:\n mysql -uroot -p{} -h{} {}'.format(
            mha_dict['password'], mha_dict['ip'], mha_dict['database']))
    return None
def gen_inv(args):
    """Generate the Ansible inventory/playbook/pillar files for a Percona
    XtraDB Cluster (PXC) and, unless --template_only is set, run the
    playbook.

    args: docopt-style option dict.  '--data_host' (and optionally
    '--monitor_host') are comma-separated lists of "hostname:ip" pairs;
    the first data host bootstraps the cluster, monitor hosts become
    arbitrators.  Returns None.
    """
    start_time = time.time()
    playbook_template = 'pxc-playbook.j2'
    setting_template = 'pxc-setting.j2'
    ip_list = []
    hosts_script = []
    pxc_group_script = []
    hosts_script.append('[pxc]')
    # Strip the leading '--' from every docopt option name.
    pxc_dict = {k[2:]: v for k, v in args.items()}
    pxc_dict['data_hosts'] = args['--data_host'].split(",")
    if args['--monitor_host']:
        pxc_dict['monitor_hosts'] = args['--monitor_host'].split(",")
    else:
        pxc_dict['monitor_hosts'] = []
    pxc_dict['ssh_try_limit'] = int(args['--ssh_try_limit'])
    # An externally supplied task id doubles as the run uuid.
    if args['--taskid']:
        pxc_dict['task_id'] = ' external_task_id: {}\n'.format(
            args['--taskid'])
        pxc_dict['uuid'] = args['--taskid']
    else:
        pxc_dict['task_id'] = ''
        pxc_dict['uuid'] = str(uuid.uuid4())
    pxc_dict['parted'] = '' if args['--without_parted'] else '\n - parted'
    pxc_dict['enable_backup'] = not args['--without_backup']
    # Prefer password auth over key auth when both are given.
    ansible_auth = ''
    if pxc_dict['sshpass']:
        ansible_auth = 'ansible_ssh_pass={}'.format(pxc_dict['sshpass'])
    elif pxc_dict['sshkey']:
        ansible_auth = 'ansible_ssh_private_key_file={}'.format(
            pxc_dict['sshkey'])
    log_filename = os.path.join(pxc_dict['workdir'],
                                'pxc_' + pxc_dict['uuid'] + '.log')
    logger = common.MyLogger('pxc', log_filename).default_logger.logger
    logger.info('args:' + str(args))
    # Per-run output paths, all keyed by the uuid.
    playbook_filename = os.path.join(pxc_dict['workdir'],
                                     'pxc_' + pxc_dict['uuid'] + '.yml')
    host_filename = os.path.join(pxc_dict['workdir'], 'inventory',
                                 pxc_dict['uuid'], 'hosts')
    setting_filename = os.path.join(pxc_dict['workdir'], 'inventory',
                                    pxc_dict['uuid'], 'pillar', 'pxc.yml')
    pxc_dict['data_hostlist'] = []
    pxc_dict['data_iplist'] = []
    pxc_dict['mon_hostlist'] = []
    # Monitor hosts act as (garbd-style) arbitrators and never bootstrap.
    for i, host_info in enumerate(pxc_dict['monitor_hosts']):
        (k, v) = host_info.split(":")
        k = k.lower()
        pxc_dict['mon_hostlist'].append(k)
        ip_list.append(v)
        hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
            k, 'ansible_ssh_host=' + v, str(pxc_dict['sshport']),
            ansible_auth, pxc_dict['hostarg']))
        pxc_group_script.append(' - hostname: {}'.format(k))
        pxc_group_script.append(' role: arbitrator')
        pxc_group_script.append(' bootstrap: False')
        if pxc_dict['db_vip']:
            pxc_group_script.append(' vip: {}'.format(
                pxc_dict['db_vip']))
    # Data hosts: only the first one bootstraps the new cluster.
    for i, host_info in enumerate(pxc_dict['data_hosts']):
        (k, v) = host_info.split(":")
        k = k.lower()
        pxc_dict['data_hostlist'].append(k)
        pxc_dict['data_iplist'].append(v)
        hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
            k, 'ansible_ssh_host=' + v, str(pxc_dict['sshport']),
            ansible_auth, pxc_dict['hostarg']))
        ip_list.append(v)
        pxc_group_script.append(' - hostname: {}'.format(k))
        pxc_group_script.append(' role: data')
        if i == 0:
            pxc_group_script.append(' bootstrap: True')
        else:
            pxc_group_script.append(' bootstrap: False')
    # A database VIP implies the pacemaker/lvs HA stack.  NOTE(review):
    # 'ha_setting' stays unset when no VIP is given -- presumably the
    # template guards on it; TODO confirm.
    if pxc_dict['db_vip']:
        pxc_dict['ha_setting'] = "\n - pacemaker\n - lvs"
    pxc_dict['pxc_group'] = '\n'.join(pxc_group_script)
    logger.info('create ansible hosts: {}'.format(host_filename))
    common.render_template('\n'.join(hosts_script), {}, host_filename)
    logger.info('craete ansible playbooks: {}'.format(playbook_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, playbook_template))),
        pxc_dict, playbook_filename)
    logger.info('create mysql with pxc setting: {}'.format(setting_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, setting_template))),
        pxc_dict, setting_filename)
    if pxc_dict['template_only']:
        # Dry run: only emit the files and tell the operator what to run.
        print('You can run ansible-playbook -i {} {}'.format(
            host_filename, playbook_filename))
    else:
        # Wait (shared budget of ssh_try_limit seconds across all hosts)
        # for every node to become reachable over ssh.
        logger.info('check ssh availability')
        i = 1
        for check_ip in ip_list:
            while (not common.check_server(check_ip, int(
                    pxc_dict['sshport']))) and (i < pxc_dict['ssh_try_limit']):
                time.sleep(1)
                i += 1
        for check_ip in ip_list:
            if (not common.check_server(check_ip, int(pxc_dict['sshport']))):
                # Proceed anyway; ansible itself will surface the failure.
                logger.info('ssh check limit exceed ({} sec): ip {}'.format(
                    str(pxc_dict['ssh_try_limit']), check_ip))
        logger.info('run ansible from python')
        runner = pyansible.playbooks.Runner(hosts_file=host_filename,
                                            playbook_file=playbook_filename,
                                            verbosity=3)
        runner.run()
        print("--- Total Excution time: %s ---" %
              str(timedelta(seconds=(time.time() - start_time))))
        # NOTE(review): no '--ip' option is visible for this variant, so
        # pxc_dict['ip'] may raise KeyError here -- TODO confirm against
        # the docopt usage string.
        print('You can connect db with:\n mysql -uroot -p{} -h{} {}'.format(
            pxc_dict['password'], pxc_dict['ip'], pxc_dict['database']))
    return None
def gen_spec(args):
    """Generate an RPM spec file for a Go project and print the shell
    build script to stdout.

    Parameters:
        args: docopt-style dict with '<git_hub_url>', '<version>',
            '--rpmbuild_root', 'TAG' and the '--bin'/'--dev'/'--docker'
            flags.  gopkg.in URLs are resolved to their backing GitHub
            repository, with the branch encoded in the URL extension.

    Returns None; the spec is rendered to <rpmbuild_root>/SPECS and the
    build script is printed.
    """
    # BUG FIX: the original started with `go_dev_only = Ture` -- a
    # NameError at runtime ('Ture' is undefined); the variable was never
    # used afterwards, so it is simply removed.
    spec_template = 'go-template.spec'
    spec_script = []
    package_dict = {}
    package_url = args['<git_hub_url>']
    package_dict['_version'] = args['<version>']
    package_dict['package_ver'] = args['<version>'].replace('-', '_')
    rpmbuild_root = args['--rpmbuild_root']
    git_tag = args['TAG']
    gopkg = ('gopkg.in' in package_url)
    if gopkg:
        # gopkg.in encodes the branch in the extension:
        # https://gopkg.in/<project>/<repo>.<branch> or
        # https://gopkg.in/<repo>.<branch> (project defaults to go-<repo>).
        pattern = re.compile(r'https://(.*?)/(.*)\.(.*)')
        match = re.match(pattern, package_url)
        if '/' in match.group(2):
            (pkg_project, pkg_repo) = match.group(2).split('/')
        else:
            pkg_project = 'go-' + match.group(2)
            pkg_repo = match.group(2)
        branch = match.group(3)
        branch_git_version = common.get_gopkg_version_dict(package_url)[branch]
        # Resolve to the backing GitHub repository for the actual clone.
        package_url = 'https://github.com/' + pkg_project + '/' + pkg_repo
        (provider, provider_tld) = match.group(1).split('.')
        import_path = match.group(1) + '/' + match.group(
            2) + '.' + match.group(3)
    else:
        import_path = '%{provider}.%{provider_tld}/%{project}/%{repo}'
    pattern = re.compile(r'https://(.*?)/(.*?)/(.*)')
    match = re.match(pattern, package_url)
    # BUG FIX: the original tested `'/' in match.groups(3)` -- membership
    # in the tuple of all capture groups, which is (practically) never
    # true, so the method was always 'git'.  Test the third group itself:
    # an extra '/' in the path means a direct file URL, fetched with wget.
    download_method = 'wget' if '/' in match.group(3) else 'git'
    (package_dict['provider'],
     package_dict['provider_tld']) = match.group(1).split('.')
    if not gopkg:
        provider = package_dict['provider']
    package_dict['project'] = match.group(2)
    package_dict['repo'] = match.group(3).split('/')[0].replace('.git', '')
    package_dict['repo_name'] = (
        'golang-' + provider + '-' + package_dict['repo'] + '-' + branch
        if gopkg else
        'golang-' + provider + '-' + package_dict['project'] + '-' +
        package_dict['repo'])
    package_dict['import_path'] = import_path
    package_dict.update(gen_bin_context(package_dict['repo'], args))
    package_dict.update(gen_devel_context(args))
    package_dict.update(gen_systemd_context(args))
    package_dict['today'] = datetime.datetime.now().strftime("%a %b %d %Y")
    # Shell variable name that will hold the version, e.g. FOOBARVER.
    package_ver_var = package_dict['repo'].replace('-', '').replace(
        '_', '').replace('.', '').upper() + 'VER'
    repo_name = package_dict['repo_name']
    spec_filename = repo_name + '.spec'
    repo_filename_prefix = (
        package_dict['project'] + '-' + package_dict['repo'] + '-' + branch +
        '-$' + package_ver_var
        if gopkg else
        package_dict['project'] + '-' + package_dict['repo'] + '-$' +
        package_ver_var)
    repo_filename = repo_filename_prefix + '.tar.gz'
    package_dict['source_filename'] = repo_filename_prefix.replace(
        '$' + package_ver_var, '%{_version}')
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, spec_template))),
        package_dict, os.path.join(rpmbuild_root, 'SPECS', spec_filename))
    # (The original also computed an unused 'source_dir'; removed.)
    spec_dir = './specs' if args['--docker'] else '$SPECSDIR'
    spec_script.append('')
    spec_script.append('export %s' % package_ver_var + '=' +
                       package_dict['_version'])
    if download_method == 'git':
        spec_script.append('cd /usr/local/src')
        spec_script.append('rm -rf /usr/local/src/' + repo_filename_prefix)
        spec_script.append('git clone --depth=10 -b ' + git_tag + '$' +
                           package_ver_var + ' ' + package_url + '.git ' +
                           repo_filename_prefix)
        if gopkg:
            # Check out the gopkg.in branch's resolved version, creating
            # the branch locally when the tag does not exist.
            spec_script.append('cd /usr/local/src/' + repo_filename_prefix)
            spec_script.append('(git tag -l |grep ' + branch_git_version +
                               ') && git checkout ' + branch_git_version +
                               ' || git checkout -b ' + branch_git_version)
            spec_script.append('cd ..')
        spec_script.append('tar -zcf $RPMBUILDROOT/SOURCES/' + repo_filename +
                           ' ' + repo_filename_prefix)
        if args['--bin']:
            # Locate the directory containing main.go inside the checkout.
            spec_script.append('export GOBINDIR=`find ' +
                               repo_filename_prefix +
                               '/ -name "main.go"|awk -F"' +
                               repo_filename_prefix +
                               '" {\'$2\'}|awk -F"/main.go" {\'$1\'}`')
        spec_script.append('cd /usr/local/src/' + repo_filename_prefix)
        spec_script.append('export GITCOMMIT=`git rev-parse HEAD`')
        spec_script.append('cd ..')
        if args['--bin']:
            spec_script.append(
                'sed -i -e "s#GOBINDIR#$GOBINDIR#g" $RPMBUILDROOT/SPECS/' +
                spec_filename)
        spec_script.append(
            'sed -i -e "/^%global/s#%global commit.*#%global commit $GITCOMMIT#g" $RPMBUILDROOT/SPECS/'
            + spec_filename)
    else:
        spec_script.append('wget -O $SRCDIR/' + repo_filename + ' ' +
                           package_url)
        spec_script.append('export GITCOMMIT=' + sys.argv[3])
    spec_script.append('')
    spec_script.append('/bin/cp -f $RPMBUILDROOT/SPECS/' + spec_filename +
                       ' ' + spec_dir + '/')
    if args['--docker']:
        # Docker mode: only collect instructions into readme.txt.
        spec_script.append('echo "" >> readme.txt')
        spec_script.append('echo "rpmbuild -bb \\$RPMBUILDROOT/SPECS/' +
                           spec_filename + '" >> readme.txt')
        spec_script.append(
            'echo "rpm -U \\$(find \\$RPMBUILDROOT/RPMS -iname \\"' +
            repo_name + '-*.rpm\\" -a ! -iname \\"' + repo_name +
            '-*debug*.rpm\\"| tr \\"\\n\\" \\" \\")" >> readme.txt')
    else:
        spec_script.append('/bin/cp -f $SRCDIR/' + repo_filename +
                           ' $RPMBUILDROOT/SOURCES/')
        spec_script.append('/bin/cp -f $SPECSDIR/' + spec_filename +
                           ' $RPMBUILDROOT/SPECS/')
        spec_script.append('rpmbuild -bb $RPMBUILDROOT/SPECS/' +
                           spec_filename)
        spec_script.append('rm -f $RPMDIR/' + repo_name + '-*')
        if args['--bin']:
            spec_script.append('mv -f $RPMBUILDROOT/RPMS/x86_64/' +
                               repo_name + '-* $RPMDIR')
        if args['--dev']:
            spec_script.append('mv -f $RPMBUILDROOT/RPMS/noarch/' +
                               repo_name + '-devel-* $RPMDIR')
            spec_script.append('rpm -U $RPMDIR/' + repo_name + '-devel-*')
        if not args['--bin']:
            spec_script.append('rm -f $RPMBUILDROOT/RPMS/x86_64/' +
                               repo_name + '-*')
    print('\n'.join(spec_script))
    return None
def gen_spec(args):
    """Generate an RPM spec plus a minimal nginx vhost for a Node.js /
    static-site project and print the shell build script to stdout.

    Parameters:
        args: docopt-style dict with '<git_hub_url>', '<version>' and
            '--rpmbuild_root'.

    Returns None; the spec and nginx config are rendered under
    *rpmbuild_root* and the build script is printed.
    """
    spec_template = 'nodejs-template.spec'
    spec_script = []
    package_dict = {}
    package_url = args['<git_hub_url>']
    package_dict['package_ver'] = args['<version>']
    rpmbuild_root = args['--rpmbuild_root']
    import_path = '%{provider}.%{provider_tld}/%{project}/%{repo}'
    pattern = re.compile(r'https://(.*?)/(.*?)/(.*)')
    match = re.match(pattern, package_url)
    # BUG FIX: the original tested `'/' in match.groups(3)` -- membership
    # in the tuple of all capture groups, which is (practically) never
    # true, so the method was always 'git'.  Test the third group itself:
    # an extra '/' in the path means a direct file URL, fetched with wget.
    download_method = 'wget' if '/' in match.group(3) else 'git'
    (package_dict['provider'],
     package_dict['provider_tld']) = match.group(1).split('.')
    # (The original also bound an unused local 'provider'; removed.)
    package_dict['project'] = match.group(2)
    package_dict['repo'] = match.group(3).split('/')[0].replace('.git', '')
    package_dict[
        'repo_name'] = package_dict['project'] + '-' + package_dict['repo']
    package_dict['import_path'] = import_path
    package_dict['today'] = datetime.datetime.now().strftime("%a %b %d %Y")
    # Shell variable name that will hold the version, e.g. FOOBARVER.
    package_ver_var = package_dict['repo'].replace('-', '').replace(
        '_', '').replace('.', '').upper() + 'VER'
    repo_name = package_dict['repo_name']
    spec_filename = repo_name + '.spec'
    repo_filename_prefix = package_dict['project'] + '-' + package_dict[
        'repo'] + '-$' + package_ver_var
    repo_filename = repo_filename_prefix + '.tar.gz'
    package_dict['source_filename'] = repo_filename_prefix.replace(
        '$' + package_ver_var, '%{version}')
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, spec_template))),
        package_dict, os.path.join(rpmbuild_root, 'SPECS', spec_filename))
    # Minimal nginx vhost serving the packaged files from /opt.
    nginx_conf = []
    nginx_conf.append('server {')
    nginx_conf.append(' listen 80;')
    nginx_conf.append(' server_name _;')
    nginx_conf.append(' index index.html;')
    nginx_conf.append(' root /opt/' + package_dict['project'] + '/' +
                      package_dict['repo'] + ';')
    nginx_conf.append('}')
    common.render_template(
        '{{ context }}', {'context': '\n'.join(nginx_conf)},
        os.path.join(rpmbuild_root, 'SOURCES',
                     package_dict['repo_name'] + '.nginx'))
    spec_script.append('')
    spec_script.append('export %s' % package_ver_var + '=' +
                       package_dict['package_ver'])
    if download_method == 'git':
        spec_script.append('cd /usr/local/src')
        spec_script.append('rm -rf /usr/local/src/' + repo_filename_prefix)
        spec_script.append('git clone --depth=10 ' + package_url + '.git ' +
                           repo_filename_prefix)
        spec_script.append('tar -zcf $SRCDIR/' + repo_filename + ' ' +
                           repo_filename_prefix)
        spec_script.append('cd /usr/local/src/' + repo_filename_prefix)
        spec_script.append('export GITCOMMIT=`git rev-parse HEAD`')
        spec_script.append('cd ..')
        spec_script.append(
            'sed -i -e "/^%global/s#%global commit.*#%global commit $GITCOMMIT#g" $RPMBUILDROOT/SPECS/'
            + spec_filename)
    else:
        spec_script.append('wget -O $SRCDIR/' + repo_filename + ' ' +
                           package_url)
        spec_script.append('export GITCOMMIT=' + sys.argv[3])
    spec_script.append('')
    spec_script.append('/bin/cp -f $RPMBUILDROOT/SPECS/' + spec_filename +
                       ' $SPECSDIR/')
    spec_script.append('/bin/cp -f $SRCDIR/' + repo_filename +
                       ' $RPMBUILDROOT/SOURCES/')
    spec_script.append('/bin/cp -f $SPECSDIR/' + spec_filename +
                       ' $RPMBUILDROOT/SPECS/')
    spec_script.append('rpmbuild -bb $RPMBUILDROOT/SPECS/' + spec_filename)
    spec_script.append('rm -f $RPMDIR/' + repo_name + '-*')
    spec_script.append('mv -f $RPMBUILDROOT/RPMS/noarch/' + repo_name +
                       '-* $RPMDIR')
    print('\n'.join(spec_script))
    return None
def gen_inv(args):
    """Generate the Ansible inventory/playbook/pillar files for a
    MySQL-MMM cluster.

    Unlike the other gen_inv variants in this project, this one only
    renders the files and prints the ansible-playbook command; it never
    runs the playbook itself.  Returns None.
    """
    start_time = time.time()
    log_filename = 'mmm.log'
    logger = common.MyLogger('mmm', log_filename).default_logger.logger
    logger.info('args:' + str(args))
    playbook_template = 'mmm-playbook.j2'
    setting_template = 'mmm-setting.j2'
    hosts_script = []
    hosts_script.append('[mmm]')
    # Strip the leading '--' from every docopt option name.
    mmm_dict = {k[2:]: v for k, v in args.items()}
    mmm_dict['mon_fqdn'] = 'monitor_vip'
    # Pick the cluster-resource manager by OS major version: pacemaker on
    # >6, heartbeat on <=6.  NOTE(review): the literal 192.168.10.1 looks
    # like a "no monitor vip" sentinel that disables both -- TODO confirm.
    if int(mmm_dict['osver']) > 6:
        mmm_dict['heartbeat'] = ''
        mmm_dict['pacemaker'] = '' if mmm_dict[
            'monitor_vip'] == '192.168.10.1' else '\n - pacemaker'
    else:
        mmm_dict['pacemaker'] = ''
        mmm_dict['heartbeat'] = '' if mmm_dict[
            'monitor_vip'] == '192.168.10.1' else '\n - heartbeat'
    mmm_dict['writer_fqdn'] = 'writer_vip'
    mmm_dict['writer_vips'] = args['--writer_vip'].split(",")
    mmm_dict['reader_fqdn'] = 'reader_vip'
    mmm_dict['reader_vips'] = args['--reader_vip'].split(",")
    mmm_dict['monitor_hosts'] = args['--monitor_host'].split(",")
    mmm_dict['enable_backup'] = not args['--without_backup']
    # Pad a lone monitor with a placeholder entry -- presumably the
    # downstream template expects at least two monitors; TODO confirm.
    if len(mmm_dict['monitor_hosts']) == 1:
        mmm_dict['monitor_hosts'].append('fakehost.domain:192.168.98.98')
    mmm_dict['data_hosts'] = args['--data_host'].split(",")
    mmm_dict['ssh_try_limit'] = int(args['--ssh_try_limit'])
    # An externally supplied task id doubles as the run uuid.
    if args['--taskid']:
        mmm_dict['task_id'] = ' external_task_id: {}\n'.format(
            args['--taskid'])
        mmm_dict['uuid'] = args['--taskid']
    else:
        mmm_dict['task_id'] = ''
        mmm_dict['uuid'] = str(uuid.uuid4())
    # Prefer password auth over key auth when both are given.
    ansible_auth = ''
    if mmm_dict['sshpass']:
        ansible_auth = 'ansible_ssh_pass={}'.format(mmm_dict['sshpass'])
    elif mmm_dict['sshkey']:
        ansible_auth = 'ansible_ssh_private_key_file={}'.format(
            mmm_dict['sshkey'])
    # Per-run output paths, all keyed by the uuid.
    playbook_filename = os.path.join(mmm_dict['workdir'],
                                     'mmm_' + mmm_dict['uuid'] + '.yml')
    host_filename = os.path.join(mmm_dict['workdir'], 'inventory',
                                 mmm_dict['uuid'], 'hosts')
    setting_filename = os.path.join(mmm_dict['workdir'], 'inventory',
                                    mmm_dict['uuid'], 'pillar', 'mmm.yml')
    mmm_dict['data_hostlist'] = []
    mmm_dict['mmm_hostlist'] = []
    # Monitor hosts: inventory line + numbered mon_hostN template keys.
    for i, host_info in enumerate(mmm_dict['monitor_hosts']):
        (k, v) = host_info.split(":")
        k = k.lower()
        mmm_dict['mmm_hostlist'].append(k)
        hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
            k, 'ansible_ssh_host=' + v, str(mmm_dict['sshport']),
            ansible_auth, mmm_dict['hostarg']))
        mmm_dict['mon_host' + str(i + 1)] = k
    # Data hosts: inventory line + numbered data_hostN template keys.
    for i, host_info in enumerate(mmm_dict['data_hosts']):
        (k, v) = host_info.split(":")
        k = k.lower()
        mmm_dict['data_hostlist'].append(k)
        hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
            k, 'ansible_ssh_host=' + v, str(mmm_dict['sshport']),
            ansible_auth, mmm_dict['hostarg']))
        mmm_dict['data_host' + str(i + 1)] = k
    logger.info('create ansible hosts: {}'.format(host_filename))
    common.render_template('\n'.join(hosts_script), {}, host_filename)
    logger.info('craete ansible playbooks: {}'.format(playbook_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, playbook_template))),
        mmm_dict, playbook_filename)
    logger.info('create mysql with mmm setting: {}'.format(setting_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, setting_template))),
        mmm_dict, setting_filename)
    print('You can run ansible-playbook -i {} {}'.format(
        host_filename, playbook_filename))
    print("--- Total Excution time: %s ---" %
          str(timedelta(seconds=(time.time() - start_time))))
    print('You can connect db with:\n mysql -uroot -p{} -h{}'.format(
        mmm_dict['password'], mmm_dict['writer_vips'][0]))
    return None
def gen_spec(args):
    """Generate an RPM spec for a Python project and print the shell
    build script to stdout.

    Parameters:
        args: docopt-style dict with '<git_hub_url>', '<version>' and
            '--rpmbuild_root'.

    Returns None; the spec is rendered to <rpmbuild_root>/SPECS and the
    build script is printed.
    """
    spec_template = 'python-template.spec'
    spec_script = []
    package_dict = {}
    package_url = args['<git_hub_url>']
    package_dict['package_ver'] = args['<version>']
    rpmbuild_root = args['--rpmbuild_root']
    pattern = re.compile(r'https://(.*?)/(.*?)/(.*)')
    match = re.match(pattern, package_url)
    # BUG FIX: the original tested `'/' in match.groups(3)` -- membership
    # in the tuple of all capture groups, which is (practically) never
    # true, so the method was always 'git'.  Test the third group itself:
    # an extra '/' in the path means a direct file URL, fetched with wget.
    download_method = 'wget' if '/' in match.group(3) else 'git'
    (package_dict['provider'],
     package_dict['provider_tld']) = match.group(1).split('.')
    package_dict['project'] = match.group(2)
    package_dict['repo'] = match.group(3).split('/')[0].replace('.git', '')
    package_dict['today'] = datetime.datetime.now().strftime("%a %b %d %Y")
    # Shell variable name that will hold the version, e.g. FOOBARVER.
    package_ver_var = package_dict['repo'].replace('-', '').replace(
        '_', '').replace('.', '').upper() + 'VER'
    repo_name = package_dict['repo']
    spec_filename = 'python-' + repo_name + '.spec'
    repo_filename = package_dict['repo'] + '-$' + package_ver_var + '.tar.gz'
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, spec_template))),
        package_dict, os.path.join(rpmbuild_root, 'SPECS', spec_filename))
    spec_script.append('')
    spec_script.append('export %s' % package_ver_var + '=' +
                       package_dict['package_ver'])
    if download_method == 'git':
        spec_script.append('cd /usr/local/src')
        spec_script.append('rm -rf /usr/local/src/' + package_dict['repo'] +
                           '-$' + package_ver_var)
        spec_script.append('git clone --depth=10 ' + package_url + ' ' +
                           package_dict['repo'] + '-$' + package_ver_var)
        spec_script.append('tar -zcf $SRCDIR/' + repo_filename + ' ' +
                           package_dict['repo'] + '-$' + package_ver_var)
        spec_script.append('cd /usr/local/src/' + package_dict['repo'] +
                           '-$' + package_ver_var)
        spec_script.append('export GITCOMMIT=`git rev-parse HEAD`')
        spec_script.append('cd ..')
        spec_script.append(
            'sed -i -e "/^%global/s#%global commit.*#%global commit $GITCOMMIT#g" $RPMBUILDROOT/SPECS/'
            + spec_filename)
    else:
        spec_script.append('wget -O $SRCDIR/' + repo_filename + ' ' +
                           package_url)
        spec_script.append('export GITCOMMIT=' + sys.argv[3])
    spec_script.append('')
    spec_script.append('/bin/cp -f $RPMBUILDROOT/SPECS/' + spec_filename +
                       ' $SPECSDIR/')
    spec_script.append('/bin/cp -f $SRCDIR/' + repo_filename +
                       ' $RPMBUILDROOT/SOURCES/')
    spec_script.append('/bin/cp -f $SPECSDIR/' + spec_filename +
                       ' $RPMBUILDROOT/SPECS/')
    spec_script.append('rpmbuild -bb $RPMBUILDROOT/SPECS/' + spec_filename)
    spec_script.append('rm -f $RPMDIR/python-' + repo_name + '-*')
    # Collect both arch-specific and noarch build products.
    spec_script.append('mv -f $RPMBUILDROOT/RPMS/x86_64/python-' + repo_name +
                       '-* $RPMDIR')
    spec_script.append('mv -f $RPMBUILDROOT/RPMS/noarch/python-' + repo_name +
                       '-* $RPMDIR')
    print('\n'.join(spec_script))
    return None
def dbbackup(args, func_type, fields):
    """Run a database backup/restore playbook across a set of hosts.

    Renders an ansible inventory and a ``dbbackup-<dbtype>-<func_type>``
    playbook from templates, optionally stops after rendering
    (``--template_only``), otherwise waits for ssh on every host, runs the
    playbook via pyansible and collects per-host results from ARA.

    Args:
        args: docopt-style dict ('--data_host', '--taskid',
            '--ssh_try_limit', '--workdir', '--dbtype', ...).
        func_type: playbook flavour, used in template/file names.
        fields: optional whitelist of result keys to keep per host.

    Returns:
        Dict mapping host name -> result dict (with 'ssh_check' added),
        or None when only templates were generated.
    """
    start_time = time.time()
    hosts_script = []
    hosts_script.append('[backup]')
    # Strip the leading "--" from docopt keys: '--workdir' -> 'workdir'.
    backup_dict = {k[2:]: v for k, v in args.items()}
    if args['--taskid']:
        backup_dict['task_id'] = ' external_task_id: {}\n'.format(
            args['--taskid'])
        backup_dict['uuid'] = args['--taskid']
    else:
        backup_dict['uuid'] = str(uuid.uuid4())
        backup_dict['task_id'] = ' external_task_id: {}\n'.format(
            backup_dict['uuid'])
    log_filename = os.path.join(
        backup_dict['workdir'], 'dbbackup_%s_%s_%s.log' %
        (backup_dict['dbtype'], func_type, backup_dict['uuid']))
    logger = common.MyLogger(
        'dbbackup_%s_%s' % (backup_dict['dbtype'], backup_dict['uuid']),
        log_filename).default_logger.logger
    logger.info('args:' + str(args))
    backup_dict['data_hosts'] = args['--data_host'].split(",")
    backup_dict['ssh_try_limit'] = int(args['--ssh_try_limit'])
    playbook_template = 'dbbackup-%s-%s-playbook.j2' % (backup_dict['dbtype'],
                                                        func_type)
    playbook_filename = os.path.join(
        backup_dict['workdir'], 'dbbackup_%s_%s_%s.yml' %
        (backup_dict['dbtype'], func_type, backup_dict['uuid']))
    host_filename = os.path.join(backup_dict['workdir'], 'inventory',
                                 backup_dict['uuid'], 'hosts')
    ansible_auth = ''
    if backup_dict['sshpass']:
        ansible_auth = 'ansible_ssh_pass={}'.format(backup_dict['sshpass'])
    elif backup_dict['sshkey']:
        ansible_auth = 'ansible_ssh_private_key_file={}'.format(
            backup_dict['sshkey'])
    host_list = []
    ip_list = []
    # Each entry of --data_host is "NAME:IP".
    for i, host_info in enumerate(backup_dict['data_hosts']):
        (k, v) = host_info.split(":")
        k = k.lower()
        host_list.append(k)
        ip_list.append(v)
        hosts_script.append('{:<60}{:<60}ansible_ssh_port={:<7}{} {}'.format(
            k, 'ansible_ssh_host=' + v, str(backup_dict['sshport']),
            ansible_auth, backup_dict['hostarg']))
    logger.info('create ansible hosts: {}'.format(host_filename))
    common.render_template('\n'.join(hosts_script), {}, host_filename)
    logger.info('create ansible playbooks: {}'.format(playbook_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, playbook_template))),
        backup_dict, playbook_filename)
    if backup_dict['template_only']:
        print('You can run ansible-playbook -i {} {}'.format(
            host_filename, playbook_filename))
        print("--- Total Execution time: %s ---" %
              str(timedelta(seconds=(time.time() - start_time))))
    else:
        logger.info('check ssh availability')
        # NOTE(review): `i` is shared across hosts, so ssh_try_limit is a
        # TOTAL retry budget for all hosts, not per host — confirm intended.
        i = 1
        check_list = []
        for check_ip in ip_list:
            while (not common.check_server(check_ip,
                                           int(backup_dict['sshport']))) and (
                                               i < backup_dict['ssh_try_limit']):
                time.sleep(1)
                i += 1
        for check_ip in ip_list:
            check_result = common.check_server(check_ip,
                                               int(backup_dict['sshport']))
            check_list.append(check_result)
            if (not check_result):
                logger.info('ssh check limit exceed ({} sec): ip {}'.format(
                    str(backup_dict['ssh_try_limit']), check_ip))
        logger.info('run ansible from python')
        runner = pyansible.playbooks.Runner(hosts_file=host_filename,
                                            playbook_file=playbook_filename,
                                            verbosity=3)
        runner.run()
        print("--- Total Execution time: %s ---" %
              str(timedelta(seconds=(time.time() - start_time))))
        result_dict = {}
        araapi = common.AraApi()
        for i, host_info in enumerate(host_list):
            # Pull this host's recorded result out of ARA.
            cmd = 'data show --playbook %s mybak_%s -f json' % (
                backup_dict['uuid'], host_info)
            result = araapi.run_result(cmd)
            if result['stderr']:
                logger.error('ara cmd error: {}: {}'.format(
                    host_info, result['stderr']))
            if result['stdout']:
                tmp_dict = json.loads(result['stdout'])['Value']
                if isinstance(tmp_dict, list):
                    tmp_dict = {'backup_list': tmp_dict}
            else:
                tmp_dict = {}
            if fields:
                # BUG FIX: iterate a snapshot of the keys — popping while
                # iterating the live keys view raises RuntimeError on Python 3.
                for k in list(tmp_dict.keys()):
                    if k not in fields:
                        tmp_dict.pop(k)
            tmp_dict['ssh_check'] = check_list[i]
            result_dict[host_info] = tmp_dict
        logger.info('return value: %s' % (result_dict))
        # Restore streams in case MyLogger redirected them.
        sys.stdout = sys.__stdout__
        sys.stderr = sys.__stderr__
        return result_dict
def file_route_index(api_key):
    """Render the file index page listing all files from the backend."""
    # BUG FIX: log label was 'file::error'; every sibling route logs
    # '<entity>::index', and this handler IS the index route.
    logger.info('Responding to file::index')
    files = backend.file.index()
    return render_template('file_index', files=files)
def manga_route_index(api_key):
    """Render the manga index page with every manga known to the backend."""
    logger.info('Responding to manga::index')
    all_manga = backend.manga.index()
    return render_template('manga_index', manga=all_manga)
def _run_terraform(tf_args, cwd, logger, step):
    """Run one terraform subcommand in *cwd*; log stderr and abort on failure.

    Returns the process stdout on success.
    """
    p = Popen(['terraform'] + tf_args,
              cwd=cwd,
              stdout=PIPE,
              stderr=PIPE,
              stdin=PIPE)
    stdout, stderr = p.communicate()
    if p.returncode > 0:
        # NOTE(review): stdout/stderr are bytes on Python 3; this str
        # concatenation implies the tool runs on Python 2 — confirm before
        # porting.
        logger.error('terraform ' + step + ' error: ' + stderr)
        # BUG FIX: bare sys.exit() exits with status 0, hiding the failure
        # from callers and CI; exit non-zero.
        sys.exit(1)
    return stdout


def gen_inv(args):
    """Provision a MySQL EC2 instance with terraform and configure it via ansible.

    Renders a terraform file for the instance, runs plan/apply, reads the
    private IP from terraform output, then renders an ansible inventory,
    playbook and mysql settings file, waits for ssh and runs the playbook.

    Args:
        args: docopt-style dict ('--taskid', '--workdir', '--database',
            '--sshport', '--ssh_try_limit', ...).

    Returns:
        None. Connection instructions are printed on success.
    """
    start_time = time.time()
    playbook_template = 'mysql-playbook.j2'
    setting_template = 'mysql-setting-aws.j2'
    ec2_template = 'ec2-instance.j2'
    hosts_script = []
    hosts_script.append('[mysql]')
    # Strip the leading "--" from docopt keys: '--workdir' -> 'workdir'.
    mysql_dict = {k[2:]: v for k, v in args.items()}
    if args['--taskid']:
        mysql_dict['task_id'] = ' external_task_id: {}\n'.format(
            args['--taskid'])
        mysql_dict['uuid'] = args['--taskid']
    else:
        mysql_dict['task_id'] = ''
        mysql_dict['uuid'] = str(uuid.uuid4())
    log_filename = os.path.join(mysql_dict['workdir'],
                                'mysql_' + mysql_dict['uuid'] + '.log')
    logger = common.MyLogger('aws', log_filename).default_logger.logger
    logger.info('args:' + str(args))
    mysql_dict['hostname'] = 'mysql-' + mysql_dict['database'].lower(
    ) + '-' + mysql_dict['uuid'][:7]
    mysql_dict['ssh_try_limit'] = int(args['--ssh_try_limit'])
    # --- provision ec2 instance via terraform ---
    terraform_cwd = '/opt/terraform/inventory/aws/us-east-1'
    terraform_filename = os.path.join(terraform_cwd,
                                      'ec2-' + mysql_dict['hostname'] + '.tf')
    logger.info('create terraform file: {}'.format(terraform_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, ec2_template))),
        mysql_dict, terraform_filename)
    print(_run_terraform(['plan'], terraform_cwd, logger, 'plan'))
    print(_run_terraform(['apply', '-auto-approve'], terraform_cwd, logger,
                         'apply'))
    stdout = _run_terraform([
        'output',
        'aws_instance_' + mysql_dict['hostname'].replace("-", "_") +
        '_private_ip'
    ], terraform_cwd, logger, 'output')
    # terraform output prints the value on the first line.
    mysql_dict['ip'] = stdout.split('\n')[0]
    print(stdout)
    # --- post configuration via ansible ---
    playbook_filename = os.path.join(mysql_dict['workdir'],
                                     'mysql_' + mysql_dict['uuid'] + '.yml')
    host_filename = os.path.join(mysql_dict['workdir'], 'inventory',
                                 mysql_dict['uuid'], 'hosts')
    setting_filename = os.path.join(mysql_dict['workdir'], 'inventory',
                                    mysql_dict['uuid'], 'pillar', 'mysql.yml')
    ansible_auth = ''
    if mysql_dict['sshpass']:
        ansible_auth = 'ansible_ssh_pass={}'.format(mysql_dict['sshpass'])
    elif mysql_dict['sshkey']:
        ansible_auth = 'ansible_ssh_private_key_file={}'.format(
            mysql_dict['sshkey'])
    hosts_script.append(
        '{:<40}{:<40}{:<50} ansible_ssh_port={:<7} ansible_ssh_user=centos ansible_become=true ansible_become_user=root ansible_become_method=sudo {}'
        .format(mysql_dict['hostname'] + '.useast1.aws',
                'ansible_ssh_host=' + mysql_dict['ip'], ansible_auth,
                str(mysql_dict['sshport']), mysql_dict['hostarg']))
    logger.info('create ansible hosts: {}'.format(host_filename))
    common.render_template('\n'.join(hosts_script), {}, host_filename)
    logger.info('create ansible playbooks: {}'.format(playbook_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, playbook_template))),
        mysql_dict, playbook_filename)
    logger.info(
        'create mysql single instance setting: {}'.format(setting_filename))
    common.render_template(
        '\n'.join(
            common.read_template(
                os.path.join(common.template_dir, setting_template))),
        mysql_dict, setting_filename)
    logger.info('check ssh availability')
    # Poll ssh once a second until it answers or the retry budget runs out.
    i = 1
    while (not common.check_server(mysql_dict['ip'], int(
            mysql_dict['sshport']))) and (i < mysql_dict['ssh_try_limit']):
        time.sleep(1)
        i += 1
    if (not common.check_server(mysql_dict['ip'],
                                int(mysql_dict['sshport']))):
        logger.info('ssh check limit exceed ({} sec): ip {}'.format(
            mysql_dict['ssh_try_limit'], mysql_dict['ip']))
    logger.info('run ansible from python')
    runner = pyansible.playbooks.Runner(hosts_file=host_filename,
                                        playbook_file=playbook_filename,
                                        verbosity=3)
    runner.run()
    print("--- Total Execution time: %s ---" %
          str(timedelta(seconds=(time.time() - start_time))))
    print('You can connect db with:\n mysql -uroot -p{} -h{} {}'.format(
        mysql_dict['password'], mysql_dict['ip'], mysql_dict['database']))
    return None