def run_create_default_lambda(name, settings):
    """Package and create a lambda function from a template folder.

    Cleans previously generated files (per the folder's ``.gitignore``),
    installs ``requirements.txt`` dependencies into the deploy folder,
    renders ``settings_local.py`` from the sample, zips everything and
    creates the function via the AWS CLI.

    :param name: lambda folder name under ``template/<template>/lambda/``.
    :param settings: dict with at least ``DESCRIPTION`` and ``NAME``; every
        key/value pair is also substituted into ``settings_local.py``.
    """
    description = settings['DESCRIPTION']
    function_name = settings['NAME']
    phase = env['common']['PHASE']
    template_name = env['template']['NAME']
    template_path = 'template/%s' % template_name
    deploy_folder = '%s/lambda/%s' % (template_path, name)

    ################################################################################
    print_session('create lambda: %s' % function_name)

    gitignore_path = '%s/.gitignore' % deploy_folder
    if os.path.exists(gitignore_path):
        ll = read_file(gitignore_path)
        print_message('cleanup generated files')
        # shell=True is deliberate: .gitignore entries may be glob patterns
        # that must be expanded by the shell relative to deploy_folder.
        subprocess.Popen(' '.join(['rm', '-rf'] + ll), shell=True,
                         cwd=deploy_folder).communicate()

    # BUGFIX: a second, unconditional print_message('install dependencies')
    # used to appear here, printing the message even when there was no
    # requirements.txt; removed so the message is only printed when the
    # install actually runs.
    requirements_path = '%s/requirements.txt' % deploy_folder
    if os.path.exists(requirements_path):
        print_message('install dependencies')
        # BUGFIX: was plain 'pip'; the function is created with the
        # python3.6 runtime below and every sibling function uses pip3, so
        # dependencies must be installed for Python 3.
        cmd = ['pip3', 'install', '-r', requirements_path, '-t', deploy_folder]
        subprocess.Popen(cmd).communicate()

    settings_path = '%s/settings_local_sample.py' % deploy_folder
    if os.path.exists(settings_path):
        print_message('create environment values')

        lines = read_file(settings_path)
        # PHASE first, then every settings key, substituted into the sample.
        option_list = list()
        option_list.append(['PHASE', phase])
        for key in settings:
            value = settings[key]
            option_list.append([key, value])
        for oo in option_list:
            lines = re_sub_lines(lines, '^(%s) .*' % oo[0],
                                 '\\1 = \'%s\'' % oo[1])
        write_file('%s/settings_local.py' % deploy_folder, lines)

    print_message('zip files')

    cmd = ['zip', '-r', 'deploy.zip', '.']
    subprocess.Popen(cmd, cwd=deploy_folder).communicate()

    print_message('create lambda function')

    # NOTE(review): aws_cli is not created in this function; it is
    # presumably a module-level AWSCli instance — confirm.
    role_arn = aws_cli.get_role_arn('aws-lambda-default-role')

    cmd = ['lambda', 'create-function',
           '--function-name', function_name,
           '--description', description,
           '--zip-file', 'fileb://deploy.zip',
           '--role', role_arn,
           '--handler', 'lambda.handler',
           '--runtime', 'python3.6',
           '--timeout', '120']
    aws_cli.run(cmd, cwd=deploy_folder)
def _build(deploy_folder, function_name, phase, settings, template_name, template_path):
    """Package a lambda deploy folder into ``deploy.zip``.

    Performs cleanup, pip3 dependency install, ``settings_local.py``
    generation and zipping, then returns the two git-hash tag strings
    (johanna repo and template repo) for tagging the created function.
    """
    rev_cmd = ['git', 'rev-parse', 'HEAD']
    hash_johanna = subprocess.Popen(rev_cmd, stdout=subprocess.PIPE).communicate()[0]
    hash_template = subprocess.Popen(rev_cmd, stdout=subprocess.PIPE,
                                     cwd=template_path).communicate()[0]

    print_session('packaging lambda: %s' % function_name)

    # Drop anything git does not track (previous build artifacts).
    print_message('cleanup generated files')
    subprocess.Popen(['git', 'clean', '-d', '-f', '-x'],
                     cwd=deploy_folder).communicate()

    req_file = '%s/requirements.txt' % deploy_folder
    if os.path.exists(req_file):
        print_message('install dependencies')
        subprocess.Popen(
            ['pip3', 'install', '-r', req_file, '-t', deploy_folder]
        ).communicate()

    sample_file = '%s/settings_local_sample.py' % deploy_folder
    if os.path.exists(sample_file):
        print_message('create environment values')

        content = read_file(sample_file)
        # PHASE first, then every key from settings.
        substitutions = [['PHASE', phase]]
        substitutions += [[key, settings[key]] for key in settings]
        for pair in substitutions:
            content = re_sub_lines(content, '^(%s) .*' % pair[0],
                                   '\\1 = \'%s\'' % pair[1])
        write_file('%s/settings_local.py' % deploy_folder, content)

    print_message('zip files')
    subprocess.Popen(['zip', '-r', 'deploy.zip', '.'],
                     cwd=deploy_folder).communicate()

    return [
        'git_hash_johanna=%s' % hash_johanna.decode('utf-8').strip(),
        'git_hash_%s=%s' % (template_name, hash_template.decode('utf-8').strip()),
    ]
def run_create_s3_webapp(name, settings):
    """Build a bower/grunt webapp and deploy it to an S3 bucket.

    Clones the app repo, runs bower/npm/grunt, uploads the ``dist`` output
    to a temp bucket, syncs it to the deploy bucket, records git hashes as
    bucket tags and purges the Cloudflare cache.

    NOTE(review): a second ``run_create_s3_webapp`` (yarn variant) is
    defined later in this module and shadows this definition at import
    time — confirm which one is intended to survive.
    """
    git_url = settings['GIT_URL']
    phase = env['common']['PHASE']
    template_name = env['template']['NAME']
    base_path = '%s/%s' % (name, settings.get('BASE_PATH', ''))
    common_path = '%s/%s' % (name, settings.get('COMMON_PATH', 'common'))
    template_path = 'template/%s' % template_name
    environment_path = '%s/s3/%s' % (template_path, name)
    app_root_path = os.path.normpath('%s/%s' % (environment_path, base_path))
    common_root_path = os.path.normpath('%s/%s' % (environment_path, common_path))
    deploy_bucket_name = settings['BUCKET_NAME']
    bucket_prefix = settings.get('BUCKET_PREFIX', '')
    deploy_bucket_prefix = os.path.normpath(
        '%s/%s' % (deploy_bucket_name, bucket_prefix))
    # Record the current commit of this repo and of the template repo; both
    # hashes are written as bucket tags at the end of the deploy.
    git_rev = ['git', 'rev-parse', 'HEAD']
    git_hash_johanna = subprocess.Popen(
        git_rev, stdout=subprocess.PIPE).communicate()[0]
    git_hash_template = subprocess.Popen(git_rev, stdout=subprocess.PIPE,
                                         cwd=template_path).communicate()[0]

    ################################################################################
    print_session('create %s' % name)

    ################################################################################
    print_message('git clone')

    # Remove any previous checkout before cloning fresh.
    subprocess.Popen(['rm', '-rf', './%s' % name],
                     cwd=environment_path).communicate()

    # 'dv' clones the default branch; other phases clone the branch named
    # after the phase.
    if phase == 'dv':
        git_command = ['git', 'clone', '--depth=1', git_url, name]
    else:
        git_command = ['git', 'clone', '--depth=1', '-b', phase, git_url, name]
    subprocess.Popen(git_command, cwd=environment_path).communicate()
    if not os.path.exists(app_root_path):
        raise Exception()

    git_clone_folder = '%s/%s' % (environment_path, name)
    git_hash_app = subprocess.Popen(git_rev, stdout=subprocess.PIPE,
                                    cwd=git_clone_folder).communicate()[0]
    # Strip git metadata so it is not uploaded to S3.
    subprocess.Popen(['rm', '-rf', './.git'],
                     cwd=git_clone_folder).communicate()
    subprocess.Popen(['rm', '-rf', './.gitignore'],
                     cwd=git_clone_folder).communicate()

    ################################################################################
    print_message('bower install')

    if not os.path.exists('%s/bower.json' % app_root_path):
        subprocess.Popen(
            ['cp', '%s/bower.json' % common_root_path, app_root_path]).communicate()
    bower_process = subprocess.Popen(['bower', 'install'], cwd=app_root_path)
    bower_result, error = bower_process.communicate()
    # NOTE(review): stdout/stderr are not piped, so communicate() returns
    # (None, None) and this branch can never fire; only the returncode
    # check below is effective. Same pattern for npm and grunt below.
    if error:
        print(error)
        raise Exception()
    if bower_process.returncode != 0:
        print(' '.join(['Bower returns:', str(bower_process.returncode)]))
        raise Exception()

    ################################################################################
    print_message('configure %s' % name)

    # Render settings-local.js from the sample, substituting phase plus
    # every settings key into matching 'const <KEY> ...' lines.
    lines = read_file('%s/configuration/app/scripts/settings-local-sample.js'
                      % environment_path)
    option_list = list()
    option_list.append(['phase', phase])
    for key in settings:
        value = settings[key]
        option_list.append([key, value])
    for oo in option_list:
        lines = re_sub_lines(lines, '^(const %s) .*' % oo[0],
                             '\\1 = \'%s\';' % oo[1])
    write_file('%s/app/scripts/settings-local.js' % app_root_path, lines)

    ################################################################################
    print_message('grunt build')

    if not os.path.exists('%s/package.json' % app_root_path):
        subprocess.Popen(
            ['cp', '%s/package.json' % common_root_path, app_root_path]).communicate()
    npm_process = subprocess.Popen(['npm', 'install'], cwd=app_root_path)
    npm_result, error = npm_process.communicate()
    if error:
        print(error)
        raise Exception()
    if npm_process.returncode != 0:
        print(' '.join(['NPM exited with:', str(npm_process.returncode)]))
        raise Exception()

    grunt_process = subprocess.Popen(['grunt'], cwd=app_root_path)
    grunt_result, error = grunt_process.communicate()
    if error:
        print(error)
        raise Exception()
    if grunt_process.returncode != 0:
        print(' '.join(['Grunt exited with:', str(grunt_process.returncode)]))
        raise Exception()

    ################################################################################
    print_message('upload to temp bucket')

    app_dist_path = '%s/dist' % app_root_path
    # NOTE(review): aws_cli is not created in this function (unlike
    # run_create_s3_vue) — presumably a module-level instance; confirm.
    temp_bucket_name = aws_cli.get_temp_bucket()
    timestamp = int(time.time())
    temp_bucket_prefix = '%s/%s/%s/%s/%s' % (temp_bucket_name, template_name,
                                             name, base_path, timestamp)
    temp_bucket_prefix = os.path.normpath(temp_bucket_prefix)
    temp_bucket_uri = 's3://%s' % temp_bucket_prefix
    cmd = ['s3', 'cp', '.', temp_bucket_uri, '--recursive']
    upload_result = aws_cli.run(cmd, cwd=app_dist_path)
    for ll in upload_result.split('\n'):
        print(ll)

    ################################################################################
    print_message('delete old files from deploy bucket')

    delete_excluded_files = list(settings.get('DELETE_EXCLUDED_FILES', ''))
    if len(delete_excluded_files) > 0:
        cmd = ['s3', 'rm', 's3://%s' % deploy_bucket_prefix, '--recursive']
        for ff in delete_excluded_files:
            cmd += ['--exclude', '%s' % ff]
        delete_result = aws_cli.run(cmd)
        for ll in delete_result.split('\n'):
            print(ll)

    ################################################################################
    print_message('sync to deploy bucket')

    # When nothing is excluded, let sync delete stale objects itself.
    cmd = ['s3', 'sync', temp_bucket_uri, 's3://%s' % deploy_bucket_prefix]
    if len(delete_excluded_files) < 1:
        cmd += ['--delete']
    sync_result = aws_cli.run(cmd)
    for ll in sync_result.split('\n'):
        print(ll)

    ################################################################################
    print_message('tag to deploy bucket')

    # put-bucket-tagging replaces the whole tag set, so merge existing tags
    # first, then overwrite the deploy-related keys.
    tag_dict = dict()
    cmd = ['s3api', 'get-bucket-tagging', '--bucket', deploy_bucket_name]
    tag_result = aws_cli.run(cmd, ignore_error=True)
    if tag_result:
        tag_result = dict(tag_result)
        for tt in tag_result['TagSet']:
            key = tt['Key']
            value = tt['Value']
            tag_dict[key] = value
    tag_dict['phase'] = phase
    # NOTE(review): the hashes are not .strip()'d here (they are elsewhere
    # in this module), so the tag values carry a trailing newline — confirm
    # whether that is intended.
    tag_dict['git_hash_johanna'] = git_hash_johanna.decode('utf-8')
    tag_dict['git_hash_template'] = git_hash_template.decode('utf-8')
    tag_dict['git_hash_%s' % name] = git_hash_app.decode('utf-8')
    tag_dict['timestamp_%s' % name] = timestamp
    tag_format = '{Key=%s, Value=%s}'
    tag_list = list()
    for key in tag_dict:
        value = tag_dict[key]
        tag_list.append(tag_format % (key, value))
    cmd = [
        's3api', 'put-bucket-tagging',
        '--bucket', deploy_bucket_name,
        '--tagging', 'TagSet=[%s]' % ','.join(tag_list)
    ]
    aws_cli.run(cmd)

    ################################################################################
    print_message('cleanup temp bucket')

    cmd = ['s3', 'rm', temp_bucket_uri, '--recursive']
    upload_result = aws_cli.run(cmd)
    for ll in upload_result.split('\n'):
        print(ll)

    ################################################################################
    print_message('purge cache from cloudflare')

    cf_api_key = env['common']['CLOUDFLARE_API_KEY']
    cf_auth_email = env['common']['CLOUDFLARE_AUTH_EMAIL']
    cf_zone_id = env['common']['CLOUDFLARE_ZONE_ID']
    cf_endpoint = 'https://api.cloudflare.com/client/v4/zones/%s/purge_cache' % cf_zone_id
    data = dict()
    data['files'] = list(settings['PURGE_CACHE_FILES'])
    cmd = [
        'curl', '-X', 'DELETE', cf_endpoint,
        '-H', 'X-Auth-Email: %s' % cf_auth_email,
        '-H', 'X-Auth-Key: %s' % cf_api_key,
        '-H', 'Content-Type: application/json',
        '--data', json.dumps(data)
    ]
    subprocess.Popen(cmd).communicate()
def run_create_s3_vue(name, settings):
    """Build a Vue app (npm) and deploy it to an S3 static website bucket.

    Clones the app repo, runs ``npm install`` / ``npm run build``, uploads
    the ``dist`` output via a temp bucket, creates/configures the deploy
    bucket (policy + website configuration), tags it with git hashes and
    optionally invalidates a CloudFront distribution.
    """
    aws_cli = AWSCli()
    git_url = settings['GIT_URL']
    phase = env['common']['PHASE']
    template_name = env['template']['NAME']
    base_path = '%s/%s' % (name, settings.get('BASE_PATH', ''))
    common_path = '%s/%s' % (name, settings.get('COMMON_PATH', 'common'))
    template_path = 'template/%s' % template_name
    environment_path = '%s/s3/%s' % (template_path, name)
    app_root_path = os.path.normpath('%s/%s' % (environment_path, base_path))
    common_root_path = os.path.normpath('%s/%s' % (environment_path, common_path))
    deploy_bucket_name = settings['BUCKET_NAME']
    bucket_prefix = settings.get('BUCKET_PREFIX', '')
    deploy_bucket_prefix = os.path.normpath(
        '%s/%s' % (deploy_bucket_name, bucket_prefix))
    deploy_protocol = settings.get('PROTOCOL', 'http')
    # Current commits of this repo and the template repo, later written as
    # bucket tags.
    git_rev = ['git', 'rev-parse', 'HEAD']
    git_hash_johanna = subprocess.Popen(
        git_rev, stdout=subprocess.PIPE).communicate()[0]
    git_hash_template = subprocess.Popen(git_rev, stdout=subprocess.PIPE,
                                         cwd=template_path).communicate()[0]

    ################################################################################
    print_session('create %s' % name)

    ################################################################################
    print_message('git clone')

    # Fresh checkout: remove any previous clone first.
    subprocess.Popen(['rm', '-rf', './%s' % name],
                     cwd=environment_path).communicate()

    # 'dv' uses the default branch; other phases clone the branch named
    # after the phase.
    if phase == 'dv':
        git_command = ['git', 'clone', '--depth=1', git_url, name]
    else:
        git_command = ['git', 'clone', '--depth=1', '-b', phase, git_url, name]
    subprocess.Popen(git_command, cwd=environment_path).communicate()
    if not os.path.exists(app_root_path):
        raise Exception()

    git_clone_folder = '%s/%s' % (environment_path, name)
    git_hash_app = subprocess.Popen(git_rev, stdout=subprocess.PIPE,
                                    cwd=git_clone_folder).communicate()[0]
    # Strip git metadata so it is not uploaded.
    subprocess.Popen(['rm', '-rf', './.git'],
                     cwd=git_clone_folder).communicate()
    subprocess.Popen(['rm', '-rf', './.gitignore'],
                     cwd=git_clone_folder).communicate()

    ################################################################################
    print_message('npm install')

    if not os.path.exists('%s/package.json' % app_root_path):
        subprocess.Popen(
            ['cp', '%s/package.json' % common_root_path, app_root_path]).communicate()
    npm_process = subprocess.Popen(['npm', 'install'], cwd=app_root_path)
    npm_result, error = npm_process.communicate()
    # NOTE(review): stdout/stderr are not piped, so communicate() returns
    # (None, None) and this branch never fires; only the returncode check
    # is effective. Same below for 'npm run build'.
    if error:
        print(error)
        raise Exception()
    if npm_process.returncode != 0:
        print(' '.join(['npm returns:', str(npm_process.returncode)]))
        raise Exception()

    ################################################################################
    print_message('configure %s' % name)

    # Render static/settings-local.js from the sample, substituting phase
    # plus every settings key into matching 'const <KEY> ...' lines.
    lines = read_file('%s/configuration/static/settings-local-sample.js'
                      % environment_path)
    option_list = list()
    option_list.append(['phase', phase])
    for key in settings:
        value = settings[key]
        option_list.append([key, value])
    for oo in option_list:
        lines = re_sub_lines(lines, '^(const %s) .*' % oo[0],
                             '\\1 = \'%s\'' % oo[1])
    write_file('%s/static/settings-local.js' % app_root_path, lines)

    ################################################################################
    print_message('npm build')

    npm_process = subprocess.Popen(['npm', 'run', 'build'], cwd=app_root_path)
    npm_result, error = npm_process.communicate()
    if error:
        print(error)
        raise Exception()
    if npm_process.returncode != 0:
        print(' '.join(['Npm exited with:', str(npm_process.returncode)]))
        raise Exception()

    ################################################################################
    print_message('upload to temp bucket')

    app_dist_path = '%s/dist' % app_root_path
    temp_bucket_name = aws_cli.get_temp_bucket()
    timestamp = int(time.time())
    temp_bucket_prefix = '%s/%s/%s/%s/%s' % (temp_bucket_name, template_name,
                                             name, base_path, timestamp)
    temp_bucket_prefix = os.path.normpath(temp_bucket_prefix)
    temp_bucket_uri = 's3://%s' % temp_bucket_prefix
    cmd = ['s3', 'cp', '.', temp_bucket_uri, '--recursive']
    upload_result = aws_cli.run(cmd, cwd=app_dist_path)
    for ll in upload_result.split('\n'):
        print(ll)

    ################################################################################
    print_message('create deploy bucket if not exists')

    # 'mb' fails if the bucket already exists; that failure is ignored.
    cmd = ['s3', 'mb', 's3://%s' % deploy_bucket_name]
    aws_cli.run(cmd, ignore_error=True)

    ################################################################################
    print_message('set bucket policy')

    lines = read_file('%s/configuration/aws-s3-bucket-policy-sample.json'
                      % environment_path)
    lines = re_sub_lines(lines, 'BUCKET_NAME', deploy_bucket_name)
    write_file('%s/configuration/aws-s3-bucket-policy.json' % environment_path,
               lines)

    cmd = ['s3api', 'put-bucket-policy']
    cmd += ['--bucket', deploy_bucket_name]
    cmd += [
        '--policy',
        'file://%s/configuration/aws-s3-bucket-policy.json' % environment_path
    ]
    aws_cli.run(cmd)

    ################################################################################
    print_message('set website configuration')

    lines = read_file(
        '%s/configuration/aws-s3-website-configuration-sample.json'
        % environment_path)
    lines = re_sub_lines(lines, 'BUCKET_NAME', deploy_bucket_name)
    lines = re_sub_lines(lines, 'PROTOCOL', deploy_protocol)
    write_file(
        '%s/configuration/aws-s3-website-configuration.json' % environment_path,
        lines)

    cmd = ['s3api', 'put-bucket-website']
    cmd += ['--bucket', deploy_bucket_name]
    cmd += [
        '--website-configuration',
        'file://%s/configuration/aws-s3-website-configuration.json'
        % environment_path
    ]
    aws_cli.run(cmd)

    ################################################################################
    print_message('delete old files from deploy bucket')

    delete_excluded_files = list(settings.get('DELETE_EXCLUDED_FILES', ''))
    if len(delete_excluded_files) > 0:
        cmd = ['s3', 'rm', 's3://%s' % deploy_bucket_prefix, '--recursive']
        for ff in delete_excluded_files:
            cmd += ['--exclude', '%s' % ff]
        delete_result = aws_cli.run(cmd)
        for ll in delete_result.split('\n'):
            print(ll)

    ################################################################################
    print_message('sync to deploy bucket')

    # When nothing is excluded, let sync delete stale objects itself.
    cmd = ['s3', 'sync', temp_bucket_uri, 's3://%s' % deploy_bucket_prefix]
    if len(delete_excluded_files) < 1:
        cmd += ['--delete']
    sync_result = aws_cli.run(cmd)
    for ll in sync_result.split('\n'):
        print(ll)

    ################################################################################
    print_message('tag to deploy bucket')

    # put-bucket-tagging replaces the whole tag set, so merge existing tags
    # first, then overwrite the deploy-related keys.
    tag_dict = dict()
    cmd = ['s3api', 'get-bucket-tagging', '--bucket', deploy_bucket_name]
    tag_result = aws_cli.run(cmd, ignore_error=True)
    if tag_result:
        tag_result = dict(tag_result)
        for tt in tag_result['TagSet']:
            key = tt['Key']
            value = tt['Value']
            tag_dict[key] = value
    tag_dict['phase'] = phase
    tag_dict['git_hash_johanna'] = git_hash_johanna.decode('utf-8')
    tag_dict['git_hash_template'] = git_hash_template.decode('utf-8')
    tag_dict['git_hash_%s' % name] = git_hash_app.decode('utf-8')
    tag_dict['timestamp_%s' % name] = timestamp
    tag_format = '{Key=%s, Value=%s}'
    tag_list = list()
    for key in tag_dict:
        value = tag_dict[key]
        tag_list.append(tag_format % (key, value))
    cmd = [
        's3api', 'put-bucket-tagging',
        '--bucket', deploy_bucket_name,
        '--tagging', 'TagSet=[%s]' % ','.join(tag_list)
    ]
    aws_cli.run(cmd)

    ################################################################################
    print_message('cleanup temp bucket')

    cmd = ['s3', 'rm', temp_bucket_uri, '--recursive']
    upload_result = aws_cli.run(cmd)
    for ll in upload_result.split('\n'):
        print(ll)

    ################################################################################
    print_message('invalidate cache from cloudfront')

    cf_dist_id = settings.get('CLOUDFRONT_DIST_ID', '')
    if len(cf_dist_id) > 0:
        path_list = list(settings['INVALIDATE_PATHS'])
        cmd = [
            'cloudfront', 'create-invalidation',
            '--distribution-id', cf_dist_id,
            '--paths', ' '.join(path_list)
        ]
        invalidate_result = aws_cli.run(cmd)
        print(invalidate_result)
def run_create_s3_webapp(name, settings):
    """Build a yarn/grunt webapp and deploy it to an S3 bucket.

    NOTE(review): this redefines ``run_create_s3_webapp`` — an earlier
    bower-based definition with the same name exists in this module and is
    shadowed by this one at import time. Confirm the duplication is
    intentional; renaming one of them would change the public interface.
    """
    git_url = settings['GIT_URL']
    phase = env['common']['PHASE']
    template_name = env['template']['NAME']
    base_path = '%s/%s' % (name, settings.get('BASE_PATH', ''))
    common_path = '%s/%s' % (name, settings.get('COMMON_PATH', 'common'))
    template_path = 'template/%s' % template_name
    environment_path = '%s/s3/%s' % (template_path, name)
    app_root_path = os.path.normpath('%s/%s' % (environment_path, base_path))
    common_root_path = os.path.normpath('%s/%s' % (environment_path, common_path))
    deploy_bucket_name = settings['BUCKET_NAME']
    bucket_prefix = settings.get('BUCKET_PREFIX', '')
    deploy_bucket_prefix = os.path.normpath('%s/%s' % (deploy_bucket_name, bucket_prefix))
    # Current commits of this repo and the template repo, later written as
    # bucket tags.
    git_rev = ['git', 'rev-parse', 'HEAD']
    git_hash_johanna = subprocess.Popen(git_rev, stdout=subprocess.PIPE).communicate()[0]
    git_hash_template = subprocess.Popen(git_rev, stdout=subprocess.PIPE,
                                         cwd=template_path).communicate()[0]

    ################################################################################
    print_session('create %s' % name)

    ################################################################################
    print_message('git clone')

    # Fresh checkout: remove any previous clone first.
    subprocess.Popen(['rm', '-rf', './%s' % name],
                     cwd=environment_path).communicate()

    # 'dv' uses the default branch; other phases clone the branch named
    # after the phase.
    if phase == 'dv':
        git_command = ['git', 'clone', '--depth=1', git_url, name]
    else:
        git_command = ['git', 'clone', '--depth=1', '-b', phase, git_url, name]
    subprocess.Popen(git_command, cwd=environment_path).communicate()
    if not os.path.exists(app_root_path):
        raise Exception()

    git_clone_folder = '%s/%s' % (environment_path, name)
    git_hash_app = subprocess.Popen(git_rev, stdout=subprocess.PIPE,
                                    cwd=git_clone_folder).communicate()[0]
    # Strip git metadata so it is not uploaded.
    subprocess.Popen(['rm', '-rf', './.git'],
                     cwd=git_clone_folder).communicate()
    subprocess.Popen(['rm', '-rf', './.gitignore'],
                     cwd=git_clone_folder).communicate()

    ################################################################################
    print_message('yarn install')

    if not os.path.exists('%s/package.json' % app_root_path):
        subprocess.Popen(['cp', '%s/package.json' % common_root_path,
                          app_root_path]).communicate()
    yarn_process = subprocess.Popen(['yarn'], cwd=app_root_path)
    yarn_result, error = yarn_process.communicate()
    # NOTE(review): stdout/stderr are not piped, so communicate() returns
    # (None, None) and this branch never fires; only the returncode check
    # is effective. Same for grunt below.
    if error:
        print(error)
        raise Exception()
    if yarn_process.returncode != 0:
        print(' '.join(['yarn returns:', str(yarn_process.returncode)]))
        raise Exception()

    ################################################################################
    print_message('configure %s' % name)

    # Render settings-local.js from the sample, substituting phase plus
    # every settings key into matching 'var <KEY> ...' lines.
    lines = read_file('%s/configuration/app/scripts/settings-local-sample.js'
                      % environment_path)
    option_list = list()
    option_list.append(['phase', phase])
    for key in settings:
        value = settings[key]
        option_list.append([key, value])
    for oo in option_list:
        lines = re_sub_lines(lines, '^(var %s) .*' % oo[0],
                             '\\1 = \'%s\'' % oo[1])
    write_file('%s/app/scripts/settings-local.js' % app_root_path, lines)

    ################################################################################
    print_message('grunt build')

    grunt_process = subprocess.Popen(['grunt'], cwd=app_root_path)
    grunt_result, error = grunt_process.communicate()
    if error:
        print(error)
        raise Exception()
    if grunt_process.returncode != 0:
        print(' '.join(['Grunt exited with:', str(grunt_process.returncode)]))
        raise Exception()

    ################################################################################
    print_message('upload to temp bucket')

    app_dist_path = '%s/dist' % app_root_path
    # NOTE(review): aws_cli is not created in this function (unlike
    # run_create_s3_vue) — presumably a module-level instance; confirm.
    temp_bucket_name = aws_cli.get_temp_bucket()
    timestamp = int(time.time())
    temp_bucket_prefix = '%s/%s/%s/%s/%s' % (temp_bucket_name, template_name,
                                             name, base_path, timestamp)
    temp_bucket_prefix = os.path.normpath(temp_bucket_prefix)
    temp_bucket_uri = 's3://%s' % temp_bucket_prefix
    cmd = ['s3', 'cp', '.', temp_bucket_uri, '--recursive']
    upload_result = aws_cli.run(cmd, cwd=app_dist_path)
    for ll in upload_result.split('\n'):
        print(ll)

    ################################################################################
    print_message('delete old files from deploy bucket')

    delete_excluded_files = list(settings.get('DELETE_EXCLUDED_FILES', ''))
    if len(delete_excluded_files) > 0:
        cmd = ['s3', 'rm', 's3://%s' % deploy_bucket_prefix, '--recursive']
        for ff in delete_excluded_files:
            cmd += ['--exclude', '%s' % ff]
        delete_result = aws_cli.run(cmd)
        for ll in delete_result.split('\n'):
            print(ll)

    ################################################################################
    print_message('sync to deploy bucket')

    # When nothing is excluded, let sync delete stale objects itself.
    cmd = ['s3', 'sync', temp_bucket_uri, 's3://%s' % deploy_bucket_prefix]
    if len(delete_excluded_files) < 1:
        cmd += ['--delete']
    sync_result = aws_cli.run(cmd)
    for ll in sync_result.split('\n'):
        print(ll)

    ################################################################################
    print_message('tag to deploy bucket')

    # put-bucket-tagging replaces the whole tag set, so merge existing tags
    # first, then overwrite the deploy-related keys.
    tag_dict = dict()
    cmd = ['s3api', 'get-bucket-tagging', '--bucket', deploy_bucket_name]
    tag_result = aws_cli.run(cmd, ignore_error=True)
    if tag_result:
        tag_result = dict(tag_result)
        for tt in tag_result['TagSet']:
            key = tt['Key']
            value = tt['Value']
            tag_dict[key] = value
    tag_dict['phase'] = phase
    tag_dict['git_hash_johanna'] = git_hash_johanna.decode('utf-8')
    tag_dict['git_hash_template'] = git_hash_template.decode('utf-8')
    tag_dict['git_hash_%s' % name] = git_hash_app.decode('utf-8')
    tag_dict['timestamp_%s' % name] = timestamp
    tag_format = '{Key=%s, Value=%s}'
    tag_list = list()
    for key in tag_dict:
        value = tag_dict[key]
        tag_list.append(tag_format % (key, value))
    cmd = ['s3api', 'put-bucket-tagging',
           '--bucket', deploy_bucket_name,
           '--tagging', 'TagSet=[%s]' % ','.join(tag_list)]
    aws_cli.run(cmd)

    ################################################################################
    print_message('cleanup temp bucket')

    cmd = ['s3', 'rm', temp_bucket_uri, '--recursive']
    upload_result = aws_cli.run(cmd)
    for ll in upload_result.split('\n'):
        print(ll)

    ################################################################################
    print_message('invalidate cache from cloudfront')

    cf_dist_id = settings.get('CLOUDFRONT_DIST_ID', '')
    if len(cf_dist_id) > 0:
        path_list = list(settings['INVALIDATE_PATHS'])
        cmd = ['cloudfront', 'create-invalidation',
               '--distribution-id', cf_dist_id,
               '--paths', ' '.join(path_list)]
        invalidate_result = aws_cli.run(cmd)
        print(invalidate_result)
def run_create_lambda_sns(name, settings):
    """Create or update an SNS-triggered lambda function.

    Validates every topic in ``settings['SNS_TOPICS_NAMES']`` (entries are
    '<region>/<topic_name>'), packages the deploy folder via ``_build``,
    then either updates the existing function's code/tags or creates it and
    wires up the SNS subscriptions and invoke permissions.
    """
    aws_cli = AWSCli()

    description = settings['DESCRIPTION']
    function_name = settings['NAME']
    phase = env['common']['PHASE']
    template_name = env['template']['NAME']
    template_path = 'template/%s' % template_name
    deploy_folder = '%s/lambda/%s' % (template_path, name)

    ################################################################################
    # Fail fast before packaging: every configured topic must already exist.
    topic_arn_list = list()
    for sns_topic_name in settings['SNS_TOPICS_NAMES']:
        print_message('check topic exists: %s' % sns_topic_name)

        region, topic_name = sns_topic_name.split('/')
        topic_arn = AWSCli(region).get_topic_arn(topic_name)
        if not topic_arn:
            # BUGFIX: previously interpolated settings['SNS_TOPIC_NAME'],
            # a key this function never receives, so the error path raised
            # KeyError instead of printing the diagnostic.
            print('sns topic: "%s" is not exists in %s' % (topic_name, region))
            raise Exception()
        topic_arn_list.append(topic_arn)

    ################################################################################
    # _build performs cleanup, pip3 install, settings_local generation and
    # zipping, and returns the git-hash tag pairs (previously duplicated
    # inline here).
    tags = _build(deploy_folder, function_name, phase, settings,
                  template_name, template_path)

    print_message('create lambda function')

    role_arn = aws_cli.get_role_arn('aws-lambda-default-role')

    ################################################################################
    print_message('check previous version')

    need_update = False
    cmd = ['lambda', 'list-functions']
    result = aws_cli.run(cmd)
    for ff in result['Functions']:
        if function_name == ff['FunctionName']:
            need_update = True
            break

    ################################################################################
    if need_update:
        print_session('update lambda: %s' % function_name)

        cmd = ['lambda', 'update-function-code',
               '--function-name', function_name,
               '--zip-file', 'fileb://deploy.zip']
        result = aws_cli.run(cmd, cwd=deploy_folder)
        function_arn = result['FunctionArn']

        print_message('update lambda tags')

        cmd = ['lambda', 'tag-resource',
               '--resource', function_arn,
               '--tags', ','.join(tags)]
        aws_cli.run(cmd, cwd=deploy_folder)
        return

    ################################################################################
    print_session('create lambda: %s' % function_name)

    cmd = ['lambda', 'create-function',
           '--function-name', function_name,
           '--description', description,
           '--zip-file', 'fileb://deploy.zip',
           '--role', role_arn,
           '--handler', 'lambda.handler',
           '--runtime', 'python3.6',
           '--tags', ','.join(tags),
           '--timeout', '120']
    result = aws_cli.run(cmd, cwd=deploy_folder)
    function_arn = result['FunctionArn']

    for topic_arn in topic_arn_list:
        print_message('create subscription')

        # ARN format: arn:aws:sns:<region>:<account>:<topic>; the subscribe
        # call must run against the topic's own region.
        topic_region = topic_arn.split(':')[3]
        cmd = ['sns', 'subscribe',
               '--topic-arn', topic_arn,
               '--protocol', 'lambda',
               '--notification-endpoint', function_arn]
        AWSCli(topic_region).run(cmd)

        print_message('Add permission to lambda')

        statement_id = '%s_%s_Permission' % (function_name, topic_region)
        cmd = ['lambda', 'add-permission',
               '--function-name', function_name,
               '--statement-id', statement_id,
               '--action', 'lambda:InvokeFunction',
               '--principal', 'sns.amazonaws.com',
               '--source-arn', topic_arn]
        aws_cli.run(cmd)

    print_message('update tag with subscription info')

    cmd = ['lambda', 'tag-resource',
           '--resource', function_arn,
           '--tags', ','.join(tags)]
    aws_cli.run(cmd, cwd=deploy_folder)
def run_create_lambda_default(name, settings):
    """Create or update a default (untriggered) lambda function.

    Packages the folder under ``template/<template>/lambda/<name>`` via
    ``_build``, then either updates the existing function's code and tags
    or creates it with the python3.6 runtime.
    """
    aws_cli = AWSCli()
    description = settings['DESCRIPTION']
    function_name = settings['NAME']
    phase = env['common']['PHASE']
    template_name = env['template']['NAME']
    template_path = 'template/%s' % template_name
    deploy_folder = '%s/lambda/%s' % (template_path, name)

    # _build performs cleanup, pip3 install, settings_local generation and
    # zipping, and returns the git-hash tag pairs. Previously these ~30
    # lines (including the git rev-parse calls) were duplicated inline —
    # now shared with the other lambda creators.
    tags = _build(deploy_folder, function_name, phase, settings,
                  template_name, template_path)

    print_message('create lambda function')

    role_arn = aws_cli.get_role_arn('aws-lambda-default-role')

    ################################################################################
    print_message('check previous version')

    need_update = False
    cmd = ['lambda', 'list-functions']
    result = aws_cli.run(cmd)
    for ff in result['Functions']:
        if function_name == ff['FunctionName']:
            need_update = True
            break

    ################################################################################
    if need_update:
        print_session('update lambda: %s' % function_name)

        cmd = ['lambda', 'update-function-code',
               '--function-name', function_name,
               '--zip-file', 'fileb://deploy.zip']
        result = aws_cli.run(cmd, cwd=deploy_folder)
        function_arn = result['FunctionArn']

        print_message('update lambda tags')

        cmd = ['lambda', 'tag-resource',
               '--resource', function_arn,
               '--tags', ','.join(tags)]
        aws_cli.run(cmd, cwd=deploy_folder)
        return

    ################################################################################
    print_session('create lambda: %s' % function_name)

    cmd = ['lambda', 'create-function',
           '--function-name', function_name,
           '--description', description,
           '--zip-file', 'fileb://deploy.zip',
           '--role', role_arn,
           '--handler', 'lambda.handler',
           '--runtime', 'python3.6',
           '--tags', ','.join(tags),
           '--timeout', '120']
    aws_cli.run(cmd, cwd=deploy_folder)
def run_create_eb_spring(name, settings):
    """Build and deploy a Spring (Tomcat/Java 8) Elastic Beanstalk environment.

    Clones the app repo, renders its configuration from samples, builds a WAR
    with Maven, uploads it to S3, creates a timestamped EB environment, waits
    for it to turn Green/Ready, then hardens SSH ingress and swaps the CNAME
    from the previous environment (blue/green style) if one exists.

    :param name: application/template name; also used for folder layout and tags.
    :param settings: dict-like config (ASG sizes, CNAME, GIT_URL, SUBNET_TYPE,
        PROPERTIES_FILE, ...); all key/value pairs are also injected into the
        app's .properties file.
    :raises Exception: on missing VPC, unknown subnet type, failed clone,
        unready previous environment, or environment-creation timeout.
    """
    aws_cli = AWSCli()
    aws_asg_max_value = settings['AWS_ASG_MAX_VALUE']
    aws_asg_min_value = settings['AWS_ASG_MIN_VALUE']
    aws_default_region = env['aws']['AWS_DEFAULT_REGION']
    cname = settings['CNAME']
    db_conn_str_suffix = settings.get('DB_CONNECTION_STR_SUFFIX', '')
    eb_application_name = env['elasticbeanstalk']['APPLICATION_NAME']
    git_url = settings['GIT_URL']
    instance_type = settings.get('INSTANCE_TYPE', 't2.medium')
    key_pair_name = env['common']['AWS_KEY_PAIR_NAME']
    phase = env['common']['PHASE']
    service_name = env['common'].get('SERVICE_NAME', '')
    subnet_type = settings['SUBNET_TYPE']

    name_prefix = '%s_' % service_name if service_name else ''

    cidr_subnet = aws_cli.cidr_subnet

    # Timestamp makes environment/artifact names unique per deploy.
    str_timestamp = str(int(time.time()))

    war_filename = '%s-%s.war' % (name, str_timestamp)
    eb_environment_name = '%s-%s' % (name, str_timestamp)
    eb_environment_name_old = None

    template_folder = 'template/%s' % name
    target_folder = 'template/%s/target' % name
    ebextensions_folder = 'template/%s/_provisioning/.ebextensions' % name
    configuration_folder = 'template/%s/_provisioning/configuration' % name
    properties_file = 'template/%s/%s' % (name, settings['PROPERTIES_FILE'])

    git_rev = ['git', 'rev-parse', 'HEAD']
    git_hash_johanna = subprocess.Popen(git_rev, stdout=subprocess.PIPE).communicate()[0]

    ################################################################################
    print_session('create %s' % name)

    ################################################################################
    print_message('get vpc id')
    rds_vpc_id, eb_vpc_id = aws_cli.get_vpc_id()
    if not eb_vpc_id:
        print('ERROR!!! No VPC found')
        raise Exception()

    ################################################################################
    print_message('get subnet id')
    elb_subnet_id_1 = None
    elb_subnet_id_2 = None
    ec2_subnet_id_1 = None
    ec2_subnet_id_2 = None
    cmd = ['ec2', 'describe-subnets']
    result = aws_cli.run(cmd)
    for r in result['Subnets']:
        if r['VpcId'] != eb_vpc_id:
            continue
        if 'public' == subnet_type:
            # public deployment: ELB on public subnets, instances on private ones
            if r['CidrBlock'] == cidr_subnet['eb']['public_1']:
                elb_subnet_id_1 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['public_2']:
                elb_subnet_id_2 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['private_1']:
                ec2_subnet_id_1 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['private_2']:
                ec2_subnet_id_2 = r['SubnetId']
        elif 'private' == subnet_type:
            # private deployment: ELB and instances share the private subnets
            if r['CidrBlock'] == cidr_subnet['eb']['private_1']:
                elb_subnet_id_1 = ec2_subnet_id_1 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['private_2']:
                elb_subnet_id_2 = ec2_subnet_id_2 = r['SubnetId']
        else:
            # NOTE(review): this guard only fires while iterating subnets, so an
            # unknown subnet_type with zero matching subnets would pass silently.
            print('ERROR!!! Unknown subnet type:', subnet_type)
            raise Exception()

    ################################################################################
    print_message('get security group id')
    security_group_id = None
    cmd = ['ec2', 'describe-security-groups']
    result = aws_cli.run(cmd)
    for r in result['SecurityGroups']:
        if r['VpcId'] != eb_vpc_id:
            continue
        # NOTE(review): both branches look up the same '<prefix>eb_private'
        # group — intentional or a leftover from a public/private split?
        if 'public' == subnet_type:
            if r['GroupName'] == '%seb_private' % name_prefix:
                security_group_id = r['GroupId']
                break
        elif 'private' == subnet_type:
            if r['GroupName'] == '%seb_private' % name_prefix:
                security_group_id = r['GroupId']
                break
        else:
            print('ERROR!!! Unknown subnet type:', subnet_type)
            raise Exception()

    ################################################################################
    print_message('get database address')
    db_address = aws_cli.get_rds_address()

    ################################################################################
    print_message('get cache address')
    cache_address = aws_cli.get_elasticache_address()

    ################################################################################
    print_message('git clone')
    subprocess.Popen(['mkdir', '-p', 'template']).communicate()
    subprocess.Popen(['rm', '-rf', '%s/' % name], cwd='template').communicate()

    # Branch defaults to the phase; 'dv' clones the repo's default branch.
    branch = aws_cli.env.get('GIT_BRANCH_APP', phase)
    git_command = ['git', 'clone', '--depth=1']
    if branch != 'dv':
        git_command += ['-b', branch]
    git_command += [git_url]
    subprocess.Popen(git_command, cwd='template').communicate()
    if not os.path.exists('%s' % template_folder):
        raise Exception()

    # Capture the app revision before stripping git metadata from the clone.
    git_hash_app = subprocess.Popen(git_rev, stdout=subprocess.PIPE, cwd=template_folder).communicate()[0]

    subprocess.Popen(['rm', '-rf', '.git'], cwd=template_folder).communicate()
    subprocess.Popen(['rm', '-rf', '.gitignore'], cwd=template_folder).communicate()

    ################################################################################
    print_message('configuration %s' % name)

    with open('%s/phase' % configuration_folder, 'w') as f:
        f.write(phase)
        f.close()  # redundant inside `with`, but harmless

    # logstash config is currently copied from the sample unchanged.
    lines = read_file('%s/etc/logstash/conf.d/logstash_sample.conf' % configuration_folder)
    write_file('%s/etc/logstash/conf.d/logstash.conf' % configuration_folder, lines)

    # Render the .ebextensions config from its sample with ASG bounds.
    lines = read_file('%s/%s.config.sample' % (ebextensions_folder, name))
    lines = re_sub_lines(lines, 'AWS_ASG_MAX_VALUE', aws_asg_max_value)
    lines = re_sub_lines(lines, 'AWS_ASG_MIN_VALUE', aws_asg_min_value)
    write_file('%s/%s.config' % (ebextensions_folder, name), lines)

    # Render the app .properties file: DB/cache endpoints plus every settings
    # key, substituted into 'KEY=...' lines of the sample file.
    sample_file = properties_file.replace('.properties', '-sample.properties')
    lines = read_file(sample_file)
    option_list = list()
    option_list.append(['jdbc.url', 'jdbc:mysql://%s%s' % (db_address, db_conn_str_suffix)])
    option_list.append(['jdbc.username', env['rds']['USER_NAME']])
    option_list.append(['jdbc.password', env['rds']['USER_PASSWORD']])
    option_list.append(['redis.host', cache_address])
    for key in settings:
        value = settings[key]
        option_list.append([key, value])
    for oo in option_list:
        lines = re_sub_lines(lines, '^(%s)=.*' % oo[0], '\\1=%s' % oo[1])
    write_file(properties_file, lines)

    ################################################################################
    print_message('check previous version')
    cmd = ['elasticbeanstalk', 'describe-environments']
    cmd += ['--application-name', eb_application_name]
    result = aws_cli.run(cmd)
    for r in result['Environments']:
        if 'CNAME' not in r:
            continue
        if r['CNAME'] == '%s.%s.elasticbeanstalk.com' % (cname, aws_default_region):
            if r['Status'] == 'Terminated':
                continue
            elif r['Status'] != 'Ready':
                print('previous version is not ready.')
                raise Exception()
            # A live environment already owns the CNAME: remember it for the
            # final swap, and deploy the new one under a timestamped CNAME.
            eb_environment_name_old = r['EnvironmentName']
            cname += '-%s' % str_timestamp
            break

    ################################################################################
    print_message('build artifact')
    build_command = ['mvn']
    if phase != 'dv':
        build_command += ['exec:exec']
    build_command += ['package']
    print_message('build %s: %s' % (name, ' '.join(build_command)))
    subprocess.Popen(build_command, cwd=template_folder).communicate()

    ################################################################################
    print_message('create storage location')
    cmd = ['elasticbeanstalk', 'create-storage-location']
    result = aws_cli.run(cmd)
    s3_bucket = result['S3Bucket']
    s3_war_filename = '/'.join(['s3://' + s3_bucket, eb_application_name, war_filename])

    ################################################################################
    print_message('create application version')
    # Rename the Maven output to the timestamped artifact name and upload it.
    cmd = ['mv', 'ROOT.war', war_filename]
    subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=target_folder).communicate()

    cmd = ['s3', 'cp', war_filename, s3_war_filename]
    aws_cli.run(cmd, cwd=target_folder)

    cmd = ['rm', '-rf', war_filename]
    subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=target_folder).communicate()

    cmd = ['elasticbeanstalk', 'create-application-version']
    cmd += ['--application-name', eb_application_name]
    cmd += ['--source-bundle', 'S3Bucket="%s",S3Key="%s/%s"' % (s3_bucket, eb_application_name, war_filename)]
    cmd += ['--version-label', eb_environment_name]
    aws_cli.run(cmd, cwd=template_folder)

    ################################################################################
    print_message('create environment %s' % name)
    # EB option settings: launch config, VPC placement, environment type,
    # service role, and enhanced health reporting with CloudWatch metrics.
    option_settings = list()

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'EC2KeyName'
    oo['Value'] = key_pair_name
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'InstanceType'
    oo['Value'] = instance_type
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'IamInstanceProfile'
    oo['Value'] = 'aws-elasticbeanstalk-ec2-role'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'SecurityGroups'
    oo['Value'] = security_group_id
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'AssociatePublicIpAddress'
    oo['Value'] = 'false'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'ELBScheme'
    oo['Value'] = 'public'
    if 'private' == subnet_type:
        oo['Value'] = 'internal'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'ELBSubnets'
    oo['Value'] = ','.join([elb_subnet_id_1, elb_subnet_id_2])
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'Subnets'
    oo['Value'] = ','.join([ec2_subnet_id_1, ec2_subnet_id_2])
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'VPCId'
    oo['Value'] = eb_vpc_id
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:environment'
    oo['OptionName'] = 'EnvironmentType'
    oo['Value'] = 'LoadBalanced'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:environment'
    oo['OptionName'] = 'ServiceRole'
    oo['Value'] = 'aws-elasticbeanstalk-service-role'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:healthreporting:system'
    oo['OptionName'] = 'SystemType'
    oo['Value'] = 'enhanced'
    option_settings.append(oo)

    # ConfigDocument: publish request/instance health metrics at 60s granularity.
    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:healthreporting:system'
    oo['OptionName'] = 'ConfigDocument'
    cw_env = dict()
    cw_env['ApplicationRequestsTotal'] = 60
    cw_env['ApplicationRequests2xx'] = 60
    cw_env['ApplicationRequests3xx'] = 60
    cw_env['ApplicationRequests4xx'] = 60
    cw_env['ApplicationRequests5xx'] = 60
    cw_instance = dict()
    cw_instance['RootFilesystemUtil'] = 60
    cw_instance['InstanceHealth'] = 60
    cw_instance['CPUIdle'] = 60
    cw = dict()
    cw['Environment'] = cw_env
    cw['Instance'] = cw_instance
    cfg_doc = dict()
    cfg_doc['CloudWatchMetrics'] = cw
    cfg_doc['Version'] = 1
    oo['Value'] = json.dumps(cfg_doc)
    option_settings.append(oo)

    option_settings = json.dumps(option_settings)

    tag0 = 'Key=git_hash_johanna,Value=%s' % git_hash_johanna.decode('utf-8').strip()
    tag2 = 'Key=git_hash_%s,Value=%s' % (name, git_hash_app.decode('utf-8').strip())

    cmd = ['elasticbeanstalk', 'create-environment']
    cmd += ['--application-name', eb_application_name]
    cmd += ['--cname-prefix', cname]
    cmd += ['--environment-name', eb_environment_name]
    cmd += ['--option-settings', option_settings]
    cmd += ['--solution-stack-name', '64bit Amazon Linux 2018.03 v3.0.1 running Tomcat 8.5 Java 8']
    cmd += ['--tags', tag0, tag2]
    cmd += ['--version-label', eb_environment_name]
    aws_cli.run(cmd, cwd=template_folder)

    # Poll every 5s until Green/Ready; give up after 30 minutes.
    elapsed_time = 0
    while True:
        cmd = ['elasticbeanstalk', 'describe-environments']
        cmd += ['--application-name', eb_application_name]
        cmd += ['--environment-name', eb_environment_name]
        result = aws_cli.run(cmd)

        ee = result['Environments'][0]
        print(json.dumps(ee, sort_keys=True, indent=4))
        if ee.get('Health', '') == 'Green' and ee.get('Status', '') == 'Ready':
            break

        print('creating... (elapsed time: \'%d\' seconds)' % elapsed_time)
        time.sleep(5)
        elapsed_time += 5

        if elapsed_time > 60 * 30:
            raise Exception()

    subprocess.Popen(['rm', '-rf', '%s/' % name], cwd='template').communicate()

    ################################################################################
    print_message('revoke security group ingress')
    # Close the world-open SSH rule EB attaches to the environment's groups.
    cmd = ['ec2', 'describe-security-groups']
    cmd += ['--filters', 'Name=tag-key,Values=Name,Name=tag-value,Values=%s' % eb_environment_name]
    result = aws_cli.run(cmd)
    for ss in result['SecurityGroups']:
        cmd = ['ec2', 'revoke-security-group-ingress']
        cmd += ['--group-id', ss['GroupId']]
        cmd += ['--protocol', 'tcp']
        cmd += ['--port', '22']
        cmd += ['--cidr', '0.0.0.0/0']
        aws_cli.run(cmd, ignore_error=True)

    ################################################################################
    print_message('swap CNAME if the previous version exists')
    if eb_environment_name_old:
        cmd = ['elasticbeanstalk', 'swap-environment-cnames']
        cmd += ['--source-environment-name', eb_environment_name_old]
        cmd += ['--destination-environment-name', eb_environment_name]
        aws_cli.run(cmd)
def run_create_eb_windows(name, settings):
    """Deploy a Windows/IIS Elastic Beanstalk environment from a prebuilt artifact.

    Clones the app repo, renders .ebextensions / settings_local.py / the CLI
    .exe.config from their samples, downloads a prebuilt artifact zip from S3,
    bundles everything into a source bundle, creates a timestamped environment
    on the Windows Server 2016 / IIS 10 stack, waits for Green/Ready, then
    revokes open SSH ingress and swaps the CNAME from the previous environment.

    :param name: application/template name; used for folder layout and tags.
    :param settings: dict-like config (region, ASG sizes, CNAME, GIT_URL,
        SUBNET_TYPE, ARTIFACT_URL, notification email, ...); all key/value
        pairs are also injected into settings_local.py and the .exe.config.
    :raises Exception: on missing VPC, unknown subnet type, failed clone,
        unready previous environment, or environment-creation timeout.
    """
    aws_cli = AWSCli(settings['AWS_DEFAULT_REGION'])
    aws_asg_max_value = settings['AWS_ASG_MAX_VALUE']
    aws_asg_min_value = settings['AWS_ASG_MIN_VALUE']
    aws_default_region = settings['AWS_DEFAULT_REGION']
    aws_eb_notification_email = settings['AWS_EB_NOTIFICATION_EMAIL']
    ssl_certificate_id = aws_cli.get_acm_certificate_id('hbsmith.io')
    cname = settings['CNAME']
    debug = env['common']['DEBUG']
    eb_application_name = env['elasticbeanstalk']['APPLICATION_NAME']
    git_url = settings['GIT_URL']
    key_pair_name = env['common']['AWS_KEY_PAIR_NAME']
    phase = env['common']['PHASE']
    subnet_type = settings['SUBNET_TYPE']
    service_name = env['common'].get('SERVICE_NAME', '')
    name_prefix = f'{service_name}_' if service_name else ''
    url = settings['ARTIFACT_URL']

    cidr_subnet = aws_cli.cidr_subnet

    # Timestamp makes environment/artifact names unique per deploy.
    str_timestamp = str(int(time.time()))

    zip_filename = f'{name}-{str_timestamp}.zip'
    eb_environment_name = f'{name}-{str_timestamp}'
    eb_environment_name_old = None

    template_path = f'template/{name}'

    git_rev = ['git', 'rev-parse', 'HEAD']
    git_hash_johanna = subprocess.Popen(git_rev, stdout=subprocess.PIPE).communicate()[0]

    ################################################################################
    print_session(f'create {name}')

    ################################################################################
    print_message('get vpc id')
    rds_vpc_id, eb_vpc_id = aws_cli.get_vpc_id()
    if not eb_vpc_id:
        print('ERROR!!! No VPC found')
        raise Exception()

    ################################################################################
    print_message('get subnet id')
    elb_subnet_id_1 = None
    elb_subnet_id_2 = None
    ec2_subnet_id_1 = None
    ec2_subnet_id_2 = None
    cmd = ['ec2', 'describe-subnets']
    result = aws_cli.run(cmd)
    for r in result['Subnets']:
        if r['VpcId'] != eb_vpc_id:
            continue
        if 'public' == subnet_type:
            # public deployment: ELB on public subnets, instances on private ones
            if r['CidrBlock'] == cidr_subnet['eb']['public_1']:
                elb_subnet_id_1 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['public_2']:
                elb_subnet_id_2 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['private_1']:
                ec2_subnet_id_1 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['private_2']:
                ec2_subnet_id_2 = r['SubnetId']
        elif 'private' == subnet_type:
            # private deployment: ELB and instances share the private subnets
            if r['CidrBlock'] == cidr_subnet['eb']['private_1']:
                elb_subnet_id_1 = ec2_subnet_id_1 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['private_2']:
                elb_subnet_id_2 = ec2_subnet_id_2 = r['SubnetId']
        else:
            print('ERROR!!! Unknown subnet type:', subnet_type)
            raise Exception()

    ################################################################################
    print_message('get security group id')
    security_group_id = None
    cmd = ['ec2', 'describe-security-groups']
    result = aws_cli.run(cmd)
    for r in result['SecurityGroups']:
        if r['VpcId'] != eb_vpc_id:
            continue
        # NOTE(review): both branches look up the same '<prefix>eb_private' group.
        if 'public' == subnet_type:
            if r['GroupName'] == f'{name_prefix}eb_private':
                security_group_id = r['GroupId']
                break
        elif 'private' == subnet_type:
            if r['GroupName'] == f'{name_prefix}eb_private':
                security_group_id = r['GroupId']
                break
        else:
            print('ERROR!!! Unknown subnet type:', subnet_type)
            raise Exception()

    #################################################################################
    print_message('git clone')
    subprocess.Popen(['rm', '-rf', template_path]).communicate()
    subprocess.Popen(['mkdir', '-p', template_path]).communicate()

    # 'dv' clones the repo's default branch; other phases clone branch == phase.
    if phase == 'dv':
        git_command = ['git', 'clone', '--depth=1', git_url]
    else:
        git_command = ['git', 'clone', '--depth=1', '-b', phase, git_url]
    subprocess.Popen(git_command, cwd=template_path).communicate()
    print(f'{template_path}/{name}')
    if not os.path.exists(f'{template_path}/{name}'):
        raise Exception()

    # Capture the app revision before stripping git metadata from the clone.
    git_hash_app = subprocess.Popen(git_rev, stdout=subprocess.PIPE, cwd=f'{template_path}/{name}').communicate()[0]

    subprocess.Popen(['rm', '-rf', f'./{name}/.git'], cwd=template_path).communicate()
    subprocess.Popen(['rm', '-rf', f'./{name}/.gitignore'], cwd=template_path).communicate()

    # Render the .ebextensions config from its sample.
    lines = read_file(f'{template_path}/{name}/_provisioning/.ebextensions/{name}.config.sample')
    lines = re_sub_lines(lines, 'AWS_ASG_MAX_VALUE', aws_asg_max_value)
    lines = re_sub_lines(lines, 'AWS_ASG_MIN_VALUE', aws_asg_min_value)
    lines = re_sub_lines(lines, 'AWS_EB_NOTIFICATION_EMAIL', aws_eb_notification_email)
    lines = re_sub_lines(lines, 'SSL_CERTIFICATE_ID', ssl_certificate_id)
    write_file(f'{template_path}/{name}/_provisioning/.ebextensions/{name}.config', lines)

    # Render settings_local.py from its sample: DEBUG plus every settings key.
    lines = read_file(
        f'{template_path}/{name}/_provisioning/configuration/User/vagrant/Desktop/{name}/settings_local_sample.py')
    lines = re_sub_lines(lines, '^(DEBUG).*', f'\\1 = {debug}')
    option_list = list()
    option_list.append(['PHASE', phase])
    for key in settings:
        value = settings[key]
        option_list.append([key, value])
    for oo in option_list:
        lines = re_sub_lines(lines, f'^({oo[0]}) .*', f'\\1 = \'{oo[1]}\'')
    write_file(
        f'{template_path}/{name}/_provisioning/configuration/User/vagrant/Desktop/{name}/settings_local.py',
        lines)

    # Render the Windows CLI app's .exe.config (XML <add key=...> entries).
    lines = read_file(f'{template_path}/{name}/_provisioning/configuration/'
                      f'User/vagrant/Desktop/{name}/{name}_cli/{name}_cli.exe_sample.config')
    option_list = list()
    option_list.append(['PHASE', phase])
    for key in settings:
        value = settings[key]
        option_list.append([key, value])
    for oo in option_list:
        lines = re_sub_lines(lines, f'^.+add key=\"({oo[0]})\" value=.+$', f'<add key="\\1" value="{oo[1]}" />')
    write_file(f'{template_path}/{name}/_provisioning/configuration/'
               f'User/vagrant/Desktop/{name}/{name}_cli/{name}_cli.exe.config', lines)

    ################################################################################
    print_message('download artifact')
    # The artifact is prebuilt elsewhere and keyed by branch + app git hash.
    branch = 'master' if phase == 'dv' else phase
    file_name = f"{branch}-gendo-{git_hash_app.decode('utf-8').strip()}.zip"
    artifact_url = url + f'/{file_name}'
    cmd = ['s3', 'cp', artifact_url, f'{name}/gendo-artifact.zip']
    aws_cli.run(cmd, cwd=template_path)

    ################################################################################
    print_message('check previous version')
    cmd = ['elasticbeanstalk', 'describe-environments']
    cmd += ['--application-name', eb_application_name]
    result = aws_cli.run(cmd)
    for r in result['Environments']:
        if 'CNAME' not in r:
            continue
        if r['CNAME'] == f'{cname}.{aws_default_region}.elasticbeanstalk.com':
            if r['Status'] == 'Terminated':
                continue
            elif r['Status'] != 'Ready':
                print('previous version is not ready.')
                raise Exception()
            # A live environment already owns the CNAME: remember it for the
            # final swap, and deploy the new one under a timestamped CNAME.
            eb_environment_name_old = r['EnvironmentName']
            cname += f'-{str_timestamp}'
            break

    ################################################################################
    print_message('create storage location')
    cmd = ['elasticbeanstalk', 'create-storage-location']
    result = aws_cli.run(cmd)
    s3_bucket = result['S3Bucket']
    s3_zip_filename = '/'.join(['s3://' + s3_bucket, eb_application_name, zip_filename])

    ################################################################################
    print_message('create application version')
    # Move the provisioning pieces to the bundle root, zip, and upload.
    file_list = list()
    file_list.append('.ebextensions')
    file_list.append('configuration')
    file_list.append('save_as_utf8.py')
    for ff in file_list:
        cmd = ['mv', f'{name}/_provisioning/{ff}', '.']
        subprocess.Popen(cmd, cwd=template_path).communicate()

    cmd = ['zip', '-r', zip_filename, '.', '.ebextensions']
    subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=template_path).communicate()

    cmd = ['s3', 'cp', zip_filename, s3_zip_filename]
    aws_cli.run(cmd, cwd=template_path)

    cmd = ['elasticbeanstalk', 'create-application-version']
    cmd += ['--application-name', eb_application_name]
    cmd += ['--source-bundle', f'S3Bucket="{s3_bucket}",S3Key="{eb_application_name}/{zip_filename}"']
    cmd += ['--version-label', eb_environment_name]
    aws_cli.run(cmd, cwd=template_path)

    ################################################################################
    print_message(f'create environment {name}')
    # EB option settings: launch config, VPC placement, ALB environment,
    # basic health reporting, and CloudWatch log streaming.
    option_settings = list()

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'EC2KeyName'
    oo['Value'] = key_pair_name
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'InstanceType'
    oo['Value'] = 't3.medium'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'IamInstanceProfile'
    oo['Value'] = 'aws-elasticbeanstalk-ec2-role'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'SecurityGroups'
    oo['Value'] = security_group_id
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'AssociatePublicIpAddress'
    oo['Value'] = 'false'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'ELBScheme'
    oo['Value'] = 'public'
    if 'private' == subnet_type:
        oo['Value'] = 'internal'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'ELBSubnets'
    oo['Value'] = ','.join([elb_subnet_id_1, elb_subnet_id_2])
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'Subnets'
    oo['Value'] = ','.join([ec2_subnet_id_1, ec2_subnet_id_2])
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'VPCId'
    oo['Value'] = eb_vpc_id
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:environment'
    oo['OptionName'] = 'EnvironmentType'
    oo['Value'] = 'LoadBalanced'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:environment'
    oo['OptionName'] = 'LoadBalancerType'
    oo['Value'] = 'application'
    option_settings.append(oo)

    # Health-check matcher accepts 403 — presumably the app root requires
    # auth and returns Forbidden when healthy; TODO confirm.
    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:environment:process:default'
    oo['OptionName'] = 'MatcherHTTPCode'
    oo['Value'] = '403'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:environment'
    oo['OptionName'] = 'ServiceRole'
    oo['Value'] = 'aws-elasticbeanstalk-service-role'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:healthreporting:system'
    oo['OptionName'] = 'SystemType'
    oo['Value'] = 'basic'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:cloudwatch:logs'
    oo['OptionName'] = 'StreamLogs'
    oo['Value'] = 'true'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:cloudwatch:logs'
    oo['OptionName'] = 'DeleteOnTerminate'
    oo['Value'] = 'true'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:cloudwatch:logs'
    oo['OptionName'] = 'RetentionInDays'
    oo['Value'] = '3'
    option_settings.append(oo)

    option_settings = json.dumps(option_settings)

    tag0 = f"Key=git_hash_johanna,Value={git_hash_johanna.decode('utf-8').strip()}"
    tag1 = f"Key=git_hash_{name},Value={git_hash_app.decode('utf-8').strip()}"

    cmd = ['elasticbeanstalk', 'create-environment']
    cmd += ['--application-name', eb_application_name]
    cmd += ['--cname-prefix', cname]
    cmd += ['--environment-name', eb_environment_name]
    cmd += ['--option-settings', option_settings]
    cmd += ['--solution-stack-name', '64bit Windows Server 2016 v2.5.6 running IIS 10.0']
    cmd += ['--tags', tag0, tag1]
    cmd += ['--version-label', eb_environment_name]
    aws_cli.run(cmd, cwd=template_path)

    # Poll every 5s until Green/Ready; give up after 60 minutes
    # (Windows environments take longer than the Linux ones).
    elapsed_time = 0
    while True:
        cmd = ['elasticbeanstalk', 'describe-environments']
        cmd += ['--application-name', eb_application_name]
        cmd += ['--environment-name', eb_environment_name]
        result = aws_cli.run(cmd)

        ee = result['Environments'][0]
        print(json.dumps(ee, sort_keys=True, indent=4))
        if ee.get('Health', '') == 'Green' and ee.get('Status', '') == 'Ready':
            break

        print('creating... (elapsed time: \'%d\' seconds)' % elapsed_time)
        time.sleep(5)
        elapsed_time += 5

        if elapsed_time > 60 * 60:
            raise Exception()

    subprocess.Popen(['rm', '-rf', f'./{name}'], cwd=template_path).communicate()

    ################################################################################
    print_message('revoke security group ingress')
    # Close the world-open SSH rule attached to the environment's groups.
    cmd = ['ec2', 'describe-security-groups']
    cmd += ['--filters', 'Name=tag-key,Values=Name', f'Name=tag-value,Values={eb_environment_name}']
    result = aws_cli.run(cmd)
    for ss in result['SecurityGroups']:
        cmd = ['ec2', 'revoke-security-group-ingress']
        cmd += ['--group-id', ss['GroupId']]
        cmd += ['--protocol', 'tcp']
        cmd += ['--port', '22']
        cmd += ['--cidr', '0.0.0.0/0']
        aws_cli.run(cmd, ignore_error=True)

    ################################################################################
    print_message('swap CNAME if the previous version exists')
    if eb_environment_name_old:
        cmd = ['elasticbeanstalk', 'swap-environment-cnames']
        cmd += ['--source-environment-name', eb_environment_name_old]
        cmd += ['--destination-environment-name', eb_environment_name]
        aws_cli.run(cmd)
def run_create_s3_bucket(name, settings):
    """Create an S3 bucket and apply optional policy, web hosting, and lifecycle rules.

    Reads BUCKET_NAME/REGION from *settings*; optionally applies a canned
    bucket policy ('website' or 'email'), a static-website configuration,
    and an expiration lifecycle rule when EXPIRE_FILES_DAYS > 0.
    """
    aws_cli = AWSCli()
    bucket_name = settings['BUCKET_NAME']
    expire_days = settings.get('EXPIRE_FILES_DAYS', 0)
    is_web_hosting = settings['WEB_HOSTING']
    region = settings['REGION']
    policy = settings.get('POLICY', '')

    ################################################################################
    print_session('create %s' % name)

    # Creation is idempotent-ish: an already-existing bucket is tolerated.
    aws_cli.run([
        's3api', 'create-bucket',
        '--bucket', bucket_name,
        '--create-bucket-configuration', 'LocationConstraint=%s' % region
    ], ignore_error=True)

    ################################################################################
    if policy in ('website', 'email'):
        print_message('delete public access block')
        aws_cli.run(['s3api', 'delete-public-access-block', '--bucket', bucket_name])

        print_message('wait public access block has deleted...')
        time.sleep(10)

        print_message('set bucket policy')
        policy_lines = read_file('aws_iam/aws-s3-bucket-policy-for-%s.json' % policy)
        policy_lines = re_sub_lines(policy_lines, 'BUCKET_NAME', bucket_name)
        aws_cli.run([
            's3api', 'put-bucket-policy',
            '--bucket', bucket_name,
            '--policy', ' '.join(policy_lines)
        ])

    if is_web_hosting:
        print_message('set website configuration')
        aws_cli.run([
            's3api', 'put-bucket-website',
            '--bucket', bucket_name,
            '--website-configuration', 'file://aws_iam/aws-s3-website-configuration.json'
        ])

    ################################################################################
    if expire_days > 0:
        print_message('set life cycle rule')
        lifecycle = {
            "Rules": [{
                "Expiration": {
                    "Days": expire_days
                },
                "ID": "result_file_manage_rule",
                "Filter": {
                    "Prefix": ""
                },
                "Status": "Enabled",
                "NoncurrentVersionExpiration": {
                    "NoncurrentDays": expire_days
                },
                "AbortIncompleteMultipartUpload": {
                    "DaysAfterInitiation": 7
                }
            }]
        }
        aws_cli.run([
            's3api', 'put-bucket-lifecycle-configuration',
            '--bucket', bucket_name,
            '--lifecycle-configuration', json.dumps(lifecycle)
        ])
def run_create_cron_lambda(name, settings):
    """Package a lambda from the template repo and schedule it with a cron rule.

    Builds deploy.zip from ``template/<template_name>/lambda/<name>``,
    creates the AWS Lambda function, then creates a CloudWatch Events rule
    with the configured schedule expression, grants it invoke permission,
    and targets the new function.

    :param name: sub-folder name under the template's ``lambda/`` directory.
    :param settings: dict-like config; reads 'DESCRIPTION', 'NAME' and
        'SCHEDULE_EXPRESSION', and every key/value pair is injected into
        settings_local.py.
    """
    # BUG FIX: aws_cli was referenced below (get_role_arn / run) without ever
    # being created in this function, which raised NameError at runtime.
    # Instantiate it like every sibling run_create_* function does.
    aws_cli = AWSCli()
    description = settings['DESCRIPTION']
    function_name = settings['NAME']
    phase = env['common']['PHASE']
    schedule_expression = settings['SCHEDULE_EXPRESSION']
    template_name = env['template']['NAME']
    template_path = 'template/%s' % template_name
    deploy_folder = '%s/lambda/%s' % (template_path, name)

    ################################################################################
    print_session('create lambda: %s' % function_name)

    gitignore_path = '%s/.gitignore' % deploy_folder
    if os.path.exists(gitignore_path):
        ll = read_file(gitignore_path)
        print_message('cleanup generated files')
        # NOTE(review): removes each .gitignore entry via a shell command;
        # assumes the file contains only simple, trusted path patterns.
        subprocess.Popen(' '.join(['rm', '-rf'] + ll), shell=True, cwd=deploy_folder).communicate()

    requirements_path = '%s/requirements.txt' % deploy_folder
    if os.path.exists(requirements_path):
        print_message('install dependencies')
        # 'pip3' (not 'pip') for consistency with the other lambda deploy
        # functions in this file; vendors dependencies into the zip.
        cmd = ['pip3', 'install', '-r', requirements_path, '-t', deploy_folder]
        subprocess.Popen(cmd).communicate()

    settings_path = '%s/settings_local_sample.py' % deploy_folder
    if os.path.exists(settings_path):
        print_message('create environment values')
        lines = read_file(settings_path)
        option_list = list()
        option_list.append(['PHASE', phase])
        for key in settings:
            value = settings[key]
            option_list.append([key, value])
        for oo in option_list:
            # Rewrite "KEY ..." lines in the sample into "KEY = '<value>'".
            lines = re_sub_lines(lines, '^(%s) .*' % oo[0], '\\1 = \'%s\'' % oo[1])
        write_file('%s/settings_local.py' % deploy_folder, lines)

    print_message('zip files')
    cmd = ['zip', '-r', 'deploy.zip', '.']
    subprocess.Popen(cmd, cwd=deploy_folder).communicate()

    print_message('create lambda function')
    role_arn = aws_cli.get_role_arn('aws-lambda-default-role')

    cmd = [
        'lambda', 'create-function',
        '--function-name', function_name,
        '--description', description,
        '--zip-file', 'fileb://deploy.zip',
        '--role', role_arn,
        '--handler', 'lambda.handler',
        '--runtime', 'python3.6',
        '--timeout', '120'
    ]
    result = aws_cli.run(cmd, cwd=deploy_folder)
    function_arn = result['FunctionArn']

    print_message('create cron event')
    cmd = [
        'events', 'put-rule',
        '--name', function_name + 'CronRule',
        '--description', description,
        '--schedule-expression', schedule_expression
    ]
    result = aws_cli.run(cmd)
    rule_arn = result['RuleArn']

    print_message('give event permission')
    cmd = [
        'lambda', 'add-permission',
        '--function-name', function_name,
        '--statement-id', function_name + 'StatementId',
        '--action', 'lambda:InvokeFunction',
        '--principal', 'events.amazonaws.com',
        '--source-arn', rule_arn
    ]
    aws_cli.run(cmd)

    print_message('link event and lambda')
    cmd = [
        'events', 'put-targets',
        '--rule', function_name + 'CronRule',
        '--targets', '{"Id" : "1", "Arn": "%s"}' % function_arn
    ]
    aws_cli.run(cmd)
def run_create_lambda_sqs(function_name, settings):
    """Package an SQS-triggered lambda from a git repo and create/update it.

    Clones the repo named in GIT_URL (if not already present), builds
    deploy.zip from its ``lambda/<folder_name>`` directory, resolves the
    target SQS queue's ARN, and either updates the existing function (code,
    configuration, tags, and a rebuilt event source mapping) or creates a
    new function wired to the queue.

    :param function_name: AWS Lambda function name; also the default folder name.
    :param settings: dict-like config; reads AWS_DEFAULT_REGION, DESCRIPTION,
        GIT_URL, SQS_NAME and optional FOLDER_NAME, and every key/value pair
        is injected into settings_local.py.
    :raises Exception: if GIT_URL does not end in '<name>.git' or the clone fails.
    """
    aws_cli = AWSCli(settings['AWS_DEFAULT_REGION'])
    description = settings['DESCRIPTION']
    folder_name = settings.get('FOLDER_NAME', function_name)
    git_url = settings['GIT_URL']
    phase = env['common']['PHASE']
    sqs_name = settings['SQS_NAME']

    # Derive the checkout folder name from the trailing '<name>.git' of the URL.
    mm = re.match(r'^.+/(.+)\.git$', git_url)
    if not mm:
        raise Exception()
    git_folder_name = mm.group(1)

    ################################################################################
    print_session('create %s' % function_name)

    ################################################################################
    print_message('download template: %s' % git_folder_name)

    if not os.path.exists('template/%s' % git_folder_name):
        # 'dv' clones the default branch; other phases clone branch == phase.
        if phase == 'dv':
            git_command = ['git', 'clone', '--depth=1', git_url]
        else:
            git_command = ['git', 'clone', '--depth=1', '-b', phase, git_url]
        subprocess.Popen(git_command, cwd='template').communicate()
        if not os.path.exists('template/%s' % git_folder_name):
            raise Exception()

    deploy_folder = 'template/%s/lambda/%s' % (git_folder_name, folder_name)

    ################################################################################
    # Resolve the queue URL, then its ARN (needed for the event source mapping).
    cmd = ['sqs', 'get-queue-url']
    cmd += ['--queue-name', sqs_name]
    queue_url = aws_cli.run(cmd)['QueueUrl']

    cmd = ['sqs', 'get-queue-attributes']
    cmd += ['--queue-url', queue_url]
    cmd += ['--attribute-names', 'QueueArn']
    queue_arn = aws_cli.run(cmd)['Attributes']['QueueArn']

    ################################################################################
    print_message('packaging lambda: %s' % function_name)

    print_message('cleanup generated files')
    subprocess.Popen(['git', 'clean', '-d', '-f', '-x'], cwd=deploy_folder).communicate()

    requirements_path = '%s/requirements.txt' % deploy_folder
    if os.path.exists(requirements_path):
        print_message('install dependencies')
        # Vendor dependencies into the deploy folder so they ship in the zip.
        cmd = ['pip3', 'install', '-r', requirements_path, '-t', deploy_folder]
        subprocess.Popen(cmd).communicate()

    settings_path = '%s/settings_local_sample.py' % deploy_folder
    if os.path.exists(settings_path):
        print_message('create environment values')
        lines = read_file(settings_path)
        option_list = list()
        option_list.append(['PHASE', phase])
        for key in settings:
            value = settings[key]
            option_list.append([key, value])
        for oo in option_list:
            # Rewrite "KEY ..." lines in the sample into "KEY = '<value>'".
            lines = re_sub_lines(lines, '^(%s) .*' % oo[0], '\\1 = \'%s\'' % oo[1])
        write_file('%s/settings_local.py' % deploy_folder, lines)

    print_message('zip files')
    cmd = ['zip', '-r', 'deploy.zip', '.']
    subprocess.Popen(cmd, cwd=deploy_folder).communicate()

    print_message('create lambda function')
    role_arn = aws_cli.get_role_arn('aws-lambda-sqs-role')

    # Git hashes of johanna and the cloned repo, used as traceability tags.
    git_hash_johanna = subprocess.Popen(['git', 'rev-parse', 'HEAD'], stdout=subprocess.PIPE).communicate()[0]
    git_hash_template = subprocess.Popen(['git', 'rev-parse', 'HEAD'], stdout=subprocess.PIPE,
                                         cwd='template/%s' % git_folder_name).communicate()[0]

    tags = list()
    # noinspection PyUnresolvedReferences
    tags.append('git_hash_johanna=%s' % git_hash_johanna.decode('utf-8').strip())
    # noinspection PyUnresolvedReferences
    tags.append('git_hash_%s=%s' % (git_folder_name, git_hash_template.decode('utf-8').strip()))

    ################################################################################
    print_message('check previous version')
    need_update = False
    cmd = ['lambda', 'list-functions']
    result = aws_cli.run(cmd)
    for ff in result['Functions']:
        if function_name == ff['FunctionName']:
            need_update = True
            break

    ################################################################################
    if need_update:
        print_session('update lambda: %s' % function_name)
        cmd = ['lambda', 'update-function-code',
               '--function-name', function_name,
               '--zip-file', 'fileb://deploy.zip']
        result = aws_cli.run(cmd, cwd=deploy_folder)
        function_arn = result['FunctionArn']

        cmd = ['lambda', 'update-function-configuration',
               '--function-name', function_name,
               '--description', description,
               '--role', role_arn,
               '--handler', 'lambda.handler',
               '--runtime', 'python3.6',
               '--timeout', '120']
        aws_cli.run(cmd, cwd=deploy_folder)

        print_message('update lambda tags')
        cmd = ['lambda', 'tag-resource',
               '--resource', function_arn,
               '--tags', ','.join(tags)]
        aws_cli.run(cmd, cwd=deploy_folder)

        # Recreate the SQS trigger from scratch: delete all existing
        # mappings, wait for deletion to settle, then map the queue again.
        print_message('update sqs event source for %s' % function_name)
        cmd = ['lambda', 'list-event-source-mappings',
               '--function-name', function_name]
        mappings = aws_cli.run(cmd)['EventSourceMappings']
        for mapping in mappings:
            cmd = ['lambda', 'delete-event-source-mapping',
                   '--uuid', mapping['UUID']]
            aws_cli.run(cmd)
        print_message('wait two minutes until deletion is complete')
        time.sleep(120)
        cmd = ['lambda', 'create-event-source-mapping',
               '--event-source-arn', queue_arn,
               '--function-name', function_name]
        aws_cli.run(cmd)
        return

    ################################################################################
    print_session('create lambda: %s' % function_name)
    cmd = ['lambda', 'create-function',
           '--function-name', function_name,
           '--description', description,
           '--zip-file', 'fileb://deploy.zip',
           '--role', role_arn,
           '--handler', 'lambda.handler',
           '--runtime', 'python3.6',
           '--tags', ','.join(tags),
           '--timeout', '120']
    aws_cli.run(cmd, cwd=deploy_folder)

    print_message('give event permission')
    # NOTE(review): principal is 'events.amazonaws.com' although the source
    # is an SQS queue ARN — SQS triggers normally don't need add-permission
    # at all (the execution role's sqs permissions suffice); confirm intent.
    cmd = ['lambda', 'add-permission',
           '--function-name', function_name,
           '--statement-id', function_name + 'StatementId',
           '--action', 'lambda:InvokeFunction',
           '--principal', 'events.amazonaws.com',
           '--source-arn', queue_arn]
    aws_cli.run(cmd)

    print_message('create sqs event source for %s' % function_name)
    cmd = ['lambda', 'create-event-source-mapping',
           '--event-source-arn', queue_arn,
           '--function-name', function_name]
    aws_cli.run(cmd)
def run_create_lambda_default(function_name, settings):
    """Clone the template repo, package one lambda folder into deploy.zip,
    and create or update the corresponding AWS Lambda function.

    :param function_name: Lambda function name; also the default folder name
        under ``lambda/`` in the template repo (overridable via FOLDER_NAME).
    :param settings: dict-like config. Reads AWS_DEFAULT_REGION, DESCRIPTION,
        FOLDER_NAME (optional), GIT_URL, AWS_CONNECT_ARN (optional); every
        key/value pair is also injected into the generated settings_local.py.
    :raises Exception: if GIT_URL does not end in ``<name>.git`` or the clone
        did not produce the expected template folder.
    """
    aws_cli = AWSCli(settings['AWS_DEFAULT_REGION'])
    description = settings['DESCRIPTION']
    folder_name = settings.get('FOLDER_NAME', function_name)
    git_url = settings['GIT_URL']
    phase = env['common']['PHASE']

    # Derive the local checkout directory name from the git URL (".../<name>.git").
    mm = re.match(r'^.+/(.+)\.git$', git_url)
    if not mm:
        raise Exception()
    git_folder_name = mm.group(1)

    ################################################################################
    print_session(f'create {function_name}')

    ################################################################################
    print_message(f'download template: {git_folder_name}')

    subprocess.Popen(['mkdir', '-p', './template']).communicate()

    # Only clone when the checkout is not already present; for non-dev phases
    # the branch named after the phase is checked out.
    if not os.path.exists(f'template/{git_folder_name}'):
        if phase == 'dv':
            git_command = ['git', 'clone', '--depth=1', git_url]
        else:
            git_command = ['git', 'clone', '--depth=1', '-b', phase, git_url]
        subprocess.Popen(git_command, cwd='template').communicate()
        if not os.path.exists(f'template/{git_folder_name}'):
            raise Exception()

    deploy_folder = f'template/{git_folder_name}/lambda/{folder_name}'

    ################################################################################
    print_message(f'packaging lambda: {function_name}')

    # Remove every untracked/generated file so the zip only contains sources
    # plus the artifacts produced below.
    print_message('cleanup generated files')
    subprocess.Popen(['git', 'clean', '-d', '-f', '-x'], cwd=deploy_folder).communicate()

    # Vendor dependencies directly into the deploy folder (Lambda zip layout).
    requirements_path = f'{deploy_folder}/requirements.txt'
    if os.path.exists(requirements_path):
        print_message('install dependencies')
        cmd = ['pip3', 'install', '-r', requirements_path, '-t', deploy_folder]
        subprocess.Popen(cmd).communicate()

    # Render settings_local.py from its sample: every settings key (plus PHASE)
    # overwrites the matching "<KEY> ..." line as a quoted string assignment.
    settings_path = f'{deploy_folder}/settings_local_sample.py'
    if os.path.exists(settings_path):
        print_message('create environment values')
        lines = read_file(settings_path)
        option_list = list()
        option_list.append(['PHASE', phase])
        for key in settings:
            value = settings[key]
            option_list.append([key, value])
        for oo in option_list:
            lines = re_sub_lines(lines, f'^({oo[0]}) .*', f'\\1 = \'{oo[1]}\'')
        write_file(f'{deploy_folder}/settings_local.py', lines)

    print_message('zip files')

    cmd = ['zip', '-r', 'deploy.zip', '.']
    subprocess.Popen(cmd, cwd=deploy_folder).communicate()

    print_message('create lambda function')

    role_arn = aws_cli.get_role_arn('aws-lambda-default-role')

    # Record the johanna checkout hash and the template checkout hash as tags
    # so the deployed function can be traced back to exact revisions.
    git_hash_johanna = subprocess.Popen(
        ['git', 'rev-parse', 'HEAD'],
        stdout=subprocess.PIPE).communicate()[0]
    git_hash_template = subprocess.Popen(
        ['git', 'rev-parse', 'HEAD'],
        stdout=subprocess.PIPE,
        cwd=f'template/{git_folder_name}').communicate()[0]

    tags = list()
    # noinspection PyUnresolvedReferences
    tags.append(f"git_hash_johanna={git_hash_johanna.decode('utf-8').strip()}")
    # noinspection PyUnresolvedReferences
    tags.append(
        f"git_hash_{git_folder_name}={git_hash_template.decode('utf-8').strip()}"
    )

    ################################################################################
    print_message('check previous version')

    # A function with the same name already deployed means update instead of create.
    need_update = False
    cmd = ['lambda', 'list-functions']
    result = aws_cli.run(cmd)
    for ff in result['Functions']:
        if function_name == ff['FunctionName']:
            need_update = True
            break

    ################################################################################
    if need_update:
        print_session(f'update lambda: {function_name}')

        cmd = [
            'lambda', 'update-function-code',
            '--function-name', function_name,
            '--zip-file', 'fileb://deploy.zip'
        ]
        result = aws_cli.run(cmd, cwd=deploy_folder)
        function_arn = result['FunctionArn']

        cmd = [
            'lambda', 'update-function-configuration',
            '--function-name', function_name,
            '--description', description,
            '--role', role_arn,
            '--handler', 'lambda.handler',
            '--runtime', 'python3.7',
            '--timeout', '480'
        ]
        aws_cli.run(cmd, cwd=deploy_folder)

        print_message('update lambda tags')

        cmd = [
            'lambda', 'tag-resource',
            '--resource', function_arn,
            '--tags', ','.join(tags)
        ]
        aws_cli.run(cmd, cwd=deploy_folder)

        return

    ################################################################################
    print_session(f'create lambda: {function_name}')

    cmd = [
        'lambda',
        'create-function',
        '--function-name', function_name,
        '--description', description,
        '--zip-file', 'fileb://deploy.zip',
        '--role', role_arn,
        '--handler', 'lambda.handler',
        '--runtime', 'python3.7',
        '--tags', ','.join(tags),
        '--timeout', '480'
    ]
    aws_cli.run(cmd, cwd=deploy_folder)

    # Optionally allow Amazon Connect to invoke this function.
    if 'AWS_CONNECT_ARN' in settings:
        cmd = ['sts', 'get-caller-identity']
        result = aws_cli.run(cmd)
        account_id = result['Account']

        cmd = [
            'lambda', 'add-permission',
            '--function-name', function_name,
            '--statement-id', function_name + 'StatementId',
            '--principal', 'connect.amazonaws.com',
            '--action', 'lambda:InvokeFunction',
            '--source-account', account_id,
            '--source-arn', settings['AWS_CONNECT_ARN']
        ]
        aws_cli.run(cmd)
def run_create_lambda_default(name, settings):
    """Package the ``lambda/<name>`` folder of the configured template and
    create or update the AWS Lambda function described by *settings*.

    :param name: folder name under ``<template>/lambda/`` to deploy.
    :param settings: dict-like config; reads DESCRIPTION and NAME, and every
        key/value pair is injected into the generated settings_local.py by
        the packaging step.

    Consistency fix: the original body duplicated, statement for statement,
    the packaging sequence already factored into the module-level ``_build``
    helper (git hashes, git clean, pip3 install, settings_local rendering,
    zip, tag strings). It now calls ``_build`` instead, so both code paths
    stay in sync.
    """
    aws_cli = AWSCli()
    description = settings['DESCRIPTION']
    function_name = settings['NAME']
    phase = env['common']['PHASE']
    template_name = env['template']['NAME']
    template_path = 'template/%s' % template_name
    deploy_folder = '%s/lambda/%s' % (template_path, name)

    # Clean, install dependencies, render settings_local.py and zip the
    # deploy folder; returns the git-hash tag strings for tagging below.
    tags = _build(deploy_folder, function_name, phase, settings,
                  template_name, template_path)

    print_message('create lambda function')

    role_arn = aws_cli.get_role_arn('aws-lambda-default-role')

    ################################################################################
    print_message('check previous version')

    # A function with the same name already deployed means update, not create.
    need_update = False
    cmd = ['lambda', 'list-functions']
    result = aws_cli.run(cmd)
    for ff in result['Functions']:
        if function_name == ff['FunctionName']:
            need_update = True
            break

    ################################################################################
    if need_update:
        print_session('update lambda: %s' % function_name)

        cmd = ['lambda', 'update-function-code',
               '--function-name', function_name,
               '--zip-file', 'fileb://deploy.zip']
        result = aws_cli.run(cmd, cwd=deploy_folder)
        function_arn = result['FunctionArn']

        print_message('update lambda tags')

        cmd = ['lambda', 'tag-resource',
               '--resource', function_arn,
               '--tags', ','.join(tags)]
        aws_cli.run(cmd, cwd=deploy_folder)

        return

    ################################################################################
    print_session('create lambda: %s' % function_name)

    cmd = ['lambda', 'create-function',
           '--function-name', function_name,
           '--description', description,
           '--zip-file', 'fileb://deploy.zip',
           '--role', role_arn,
           '--handler', 'lambda.handler',
           '--runtime', 'python3.6',
           '--tags', ','.join(tags),
           '--timeout', '120']
    aws_cli.run(cmd, cwd=deploy_folder)
def run_create_eb_spring(name, settings):
    """Build a Spring (Maven/WAR) app and deploy it as a brand-new Elastic
    Beanstalk environment, blue/green style: a timestamped environment is
    created next to any existing one, then CNAMEs are swapped at the end.

    :param name: app name; used for the template folder, environment name
        prefix and the git-hash tag.
    :param settings: dict-like config. Reads AWS_ASG_MAX_VALUE,
        AWS_ASG_MIN_VALUE, CNAME, DB_CONNECTION_STR_SUFFIX (optional),
        GIT_URL, INSTANCE_TYPE (optional), PROPERTIES_FILE, SUBNET_TYPE;
        every key/value is also written into the rendered properties file.
    :raises Exception: on missing VPC, unknown subnet type, failed clone,
        a previous environment that is not Ready, or a 30-minute timeout
        waiting for the new environment to turn Green/Ready.
    """
    aws_cli = AWSCli()
    aws_asg_max_value = settings['AWS_ASG_MAX_VALUE']
    aws_asg_min_value = settings['AWS_ASG_MIN_VALUE']
    aws_default_region = env['aws']['AWS_DEFAULT_REGION']
    cname = settings['CNAME']
    db_conn_str_suffix = settings.get('DB_CONNECTION_STR_SUFFIX', '')
    eb_application_name = env['elasticbeanstalk']['APPLICATION_NAME']
    git_url = settings['GIT_URL']
    instance_type = settings.get('INSTANCE_TYPE', 't2.medium')
    key_pair_name = env['common']['AWS_KEY_PAIR_NAME']
    phase = env['common']['PHASE']
    service_name = env['common'].get('SERVICE_NAME', '')
    subnet_type = settings['SUBNET_TYPE']
    name_prefix = '%s_' % service_name if service_name else ''

    cidr_subnet = aws_cli.cidr_subnet

    # Timestamp makes the environment name and WAR filename unique per deploy.
    str_timestamp = str(int(time.time()))

    war_filename = '%s-%s.war' % (name, str_timestamp)

    eb_environment_name = '%s-%s' % (name, str_timestamp)
    eb_environment_name_old = None

    template_folder = 'template/%s' % name
    target_folder = 'template/%s/target' % name
    ebextensions_folder = 'template/%s/_provisioning/.ebextensions' % name
    configuration_folder = 'template/%s/_provisioning/configuration' % name
    properties_file = 'template/%s/%s' % (name, settings['PROPERTIES_FILE'])

    # Hash of the johanna checkout itself, recorded as an environment tag.
    git_rev = ['git', 'rev-parse', 'HEAD']
    git_hash_johanna = subprocess.Popen(
        git_rev, stdout=subprocess.PIPE).communicate()[0]

    ################################################################################
    print_session('create %s' % name)

    ################################################################################
    print_message('get vpc id')

    rds_vpc_id, eb_vpc_id = aws_cli.get_vpc_id()
    if not eb_vpc_id:
        print('ERROR!!! No VPC found')
        raise Exception()

    ################################################################################
    print_message('get subnet id')

    # For 'public' subnet_type the ELB goes into the public subnets while the
    # EC2 instances stay in the private ones; for 'private' both share the
    # private subnets.
    elb_subnet_id_1 = None
    elb_subnet_id_2 = None
    ec2_subnet_id_1 = None
    ec2_subnet_id_2 = None
    cmd = ['ec2', 'describe-subnets']
    result = aws_cli.run(cmd)
    for r in result['Subnets']:
        if r['VpcId'] != eb_vpc_id:
            continue
        if 'public' == subnet_type:
            if r['CidrBlock'] == cidr_subnet['eb']['public_1']:
                elb_subnet_id_1 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['public_2']:
                elb_subnet_id_2 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['private_1']:
                ec2_subnet_id_1 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['private_2']:
                ec2_subnet_id_2 = r['SubnetId']
        elif 'private' == subnet_type:
            if r['CidrBlock'] == cidr_subnet['eb']['private_1']:
                elb_subnet_id_1 = ec2_subnet_id_1 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['private_2']:
                elb_subnet_id_2 = ec2_subnet_id_2 = r['SubnetId']
        else:
            print('ERROR!!! Unknown subnet type:', subnet_type)
            raise Exception()

    ################################################################################
    print_message('get security group id')

    # NOTE(review): both branches look up '<prefix>eb_private' — presumably
    # intentional because the instances sit in private subnets either way,
    # but worth confirming against the security-group provisioning code.
    security_group_id = None
    cmd = ['ec2', 'describe-security-groups']
    result = aws_cli.run(cmd)
    for r in result['SecurityGroups']:
        if r['VpcId'] != eb_vpc_id:
            continue
        if 'public' == subnet_type:
            if r['GroupName'] == '%seb_private' % name_prefix:
                security_group_id = r['GroupId']
                break
        elif 'private' == subnet_type:
            if r['GroupName'] == '%seb_private' % name_prefix:
                security_group_id = r['GroupId']
                break
        else:
            print('ERROR!!! Unknown subnet type:', subnet_type)
            raise Exception()

    ################################################################################
    print_message('get database address')

    db_address = aws_cli.get_rds_address()

    ################################################################################
    print_message('get cache address')

    cache_address = aws_cli.get_elasticache_address()

    ################################################################################
    print_message('git clone')

    subprocess.Popen(['mkdir', '-p', 'template']).communicate()
    subprocess.Popen(['rm', '-rf', '%s/' % name], cwd='template').communicate()

    # GIT_BRANCH_APP overrides the phase-named branch; 'dv' clones the default branch.
    branch = aws_cli.env.get('GIT_BRANCH_APP', phase)
    git_command = ['git', 'clone', '--depth=1']
    if branch != 'dv':
        git_command += ['-b', branch]
    git_command += [git_url]
    subprocess.Popen(git_command, cwd='template').communicate()
    if not os.path.exists('%s' % template_folder):
        raise Exception()

    git_hash_app = subprocess.Popen(git_rev, stdout=subprocess.PIPE,
                                    cwd=template_folder).communicate()[0]

    # Strip git metadata so it never ends up inside the deployed bundle.
    subprocess.Popen(['rm', '-rf', '.git'], cwd=template_folder).communicate()
    subprocess.Popen(['rm', '-rf', '.gitignore'], cwd=template_folder).communicate()

    ################################################################################
    print_message('configuration %s' % name)

    with open('%s/phase' % configuration_folder, 'w') as f:
        f.write(phase)
        f.close()

    lines = read_file('%s/etc/logstash/conf.d/logstash_sample.conf' % configuration_folder)
    write_file('%s/etc/logstash/conf.d/logstash.conf' % configuration_folder, lines)

    # Render the .ebextensions config from its sample with the ASG bounds.
    lines = read_file('%s/%s.config.sample' % (ebextensions_folder, name))
    lines = re_sub_lines(lines, 'AWS_ASG_MAX_VALUE', aws_asg_max_value)
    lines = re_sub_lines(lines, 'AWS_ASG_MIN_VALUE', aws_asg_min_value)
    write_file('%s/%s.config' % (ebextensions_folder, name), lines)

    # Render the Spring properties file: DB/cache endpoints plus every
    # settings key overwrite matching "<key>=..." lines in the sample.
    sample_file = properties_file.replace('.properties', '-sample.properties')
    lines = read_file(sample_file)
    option_list = list()
    option_list.append(
        ['jdbc.url', 'jdbc:mysql://%s%s' % (db_address, db_conn_str_suffix)])
    option_list.append(['jdbc.username', env['rds']['USER_NAME']])
    option_list.append(['jdbc.password', env['rds']['USER_PASSWORD']])
    option_list.append(['redis.host', cache_address])
    for key in settings:
        value = settings[key]
        option_list.append([key, value])
    for oo in option_list:
        lines = re_sub_lines(lines, '^(%s)=.*' % oo[0], '\\1=%s' % oo[1])
    write_file(properties_file, lines)

    ################################################################################
    print_message('check previous version')

    # If an environment already owns the target CNAME, remember it for the
    # final swap and deploy the new one under a timestamp-suffixed CNAME.
    cmd = ['elasticbeanstalk', 'describe-environments']
    cmd += ['--application-name', eb_application_name]
    result = aws_cli.run(cmd)
    for r in result['Environments']:
        if 'CNAME' not in r:
            continue
        if r['CNAME'] == '%s.%s.elasticbeanstalk.com' % (cname, aws_default_region):
            if r['Status'] == 'Terminated':
                continue
            elif r['Status'] != 'Ready':
                print('previous version is not ready.')
                raise Exception()
            eb_environment_name_old = r['EnvironmentName']
            cname += '-%s' % str_timestamp
            break

    ################################################################################
    print_message('build artifact')

    build_command = ['mvn']
    if phase != 'dv':
        build_command += ['exec:exec']
    build_command += ['package']
    print_message('build %s: %s' % (name, ' '.join(build_command)))
    subprocess.Popen(build_command, cwd=template_folder).communicate()

    ################################################################################
    print_message('create storage location')

    cmd = ['elasticbeanstalk', 'create-storage-location']
    result = aws_cli.run(cmd)

    s3_bucket = result['S3Bucket']
    s3_war_filename = '/'.join(
        ['s3://' + s3_bucket, eb_application_name, war_filename])

    ################################################################################
    print_message('create application version')

    # Rename the Maven artifact to the timestamped name, upload it, then
    # delete the local copy.
    cmd = ['mv', 'ROOT.war', war_filename]
    subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                     cwd=target_folder).communicate()

    cmd = ['s3', 'cp', war_filename, s3_war_filename]
    aws_cli.run(cmd, cwd=target_folder)

    cmd = ['rm', '-rf', war_filename]
    subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                     cwd=target_folder).communicate()

    cmd = ['elasticbeanstalk', 'create-application-version']
    cmd += ['--application-name', eb_application_name]
    cmd += [
        '--source-bundle',
        'S3Bucket="%s",S3Key="%s/%s"' % (s3_bucket, eb_application_name, war_filename)
    ]
    cmd += ['--version-label', eb_environment_name]
    aws_cli.run(cmd, cwd=template_folder)

    ################################################################################
    print_message('create environment %s' % name)

    # Elastic Beanstalk option-settings payload, built dict by dict.
    option_settings = list()

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'EC2KeyName'
    oo['Value'] = key_pair_name
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'InstanceType'
    oo['Value'] = instance_type
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'IamInstanceProfile'
    oo['Value'] = 'aws-elasticbeanstalk-ec2-role'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'SecurityGroups'
    oo['Value'] = security_group_id
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'AssociatePublicIpAddress'
    oo['Value'] = 'false'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'ELBScheme'
    oo['Value'] = 'public'
    if 'private' == subnet_type:
        oo['Value'] = 'internal'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'ELBSubnets'
    oo['Value'] = ','.join([elb_subnet_id_1, elb_subnet_id_2])
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'Subnets'
    oo['Value'] = ','.join([ec2_subnet_id_1, ec2_subnet_id_2])
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'VPCId'
    oo['Value'] = eb_vpc_id
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:environment'
    oo['OptionName'] = 'EnvironmentType'
    oo['Value'] = 'LoadBalanced'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:environment'
    oo['OptionName'] = 'ServiceRole'
    oo['Value'] = 'aws-elasticbeanstalk-service-role'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:healthreporting:system'
    oo['OptionName'] = 'SystemType'
    oo['Value'] = 'enhanced'
    option_settings.append(oo)

    # Enhanced-health ConfigDocument: 60-second CloudWatch metrics for the
    # environment request counters and basic instance health.
    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:healthreporting:system'
    oo['OptionName'] = 'ConfigDocument'
    cw_env = dict()
    cw_env['ApplicationRequestsTotal'] = 60
    cw_env['ApplicationRequests2xx'] = 60
    cw_env['ApplicationRequests3xx'] = 60
    cw_env['ApplicationRequests4xx'] = 60
    cw_env['ApplicationRequests5xx'] = 60
    cw_instance = dict()
    cw_instance['RootFilesystemUtil'] = 60
    cw_instance['InstanceHealth'] = 60
    cw_instance['CPUIdle'] = 60
    cw = dict()
    cw['Environment'] = cw_env
    cw['Instance'] = cw_instance
    cfg_doc = dict()
    cfg_doc['CloudWatchMetrics'] = cw
    cfg_doc['Version'] = 1
    oo['Value'] = json.dumps(cfg_doc)
    option_settings.append(oo)

    option_settings = json.dumps(option_settings)

    tag0 = 'Key=git_hash_johanna,Value=%s' % git_hash_johanna.decode(
        'utf-8').strip()
    tag2 = 'Key=git_hash_%s,Value=%s' % (name, git_hash_app.decode('utf-8').strip())

    cmd = ['elasticbeanstalk', 'create-environment']
    cmd += ['--application-name', eb_application_name]
    cmd += ['--cname-prefix', cname]
    cmd += ['--environment-name', eb_environment_name]
    cmd += ['--option-settings', option_settings]
    cmd += [
        '--solution-stack-name',
        '64bit Amazon Linux 2018.03 v3.0.1 running Tomcat 8.5 Java 8'
    ]
    cmd += ['--tags', tag0, tag2]
    cmd += ['--version-label', eb_environment_name]
    aws_cli.run(cmd, cwd=template_folder)

    # Poll every 5 seconds until the environment is Green/Ready; give up
    # after 30 minutes.
    elapsed_time = 0
    while True:
        cmd = ['elasticbeanstalk', 'describe-environments']
        cmd += ['--application-name', eb_application_name]
        cmd += ['--environment-name', eb_environment_name]
        result = aws_cli.run(cmd)

        ee = result['Environments'][0]
        print(json.dumps(ee, sort_keys=True, indent=4))
        if ee.get('Health', '') == 'Green' and ee.get('Status', '') == 'Ready':
            break

        print('creating... (elapsed time: \'%d\' seconds)' % elapsed_time)
        time.sleep(5)
        elapsed_time += 5

        if elapsed_time > 60 * 30:
            raise Exception()

    subprocess.Popen(['rm', '-rf', '%s/' % name], cwd='template').communicate()

    ################################################################################
    print_message('revoke security group ingress')

    # Close the world-open SSH rule EB adds to the environment's security groups.
    cmd = ['ec2', 'describe-security-groups']
    cmd += [
        '--filters',
        'Name=tag-key,Values=Name,Name=tag-value,Values=%s' % eb_environment_name
    ]
    result = aws_cli.run(cmd)

    for ss in result['SecurityGroups']:
        cmd = ['ec2', 'revoke-security-group-ingress']
        cmd += ['--group-id', ss['GroupId']]
        cmd += ['--protocol', 'tcp']
        cmd += ['--port', '22']
        cmd += ['--cidr', '0.0.0.0/0']
        aws_cli.run(cmd, ignore_error=True)

    ################################################################################
    print_message('swap CNAME if the previous version exists')

    if eb_environment_name_old:
        cmd = ['elasticbeanstalk', 'swap-environment-cnames']
        cmd += ['--source-environment-name', eb_environment_name_old]
        cmd += ['--destination-environment-name', eb_environment_name]
        aws_cli.run(cmd)
for r in result['SecurityGroups']: if r['VpcId'] != eb_vpc_id: continue if r['GroupName'] == 'eb_public': security_group_id = r['GroupId'] break ################################################################################ print_message('configuration nova') with open('nova/configuration/phase', 'w') as f: f.write(phase) f.close() lines = read_file('nova/.elasticbeanstalk/config.yml.sample') lines = re_sub_lines(lines, '^( application_name).*', '\\1: %s' % eb_application_name) lines = re_sub_lines(lines, '^( default_ec2_keyname).*', '\\1: %s' % key_pair_name) write_file('nova/.elasticbeanstalk/config.yml', lines) lines = read_file('nova/.ebextensions/nova.config.sample') lines = re_sub_lines(lines, 'AWS_ASG_MIN_VALUE_NOVA', aws_asg_min_value) lines = re_sub_lines(lines, 'AWS_ASG_MAX_VALUE_NOVA', aws_asg_max_value) write_file('nova/.ebextensions/nova.config', lines) lines = read_file('nova/configuration/etc/nova/settings_local.py.sample') lines = re_sub_lines(lines, '^(DEBUG).*', '\\1 = %s' % debug) option_list = list() option_list.append(['HOST', host_nova]) option_list.append(['PHASE', phase]) option_list.append(['URL', url_nova])
def run_create_eb_cron_job(name, settings):
    """Deploy a cron-job app as a brand-new Elastic Beanstalk environment,
    blue/green style: a timestamped environment is created next to any
    existing one and CNAMEs are swapped at the end.

    :param name: app folder name under ``<template>/elasticbeanstalk/``.
    :param settings: dict-like config. Reads AWS_DEFAULT_REGION,
        AWS_ASG_MAX_VALUE, AWS_ASG_MIN_VALUE, AWS_EB_NOTIFICATION_EMAIL,
        CNAME, GIT_URL, SUBNET_TYPE, SETTINGS_LOCAL_PATH, PRIVATE_IP
        (optional); every key/value is also injected into each rendered
        settings_local.py.
    :raises Exception: on missing VPC, unknown subnet type, failed clone,
        a previous environment that is not Ready, or a 30-minute timeout
        waiting for the new environment to turn Green/Ready.

    Fixes vs. the original:
      * ``hasattr(settings, 'PRIVATE_IP')`` is always False for a dict
        (PRIVATE_IP is a key, not an attribute), so the network-interface
        attachment step could never run; replaced with ``settings.get``.
      * The ELBScheme value for public subnets was the placeholder ``'...'``;
        it is now ``'public'``, matching run_create_eb_spring.
    """
    aws_cli = AWSCli(settings['AWS_DEFAULT_REGION'])
    aws_asg_max_value = settings['AWS_ASG_MAX_VALUE']
    aws_asg_min_value = settings['AWS_ASG_MIN_VALUE']
    aws_default_region = settings['AWS_DEFAULT_REGION']
    aws_eb_notification_email = settings['AWS_EB_NOTIFICATION_EMAIL']
    cname = settings['CNAME']
    debug = env['common']['DEBUG']
    eb_application_name = env['elasticbeanstalk']['APPLICATION_NAME']
    git_url = settings['GIT_URL']
    key_pair_name = env['common']['AWS_KEY_PAIR_NAME']
    phase = env['common']['PHASE']
    subnet_type = settings['SUBNET_TYPE']
    template_name = env['template']['NAME']

    service_name = env['common'].get('SERVICE_NAME', '')
    name_prefix = '%s_' % service_name if service_name else ''

    # BUG FIX: the original used hasattr(settings, 'PRIVATE_IP'), which is
    # always False for a dict, so private_ip was always None and the
    # attach-network-interface step below was dead code.
    private_ip = settings.get('PRIVATE_IP')

    cidr_subnet = aws_cli.cidr_subnet

    # Timestamp makes the environment name and zip filename unique per deploy.
    str_timestamp = str(int(time.time()))

    zip_filename = '%s-%s.zip' % (name, str_timestamp)

    eb_environment_name = '%s-%s' % (name, str_timestamp)
    eb_environment_name_old = None

    template_path = 'template/%s' % template_name
    environment_path = '%s/elasticbeanstalk/%s' % (template_path, name)

    # Record johanna and template checkout hashes for the environment tags.
    git_rev = ['git', 'rev-parse', 'HEAD']
    git_hash_johanna = subprocess.Popen(git_rev,
                                        stdout=subprocess.PIPE).communicate()[0]
    git_hash_template = subprocess.Popen(git_rev,
                                         stdout=subprocess.PIPE,
                                         cwd=template_path).communicate()[0]

    ################################################################################
    print_session('create %s' % name)

    ################################################################################
    print_message('get vpc id')

    rds_vpc_id, eb_vpc_id = aws_cli.get_vpc_id()
    if not eb_vpc_id:
        print('ERROR!!! No VPC found')
        raise Exception()

    ################################################################################
    print_message('get subnet id')

    subnet_id_1 = None
    subnet_id_2 = None
    cmd = ['ec2', 'describe-subnets']
    result = aws_cli.run(cmd)
    for r in result['Subnets']:
        if r['VpcId'] != eb_vpc_id:
            continue
        if 'public' == subnet_type:
            if r['CidrBlock'] == cidr_subnet['eb']['public_1']:
                subnet_id_1 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['public_2']:
                subnet_id_2 = r['SubnetId']
        elif 'private' == subnet_type:
            if r['CidrBlock'] == cidr_subnet['eb']['private_1']:
                subnet_id_1 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['private_2']:
                subnet_id_2 = r['SubnetId']
        else:
            print('ERROR!!! Unknown subnet type:', subnet_type)
            raise Exception()

    ################################################################################
    print_message('get security group id')

    security_group_id = None
    cmd = ['ec2', 'describe-security-groups']
    result = aws_cli.run(cmd)
    for r in result['SecurityGroups']:
        if r['VpcId'] != eb_vpc_id:
            continue
        if 'public' == subnet_type:
            if r['GroupName'] == '%seb_public' % name_prefix:
                security_group_id = r['GroupId']
                break
        elif 'private' == subnet_type:
            if r['GroupName'] == '%seb_private' % name_prefix:
                security_group_id = r['GroupId']
                break
        else:
            print('ERROR!!! Unknown subnet type:', subnet_type)
            raise Exception()

    ################################################################################
    print_message('configuration %s' % name)

    with open('%s/configuration/phase' % environment_path, 'w') as f:
        f.write(phase)

    # Render the .ebextensions config with ASG bounds and notification email.
    lines = read_file('%s/.ebextensions/%s.config.sample' % (environment_path, name))
    lines = re_sub_lines(lines, 'AWS_ASG_MIN_VALUE', aws_asg_min_value)
    lines = re_sub_lines(lines, 'AWS_ASG_MAX_VALUE', aws_asg_max_value)
    lines = re_sub_lines(lines, 'AWS_EB_NOTIFICATION_EMAIL', aws_eb_notification_email)
    write_file('%s/.ebextensions/%s.config' % (environment_path, name), lines)

    ################################################################################
    print_message('git clone')

    subprocess.Popen(['rm', '-rf', './%s' % name], cwd=environment_path).communicate()

    # Non-dev phases clone the branch named after the phase.
    if phase == 'dv':
        git_command = ['git', 'clone', '--depth=1', git_url]
    else:
        git_command = ['git', 'clone', '--depth=1', '-b', phase, git_url]
    subprocess.Popen(git_command, cwd=environment_path).communicate()
    if not os.path.exists('%s/%s' % (environment_path, name)):
        raise Exception()

    git_hash_app = subprocess.Popen(git_rev,
                                    stdout=subprocess.PIPE,
                                    cwd='%s/%s' % (environment_path, name)).communicate()[0]

    # Strip git metadata so it never ends up inside the deployed bundle.
    subprocess.Popen(['rm', '-rf', './%s/.git' % name],
                     cwd=environment_path).communicate()
    subprocess.Popen(['rm', '-rf', './%s/.gitignore' % name],
                     cwd=environment_path).communicate()

    ################################################################################
    # Render every settings_local.py listed in SETTINGS_LOCAL_PATH: DEBUG plus
    # each settings key (and PHASE) overwrites the matching sample line.
    for ss in settings['SETTINGS_LOCAL_PATH']:
        lines = read_file('%s/%s/settings_local_sample.py' % (environment_path, ss))
        lines = re_sub_lines(lines, '^(DEBUG).*', '\\1 = %s' % debug)
        option_list = list()
        option_list.append(['PHASE', phase])
        for key in settings:
            value = settings[key]
            option_list.append([key, value])
        for oo in option_list:
            lines = re_sub_lines(lines, '^(%s) .*' % oo[0], '\\1 = \'%s\'' % oo[1])
        write_file('%s/%s/settings_local.py' % (environment_path, ss), lines)

    ################################################################################
    print_message('check previous version')

    # If an environment already owns the target CNAME, remember it for the
    # final swap and deploy the new one under a timestamp-suffixed CNAME.
    cmd = ['elasticbeanstalk', 'describe-environments']
    cmd += ['--application-name', eb_application_name]
    result = aws_cli.run(cmd)
    for r in result['Environments']:
        if 'CNAME' not in r:
            continue
        if r['CNAME'] == '%s.%s.elasticbeanstalk.com' % (cname, aws_default_region):
            if r['Status'] == 'Terminated':
                continue
            elif r['Status'] != 'Ready':
                print('previous version is not ready.')
                raise Exception()
            eb_environment_name_old = r['EnvironmentName']
            cname += '-%s' % str_timestamp
            break

    ################################################################################
    print_message('create storage location')

    cmd = ['elasticbeanstalk', 'create-storage-location']
    result = aws_cli.run(cmd)

    s3_bucket = result['S3Bucket']
    s3_zip_filename = '/'.join(['s3://' + s3_bucket, eb_application_name, zip_filename])

    ################################################################################
    print_message('create application version')

    # Zip the app (including .ebextensions), upload, delete the local copy.
    cmd = ['zip', '-r', zip_filename, '.', '.ebextensions']
    subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                     cwd=environment_path).communicate()

    cmd = ['s3', 'cp', zip_filename, s3_zip_filename]
    aws_cli.run(cmd, cwd=environment_path)

    cmd = ['rm', '-rf', zip_filename]
    subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                     cwd=environment_path).communicate()

    cmd = ['elasticbeanstalk', 'create-application-version']
    cmd += ['--application-name', eb_application_name]
    cmd += ['--source-bundle',
            'S3Bucket="%s",S3Key="%s/%s"' % (s3_bucket, eb_application_name, zip_filename)]
    cmd += ['--version-label', eb_environment_name]
    aws_cli.run(cmd, cwd=environment_path)

    ################################################################################
    print_message('create environment %s' % name)

    # Elastic Beanstalk option-settings payload, built dict by dict.
    option_settings = list()

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'EC2KeyName'
    oo['Value'] = key_pair_name
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'InstanceType'
    oo['Value'] = 't2.micro'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'IamInstanceProfile'
    oo['Value'] = 'aws-elasticbeanstalk-ec2-role'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'SecurityGroups'
    oo['Value'] = security_group_id
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'AssociatePublicIpAddress'
    oo['Value'] = 'true'
    if 'private' == subnet_type:
        oo['Value'] = 'false'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'ELBScheme'
    # BUG FIX: was the placeholder '...'; valid ELBScheme values are
    # 'public'/'internal' (matches run_create_eb_spring).
    oo['Value'] = 'public'
    if 'private' == subnet_type:
        oo['Value'] = 'internal'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'ELBSubnets'
    oo['Value'] = ','.join([subnet_id_1, subnet_id_2])
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'Subnets'
    oo['Value'] = ','.join([subnet_id_1, subnet_id_2])
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'VPCId'
    oo['Value'] = eb_vpc_id
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:environment'
    oo['OptionName'] = 'EnvironmentType'
    oo['Value'] = 'LoadBalanced'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:environment'
    oo['OptionName'] = 'ServiceRole'
    oo['Value'] = 'aws-elasticbeanstalk-service-role'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:healthreporting:system'
    oo['OptionName'] = 'SystemType'
    oo['Value'] = 'enhanced'
    option_settings.append(oo)

    # Enhanced-health ConfigDocument: 60-second instance health metrics.
    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:healthreporting:system'
    oo['OptionName'] = 'ConfigDocument'
    cw_instance = dict()
    cw_instance['RootFilesystemUtil'] = 60
    cw_instance['InstanceHealth'] = 60
    cw_instance['CPUIdle'] = 60
    cw = dict()
    cw['Instance'] = cw_instance
    cfg_doc = dict()
    cfg_doc['CloudWatchMetrics'] = cw
    cfg_doc['Version'] = 1
    oo['Value'] = json.dumps(cfg_doc)
    option_settings.append(oo)

    # Stream instance logs to CloudWatch Logs, short retention.
    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:cloudwatch:logs'
    oo['OptionName'] = 'StreamLogs'
    oo['Value'] = 'true'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:cloudwatch:logs'
    oo['OptionName'] = 'DeleteOnTerminate'
    oo['Value'] = 'true'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:cloudwatch:logs'
    oo['OptionName'] = 'RetentionInDays'
    oo['Value'] = '3'
    option_settings.append(oo)

    option_settings = json.dumps(option_settings)

    tag0 = 'Key=git_hash_johanna,Value=%s' % git_hash_johanna.decode('utf-8').strip()
    tag1 = 'Key=git_hash_%s,Value=%s' % (template_name, git_hash_template.decode('utf-8').strip())
    tag2 = 'Key=git_hash_%s,Value=%s' % (name, git_hash_app.decode('utf-8').strip())

    cmd = ['elasticbeanstalk', 'create-environment']
    cmd += ['--application-name', eb_application_name]
    cmd += ['--cname-prefix', cname]
    cmd += ['--environment-name', eb_environment_name]
    cmd += ['--option-settings', option_settings]
    cmd += ['--solution-stack-name', '64bit Amazon Linux 2018.03 v2.7.1 running Python 3.6']
    cmd += ['--tags', tag0, tag1, tag2]
    cmd += ['--version-label', eb_environment_name]
    aws_cli.run(cmd, cwd=environment_path)

    # Poll every 5 seconds until the environment is Green/Ready; give up
    # after 30 minutes.
    elapsed_time = 0
    while True:
        cmd = ['elasticbeanstalk', 'describe-environments']
        cmd += ['--application-name', eb_application_name]
        cmd += ['--environment-name', eb_environment_name]
        result = aws_cli.run(cmd)

        ee = result['Environments'][0]
        print(json.dumps(ee, sort_keys=True, indent=4))
        if ee.get('Health', '') == 'Green' and ee.get('Status', '') == 'Ready':
            break

        print('creating... (elapsed time: \'%d\' seconds)' % elapsed_time)
        time.sleep(5)
        elapsed_time += 5

        if elapsed_time > 60 * 30:
            raise Exception()

    subprocess.Popen(['rm', '-rf', './%s' % name], cwd=environment_path).communicate()

    ################################################################################
    print_message('revoke security group ingress')

    # Close the world-open SSH rule EB adds to the environment's security groups.
    cmd = ['ec2', 'describe-security-groups']
    cmd += ['--filters', 'Name=tag-key,Values=Name,Name=tag-value,Values=%s' % eb_environment_name]
    result = aws_cli.run(cmd)

    for ss in result['SecurityGroups']:
        cmd = ['ec2', 'revoke-security-group-ingress']
        cmd += ['--group-id', ss['GroupId']]
        cmd += ['--protocol', 'tcp']
        cmd += ['--port', '22']
        cmd += ['--cidr', '0.0.0.0/0']
        aws_cli.run(cmd, ignore_error=True)

    ################################################################################
    if private_ip is not None:
        print_message('attach network interface')

        # Move the fixed ENI to the new instance: if it is currently attached
        # elsewhere, detach and retry every 5 seconds until it comes free.
        elapsed_time = 0
        while True:
            cmd = ['ec2', 'describe-network-interfaces']
            cmd += ['--filters', 'Name=private-ip-address,Values=%s' % private_ip]
            result = aws_cli.run(cmd)
            network_interface_id = result['NetworkInterfaces'][0]['NetworkInterfaceId']

            if 'Attachment' not in result['NetworkInterfaces'][0]:
                cmd = ['ec2', 'describe-instances']
                cmd += ['--filters', 'Name=tag-key,Values=Name,Name=tag-value,Values=%s' % eb_environment_name]
                result = aws_cli.run(cmd)
                instance_id = result['Reservations'][0]['Instances'][0]['InstanceId']

                cmd = ['ec2', 'attach-network-interface']
                cmd += ['--network-interface-id', network_interface_id]
                cmd += ['--instance-id', instance_id]
                cmd += ['--device-index', '1']
                aws_cli.run(cmd)
                break

            attachment_id = result['NetworkInterfaces'][0]['Attachment']['AttachmentId']
            cmd = ['ec2', 'detach-network-interface']
            cmd += ['--attachment-id', attachment_id]
            aws_cli.run(cmd, ignore_error=True)

            print('detaching network interface... (elapsed time: \'%d\' seconds)' % elapsed_time)
            time.sleep(5)
            elapsed_time += 5

    ################################################################################
    print_message('swap CNAME if the previous version exists')

    if eb_environment_name_old:
        cmd = ['elasticbeanstalk', 'swap-environment-cnames']
        cmd += ['--source-environment-name', eb_environment_name_old]
        cmd += ['--destination-environment-name', eb_environment_name]
        aws_cli.run(cmd)
def run_create_eb_openvpn(name, settings):
    """Create an Elastic Beanstalk environment running an OpenVPN gateway.

    Clones the application repository, injects the OpenVPN key material and
    per-phase configuration into the checkout, packages it as an application
    version, creates the EB environment, then hardens it (revokes SSH
    ingress, disables src/dst checking) and swaps the CNAME over from the
    previous environment if one exists.

    :param name: environment base name; also the repo sub-directory and the
        template folder name (``template/<name>``).
    :param settings: per-environment config dict (region, CNAME, git URL,
        OpenVPN certs/keys, subnet IP, notification e-mail, ...). Every key
        is also substituted into ``settings_local.py``.

    NOTE(review): helpers ``AWSCli``, ``env``, ``print_session``,
    ``print_message``, ``read_file``, ``re_sub_lines`` and ``write_file``
    are defined elsewhere in this project.
    """
    aws_cli = AWSCli(settings['AWS_DEFAULT_REGION'])
    accounts = settings['ACCOUNTS']
    aws_default_region = settings['AWS_DEFAULT_REGION']
    aws_eb_notification_email = settings['AWS_EB_NOTIFICATION_EMAIL']
    cname = settings['CNAME']
    debug = env['common']['DEBUG']
    eb_application_name = env['elasticbeanstalk']['APPLICATION_NAME']
    git_url = settings['GIT_URL']
    key_pair_name = env['common']['AWS_KEY_PAIR_NAME']
    openvpn_ca_crt = settings['CA_CRT']
    openvpn_ca_key = settings['CA_KEY']
    openvpn_dh2048_pem = settings['DH2048_PEM']
    openvpn_server_crt = settings['SERVER_CRT']
    openvpn_server_key = settings['SERVER_KEY']
    openvpn_subnet_ip = settings['OPENVPN_SUBNET_IP']
    phase = env['common']['PHASE']
    service_name = env['common'].get('SERVICE_NAME', '')
    # Optional service prefix for shared-account setups (e.g. "svc_eb_public").
    name_prefix = '%s_' % service_name if service_name else ''

    cidr_vpc = aws_cli.cidr_vpc
    cidr_subnet = aws_cli.cidr_subnet

    # Timestamp suffix makes environment / zip names unique per deploy.
    str_timestamp = str(int(time.time()))
    zip_filename = '%s-%s.zip' % (name, str_timestamp)

    eb_environment_name = '%s-%s' % (name, str_timestamp)
    eb_environment_name_old = None

    template_path = 'template/%s' % name

    git_rev = ['git', 'rev-parse', 'HEAD']
    # Hash of the johanna checkout itself (cwd), recorded as an EB tag below.
    git_hash_johanna = subprocess.Popen(git_rev, stdout=subprocess.PIPE).communicate()[0]

    ################################################################################
    #
    # start
    #
    ################################################################################
    print_session('create %s' % name)

    ################################################################################
    print_message('get vpc id')

    rds_vpc_id, eb_vpc_id = aws_cli.get_vpc_id()

    if not rds_vpc_id or not eb_vpc_id:
        print('ERROR!!! No VPC found')
        raise Exception()

    ################################################################################
    print_message('get subnet id')

    subnet_id_1 = None
    subnet_id_2 = None
    cmd = ['ec2', 'describe-subnets']
    result = aws_cli.run(cmd)
    # Match the two public EB subnets by CIDR within the EB VPC.
    for r in result['Subnets']:
        if r['VpcId'] != eb_vpc_id:
            continue
        if r['CidrBlock'] == cidr_subnet['eb']['public_1']:
            subnet_id_1 = r['SubnetId']
        if r['CidrBlock'] == cidr_subnet['eb']['public_2']:
            subnet_id_2 = r['SubnetId']

    ################################################################################
    print_message('get security group id')

    security_group_id = None
    cmd = ['ec2', 'describe-security-groups']
    result = aws_cli.run(cmd)
    for r in result['SecurityGroups']:
        if r['VpcId'] != eb_vpc_id:
            continue
        if r['GroupName'] == '%seb_public' % name_prefix:
            security_group_id = r['GroupId']
            break

    ################################################################################
    print_message('git clone')

    # Fresh checkout every run; 'dv' phase tracks the default branch,
    # other phases check out the branch named after the phase.
    subprocess.Popen(['rm', '-rf', template_path]).communicate()
    subprocess.Popen(['mkdir', '-p', template_path]).communicate()

    if phase == 'dv':
        git_command = ['git', 'clone', '--depth=1', git_url]
    else:
        git_command = ['git', 'clone', '--depth=1', '-b', phase, git_url]
    subprocess.Popen(git_command, cwd=template_path).communicate()

    if not os.path.exists('%s/%s' % (template_path, name)):
        raise Exception()

    git_hash_app = subprocess.Popen(git_rev,
                                    stdout=subprocess.PIPE,
                                    cwd='%s/%s' % (template_path, name)).communicate()[0]

    # Strip VCS metadata so it is not shipped in the application bundle.
    subprocess.Popen(['rm', '-rf', './%s/.git' % name], cwd=template_path).communicate()
    subprocess.Popen(['rm', '-rf', './%s/.gitignore' % name], cwd=template_path).communicate()

    ################################################################################
    print_message('configuration openvpn')

    # Write the PKI material from settings into the checkout.
    # NOTE(review): f.close() inside a 'with' block is redundant — the
    # context manager already closes the file.
    path = '%s/%s/_provisioning/configuration/etc/openvpn' % (template_path, name)
    with open('%s/ca.crt' % path, 'w') as f:
        f.write(openvpn_ca_crt)
        f.close()
    with open('%s/ca.key' % path, 'w') as f:
        f.write(openvpn_ca_key)
        f.close()
    with open('%s/dh2048.pem' % path, 'w') as f:
        f.write(openvpn_dh2048_pem)
        f.close()
    with open('%s/server.crt' % path, 'w') as f:
        f.write(openvpn_server_crt)
        f.close()
    with open('%s/server.key' % path, 'w') as f:
        f.write(openvpn_server_key)
        f.close()

    ################################################################################
    print_message('configuration %s' % name)

    with open('%s/%s/_provisioning/configuration/accounts' % (template_path, name), 'w') as f:
        for aa in accounts:
            f.write(aa + '\n')
        f.close()

    with open('%s/%s/_provisioning/configuration/phase' % (template_path, name), 'w') as f:
        f.write(phase)
        f.close()

    # Render *_sample templates into live config files via regex substitution.
    lines = read_file('%s/%s/_provisioning/.ebextensions/%s.config.sample' % (template_path, name, name))
    lines = re_sub_lines(lines, 'AWS_EB_NOTIFICATION_EMAIL', aws_eb_notification_email)
    write_file('%s/%s/_provisioning/.ebextensions/%s.config' % (template_path, name, name), lines)

    lines = read_file('%s/%s/_provisioning/configuration/etc/openvpn/server_sample.conf' % (template_path, name))
    lines = re_sub_lines(lines, 'OPENVPN_SUBNET_IP', openvpn_subnet_ip)
    write_file('%s/%s/_provisioning/configuration/etc/openvpn/server.conf' % (template_path, name), lines)

    lines = read_file('%s/%s/_provisioning/configuration/etc/sysconfig/iptables_sample' % (template_path, name))
    lines = re_sub_lines(lines, 'AWS_VPC_EB', cidr_vpc['eb'])
    lines = re_sub_lines(lines, 'OPENVPN_SUBNET_IP', openvpn_subnet_ip)
    write_file('%s/%s/_provisioning/configuration/etc/sysconfig/iptables' % (template_path, name), lines)

    lines = read_file(('%s/%s/_provisioning/configuration/etc/%s/settings_local_sample.py' % (template_path, name, name)))
    lines = re_sub_lines(lines, '^(DEBUG).*', '\\1 = %s' % debug)
    # Every key of 'settings' (plus PHASE) is rewritten into the sample file;
    # lines must look like "KEY = ..." with a space after KEY to match.
    option_list = list()
    option_list.append(['PHASE', phase])
    for key in settings:
        value = settings[key]
        option_list.append([key, value])
    for oo in option_list:
        lines = re_sub_lines(lines, '^(%s) .*' % oo[0], '\\1 = \'%s\'' % oo[1])
    write_file(('%s/%s/_provisioning/configuration/etc/%s/settings_local.py' % (template_path, name, name)), lines)

    ################################################################################
    print_message('check previous version')

    cmd = ['elasticbeanstalk', 'describe-environments']
    cmd += ['--application-name', eb_application_name]
    result = aws_cli.run(cmd)

    # If a Ready environment already owns the target CNAME, remember it so
    # the CNAMEs can be swapped after the new environment goes Green, and
    # deploy the new one under a timestamped CNAME in the meantime.
    for r in result['Environments']:
        if 'CNAME' not in r:
            continue
        if r['CNAME'] == '%s.%s.elasticbeanstalk.com' % (cname, aws_default_region):
            if r['Status'] == 'Terminated':
                continue
            elif r['Status'] != 'Ready':
                print('previous version is not ready.')
                raise Exception()
            eb_environment_name_old = r['EnvironmentName']
            cname += '-%s' % str_timestamp
            break

    ################################################################################
    print_message('create storage location')

    cmd = ['elasticbeanstalk', 'create-storage-location']
    result = aws_cli.run(cmd)
    s3_bucket = result['S3Bucket']
    s3_zip_filename = '/'.join(['s3://' + s3_bucket, eb_application_name, zip_filename])

    ################################################################################
    print_message('create application version')

    # Promote the _provisioning payload to the bundle root, zip it, and
    # upload to the EB storage bucket.
    file_list = list()
    file_list.append('.ebextensions')
    file_list.append('configuration')
    file_list.append('provisioning.py')
    file_list.append('requirements.txt')
    for ff in file_list:
        cmd = ['mv', '%s/_provisioning/%s' % (name, ff), '.']
        subprocess.Popen(cmd, cwd=template_path).communicate()
    cmd = ['rm', '-rf', '%s/_provisioning' % name]
    subprocess.Popen(cmd, cwd=template_path).communicate()

    cmd = ['zip', '-r', zip_filename, '.', '.ebextensions']
    subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=template_path).communicate()

    cmd = ['s3', 'cp', zip_filename, s3_zip_filename]
    aws_cli.run(cmd, cwd=template_path)

    cmd = ['rm', '-rf', zip_filename]
    subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=template_path).communicate()

    cmd = ['elasticbeanstalk', 'create-application-version']
    cmd += ['--application-name', eb_application_name]
    cmd += ['--source-bundle', 'S3Bucket="%s",S3Key="%s/%s"' % (s3_bucket, eb_application_name, zip_filename)]
    cmd += ['--version-label', eb_environment_name]
    aws_cli.run(cmd, cwd=template_path)

    ################################################################################
    print_message('create environment %s' % name)

    option_settings = list()

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'EC2KeyName'
    oo['Value'] = key_pair_name
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'InstanceType'
    oo['Value'] = 't3.nano'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'IamInstanceProfile'
    oo['Value'] = 'aws-elasticbeanstalk-ec2-role'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:autoscaling:launchconfiguration'
    oo['OptionName'] = 'SecurityGroups'
    oo['Value'] = security_group_id
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'AssociatePublicIpAddress'
    oo['Value'] = 'true'
    option_settings.append(oo)

    # NOTE(review): the literal '...' values below look like unfinished
    # placeholders; for a SingleInstance environment the ELB options are
    # likely ignored, but this should be confirmed and cleaned up.
    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'ELBScheme'
    oo['Value'] = '...'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'ELBSubnets'
    oo['Value'] = '...'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'Subnets'
    oo['Value'] = ','.join([subnet_id_1, subnet_id_2])
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:ec2:vpc'
    oo['OptionName'] = 'VPCId'
    oo['Value'] = eb_vpc_id
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:environment'
    oo['OptionName'] = 'EnvironmentType'
    oo['Value'] = 'SingleInstance'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:environment'
    oo['OptionName'] = 'ServiceRole'
    oo['Value'] = 'aws-elasticbeanstalk-service-role'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:healthreporting:system'
    oo['OptionName'] = 'SystemType'
    oo['Value'] = 'enhanced'
    option_settings.append(oo)

    # Enhanced-health ConfigDocument: sample the three instance metrics
    # every 60 seconds.
    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:healthreporting:system'
    oo['OptionName'] = 'ConfigDocument'
    cw_instance = dict()
    cw_instance['RootFilesystemUtil'] = 60
    cw_instance['InstanceHealth'] = 60
    cw_instance['CPUIdle'] = 60
    cw = dict()
    cw['Instance'] = cw_instance
    cfg_doc = dict()
    cfg_doc['CloudWatchMetrics'] = cw
    cfg_doc['Version'] = 1
    oo['Value'] = json.dumps(cfg_doc)
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:cloudwatch:logs'
    oo['OptionName'] = 'StreamLogs'
    oo['Value'] = 'true'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:cloudwatch:logs'
    oo['OptionName'] = 'DeleteOnTerminate'
    oo['Value'] = 'true'
    option_settings.append(oo)

    oo = dict()
    oo['Namespace'] = 'aws:elasticbeanstalk:cloudwatch:logs'
    oo['OptionName'] = 'RetentionInDays'
    oo['Value'] = '3'
    option_settings.append(oo)

    option_settings = json.dumps(option_settings)

    # Record both git hashes as environment tags for traceability.
    tag0 = 'Key=git_hash_johanna,Value=%s' % git_hash_johanna.decode('utf-8').strip()
    tag1 = 'Key=git_hash_%s,Value=%s' % (name, git_hash_app.decode('utf-8').strip())

    cmd = ['elasticbeanstalk', 'create-environment']
    cmd += ['--application-name', eb_application_name]
    cmd += ['--cname-prefix', cname]
    cmd += ['--environment-name', eb_environment_name]
    cmd += ['--option-settings', option_settings]
    cmd += ['--solution-stack-name', '64bit Amazon Linux 2018.03 v2.7.7 running Python 3.6']
    cmd += ['--tags', tag0, tag1]
    cmd += ['--version-label', eb_environment_name]
    aws_cli.run(cmd, cwd=template_path)

    # Poll until the environment is Green/Ready; give up after 30 minutes.
    elapsed_time = 0
    while True:
        cmd = ['elasticbeanstalk', 'describe-environments']
        cmd += ['--application-name', eb_application_name]
        cmd += ['--environment-name', eb_environment_name]
        result = aws_cli.run(cmd)

        ee = result['Environments'][0]
        print(json.dumps(ee, sort_keys=True, indent=4))
        if ee.get('Health', '') == 'Green' and ee.get('Status', '') == 'Ready':
            break

        print('creating... (elapsed time: \'%d\' seconds)' % elapsed_time)
        time.sleep(5)
        elapsed_time += 5

        if elapsed_time > 60 * 30:
            raise Exception()

    ################################################################################
    print_message('revoke security group ingress')

    # Close the world-open SSH ingress that the EB bootstrap leaves behind.
    cmd = ['ec2', 'describe-security-groups']
    cmd += ['--filters', 'Name=tag-key,Values=Name,Name=tag-value,Values=%s' % eb_environment_name]
    result = aws_cli.run(cmd)

    for ss in result['SecurityGroups']:
        cmd = ['ec2', 'revoke-security-group-ingress']
        cmd += ['--group-id', ss['GroupId']]
        cmd += ['--protocol', 'tcp']
        cmd += ['--port', '22']
        cmd += ['--cidr', '0.0.0.0/0']
        aws_cli.run(cmd, ignore_error=True)

    ################################################################################
    print_message('disable source/destination checking')

    # A VPN gateway forwards traffic it did not originate, so src/dst
    # checking must be off on the instance.
    cmd = ['elasticbeanstalk', 'describe-environments']
    cmd += ['--application-name', eb_application_name]
    cmd += ['--environment-name', eb_environment_name]
    result = aws_cli.run(cmd)

    # NOTE(review): EndpointURL is used as a public IP filter here — for a
    # SingleInstance environment it appears to hold the instance address;
    # confirm against the EB API response.
    ip_address = result['Environments'][0]['EndpointURL']

    cmd = ['ec2', 'describe-instances']
    cmd += ['--filter=Name=ip-address,Values=%s' % ip_address]
    result = aws_cli.run(cmd)
    instance_id = result['Reservations'][0]['Instances'][0]['InstanceId']

    cmd = ['ec2', 'modify-instance-attribute']
    cmd += ['--instance-id', instance_id]
    cmd += ['--no-source-dest-check']
    aws_cli.run(cmd)

    ################################################################################
    print_message('swap CNAME if the previous version exists')

    if eb_environment_name_old:
        cmd = ['elasticbeanstalk', 'swap-environment-cnames']
        cmd += ['--source-environment-name', eb_environment_name_old]
        cmd += ['--destination-environment-name', eb_environment_name]
        aws_cli.run(cmd)
def run_create_lambda_cron(function_name, settings):
    """Create or update an AWS Lambda function driven by a CloudWatch
    Events cron rule.

    Clones (or reuses) the template repository, packages the lambda folder
    into ``deploy.zip``, then either updates the existing function and its
    rule, or creates the function, the rule, the invoke permission, and the
    rule target. Optionally reserves concurrency.

    :param function_name: Lambda function name; also used to derive the
        rule name (``<function_name>CronRule``) and statement id.
    :param settings: config dict with DESCRIPTION, GIT_URL,
        SCHEDULE_EXPRESSION, AWS_DEFAULT_REGION, optional FOLDER_NAME and
        CONCURRENCY. Every key is also substituted into
        ``settings_local.py``.
    :raises Exception: if GIT_URL does not end in ``<repo>.git`` or the
        clone does not produce the expected folder.

    Fix: the ``put-function-concurrency`` command was previously built but
    never executed, so CONCURRENCY silently had no effect.
    """
    aws_cli = AWSCli(settings['AWS_DEFAULT_REGION'])

    description = settings['DESCRIPTION']
    folder_name = settings.get('FOLDER_NAME', function_name)
    git_url = settings['GIT_URL']
    phase = env['common']['PHASE']
    schedule_expression = settings['SCHEDULE_EXPRESSION']
    concurrency = settings.get('CONCURRENCY', None)

    # Derive the checkout folder name from the repo URL ("…/<name>.git").
    mm = re.match(r'^.+/(.+)\.git$', git_url)
    if not mm:
        raise Exception()
    git_folder_name = mm.group(1)

    ################################################################################
    print_session('create %s' % function_name)

    ################################################################################
    print_message('download template: %s' % git_folder_name)

    subprocess.Popen(['mkdir', '-p', './template']).communicate()

    # Clone only if a previous checkout is not already present;
    # 'dv' phase tracks the default branch, other phases their own branch.
    if not os.path.exists('template/%s' % git_folder_name):
        if phase == 'dv':
            git_command = ['git', 'clone', '--depth=1', git_url]
        else:
            git_command = ['git', 'clone', '--depth=1', '-b', phase, git_url]
        subprocess.Popen(git_command, cwd='template').communicate()
        if not os.path.exists('template/%s' % git_folder_name):
            raise Exception()

    deploy_folder = 'template/%s/lambda/%s' % (git_folder_name, folder_name)

    ################################################################################
    print_message('packaging lambda: %s' % function_name)

    print_message('cleanup generated files')
    # Remove ignored/untracked build artifacts from previous runs.
    subprocess.Popen(['git', 'clean', '-d', '-f', '-x'], cwd=deploy_folder).communicate()

    requirements_path = '%s/requirements.txt' % deploy_folder
    if os.path.exists(requirements_path):
        print_message('install dependencies')
        # Vendor dependencies directly into the deploy folder for zipping.
        cmd = ['pip3', 'install', '-r', requirements_path, '-t', deploy_folder]
        subprocess.Popen(cmd).communicate()

    settings_path = '%s/settings_local_sample.py' % deploy_folder
    if os.path.exists(settings_path):
        print_message('create environment values')
        lines = read_file(settings_path)
        option_list = list()
        option_list.append(['PHASE', phase])
        for key in settings:
            value = settings[key]
            option_list.append([key, value])
        for oo in option_list:
            lines = re_sub_lines(lines, '^(%s) .*' % oo[0], '\\1 = \'%s\'' % oo[1])
        write_file('%s/settings_local.py' % deploy_folder, lines)

    print_message('zip files')
    cmd = ['zip', '-r', 'deploy.zip', '.']
    subprocess.Popen(cmd, cwd=deploy_folder).communicate()

    print_message('create lambda function')
    role_arn = aws_cli.get_role_arn('aws-lambda-default-role')

    # Record both repo hashes as function tags for traceability.
    git_hash_johanna = subprocess.Popen(['git', 'rev-parse', 'HEAD'],
                                        stdout=subprocess.PIPE).communicate()[0]
    git_hash_template = subprocess.Popen(['git', 'rev-parse', 'HEAD'],
                                         stdout=subprocess.PIPE,
                                         cwd='template/%s' % git_folder_name).communicate()[0]

    tags = list()
    # noinspection PyUnresolvedReferences
    tags.append('git_hash_johanna=%s' % git_hash_johanna.decode('utf-8').strip())
    # noinspection PyUnresolvedReferences
    tags.append('git_hash_%s=%s' % (git_folder_name, git_hash_template.decode('utf-8').strip()))

    ################################################################################
    print_message('check previous version')

    need_update = False
    cmd = ['lambda', 'list-functions']
    result = aws_cli.run(cmd)
    for ff in result['Functions']:
        if function_name == ff['FunctionName']:
            need_update = True
            break

    ################################################################################
    if need_update:
        print_session('update lambda: %s' % function_name)

        cmd = ['lambda', 'update-function-code',
               '--function-name', function_name,
               '--zip-file', 'fileb://deploy.zip']
        result = aws_cli.run(cmd, cwd=deploy_folder)
        function_arn = result['FunctionArn']

        cmd = ['lambda', 'update-function-configuration',
               '--function-name', function_name,
               '--description', description,
               '--role', role_arn,
               '--handler', 'lambda.handler',
               '--runtime', 'python3.7',
               '--timeout', '900']
        aws_cli.run(cmd, cwd=deploy_folder)

        print_message('update lambda tags')
        cmd = ['lambda', 'tag-resource',
               '--resource', function_arn,
               '--tags', ','.join(tags)]
        aws_cli.run(cmd, cwd=deploy_folder)

        print_message('update cron event')
        cmd = ['events', 'put-rule',
               '--name', function_name + 'CronRule',
               '--description', description,
               '--schedule-expression', schedule_expression]
        aws_cli.run(cmd)
        return

    ################################################################################
    print_session('create lambda: %s' % function_name)

    cmd = ['lambda', 'create-function',
           '--function-name', function_name,
           '--description', description,
           '--zip-file', 'fileb://deploy.zip',
           '--role', role_arn,
           '--handler', 'lambda.handler',
           '--runtime', 'python3.7',
           '--tags', ','.join(tags),
           '--timeout', '900']
    result = aws_cli.run(cmd, cwd=deploy_folder)
    function_arn = result['FunctionArn']

    print_message('create cron event')
    cmd = ['events', 'put-rule',
           '--name', function_name + 'CronRule',
           '--description', description,
           '--schedule-expression', schedule_expression]
    result = aws_cli.run(cmd)
    rule_arn = result['RuleArn']

    print_message('give event permission')
    cmd = ['lambda', 'add-permission',
           '--function-name', function_name,
           '--statement-id', function_name + 'StatementId',
           '--action', 'lambda:InvokeFunction',
           '--principal', 'events.amazonaws.com',
           '--source-arn', rule_arn]
    aws_cli.run(cmd)

    print_message('link event and lambda')
    cmd = ['events', 'put-targets',
           '--rule', function_name + 'CronRule',
           '--targets', '{"Id" : "1", "Arn": "%s"}' % function_arn]
    aws_cli.run(cmd)

    if concurrency:
        cmd = ['lambda', 'put-function-concurrency']
        cmd += ['--function-name', function_name]
        # str(): CONCURRENCY may be an int in settings; CLI args must be strings.
        cmd += ['--reserved-concurrent-executions', str(concurrency)]
        # BUGFIX: this command was previously constructed but never run.
        aws_cli.run(cmd)
def run_create_eb_openvpn(name, settings):
    """Create an Elastic Beanstalk OpenVPN environment using the ``eb`` CLI.

    Variant that deploys from a pre-existing template checkout
    (``template/<template_name>/elasticbeanstalk/<name>``) via
    ``aws_cli.run_eb(['create', ...])`` instead of building an application
    version by hand. After the environment is healthy it revokes SSH
    ingress, disables src/dst checking, and swaps the CNAME from the
    previous environment if one exists.

    NOTE(review): this definition has the same name as an earlier
    ``run_create_eb_openvpn`` in this file — at import time the later one
    shadows the earlier. Confirm whether both are intended to exist.

    NOTE(review): ``aws_cli`` is not assigned in this function (unlike the
    other variant, which builds its own ``AWSCli``) — presumably a
    module-level instance; verify.
    """
    aws_default_region = env['aws']['AWS_DEFAULT_REGION']
    aws_eb_notification_email = settings['AWS_EB_NOTIFICATION_EMAIL']
    cname = settings['CNAME']
    eb_application_name = env['elasticbeanstalk']['APPLICATION_NAME']
    host_maya = settings['HOST_MAYA']
    key_pair_name = env['common']['AWS_KEY_PAIR_NAME']
    openvpn_ca_crt = settings['CA_CRT']
    openvpn_dh2048_pem = settings['DH2048_PEM']
    openvpn_server_crt = settings['SERVER_CRT']
    openvpn_server_key = settings['SERVER_KEY']
    openvpn_subnet_ip = settings['SUBNET_IP']
    phase = env['common']['PHASE']
    template_name = env['template']['NAME']

    cidr_vpc = aws_cli.cidr_vpc
    cidr_subnet = aws_cli.cidr_subnet

    # Timestamp suffix makes the environment name unique per deploy.
    str_timestamp = str(int(time.time()))

    eb_environment_name = '%s-%s' % (name, str_timestamp)
    eb_environment_name_old = None

    template_path = 'template/%s' % template_name
    environment_path = '%s/elasticbeanstalk/%s' % (template_path, name)
    opt_config_path = '%s/configuration/opt' % environment_path
    etc_config_path = '%s/configuration/etc' % environment_path

    git_rev = ['git', 'rev-parse', 'HEAD']
    # Hashes of johanna (cwd) and the template checkout, recorded as tags.
    git_hash_johanna = subprocess.Popen(git_rev, stdout=subprocess.PIPE).communicate()[0]
    git_hash_template = subprocess.Popen(git_rev, stdout=subprocess.PIPE, cwd=template_path).communicate()[0]

    ################################################################################
    #
    # start
    #
    ################################################################################
    print_session('create %s' % name)

    ################################################################################
    print_message('get vpc id')

    rds_vpc_id, eb_vpc_id = aws_cli.get_vpc_id()

    if not rds_vpc_id or not eb_vpc_id:
        print('ERROR!!! No VPC found')
        raise Exception()

    ################################################################################
    print_message('get subnet id')

    subnet_id_1 = None
    subnet_id_2 = None
    cmd = ['ec2', 'describe-subnets']
    result = aws_cli.run(cmd)
    # Match the two public EB subnets by CIDR within the EB VPC.
    for r in result['Subnets']:
        if r['VpcId'] != eb_vpc_id:
            continue
        if r['CidrBlock'] == cidr_subnet['eb']['public_1']:
            subnet_id_1 = r['SubnetId']
        if r['CidrBlock'] == cidr_subnet['eb']['public_2']:
            subnet_id_2 = r['SubnetId']

    ################################################################################
    print_message('get security group id')

    security_group_id = None
    cmd = ['ec2', 'describe-security-groups']
    result = aws_cli.run(cmd)
    for r in result['SecurityGroups']:
        if r['VpcId'] != eb_vpc_id:
            continue
        if r['GroupName'] == 'eb_public':
            security_group_id = r['GroupId']
            break

    ################################################################################
    print_message('configuration openvpn')

    # Write the PKI material from settings into the template checkout.
    # NOTE(review): f.close() inside a 'with' block is redundant.
    path = '%s/configuration/etc/openvpn' % environment_path
    with open('%s/ca.crt' % path, 'w') as f:
        f.write(openvpn_ca_crt)
        f.close()
    with open('%s/dh2048.pem' % path, 'w') as f:
        f.write(openvpn_dh2048_pem)
        f.close()
    with open('%s/server.crt' % path, 'w') as f:
        f.write(openvpn_server_crt)
        f.close()
    with open('%s/server.key' % path, 'w') as f:
        f.write(openvpn_server_key)
        f.close()

    ################################################################################
    print_message('configuration %s' % name)

    with open('%s/configuration/phase' % environment_path, 'w') as f:
        f.write(phase)
        f.close()

    # Render *_sample templates into live config files via regex substitution.
    # The yml patterns deliberately include the leading indent ("^( key)").
    lines = read_file('%s/.elasticbeanstalk/config_sample.yml' % environment_path)
    lines = re_sub_lines(lines, '^(  application_name).*', '\\1: %s' % eb_application_name)
    lines = re_sub_lines(lines, '^(  default_ec2_keyname).*', '\\1: %s' % key_pair_name)
    write_file('%s/.elasticbeanstalk/config.yml' % environment_path, lines)

    lines = read_file('%s/.ebextensions/%s.config.sample' % (environment_path, name))
    lines = re_sub_lines(lines, 'AWS_EB_NOTIFICATION_EMAIL', aws_eb_notification_email)
    write_file('%s/.ebextensions/%s.config' % (environment_path, name), lines)

    lines = read_file('%s/collectd_sample.conf' % etc_config_path)
    lines = re_sub_lines(lines, 'HOST_MAYA', host_maya)
    write_file('%s/collectd.conf' % etc_config_path, lines)

    lines = read_file('%s/ntpdate_sample.sh' % opt_config_path)
    lines = re_sub_lines(lines, '^(SERVER).*', '\\1=\'%s\'' % host_maya)
    write_file('%s/ntpdate.sh' % opt_config_path, lines)

    lines = read_file('%s/openvpn/server_sample.conf' % etc_config_path)
    lines = re_sub_lines(lines, 'OPENVPN_SUBNET_IP', openvpn_subnet_ip)
    write_file('%s/openvpn/server.conf' % etc_config_path, lines)

    lines = read_file('%s/sysconfig/iptables_sample' % etc_config_path)
    lines = re_sub_lines(lines, 'AWS_VPC_EB', cidr_vpc['eb'])
    lines = re_sub_lines(lines, 'OPENVPN_SUBNET_IP', openvpn_subnet_ip)
    write_file('%s/sysconfig/iptables' % etc_config_path, lines)

    ################################################################################
    print_message('check previous version')

    cmd = ['elasticbeanstalk', 'describe-environments']
    cmd += ['--application-name', eb_application_name]
    result = aws_cli.run(cmd)

    # If a Ready environment already owns the target CNAME, remember it so
    # the CNAMEs can be swapped later; deploy the new one under a
    # timestamped CNAME in the meantime.
    # NOTE(review): the region is hard-coded as 'ap-northeast-2' here even
    # though aws_default_region is available — confirm this is intentional.
    for r in result['Environments']:
        if 'CNAME' not in r:
            continue
        if r['CNAME'] == '%s.ap-northeast-2.elasticbeanstalk.com' % cname:
            if r['Status'] == 'Terminated':
                continue
            elif r['Status'] != 'Ready':
                print('previous version is not ready.')
                raise Exception()
            eb_environment_name_old = r['EnvironmentName']
            cname += '-%s' % str_timestamp
            break

    ################################################################################
    print_message('create %s' % name)

    tags = list()
    # NOTE(review): unlike other blocks in this file, these hash values are
    # not .strip()ped, so they carry a trailing newline — verify downstream.
    # noinspection PyUnresolvedReferences
    tags.append('git_hash_johanna=%s' % git_hash_johanna.decode('utf-8'))
    # noinspection PyUnresolvedReferences
    tags.append('git_hash_%s=%s' % (template_name, git_hash_template.decode('utf-8')))

    # Single-instance, public-IP environment via the 'eb create' CLI.
    cmd = ['create', eb_environment_name]
    cmd += ['--cname', cname]
    cmd += ['--instance_type', 't2.nano']
    cmd += ['--region', aws_default_region]
    cmd += ['--single']
    cmd += ['--tags', ','.join(tags)]
    cmd += ['--vpc.ec2subnets', ','.join([subnet_id_1, subnet_id_2])]
    cmd += ['--vpc.elbpublic']
    cmd += ['--vpc.elbsubnets', ','.join([subnet_id_1, subnet_id_2])]
    cmd += ['--vpc.id', eb_vpc_id]
    cmd += ['--vpc.publicip']
    cmd += ['--vpc.securitygroups', security_group_id]
    cmd += ['--quiet']
    aws_cli.run_eb(cmd, cwd=environment_path)

    # Poll until Green/Ok/Ready; give up after 30 minutes.
    elapsed_time = 0
    while True:
        cmd = ['elasticbeanstalk', 'describe-environments']
        cmd += ['--application-name', eb_application_name]
        cmd += ['--environment-name', eb_environment_name]
        result = aws_cli.run(cmd)

        ee = result['Environments'][0]
        print(json.dumps(ee, sort_keys=True, indent=4))
        if ee.get('Health', '') == 'Green' \
                and ee.get('HealthStatus', '') == 'Ok' \
                and ee.get('Status', '') == 'Ready':
            break

        print('creating... (elapsed time: \'%d\' seconds)' % elapsed_time)
        time.sleep(5)
        elapsed_time += 5

        if elapsed_time > 60 * 30:
            raise Exception()

    ################################################################################
    print_message('revoke security group ingress')

    # Close the world-open SSH ingress that the EB bootstrap leaves behind.
    cmd = ['ec2', 'describe-security-groups']
    cmd += ['--filters', 'Name=tag-key,Values=Name,Name=tag-value,Values=%s' % eb_environment_name]
    result = aws_cli.run(cmd)

    for ss in result['SecurityGroups']:
        cmd = ['ec2', 'revoke-security-group-ingress']
        cmd += ['--group-id', ss['GroupId']]
        cmd += ['--protocol', 'tcp']
        cmd += ['--port', '22']
        cmd += ['--cidr', '0.0.0.0/0']
        aws_cli.run(cmd, ignore_error=True)

    ################################################################################
    print_message('disable source/destination checking')

    # A VPN gateway forwards traffic it did not originate, so src/dst
    # checking must be off on the instance.
    cmd = ['elasticbeanstalk', 'describe-environments']
    cmd += ['--application-name', eb_application_name]
    cmd += ['--environment-name', eb_environment_name]
    result = aws_cli.run(cmd)
    ip_address = result['Environments'][0]['EndpointURL']

    cmd = ['ec2', 'describe-instances']
    cmd += ['--filter=Name=ip-address,Values=%s' % ip_address]
    result = aws_cli.run(cmd)
    instance_id = result['Reservations'][0]['Instances'][0]['InstanceId']

    cmd = ['ec2', 'modify-instance-attribute']
    cmd += ['--instance-id', instance_id]
    cmd += ['--no-source-dest-check']
    aws_cli.run(cmd)

    ################################################################################
    print_message('swap CNAME if the previous version exists')

    if eb_environment_name_old:
        cmd = ['elasticbeanstalk', 'swap-environment-cnames']
        cmd += ['--source-environment-name', eb_environment_name_old]
        cmd += ['--destination-environment-name', eb_environment_name]
        aws_cli.run(cmd)
def run_create_s3_vue(name, settings):
    """Build a Vue front-end and publish it to an S3 static-website bucket.

    Clones the repo, runs ``npm install`` / ``npm run build-<name>``,
    uploads the ``dist`` output to a temp bucket, creates/configures the
    deploy bucket (policy + website config), syncs, tags the bucket with
    the git hashes and a timestamp, cleans the temp prefix, and optionally
    invalidates a CloudFront distribution.

    :param name: app name (sub-folder of the repo, also used in build
        script and settings file names).
    :param settings: config dict with BUCKET_NAME, GIT_URL, optional
        PROTOCOL and CLOUDFRONT_DIST_ID. Every key is also substituted into
        ``settings-local.js``.
    :raises Exception: on malformed GIT_URL, failed clone, or npm failure.

    Fixes:
    - npm failure detection previously read ``communicate()`` output
      without piping stdout/stderr, so the "error" value was always None
      and that check was dead; failures are now detected via the return
      code only (which was the only live check before).
    - git hashes written into bucket tags are now ``.strip()``-ed, matching
      the rest of this file; the previous trailing newline corrupted the
      ``TagSet=[{Key=..., Value=...}]`` argument.
    """
    aws_cli = AWSCli()
    deploy_bucket_name = settings['BUCKET_NAME']
    git_url = settings['GIT_URL']
    phase = env['common']['PHASE']

    # Derive the checkout folder name from the repo URL ("…/<name>.git").
    mm = re.match(r'^.+/(.+)\.git$', git_url)
    if not mm:
        raise Exception()
    git_folder_name = mm.group(1)

    def _run_npm(args, cwd):
        # Run an npm command and fail loudly on a nonzero exit status.
        npm_process = subprocess.Popen(args, cwd=cwd)
        npm_process.communicate()
        if npm_process.returncode != 0:
            print(' '.join(['npm returns:', str(npm_process.returncode)]))
            raise Exception()

    ################################################################################
    print_session('create %s' % name)

    ################################################################################
    print_message('git clone')

    # Fresh checkout every run; 'dv' phase tracks the default branch,
    # other phases check out the branch named after the phase.
    subprocess.Popen(['rm', '-rf', './%s' % git_folder_name], cwd='template').communicate()

    if phase == 'dv':
        git_command = ['git', 'clone', '--depth=1', git_url]
    else:
        git_command = ['git', 'clone', '--depth=1', '-b', phase, git_url]
    subprocess.Popen(git_command, cwd='template').communicate()

    if not os.path.exists('template/%s' % git_folder_name):
        raise Exception()

    # App repo hash, recorded as a bucket tag below.
    git_hash_app = subprocess.Popen(['git', 'rev-parse', 'HEAD'],
                                    stdout=subprocess.PIPE,
                                    cwd='template/%s' % git_folder_name).communicate()[0]

    # Strip VCS metadata so it is not uploaded.
    subprocess.Popen(['rm', '-rf', './.git'],
                     cwd='template/%s' % git_folder_name).communicate()
    subprocess.Popen(['rm', '-rf', './.gitignore'],
                     cwd='template/%s' % git_folder_name).communicate()

    ################################################################################
    print_message('npm install')

    _run_npm(['npm', 'install'], 'template/%s' % git_folder_name)

    ################################################################################
    print_message('configure %s' % name)

    # Substitute PHASE and every settings key into settings-local.js
    # ("const KEY = '...'" lines).
    lines = read_file('template/%s/%s/static/settings-local-sample.js' % (git_folder_name, name))
    option_list = list()
    option_list.append(['phase', phase])
    for key in settings:
        value = settings[key]
        option_list.append([key, value])
    for oo in option_list:
        lines = re_sub_lines(lines, '^(const %s) .*' % oo[0], '\\1 = \'%s\'' % oo[1])
    write_file('template/%s/%s/static/settings-local.js' % (git_folder_name, name), lines)

    ################################################################################
    print_message('npm build')

    # Share the repo-root node_modules with the app sub-folder via symlink.
    nm = os.path.abspath('template/%s/node_modules' % git_folder_name)
    subprocess.Popen(['ln', '-s', nm, 'node_modules'],
                     cwd='template/%s/%s' % (git_folder_name, name)).communicate()

    _run_npm(['npm', 'run', 'build-%s' % name], 'template/%s' % git_folder_name)

    ################################################################################
    print_message('upload to temp bucket')

    # Stage the build under a timestamped temp prefix first, then sync to
    # the deploy bucket — keeps the public bucket consistent during upload.
    temp_bucket_name = aws_cli.get_temp_bucket()
    timestamp = int(time.time())
    temp_bucket_prefix = '%s/%s/%s/%s' % (temp_bucket_name, git_folder_name, name, timestamp)
    temp_bucket_uri = 's3://%s' % temp_bucket_prefix
    cmd = ['s3', 'cp', '.', temp_bucket_uri, '--recursive']
    upload_result = aws_cli.run(cmd, cwd='template/%s/%s/dist' % (git_folder_name, name))
    for ll in upload_result.split('\n'):
        print(ll)

    ################################################################################
    print_message('create deploy bucket if not exists')

    cmd = ['s3', 'mb', 's3://%s' % deploy_bucket_name]
    aws_cli.run(cmd, ignore_error=True)

    ################################################################################
    print_message('set bucket policy')

    lines = read_file('aws_s3/aws-s3-bucket-policy-sample.json')
    lines = re_sub_lines(lines, 'BUCKET_NAME', deploy_bucket_name)
    write_file('template/%s/%s/aws-s3-bucket-policy.json' % (git_folder_name, name), lines)

    cmd = ['s3api', 'put-bucket-policy']
    cmd += ['--bucket', deploy_bucket_name]
    cmd += ['--policy', 'file://template/%s/%s/aws-s3-bucket-policy.json' % (git_folder_name, name)]
    aws_cli.run(cmd)

    ################################################################################
    print_message('set website configuration')

    lines = read_file('aws_s3/aws-s3-website-configuration-sample.json')
    lines = re_sub_lines(lines, 'BUCKET_NAME', deploy_bucket_name)
    lines = re_sub_lines(lines, 'PROTOCOL', settings.get('PROTOCOL', 'http'))
    write_file('template/%s/%s/aws-s3-website-configuration.json' % (git_folder_name, name), lines)

    cmd = ['s3api', 'put-bucket-website']
    cmd += ['--bucket', deploy_bucket_name]
    cmd += ['--website-configuration', 'file://template/%s/%s/aws-s3-website-configuration.json' % (git_folder_name, name)]
    aws_cli.run(cmd)

    ################################################################################
    print_message('sync to deploy bucket')

    cmd = ['s3', 'sync', temp_bucket_uri, 's3://%s' % deploy_bucket_name, '--delete']
    sync_result = aws_cli.run(cmd)
    for ll in sync_result.split('\n'):
        print(ll)

    ################################################################################
    print_message('tag to deploy bucket')

    git_hash_johanna = subprocess.Popen(['git', 'rev-parse', 'HEAD'],
                                        stdout=subprocess.PIPE).communicate()[0]

    # put-bucket-tagging REPLACES the tag set, so merge existing tags first.
    tag_dict = dict()

    cmd = ['s3api', 'get-bucket-tagging', '--bucket', deploy_bucket_name]
    tag_result = aws_cli.run(cmd, ignore_error=True)
    if tag_result:
        tag_result = dict(tag_result)
        for tt in tag_result['TagSet']:
            key = tt['Key']
            value = tt['Value']
            tag_dict[key] = value

    tag_dict['phase'] = phase
    # BUGFIX: strip the trailing newline from `git rev-parse` output, as the
    # other deploy functions in this file do; a raw newline breaks the
    # TagSet argument below.
    tag_dict['git_hash_johanna'] = git_hash_johanna.decode('utf-8').strip()
    tag_dict['git_hash_%s/%s' % (git_folder_name, name)] = git_hash_app.decode('utf-8').strip()
    tag_dict['timestamp_%s' % name] = timestamp

    tag_format = '{Key=%s, Value=%s}'
    tag_list = list()
    for key in tag_dict:
        value = tag_dict[key]
        tag_list.append(tag_format % (key, value))

    cmd = ['s3api', 'put-bucket-tagging', '--bucket', deploy_bucket_name,
           '--tagging', 'TagSet=[%s]' % ','.join(tag_list)]
    aws_cli.run(cmd)

    ################################################################################
    print_message('cleanup temp bucket')

    cmd = ['s3', 'rm', temp_bucket_uri, '--recursive']
    upload_result = aws_cli.run(cmd)
    for ll in upload_result.split('\n'):
        print(ll)

    ################################################################################
    print_message('invalidate cache from cloudfront')

    cf_dist_id = settings.get('CLOUDFRONT_DIST_ID', '')
    if len(cf_dist_id) > 0:
        cmd = ['cloudfront', 'create-invalidation', '--distribution-id', cf_dist_id,
               '--paths', '/*']
        invalidate_result = aws_cli.run(cmd)
        print(invalidate_result)
def run_create_eb_django(name, settings):
    """Create a new Elastic Beanstalk environment for a Django service.

    Renders the environment configuration from the template repository,
    clones the application from settings['GIT_URL'], creates a timestamped
    EB environment, waits until it is healthy, hardens its security group,
    optionally attaches a pre-existing network interface, and finally swaps
    the CNAME with the previous environment if one existed.

    :param name: service name (folder under the template's elasticbeanstalk/)
    :param settings: per-service config dict from the environment file
    :raises Exception: on missing VPC, unknown subnet type, failed clone,
        non-ready previous version, or creation timeout (30 minutes)
    """
    aws_asg_max_value = settings['AWS_ASG_MAX_VALUE']
    aws_asg_min_value = settings['AWS_ASG_MIN_VALUE']
    aws_default_region = env['aws']['AWS_DEFAULT_REGION']
    aws_eb_notification_email = settings['AWS_EB_NOTIFICATION_EMAIL']
    cname = settings['CNAME']
    debug = env['common']['DEBUG']
    eb_application_name = env['elasticbeanstalk']['APPLICATION_NAME']
    git_url = settings['GIT_URL']
    host_maya = settings['HOST_MAYA']
    key_pair_name = env['common']['AWS_KEY_PAIR_NAME']
    phase = env['common']['PHASE']
    subnet_type = settings['SUBNET_TYPE']
    template_name = env['template']['NAME']

    # BUGFIX: the original used hasattr(settings, 'PRIVATE_IP'), which checks
    # for an *attribute* on the dict object and is therefore always False, so
    # the attach-network-interface step below could never run. Use a key
    # lookup instead.
    private_ip = settings.get('PRIVATE_IP')

    cidr_subnet = aws_cli.cidr_subnet

    str_timestamp = str(int(time.time()))

    # every deploy gets a fresh, timestamped environment (blue/green swap)
    eb_environment_name = '%s-%s' % (name, str_timestamp)
    eb_environment_name_old = None

    template_path = 'template/%s' % template_name
    environment_path = '%s/elasticbeanstalk/%s' % (template_path, name)
    opt_config_path = '%s/configuration/opt' % environment_path
    etc_config_path = '%s/configuration/etc' % environment_path
    app_config_path = '%s/%s' % (etc_config_path, name)

    # record the HEAD commits of johanna and the template repo for tagging
    git_rev = ['git', 'rev-parse', 'HEAD']
    git_hash_johanna = subprocess.Popen(
        git_rev, stdout=subprocess.PIPE).communicate()[0]
    git_hash_template = subprocess.Popen(git_rev,
                                         stdout=subprocess.PIPE,
                                         cwd=template_path).communicate()[0]

    ################################################################################
    print_session('create %s' % name)

    ################################################################################
    print_message('get vpc id')

    rds_vpc_id, eb_vpc_id = aws_cli.get_vpc_id()

    if not eb_vpc_id:
        print('ERROR!!! No VPC found')
        raise Exception()

    ################################################################################
    print_message('get subnet id')

    subnet_id_1 = None
    subnet_id_2 = None
    cmd = ['ec2', 'describe-subnets']
    result = aws_cli.run(cmd)
    for r in result['Subnets']:
        if r['VpcId'] != eb_vpc_id:
            continue
        if 'public' == subnet_type:
            if r['CidrBlock'] == cidr_subnet['eb']['public_1']:
                subnet_id_1 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['public_2']:
                subnet_id_2 = r['SubnetId']
        elif 'private' == subnet_type:
            if r['CidrBlock'] == cidr_subnet['eb']['private_1']:
                subnet_id_1 = r['SubnetId']
            if r['CidrBlock'] == cidr_subnet['eb']['private_2']:
                subnet_id_2 = r['SubnetId']
        else:
            print('ERROR!!! Unknown subnet type:', subnet_type)
            raise Exception()

    ################################################################################
    print_message('get security group id')

    security_group_id = None
    cmd = ['ec2', 'describe-security-groups']
    result = aws_cli.run(cmd)
    for r in result['SecurityGroups']:
        if r['VpcId'] != eb_vpc_id:
            continue
        if 'public' == subnet_type:
            if r['GroupName'] == 'eb_public':
                security_group_id = r['GroupId']
                break
        elif 'private' == subnet_type:
            if r['GroupName'] == 'eb_private':
                security_group_id = r['GroupId']
                break
        else:
            print('ERROR!!! Unknown subnet type:', subnet_type)
            raise Exception()

    ################################################################################
    print_message('get database address')

    db_address = aws_cli.get_rds_address()

    ################################################################################
    print_message('configuration %s' % name)

    with open('%s/configuration/phase' % environment_path, 'w') as f:
        f.write(phase)

    # render each *_sample template into its live counterpart
    lines = read_file('%s/.elasticbeanstalk/config_sample.yml' % environment_path)
    lines = re_sub_lines(lines, '^( application_name).*', '\\1: %s' % eb_application_name)
    lines = re_sub_lines(lines, '^( default_ec2_keyname).*', '\\1: %s' % key_pair_name)
    write_file('%s/.elasticbeanstalk/config.yml' % environment_path, lines)

    lines = read_file('%s/.ebextensions/%s.config.sample' % (environment_path, name))
    lines = re_sub_lines(lines, 'AWS_ASG_MIN_VALUE', aws_asg_min_value)
    lines = re_sub_lines(lines, 'AWS_ASG_MAX_VALUE', aws_asg_max_value)
    lines = re_sub_lines(lines, 'AWS_EB_NOTIFICATION_EMAIL', aws_eb_notification_email)
    write_file('%s/.ebextensions/%s.config' % (environment_path, name), lines)

    lines = read_file('%s/my_sample.cnf' % app_config_path)
    lines = re_sub_lines(lines, '^(host).*', '\\1 = %s' % db_address)
    lines = re_sub_lines(lines, '^(user).*', '\\1 = %s' % env['rds']['USER_NAME'])
    lines = re_sub_lines(lines, '^(password).*', '\\1 = %s' % env['rds']['USER_PASSWORD'])
    write_file('%s/my.cnf' % app_config_path, lines)

    lines = read_file('%s/collectd_sample.conf' % etc_config_path)
    lines = re_sub_lines(lines, 'HOST_MAYA', host_maya)
    write_file('%s/collectd.conf' % etc_config_path, lines)

    lines = read_file('%s/ntpdate_sample.sh' % opt_config_path)
    lines = re_sub_lines(lines, '^(SERVER).*', '\\1=\'%s\'' % host_maya)
    write_file('%s/ntpdate.sh' % opt_config_path, lines)

    lines = read_file('%s/nc_sample.sh' % opt_config_path)
    lines = re_sub_lines(lines, '^(SERVER).*', '\\1=\'%s\'' % host_maya)
    write_file('%s/nc.sh' % opt_config_path, lines)

    lines = read_file('%s/settings_local_sample.py' % app_config_path)
    lines = re_sub_lines(lines, '^(DEBUG).*', '\\1 = %s' % debug)
    option_list = list()
    option_list.append(['PHASE', phase])
    for key in settings:
        value = settings[key]
        option_list.append([key, value])
    for oo in option_list:
        lines = re_sub_lines(lines, '^(%s) .*' % oo[0], '\\1 = \'%s\'' % oo[1])
    write_file('%s/settings_local.py' % app_config_path, lines)

    ################################################################################
    print_message('git clone')

    subprocess.Popen(['rm', '-rf', './%s' % name], cwd=environment_path).communicate()

    # 'dv' tracks the default branch; other phases have a branch of their own
    if phase == 'dv':
        git_command = ['git', 'clone', '--depth=1', git_url]
    else:
        git_command = ['git', 'clone', '--depth=1', '-b', phase, git_url]
    subprocess.Popen(git_command, cwd=environment_path).communicate()
    if not os.path.exists('%s/%s' % (environment_path, name)):
        raise Exception()

    git_hash_app = subprocess.Popen(git_rev,
                                    stdout=subprocess.PIPE,
                                    cwd='%s/%s' % (environment_path, name)).communicate()[0]

    # strip VCS metadata before bundling
    subprocess.Popen(['rm', '-rf', './%s/.git' % name],
                     cwd=environment_path).communicate()
    subprocess.Popen(['rm', '-rf', './%s/.gitignore' % name],
                     cwd=environment_path).communicate()

    ################################################################################
    print_message('check previous version')

    cmd = ['elasticbeanstalk', 'describe-environments']
    cmd += ['--application-name', eb_application_name]
    result = aws_cli.run(cmd)

    for r in result['Environments']:
        if 'CNAME' not in r:
            continue
        if r['CNAME'] == '%s.%s.elasticbeanstalk.com' % (cname, aws_default_region):
            if r['Status'] == 'Terminated':
                continue
            elif r['Status'] != 'Ready':
                print('previous version is not ready.')
                raise Exception()
            # previous version owns the CNAME: create under a temp CNAME
            # and swap after the new environment is healthy
            eb_environment_name_old = r['EnvironmentName']
            cname += '-%s' % str_timestamp
            break

    ################################################################################
    print_message('create %s' % name)

    tags = list()
    # noinspection PyUnresolvedReferences
    tags.append('git_hash_johanna=%s' % git_hash_johanna.decode('utf-8'))
    # noinspection PyUnresolvedReferences
    tags.append('git_hash_%s=%s' % (template_name, git_hash_template.decode('utf-8')))
    # noinspection PyUnresolvedReferences
    tags.append('git_hash_%s=%s' % (name, git_hash_app.decode('utf-8')))

    cmd = ['create', eb_environment_name]
    cmd += ['--cname', cname]
    cmd += ['--instance_type', 't2.nano']
    cmd += ['--region', aws_default_region]
    cmd += ['--tags', ','.join(tags)]
    cmd += ['--vpc.id', eb_vpc_id]
    cmd += ['--vpc.securitygroups', security_group_id]
    cmd += ['--quiet']
    if 'public' == subnet_type:
        cmd += ['--vpc.ec2subnets', ','.join([subnet_id_1, subnet_id_2])]
        cmd += ['--vpc.elbsubnets', ','.join([subnet_id_1, subnet_id_2])]
        cmd += ['--vpc.elbpublic']
        cmd += ['--vpc.publicip']
    elif 'private' == subnet_type:
        # to attach network interface located at 'ap-northeast-2a' (subnet_id_1),
        # DO NOT include 'ap-northeast-2c' (subnet_id_2)
        cmd += ['--vpc.ec2subnets', subnet_id_1]
        cmd += ['--vpc.elbsubnets', subnet_id_1]
    aws_cli.run_eb(cmd, cwd=environment_path)

    # poll until the environment reports fully healthy; abort after 30 minutes
    elapsed_time = 0
    while True:
        cmd = ['elasticbeanstalk', 'describe-environments']
        cmd += ['--application-name', eb_application_name]
        cmd += ['--environment-name', eb_environment_name]
        result = aws_cli.run(cmd)

        ee = result['Environments'][0]
        print(json.dumps(ee, sort_keys=True, indent=4))
        if ee.get('Health', '') == 'Green' \
                and ee.get('HealthStatus', '') == 'Ok' \
                and ee.get('Status', '') == 'Ready':
            break

        print('creating... (elapsed time: \'%d\' seconds)' % elapsed_time)
        time.sleep(5)
        elapsed_time += 5
        if elapsed_time > 60 * 30:
            raise Exception()

    subprocess.Popen(['rm', '-rf', './%s' % name], cwd=environment_path).communicate()

    ################################################################################
    print_message('revoke security group ingress')

    # close the world-open SSH rule EB adds to the environment's group
    cmd = ['ec2', 'describe-security-groups']
    cmd += ['--filters',
            'Name=tag-key,Values=Name,Name=tag-value,Values=%s' % eb_environment_name]
    result = aws_cli.run(cmd)

    for ss in result['SecurityGroups']:
        cmd = ['ec2', 'revoke-security-group-ingress']
        cmd += ['--group-id', ss['GroupId']]
        cmd += ['--protocol', 'tcp']
        cmd += ['--port', '22']
        cmd += ['--cidr', '0.0.0.0/0']
        aws_cli.run(cmd, ignore_error=True)

    ################################################################################
    if private_ip is not None:
        print_message('attach network interface')

        # detach the ENI from whatever holds it, then attach it to the new
        # instance; loop until the detach completes
        elapsed_time = 0
        while True:
            cmd = ['ec2', 'describe-network-interfaces']
            cmd += ['--filters', 'Name=private-ip-address,Values=%s' % private_ip]
            result = aws_cli.run(cmd)
            network_interface_id = result['NetworkInterfaces'][0]['NetworkInterfaceId']

            if 'Attachment' not in result['NetworkInterfaces'][0]:
                cmd = ['ec2', 'describe-instances']
                cmd += ['--filters',
                        'Name=tag-key,Values=Name,Name=tag-value,Values=%s' % eb_environment_name]
                result = aws_cli.run(cmd)
                instance_id = result['Reservations'][0]['Instances'][0]['InstanceId']

                cmd = ['ec2', 'attach-network-interface']
                cmd += ['--network-interface-id', network_interface_id]
                cmd += ['--instance-id', instance_id]
                cmd += ['--device-index', '1']
                aws_cli.run(cmd)
                break

            attachment_id = result['NetworkInterfaces'][0]['Attachment']['AttachmentId']
            cmd = ['ec2', 'detach-network-interface']
            cmd += ['--attachment-id', attachment_id]
            aws_cli.run(cmd, ignore_error=True)

            print('detaching network interface... (elapsed time: \'%d\' seconds)' % elapsed_time)
            time.sleep(5)
            elapsed_time += 5

    ################################################################################
    print_message('swap CNAME if the previous version exists')

    if eb_environment_name_old:
        cmd = ['elasticbeanstalk', 'swap-environment-cnames']
        cmd += ['--source-environment-name', eb_environment_name_old]
        cmd += ['--destination-environment-name', eb_environment_name]
        aws_cli.run(cmd)
def run_create_lambda_event(function_name, settings):
    """Create or update an AWS Lambda function and wire it to a CloudWatch
    Events rule that fires on CodeBuild build-state changes.

    The deployment package is built from 'lambda/<FOLDER_NAME>' inside the
    template repository cloned from settings['GIT_URL'].

    :param function_name: Lambda function name
    :param settings: config dict (AWS_DEFAULT_REGION, DESCRIPTION, GIT_URL,
        EVENT_NAME, optional FOLDER_NAME, ...)
    :raises Exception: if GIT_URL is malformed or the clone fails
    """
    aws_cli = AWSCli(settings['AWS_DEFAULT_REGION'])
    description = settings['DESCRIPTION']
    folder_name = settings.get('FOLDER_NAME', function_name)
    git_url = settings['GIT_URL']
    phase = env['common']['PHASE']

    # derive the local checkout folder name from the git url
    mm = re.match(r'^.+/(.+)\.git$', git_url)
    if not mm:
        raise Exception()
    git_folder_name = mm.group(1)

    ################################################################################
    print_session('create %s' % function_name)

    ################################################################################
    print_message('download template: %s' % git_folder_name)

    template_path = 'template/%s' % git_folder_name
    if not os.path.exists(template_path):
        # 'dv' tracks the default branch; other phases have a branch of their own
        if phase == 'dv':
            git_command = ['git', 'clone', '--depth=1', git_url]
        else:
            git_command = ['git', 'clone', '--depth=1', '-b', phase, git_url]
        subprocess.Popen(git_command, cwd='template').communicate()
        if not os.path.exists(template_path):
            raise Exception()

    deploy_folder = '%s/lambda/%s' % (template_path, folder_name)

    ################################################################################
    # Packaging (git clean, pip install, settings_local, zip) and the
    # git-hash tag list previously duplicated _build() inline; reuse it.
    tags = _build(deploy_folder, function_name, phase, settings,
                  git_folder_name, template_path)

    print_message('create lambda function')

    role_arn = aws_cli.get_role_arn('aws-lambda-default-role')

    ################################################################################
    print_message('check previous version')

    need_update = False
    cmd = ['lambda', 'list-functions']
    result = aws_cli.run(cmd)
    for ff in result['Functions']:
        if function_name == ff['FunctionName']:
            need_update = True
            break

    ################################################################################
    if need_update:
        print_session('update lambda: %s' % function_name)

        cmd = ['lambda', 'update-function-code',
               '--function-name', function_name,
               '--zip-file', 'fileb://deploy.zip']
        result = aws_cli.run(cmd, cwd=deploy_folder)
        function_arn = result['FunctionArn']

        print_message('update lambda tags')

        cmd = ['lambda', 'tag-resource',
               '--resource', function_arn,
               '--tags', ','.join(tags)]
        aws_cli.run(cmd, cwd=deploy_folder)
    else:
        print_session('create lambda: %s' % function_name)

        cmd = ['lambda', 'create-function',
               '--function-name', function_name,
               '--description', description,
               '--zip-file', 'fileb://deploy.zip',
               '--role', role_arn,
               '--handler', 'lambda.handler',
               '--runtime', 'python3.6',
               '--tags', ','.join(tags),
               '--timeout', '120']
        function_arn = aws_cli.run(cmd, cwd=deploy_folder)['FunctionArn']

    ################################################################################
    print_session('create event')

    # fire on CodeBuild build completion, success or failure
    event_pattern = json.dumps({
        'source': ['aws.codebuild'],
        'detail-type': ['CodeBuild Build State Change'],
        'detail': {
            'build-status': ['SUCCEEDED', 'FAILED']
        }
    })
    cmd = ['events', 'put-rule',
           '--name', settings['EVENT_NAME'],
           '--event-pattern', event_pattern]
    result = aws_cli.run(cmd)
    rule_arn = result['RuleArn']

    targets = json.dumps([{'Id': settings['EVENT_NAME'], 'Arn': function_arn}])
    cmd = ['events', 'put-targets',
           '--rule', settings['EVENT_NAME'],
           '--targets', targets]
    aws_cli.run(cmd)

    # allow CloudWatch Events to invoke the function; ignore the error when
    # the permission already exists from a previous run
    cmd = ['lambda', 'add-permission',
           '--function-name', function_name,
           '--statement-id', function_name + 'StatementId',
           '--action', 'lambda:InvokeFunction',
           '--principal', 'events.amazonaws.com',
           '--source-arn', rule_arn]
    aws_cli.run(cmd, ignore_error=True)
def run_create_eb_graphite_grafana(name, settings):
    """Create a single-instance Elastic Beanstalk environment running
    graphite/grafana monitoring.

    Restores the newest influxdb backup from S3 when one exists, renders the
    grafana/collectd/influxdb configuration from the template repository,
    clones the application from settings['GIT_URL'], creates a timestamped
    EB environment in the private subnet, waits until healthy, hardens its
    security group, attaches the fixed-IP network interface, and swaps the
    CNAME with the previous environment if one existed.

    :param name: service name (folder under the template's elasticbeanstalk/)
    :param settings: per-service config dict; mutated in place with
        DB_HOST/DB_PASSWORD/DB_USER before being rendered into settings_local
    :raises Exception: on missing VPCs, failed clone, non-ready previous
        version, or creation timeout (30 minutes)
    """
    aws_default_region = settings['AWS_DEFAULT_REGION']
    aws_eb_notification_email = settings['AWS_EB_NOTIFICATION_EMAIL']
    cname = settings['CNAME']
    eb_application_name = env['elasticbeanstalk']['APPLICATION_NAME']
    git_url = settings['GIT_URL']
    host_maya = settings['HOST_MAYA']
    key_pair_name = env['common']['AWS_KEY_PAIR_NAME']
    phase = env['common']['PHASE']
    private_ip = settings['PRIVATE_IP']
    template_name = env['template']['NAME']

    cidr_vpc = aws_cli.cidr_vpc
    cidr_subnet = aws_cli.cidr_subnet

    str_timestamp = str(int(time.time()))

    # every deploy gets a fresh, timestamped environment (blue/green swap)
    eb_environment_name = '%s-%s' % (name, str_timestamp)
    eb_environment_name_old = None

    template_path = 'template/%s' % template_name
    environment_path = '%s/elasticbeanstalk/%s' % (template_path, name)
    opt_config_path = '%s/configuration/opt' % environment_path
    etc_config_path = '%s/configuration/etc' % environment_path
    app_config_path = '%s/%s' % (etc_config_path, name)

    # record the HEAD commits of johanna and the template repo for tagging
    git_rev = ['git', 'rev-parse', 'HEAD']
    git_hash_johanna = subprocess.Popen(
        git_rev, stdout=subprocess.PIPE).communicate()[0]
    git_hash_template = subprocess.Popen(git_rev,
                                         stdout=subprocess.PIPE,
                                         cwd=template_path).communicate()[0]

    ################################################################################
    print_session('create %s' % name)

    ################################################################################
    print_message('get vpc id')

    rds_vpc_id, eb_vpc_id = aws_cli.get_vpc_id()

    if not rds_vpc_id or not eb_vpc_id:
        print('ERROR!!! No VPC found')
        raise Exception()

    ################################################################################
    print_message('get subnet id')

    # monitoring runs single-instance in the first private EB subnet only
    subnet_id_1 = None
    cmd = ['ec2', 'describe-subnets']
    result = aws_cli.run(cmd)
    for r in result['Subnets']:
        if r['VpcId'] != eb_vpc_id:
            continue
        if r['CidrBlock'] == cidr_subnet['eb']['private_1']:
            subnet_id_1 = r['SubnetId']

    ################################################################################
    print_message('get security group id')

    security_group_id = None
    cmd = ['ec2', 'describe-security-groups']
    result = aws_cli.run(cmd)
    for r in result['SecurityGroups']:
        if r['VpcId'] != eb_vpc_id:
            continue
        if r['GroupName'] == 'eb_private':
            security_group_id = r['GroupId']
            break

    ################################################################################
    print_message('get database address')

    db_address = aws_cli.get_rds_address()
    # injected into settings so they are rendered into settings_local below
    settings['DB_HOST'] = db_address
    settings['DB_PASSWORD'] = env['rds']['USER_PASSWORD']
    settings['DB_USER'] = env['rds']['USER_NAME']

    ################################################################################
    print_message('download influxdb backup if available')

    subprocess.Popen(
        ['rm', '-rf',
         '%s/configuration/influxd/' % environment_path]).communicate()

    # best-effort restore: any failure is printed and the deploy proceeds
    # with an empty influxdb
    try:
        bucket_name = _get_s3_bucket_name(settings)
        s3_folder = '/'.join([bucket_name, 'influxdb_backup'])

        result = aws_cli.run(
            ['s3', 'ls', s3_folder, '--recursive', '--page-size', '1'])
        rr = result.strip()
        if not rr:
            raise Exception('backup is not found')
        rr = rr.split('\n')
        if len(rr) < 1:
            raise Exception('backup is not found')
        # listing lines sort lexicographically by timestamp: newest first
        rr = sorted(rr, reverse=True)[0]
        print(rr)

        # 4th whitespace field of an 's3 ls' line is the object key
        s3_path = rr.split()[3]
        file_name = s3_path.split('/')[1]
        source_file = '/'.join([bucket_name, s3_path])
        target_folder = '%s/configuration/influxd/' % environment_path

        result = aws_cli.run(['s3', 'cp', source_file, target_folder])
        print(result)

        # the backup zip is encrypted with the DB password
        subprocess.Popen(['unzip', '-P', settings['DB_PASSWORD'], file_name],
                         cwd=target_folder).communicate()
        subprocess.Popen(['rm', file_name], cwd=target_folder).communicate()
    except Exception as e:
        print(e)

    ################################################################################
    print_message('configuration %s' % name)

    with open('%s/configuration/phase' % environment_path, 'w') as f:
        f.write(phase)
        f.close()

    # render each *_sample template into its live counterpart
    lines = read_file('%s/.elasticbeanstalk/config_sample.yml' % environment_path)
    lines = re_sub_lines(lines, '^( application_name).*', '\\1: %s' % eb_application_name)
    lines = re_sub_lines(lines, '^( default_ec2_keyname).*', '\\1: %s' % key_pair_name)
    write_file('%s/.elasticbeanstalk/config.yml' % environment_path, lines)

    lines = read_file('%s/.ebextensions/%s.config.sample' % (environment_path, name))
    lines = re_sub_lines(lines, 'AWS_VPC_EB', cidr_vpc['eb'])
    lines = re_sub_lines(lines, 'AWS_EB_NOTIFICATION_EMAIL', aws_eb_notification_email)
    write_file('%s/.ebextensions/%s.config' % (environment_path, name), lines)

    lines = read_file('%s/collectd_sample.conf' % etc_config_path)
    lines = re_sub_lines(lines, 'HOST_MAYA', host_maya)
    write_file('%s/collectd.conf' % etc_config_path, lines)

    lines = read_file('%s/ntpdate_sample.sh' % opt_config_path)
    lines = re_sub_lines(lines, '^(SERVER).*', '\\1=\'%s\'' % host_maya)
    write_file('%s/ntpdate.sh' % opt_config_path, lines)

    lines = read_file('%s/settings_local_sample.py' % app_config_path)
    option_list = list()
    option_list.append(['PHASE', phase])
    for key in settings:
        value = settings[key]
        option_list.append([key, value])
    for oo in option_list:
        lines = re_sub_lines(lines, '^(%s) .*' % oo[0], '\\1 = \'%s\'' % oo[1])
    write_file('%s/settings_local.py' % app_config_path, lines)

    lines = read_file(
        '%s/configuration/grafana-alert-notifications_sample.json' % environment_path)
    lines = re_sub_lines(lines, 'SLACK_WEBHOOK_URL', settings['SLACK_WEBHOOK_URL'])
    write_file(
        '%s/configuration/grafana-alert-notifications.json' % environment_path,
        lines)

    # stamp the alert title prefix into every dashboard definition
    file_list = list()
    file_list.append('grafana-dashboards-database.json')
    file_list.append('grafana-dashboards-global.json')
    file_list.append('grafana-dashboards-maya.json')
    file_list.append('grafana-dashboards-penpen.json')
    file_list.append('grafana-dashboards-sachiel.json')
    for ff in file_list:
        lines = read_file('%s/configuration/%s' % (environment_path, ff))
        lines = re_sub_lines(lines, 'ALERT_TITLE_PREFIX', settings['ALERT_TITLE_PREFIX'])
        write_file('%s/configuration/%s' % (environment_path, ff), lines)

    lines = read_file('%s/grafana/grafana.ini' % etc_config_path)
    lines = re_sub_lines(lines, 'HOST_MAYA', host_maya)
    write_file('%s/grafana/grafana.ini' % etc_config_path, lines)

    ################################################################################
    print_message('git clone')

    subprocess.Popen(['rm', '-rf', './%s' % name], cwd=environment_path).communicate()

    # 'dv' tracks the default branch; other phases have a branch of their own
    if phase == 'dv':
        git_command = ['git', 'clone', '--depth=1', git_url]
    else:
        git_command = ['git', 'clone', '--depth=1', '-b', phase, git_url]
    subprocess.Popen(git_command, cwd=environment_path).communicate()
    if not os.path.exists('%s/%s' % (environment_path, name)):
        raise Exception()

    git_hash_app = subprocess.Popen(git_rev,
                                    stdout=subprocess.PIPE,
                                    cwd='%s/%s' % (environment_path, name)).communicate()[0]

    # strip VCS metadata before bundling
    subprocess.Popen(['rm', '-rf', './%s/.git' % name],
                     cwd=environment_path).communicate()
    subprocess.Popen(['rm', '-rf', './%s/.gitignore' % name],
                     cwd=environment_path).communicate()

    ################################################################################
    print_message('check previous version')

    cmd = ['elasticbeanstalk', 'describe-environments']
    cmd += ['--application-name', eb_application_name]
    result = aws_cli.run(cmd)

    for r in result['Environments']:
        if 'CNAME' not in r:
            continue
        if r['CNAME'] == '%s.%s.elasticbeanstalk.com' % (cname, aws_default_region):
            if r['Status'] == 'Terminated':
                continue
            elif r['Status'] != 'Ready':
                print('previous version is not ready.')
                raise Exception()
            # previous version owns the CNAME: create under a temp CNAME
            # and swap after the new environment is healthy
            eb_environment_name_old = r['EnvironmentName']
            cname += '-%s' % str_timestamp
            break

    ################################################################################
    print_message('create %s' % name)

    tags = list()
    # noinspection PyUnresolvedReferences
    tags.append('git_hash_johanna=%s' % git_hash_johanna.decode('utf-8'))
    # noinspection PyUnresolvedReferences
    tags.append('git_hash_%s=%s' % (template_name, git_hash_template.decode('utf-8')))
    # noinspection PyUnresolvedReferences
    tags.append('git_hash_%s=%s' % (name, git_hash_app.decode('utf-8')))

    cmd = ['create', eb_environment_name]
    cmd += ['--cname', cname]
    cmd += ['--instance_type', 't2.micro']
    cmd += ['--region', aws_default_region]
    cmd += ['--single']
    cmd += ['--tags', ','.join(tags)]
    # to attach network interface located at 'ap-northeast-2a' (subnet_id_1),
    # DO NOT include 'ap-northeast-2c' (subnet_id_2)
    cmd += ['--vpc.ec2subnets', subnet_id_1]
    cmd += ['--vpc.elbsubnets', subnet_id_1]
    cmd += ['--vpc.id', eb_vpc_id]
    cmd += ['--vpc.securitygroups', security_group_id]
    cmd += ['--quiet']
    aws_cli.run_eb(cmd, cwd=environment_path)

    # poll until the environment reports fully healthy; abort after 30 minutes
    elapsed_time = 0
    while True:
        cmd = ['elasticbeanstalk', 'describe-environments']
        cmd += ['--application-name', eb_application_name]
        cmd += ['--environment-name', eb_environment_name]
        result = aws_cli.run(cmd)

        ee = result['Environments'][0]
        print(json.dumps(ee, sort_keys=True, indent=4))
        if ee.get('Health', '') == 'Green' \
                and ee.get('HealthStatus', '') == 'Ok' \
                and ee.get('Status', '') == 'Ready':
            break

        print('creating... (elapsed time: \'%d\' seconds)' % elapsed_time)
        time.sleep(5)
        elapsed_time += 5
        if elapsed_time > 60 * 30:
            raise Exception()

    subprocess.Popen(['rm', '-rf', './%s' % name], cwd=environment_path).communicate()

    ################################################################################
    print_message('revoke security group ingress')

    # close the world-open SSH rule EB adds to the environment's group
    cmd = ['ec2', 'describe-security-groups']
    cmd += [
        '--filters',
        'Name=tag-key,Values=Name,Name=tag-value,Values=%s' % eb_environment_name
    ]
    result = aws_cli.run(cmd)

    for ss in result['SecurityGroups']:
        cmd = ['ec2', 'revoke-security-group-ingress']
        cmd += ['--group-id', ss['GroupId']]
        cmd += ['--protocol', 'tcp']
        cmd += ['--port', '22']
        cmd += ['--cidr', '0.0.0.0/0']
        aws_cli.run(cmd, ignore_error=True)

    ################################################################################
    print_message('attach network interface')

    # detach the fixed-IP ENI from whatever holds it, then attach it to the
    # new instance; loop until the detach completes
    elapsed_time = 0
    while True:
        cmd = ['ec2', 'describe-network-interfaces']
        cmd += ['--filters', 'Name=private-ip-address,Values=%s' % private_ip]
        result = aws_cli.run(cmd)
        network_interface_id = result['NetworkInterfaces'][0][
            'NetworkInterfaceId']

        if 'Attachment' not in result['NetworkInterfaces'][0]:
            cmd = ['ec2', 'describe-instances']
            cmd += [
                '--filters',
                'Name=tag-key,Values=Name,Name=tag-value,Values=%s' % eb_environment_name
            ]
            result = aws_cli.run(cmd)
            instance_id = result['Reservations'][0]['Instances'][0][
                'InstanceId']

            cmd = ['ec2', 'attach-network-interface']
            cmd += ['--network-interface-id', network_interface_id]
            cmd += ['--instance-id', instance_id]
            cmd += ['--device-index', '1']
            aws_cli.run(cmd)
            break

        attachment_id = result['NetworkInterfaces'][0]['Attachment'][
            'AttachmentId']
        cmd = ['ec2', 'detach-network-interface']
        cmd += ['--attachment-id', attachment_id]
        aws_cli.run(cmd, ignore_error=True)

        print('detaching network interface... (elapsed time: \'%d\' seconds)' % elapsed_time)
        time.sleep(5)
        elapsed_time += 5

    ################################################################################
    print_message('swap CNAME if the previous version exists')

    if eb_environment_name_old:
        cmd = ['elasticbeanstalk', 'swap-environment-cnames']
        cmd += ['--source-environment-name', eb_environment_name_old]
        cmd += ['--destination-environment-name', eb_environment_name]
        aws_cli.run(cmd)
def run_create_eb_django(name, settings): aws_cli = AWSCli() aws_asg_max_value = settings['AWS_ASG_MAX_VALUE'] aws_asg_min_value = settings['AWS_ASG_MIN_VALUE'] aws_default_region = env['aws']['AWS_DEFAULT_REGION'] aws_eb_notification_email = settings['AWS_EB_NOTIFICATION_EMAIL'] cname = settings['CNAME'] debug = env['common']['DEBUG'] eb_application_name = env['elasticbeanstalk']['APPLICATION_NAME'] git_url = settings['GIT_URL'] key_pair_name = env['common']['AWS_KEY_PAIR_NAME'] phase = env['common']['PHASE'] ssl_certificate_id = settings['SSL_CERTIFICATE_ID'] subnet_type = settings['SUBNET_TYPE'] template_name = env['template']['NAME'] service_name = env['common'].get('SERVICE_NAME', '') name_prefix = '%s_' % service_name if service_name else '' cidr_subnet = aws_cli.cidr_subnet str_timestamp = str(int(time.time())) zip_filename = '%s-%s.zip' % (name, str_timestamp) eb_environment_name = '%s-%s' % (name, str_timestamp) eb_environment_name_old = None template_path = 'template/%s' % template_name environment_path = '%s/elasticbeanstalk/%s' % (template_path, name) etc_config_path = '%s/configuration/etc' % environment_path app_config_path = '%s/%s' % (etc_config_path, name) git_rev = ['git', 'rev-parse', 'HEAD'] git_hash_johanna = subprocess.Popen( git_rev, stdout=subprocess.PIPE).communicate()[0] git_hash_template = subprocess.Popen(git_rev, stdout=subprocess.PIPE, cwd=template_path).communicate()[0] ################################################################################ print_session('create %s' % name) ################################################################################ print_message('get vpc id') rds_vpc_id, eb_vpc_id = aws_cli.get_vpc_id() if not eb_vpc_id: print('ERROR!!! 
No VPC found') raise Exception() ################################################################################ print_message('get subnet id') subnet_id_1 = None subnet_id_2 = None cmd = ['ec2', 'describe-subnets'] result = aws_cli.run(cmd) for r in result['Subnets']: if r['VpcId'] != eb_vpc_id: continue if 'public' == subnet_type: if r['CidrBlock'] == cidr_subnet['eb']['public_1']: subnet_id_1 = r['SubnetId'] if r['CidrBlock'] == cidr_subnet['eb']['public_2']: subnet_id_2 = r['SubnetId'] elif 'private' == subnet_type: if r['CidrBlock'] == cidr_subnet['eb']['private_1']: subnet_id_1 = r['SubnetId'] if r['CidrBlock'] == cidr_subnet['eb']['private_2']: subnet_id_2 = r['SubnetId'] else: print('ERROR!!! Unknown subnet type:', subnet_type) raise Exception() ################################################################################ print_message('get security group id') security_group_id = None cmd = ['ec2', 'describe-security-groups'] result = aws_cli.run(cmd) for r in result['SecurityGroups']: if r['VpcId'] != eb_vpc_id: continue if 'public' == subnet_type: if r['GroupName'] == '%seb_public' % name_prefix: security_group_id = r['GroupId'] break elif 'private' == subnet_type: if r['GroupName'] == '%seb_private' % name_prefix: security_group_id = r['GroupId'] break else: print('ERROR!!! 
Unknown subnet type:', subnet_type) raise Exception() ################################################################################ print_message('get database address') db_address = aws_cli.get_rds_address() ################################################################################ print_message('configuration %s' % name) with open('%s/configuration/phase' % environment_path, 'w') as f: f.write(phase) f.close() lines = read_file('%s/.ebextensions/%s.config.sample' % (environment_path, name)) lines = re_sub_lines(lines, 'AWS_ASG_MAX_VALUE', aws_asg_max_value) lines = re_sub_lines(lines, 'AWS_ASG_MIN_VALUE', aws_asg_min_value) lines = re_sub_lines(lines, 'AWS_EB_NOTIFICATION_EMAIL', aws_eb_notification_email) lines = re_sub_lines(lines, 'SSL_CERTIFICATE_ID', ssl_certificate_id) write_file('%s/.ebextensions/%s.config' % (environment_path, name), lines) lines = read_file('%s/my_sample.cnf' % app_config_path) lines = re_sub_lines(lines, '^(host).*', '\\1 = %s' % db_address) lines = re_sub_lines(lines, '^(user).*', '\\1 = %s' % env['rds']['USER_NAME']) lines = re_sub_lines(lines, '^(password).*', '\\1 = %s' % env['rds']['USER_PASSWORD']) write_file('%s/my.cnf' % app_config_path, lines) lines = read_file('%s/collectd_sample.conf' % etc_config_path) write_file('%s/collectd.conf' % etc_config_path, lines) lines = read_file('%s/settings_local_sample.py' % app_config_path) lines = re_sub_lines(lines, '^(DEBUG).*', '\\1 = %s' % debug) option_list = list() option_list.append(['PHASE', phase]) for key in settings: value = settings[key] option_list.append([key, value]) for oo in option_list: lines = re_sub_lines(lines, '^(%s) .*' % oo[0], '\\1 = \'%s\'' % oo[1]) write_file('%s/settings_local.py' % app_config_path, lines) ################################################################################ print_message('git clone') subprocess.Popen(['rm', '-rf', './%s' % name], cwd=environment_path).communicate() if phase == 'dv': git_command = ['git', 'clone', '--depth=1', 
git_url] else: git_command = ['git', 'clone', '--depth=1', '-b', phase, git_url] subprocess.Popen(git_command, cwd=environment_path).communicate() if not os.path.exists('%s/%s' % (environment_path, name)): raise Exception() git_hash_app = subprocess.Popen(git_rev, stdout=subprocess.PIPE, cwd='%s/%s' % (environment_path, name)).communicate()[0] subprocess.Popen(['rm', '-rf', './%s/.git' % name], cwd=environment_path).communicate() subprocess.Popen(['rm', '-rf', './%s/.gitignore' % name], cwd=environment_path).communicate() ################################################################################ print_message('check previous version') cmd = ['elasticbeanstalk', 'describe-environments'] cmd += ['--application-name', eb_application_name] result = aws_cli.run(cmd) for r in result['Environments']: if 'CNAME' not in r: continue if r['CNAME'] == '%s.%s.elasticbeanstalk.com' % (cname, aws_default_region): if r['Status'] == 'Terminated': continue elif r['Status'] != 'Ready': print('previous version is not ready.') raise Exception() eb_environment_name_old = r['EnvironmentName'] cname += '-%s' % str_timestamp break ################################################################################ print_message('create storage location') cmd = ['elasticbeanstalk', 'create-storage-location'] result = aws_cli.run(cmd) s3_bucket = result['S3Bucket'] s3_zip_filename = '/'.join( ['s3://' + s3_bucket, eb_application_name, zip_filename]) ################################################################################ print_message('create application version') cmd = ['zip', '-r', zip_filename, '.', '.ebextensions'] subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=environment_path).communicate() cmd = ['s3', 'cp', zip_filename, s3_zip_filename] aws_cli.run(cmd, cwd=environment_path) cmd = ['rm', '-rf', zip_filename] subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=environment_path).communicate() cmd = ['elasticbeanstalk', 
'create-application-version'] cmd += ['--application-name', eb_application_name] cmd += [ '--source-bundle', 'S3Bucket="%s",S3Key="%s/%s"' % (s3_bucket, eb_application_name, zip_filename) ] cmd += ['--version-label', eb_environment_name] aws_cli.run(cmd, cwd=environment_path) ################################################################################ print_message('create environment %s' % name) option_settings = list() oo = dict() oo['Namespace'] = 'aws:autoscaling:launchconfiguration' oo['OptionName'] = 'EC2KeyName' oo['Value'] = key_pair_name option_settings.append(oo) oo = dict() oo['Namespace'] = 'aws:autoscaling:launchconfiguration' oo['OptionName'] = 'InstanceType' oo['Value'] = 't2.nano' option_settings.append(oo) oo = dict() oo['Namespace'] = 'aws:autoscaling:launchconfiguration' oo['OptionName'] = 'IamInstanceProfile' oo['Value'] = 'aws-elasticbeanstalk-ec2-role' option_settings.append(oo) oo = dict() oo['Namespace'] = 'aws:autoscaling:launchconfiguration' oo['OptionName'] = 'SecurityGroups' oo['Value'] = security_group_id option_settings.append(oo) oo = dict() oo['Namespace'] = 'aws:ec2:vpc' oo['OptionName'] = 'AssociatePublicIpAddress' oo['Value'] = 'true' if 'private' == subnet_type: oo['Value'] = 'false' option_settings.append(oo) oo = dict() oo['Namespace'] = 'aws:ec2:vpc' oo['OptionName'] = 'ELBScheme' oo['Value'] = '...' 
if 'private' == subnet_type: oo['Value'] = 'internal' option_settings.append(oo) oo = dict() oo['Namespace'] = 'aws:ec2:vpc' oo['OptionName'] = 'ELBSubnets' oo['Value'] = ','.join([subnet_id_1, subnet_id_2]) option_settings.append(oo) oo = dict() oo['Namespace'] = 'aws:ec2:vpc' oo['OptionName'] = 'Subnets' oo['Value'] = ','.join([subnet_id_1, subnet_id_2]) option_settings.append(oo) oo = dict() oo['Namespace'] = 'aws:ec2:vpc' oo['OptionName'] = 'VPCId' oo['Value'] = eb_vpc_id option_settings.append(oo) oo = dict() oo['Namespace'] = 'aws:elasticbeanstalk:environment' oo['OptionName'] = 'EnvironmentType' oo['Value'] = 'LoadBalanced' option_settings.append(oo) oo = dict() oo['Namespace'] = 'aws:elasticbeanstalk:environment' oo['OptionName'] = 'ServiceRole' oo['Value'] = 'aws-elasticbeanstalk-service-role' option_settings.append(oo) oo = dict() oo['Namespace'] = 'aws:elasticbeanstalk:healthreporting:system' oo['OptionName'] = 'SystemType' oo['Value'] = 'enhanced' option_settings.append(oo) oo = dict() oo['Namespace'] = 'aws:elasticbeanstalk:healthreporting:system' oo['OptionName'] = 'ConfigDocument' cw_env = dict() cw_env['ApplicationRequestsTotal'] = 60 cw_env['ApplicationRequests2xx'] = 60 cw_env['ApplicationRequests3xx'] = 60 cw_env['ApplicationRequests4xx'] = 60 cw_env['ApplicationRequests5xx'] = 60 cw_instance = dict() cw_instance['RootFilesystemUtil'] = 60 cw_instance['InstanceHealth'] = 60 cw_instance['CPUIdle'] = 60 cw = dict() cw['Environment'] = cw_env cw['Instance'] = cw_instance cfg_doc = dict() cfg_doc['CloudWatchMetrics'] = cw cfg_doc['Version'] = 1 oo['Value'] = json.dumps(cfg_doc) option_settings.append(oo) option_settings = json.dumps(option_settings) tag0 = 'Key=git_hash_johanna,Value=%s' % git_hash_johanna.decode( 'utf-8').strip() tag1 = 'Key=git_hash_%s,Value=%s' % ( template_name, git_hash_template.decode('utf-8').strip()) tag2 = 'Key=git_hash_%s,Value=%s' % (name, git_hash_app.decode('utf-8').strip()) cmd = ['elasticbeanstalk', 
'create-environment'] cmd += ['--application-name', eb_application_name] cmd += ['--cname-prefix', cname] cmd += ['--environment-name', eb_environment_name] cmd += ['--option-settings', option_settings] cmd += [ '--solution-stack-name', '64bit Amazon Linux 2017.09 v2.6.5 running Python 3.6' ] cmd += ['--tags', tag0, tag1, tag2] cmd += ['--version-label', eb_environment_name] aws_cli.run(cmd, cwd=environment_path) elapsed_time = 0 while True: cmd = ['elasticbeanstalk', 'describe-environments'] cmd += ['--application-name', eb_application_name] cmd += ['--environment-name', eb_environment_name] result = aws_cli.run(cmd) ee = result['Environments'][0] print(json.dumps(ee, sort_keys=True, indent=4)) if ee.get('Health', '') == 'Green' and ee.get('Status', '') == 'Ready': break print('creating... (elapsed time: \'%d\' seconds)' % elapsed_time) time.sleep(5) elapsed_time += 5 if elapsed_time > 60 * 30: raise Exception() subprocess.Popen(['rm', '-rf', './%s' % name], cwd=environment_path).communicate() ################################################################################ print_message('revoke security group ingress') cmd = ['ec2', 'describe-security-groups'] cmd += [ '--filters', 'Name=tag-key,Values=Name,Name=tag-value,Values=%s' % eb_environment_name ] result = aws_cli.run(cmd) for ss in result['SecurityGroups']: cmd = ['ec2', 'revoke-security-group-ingress'] cmd += ['--group-id', ss['GroupId']] cmd += ['--protocol', 'tcp'] cmd += ['--port', '22'] cmd += ['--cidr', '0.0.0.0/0'] aws_cli.run(cmd, ignore_error=True) ################################################################################ print_message('swap CNAME if the previous version exists') if eb_environment_name_old: cmd = ['elasticbeanstalk', 'swap-environment-cnames'] cmd += ['--source-environment-name', eb_environment_name_old] cmd += ['--destination-environment-name', eb_environment_name] aws_cli.run(cmd)
def run_create_s3_webapp(name, settings):
    """Build a static webapp from git and deploy it to an S3 bucket.

    Steps: clone the app repo for the current phase, run bower/npm/grunt,
    inject settings into settings-local.js, upload the built dist/ to a
    temp S3 folder, sync it into the deploy bucket, tag the bucket with
    the git hashes involved, then purge the CloudFlare cache.

    :param name: app name; also the repo checkout directory under the
                 template's s3/ folder.
    :param settings: dict with at least GIT_URL, BUCKET_NAME, PURGE_FILES;
                     remaining keys are substituted into settings-local.js.
    :raises Exception: when the clone, bower or npm step fails.
    """
    git_url = settings['GIT_URL']
    phase = env['common']['PHASE']
    template_name = env['template']['NAME']
    template_path = 'template/%s' % template_name

    environment_path = '%s/s3/%s' % (template_path, name)
    app_root_path = '%s/%s' % (environment_path, name)

    # Record the revisions of this repo and of the template repo so the
    # deploy bucket can be tagged with exactly what was shipped.
    git_rev = ['git', 'rev-parse', 'HEAD']
    git_hash_johanna = subprocess.Popen(
        git_rev, stdout=subprocess.PIPE).communicate()[0]
    git_hash_template = subprocess.Popen(git_rev,
                                         stdout=subprocess.PIPE,
                                         cwd=template_path).communicate()[0]

    ################################################################################
    print_session('create %s' % name)

    ################################################################################
    print_message('git clone')

    subprocess.Popen(['rm', '-rf', './%s' % name],
                     cwd=environment_path).communicate()

    # 'dv' tracks the default branch; every other phase has a branch of its own.
    if phase == 'dv':
        git_command = ['git', 'clone', '--depth=1', git_url]
    else:
        git_command = ['git', 'clone', '--depth=1', '-b', phase, git_url]
    subprocess.Popen(git_command, cwd=environment_path).communicate()

    if not os.path.exists(app_root_path):
        raise Exception()

    git_hash_app = subprocess.Popen(git_rev,
                                    stdout=subprocess.PIPE,
                                    cwd=app_root_path).communicate()[0]

    # Strip VCS metadata so it is not uploaded to S3.
    subprocess.Popen(['rm', '-rf', './.git'], cwd=app_root_path).communicate()
    subprocess.Popen(['rm', '-rf', './.gitignore'],
                     cwd=app_root_path).communicate()

    ################################################################################
    print_message('bower install')

    # Output streams straight to the console (no PIPE), so communicate()
    # returns (None, None); the return code is the only failure signal.
    # (The previous 'if error:' check here could never fire for that reason.)
    bower_process = subprocess.Popen(['bower', 'install'], cwd=app_root_path)
    bower_process.communicate()
    if bower_process.returncode != 0:
        print(' '.join(['Bower returns:', str(bower_process.returncode)]))
        raise Exception()

    ################################################################################
    print_message('configure %s' % name)

    lines = read_file('%s/configuration/app/scripts/settings-local-sample.js'
                      % environment_path)
    option_list = list()
    option_list.append(['PHASE', phase])
    for key in settings:
        value = settings[key]
        option_list.append([key, value])
    for oo in option_list:
        # Rewrites lines of the form "var KEY ..." into "var KEY = 'value';".
        lines = re_sub_lines(lines, '^(var %s) .*' % oo[0],
                             '\\1 = \'%s\';' % oo[1])
    write_file('%s/app/scripts/settings-local.js' % app_root_path, lines)

    ################################################################################
    print_message('grunt build')

    # Same rationale as bower: returncode is the failure signal.
    npm_process = subprocess.Popen(['npm', 'install'], cwd=app_root_path)
    npm_process.communicate()
    if npm_process.returncode != 0:
        print(' '.join(['NPM returns:', str(npm_process.returncode)]))
        raise Exception()

    subprocess.Popen(['grunt', 'build'], cwd=app_root_path).communicate()

    ################################################################################
    print_message('upload to temp bucket')

    app_dist_path = '%s/dist' % app_root_path
    temp_bucket_name = aws_cli.get_temp_bucket()
    timestamp = int(time.time())
    temp_folder = 's3://%s/%s/%s/%s' % (temp_bucket_name, template_name, name,
                                        timestamp)

    cmd = ['s3', 'cp', '.', temp_folder, '--recursive']
    upload_result = aws_cli.run(cmd, cwd=app_dist_path)
    for ll in upload_result.split('\n'):
        print(ll)

    ################################################################################
    print_message('sync to deploy bucket')

    deploy_bucket_name = settings['BUCKET_NAME']

    cmd = [
        's3', 'sync', temp_folder,
        's3://%s' % deploy_bucket_name, '--delete'
    ]
    sync_result = aws_cli.run(cmd)
    for ll in sync_result.split('\n'):
        print(ll)

    # Tag the bucket with the exact revisions deployed. The hashes must be
    # strip()ed: 'git rev-parse' output ends with a newline, and an embedded
    # newline would corrupt the TagSet=[{Key=..., Value=...}] string.
    tag_format = '{Key=%s, Value=%s}'
    tag_list = list()
    tag_list.append(tag_format % ('phase', phase))
    tag_list.append(tag_format % ('git_hash_johanna',
                                  git_hash_johanna.decode('utf-8').strip()))
    tag_list.append(tag_format % ('git_hash_template',
                                  git_hash_template.decode('utf-8').strip()))
    tag_list.append(tag_format % ('git_hash_app',
                                  git_hash_app.decode('utf-8').strip()))
    tag_list.append(tag_format % ('timestamp', timestamp))

    cmd = [
        's3api', 'put-bucket-tagging', '--bucket', deploy_bucket_name,
        '--tagging', 'TagSet=[%s]' % ','.join(tag_list)
    ]
    aws_cli.run(cmd)

    ################################################################################
    print_message('cleanup temp bucket')

    cmd = ['s3', 'rm', temp_folder, '--recursive']
    upload_result = aws_cli.run(cmd)
    for ll in upload_result.split('\n'):
        print(ll)

    ################################################################################
    print_message('purge cache from cloudflare')

    cf_api_key = env['common']['CLOUDFLARE_API_KEY']
    cf_auth_email = env['common']['CLOUDFLARE_AUTH_EMAIL']
    cf_zone_id = env['common']['CLOUDFLARE_ZONE_ID']
    cf_endpoint = 'https://api.cloudflare.com/client/v4/zones/%s/purge_cache' \
                  % cf_zone_id

    data = dict()
    data['files'] = list(settings['PURGE_FILES'])

    cmd = [
        'curl', '-X', 'DELETE', cf_endpoint,
        '-H', 'X-Auth-Email: %s' % cf_auth_email,
        '-H', 'X-Auth-Key: %s' % cf_api_key,
        '-H', 'Content-Type: application/json',
        '--data', json.dumps(data)
    ]
    subprocess.Popen(cmd).communicate()