def pull_down_app_info(app_name, default_env=None):
    """Pick a default environment for ``app_name`` and pull its defaults.

    Returns a ``(platform_arn, keyname)`` tuple; ``(None, None)`` when the
    application has no environments. ``keyname`` is ``-1`` when the chosen
    environment has no EC2 key pair configured.
    """
    environments = elasticbeanstalk.get_app_environments(app_name)
    if not environments:
        set_environment_for_current_branch(None)
        return None, None

    if len(environments) == 1:
        chosen = environments[0]
        io.log_info('Setting only environment "' + chosen.name + '" as default')
    else:
        chosen = None
        if default_env == '/ni':
            # non-interactive mode: just take the first environment
            chosen = environments[0]
        elif default_env:
            for candidate in environments:
                if candidate.name == default_env:
                    chosen = candidate
                    break
        if chosen is None:
            # no usable default supplied; ask the user
            io.echo(prompts['init.selectdefaultenv'])
            chosen = utils.prompt_for_item_in_list(environments)

    set_environment_for_current_branch(chosen.name)
    io.log_info('Pulling down defaults from environment ' + chosen.name)
    keyname = elasticbeanstalk.get_specific_configuration_for_env(
        app_name, chosen.name,
        'aws:autoscaling:launchconfiguration', 'EC2KeyName'
    )
    if keyname is None:
        keyname = -1
    return chosen.platform.arn, keyname
def create_instance_profile(
        profile_name,
        policy_arns,
        role_name=None,
        inline_policy_name=None,
        inline_policy_doc=None
):
    """
    Create instance profile and associated IAM role, and attach policy ARNs.
    If role_name is omitted profile_name will be used as role name.
    Inline policy is optional.
    """
    try:
        created_profile = iam.create_instance_profile(profile_name)
        if created_profile:
            io.log_info('Created instance profile: {}.'.format(created_profile))

        role_name = role_name or profile_name
        created_role = _create_instance_role(role_name, policy_arns)
        if created_role:
            io.log_info('Created instance role: {}.'.format(created_role))

        if inline_policy_name:
            iam.put_role_policy(role_name, inline_policy_name, inline_policy_doc)

        iam.add_role_to_profile(profile_name, role_name)
    except NotAuthorizedError:
        # No IAM permissions: warn and fall through, returning the name anyway.
        io.log_warning(
            strings['platformcreateiamdescribeerror.info'].format(
                profile_name=profile_name))
    return profile_name
def test_log_info(self, echo_mock, ebglobals_mock):
    """log_info must not write through the patched echo in this configuration."""
    io.log_info('hello, world!')
    echo_mock.assert_not_called()
def do_zip(self, location, staged=False):
    """Create a zip of the project (and optionally each git submodule) with git archive.

    :param location: output path for the zip archive
    :param staged: when True, archive the tree written from the index
        (``git write-tree``) instead of HEAD
    """
    cwd = os.getcwd()
    try:
        # must be in project root for git archive to work.
        fileoperations._traverse_to_project_root()
        if staged:
            commit_id, stderr, exitcode = self._run_cmd(['git', 'write-tree'])
        else:
            commit_id = 'HEAD'
        io.log_info('creating zip using git archive {0}'.format(commit_id))
        stdout, stderr, exitcode = self._run_cmd(
            ['git', 'archive', '-v', '--format=zip', '-o', location, commit_id])
        # the verbose file listing from `git archive -v` arrives on stderr
        io.log_info('git archive output: {0}'.format(stderr))
        project_root = os.getcwd()
        must_zip_submodules = fileoperations.get_config_setting('global', 'include_git_submodules')
        if must_zip_submodules:
            # individually zip submodules if there are any
            stdout, stderr, exitcode = self._run_cmd(['git', 'submodule', 'foreach', '--recursive'])
            for index, line in enumerate(stdout.splitlines()):
                # presumably each line is "Entering 'path'" — the quoted
                # second token is the submodule path; TODO confirm format
                submodule_dir = line.split(' ')[1].strip('\'')
                os.chdir(os.path.join(project_root, submodule_dir))
                self.do_zip_submodule(location,
                                      "{0}_{1}".format(location, str(index)),
                                      staged=staged,
                                      submodule_dir=submodule_dir)
    finally:
        # always restore the caller's working directory
        os.chdir(cwd)
def _upload_platform_version_to_s3_if_necessary(bucket, key, file_path):
    """Upload the platform archive to S3 unless an object already exists at ``key``."""
    try:
        s3.get_object_info(bucket, key)
    except NotFoundError:
        io.log_info('Uploading archive to s3 location: ' + key)
        s3.upload_platform_version(bucket, key, file_path)
    else:
        io.log_info('S3 Object already exists. Skipping upload.')
    # The local zip is no longer needed either way.
    fileoperations.delete_app_versions()
def get_version_label(self):
    """Build an application version label from `git describe` plus a timestamp."""
    io.log_info('Getting version label from git with git-describe')
    stdout, stderr, exitcode = self._run_cmd(
        ['git', 'describe', '--always', '--abbrev=4'])
    label = 'app-{}-{:%y%m%d_%H%M%S}'.format(stdout, datetime.datetime.now())
    # Replace dots with underscores
    return label.replace('.', '_')
def deploy(app_name, env_name, version, label, message, group_name=None,
           process_app_versions=False, staged=False, timeout=5, source=None):
    """Create (or reuse) an application version and deploy it to ``env_name``.

    :param version: existing application version label to deploy; when set,
        no new version is created
    :param label: label for a newly created application version
    :param message: description for a newly created application version
    :param group_name: environment group passed through to the update call
    :param process_app_versions: request preprocessing of the app version
    :param staged: deploy staged (indexed) changes instead of HEAD
    :param timeout: minutes to wait for events (also used for processing waits)
    :param source: external source spec; when given, the version is built
        from that source instead of the local project
    """
    region_name = aws.get_region_name()
    build_config = None
    # A buildspec only applies when we are creating a fresh version.
    if fileoperations.build_spec_exists() and version is None:
        build_config = fileoperations.get_build_configuration()
        LOG.debug("Retrieved build configuration from buildspec: {0}".format(
            build_config.__str__()))
    io.log_info('Deploying code to ' + env_name + " in region " + region_name)
    if version:
        # Deploy an already-existing application version as-is.
        app_version_label = version
    elif source is not None:
        app_version_label = commonops.create_app_version_from_source(
            app_name, source, process=process_app_versions, label=label,
            message=message, build_config=build_config
        )
        io.echo("Starting environment deployment via specified source")
        # Source-based versions always go through processing.
        process_app_versions = True
    elif gitops.git_management_enabled() and not staged:
        app_version_label = commonops.create_codecommit_app_version(
            app_name, process=process_app_versions, label=label,
            message=message, build_config=build_config)
        io.echo("Starting environment deployment via CodeCommit")
        process_app_versions = True
    else:
        app_version_label = commonops.create_app_version(
            app_name, process=process_app_versions, label=label,
            message=message, staged=staged, build_config=build_config
        )
    if build_config is not None:
        buildspecops.stream_build_configuration_app_version_creation(
            app_name, app_version_label, build_config)
    elif process_app_versions is True:
        success = commonops.wait_for_processed_app_versions(
            app_name, [app_version_label], timeout=timeout or 5
        )
        if not success:
            # Processing failed or timed out; abort before updating the env.
            return
    request_id = elasticbeanstalk.update_env_application_version(
        env_name, app_version_label, group_name)
    commonops.wait_for_success_events(request_id, timeout_in_minutes=timeout,
                                      can_abort=True, env_name=env_name)
def setup_ignore_file():
    """Detect the source-control system, set up its ignore file, and record it."""
    io.log_info('Setting up ignore file for source control')
    if not fileoperations.get_config_setting('global', 'sc'):
        source_control = SourceControl.get_source_control()
        source_control.set_up_ignore_file()
        fileoperations.write_config_setting(
            'global', 'sc', source_control.get_name())
def create_codecommit_app_version(app_name, process=False, label=None, message=None, build_config=None):
    """Push local code to CodeCommit and create an application version from it.

    :param process: request preprocessing of the application version
    :param label: explicit version label (default: derived from git)
    :param message: version description (default: last commit message)
    :param build_config: optional CodeBuild configuration to attach
    :raises ServiceError: when no repository/commit can be resolved
    """
    fileoperations.ProjectRoot.traverse()
    source_control = SourceControl.get_source_control()
    if source_control.get_current_commit() is None:
        io.log_warning(
            'There are no commits for the current branch, attempting '
            'to create an empty commit and launching with the sample '
            'application')
        source_control.create_initial_commit()
    if source_control.untracked_changes_exist():
        io.log_warning(strings['sc.unstagedchanges'])
    if label:
        version_label = label
    else:
        version_label = source_control.get_version_label()
    if message:
        description = message
    else:
        description = source_control.get_message()
    # The service limits descriptions to 200 characters; truncate with ellipsis.
    if len(description) > 200:
        description = description[:195] + '...'
    try:
        source_control.push_codecommit_code()
    except CommandError as e:
        io.echo("Could not push code to the CodeCommit repository:")
        raise e
    # local import — presumably avoids a circular import at module load time
    from ebcli.operations import gitops
    repository = gitops.get_default_repository()
    commit_id = source_control.get_current_commit()
    if repository is None or commit_id is None:
        raise ServiceError(
            "Could not find repository or commit id to create an application version"
        )
    io.log_info('Creating AppVersion ' + version_label)
    return _create_application_version(
        app_name, version_label, description, None, None, process,
        repository=repository, commit_id=commit_id, build_config=build_config)
def push_codecommit_code(self):
    """Push the local branch to CodeCommit via a presigned remote URL."""
    io.log_info('Pushing local code to codecommit with git-push')
    stdout, stderr, exitcode = self._run_cmd(
        ['git', 'push', self.get_codecommit_presigned_remote_url()])
    if exitcode == 0:
        LOG.debug('git push result: {0}'.format(stdout))
    else:
        io.log_warning('Git is not able to push code: {0}'.format(exitcode))
        io.log_warning(stderr)
    self._handle_exitcode(exitcode, stderr)
def push_codecommit_code(self):
    """Push the current branch to its CodeCommit remote with git-push."""
    io.log_info('Pushing local code to codecommit with git-push')
    push_cmd = ['git', 'push',
                self.get_current_repository(),
                self.get_current_branch()]
    stdout, stderr, exitcode = self._run_cmd(push_cmd)
    if exitcode != 0:
        io.log_warning('Git is not able to push code: {0}'.format(exitcode))
        io.log_warning(stderr)
    else:
        LOG.debug('git push result: {0}'.format(stdout))
    self._handle_exitcode(exitcode, stderr)
def zip_up_folder(directory, location, ignore_list=None):
    """Zip the contents of ``directory`` into the archive at ``location``."""
    original_cwd = os.getcwd()
    try:
        os.chdir(directory)
        io.log_info('Zipping up folder at location: ' + str(os.getcwd()))
        archive = zipfile.ZipFile(location, 'w', zipfile.ZIP_DEFLATED,
                                  allowZip64=True)
        _zipdir('./', archive, ignore_list=ignore_list)
        archive.close()
        LOG.debug('File size: ' + str(os.path.getsize(location)))
    finally:
        os.chdir(original_cwd)
def zip_up_folder(directory, location, ignore_list=None):
    """Zip the contents of ``directory`` into the archive at ``location``.

    :param directory: folder whose contents are archived
    :param location: path of the zip file to create
    :param ignore_list: optional list of relative paths to exclude
        (forwarded to ``_zipdir``)
    """
    cwd = os.getcwd()
    try:
        os.chdir(directory)
        io.log_info('Zipping up folder at location: ' + str(os.getcwd()))
        # allowZip64=True matches the other copy of this helper and allows
        # archives/members over 4 GiB (on Python 2 the flag defaults to
        # False, so large projects would otherwise fail to zip).
        # The `with` block guarantees the archive is closed even when
        # _zipdir raises.
        with zipfile.ZipFile(location, 'w', zipfile.ZIP_DEFLATED,
                             allowZip64=True) as zipf:
            _zipdir('./', zipf, ignore_list=ignore_list)
        LOG.debug('File size: ' + str(os.path.getsize(location)))
    finally:
        os.chdir(cwd)
def get_instance_profile(self):
    """Resolve the platform-builder instance profile and persist it to config.

    Resolution order: CLI argument, workspace config, pre-existing default
    profile, then creating the default profile/role pair. IAM permission
    errors are warned about, not raised.
    """
    # Check to see if it was specified on the command line
    profile = self.app.pargs.instance_profile
    if profile is None:
        try:
            # Check to see if it is associated with the workspace
            profile = fileoperations.get_instance_profile()
        except NotInitializedError:
            # No workspace config yet; keep looking.
            pass
    if profile is None:
        # Check to see if the default instance profile already exists
        try:
            existing_profiles = iam.get_instance_profile_names()
            if iam_attributes.DEFAULT_PLATFORM_BUILDER_ROLE in existing_profiles:
                profile = iam_attributes.DEFAULT_PLATFORM_BUILDER_ROLE
        except NotAuthorizedError:
            io.log_warning(strings['platformcreateiamdescribeerror.info'])
    if profile is None:
        # We will now create the default role for the customer
        try:
            profile = iam_attributes.DEFAULT_PLATFORM_BUILDER_ROLE
            try:
                iam.create_instance_profile(profile)
                io.log_info(strings['platformcreateiamcreated.info'])
            except AlreadyExistsError:
                # Profile is already there; reuse it.
                pass
            document = iam_documents.EC2_ASSUME_ROLE_PERMISSION
            try:
                # Create a role with the same name
                iam.create_role(profile, document)
                # Attach required custom platform builder permissions
                iam.put_role_policy(
                    profile,
                    iam_attributes.PLATFORM_BUILDER_INLINE_POLICY_NAME,
                    iam_documents.CUSTOM_PLATFORM_BUILDER_INLINE_POLICY)
                # Associate instance profile with the required role
                iam.add_role_to_profile(profile, profile)
                io.log_info(strings['platformcreateiampolicyadded.info'])
            except AlreadyExistsError:
                # If the role exists then we leave it as is, we do not try to add or modify its policies
                pass
        except NotAuthorizedError:
            io.log_warning(strings['platformcreateiamcreateerror.info'])
    # Save to disk
    write_config_setting('global', 'instance_profile', profile)
def get_app_version_s3_location(app_name, version_label):
    """Return ``(s3_bucket, s3_key)`` of an existing application version.

    Both values are ``None`` when the version does not exist.
    """
    app_version = elasticbeanstalk.application_version_exists(
        app_name, version_label)
    if not app_version:
        return None, None
    bundle = app_version['SourceBundle']
    io.log_info(
        "Application Version '{0}' exists. Source from S3: {1}/{2}.".format(
            version_label, bundle['S3Bucket'], bundle['S3Key']))
    return bundle['S3Bucket'], bundle['S3Key']
def _zip_up_project(version_label, source_control, staged=False):
    """Create (or reuse) the source-bundle zip for ``version_label``.

    Returns the ``(file_name, file_path)`` pair for the archive.
    """
    file_name = version_label + '.zip'
    file_path = fileoperations.get_zip_location(file_name)
    if fileoperations.file_exists(file_path):
        # A previous attempt already produced this archive; reuse it.
        return file_name, file_path

    io.echo(strings['appversion.create'].replace('{version}', version_label))
    ignore_files = fileoperations.get_ebignore_list()
    if ignore_files is None:
        source_control.do_zip(file_path, staged)
    else:
        io.log_info('Found .ebignore, using system zip.')
        fileoperations.zip_up_project(file_path, ignore_list=ignore_files)
    return file_name, file_path
def create_app(app_name, default_env=None, tags=None):
    """Create the Elastic Beanstalk application ``app_name``.

    :param app_name: name of the application to create
    :param default_env: preferred default environment name; forwarded to
        ``pull_down_app_info`` when the application already exists
    :param tags: optional list of tags to apply at creation time
    :return: ``(None, None)`` on fresh creation, otherwise the
        ``(platform_arn, keyname)`` tuple from ``pull_down_app_info``
    """
    # `tags=[]` as a default would be a shared mutable default argument;
    # normalize None to a fresh list here instead.
    if tags is None:
        tags = []
    try:
        io.log_info('Creating application: ' + app_name)
        elasticbeanstalk.create_application(
            app_name,
            strings['app.description'],
            tags
        )
        set_environment_for_current_branch(None)
        set_group_suffix_for_current_branch(None)
        io.echo('Application', app_name, 'has been created.')
        return None, None
    except AlreadyExistsError:
        # Not an error: adopt the existing application's defaults.
        io.log_info('Application already exists.')
        return pull_down_app_info(app_name, default_env=default_env)
def create_default_service_role():
    """Create the default Beanstalk service role and return its name.

    :raises NotAuthorizedError: when the caller lacks IAM permissions
    """
    io.log_info('Creating service role {} with default permissions.'
                .format(DEFAULT_SERVICE_ROLE_NAME))
    trust_document = _get_default_service_trust_document()
    try:
        iam.create_role_with_policy(DEFAULT_SERVICE_ROLE_NAME,
                                    trust_document,
                                    DEFAULT_SERVICE_ROLE_POLICIES)
    except NotAuthorizedError as e:
        raise NotAuthorizedError(prompts['create.servicerole.nopermissions']
                                 .format(DEFAULT_SERVICE_ROLE_NAME, e))
    return DEFAULT_SERVICE_ROLE_NAME
def _zip_up_project(version_label, source_control, staged=False):
    """
    Ensure a source-bundle zip exists for ``version_label``.

    A zip left over from a previous attempt is reused as-is.
    Returns the ``(file_name, file_path)`` pair for the archive.
    """
    file_name = version_label + '.zip'
    file_path = fileoperations.get_zip_location(file_name)

    already_zipped = fileoperations.file_exists(file_path)
    if not already_zipped:
        io.echo(strings['appversion.create'].replace('{version}', version_label))
        ignore_files = fileoperations.get_ebignore_list()
        if ignore_files is not None:
            # An .ebignore overrides VCS-based archiving.
            io.log_info('Found .ebignore, using system zip.')
            fileoperations.zip_up_project(file_path, ignore_list=ignore_files)
        else:
            source_control.do_zip(file_path, staged)

    return file_name, file_path
def _zipdir(path, zipf, ignore_list=None):
    """Recursively add the contents of ``path`` to the open ZipFile ``zipf``.

    Skips anything under ``.elasticbeanstalk``, editor backup files
    (``*~``), and entries named in ``ignore_list``. Symlinks are stored
    as link entries (their target path as contents) rather than followed.
    """
    if ignore_list is None:
        ignore_list = ['.gitignore']
    # os.walk yields './'-relative paths, so normalize the ignore entries.
    ignore_list = ['./' + i for i in ignore_list]
    zipped_roots = []
    for root, dirs, files in os.walk(path):
        if '.elasticbeanstalk' in root:
            io.log_info(' -skipping: {}'.format(root))
            continue
        for d in dirs:
            cur_dir = os.path.join(root, d)
            if os.path.islink(cur_dir):
                # It is probably safe to remove this code since os.walk seems to categorize
                # symlinks-to-directories as files. This doesn't matter as far as creation
                # of the zip is concerned, but just having the code around is confusing.
                zipInfo = zipfile.ZipInfo()
                zipInfo.filename = os.path.join(root, d)
                # 2716663808L is the "magic code" for symlinks
                # Python 3 merged "int" and "long" into int, so we must check the version
                # to determine what type to use
                if sys.version_info > (3,):
                    zipInfo.external_attr = 2716663808
                else:
                    zipInfo.external_attr = long(2716663808)
                zipf.writestr(zipInfo, os.readlink(cur_dir))
        for f in files:
            cur_file = os.path.join(root, f)
            if cur_file.endswith('~') or cur_file in ignore_list:
                # Ignore editor backup files (like file.txt~)
                # Ignore anything in the .ebignore file
                io.log_info(' -skipping: {}'.format(cur_file))
            else:
                if root not in zipped_roots:
                    # Windows requires us to index the folders.
                    io.log_info(' +adding: {}/'.format(root))
                    zipf.write(root)
                    zipped_roots.append(root)
                io.log_info(' +adding: {}'.format(cur_file))
                if os.path.islink(cur_file):
                    # Store the link itself: the entry's data is the target path.
                    zipInfo = zipfile.ZipInfo()
                    zipInfo.filename = os.path.join(root, f)
                    # 2716663808L is the "magic code" for symlinks
                    # Python 3 merged "int" and "long" into int, so we must check the
                    # version to determine what type to use
                    if sys.version_info > (3,):
                        zipInfo.external_attr = 2716663808
                    else:
                        zipInfo.external_attr = long(2716663808)
                    zipf.writestr(zipInfo, os.readlink(cur_file))
                else:
                    zipf.write(cur_file)
def open_webpage_in_browser(url, ssl=False):
    """Open ``url`` in the user's default browser.

    :param url: address to open; an ``http(s)://`` scheme is prepended
        when missing
    :param ssl: when True, use ``https://`` as the default scheme
    """
    io.log_info('Opening webpage with default browser.')
    if not url.startswith('http'):
        if ssl:
            url = 'https://' + url
        else:
            url = 'http://' + url
    LOG.debug('url={}'.format(url))
    if utils.is_ssh() or platform.system().startswith('Win'):
        # Preferred way for ssh or windows
        # Windows can't do a fork so we have to do inline
        LOG.debug('Running webbrowser inline.')
        import webbrowser
        webbrowser.open_new_tab(url)
    else:
        # This is the preferred way to open a web browser on *nix.
        # It squashes all output which can be typical on *nix.
        LOG.debug('Running webbrowser as subprocess.')
        from subprocess import Popen, PIPE
        # NOTE(review): url is interpolated into a shell=True command and only
        # wrapped in single quotes — confirm url can never contain quotes or
        # shell metacharacters before exposing this to external input.
        p = Popen(
            [
                '{python} -m webbrowser \'{url}\''.format(
                    python=sys.executable, url=url)
            ],
            stderr=PIPE, stdout=PIPE, shell=True
        )
        '''
        We need to fork the process for various reasons
            1. Calling p.communicate waits for the thread. Some browsers
               (if opening a new window) dont return to the thread until
               the browser closes. We dont want the terminal to hang in
               this case
            2. If we dont call p.communicate, there is a race condition. If
               the main process terminates before the browser call is made,
               the call never gets made and the browser doesn't open.
            Therefor the solution is to fork, then wait for the child
            in the backround.
        '''
        pid = os.fork()
        if pid == 0:
            # Is child: wait on the browser subprocess so it survives the parent.
            p.communicate()
def _zipdir(path, zipf, ignore_list=None):
    """Recursively add the contents of ``path`` to the open ZipFile ``zipf``.

    Skips anything under ``.elasticbeanstalk``, editor backup files
    (``*~``), and entries named in ``ignore_list``. Symlinks are stored
    as link entries (their target path as contents) rather than followed.
    """
    if ignore_list is None:
        ignore_list = ['.gitignore']
    # os.walk yields './'-relative paths, so normalize the ignore entries.
    ignore_list = ['./' + i for i in ignore_list]
    zipped_roots = []
    for root, dirs, files in os.walk(path):
        if '.elasticbeanstalk' in root:
            io.log_info(' -skipping: {}'.format(root))
            continue
        for d in dirs:
            cur_dir = os.path.join(root, d)
            if os.path.islink(cur_dir):
                # It is probably safe to remove this code since os.walk seems to categorize
                # symlinks-to-directories as files. This doesn't matter as far as creation
                # of the zip is concerned, but just having the code around is confusing.
                zipInfo = zipfile.ZipInfo()
                zipInfo.filename = os.path.join(root, d)
                # 2716663808L is the "magic code" for symlinks
                # Python 3 merged "int" and "long" into int, so we must check the version
                # to determine what type to use
                if sys.version_info > (3, ):
                    zipInfo.external_attr = 2716663808
                else:
                    zipInfo.external_attr = long(2716663808)
                zipf.writestr(zipInfo, os.readlink(cur_dir))
        for f in files:
            cur_file = os.path.join(root, f)
            if cur_file.endswith('~') or cur_file in ignore_list:
                # Ignore editor backup files (like file.txt~)
                # Ignore anything in the .ebignore file
                io.log_info(' -skipping: {}'.format(cur_file))
            else:
                if root not in zipped_roots:
                    # Windows requires us to index the folders.
                    io.log_info(' +adding: {}/'.format(root))
                    zipf.write(root)
                    zipped_roots.append(root)
                io.log_info(' +adding: {}'.format(cur_file))
                if os.path.islink(cur_file):
                    # Store the link itself: the entry's data is the target path.
                    zipInfo = zipfile.ZipInfo()
                    zipInfo.filename = os.path.join(root, f)
                    # 2716663808L is the "magic code" for symlinks
                    # Python 3 merged "int" and "long" into int, so we must check the
                    # version to determine what type to use
                    if sys.version_info > (3, ):
                        zipInfo.external_attr = 2716663808
                    else:
                        zipInfo.external_attr = long(2716663808)
                    zipf.writestr(zipInfo, os.readlink(cur_file))
                else:
                    zipf.write(cur_file)
def do_zip_submodule(self, main_location, sub_location, staged=False, submodule_dir=None):
    """Archive one git submodule and fold it into the main project zip."""
    commit_id = 'HEAD'
    if staged:
        commit_id, stderr, exitcode = self._run_cmd(['git', 'write-tree'])
    io.log_info('creating zip using git submodule archive {0}'.format(commit_id))
    # individually zip submodules if there are any
    archive_cmd = ['git', 'archive', '-v', '--format=zip',
                   '--prefix', os.path.join(submodule_dir, ''),
                   '-o', sub_location, commit_id]
    stdout, stderr, exitcode = self._run_cmd(archive_cmd)
    io.log_info('git archive output: {0}'.format(stderr))
    # append and remove the submodule archive
    fileoperations.zip_append_archive(main_location, sub_location)
    fileoperations.delete_file(sub_location)
def make_cloned_env(clone_request, nohang=False, timeout=None):
    """Clone the environment described by ``clone_request``; optionally wait."""
    io.log_info('Cloning environment')
    original = elasticbeanstalk.get_environment(
        app_name=clone_request.app_name,
        env_name=clone_request.original_name)
    # The clone deploys whatever version the original is running.
    clone_request.version_label = original.version_label
    result, request_id = clone_env(clone_request)
    result.print_env_details(
        io.echo,
        elasticbeanstalk.get_environments,
        elasticbeanstalk.get_environment_resources,
        health=False
    )
    if nohang:
        return
    io.echo('Printing Status:')
    commonops.wait_for_success_events(request_id, timeout_in_minutes=timeout)
def setup_credentials(access_id=None, secret_key=None):
    """Prompt for any missing AWS credentials and persist them for eb-cli."""
    io.log_info('Setting up ~/aws/ directory with config file')

    missing_any = access_id is None or secret_key is None
    if missing_any:
        io.echo(strings['cred.prompt'])
    if access_id is None:
        access_id = io.prompt('aws-access-id',
                              default='ENTER_AWS_ACCESS_ID_HERE')
    if secret_key is None:
        secret_key = io.prompt('aws-secret-key', default='ENTER_SECRET_HERE')

    fileoperations.save_to_aws_config(access_id, secret_key)
    fileoperations.touch_config_folder()
    fileoperations.write_config_setting('global', 'profile', 'eb-cli')
    aws.set_session_creds(access_id, secret_key)
def create_default_service_role():
    """
    Create the default service role

    :return: the default service role's name
    :raises NotAuthorizedError: when the caller lacks IAM permissions
    """
    role_name = DEFAULT_SERVICE_ROLE_NAME
    io.log_info(
        'Creating service role {} with default permissions.'.format(role_name))
    trust_document = _get_default_service_trust_document()
    try:
        iam.create_role_with_policy(
            role_name, trust_document, DEFAULT_SERVICE_ROLE_POLICIES)
    except NotAuthorizedError as e:
        # NO permissions to create or do something
        raise NotAuthorizedError(
            prompts['create.servicerole.nopermissions'].format(role_name, e))
    return role_name
def create_platform_version(
        version,
        major_increment,
        minor_increment,
        patch_increment,
        instance_type,
        vpc=None,
        staged=False,
        timeout=None,
        tags=None,
):
    """Package the platform workspace, upload it, and create a platform version.

    :param version: explicit version string; when falsy it is resolved from
        the increment flags
    :param major_increment: bump the major component when resolving
    :param minor_increment: bump the minor component when resolving
    :param patch_increment: bump the patch component when resolving
    :param instance_type: instance type for the platform builder
    :param vpc: optional VPC settings for the builder environment
    :param staged: package staged (indexed) changes instead of HEAD
    :param timeout: minutes to stream builder/platform logs
    :param tags: optional tags, validated before use
    """
    _raise_if_directory_is_empty()
    _raise_if_platform_definition_file_is_missing()
    # Only validate the format when the caller supplied a version explicitly.
    version and _raise_if_version_format_is_invalid(version)
    platform_name = fileoperations.get_platform_name()
    instance_profile = fileoperations.get_instance_profile(None)
    key_name = commonops.get_default_keyname()
    version = version or _resolve_version_number(
        platform_name, major_increment, minor_increment, patch_increment)
    tags = tagops.get_and_validate_tags(tags)
    source_control = SourceControl.get_source_control()
    io.log_warning(strings['sc.unstagedchanges']) if source_control.untracked_changes_exist() else None
    version_label = _resolve_version_label(source_control, staged)
    bucket, key, file_path = _resolve_s3_bucket_and_key(
        platform_name, version_label, source_control, staged)
    _upload_platform_version_to_s3_if_necessary(bucket, key, file_path)
    io.log_info('Creating Platform Version ' + version_label)
    response = elasticbeanstalk.create_platform_version(
        platform_name, version, bucket, key, instance_profile, key_name,
        instance_type, tags, vpc)
    # Hard-coded builder environment name; presumably the API does not return
    # it — confirm before relying on this elsewhere.
    environment_name = 'eb-custom-platform-builder-packer'
    io.echo(colored(
        strings['platformbuildercreation.info'].format(environment_name),
        attrs=['reverse']))
    fileoperations.update_platform_version(version)
    commonops.set_environment_for_current_branch(environment_name)
    stream_platform_logs(response, platform_name, version, timeout)
def do_zip(self, location, staged=False):
    """Create a zip of the project via ``git archive``.

    :param location: output path for the zip archive
    :param staged: when True, archive the tree written from the index
        (``git write-tree``) instead of HEAD
    """
    cwd = os.getcwd()
    try:
        # must be in project root for git archive to work.
        fileoperations._traverse_to_project_root()

        if staged:
            commit_id, stderr, exitcode = self._run_cmd(
                ['git', 'write-tree'])
        else:
            commit_id = 'HEAD'

        # Bug fix: previously logged the literal 'HEAD' even when staged=True
        # archives the write-tree result; report the id actually used, as the
        # other do_zip variants do.
        io.log_info('creating zip using git archive {0}'.format(commit_id))
        stdout, stderr, exitcode = self._run_cmd([
            'git', 'archive', '-v', '--format=zip',
            '-o', location, commit_id
        ])
        # the verbose file listing from `git archive -v` arrives on stderr
        io.log_info('git archive output: ' + stderr)
    finally:
        # always restore the caller's working directory
        os.chdir(cwd)
def get_default_profile():
    """
    Get the default elasticbeanstalk IAM profile,
    Create it if it doesn't exist
    """
    try:
        profile = DEFAULT_ROLE_NAME
        try:
            iam.create_instance_profile(profile)
            io.log_info('Created default instance profile.')
            iam.add_role_to_profile(profile, get_default_role())
        except AlreadyExistsError:
            # Profile already present; nothing to set up.
            pass
    except NotAuthorizedError:
        # Not a root account. Just assume role exists
        io.log_info('No IAM privileges: assuming default '
                    'instance profile exists.')
        return DEFAULT_ROLE_NAME
    return profile
def do_zip(self, location, staged=False):
    """Create a zip of the project (and optionally each git submodule) with git archive.

    :param location: output path for the zip archive
    :param staged: when True, archive the tree written from the index
        (``git write-tree``) instead of HEAD
    """
    cwd = os.getcwd()
    try:
        # must be in project root for git archive to work.
        fileoperations._traverse_to_project_root()
        if staged:
            commit_id, stderr, exitcode = self._run_cmd(
                ['git', 'write-tree'])
        else:
            commit_id = 'HEAD'
        io.log_info('creating zip using git archive {0}'.format(commit_id))
        stdout, stderr, exitcode = self._run_cmd([
            'git', 'archive', '-v', '--format=zip',
            '-o', location, commit_id
        ])
        # the verbose file listing from `git archive -v` arrives on stderr
        io.log_info('git archive output: {0}'.format(stderr))
        project_root = os.getcwd()
        must_zip_submodules = fileoperations.get_config_setting(
            'global', 'include_git_submodules')
        if must_zip_submodules:
            # individually zip submodules if there are any
            stdout, stderr, exitcode = self._run_cmd(
                ['git', 'submodule', 'foreach', '--recursive'])
            for index, line in enumerate(stdout.splitlines()):
                # presumably each line is "Entering 'path'" — the quoted
                # second token is the submodule path; TODO confirm format
                submodule_dir = line.split(' ')[1].strip('\'')
                os.chdir(os.path.join(project_root, submodule_dir))
                self.do_zip_submodule(location, "{0}_{1}".format(
                    location, str(index)), staged=staged, submodule_dir=submodule_dir)
    finally:
        # always restore the caller's working directory
        os.chdir(cwd)
def setup_directory(app_name, region, solution, workspace_type, platform_name,
                    platform_version, instance_profile, dir_path=None,
                    repository=None, branch=None):
    """Write the .elasticbeanstalk config file describing this workspace."""
    io.log_info('Setting up .elasticbeanstalk directory')
    fileoperations.create_config_file(
        app_name,
        region,
        solution,
        workspace_type,
        platform_name,
        platform_version,
        instance_profile,
        dir_path=dir_path,
        repository=repository,
        branch=branch,
    )
def run_app(app):
    """Run the CLI app, mapping known failures to process exit codes.

    Exit codes used below: 2 connection error, 3 invalid state / no region /
    no source control, 4 generic or argument errors, 5 signal or missing
    environment, 126 workspace not initialized.
    """
    squash_cement_logging()
    try:
        app.setup()
        app.run()
        app.close()
    # Handle General Exceptions
    except CaughtSignal:
        io.echo()
        app.close(code=5)
    except NoEnvironmentForBranchError:
        app.close(code=5)
    except InvalidStateError:
        io.log_error(strings['exit.invalidstate'])
        app.close(code=3)
    except NotInitializedError:
        io.log_error(strings['exit.notsetup'])
        app.close(code=126)
    except NoSourceControlError:
        io.log_error(strings['sc.notfound'])
        app.close(code=3)
    except NoRegionError:
        io.log_error(strings['exit.noregion'])
        app.close(code=3)
    except ConnectionError:
        io.log_error(strings['connection.error'])
        app.close(code=2)
    except ArgumentTypeError:
        io.log_error(strings['exit.argerror'])
        app.close(code=4)
    except TooManyPlatformsError:
        io.log_error(strings['toomanyplatforms.error'])
        app.close(code=4)
    except EBCLIException as e:
        # Known CLI exceptions: show the traceback only in verbose/debug mode.
        if '--verbose' in sys.argv or '--debug' in sys.argv:
            io.log_info(traceback.format_exc())
        else:
            io.log_error('{0} - {1}'.format(e.__class__.__name__, e.message))
        app.close(code=4)
    except Exception as e:
        # Generic catch all
        if str(e):
            message = '{exception_class} - {message}'.format(
                exception_class=e.__class__.__name__,
                message=str(e))
        else:
            message = '{exception_class}'.format(
                exception_class=e.__class__.__name__)
        if '--verbose' in sys.argv or '--debug' in sys.argv:
            io.log_info(traceback.format_exc())
            io.log_info(message)
        else:
            io.log_error(message)
        app.close(code=4)
def do_zip(self, location, staged=False):
    """Zip the project with the system zip implementation.

    ``staged`` is accepted for interface compatibility but is not used here.
    """
    io.log_info('Creating zip using systems zip')
    fileoperations.zip_up_project(location)
def create_platform_version(
        version, major_increment, minor_increment, patch_increment,
        instance_type, vpc=None, staged=False, timeout=None):
    """Package the platform workspace, upload it, and create a platform version.

    Resolves the version number (or bumps the latest one per the increment
    flags), zips the workspace with healthd enabled in platform.yaml, uploads
    the bundle to S3 when needed, then creates the platform version and
    streams builder/platform events until success or timeout.

    :param version: explicit ``major.minor.patch`` string, or None to resolve
    :param major_increment: bump the major component when resolving
    :param minor_increment: bump the minor component when resolving
    :param patch_increment: bump the patch component when resolving
    :param instance_type: instance type for the platform builder
    :param vpc: optional VPC settings for the builder environment
    :param staged: package staged (indexed) changes instead of HEAD
    :param timeout: minutes to wait for events (default 30)
    :raises InvalidPlatformVersionError: version fails format validation
    :raises PlatformWorkspaceEmptyError: empty workspace or missing platform
        definition file
    """
    platform_name = fileoperations.get_platform_name()
    instance_profile = fileoperations.get_instance_profile(None)
    key_name = commonops.get_default_keyname()

    if version is None:
        version = _get_latest_version(platform_name=platform_name,
                                      owner=Constants.OWNED_BY_SELF,
                                      ignored_states=[])
        if version is None:
            # First version ever for this platform.
            version = '1.0.0'
        else:
            major, minor, patch = version.split('.', 3)
            if major_increment:
                major = str(int(major) + 1)
                minor = '0'
                patch = '0'
            if minor_increment:
                minor = str(int(minor) + 1)
                patch = '0'
            # With no explicit increment flag, default to a patch bump.
            if patch_increment or not(major_increment or minor_increment):
                patch = str(int(patch) + 1)
            version = "%s.%s.%s" % (major, minor, patch)

    if not VALID_PLATFORM_VERSION_FORMAT.match(version):
        raise InvalidPlatformVersionError(strings['exit.invalidversion'])

    cwd = os.getcwd()
    fileoperations._traverse_to_project_root()
    try:
        if heuristics.directory_is_empty():
            raise PlatformWorkspaceEmptyError(strings['exit.platformworkspaceempty'])
    finally:
        os.chdir(cwd)

    if not heuristics.has_platform_definition_file():
        raise PlatformWorkspaceEmptyError(strings['exit.no_pdf_file'])

    source_control = SourceControl.get_source_control()
    if source_control.untracked_changes_exist():
        io.log_warning(strings['sc.unstagedchanges'])

    version_label = source_control.get_version_label()
    if staged:
        # Make a unique version label
        timestamp = datetime.now().strftime("%y%m%d_%H%M%S")
        version_label = version_label + '-stage-' + timestamp

    # Keep a pristine copy of platform.yaml so edits below can be undone.
    file_descriptor, original_platform_yaml = tempfile.mkstemp()
    os.close(file_descriptor)
    copyfile('platform.yaml', original_platform_yaml)

    s3_bucket = None
    s3_key = None
    try:
        # Add option settings to platform.yaml
        _enable_healthd()
        s3_bucket, s3_key = get_app_version_s3_location(platform_name, version_label)
        # Create zip file if the application version doesn't exist
        if s3_bucket is None and s3_key is None:
            file_name, file_path = _zip_up_project(
                version_label, source_control, staged=staged)
        else:
            file_name = None
            file_path = None
    finally:
        # Restore original platform.yaml
        move(original_platform_yaml, 'platform.yaml')

    # Use existing bucket if it exists
    bucket = elasticbeanstalk.get_storage_location() if s3_bucket is None else s3_bucket
    # Use existing key if it exists
    key = platform_name + '/' + file_name if s3_key is None else s3_key
    try:
        s3.get_object_info(bucket, key)
        io.log_info('S3 Object already exists. Skipping upload.')
    except NotFoundError:
        io.log_info('Uploading archive to s3 location: ' + key)
        s3.upload_platform_version(bucket, key, file_path)

    # Just deletes the local zip
    fileoperations.delete_app_versions()
    io.log_info('Creating Platform Version ' + version_label)
    response = elasticbeanstalk.create_platform_version(
        platform_name, version, bucket, key, instance_profile,
        key_name, instance_type, vpc)

    # TODO: Enable this once the API returns the name of the environment associated with a
    # CreatePlatformRequest, and remove hard coded value. There is currently only one type
    # of platform builder, we may support additional builders in the future.
    #environment_name = response['PlatformSummary']['EnvironmentName']
    environment_name = 'eb-custom-platform-builder-packer'

    io.echo(colored(
        strings['platformbuildercreation.info'].format(environment_name),
        attrs=['reverse']))

    fileoperations.update_platform_version(version)
    commonops.set_environment_for_current_branch(environment_name)

    arn = response['PlatformSummary']['PlatformArn']
    request_id = response['ResponseMetadata']['RequestId']

    if not timeout:
        timeout = 30

    # Share streamer for platform events and builder events
    streamer = io.get_event_streamer()
    builder_events = threading.Thread(
        target=logsops.stream_platform_logs,
        args=(platform_name, version, streamer, 5, None, PackerStreamFormatter()))
    # daemon thread so a hung log stream cannot keep the process alive
    builder_events.daemon = True

    # Watch events from builder logs
    builder_events.start()

    commonops.wait_for_success_events(
        request_id, platform_arn=arn, streamer=streamer,
        timeout_in_minutes=timeout
    )
def create_app_version_from_source(
        app_name, source, process=False, label=None, message=None, build_config=None
):
    """Create an application version from an external source (CodeCommit only).

    :param source: source spec parsed by ``utils.parse_source`` into
        ``(location, repository, branch)``
    :param process: request preprocessing of the application version
    :param label: explicit version label (default: derived from git)
    :param message: version description (default: last commit message)
    :param build_config: optional CodeBuild configuration to attach
    :return: the new version label, or None when the project directory is empty
    :raises InvalidOptionsError: malformed source or unsupported location
    :raises ServiceError: branch lookup failure or unresolvable commit
    """
    cwd = os.getcwd()
    fileoperations.ProjectRoot.traverse()
    try:
        if heuristics.directory_is_empty():
            io.echo('NOTE: {}'.format(strings['appversion.none']))
            return None
    finally:
        os.chdir(cwd)
    source_control = SourceControl.get_source_control()
    if source_control.untracked_changes_exist():
        io.log_warning(strings['sc.unstagedchanges'])
    if label:
        version_label = label
    else:
        version_label = source_control.get_version_label()
    if message:
        description = message
    else:
        description = source_control.get_message()
    # The service limits descriptions to 200 characters; truncate with ellipsis.
    if len(description) > 200:
        description = description[:195] + '...'
    source_location, repository, branch = utils.parse_source(source)
    if not branch or not repository:
        raise InvalidOptionsError(strings['codecommit.bad_source'])
    if source_location == "codecommit":
        try:
            result = codecommit.get_branch(repository, branch)
        except ServiceError as ex:
            io.log_error(
                "Could not get branch '{0}' for the repository '{1}' "
                "because of this error: {2}".format(
                    branch, repository, ex.code
                )
            )
            raise ex
        commit_id = result['branch']['commitId']
        if repository is None or commit_id is None:
            raise ServiceError("Could not find repository or commit id to create an application version")
    else:
        # Only CodeCommit sources are supported here.
        LOG.debug("Source location '{0}' is not supported".format(source_location))
        raise InvalidOptionsError(
            "This command does not support the given source location: {0}".format(
                source_location
            )
        )
    io.log_info('Creating AppVersion ' + version_label)
    return _create_application_version(
        app_name, version_label, description, None, None, process,
        repository=repository, commit_id=commit_id, build_config=build_config)
def create_app_version(app_name, process=False, label=None, message=None, staged=False, build_config=None):
    """
    Create an application version from the local project code: locate or build
    the source bundle (configured artifact, pre-existing S3 object, or a fresh
    zip of the project), upload it to S3 when needed, and register the version.

    :param app_name: name of the Elastic Beanstalk application
    :param process: whether the application version should be processed
    :param label: explicit version label; derived from source control if omitted
    :param message: explicit description; derived from source control if omitted
    :param staged: zip only staged changes and mark the label accordingly
    :param build_config: optional build configuration from a buildspec
    :return: result of `_create_application_version`, or None when the project
        directory is empty
    :raises NotFoundError: when a referenced S3 application version object is
        missing and there is no local file to upload in its place
    """
    original_dir = os.getcwd()
    fileoperations.ProjectRoot.traverse()
    try:
        # Nothing to version in an empty project directory.
        if heuristics.directory_is_empty():
            io.echo('NOTE: {}'.format(strings['appversion.none']))
            return None
    finally:
        # Always restore the caller's working directory.
        os.chdir(original_dir)

    vcs = SourceControl.get_source_control()
    if vcs.untracked_changes_exist():
        io.log_warning(strings['sc.unstagedchanges'])

    version_label = label if label else vcs.get_version_label()
    if staged:
        # Timestamp suffix distinguishes staged bundles from regular ones.
        timestamp = datetime.now().strftime("%y%m%d_%H%M%S")
        version_label = version_label + '-stage-' + timestamp

    description = message if message else vcs.get_message()
    if len(description) > 200:
        # Keep the description within the service's length limit.
        description = description[:195] + '...'

    # Resolve the source bundle: explicit artifact, known S3 location, or a
    # freshly zipped project.
    artifact = fileoperations.get_config_setting('deploy', 'artifact')
    if artifact:
        file_name, file_extension = os.path.splitext(artifact)
        file_name = version_label + file_extension
        file_path = artifact
        s3_key, s3_bucket = None, None
    else:
        s3_bucket, s3_key = get_app_version_s3_location(app_name, version_label)
        if s3_bucket is None and s3_key is None:
            file_name, file_path = _zip_up_project(version_label, vcs, staged=staged)
        else:
            # Bundle already lives in S3; nothing local to upload.
            file_name, file_path = None, None

    bucket = s3_bucket if s3_bucket is not None else elasticbeanstalk.get_storage_location()
    key = s3_key if s3_key is not None else app_name + '/' + file_name

    try:
        s3.get_object_info(bucket, key)
        io.log_info('S3 Object already exists. Skipping upload.')
    except NotFoundError:
        if file_name is None and file_path is None:
            raise NotFoundError('Application Version does not exist in the S3 bucket.'
                                ' Try uploading the Application Version again.')
        io.log_info('Uploading archive to s3 location: ' + key)
        s3.upload_application_version(bucket, key, file_path)

    fileoperations.delete_app_versions()
    io.log_info('Creating AppVersion ' + version_label)
    return _create_application_version(
        app_name, version_label, description, bucket, key, process,
        build_config=build_config)
def make_new_env(env_request, branch_default=False, process_app_version=False,
                 nohang=False, interactive=True, timeout=None, source=None):
    """
    Create a new Elastic Beanstalk environment end-to-end: resolve IAM roles,
    create (or reuse) an application version, create the environment, record
    branch defaults, and stream creation events until success or timeout.

    :param env_request: request object describing the environment to create
        (app name, version label, key name, sample-application flag, ...)
    :param branch_default: force-save the new environment as the current
        branch's default environment
    :param process_app_version: whether the application version should be
        processed before environment creation
    :param nohang: return immediately after printing environment details
        instead of streaming creation events
    :param interactive: passed through to role resolution and env creation
    :param timeout: minutes to wait for success events before logging a timeout
    :param source: optional remote source descriptor; when given, the app
        version is created from it instead of CodeCommit/local code
    """
    resolve_roles(env_request, interactive)

    # Parse and get Build Configuration from BuildSpec if it exists
    build_config = None
    if fileoperations.build_spec_exists():
        build_config = fileoperations.get_build_configuration()
        LOG.debug("Retrieved build configuration from buildspec: {0}".format(
            build_config.__str__()))

    # Deploy code: pick the app-version source in priority order —
    # explicit remote source, then CodeCommit, then local project code.
    codecommit_setup = gitops.git_management_enabled()
    if not env_request.sample_application and not env_request.version_label:
        if source is not None:
            io.log_info('Creating new application version using remote source')
            io.echo("Starting environment deployment via remote source")
            env_request.version_label = commonops.create_app_version_from_source(
                env_request.app_name, source, process=process_app_version,
                label=env_request.version_label, build_config=build_config)
            # Remote-source versions always require processing.
            process_app_version = True
        elif codecommit_setup:
            io.log_info('Creating new application version using CodeCommit')
            io.echo("Starting environment deployment via CodeCommit")
            env_request.version_label = \
                commonops.create_codecommit_app_version(
                    env_request.app_name, process=process_app_version,
                    build_config=build_config)
            # CodeCommit versions always require processing.
            process_app_version = True
        else:
            io.log_info('Creating new application version using project code')
            env_request.version_label = \
                commonops.create_app_version(
                    env_request.app_name, process=process_app_version,
                    build_config=build_config)

        if build_config is not None:
            # CodeBuild path: stream the build that produces the version.
            buildspecops.stream_build_configuration_app_version_creation(
                env_request.app_name, env_request.version_label, build_config)
        elif process_app_version is True:
            # Wait for version processing; abort env creation on failure.
            success = commonops.wait_for_processed_app_versions(
                env_request.app_name, [env_request.version_label])
            if not success:
                return

    if env_request.version_label is None or env_request.sample_application:
        # Fall back to a placeholder version (sample application deploys).
        env_request.version_label = \
            commonops.create_dummy_app_version(env_request.app_name)

    # Create env
    if env_request.key_name:
        commonops.upload_keypair_if_needed(env_request.key_name)

    io.log_info('Creating new environment')
    result, request_id = create_env(env_request, interactive=interactive)

    env_name = result.name  # get the (possibly) updated name

    # Edit configurations
    ## Get default environment
    default_env = commonops.get_current_branch_environment()
    ## Save env as branch default if needed
    if not default_env or branch_default:
        commonops.set_environment_for_current_branch(env_name)
        if codecommit_setup:
            io.echo("Setting up default branch")
            gitops.set_branch_default_for_current_environment(
                gitops.get_default_branch())
            gitops.set_repo_default_for_current_environment(
                gitops.get_default_repository())

    # Print status of env
    commonops.print_env_details(result, health=False)

    if nohang:
        return

    io.echo('Printing Status:')
    try:
        commonops.wait_for_success_events(request_id,
                                          timeout_in_minutes=timeout)
    except TimeoutError:
        # Environment may still finish in the background; report, don't raise.
        io.log_error(strings['timeout.error'])
def make_new_env(
        env_request,
        branch_default=False,
        process_app_version=False,
        nohang=False,
        interactive=True,
        timeout=None,
        source=None,
):
    """
    Create a new Elastic Beanstalk environment end-to-end: resolve IAM roles,
    create (or reuse) an application version, optionally offer the sample-app
    download, create the environment, record branch defaults, and stream
    creation events.

    :param env_request: request object describing the environment to create
        (app name, version label, key name, sample-application flag, ...)
    :param branch_default: force-save the new environment as the current
        branch's default environment
    :param process_app_version: whether the application version should be
        processed before environment creation
    :param nohang: return immediately after printing environment details
        instead of streaming creation events
    :param interactive: enables the sample-app download prompt and is passed
        through to role resolution and env creation
    :param timeout: minutes to wait for success events
    :param source: optional remote source descriptor; when given, the app
        version is created from it instead of CodeCommit/local code
    """
    resolve_roles(env_request, interactive)

    # Parse and get Build Configuration from BuildSpec if it exists
    build_config = None
    if fileoperations.build_spec_exists():
        build_config = fileoperations.get_build_configuration()
        LOG.debug("Retrieved build configuration from buildspec: {0}".format(build_config.__str__()))

    # Deploy code: pick the app-version source in priority order —
    # explicit remote source, then CodeCommit, then local project code.
    codecommit_setup = gitops.git_management_enabled()
    if not env_request.sample_application and not env_request.version_label:
        if source is not None:
            io.log_info('Creating new application version using remote source')
            io.echo("Starting environment deployment via remote source")
            env_request.version_label = commonops.create_app_version_from_source(
                env_request.app_name, source, process=process_app_version,
                label=env_request.version_label, build_config=build_config)
            # Remote-source versions always require processing.
            process_app_version = True
        elif codecommit_setup:
            io.log_info('Creating new application version using CodeCommit')
            io.echo("Starting environment deployment via CodeCommit")
            env_request.version_label = \
                commonops.create_codecommit_app_version(env_request.app_name, process=process_app_version, build_config=build_config)
            # CodeCommit versions always require processing.
            process_app_version = True
        else:
            io.log_info('Creating new application version using project code')
            env_request.version_label = \
                commonops.create_app_version(env_request.app_name, process=process_app_version, build_config=build_config)

        if build_config is not None:
            # CodeBuild path: stream the build that produces the version.
            buildspecops.stream_build_configuration_app_version_creation(env_request.app_name, env_request.version_label, build_config)
        elif process_app_version is True:
            # Wait for version processing; abort env creation on failure.
            success = commonops.wait_for_processed_app_versions(env_request.app_name, [env_request.version_label])
            if not success:
                return

    if env_request.version_label is None or env_request.sample_application:
        # Fall back to a placeholder version (sample application deploys).
        env_request.version_label = \
            commonops.create_dummy_app_version(env_request.app_name)

    # Create env
    if env_request.key_name:
        commonops.upload_keypair_if_needed(env_request.key_name)

    # Ask about downloading the sample app only in interactive sessions;
    # the download itself happens after the environment name is final.
    download_sample_app = None
    if interactive:
        download_sample_app = should_download_sample_app()

    io.log_info('Creating new environment')
    result, request_id = create_env(env_request, interactive=interactive)

    env_name = result.name  # get the (possibly) updated name

    # Edit configurations
    ## Get default environment
    default_env = commonops.get_current_branch_environment()
    ## Save env as branch default if needed
    if not default_env or branch_default:
        commonops.set_environment_for_current_branch(env_name)
        if codecommit_setup:
            io.echo("Setting up default branch")
            gitops.set_branch_default_for_current_environment(gitops.get_default_branch())
            gitops.set_repo_default_for_current_environment(gitops.get_default_repository())

    if download_sample_app:
        download_and_extract_sample_app(env_name)

    # Print status of env
    result.print_env_details(
        io.echo,
        elasticbeanstalk.get_environments,
        elasticbeanstalk.get_environment_resources,
        health=False
    )

    if nohang:
        return

    io.echo('Printing Status:')
    commonops.wait_for_success_events(request_id, timeout_in_minutes=timeout)