def do_zip(self, location, staged=False):
    """Create a zip archive of the project at *location* using ``git archive``.

    :param location: destination path for the zip file.
    :param staged: when True, archive the staged index (``git write-tree``)
        instead of ``HEAD``.

    If the ``global.include_git_submodules`` config setting is truthy, each
    submodule is additionally zipped via ``do_zip_submodule``. The caller's
    working directory is always restored.
    """
    cwd = os.getcwd()
    try:
        # must be in project root for git archive to work.
        fileoperations._traverse_to_project_root()

        if staged:
            # Archive the index (staged changes) rather than HEAD.
            commit_id, stderr, exitcode = self._run_cmd(['git', 'write-tree'])
        else:
            commit_id = 'HEAD'
        io.log_info('creating zip using git archive {0}'.format(commit_id))

        stdout, stderr, exitcode = self._run_cmd(
            ['git', 'archive', '-v', '--format=zip', '-o', location, commit_id])
        # git archive writes its verbose file listing to stderr.
        io.log_info('git archive output: {0}'.format(stderr))

        project_root = os.getcwd()

        must_zip_submodules = fileoperations.get_config_setting(
            'global', 'include_git_submodules')

        if must_zip_submodules:
            # individually zip submodules if there are any
            stdout, stderr, exitcode = self._run_cmd(
                ['git', 'submodule', 'foreach', '--recursive'])

            for index, line in enumerate(stdout.splitlines()):
                # Each line looks like: Entering 'path/to/submodule'
                # Split on the single quotes so submodule paths containing
                # spaces survive intact (the previous line.split(' ')[1]
                # truncated such paths at the first space).
                submodule_dir = line.split("'")[1]
                os.chdir(os.path.join(project_root, submodule_dir))
                self.do_zip_submodule(
                    location,
                    "{0}_{1}".format(location, str(index)),
                    staged=staged,
                    submodule_dir=submodule_dir)
    finally:
        os.chdir(cwd)
def upload_workspace_version(bucket, key, file_path, workspace_type='Application'):
    """Upload a workspace (application/platform) version archive to S3.

    :param bucket: destination S3 bucket name.
    :param key: destination S3 object key.
    :param file_path: local path of the archive to upload.
    :param workspace_type: label used in log/error messages.
    :raises NotFoundError: if the local archive file does not exist.
    :raises FileTooLargeError: if the archive exceeds 512 MB.
    :return: result of ``simple_upload`` or ``multithreaded_upload``.
    """
    # Size limits (in bytes). The service rejects bundles over 512 MB;
    # archives of 7 MB or more are uploaded with the multithreaded path.
    max_archive_size = 536870912       # 512 MB
    multipart_threshold = 7340032      # 7 MB

    cwd = os.getcwd()
    try:
        fileoperations._traverse_to_project_root()
        size = os.path.getsize(file_path)
    except OSError as err:
        if err.errno == 2:  # ENOENT: archive missing locally
            raise NotFoundError(
                '{0} Version does not exist locally ({1}).'
                ' Try uploading the Application Version again.'.format(
                    workspace_type, err.filename))
        raise err
    finally:
        # Always restore the caller's working directory.
        os.chdir(cwd)

    LOG.debug('Upload {0} Version. File size = {1}'.format(
        workspace_type, str(size)))

    if size > max_archive_size:
        raise FileTooLargeError('Archive cannot be any larger than 512MB')

    if size < multipart_threshold:
        result = simple_upload(bucket, key, file_path)
    else:
        result = multithreaded_upload(bucket, key, file_path)
    return result
def do_zip(self, location, staged=False):
    """Create a zip archive of the project at *location* using ``git archive``.

    :param location: destination path for the zip file.
    :param staged: when True, archive the staged index (``git write-tree``)
        instead of ``HEAD``.

    If the ``global.include_git_submodules`` config setting is truthy, each
    submodule is additionally zipped via ``do_zip_submodule``. The caller's
    working directory is always restored.
    """
    cwd = os.getcwd()
    try:
        # must be in project root for git archive to work.
        fileoperations._traverse_to_project_root()

        if staged:
            # Archive the index (staged changes) rather than HEAD.
            commit_id, stderr, exitcode = self._run_cmd(['git', 'write-tree'])
        else:
            commit_id = 'HEAD'
        io.log_info('creating zip using git archive {0}'.format(commit_id))

        stdout, stderr, exitcode = self._run_cmd(
            ['git', 'archive', '-v', '--format=zip', '-o', location, commit_id])
        # git archive writes its verbose file listing to stderr.
        io.log_info('git archive output: {0}'.format(stderr))

        project_root = os.getcwd()

        must_zip_submodules = fileoperations.get_config_setting(
            'global', 'include_git_submodules')

        if must_zip_submodules:
            # individually zip submodules if there are any
            stdout, stderr, exitcode = self._run_cmd(
                ['git', 'submodule', 'foreach', '--recursive'])

            for index, line in enumerate(stdout.splitlines()):
                # Each line looks like: Entering 'path/to/submodule'
                # Split on the single quotes so submodule paths containing
                # spaces survive intact (the previous line.split(' ')[1]
                # truncated such paths at the first space).
                submodule_dir = line.split("'")[1]
                os.chdir(os.path.join(project_root, submodule_dir))
                self.do_zip_submodule(
                    location,
                    "{0}_{1}".format(location, str(index)),
                    staged=staged,
                    submodule_dir=submodule_dir)
    finally:
        os.chdir(cwd)
def _raise_if_directory_is_empty():
    """Raise ``PlatformWorkspaceEmptyError`` if the project root has no files.

    The caller's working directory is restored even on failure.
    """
    cwd = os.getcwd()
    try:
        # Moved inside the try block: if traversal itself raises, the
        # original working directory is still restored by the finally.
        fileoperations._traverse_to_project_root()
        if heuristics.directory_is_empty():
            raise PlatformWorkspaceEmptyError(
                strings['exit.platformworkspaceempty'])
    finally:
        os.chdir(cwd)
def clean_up_ignore_file(self):
    """Strip the tool-managed section out of the project's .gitignore.

    Rewrites .gitignore in place, dropping every line from the section
    header (``git_ignore[0]``) up to the next blank line. The caller's
    working directory is always restored.
    """
    original_dir = os.getcwd()
    try:
        fileoperations._traverse_to_project_root()

        skipping = False
        for entry in fileinput.input('.gitignore', inplace=True):
            if entry.startswith(git_ignore[0]):
                # Start of the managed section; suppress output.
                skipping = True
            if not entry.strip():
                # Blank line ends the managed section.
                skipping = False
            if skipping:
                continue
            # stdout is redirected into the file by fileinput(inplace=True).
            print(entry, end='')
    finally:
        os.chdir(original_dir)
def _enable_healthd():
    """Inject enhanced-health option settings into the project's platform.yaml.

    Adds the enhanced health system type and the default service role to
    ``option_settings`` in platform.yaml, unless the customer already
    defined those options. Leaves the working directory at the project root.
    """
    option_settings = []

    option_settings.append({
        'namespace': namespaces.HEALTH_SYSTEM,
        'option_name': option_names.SYSTEM_TYPE,
        'value': 'enhanced'
    })

    # Attach service role
    option_settings.append({
        'namespace': namespaces.ENVIRONMENT,
        'option_name': option_names.SERVICE_ROLE,
        'value': 'aws-elasticbeanstalk-service-role'
    })

    fileoperations._traverse_to_project_root()

    with open('platform.yaml', 'r') as stream:
        # safe_load: yaml.load without an explicit Loader is deprecated and
        # can instantiate arbitrary Python objects from the file.
        platform_yaml = yaml.safe_load(stream)

    try:
        platform_options = platform_yaml['option_settings']
    except KeyError:
        platform_options = []

    options_to_inject = []
    for option in option_settings:
        found_option = False
        for platform_option in platform_options:
            # Don't add an option if it was defined by the customer
            if option['namespace'] == platform_option['namespace'] and option[
                    'option_name'] == platform_option['option_name']:
                found_option = True
                break
        if not found_option:
            options_to_inject.append(option)

    # inject new options
    platform_options.extend(options_to_inject)
    platform_yaml['option_settings'] = list(platform_options)

    with open('platform.yaml', 'w') as stream:
        stream.write(yaml.dump(platform_yaml, default_flow_style=False))
def clean_up_ignore_file(self):
    """Remove the tool-generated section from the project's .gitignore.

    Rewrites .gitignore in place: lines are copied through, except for the
    span starting at the section header (``git_ignore[0]``) and ending at
    the next blank line, which is dropped. The caller's working directory
    is restored afterwards.
    """
    cwd = os.getcwd()
    try:
        fileoperations._traverse_to_project_root()

        # True while we are inside the managed section being removed.
        in_section = False
        for line in fileinput.input('.gitignore', inplace=True):
            if line.startswith(git_ignore[0]):
                # Header of the managed section: start suppressing lines.
                in_section = True

            if not line.strip():
                # A blank line terminates the managed section.
                in_section = False

            if not in_section:
                # fileinput(inplace=True) redirects stdout into the file,
                # so print() writes the kept line back.
                print(line, end='')
    finally:
        os.chdir(cwd)
def _enable_healthd():
    """Inject enhanced-health option settings into the project's platform.yaml.

    Adds the enhanced health system type and the default service role to
    ``option_settings`` in platform.yaml, unless the customer already
    defined those options. Leaves the working directory at the project root.
    """
    option_settings = []

    option_settings.append({
        'namespace': namespaces.HEALTH_SYSTEM,
        'option_name': option_names.SYSTEM_TYPE,
        'value': 'enhanced'
    })

    # Attach service role
    option_settings.append({
        'namespace': namespaces.ENVIRONMENT,
        'option_name': option_names.SERVICE_ROLE,
        'value': 'aws-elasticbeanstalk-service-role'
    })

    fileoperations._traverse_to_project_root()

    with open('platform.yaml', 'r') as stream:
        # safe_load: yaml.load without an explicit Loader is deprecated and
        # can instantiate arbitrary Python objects from the file.
        platform_yaml = yaml.safe_load(stream)

    try:
        platform_options = platform_yaml['option_settings']
    except KeyError:
        platform_options = []

    options_to_inject = []
    for option in option_settings:
        found_option = False
        for platform_option in platform_options:
            # Don't add an option if it was defined by the customer
            if option['namespace'] == platform_option['namespace'] and option[
                    'option_name'] == platform_option['option_name']:
                found_option = True
                break
        if not found_option:
            options_to_inject.append(option)

    # inject new options
    platform_options.extend(options_to_inject)
    platform_yaml['option_settings'] = list(platform_options)

    with open('platform.yaml', 'w') as stream:
        stream.write(yaml.dump(platform_yaml, default_flow_style=False))
def do_zip(self, location, staged=False):
    """Create a zip archive of the project at *location* using ``git archive``.

    :param location: destination path for the zip file.
    :param staged: when True, archive the staged index (``git write-tree``)
        instead of ``HEAD``.

    The caller's working directory is always restored.
    """
    cwd = os.getcwd()
    try:
        # must be in project root for git archive to work.
        fileoperations._traverse_to_project_root()
        if staged:
            # Archive the index (staged changes) rather than HEAD.
            commit_id, stderr, exitcode = self._run_cmd(
                ['git', 'write-tree'])
        else:
            commit_id = 'HEAD'
        # Log the tree actually being archived (the previous message
        # hard-coded 'HEAD', which was wrong for staged archives).
        io.log_info('creating zip using git archive {0}'.format(commit_id))
        stdout, stderr, exitcode = self._run_cmd([
            'git', 'archive', '-v', '--format=zip', '-o', location, commit_id
        ])
        # git archive writes its verbose file listing to stderr.
        io.log_info('git archive output: ' + stderr)
    finally:
        os.chdir(cwd)
def download_source_bundle(app_name, env_name):
    """Download the deployed source bundle for *env_name* into .elasticbeanstalk/downloads.

    For a real application version, the bundle is fetched from its S3
    location; for the sample application, the source URL is read from the
    environment's CloudFormation template. If the project directory is
    empty, the archive is also unzipped in place as the project files.

    :raises NotFoundError: if the sample-app source URL cannot be found.
    """
    env = elasticbeanstalk.get_environment(app_name=app_name, env_name=env_name)
    if env.version_label and env.version_label != 'Sample Application':
        app_version = elasticbeanstalk.get_application_versions(
            app_name, version_labels=[env.version_label])['ApplicationVersions'][0]
        source_bundle = app_version['SourceBundle']
        bucket_name = source_bundle['S3Bucket']
        key_name = source_bundle['S3Key']
        io.echo('Downloading application version...')
        data = s3.get_object(bucket_name, key_name)
        filename = get_filename(key_name)
    else:
        # sample app
        template = cloudformation.get_template('awseb-' + env.id + '-stack')
        try:
            url = template['TemplateBody']['Parameters']['AppSource'][
                'Default']
        except KeyError:
            raise NotFoundError('Can not find app source for environment')
        io.echo('Downloading application version...')
        # Fetch exactly once, with a timeout. (A previous stray
        # utils.get_data_from_url(url) call here discarded its result,
        # downloading the bundle twice.)
        data = utils.get_data_from_url(url, timeout=30)
        filename = 'sample.zip'

    fileoperations.make_eb_dir('downloads/')
    location = fileoperations.get_eb_file_full_location('downloads/' + filename)
    fileoperations.write_to_data_file(location, data)
    io.echo('Application version downloaded to:', location)

    cwd = os.getcwd()
    try:
        fileoperations._traverse_to_project_root()
        if heuristics.directory_is_empty():
            # If we dont have any project code, unzip as current project
            io.echo('Unzipping application version as project files.')
            fileoperations.unzip_folder(location, os.getcwd())
            io.echo('Done.')
    finally:
        os.chdir(cwd)
def create_platform_version(
        version, major_increment, minor_increment, patch_increment,
        instance_type, vpc=None, staged=False, timeout=None):
    """Create a new custom platform version and stream its builder events.

    :param version: explicit semver string, or None to derive the next
        version from the latest owned platform version.
    :param major_increment/minor_increment/patch_increment: which component
        to bump when deriving the version (patch is the default bump).
    :param instance_type: EC2 instance type for the platform builder.
    :param vpc: optional VPC configuration passed through to the API.
    :param staged: build from the staged git index with a unique label.
    :param timeout: minutes to wait for success events (defaults to 30).
    :raises InvalidPlatformVersionError: if the resulting version string is
        not valid semver.
    :raises PlatformWorkspaceEmptyError: if the workspace is empty or has
        no platform definition file.
    """
    platform_name = fileoperations.get_platform_name()
    instance_profile = fileoperations.get_instance_profile(None)
    key_name = commonops.get_default_keyname()

    if version is None:
        # Derive the next version from the latest self-owned version.
        version = _get_latest_version(platform_name=platform_name,
                                      owner=Constants.OWNED_BY_SELF,
                                      ignored_states=[])

        if version is None:
            version = '1.0.0'
        else:
            major, minor, patch = version.split('.', 3)

            if major_increment:
                major = str(int(major) + 1)
                minor = '0'
                patch = '0'
            if minor_increment:
                minor = str(int(minor) + 1)
                patch = '0'
            # Patch bump is the default when no increment flag was given.
            if patch_increment or not(major_increment or minor_increment):
                patch = str(int(patch) + 1)

            version = "%s.%s.%s" % (major, minor, patch)

    if not VALID_PLATFORM_VERSION_FORMAT.match(version):
        raise InvalidPlatformVersionError(strings['exit.invalidversion'])

    cwd = os.getcwd()
    # NOTE(review): traversal happens before the try, so a failure here
    # would leave the process chdir'd away — confirm acceptable.
    fileoperations._traverse_to_project_root()

    try:
        if heuristics.directory_is_empty():
            raise PlatformWorkspaceEmptyError(strings['exit.platformworkspaceempty'])
    finally:
        os.chdir(cwd)

    if not heuristics.has_platform_definition_file():
        raise PlatformWorkspaceEmptyError(strings['exit.no_pdf_file'])

    source_control = SourceControl.get_source_control()
    if source_control.untracked_changes_exist():
        io.log_warning(strings['sc.unstagedchanges'])

    version_label = source_control.get_version_label()
    if staged:
        # Make a unique version label
        timestamp = datetime.now().strftime("%y%m%d_%H%M%S")
        version_label = version_label + '-stage-' + timestamp

    # Back up platform.yaml so the health-daemon options injected below
    # can be reverted after packaging.
    file_descriptor, original_platform_yaml = tempfile.mkstemp()
    os.close(file_descriptor)

    copyfile('platform.yaml', original_platform_yaml)

    s3_bucket = None
    s3_key = None

    try:
        # Add option settings to platform.yaml
        _enable_healthd()

        s3_bucket, s3_key = get_app_version_s3_location(platform_name, version_label)

        # Create zip file if the application version doesn't exist
        if s3_bucket is None and s3_key is None:
            file_name, file_path = _zip_up_project(
                version_label, source_control, staged=staged)
        else:
            file_name = None
            file_path = None
    finally:
        # Restore original platform.yaml
        move(original_platform_yaml, 'platform.yaml')

    # Use existing bucket if it exists
    bucket = elasticbeanstalk.get_storage_location() if s3_bucket is None else s3_bucket

    # Use existing key if it exists
    key = platform_name + '/' + file_name if s3_key is None else s3_key

    try:
        # Skip the upload when the object is already in S3.
        s3.get_object_info(bucket, key)
        io.log_info('S3 Object already exists. Skipping upload.')
    except NotFoundError:
        io.log_info('Uploading archive to s3 location: ' + key)
        s3.upload_platform_version(bucket, key, file_path)

    # Just deletes the local zip
    fileoperations.delete_app_versions()
    io.log_info('Creating Platform Version ' + version_label)
    response = elasticbeanstalk.create_platform_version(
        platform_name, version, bucket, key, instance_profile, key_name, instance_type, vpc)

    # TODO: Enable this once the API returns the name of the environment associated with a
    # CreatePlatformRequest, and remove hard coded value. There is currently only one type
    # of platform builder, we may support additional builders in the future.
    #environment_name = response['PlatformSummary']['EnvironmentName']
    environment_name = 'eb-custom-platform-builder-packer'

    io.echo(colored(
        strings['platformbuildercreation.info'].format(environment_name),
        attrs=['reverse']))

    fileoperations.update_platform_version(version)
    commonops.set_environment_for_current_branch(environment_name)

    arn = response['PlatformSummary']['PlatformArn']
    request_id = response['ResponseMetadata']['RequestId']

    if not timeout:
        timeout = 30

    # Share streamer for platform events and builder events
    streamer = io.get_event_streamer()

    builder_events = threading.Thread(
        target=logsops.stream_platform_logs,
        args=(platform_name, version, streamer, 5, None, PackerStreamFormatter()))
    builder_events.daemon = True

    # Watch events from builder logs
    builder_events.start()
    commonops.wait_for_success_events(
        request_id,
        platform_arn=arn,
        streamer=streamer,
        timeout_in_minutes=timeout
    )
def create_platform_version(version, major_increment, minor_increment,
                            patch_increment, instance_type, vpc=None,
                            staged=False, timeout=None):
    """Create a new custom platform version and stream its builder events.

    :param version: explicit semver string, or None to derive the next
        version from the latest owned platform version.
    :param major_increment/minor_increment/patch_increment: which component
        to bump when deriving the version (patch is the default bump).
    :param instance_type: EC2 instance type for the platform builder.
    :param vpc: optional VPC configuration passed through to the API.
    :param staged: build from the staged git index with a unique label.
    :param timeout: minutes to wait for success events (defaults to 30).
    :raises InvalidPlatformVersionError: if the resulting version string is
        not valid semver.
    :raises PlatformWorkspaceEmptyError: if the workspace is empty or has
        no platform definition file.
    """
    platform_name = fileoperations.get_platform_name()
    instance_profile = fileoperations.get_instance_profile(None)
    key_name = commonops.get_default_keyname()

    if version is None:
        # Derive the next version from the latest self-owned version.
        version = _get_latest_version(platform_name=platform_name,
                                      owner=Constants.OWNED_BY_SELF,
                                      ignored_states=[])
        if version is None:
            version = '1.0.0'
        else:
            major, minor, patch = version.split('.', 3)
            if major_increment:
                major = str(int(major) + 1)
                minor = '0'
                patch = '0'
            if minor_increment:
                minor = str(int(minor) + 1)
                patch = '0'
            # Patch bump is the default when no increment flag was given.
            if patch_increment or not (major_increment or minor_increment):
                patch = str(int(patch) + 1)
            version = "%s.%s.%s" % (major, minor, patch)

    if not VALID_PLATFORM_VERSION_FORMAT.match(version):
        raise InvalidPlatformVersionError(strings['exit.invalidversion'])

    cwd = os.getcwd()
    # NOTE(review): traversal happens before the try, so a failure here
    # would leave the process chdir'd away — confirm acceptable.
    fileoperations._traverse_to_project_root()
    try:
        if heuristics.directory_is_empty():
            raise PlatformWorkspaceEmptyError(
                strings['exit.platformworkspaceempty'])
    finally:
        os.chdir(cwd)

    if not heuristics.has_platform_definition_file():
        raise PlatformWorkspaceEmptyError(strings['exit.no_pdf_file'])

    source_control = SourceControl.get_source_control()
    if source_control.untracked_changes_exist():
        io.log_warning(strings['sc.unstagedchanges'])

    version_label = source_control.get_version_label()
    if staged:
        # Make a unique version label
        timestamp = datetime.now().strftime("%y%m%d_%H%M%S")
        version_label = version_label + '-stage-' + timestamp

    # Back up platform.yaml so the health-daemon options injected below
    # can be reverted after packaging.
    file_descriptor, original_platform_yaml = tempfile.mkstemp()
    os.close(file_descriptor)
    copyfile('platform.yaml', original_platform_yaml)

    # NOTE(review): unlike the other copy of this function in the file,
    # s3_bucket/s3_key are not pre-initialized here. This is safe only
    # because any exception in the try body propagates (after the finally)
    # before they are read below — confirm intended.
    try:
        # Add option settings to platform.yaml
        _enable_healthd()

        s3_bucket, s3_key = get_app_version_s3_location(
            platform_name, version_label)

        # Create zip file if the application version doesn't exist
        if s3_bucket is None and s3_key is None:
            file_name, file_path = _zip_up_project(version_label,
                                                   source_control,
                                                   staged=staged)
        else:
            file_name = None
            file_path = None
    finally:
        # Restore original platform.yaml
        move(original_platform_yaml, 'platform.yaml')

    # Use existing bucket if it exists
    bucket = elasticbeanstalk.get_storage_location(
    ) if s3_bucket is None else s3_bucket

    # Use existing key if it exists
    key = platform_name + '/' + file_name if s3_key is None else s3_key

    try:
        # Skip the upload when the object is already in S3.
        s3.get_object_info(bucket, key)
        io.log_info('S3 Object already exists. Skipping upload.')
    except NotFoundError:
        io.log_info('Uploading archive to s3 location: ' + key)
        s3.upload_platform_version(bucket, key, file_path)

    # Just deletes the local zip
    fileoperations.delete_app_versions()
    io.log_info('Creating Platform Version ' + version_label)
    response = elasticbeanstalk.create_platform_version(
        platform_name, version, bucket, key, instance_profile, key_name,
        instance_type, vpc)

    # TODO: Enable this once the API returns the name of the environment associated with a
    # CreatePlatformRequest, and remove hard coded value. There is currently only one type
    # of platform builder, we may support additional builders in the future.
    #environment_name = response['PlatformSummary']['EnvironmentName']
    environment_name = 'eb-custom-platform-builder-packer'

    io.echo(
        colored(
            strings['platformbuildercreation.info'].format(environment_name),
            attrs=['reverse']))

    fileoperations.update_platform_version(version)
    commonops.set_environment_for_current_branch(environment_name)

    arn = response['PlatformSummary']['PlatformArn']
    request_id = response['ResponseMetadata']['RequestId']

    if not timeout:
        timeout = 30

    # Share streamer for platform events and builder events
    streamer = io.get_event_streamer()

    builder_events = threading.Thread(target=logsops.stream_platform_logs,
                                      args=(platform_name, version, streamer,
                                            5, None, PackerStreamFormatter()))
    builder_events.daemon = True

    # Watch events from builder logs
    builder_events.start()
    commonops.wait_for_success_events(request_id,
                                      platform_arn=arn,
                                      streamer=streamer,
                                      timeout_in_minutes=timeout)
def create_app_version_from_source(app_name, source, process=False, label=None,
                                   message=None, build_config=None):
    """Create an application version from a remote source (CodeCommit only).

    :param app_name: application to attach the version to.
    :param source: source spec parsed by ``utils.parse_source`` into
        (location, repository, branch).
    :param process: whether EB should process the version.
    :param label: explicit version label (default: derived from git).
    :param message: explicit description (default: last commit message,
        truncated to 200 chars).
    :param build_config: optional CodeBuild configuration.
    :return: result of ``_create_application_version``, or None when the
        project directory is empty.
    :raises ServiceError: if the branch/commit cannot be resolved.
    :raises InvalidOptionsError: for unsupported source locations.
    """
    cwd = os.getcwd()
    try:
        # Moved inside the try block: if traversal itself raises, the
        # original working directory is still restored by the finally.
        fileoperations._traverse_to_project_root()
        if heuristics.directory_is_empty():
            io.echo('NOTE: {}'.format(strings['appversion.none']))
            return None
    finally:
        os.chdir(cwd)

    source_control = SourceControl.get_source_control()
    if source_control.untracked_changes_exist():
        io.log_warning(strings['sc.unstagedchanges'])

    # get version_label
    if label:
        version_label = label
    else:
        version_label = source_control.get_version_label()

    # get description
    if message:
        description = message
    else:
        description = source_control.get_message()

    # Descriptions are capped at 200 characters.
    if len(description) > 200:
        description = description[:195] + '...'

    # Parse the source and attempt to push via code commit
    source_location, repository, branch = utils.parse_source(source)

    if source_location == "codecommit":
        try:
            result = codecommit.get_branch(repository, branch)
        except ServiceError as ex:
            io.log_error(
                "Could not get branch '{0}' for the repository '{1}' because of this error: {2}"
                .format(branch, repository, ex.code))
            raise ex

        commit_id = result['branch']['commitId']
        if repository is None or commit_id is None:
            raise ServiceError(
                "Could not find repository or commit id to create an application version"
            )
    else:
        LOG.debug(
            "Source location '{0}' is not supported".format(source_location))
        raise InvalidOptionsError(
            "This command does not support the given source location: {0}".
            format(source_location))

    # Deploy Application version with freshly pushed git commit
    io.log_info('Creating AppVersion ' + version_label)
    return _create_application_version(app_name,
                                       version_label,
                                       description,
                                       None,
                                       None,
                                       process,
                                       repository=repository,
                                       commit_id=commit_id,
                                       build_config=build_config)
def create_codecommit_app_version(app_name, process=False, label=None,
                                  message=None, build_config=None):
    """Push the current branch to CodeCommit and create an app version from it.

    :param app_name: application to attach the version to.
    :param process: whether EB should process the version.
    :param label: explicit version label (default: derived from git).
    :param message: explicit description (default: last commit message,
        truncated to 200 chars).
    :param build_config: optional CodeBuild configuration.
    :return: result of ``_create_application_version``.
    :raises CommandError: if the git push fails.
    :raises ServiceError: if the repository or commit id cannot be resolved.
    """
    # NOTE(review): cwd is captured but never restored — confirm the
    # process is meant to stay at the project root afterwards.
    cwd = os.getcwd()
    fileoperations._traverse_to_project_root()

    source_control = SourceControl.get_source_control()

    # A branch with zero commits cannot be pushed; seed it first.
    if source_control.get_current_commit() is None:
        io.log_warning(
            'There are no commits for the current branch, attempting to create an empty commit and launching with the sample application'
        )
        source_control.create_initial_commit()

    if source_control.untracked_changes_exist():
        io.log_warning(strings['sc.unstagedchanges'])

    version_label = label if label else source_control.get_version_label()

    description = message if message else source_control.get_message()
    # Descriptions are capped at 200 characters.
    if len(description) > 200:
        description = description[:195] + '...'

    # Push code with git
    try:
        source_control.push_codecommit_code()
    except CommandError as e:
        io.echo("Could not push code to the CodeCommit repository:")
        raise e

    # Get additional arguments for deploying code commit and poll
    # for the commit to propagate to code commit.
    from ebcli.operations import gitops
    repository = gitops.get_default_repository()
    commit_id = source_control.get_current_commit()

    if repository is None or commit_id is None:
        raise ServiceError(
            "Could not find repository or commit id to create an application version"
        )

    # Deploy Application version with freshly pushed git commit
    io.log_info('Creating AppVersion ' + version_label)
    return _create_application_version(
        app_name, version_label, description, None, None, process,
        repository=repository, commit_id=commit_id, build_config=build_config)
def create_app_version(app_name, process=False, label=None, message=None,
                       staged=False, build_config=None):
    """Package the project (or configured artifact) and create an app version.

    :param app_name: application to attach the version to.
    :param process: whether EB should process the version.
    :param label: explicit version label (default: derived from git).
    :param message: explicit description (default: last commit message,
        truncated to 200 chars).
    :param staged: zip the staged git index with a unique label suffix.
    :param build_config: optional CodeBuild configuration.
    :return: result of ``_create_application_version``, or None when the
        project directory is empty.
    :raises NotFoundError: if a previously registered S3 object was deleted
        out-of-band and no local archive exists to re-upload.
    """
    cwd = os.getcwd()
    try:
        # Moved inside the try block: if traversal itself raises, the
        # original working directory is still restored by the finally.
        fileoperations._traverse_to_project_root()
        if heuristics.directory_is_empty():
            io.echo('NOTE: {}'.format(strings['appversion.none']))
            return None
    finally:
        os.chdir(cwd)

    source_control = SourceControl.get_source_control()
    if source_control.untracked_changes_exist():
        io.log_warning(strings['sc.unstagedchanges'])

    #get version_label
    if label:
        version_label = label
    else:
        version_label = source_control.get_version_label()
        if staged:
            # Make a unique version label
            timestamp = datetime.now().strftime("%y%m%d_%H%M%S")
            version_label = version_label + '-stage-' + timestamp

    # get description
    if message:
        description = message
    else:
        description = source_control.get_message()

    # Descriptions are capped at 200 characters.
    if len(description) > 200:
        description = description[:195] + '...'

    # Check for zip or artifact deploy
    artifact = fileoperations.get_config_setting('deploy', 'artifact')
    if artifact:
        file_name, file_extension = os.path.splitext(artifact)
        file_name = version_label + file_extension
        file_path = artifact
        s3_key = None
        s3_bucket = None
    else:
        # Check if the app version already exists in S3.
        s3_bucket, s3_key = get_app_version_s3_location(
            app_name, version_label)

        # Create zip file if the application version doesn't exist
        if s3_bucket is None and s3_key is None:
            file_name, file_path = _zip_up_project(
                version_label, source_control, staged=staged)
        else:
            file_name = None
            file_path = None

    # Get s3 location
    bucket = elasticbeanstalk.get_storage_location(
    ) if s3_bucket is None else s3_bucket
    # Generate s3 key
    key = app_name + '/' + file_name if s3_key is None else s3_key

    # Upload to S3 if needed
    try:
        s3.get_object_info(bucket, key)
        # Restored the literal that was broken across the mangled source
        # lines: log and skip when the object is already in S3.
        io.log_info('S3 Object already exists. Skipping upload.')
    except NotFoundError:
        # If we got the bucket/key from the app version describe call and it doesn't exist then
        # the application version must have been deleted out-of-band and we should throw an exception
        if file_name is None and file_path is None:
            raise NotFoundError(
                'Application Version does not exist in the S3 bucket.'
                ' Try uploading the Application Version again.')

        # Otherwise attempt to upload the local application version
        io.log_info('Uploading archive to s3 location: ' + key)
        s3.upload_application_version(bucket, key, file_path)

    # Just deletes the local zip.
    fileoperations.delete_app_versions()
    io.log_info('Creating AppVersion ' + version_label)
    return _create_application_version(app_name,
                                       version_label,
                                       description,
                                       bucket,
                                       key,
                                       process,
                                       build_config=build_config)