def _add_file_to_pak(context, file_name, file_platform, pak_file, manifest_path, manifest):
    """Assign the manifest file entry matching file_name/file_platform to pak_file.

    Looks up the pak's platform type, finds the first file entry that matches
    both the platform and the file, points it at the pak, clears its hash so
    it is re-packed on the next update, and saves the manifest.

    Raises HandledError if the pak is not listed in the manifest or no
    matching file entry is found.
    """
    pak_list = _get_paks_list(context, manifest)
    pak_platform_type = ''
    pak_found = False
    for existing_pak in pak_list:
        if existing_pak['pakFile'] == pak_file:
            pak_platform_type = existing_pak['platformType']
            pak_found = True
            break
    if not pak_found:
        raise HandledError('No pak named {}'.format(pak_file))

    file_list = _get_files_list(context, manifest, 'Files')
    file_found = False
    for existing_file in file_list:
        if not entry_matches_platform(existing_file, pak_platform_type):
            continue
        if entry_matches_file(existing_file, file_name, file_platform):
            existing_file['pakFile'] = pak_file
            # Clear the stored hash so the file is re-added to the pak on the
            # next update, and stop at the first match (consistent with the
            # sibling implementation of this function).
            existing_file['hash'] = ''
            file_found = True
            break
    if not file_found:
        raise HandledError('No matching file found {} platform {}'.format(
            file_name, pak_platform_type))
    manifest['Files'] = file_list
    _save_content_manifest(context, manifest_path, manifest)
def __initialize_jinja(context):
    """Add the bundled jinja2 and markupsafe libraries to sys.path and return
    a jinja2 Environment loading templates from this module's ./templates dir.

    Raises HandledError if either library directory is missing (the user must
    run SetupAssistant's "Compile the game code" option first).
    """
    jinja_path = os.path.join(context.config.root_directory_path, 'Code',
                              'SDKs', 'jinja2', 'x64')
    if not os.path.isdir(jinja_path):
        raise HandledError(
            'The jinja2 Python library was not found at {}. You must select the "Compile the game code" option in SetupAssistant before you can generate service API code.'
            .format(jinja_path))
    sys.path.append(jinja_path)
    markupsafe_path = os.path.join(context.config.root_directory_path, 'Code',
                                   'SDKs', 'markupsafe', 'x64')
    if not os.path.isdir(markupsafe_path):
        # Typo fix: message previously read "Complile the game code".
        raise HandledError(
            'The markupsafe Python library was not found at {}. You must select the "Compile the game code" option in SetupAssistant before you can generate service API code.'
            .format(markupsafe_path))
    sys.path.append(markupsafe_path)
    loaders_module = __load_module('loaders', os.path.join(jinja_path, 'jinja2'))
    template_path = os.path.join(os.path.dirname(__file__), 'templates')
    # Removed a leftover debug print of template_path.
    loader = loaders_module.FileSystemLoader(template_path)
    environment_module = __load_module('environment',
                                       os.path.join(jinja_path, 'jinja2'))
    environment = environment_module.Environment(loader=loader)
    return environment
def command_upload_cloudfront_key(context, args): key_file_path = args.key_path deployment_name = args.deployment_name or None if not os.path.isfile(str(key_file_path)): raise HandledError('No file at {}'.format(key_file_path)) base_name = os.path.basename(key_file_path) if not base_name.endswith('.pem'): raise HandledError('{} is not a .pem file').format(base_name) if not base_name.startswith('pk-'): raise HandledError( '{} does not appear to be a cloudfront key (Expected name format is pk-<accountkey>.pem)' .format(base_name)) s3 = context.aws.client('s3') access_bucket = _get_access_bucket(context, deployment_name) if not access_bucket: raise HandledError('Could not find access bucket!') try: s3.upload_file( key_file_path, access_bucket, dynamic_content_settings.get_access_bucket_key_dir() + '/' + base_name) except Exception as e: raise HandledError('Failed to upload with result {}'.format(e)) print 'Uploaded key file to {}/{}/{}'.format( access_bucket, dynamic_content_settings.get_access_bucket_key_dir(), base_name)
def process_swagger_path(context, swagger_path):
    """Read, parse, process, and re-serialize the swagger file at swagger_path.

    Returns the processed swagger as a sorted, indented JSON string.
    Raises HandledError on read, parse, processing, or serialization failure.
    """
    try:
        with open(swagger_path, 'r') as swagger_file:
            swagger_content = swagger_file.read()
    except IOError as e:
        raise HandledError('Could not read file {}: {}'.format(
            swagger_path, e.message))
    try:
        swagger = json.loads(swagger_content)
    except ValueError as e:
        # Typo fix: message previously read "Cloud not parse".
        raise HandledError('Could not parse {} as JSON: {}'.format(
            swagger_path, e.message))
    try:
        process_swagger(context, swagger)
    except ValueError as e:
        raise HandledError('Could not process {}: {}'.format(
            swagger_path, e.message))
    try:
        content = json.dumps(swagger, sort_keys=True, indent=4)
    except ValueError as e:
        raise HandledError(
            'Could not convert processed swagger to JSON: {}'.format(
                e.message))
    return content
def _get_paks_list(context, manifest):
    """Return the manifest's 'Paks' list.

    Raises HandledError when no manifest is loaded or the 'Paks' entry is
    explicitly null. A missing 'Paks' key yields an empty list.
    """
    if manifest is None:
        raise HandledError('No manifest data loaded')
    paks = manifest.get('Paks', [])
    if paks is None:
        raise HandledError('No Paks list found in manifest')
    return paks
def __generate_function_name(self, method, path):
    """Derive the C++ client function name for a swagger operation.

    Precedence: the x-amazon-cloud-canvas-client-generation.function
    extension, then operationId, then a CamelCased name built from the
    method and path selectors. Raises HandledError when the chosen name is
    not a valid C++ symbol.
    """
    # Use name specified in x-amazon-cloud-canvas-client-generation object,
    # the operationId property or construct one from the path and method.
    function_name = None
    client_generation_extension_object = method.get_object(
        'x-amazon-cloud-canvas-client-generation', default=None)
    if not client_generation_extension_object.is_none:
        function_name = client_generation_extension_object.get_string_value(
            'function', default=None)
        if function_name and not self.__is_valid_cpp_symbol(function_name):
            raise HandledError(
                "x-amazon-cloud-canvas-client-generation.function ({}) for method {} must be a valid C++ symbol"
                .format(function_name, method))
    if not function_name:
        function_name = method.get_string_value('operationId', default=None)
        if function_name and not self.__is_valid_cpp_symbol(function_name):
            raise HandledError(
                "operationId ({}) for method {} must be a valid C++ symbol".
                format(function_name, method))
    if not function_name:
        path_name = method.selector + path.selector
        path_name = path_name.split("/")
        function_name = ""
        for word in path_name:
            # Guard against empty segments (e.g. leading or doubled slashes)
            # which would make word[0] raise IndexError.
            if word and word[0] in string.ascii_letters:
                word = word[0:1].upper() + word[1:]
            function_name += word
        if not self.__is_valid_cpp_symbol(function_name):
            # Typo fix: message previously read "is nto a valid".
            raise HandledError(
                "The generated function ({}) for method {} is not a valid C++ symbol. Use an x-amazon-cloud-canvas-client-generation.function property to override the default."
                .format(function_name, method))
    return function_name
def signing_status_changed(context, key, do_signing):
    """Return True when the staging table's signing state for key differs
    from the requested do_signing flag.

    A missing staging table (ResourceNotFoundException) is treated as
    "changed" and returns True. Other failures raise HandledError.
    """
    dynamo = context.aws.client(
        'dynamodb',
        region=resource_manager.util.get_region_from_arn(
            context.config.project_stack_id))
    table_arn = _get_staging_table(context, context.config.default_deployment)
    try:
        item_response = dynamo.get_item(TableName=table_arn,
                                        Key={'FileName': {'S': key}})
    except ClientError as ce:
        if ce.response['Error']['Code'] == 'ResourceNotFoundException':
            return True
        raise HandledError('Could not get signing status for {}'.format(key),
                           ce)
    except Exception as e:
        raise HandledError('Failed to get signing status for {}'.format(key),
                           e)
    signature = item_response.get('Item', {}).get('Signature', {}).get('S', '')
    return (signature != '') != do_signing
def _add_file_to_pak(context, file_name, file_platform, pak_file, manifest_path, manifest):
    """Point the manifest entry for file_name/file_platform at pak_file and
    clear its hash so the file is re-packed on the next update.

    Raises HandledError when the pak is not in the manifest or no matching
    file entry exists.
    """
    pak_platform_type = ''
    pak_found = False
    for pak_entry in _get_paks_list(context, manifest):
        if pak_entry['pakFile'] == pak_file:
            pak_platform_type = pak_entry['platformType']
            pak_found = True
            break
    if not pak_found:
        raise HandledError('No pak named {}'.format(pak_file))

    file_list = _get_files_list(context, manifest, 'Files')
    file_found = False
    for file_entry in file_list:
        if not entry_matches_platform(file_entry, pak_platform_type):
            continue
        if entry_matches_file(file_entry, file_name, file_platform):
            file_entry['pakFile'] = pak_file
            # An empty hash forces this file into the pak next update.
            file_entry['hash'] = ''
            file_found = True
            break
    if not file_found:
        raise HandledError('No matching file found {} platform {}'.format(
            file_name, pak_platform_type))
    manifest['Files'] = file_list
    _save_content_manifest(context, manifest_path, manifest)
def load_interface_swagger(context, interface_id):
    """Load and validate the swagger definition for an interface id.

    Returns a SwaggerNavigator over the parsed definition. Raises
    HandledError if the gem is missing, the definition file does not exist,
    or the file cannot be parsed/validated.
    """
    gem_name, interface_name, interface_version = parse_interface_id(
        interface_id)
    interface_file_name = interface_name + '_' + str(
        interface_version).replace('.', '_') + '.json'
    gem = context.gem.get_by_name(gem_name)
    if gem is None:
        raise HandledError(
            'The gem {} does not exist or is not enabled for the project.'.
            format(gem_name))
    interface_file_path = os.path.join(gem.aws_directory_path,
                                       'api-definition', interface_file_name)
    if not os.path.exists(interface_file_path):
        raise HandledError(
            'Could not find the definition for interface {} at {}.'.format(
                interface_id, interface_file_path))
    try:
        # Renamed the handle to avoid shadowing the builtin "file".
        with open(interface_file_path, 'r') as interface_file:
            interface_swagger = json.load(interface_file)
        swagger_processor.validate_swagger(interface_swagger)
    except Exception as e:
        # Typo fix: message previously read "Cloud not load".
        raise HandledError(
            'Could not load the definition for interface {} from {}: {}'.
            format(interface_id, interface_file_path, e.message))
    return swagger_json_navigator.SwaggerNavigator(interface_swagger)
def __read_functions(self, paths):
    """Populate self._component_json["functions"] from the swagger paths.

    For every HTTP method on every path, derives the function name, the
    parameter lists (split into body/query/path groups), and the response
    type, and appends one function definition dict per operation.

    Raises HandledError when a parameter lacks an 'in' property or has an
    unrecognized one.
    """
    for path in paths.values():
        for method in path.values():
            # Skip non-operation keys (e.g. "parameters" or extensions).
            if not method.selector.upper() in VALID_HTTP_METHODS:
                continue
            func_def = {}
            function_name = self.__generate_function_name(method, path)
            func_def["functionName"] = function_name
            param_list = []  # for method definition
            param_name_list = []  # for WriteJson body (should not include query or path params)
            path_params = []  # for request.SetPathParameter
            query_params = []  # for request.AddQueryParameter
            signature_params = []  # for function signature
            params = method.get("parameters", {})
            for param in params.values():
                if not param.get("in", ""):
                    raise HandledError(
                        "{} has no 'in' property".format(param))
                param_type = self.__get_param_type(function_name, param)
                # Map swagger types to C++ types; unknown types pass through.
                param_list.append("{} {}".format(
                    SWAGGER_TO_CPP_TYPE.get(param_type, param_type),
                    param.get("name").value))
                signature_params.append("const {}& {}".format(
                    SWAGGER_TO_CPP_TYPE.get(param_type, param_type),
                    param.get("name").value))
                # Route the parameter name into the bucket matching its
                # 'in' location (body / query / path).
                if param.get("in").value == "body":
                    param_name_list.append(param.get("name").value)
                elif param.get("in").value == "query":
                    query_params.append(param.get("name").value)
                elif param.get("in").value == "path":
                    path_params.append(param.get("name").value)
                else:
                    raise HandledError(
                        "{} has invalid 'in' property: {}".format(
                            param, param.get("in").value))
            func_def["path"] = path.selector
            func_def["http_method"] = method.selector.upper()
            func_def["queryParamNames"] = query_params
            func_def["pathParamNames"] = path_params
            func_def["paramNames"] = param_name_list
            func_def["params"] = param_list
            func_def["typedParams"] = ", ".join(signature_params)
            response_type = self.__get_response_type(
                function_name, method.get("responses"))
            if response_type:
                # Only object response types are supported; this raises for
                # anything else.
                self.__check_supported_response_type(path, response_type)
                func_def["responseType"] = SWAGGER_TO_CPP_TYPE.get(
                    response_type, response_type)
            self._component_json["functions"].append(func_def)
def __check_supported_response_type(self, path, response_type):
    """Validate that response_type names a known, non-array object class.

    The lmbr_aws swagger client generator only supports object response
    types; raises HandledError for unknown or array types.
    """
    known_names = [
        item["name"] for item in self._component_json["otherClasses"]
    ]
    if response_type not in known_names:
        # Typo fix: message previously read "a object reponse type".
        raise HandledError(
            "{} does not have an object response type. The lmbr_aws swagger client generator only supports object response types"
            .format(path))
    if [
            item for item in self._component_json["otherClasses"]
            if item["name"] == response_type and item["isArray"]
    ]:
        # Typo fix: message previously read "reponse type".
        raise HandledError(
            "{} has an array response type. The lmbr_aws swagger client generator only supports object response types"
            .format(path))
def open_portal(context, args):
    """Open the Cloud Gem Portal index page in a browser, or print its URL
    and/or bootstrap configuration depending on the arguments.

    Raises HandledError when the CGP resource is missing or its
    configuration cannot be read.
    """
    project_resources = context.config.project_resources
    if not project_resources.has_key(constant.PROJECT_CGP_RESOURCE_NAME):
        raise HandledError(
            'You can not open the Cloud Gem Portal without having the Cloud Gem Framework gem installed in your project.'
        )
    cgp_s3_resource = project_resources[constant.PROJECT_CGP_RESOURCE_NAME]
    stackid = cgp_s3_resource['StackId']
    bucket_id = cgp_s3_resource['PhysicalResourceId']
    # Default comes from argparse only on the CLI; a GUI call doesn't
    # provide a default expiration.
    expiration = args.duration_seconds if args.duration_seconds else constant.PROJECT_CGP_DEFAULT_EXPIRATION_SECONDS
    region = resource_manager.util.get_region_from_arn(stackid)
    # addressing_style:
    # https://docs.aws.amazon.com/cli/latest/topic/s3-config.html
    # https://boto3.readthedocs.io/en/latest/guide/s3.html
    s3_client = context.aws.session.client(
        's3',
        region,
        config=Config(region_name=region,
                      signature_version='s3v4',
                      s3={'addressing_style': 'virtual'}))
    if args.show_current_configuration:
        try:
            context.view._output_message(
                __get_configuration(s3_client, bucket_id))
            return
        except ClientError as e:
            raise HandledError(
                "Could not read from the key '{}' in the S3 bucket '{}'.".
                format(constant.PROJECT_CGP_ROOT_SUPPORT_FILE, bucket_id), e)
    # Dead-code fix: the previous version tested a 'result' variable that
    # was always None, so its error branch could never execute. Generate
    # the presigned url unconditionally.
    index_url = update.get_index_url(s3_client, bucket_id, expiration)
    if args.show_configuration:
        context.view._output_message(__get_configuration(s3_client, bucket_id))
    if args.show_url_only:
        context.view._output_message(index_url)
    else:
        webbrowser.open_new(index_url)
def add_file_entry(context, manifest_path, file_path, manifest_section, pakFileEntry=None, cache_root=None, bucket_prefix=None, output_root=None, platform_type=None):
    """Add or replace a file entry in a manifest section and save the manifest.

    file_path is split into a key name (file name) and a local folder. Any
    existing entry with the same key name, local folder, and platform type
    is replaced rather than duplicated. Raises HandledError when no file
    name is specified.
    """
    manifest_path, manifest = _get_path_and_manifest(context, manifest_path)
    file_section = manifest_section
    if file_section is None:
        file_section = 'Files'  # default manifest section
    thisFile = {}
    if file_path is None:
        raise HandledError('No file name specified')
    # Normalize to forward slashes before splitting into folder + name.
    file_path = file_path.replace('\\', '/')
    namePair = os.path.split(file_path)
    fileName = namePair[1]
    localPath = namePair[0]
    if len(localPath) and localPath[0] == '/':
        # Strip a leading slash so the folder is stored relative.
        localPath = localPath[1:]
    if fileName is None:
        raise HandledError('No file name specified')
    validate_add_key_name(fileName)
    thisFile['keyName'] = fileName
    if not (localPath and len(localPath)):
        localPath = '.'  # no folder component: file lives at the root
    thisFile['cacheRoot'] = cache_root or '@assets@'
    thisFile['localFolder'] = localPath
    thisFile['bucketPrefix'] = bucket_prefix or ''
    thisFile['outputRoot'] = output_root or '@user@'
    thisFile['platformType'] = platform_type or ''
    if pakFileEntry is not None:
        thisFile['pakFile'] = pakFileEntry or ''
    thisFile['isManifest'] = is_manifest_entry(thisFile)
    existingList = _get_files_list(context, manifest, file_section)
    # Remove any existing entry with the same key/folder/platform so the
    # new entry replaces it.
    filesList = [
        thisEntry for thisEntry in existingList
        if not (thisEntry.get('keyName') == fileName
                and thisEntry.get('localFolder', '.') == localPath
                and thisEntry.get('platformType') == platform_type)
    ]
    filesList.append(thisFile)
    manifest[file_section] = filesList
    _save_content_manifest(context, manifest_path, manifest)
def gui_new_manifest(context, args):
    """Create a new manifest from GUI arguments, then list all manifests.

    Raises HandledError when the name is invalid or the file already exists.
    """
    if not validate_manifest_name(args.new_file_name):
        raise HandledError('Invalid manifest name')
    manifest_dir = os.path.normpath(
        os.path.dirname(_get_default_manifest_path(context)))
    target_path = os.path.join(manifest_dir,
                               args.new_file_name + os.path.extsep + 'json')
    existing = glob.glob(
        os.path.join(manifest_dir, '*' + os.path.extsep + 'json'))
    if target_path in existing:
        raise HandledError('File already exists')
    new_manifest(context, target_path, args.platform_list)
    existing.append(target_path)
    context.view.list_manifests(existing)
def command_new_manifest(context, args):
    """Create a new manifest from CLI arguments and report its creation.

    Raises HandledError when the name is invalid or the manifest exists.
    """
    if not validate_manifest_name(args.manifest_name):
        raise HandledError('Invalid manifest name')
    manifest_path = determine_manifest_path(context, args.manifest_path)
    manifest_dir_path = os.path.normpath(os.path.dirname(manifest_path))
    target_name = os.path.join(manifest_dir_path,
                               args.manifest_name + os.path.extsep + 'json')
    existing = glob.glob(
        os.path.join(manifest_dir_path, '*' + os.path.extsep + 'json'))
    if target_name in existing:
        raise HandledError('Manifest already exists')
    new_manifest(context, target_name, args.target_platforms)
    existing.append(target_name)
    context.view.create_new_manifest(args.manifest_name)
def upload_project_content(
        context,
        content_path: str,
        customer_cognito_id=None,
        expiration=constant.PROJECT_CGP_DEFAULT_EXPIRATION_SECONDS):
    """Upload the Cloud Gem Portal content directory to the project bucket
    and write the bootstrap configuration.

    Skipped with a message unless CGP deployment is enabled — not supported
    in Framework versions >= 1.1.5 by default. Raises HandledError when the
    content directory is missing.
    """
    # Guard clause: nothing to do when CGP deployment is disabled.
    if not context.config.deploy_cloud_gem_portal:
        print("CloudGemPortal deployment skipped.")
        return
    if not os.path.isdir(content_path):
        raise HandledError(
            'Cloud Gem Portal project content not found at {}.'.format(
                content_path))
    uploader = Uploader(
        context,
        bucket=context.stack.get_physical_resource_id(
            context.config.project_stack_id, AWS_S3_BUCKET_NAME),
        key='')
    uploader.upload_dir(BUCKET_ROOT_DIRECTORY_NAME, content_path)
    write_bootstrap(context, customer_cognito_id, expiration)
def __read_property(self, properties, prop_name, obj_name):
    """Build the {name, type, init} description for one schema property.

    Inline array/object types get a synthesized class name and are generated
    via __generate_from_schema; $ref targets are resolved and generated on
    first use. Raises HandledError when the property has neither a "type"
    nor a "$ref".
    """
    prop = {}
    prop["name"] = prop_name
    if "type" in properties.get(prop_name).value:
        def_type = properties.get(prop_name).get("type").value
        if def_type in ["array", "object"]:
            # Synthesize a unique class name, e.g. "ObjPropertyField".
            auto_generated_name = "{}Property{}".format(
                obj_name[0].upper() + obj_name[1:],
                prop_name[0].upper() + prop_name[1:])
            self.__generate_from_schema(auto_generated_name,
                                        properties.get(prop_name))
            prop["type"] = auto_generated_name
            prop["init"] = ""
        else:
            # Primitive type: map to the symbol type and its initializer.
            prop["type"] = self.get_symbol_type(def_type, def_type)
            prop["init"] = self.get_symbol_initializer(def_type, "")
    elif "$ref" in properties.get(prop_name).value:
        # must be a ref
        item = properties.get(prop_name)
        ref_path = item.value['$ref']
        ref_name = ref_path.split("/")[-1]
        if not self.__type_defined(ref_name):
            # Generate the referenced type only once, on first use.
            ref = item.resolve_ref(ref_path)
            self.__generate_from_schema(ref_name, ref)
        prop["type"] = ref_name
    else:
        raise HandledError(
            "no type found for property {} in definition at {}".format(
                prop_name, properties))
    return prop
def __get_raw_param_type(self, function_name, param):
    """Resolve a swagger parameter to a type name.

    Inline array/object definitions and inline schemas get a generated
    class ("<function>Param<name>"); $ref schemas return the ref path;
    primitive types are returned as-is. Raises HandledError when the
    parameter has neither a type nor a schema.
    """
    declared_type = param.get("type", "").value
    generated_name = "{}Param{}".format(function_name,
                                        param.get("name").value)
    if declared_type in ("array", "object"):
        if not self.__type_defined(generated_name):
            self.__generate_from_schema(generated_name, param)
        return generated_name
    if declared_type:
        return declared_type
    # No inline type: the parameter must carry a schema definition.
    schema = param.get("schema", {})
    if not schema.value:
        raise HandledError("No param type given for {}".format(param))
    if "$ref" in schema.value:
        return schema.get("$ref").value
    if not self.__type_defined(generated_name):
        self.__generate_from_schema(generated_name, schema)
    return generated_name
def open_portal(context, args):
    """Open the Cloud Gem Portal static site in a browser, or print its URL
    or the bootstrap configuration depending on the arguments.

    Raises HandledError when the CGP resource is not installed.
    """
    project_resources = context.config.project_resources
    if not project_resources.has_key(constant.PROJECT_CGP_RESOURCE_NAME):
        raise HandledError(
            'You can not open the Cloud Gem Portal without having the Cloud Gem Framework gem installed in your project.'
        )
    cgp_s3_resource = project_resources[constant.PROJECT_CGP_RESOURCE_NAME]
    bucket_id = cgp_s3_resource['PhysicalResourceId']
    region = resource_manager.util.get_region_from_arn(
        cgp_s3_resource['StackId'])
    cgp_static_url = update.get_index_url(context, region)
    if args.show_url_only:
        context.view._output_message(cgp_static_url)
    elif args.show_bootstrap_configuration:
        s3_client = context.aws.session.client(
            's3',
            region,
            config=Config(region_name=region,
                          signature_version='s3v4',
                          s3={'addressing_style': 'virtual'}))
        context.view._output_message(__get_configuration(s3_client, bucket_id))
    else:
        webbrowser.open_new(cgp_static_url)
def __load_swagger(context, swagger_file_path):
    """Load and parse the swagger.json file at swagger_file_path.

    Raises HandledError when the file does not exist.
    """
    if not os.path.isfile(swagger_file_path):
        # Bug fix: the message previously referenced an undefined "args"
        # variable, so this path raised a NameError instead of the intended
        # HandledError.
        raise HandledError(
            'The resource group does not provide a swagger.json file ({} does not exist).'
            .format(swagger_file_path))
    # Renamed the handle to avoid shadowing the builtin "file".
    with open(swagger_file_path, 'r') as swagger_file:
        return json.load(swagger_file)
def command_request_url(context, args):
    """Invoke the request lambda to obtain a presigned URL for a file.

    Prints the request payload and the lambda's raw response payload.
    Raises HandledError when the lambda invocation fails.
    """
    file_name = args.file_path
    request = {'FileList': [file_name]}
    lambda_client = context.aws.client(
        'lambda',
        region=resource_manager.util.get_region_from_arn(
            context.config.project_stack_id))
    lambda_arn = _get_request_lambda(context)
    context_request = json.dumps(request)
    print('Using request {}'.format(context_request))
    try:
        response = lambda_client.invoke(FunctionName=lambda_arn,
                                        InvocationType='RequestResponse',
                                        Payload=context_request)
        payload_stream = response.get('Payload', {})
        payload_string = payload_stream.read()
        print('Got result {}'.format(payload_string))
    except Exception as e:
        # Bug fix: the message had no placeholder and used an unused keyword
        # argument, so the file name was silently dropped.
        raise HandledError('Could not get URL for {}'.format(file_name), e)
def get_index(s3_client, bucket_id):
    """Fetch the Cloud Gem Portal index.html from S3 and return it as text.

    Raises HandledError when the object cannot be read or the caller lacks
    access.
    """
    # Request the index file
    try:
        index_response = s3_client.get_object(
            Bucket=bucket_id, Key=constant.PROJECT_CGP_ROOT_FILE)
    except ClientError as e:
        raise HandledError(
            "Could not read from the key '{}' in the S3 bucket '{}'.".format(
                constant.PROJECT_CGP_ROOT_FILE, bucket_id), e)
    # Does the user have access to it?
    if index_response['ResponseMetadata']['HTTPStatusCode'] != 200:
        raise HandledError(
            "The user does not have access to the file index.html file. This Cloud Gem Portal site will not load.")
    return index_response['Body'].read().decode('utf-8')
def remove_entry(context, file_path, version_id=None):
    """Delete the staging-table entry for file_path.

    When content versioning is enabled, the table is keyed on
    (FileName, VersionId) and version_id must be provided. Raises
    HandledError when the delete fails.
    """
    dynamo = context.aws.client(
        'dynamodb',
        region=resource_manager.util.get_region_from_arn(
            context.config.project_stack_id))
    versioned = content_bucket.content_versioning_enabled(
        context, context.config.default_deployment)
    table_arn = get_staging_table(context, context.config.default_deployment,
                                  versioned)
    key = {'FileName': {'S': file_path}}
    if versioned:
        key['VersionId'] = {'S': version_id}
    try:
        dynamo.delete_item(TableName=table_arn, Key=key)
    except Exception as e:
        # Bug fix: the message read "for for" and had no placeholder, so the
        # file path was never included.
        raise HandledError('Could not delete entry for {}'.format(file_path),
                           e)
def create_invalidation(context, file_path, caller_reference):
    """Invalidate file_path in the project's CloudFront distribution.

    A missing distribution is not an error: the request is skipped with a
    message. Raises HandledError when the invalidation call fails.
    """
    # The create_invalidation API requires paths to start with '/'.
    if not file_path.startswith('/'):
        file_path = '/' + file_path
    distribution_id = _get_distribution_id(context)
    if not distribution_id:
        print(
            'No CloudFront distribution is found. Invalidation request will be ignored'
        )
        return
    cloudfront = context.aws.client('cloudfront')
    invalidation_batch = {
        'Paths': {
            'Quantity': 1,
            'Items': [file_path]
        },
        'CallerReference': caller_reference
    }
    try:
        cloudfront.create_invalidation(DistributionId=distribution_id,
                                       InvalidationBatch=invalidation_batch)
        print('File {} is removed from CloudFront edge caches.'.format(
            file_path))
    except Exception as e:
        raise HandledError(
            'Failed to invalidate file {} with result {}'.format(file_path, e))
def get_bucket_content_list(context: object) -> list:
    """ list the files found in the content bucket, following pagination

    Arguments
    context -- context to use
    """
    s3 = context.aws.client('s3')
    bucket_name = get_content_bucket(context)
    contents_list = []
    marker = ''
    while True:
        try:
            res = s3.list_objects(Bucket=bucket_name, Marker=marker)
        except Exception as e:
            # Bug fix: the message had no placeholder, so the bucket name
            # was silently dropped from the error.
            raise HandledError(
                'Could not list_objects on {}'.format(bucket_name), e)
        this_list = res.get('Contents', [])
        contents_list += this_list
        # Bug fix: S3 markers are lexicographic key positions, not numeric
        # offsets. The previous code passed str(count) as the Marker, which
        # skipped keys sorting before that string and never advanced
        # correctly. Continue from the last key while results are truncated.
        if not res.get('IsTruncated') or not this_list:
            break
        marker = this_list[-1]['Key']
    return contents_list
def import_zip(context, args):
    """Extract a downloaded zip into the project's asset cache folder,
    preserving any existing character mappings across the overwrite.

    Raises HandledError when the zip path does not exist.
    """
    print("extracting zip at {} into {} project assets".format(
        args.download_path, context.config.get_game_directory_name()))

    # make sure we have a zip
    if not os.path.exists(args.download_path):
        raise HandledError("Provided path does not exist")

    # make sure directory is there for extraction
    out_path = os.path.join(context.config.game_directory_path, CACHE_FOLDER)
    if not os.path.isdir(out_path):
        os.makedirs(out_path)

    # hold onto character mappings so as to not lose existing ones in an overwrite
    mappings_file_path = os.path.join(out_path, CHARACTER_MAPPINGS_FILE_NAME)
    existing_mappings = {}
    if os.path.exists(mappings_file_path):
        with open(mappings_file_path, 'r') as mappings_file:
            existing_mappings = json.load(mappings_file)

    # extract, optionally converting to wav
    extractor = extract_as_wav if args.import_as_wav else extract_all
    extractor(args.download_path, out_path)

    # If we had mappings before extraction merge them with the new ones
    if existing_mappings:
        __merge_character_mappings(existing_mappings, mappings_file_path)
def _update_file_hash_section(context, manifest_path, manifest, section):
    """Recompute the MD5 hash of every file entry in a manifest section.

    Missing files get an empty hash. Updates manifest[section] in place and
    returns True when any hash changed. Raises HandledError when section is
    None.
    """
    if section is None:
        # Bug fix: message was missing the word "section".
        raise HandledError('No section specified to update hashes for')
    files_list = _get_files_list(context, manifest, section)
    show_manifest.updating_file_hashes(manifest_path)
    files_updated = False
    for this_file in files_list:
        this_file_path = _get_path_for_file_entry(
            context, this_file, context.config.game_directory_name)
        if not os.path.isfile(this_file_path):
            show_manifest.invalid_file(this_file_path)
            this_file['hash'] = ''
            continue
        # Use a context manager so the handle is closed promptly; the
        # previous open(...).read() leaked the file handle.
        with open(this_file_path, 'rb') as content_file:
            hex_return = hashlib.md5(content_file.read()).hexdigest()
        manifest_hash = this_file.get('hash', '')
        if hex_return != manifest_hash:
            files_updated = True
            show_manifest.hash_comparison_disk(this_file_path, manifest_hash,
                                               hex_return)
            this_file['hash'] = hex_return
    manifest[section] = files_list
    return files_updated
def _send_bucket_delete_list(context, objectList):
    """Batch-delete the given object descriptors from the content bucket.

    Raises HandledError when the delete fails.
    """
    s3 = context.aws.client('s3')
    bucket_name = _get_content_bucket(context)
    try:
        s3.delete_objects(Bucket=bucket_name,
                          Delete={'Objects': objectList})
    except Exception as e:
        # Bug fix: the message had no placeholder, so the bucket name was
        # silently dropped from the error.
        raise HandledError(
            'Could not delete_objects on {}'.format(bucket_name), e)
def __generate_service_api_code(context, args):
    """Generate the C++ client component for a resource group's service API.

    Raises HandledError when the resource group does not exist.
    """
    if args.resource_group not in context.resource_groups:
        raise HandledError('The resource group {} does not exist.'.format(
            args.resource_group))
    jinja_env = __initialize_jinja(context, args)
    swagger_doc = __load_swagger(context, args)
    __generate_component_client(context, args, jinja_env, swagger_doc)
def open_portal(context, args):
    """Open the Cloud Gem Portal index page in a browser, or print its URL
    and/or configuration depending on the arguments.

    Raises HandledError when the CGP resource is missing or its
    configuration cannot be read.
    """
    project_resources = context.config.project_resources
    if not project_resources.has_key(constant.PROJECT_CGP_RESOURCE_NAME):
        raise HandledError(
            'You can not open the Cloud Gem Portal without having the Cloud Gem Portal gem installed in your project.'
        )
    cgp_s3_resource = project_resources[constant.PROJECT_CGP_RESOURCE_NAME]
    stackid = cgp_s3_resource['StackId']
    bucket_id = cgp_s3_resource['PhysicalResourceId']
    expiration = args.duration_seconds
    region = resource_manager.util.get_region_from_arn(stackid)
    s3_client = context.aws.session.client(
        's3', region, config=Config(signature_version='s3v4'))
    if args.show_current_configuration:
        try:
            context.view._output_message(
                __get_configuration(s3_client, bucket_id))
            return
        except ClientError as e:
            raise HandledError(
                "Could not read from the key '{}' in the S3 bucket '{}'.".
                format(constant.PROJECT_CGP_ROOT_SUPPORT_FILE, bucket_id), e)
    # Dead-code fix: the previous version tested a 'result' variable that
    # was always None, so its error branch could never execute. Generate
    # the presigned url unconditionally.
    index_url = update.get_index_url(s3_client, bucket_id, expiration)
    if args.show_configuration:
        context.view._output_message(__get_configuration(s3_client, bucket_id))
    if args.show_url_only:
        context.view._output_message(index_url)
    else:
        webbrowser.open_new(index_url)