def Run(self, args):
  """Generates a Terraform import script and a module file from export data.

  Args:
    args: argparse namespace carrying INPUT_PATH, output_script_file,
      output_module_file, and output_dir.

  Returns:
    None.
  """
  import_data = terraform_utils.ParseExportFiles(args.INPUT_PATH)

  # Import-script generation.
  script_file, script_dir = terraform_utils.ProcessOutputParameters(
      args.output_script_file, args.output_dir)
  if not script_file:
    script_file = terraform_utils.GenerateDefaultScriptFileName()
  if not script_dir:
    script_dir = files.GetCWD()
  with progress_tracker.ProgressTracker(
      message='Generating import script.',
      aborted_message='Aborted script generation.'):
    script_name, script_successes = terraform_utils.GenerateImportScript(
        import_data, script_file, script_dir)
  log.status.Print(
      'Successfully generated {} with imports for {} resources.'.format(
          script_name, script_successes))

  # Module-file generation.
  module_file, module_dir = terraform_utils.ProcessOutputParameters(
      args.output_module_file, args.output_dir)
  if not module_file:
    module_file = terraform_utils.TF_MODULES_FILENAME
  if not module_dir:
    module_dir = files.GetCWD()
  with progress_tracker.ProgressTracker(
      message='Generating terraform modules.',
      aborted_message='Aborted module generation.'):
    module_name, module_successes = terraform_utils.GenerateModuleFile(
        import_data, properties.VALUES.core.project.Get(required=True),
        module_file, module_dir)
  log.status.Print('Successfully generated {} with {} modules.'.format(
      module_name, module_successes))
  return None
def FromArgs(cls, args):
  """Create a Settings object from an args object."""
  project_name = properties.VALUES.core.project.Get()

  if args.IsKnownAndSpecified('service_name'):
    service_name = args.service_name
  else:
    # Service names may not include _ and upper case characters.
    service_name = os.path.basename(files.GetCWD()).replace('_', '-').lower()

  if args.IsKnownAndSpecified('image'):
    image = args.image
  else:
    if project_name:
      image = 'gcr.io/{project}/{service}'.format(
          project=project_name, service=service_name)
    else:
      image = service_name
    # Image names cannot have upper case characters. If the image name is
    # autogenerated, then make sure the image name is lower case. If the
    # user enters an image name manually, it's OK to stop the user and tell
    # them about the illegal image name.
    image = image.lower()

  if args.IsSpecified('application_default_credential'):
    credential = ApplicationDefaultCredentialSetting()
  elif args.IsSpecified('service_account'):
    credential = ServiceAccountSetting(name=args.service_account)
  else:
    credential = None

  context = os.path.abspath(args.source or files.GetCWD())
  # Skip building a skaffold config when the user asked for no skaffold file.
  builder = (None if getattr(args, 'no_skaffold_file', False)
             else _CreateBuilder(args, context))

  return cls(
      service_name=service_name,
      image=image,
      credential=credential,
      context=context,
      builder=builder,
      local_port=args.local_port,
      env_vars=(getattr(args, 'env_vars', None) or
                getattr(args, 'env_vars_file', None)),
      cloudsql_instances=getattr(args, 'cloudsql_instances', []),
      memory=getattr(args, 'memory', None),
      cpu=getattr(args, 'cpu', None),
      namespace=getattr(args, 'namespace', None),
      readiness_probe=args.readiness_probe)
def _Args(parser):
  """Add arguments for `gcloud app gen-config`."""
  # NOTE: registration order is preserved — it determines help output order.
  parser.add_argument(
      'source_dir',
      nargs='?',
      default=files.GetCWD(),
      help='The source directory to fingerprint.')
  parser.add_argument(
      '--config',
      default=None,
      help=('The yaml file defining the service configuration. This is '
            'normally one of the generated files, but when generating a '
            'custom runtime there can be an app.yaml containing parameters.'))
  # TODO(b/24843650): Enumerate the valid runtimes for vm: true/env: 2
  allowed_runtimes = [
      rt for rt in appinfo.GetAllRuntimes() if rt not in ('vm', 'custom')
  ]
  parser.add_argument(
      '--runtime',
      default=None,
      help=('Generate config files for a given runtime. Can be used in '
            'conjunction with --custom. Allowed runtimes are: ' +
            ', '.join(allowed_runtimes) + '.'))
  parser.add_argument(
      '--custom',
      action='store_true',
      default=False,
      help=('If true, generate config files for a custom runtime. This '
            'will produce a Dockerfile, a .dockerignore file and an app.yaml '
            '(possibly other files as well, depending on the runtime).'))
def __init__(self, src_dir):
  """Validates src_dir and sets up source/temp directories.

  Falls back to the current working directory when src_dir is None.
  """
  self._CheckIfPathExists(src_dir)
  # A provided path must resolve to a directory, not a file.
  if src_dir and not os.path.isdir(src_dir):
    raise errors.SourcePathIsNotDirectoryError(src_dir)
  self._src_dir = files.GetCWD() if src_dir is None else src_dir
  self._tmp_dir = files.TemporaryDirectory()
def FromArgs(cls, args):
  """Create a LocalRuntimeFiles object from an args object."""
  project_name = properties.VALUES.core.project.Get()

  if args.IsSpecified('service_name'):
    service_name = args.service_name
  else:
    # Derive the service name from the Dockerfile's directory name.
    containing_dir = os.path.dirname(
        os.path.join(files.GetCWD(), args.dockerfile))
    service_name = os.path.basename(containing_dir).replace('_', '-')

  if args.IsSpecified('image_name'):
    image_name = args.image_name
  elif project_name:
    image_name = 'gcr.io/{project}/{service}'.format(
        project=project_name, service=service_name)
  else:
    image_name = service_name

  return cls(service_name, image_name, args.service_account, args.dockerfile,
             args.build_context_directory, args.builder, args.local_port,
             args.env_vars or args.env_vars_file, args.cloudsql_instances,
             args.memory_limit, args.cpu_limit)
def ProcessOutputParameters(output_file=None, output_dir=None):
  """Helper function for generating output file and directory.

  Args:
    output_file: optional path to the output file; may be relative.
    output_dir: optional output directory; takes precedence over the
      directory part of output_file when both are given.

  Returns:
    A (dest_file, dest_dir) tuple; either element may be None.
  """
  output_file = output_file.strip() if output_file else None
  output_dir = os.path.abspath(output_dir.strip()) if output_dir else None
  dest_file, dest_dir = None, None

  if output_file:
    # Confirm before clobbering an existing file.
    if os.path.isfile(output_file):
      console_io.PromptContinue(
          '{} already exists.'.format(output_file),
          prompt_string='Do you want to overwrite?',
          default=True,
          cancel_string='Aborted script generation.',
          cancel_on_no=True)
    dest_file = os.path.basename(output_file)
    dest_dir = os.path.dirname(output_file) or files.GetCWD()
    if os.path.isdir(dest_dir) and not files.HasWriteAccessInDir(dest_dir):
      raise TerraformGenerationError(
          'Error writing output file: {} is not writable'.format(dest_dir))

  # Output directory: when given, it wins over any file-derived values.
  if output_dir:
    if (os.path.isdir(output_dir)
        and not files.HasWriteAccessInDir(output_dir)):
      raise ValueError('Cannot write output to directory {}. '
                       'Please check permissions.'.format(output_dir))
    dest_file, dest_dir = None, output_dir

  return dest_file, dest_dir
def testUntrustedBuilder(self):
  """A non-allowlisted builder image gets trust=False, devmode=False."""
  args = self.parser.parse_args(['--builder=my-builder:latest'])
  actual = local._CreateBuilder(args, files.GetCWD())
  expected = local.BuildpackBuilder(
      builder='my-builder:latest', trust=False, devmode=False)
  self.assertEqual(actual, expected)
def testTrustedDevmodeBuilder(self):
  """The gcr.io/buildpack builder gets trust=True, devmode=True."""
  args = self.parser.parse_args(['--builder=gcr.io/buildpack/builder:v1'])
  actual = local._CreateBuilder(args, files.GetCWD())
  expected = local.BuildpackBuilder(
      builder='gcr.io/buildpack/builder:v1', trust=True, devmode=True)
  self.assertEqual(actual, expected)
def FromArgs(cls, args):
  """Create a Settings object from an args object."""
  project_name = properties.VALUES.core.project.Get()

  if args.IsSpecified('service_name'):
    service_name = args.service_name
  else:
    # Service names use dashes, not underscores.
    service_name = os.path.basename(files.GetCWD()).replace('_', '-')

  if args.IsSpecified('image'):
    image = args.image
  elif project_name:
    image = 'gcr.io/{project}/{service}'.format(
        project=project_name, service=service_name)
  else:
    image = service_name

  if args.IsSpecified('application_default_credential'):
    credential = ApplicationDefaultCredentialSetting()
  elif args.IsSpecified('service_account'):
    credential = ServiceAccountSetting(name=args.service_account)
  else:
    credential = None

  context = os.path.abspath(args.source or files.GetCWD())
  builder = _CreateBuilder(args, context)

  return cls(
      service_name=service_name,
      image=image,
      credential=credential,
      context=context,
      builder=builder,
      local_port=args.local_port,
      env_vars=args.env_vars or args.env_vars_file,
      cloudsql_instances=args.cloudsql_instances,
      memory=args.memory,
      cpu=args.cpu,
      namespace=args.namespace if 'namespace' in args else None)
def Defaults(cls):
  """The settings you get with no args or other overrides."""
  # Service names may not include _ and upper case characters.
  service_name = os.path.basename(files.GetCWD()).replace('_', '-').lower()
  return cls(
      builder=DockerfileBuilder(dockerfile=os.path.abspath('Dockerfile')),
      cloudsql_instances=[],
      context=os.path.abspath(files.GetCWD()),
      image=_DefaultImageName(service_name),
      service_name=service_name,
  )
def ValidateLocalRunArgs(args):
  """Validates and normalizes the arguments of the `local-run` command.

  Resolves --local-package-path to an absolute directory (defaulting to the
  current working directory) and writes the result back onto args.

  Args:
    args: argparse namespace for `local-run`.

  Returns:
    The same args namespace, with local_package_path normalized.

  Raises:
    exceptions.InvalidArgumentException: if --local-package-path does not
      refer to an existing directory.
  """
  # Fixed local-variable typo: was `args_local_package_pach`.
  local_package_path = args.local_package_path
  if local_package_path:
    work_dir = os.path.abspath(files.ExpandHomeDir(local_package_path))
    # isdir() is already False for nonexistent paths, so a single check
    # covers both "missing" and "exists but is not a directory".
    if not os.path.isdir(work_dir):
      raise exceptions.InvalidArgumentException(
          '--local-package-path',
          "Directory '{}' is not found.".format(work_dir))
  else:
    work_dir = files.GetCWD()
  args.local_package_path = work_dir

  _ValidBuildArgsOfLocalRun(args)
  return args
def ClearPyCache(root_dir=None):
  """Removes generic `__pycache__` folder and '*.pyc' '*.pyo' files.

  Args:
    root_dir: directory to clean; defaults to the current working directory.

  Returns:
    True if anything was removed, False otherwise.
  """
  target_dir = root_dir or files.GetCWD()
  removed_anything = False
  for entry in os.listdir(target_dir):
    path = os.path.join(target_dir, entry)
    if os.path.isdir(path):
      # Only the conventional bytecode cache folder is removed.
      if entry == '__pycache__':
        files.RmTree(path)
        removed_anything = True
    elif os.path.splitext(entry)[1] in ('.pyc', '.pyo'):
      os.remove(path)
      removed_anything = True
  return removed_anything
def Run(self, args):
  """Writes Kubernetes and Skaffold configuration files for a service.

  Prompts for the service name and image tag when they are not given,
  optionally wires in a development service-account secret, then emits the
  kubernetes yaml and a skaffold.yaml referencing it.
  """
  project_name = properties.VALUES.core.project.Get(required=True)

  if args.IsSpecified('service_name'):
    service_name = args.service_name
  else:
    suggested_name = os.path.basename(
        os.path.dirname(os.path.join(files.GetCWD(), args.dockerfile)))
    service_name = console_io.PromptWithDefault(
        message='Service name', default=suggested_name)

  if args.IsSpecified('image_name'):
    image_name = args.image_name
  else:
    image_name = console_io.PromptWithDefault(
        message='Docker image tag',
        default='gcr.io/{project}/{service}'.format(
            project=project_name, service=service_name))

  kubernetes_configs = local.CreatePodAndService(service_name, image_name)

  if args.service_account:
    # Mint a key for the development service account and mount it as a secret.
    service_account = local.CreateDevelopmentServiceAccount(
        args.service_account)
    private_key_json = local.CreateServiceAccountKey(service_account)
    kubernetes_configs.append(
        local.LocalDevelopmentSecretSpec(private_key_json))
    local.AddServiceAccountSecret(kubernetes_configs)

  with files.FileWriter(args.kubernetes_file) as output:
    yaml.dump_all(kubernetes_configs, output)
  kubernetes_yaml_paths = [args.kubernetes_file]

  skaffold_yaml = yaml.load(_SKAFFOLD_TEMPLATE.format(
      image_name=image_name,
      context_path=(args.build_context_directory
                    or os.path.dirname(args.dockerfile) or '.')))
  manifests = yaml_helper.GetOrCreate(
      skaffold_yaml, ('deploy', 'kubectl', 'manifests'), constructor=list)
  manifests.extend(kubernetes_yaml_paths)
  with files.FileWriter(args.skaffold_file) as output:
    yaml.dump(skaffold_yaml, output)
def Run(self, args):
  """This is what gets called when the user runs this command.

  Args:
    args: an argparse namespace. All the arguments that were provided to
      this command invocation.

  Returns:
    Some value that we want to have printed later.
  """
  # Mimic behavior of ai-platform jobs submit training.
  package_path = args.package_path or files.GetCWD()
  package_root = os.path.dirname(os.path.abspath(package_path))

  # NOTE: extends args.user_args in place when it is non-empty, matching
  # the original behavior.
  user_args = args.user_args or []
  if args.job_dir:
    user_args.extend(('--job-dir', args.job_dir))

  worker_count = 2 if args.worker_count is None else args.worker_count
  if args.parameter_server_count is None:
    ps_count = 2
  else:
    ps_count = args.parameter_server_count

  if args.distributed:
    retval = local_train.RunDistributed(
        args.module_name,
        package_root,
        ps_count,
        worker_count,
        args.evaluator_count or 0,
        args.start_port,
        user_args=user_args)
  else:
    # Warn about flags that only make sense for distributed training.
    if args.parameter_server_count:
      log.warning(
          _BAD_FLAGS_WARNING_MESSAGE.format(flag='--parameter-server-count'))
    if args.worker_count:
      log.warning(_BAD_FLAGS_WARNING_MESSAGE.format(flag='--worker-count'))
    retval = local_train.MakeProcess(
        args.module_name,
        package_root,
        args=user_args,
        task_type=local_train.GetPrimaryNodeName())

  # Don't raise an exception because the users will already see the message.
  # We want this to mimic calling the script directly as much as possible.
  self.exit_code = retval
def FromArgs(cls, args):
  """Create a LocalRuntimeFiles object from an args object."""
  project_name = properties.VALUES.core.project.Get(required=True)

  if args.IsSpecified('service_name'):
    service_name = args.service_name
  else:
    suggested_name = os.path.basename(
        os.path.dirname(os.path.join(files.GetCWD(), args.dockerfile)))
    service_name = console_io.PromptWithDefault(
        message='Service name', default=suggested_name)

  if args.IsSpecified('image_name'):
    image_name = args.image_name
  else:
    image_name = console_io.PromptWithDefault(
        message='Docker image tag',
        default='gcr.io/{project}/{service}'.format(
            project=project_name, service=service_name))

  return cls(service_name, image_name, args.service_account, args.dockerfile,
             args.build_context_directory)
def FromArgs(cls, args):
  """Create a LocalRuntimeFiles object from an args object."""
  project_name = properties.VALUES.core.project.Get()

  if args.IsSpecified('service_name'):
    service_name = args.service_name
  else:
    containing_dir = os.path.dirname(
        os.path.join(files.GetCWD(), args.dockerfile))
    service_name = os.path.basename(containing_dir).replace('_', '-')

  if args.IsSpecified('image_name'):
    image_name = args.image_name
  elif project_name:
    image_name = 'gcr.io/{project}/{service}'.format(
        project=project_name, service=service_name)
  else:
    image_name = service_name

  if args.IsSpecified('application_default_credential'):
    credential = ApplicationDefaultCredentialSetting()
  elif args.IsSpecified('service_account'):
    credential = ServiceAccountSetting(name=args.service_account)
  else:
    credential = None

  return cls(
      service_name=service_name,
      image_name=image_name,
      credential=credential,
      dockerfile=args.dockerfile,
      build_context_directory=args.build_context_directory,
      builder=args.builder,
      local_port=args.local_port,
      env_vars=args.env_vars or args.env_vars_file,
      cloudsql_instances=args.cloudsql_instances,
      memory_limit=args.memory_limit,
      cpu_limit=args.cpu_limit)
def Run(self, args):
  """Renders a Terraform init module (main.tf) in the current directory."""
  do_override, billing_project = self._GetBillingParams(args)
  template_context = {
      'project': args.project or properties.VALUES.core.project.Get(),
      'region': args.region or properties.VALUES.compute.region.Get(),
      'zone': args.zone or properties.VALUES.compute.zone.Get(),
      'user_override': do_override,
      'billing_project': billing_project,
  }
  path = os.path.join(files.GetCWD(), 'main.tf')
  # Never clobber an existing main.tf without confirmation.
  if os.path.isfile(path):
    console_io.PromptContinue(
        '{} Exists.'.format(path),
        prompt_string='Overwrite?',
        cancel_on_no=True,
        cancel_string='Init Provider cancelled.')
  with progress_tracker.ProgressTracker('Creating Terraform init module'):
    with files.FileWriter(path, create_path=True) as f:
      INIT_FILE_TEMPLATE.render_context(
          runtime.Context(f, **template_context))
  log.status.Print('Created Terraform module file {path}.'.format(path=path))
def __init__(self, src_dir):
  """Initializes source and temporary directories.

  Args:
    src_dir: source directory to use; None means the current working
      directory.
  """
  self._src_dir = files.GetCWD() if src_dir is None else src_dir
  self._tmp_dir = files.TemporaryDirectory()
def main(argv=None):
  """Regenerates apitools clients described by a regeneration config.

  Args:
    argv: command-line arguments; defaults to sys.argv.

  Raises:
    UnknownApi: if --api names an api (or api/version) absent from the
      config's "apis" section.
  """
  if argv is None:
    argv = sys.argv
  parser = argparse.ArgumentParser(
      description='Regenerates apitools clients in given directory.')
  parser.add_argument('--config', required=True,
                      help='Regeneration config filename.')
  # Fixed help text: it was a copy-paste of --config's help.
  parser.add_argument('--base-dir', default=files.GetCWD(),
                      help='Base directory for regenerated clients.')
  parser.add_argument('--api',
                      help='api_name or api_name/api_version to regenerate. '
                           'If api_version is omitted then all versions are '
                           'regenerated. If this argument is omitted all apis '
                           'and their versions will be regenerated.')
  parser.add_argument('-l', '--log-level',
                      choices=['DEBUG', 'INFO', 'WARNING', 'ERROR',
                               'CRITICAL'],
                      default='INFO',
                      help='Set the logging level')
  args = parser.parse_args(argv[1:])

  if args.log_level:
    logging.basicConfig(
        format='%(asctime)s %(filename)s:%(lineno)d %(message)s',
        level=getattr(logging, args.log_level))

  config = yaml.load_path(args.config)
  logging.debug('Config %s', pprint.pformat(config))

  root_dir = config['root_dir']
  logging.debug('Base dir %s', args.base_dir)

  if args.api is not None:
    # Either "name" or "name/version" was requested explicitly.
    if '/' in args.api:
      api_name, api_version = args.api.split('/')
    else:
      api_name, api_version = args.api, None
    api_section = config['apis'].get(api_name)
    if api_section is None:
      raise UnknownApi('api [{api_name}] not found in "apis" section of '
                       '{config_file}'
                       .format(api_name=api_name, config_file=args.config))
    if api_version:
      api_config = api_section.get(api_version)
      if api_config is None:
        raise UnknownApi(
            'api version [{api_version}] is not one of the '
            'defined versions [{defined_versions}] of '
            '[{api_name}] found in "apis" section of {config_file}'
            .format(api_version=api_version,
                    api_name=api_name,
                    defined_versions=','.join(sorted(api_section.keys())),
                    config_file=args.config))
      regenerate_list = [(api_name, api_version, api_config)]
    else:
      # No version given: regenerate every version of the named api.
      regenerate_list = [
          (api_name, api_version, api_config)
          for api_version, api_config in six.iteritems(api_section)]
  else:
    # No --api given: regenerate everything in the config.
    regenerate_list = [
        (api_name, api_version, api_config)
        for api_name, api_version_config in six.iteritems(config['apis'])
        for api_version, api_config in six.iteritems(api_version_config)
    ]

  for api_name, api_version, api_config in sorted(regenerate_list):
    logging.info('Generating %s %s', api_name, api_version)
    generate.GenerateApi(args.base_dir, root_dir,
                         api_name, api_version, api_config)
    generate.GenerateResourceModule(args.base_dir, root_dir,
                                    api_name, api_version,
                                    api_config['discovery_doc'],
                                    api_config.get('resources', {}))
  generate.GenerateApiMap(args.base_dir, root_dir, config['apis'])
def _GetTempAssetInventoryFilePath():
  """Create a temporary file path for AssetInventory export/list results."""
  # Timestamp makes the file name unique per invocation (to the millisecond).
  timestamp = times.FormatDateTime(times.Now(), fmt='%Y%m%dT%H%M%S%3f')
  return os.path.join(
      files.GetCWD(), 'gcloud_assetexport_temp_{}.json'.format(timestamp))