Example #1
def CreateServiceAccountKey(service_account_name):
  """Create a service account key.

  Args:
    service_account_name: Name of the service account.

  Returns:
    The contents of the generated private key file as a string.
  """
  default_credential_path = os.path.join(
      config.Paths().global_config_dir,
      _Utf8ToBase64(service_account_name) + '.json')
  credential_file_path = encoding.GetEncodedValue(os.environ,
                                                  'LOCAL_CREDENTIAL_PATH',
                                                  default_credential_path)
  if os.path.exists(credential_file_path):
    return files.ReadFileContents(credential_file_path)

  warning_msg = ('Creating a user-managed service account key for '
                 '{service_account_name}. This service account key will be '
                 'the default credential pointed to by '
                 'GOOGLE_APPLICATION_CREDENTIALS in the local development '
                 'environment. The user is responsible for the storage, '
                 'rotation, and deletion of this key. A copy of this key will '
                 'be stored at {local_key_path}.\n'
                 'Only use service accounts from a test project. Do not use '
                 'service accounts from a production project.').format(
                     service_account_name=service_account_name,
                     local_key_path=credential_file_path)
  console_io.PromptContinue(
      message=warning_msg, prompt_string='Continue?', cancel_on_no=True)

  service = apis.GetClientInstance('iam', 'v1')
  message_module = service.MESSAGES_MODULE

  create_key_request = (
      message_module.IamProjectsServiceAccountsKeysCreateRequest(
          name=service_account_name,
          createServiceAccountKeyRequest=message_module
          .CreateServiceAccountKeyRequest(
              privateKeyType=message_module.CreateServiceAccountKeyRequest
              .PrivateKeyTypeValueValuesEnum.TYPE_GOOGLE_CREDENTIALS_FILE)))
  key = service.projects_serviceAccounts_keys.Create(create_key_request)

  files.WriteFileContents(credential_file_path, key.privateKeyData)

  return six.ensure_text(key.privateKeyData)
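
The caching above keys off a deterministic per-account file name. Below is a minimal, dependency-free sketch of that lookup logic; the body of _Utf8ToBase64 is not shown in the example, so the version here is an assumption:

import base64
import os


def _Utf8ToBase64(s):
    # Assumed equivalent of the helper used above: URL-safe base64 of the
    # UTF-8 bytes, returned as text.
    return base64.urlsafe_b64encode(s.encode('utf-8')).decode('ascii')


def default_key_path(config_dir, service_account_name):
    # Mirrors the lookup above: LOCAL_CREDENTIAL_PATH overrides the default
    # <config_dir>/<base64(name)>.json location.
    default = os.path.join(
        config_dir, _Utf8ToBase64(service_account_name) + '.json')
    return os.environ.get('LOCAL_CREDENTIAL_PATH', default)


print(default_key_path('/tmp/gcloud', 'sa@test-project.iam.gserviceaccount.com'))
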
Example #2

    def Run(self, args):
        """Create service account credentials."""

        file_content, is_json = _IsJsonFile(args.key_file)
        if is_json:
            cred = auth_service_account.CredentialsFromAdcDict(file_content)
            if args.password_file or args.prompt_for_password:
                raise c_exc.InvalidArgumentException(
                    '--password-file',
                    'A .json service account key does not require a password.')
            account = cred.service_account_email
            if args.account and args.account != account:
                raise c_exc.InvalidArgumentException(
                    'ACCOUNT',
                    'The given account name does not match the account name in the key '
                    'file.  This argument can be omitted when using .json keys.'
                )
        else:
            account = args.account
            if not account:
                raise c_exc.RequiredArgumentException(
                    'ACCOUNT', 'An account is required when using .p12 keys')
            password = None
            if args.password_file:
                try:
                    password = files.ReadFileContents(
                        args.password_file).strip()
                except files.Error as e:
                    raise c_exc.UnknownArgumentException('--password-file', e)
            elif args.prompt_for_password:
                password = getpass.getpass('Password: ')

            # Create credentials from the .p12 key and activate them.
            cred = auth_service_account.CredentialsFromP12Key(
                file_content, account, password)

        c_store.ActivateCredentials(account, cred)
        log.status.Print(
            'Activated service account credentials for: [{0}]'.format(account))
Example #3
def ConstructMetadataMessage(message_classes,
                             metadata=None,
                             metadata_from_file=None,
                             existing_metadata=None):
  """Creates a Metadata message from the given dicts of metadata.

  Args:
    message_classes: An object containing API message classes.
    metadata: A dict mapping metadata keys to metadata values or None.
    metadata_from_file: A dict mapping metadata keys to file names
      containing the keys' values or None.
    existing_metadata: If not None, the given metadata values are
      combined with this Metadata message.

  Raises:
    ToolException: If metadata and metadata_from_file contain duplicate
      keys or if there is a problem reading the contents of a file in
      metadata_from_file.

  Returns:
    A Metadata protobuf.
  """
  metadata = metadata or {}
  metadata_from_file = metadata_from_file or {}

  new_metadata_dict = copy.deepcopy(metadata)
  for key, file_path in six.iteritems(metadata_from_file):
    if key in new_metadata_dict:
      raise exceptions.ToolException(
          'Encountered duplicate metadata key [{0}].'.format(key))
    new_metadata_dict[key] = files.ReadFileContents(file_path)

  existing_metadata_dict = _MetadataMessageToDict(existing_metadata)
  existing_metadata_dict.update(new_metadata_dict)
  try:
    _ValidateSshKeys(existing_metadata_dict)
  except InvalidSshKeyException as e:
    log.warning(e)

  new_metadata_message = _DictToMetadataMessage(message_classes,
                                                existing_metadata_dict)

  if existing_metadata:
    new_metadata_message.fingerprint = existing_metadata.fingerprint

  return new_metadata_message
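
Stripped of the protobuf plumbing, the merge semantics are: inline and file-backed keys must not collide, and the combined values override existing metadata. A plain-dict sketch of just that logic (a hypothetical helper, not the gcloud API):

import copy


def merge_metadata(metadata=None, metadata_from_file=None, existing=None):
    metadata = metadata or {}
    metadata_from_file = metadata_from_file or {}
    merged = copy.deepcopy(metadata)
    for key, file_path in metadata_from_file.items():
        if key in merged:
            raise ValueError(
                'Encountered duplicate metadata key [{0}].'.format(key))
        with open(file_path) as f:
            merged[key] = f.read()
    result = dict(existing or {})
    result.update(merged)  # new values win over existing ones
    return result
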
Example #4
def OnlineEdit(text):
    """Edit will edit the provided text.

  Args:
    text: The initial text blob to provide for editing.

  Returns:
    The edited text blob.

  Raises:
    NoSaveException: If the user did not save the temporary file.
    EditorException: If the process running the editor has a
        problem.
  """
    fname = tempfile.NamedTemporaryFile(suffix='.txt').name
    files.WriteFileContents(fname, text)

    # Get the mod time, so we can check if anything was actually done.
    start_mtime = FileModifiedTime(fname)
    if (platforms.OperatingSystem.Current() is
            platforms.OperatingSystem.WINDOWS):
        try:
            SubprocessCheckCall([fname], shell=True)
        except subprocess.CalledProcessError as error:
            raise EditorException('Your editor exited with return code {0}; '
                                  'please try again.'.format(error.returncode))
    else:
        try:
            editor = os.getenv('EDITOR', 'vi')
            # We use shell=True and manual smashing of the args to permit users to set
            # EDITOR="emacs -nw", or similar things.
            # We use subprocess.check_call instead of subprocess.check_output
            # because the editor needs a direct connection to the terminal
            # (check_output would capture its stdout).
            SubprocessCheckCall('{editor} {file}'.format(editor=editor,
                                                         file=fname),
                                shell=True)
        except subprocess.CalledProcessError as error:
            raise EditorException('Your editor exited with return code {0}; '
                                  'please try again. You may set the EDITOR '
                                  'environment to use a different text '
                                  'editor.'.format(error.returncode))
    end_mtime = FileModifiedTime(fname)
    if start_mtime == end_mtime:
        raise NoSaveException('edit aborted by user')

    return files.ReadFileContents(fname)
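
The mtime comparison is the whole save-detection mechanism: if the editor never wrote the file, the edit is treated as aborted. A self-contained sketch of the same pattern using only the standard library:

import os
import subprocess
import tempfile


def edit_text(text, editor='vi'):
    fd, fname = tempfile.mkstemp(suffix='.txt')
    with os.fdopen(fd, 'w') as f:
        f.write(text)
    start_mtime = os.path.getmtime(fname)
    subprocess.check_call('{0} {1}'.format(editor, fname), shell=True)
    if os.path.getmtime(fname) == start_mtime:
        raise RuntimeError('edit aborted by user')  # file was never saved
    with open(fname) as f:
        return f.read()
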
Example #5
  def FromFile(cls, file_path):
    """Create a KnownHosts object given a known_hosts_file.

    Args:
      file_path: str, path to the known_hosts_file.

    Returns:
      KnownHosts object corresponding to the file. If the file could not be
      opened, the KnownHosts object will have no entries.
    """
    try:
      known_hosts = files.ReadFileContents(file_path).splitlines()
    except files.Error as e:
      known_hosts = []
      log.debug('SSH Known Hosts File [{0}] could not be opened: {1}'
                .format(file_path, e))
    return KnownHosts(known_hosts, file_path)
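
Each entry in the returned list is one raw known_hosts line (`host[,host...] keytype base64key`). A hypothetical helper showing how such entries might be queried:

def has_host_key(known_hosts_lines, host):
    # The first whitespace-separated field is the (comma-separated) host list.
    for line in known_hosts_lines:
        fields = line.split()
        if fields and host in fields[0].split(','):
            return True
    return False
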
Example #6
def _Load(path, cli=None, force=False, verbose=False):
  """Load() helper. Returns a tree or None if the tree failed to load."""
  try:
    if not force:
      tree = json.loads(files.ReadFileContents(path))
      if _IsUpToDate(tree, path, bool(cli), verbose):
        return tree
      del tree
    # Clobber path to make sure it's regenerated.
    try:
      os.remove(path)
    except OSError:
      pass
  except files.Error as e:
    if not cli:
      raise CliTreeLoadError(six.text_type(e))
  return None
Example #7

def GetFileContents(file):
  """Returns the file contents and whether or not the file contains binary data.

  Args:
    file: A file path.

  Returns:
    A tuple of the file contents and whether or not the file contains binary
    contents.
  """
  try:
    contents = file_utils.ReadFileContents(file)
    is_binary = False
  except UnicodeError:
    contents = file_utils.ReadBinaryFileContents(file)
    is_binary = True
  return contents, is_binary
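
The same text-first, binary-fallback behavior can be reproduced with builtins alone (a sketch; the file_utils wrappers add gcloud-specific error translation on top):

def read_text_or_binary(path):
    try:
        with open(path, encoding='utf-8') as f:
            return f.read(), False      # decoded cleanly: not binary
    except UnicodeError:
        with open(path, 'rb') as f:
            return f.read(), True       # undecodable: treat as binary
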
Example #8
  def _GetCID():
    """Gets the client id from the config file, or generates a new one.

    Returns:
      str, The hex string of the client id.
    """
    uuid_path = config.Paths().analytics_cid_path
    cid = None
    if os.path.exists(uuid_path):
      cid = files.ReadFileContents(uuid_path)
      if cid:
        return cid

    cid = uuid.uuid4().hex  # A random UUID
    files.MakeDir(os.path.dirname(uuid_path))
    files.WriteFileContents(uuid_path, cid)
    return cid
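
The client id is nothing more than a random UUID4 hex string persisted on first use. A dependency-free sketch of the same read-or-create pattern:

import os
import uuid


def get_or_create_cid(uuid_path):
    if os.path.exists(uuid_path):
        with open(uuid_path) as f:
            cid = f.read()
        if cid:                      # ignore an empty file
            return cid
    cid = uuid.uuid4().hex           # 32 lowercase hex characters
    parent = os.path.dirname(uuid_path)
    if parent:
        os.makedirs(parent, exist_ok=True)
    with open(uuid_path, 'w') as f:
        f.write(cid)
    return cid
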
Example #9
    def testUploadWithTooLargeFiles(self):
        self.SetGetRepoSuccess()
        original_size_threshold = upload.UploadManager.SIZE_THRESHOLD

        try:
            self.assertEqual(256 * 2**10, original_size_threshold)
            full_paths = self.CreateFiles(['file1', 'filethatistoolarge'])
            upload.UploadManager.SIZE_THRESHOLD = len(
                file_utils.ReadFileContents(full_paths[1])) - 1

            result = upload.UploadManager().Upload('branch1', self.tmpdir)

            self.push_mock.assert_called_with('branch1', self.tmpdir,
                                              full_paths[:1])
            self.assertEqual(1, result['files_skipped'])
        finally:
            upload.UploadManager.SIZE_THRESHOLD = original_size_threshold
Example #10
def _ParsePemChainFromFile(pem_chain_file):
    """Parses a pem chain from a file.

  Args:
    pem_chain_file: file containing the pem_chain.

  Returns:
    The string list of certs in the chain.
  """
    try:
        pem_chain_input = files.ReadFileContents(pem_chain_file)
        return pem_utils.ValidateAndParsePemChain(pem_chain_input)

    except (files.Error, OSError, IOError):
        raise exceptions.InvalidArgumentException(
            'pem-chain', "Could not read provided PEM chain file '{}'.".format(
                pem_chain_file))
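
pem_utils.ValidateAndParsePemChain is gcloud-internal; a rough standalone equivalent (an assumption about its behavior, not its implementation) just splits the concatenated chain on certificate markers:

import re

_CERT_RE = re.compile(
    r'-----BEGIN CERTIFICATE-----.*?-----END CERTIFICATE-----', re.DOTALL)


def parse_pem_chain(pem_text):
    certs = _CERT_RE.findall(pem_text)
    if not certs:
        raise ValueError('No certificates found in PEM chain.')
    return certs
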
Example #11
    def _CheckDisk(self):
        """Reads cache from disk."""
        gce_cache_path = config.Paths().GCECachePath()
        with self.file_lock:
            try:
                mtime = os.stat(gce_cache_path).st_mtime
                expiration_time = mtime + _GCE_CACHE_MAX_AGE
                gcecache_file_value = files.ReadFileContents(gce_cache_path)
                return gcecache_file_value == six.text_type(True), expiration_time
            except (OSError, IOError, files.Error):
                # Failed to read Google Compute Engine credential cache file.
                # This could be due to permission reasons, or because it
                # doesn't yet exist. Can't log here because the log module
                # depends (indirectly) on this one.
                return None, None
Example #12

def _ReadFileMap(src):
    """Returns the recursive file map for the directory.

  Args:
    src: str, path to directory.

  Returns:
    {str: ...}, recursive map structure from path to other file map if
    directory or to a string representing the file contents.
  """
    ret = {}
    for name in os.listdir(src):
        path = os.path.join(src, name)
        if os.path.isfile(path):
            ret[name] = files.ReadFileContents(path)
        else:
            ret[name] = _ReadFileMap(path)
    return ret
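
For a tree like cfg/app.yaml plus cfg/env/prod.txt, the result is {'app.yaml': '...', 'env': {'prod.txt': '...'}}. A hypothetical inverse view that flattens such a map back into (path, contents) pairs:

import os


def flatten_file_map(file_map, prefix=''):
    for name, value in file_map.items():
        path = os.path.join(prefix, name)
        if isinstance(value, dict):      # nested directory map
            yield from flatten_file_map(value, path)
        else:                            # leaf: file contents string
            yield path, value
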
Example #13
def ReadFromFileOrStdin(path, binary):
    """Returns the contents of the specified file or stdin if path is '-'.

  Args:
    path: str, The path of the file to read.
    binary: bool, True to open the file in binary mode.

  Raises:
    Error: If the file cannot be read.

  Returns:
    The contents of the file.
  """
    if path == '-':
        return ReadStdin(binary=binary)
    if binary:
        return files.ReadBinaryFileContents(path)
    return files.ReadFileContents(path)
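
ReadStdin is the gcloud helper behind the '-' convention; a plain-Python sketch of what both modes amount to:

import sys


def read_stdin(binary=False):
    # Bytes from the underlying buffer, or decoded text from stdin itself.
    return sys.stdin.buffer.read() if binary else sys.stdin.read()
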
Example #14
def SetSource(args, workflow, updated_fields):
    """Set source for the workflow based on the arguments.

  Also update updated_fields accordingly.
  Currently only local source file is supported.

  Args:
    args: args passed to the command.
    workflow: the workflow in which to set the source configuration.
    updated_fields: a list to which an appropriate source field will be added.
  """
    if args.source:
        try:
            workflow.sourceContents = files.ReadFileContents(args.source)
        except files.MissingFileError:
            raise exceptions.BadArgumentException(
                '--source', 'specified file does not exist.')
        updated_fields.append('sourceContents')
Example #15
    def testURLContentsGetPreferredAuthForClusterWithLdap(self):
        config_contents = files.ReadFileContents(self.v2_ex1_path)

        self.StartObjectPatch(getpass, 'getpass').return_value = 'password'
        self.WriteInput('5')
        self.WriteInput('user')
        auth_method, username, passwd = anthoscli_backend.GetPreferredAuthForCluster(
            cluster='testcluster',
            login_config='https://www.example.com',
            config_contents=config_contents,
            is_url=True,
            force_update=True)
        self.assertEqual(auth_method, 'ldap2')
        self.assertEqual(username, 'dXNlcg==')
        self.assertEqual(passwd, 'cGFzc3dvcmQ=')
        self.AssertErrNotContains(
            'This will overwrite current preferred auth method')
        self.AssertErrContains('PROMPT_CHOICE')
Example #16
def AppengineWebMatcher(path, stager, appyaml):
    """Generate a Service from an appengine-web.xml source path.

  This function is a path matcher that returns a Service if and only if:
  - `path` points to either `.../WEB-INF/appengine-web.xml` or `<app-dir>` where
    `<app-dir>/WEB-INF/appengine-web.xml` exists.
  - the xml-file is a valid appengine-web.xml file according to the Java stager.

  The service will be staged according to the stager as a java-xml runtime,
  which is defined in staging.py.

  Args:
    path: str, Unsanitized absolute path, may point to a directory or a file of
        any type. There is no guarantee that it exists.
    stager: staging.Stager, stager that will be invoked if there is a runtime
        and environment match.
    appyaml: str or None, the app.yaml location to be used for deployment.

  Raises:
    staging.StagingCommandFailedError, staging command failed.

  Returns:
    Service, fully populated with entries that respect a staged deployable
        service, or None if the path does not match the pattern described.
  """
    suffix = os.path.join(os.sep, 'WEB-INF', 'appengine-web.xml')
    app_dir = path[:-len(suffix)] if path.endswith(suffix) else path
    descriptor = os.path.join(app_dir, 'WEB-INF', 'appengine-web.xml')
    if not os.path.isfile(descriptor):
        return None

    xml_file = files.ReadFileContents(descriptor)
    if '<application>' in xml_file or '<version>' in xml_file:
        log.warning('<application> and <version> elements in ' +
                    '`appengine-web.xml` are not respected')

    staging_dir = stager.Stage(descriptor, app_dir, 'java-xml', env.STANDARD,
                               appyaml)
    if not staging_dir:
        # After GA launch of appengine-web.xml support, this should never occur.
        return None
    yaml_path = os.path.join(staging_dir, 'app.yaml')
    service_info = yaml_parsing.ServiceYamlInfo.FromFile(yaml_path)
    return Service(descriptor, app_dir, service_info, staging_dir)
Example #17
    def __init__(self):
        self.use_client_certificate = (
            properties.VALUES.context_aware.use_client_certificate.GetBool())
        self._cert_and_key_path = None
        self.client_cert_path = None
        self.client_cert_password = None
        self.cert_provider_command = ''
        atexit.register(self.Cleanup)
        if self.use_client_certificate:
            # Search for configuration produced by endpoint verification.
            cfg_file = _AutoDiscoveryFilePath()
            # Autodiscover context aware settings from the configuration file
            # created by the endpoint verification agent.
            try:
                contents = files.ReadFileContents(cfg_file)
                log.debug('context aware settings detected at %s', cfg_file)
                json_out = json.loads(contents)
                if 'cert_provider_command' in json_out:
                    # Execute the cert provider to provision client
                    # certificates for context aware access.
                    self.cert_provider_command = json_out['cert_provider_command']
                    # Remember the certificate path when auto provisioning
                    # so it can be cleaned up after use.
                    self._cert_and_key_path = os.path.join(
                        config.Paths().global_config_dir, 'caa_cert.pem')
                    # Certs provisioned using endpoint verification are stored
                    # as a single file holding both the public certificate and
                    # the private key.
                    self._ProvisionClientCert(self.cert_provider_command,
                                              self._cert_and_key_path)
                    self.client_cert_path = self._cert_and_key_path
                else:
                    raise CertProvisionException('no cert provider detected')
            except files.Error as e:
                log.debug('context aware settings discovery file %s - %s',
                          cfg_file, e)
            except CertProvisionException as e:
                log.error('failed to provision client certificate - %s', e)
            if self.client_cert_path is None:
                raise ConfigException(
                    'Use of client certificate requires endpoint verification '
                    'agent. Run `gcloud topic client-certificate` for '
                    'installation guide.')
Example #18

    def testJSONFromStdin(self):
        self.RemoveServiceAccount()

        json_key_file = self._GetTestDataPathFor(
            'inactive_service_account.json')
        contents = files.ReadFileContents(json_key_file)
        self.WriteInput(contents)
        self.Run('auth activate-service-account {0} --key-file=-'.format(
            _SERVICE_ACCOUNT_EMAIL))

        self.AssertErrEquals(
            'Activated service account credentials for: [{0}]\n'.format(
                _SERVICE_ACCOUNT_EMAIL))

        # Loads oauth2client credentials and verifies.
        creds_oauth2client = store.Load()
        self.assertIsInstance(creds_oauth2client,
                              service_account.ServiceAccountCredentials)
        self.AssertCredentialsEqual(
            creds_oauth2client, {
                'access_token': _ACCESS_TOKEN,
                'client_id': _CLIENT_ID,
                'service_account_email': _SERVICE_ACCOUNT_EMAIL,
                '_private_key_id': _PRIVATE_KEY_ID,
                '_private_key_pkcs8_pem': _PRIVATE_KEY,
            })
        # Credentials activated via oauth2client and via google-auth carry
        # different token URIs. Both are valid.
        self.assertIn(creds_oauth2client.token_uri,
                      ('https://accounts.google.com/o/oauth2/token',
                       'https://oauth2.googleapis.com/token'))

        # Loads google-auth credentials and verifies.
        creds_google_auth = store.Load(use_google_auth=True)
        self.assertIsInstance(creds_google_auth,
                              google_auth_service_account.Credentials)
        self.AssertCredentialsEqual(
            creds_google_auth, {
                'token': _ACCESS_TOKEN,
                'client_id': _CLIENT_ID,
                'service_account_email': _SERVICE_ACCOUNT_EMAIL,
                'private_key_id': _PRIVATE_KEY_ID,
                'private_key': _PRIVATE_KEY,
                '_token_uri': 'https://oauth2.googleapis.com/token',
            })
Example #19
def GetClientSecretsType(client_id_file):
  """Get the type of the client secrets file (web or installed)."""
  invalid_file_format_msg = (
      'Invalid file format. See '
      'https://developers.google.com/api-client-library/'
      'python/guide/aaa_client_secrets')
  try:
    obj = json.loads(files.ReadFileContents(client_id_file))
  except files.Error:
    raise InvalidClientSecretsError(
        'Cannot read file: "%s"' % client_id_file)
  if obj is None:
    raise InvalidClientSecretsError(invalid_file_format_msg)
  if len(obj) != 1:
    raise InvalidClientSecretsError(
        invalid_file_format_msg + ' '
        'Expected a JSON object with a single property for a "web" or '
        '"installed" application')
  return tuple(obj)[0]
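
A client secrets file is a JSON object with exactly one top-level property, and tuple(obj)[0] simply extracts that property name (iterating a dict yields its keys). For example:

import json

secrets = json.loads(
    '{"installed": {"client_id": "abc.apps.googleusercontent.com"}}')
assert tuple(secrets)[0] == 'installed'
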
Example #20
def get_cached_execution_id():
  """Gets the cached execution object.

  Returns:
    execution: the execution resource name
  """
  cache_path = _get_cache_path()

  if not os.path.isfile(cache_path):
    raise exceptions.Error(_NO_CACHE_MESSAGE)
  try:
    cached_execution = files.ReadFileContents(cache_path)
    execution_ref = resources.REGISTRY.Parse(
        cached_execution, collection=EXECUTION_COLLECTION)
    log.status.Print('Using cached execution name: {}'.format(
        execution_ref.RelativeName()))
    return execution_ref
  except Exception:
    raise exceptions.Error(_NO_CACHE_MESSAGE)
Example #21

    def ValidateConfigFile(self):
        """Validates the config file.

    If the config file has any errors, this method compiles them and raises
    a ValidationFailedError with an easy-to-read summary.

    Raises:
      ValidationFailedError: Error raised when validation fails.
    """
        config_file_errors = []
        if self.parsed_yaml is None:
            return
        if not isinstance(self.parsed_yaml, dict):
            config_file_errors.append(
                InvalidSchemaError(invalid_schema_reasons=[
                    'The file content is not in json format'
                ]))
            raise ValidationFailedError(self.config_file_path,
                                        config_file_errors, {})

        AppendIfNotNone(config_file_errors, self.ValidateAlphabeticalOrder())
        AppendIfNotNone(config_file_errors, self.ValidateSchema())

        config_file_property_errors = {}

        config_file = files.ReadFileContents(self.config_file_path)
        feature_flags_config = config.FeatureFlagsConfig(config_file)
        for section_property in feature_flags_config.properties:
            property_errors = []
            values_list = feature_flags_config.properties[
                section_property].values

            AppendIfNotNone(property_errors,
                            self.ValidateValueTypes(values_list))
            AppendIfNotNone(property_errors,
                            self.ValidateValues(values_list, section_property))
            if property_errors:
                config_file_property_errors[section_property] = property_errors

        if config_file_errors or config_file_property_errors:
            raise ValidationFailedError(self.config_file_path,
                                        config_file_errors,
                                        config_file_property_errors)
Example #22
def GetFileOrURL(cluster_config, certificate_file=True):
    """Parses config input to determine whether URL or File logic should execute.

     Determines whether the cluster_config is a file or URL. If it's a URL, it
     then pulls the contents of the file using a GET request. If it's a
     file, then it expands the file path and returns its contents.

  Args:
    cluster_config: str, A file path or URL for the login-config.
    certificate_file: str, Optional file path to the CA certificate to use with
      the GET request to the URL.

  Raises:
    AnthosAuthException: If the data could not be pulled from the URL.

  Returns:
    A tuple (parsed_config_fileOrURL, config_contents, is_url):
    parsed_config_fileOrURL: str, either the URL that was passed or an
      expanded file path if a file was passed.
    config_contents: str, the contents of the file or URL.
    is_url: bool, True if the provided cluster_config input was a URL.
  """

    if not cluster_config:
        return None, None, None

    # Handle if input is URL.
    config_url = urllib.parse.urlparse(cluster_config)
    is_url = config_url.scheme == 'http' or config_url.scheme == 'https'
    if is_url:
        response = requests.get(cluster_config,
                                verify=certificate_file or True)
        if response.status_code != requests.codes.ok:
            raise AnthosAuthException(
                'Request to login-config URL failed with '
                'response code [{}] and text [{}]: '.format(
                    response.status_code, response.text))
        return cluster_config, response.text, is_url

    # Handle if input is file.
    expanded_config_path = flags.ExpandLocalDirAndVersion(cluster_config)
    contents = files.ReadFileContents(expanded_config_path)
    return expanded_config_path, contents, is_url
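
The URL-versus-file decision reduces to a scheme check, which is easy to exercise in isolation:

import urllib.parse


def is_http_url(value):
    scheme = urllib.parse.urlparse(value).scheme
    return scheme in ('http', 'https')


assert is_http_url('https://www.example.com/login-config.yaml')
assert not is_http_url('~/login-config.yaml')
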
Example #23

def GetFeatureFlagsConfig(account_id, project_id):
    """Gets the feature flags config.

  If the feature flags config file does not exist or is stale, download and save
  the feature flags config. Otherwise, read the feature flags config. Errors
  will be logged, but will not interrupt normal operation.

  Args:
    account_id: str, account ID.
    project_id: str, project ID.

  Returns:
    A FeatureFlagConfig, or None.
  """
    feature_flags_config_path = config.Paths().feature_flags_config_path

    with _FEATURE_FLAGS_LOCK:
        yaml_data = None
        if IsFeatureFlagsConfigStale(feature_flags_config_path):
            yaml_data = FetchFeatureFlagsConfig()
            try:
                file_utils.WriteFileContents(feature_flags_config_path,
                                             yaml_data or '')
            except file_utils.Error as e:
                logging.warning(
                    'Unable to write feature flags config [%s]: %s. Please '
                    'ensure that this path is writeable.',
                    feature_flags_config_path, e)
        else:
            try:
                yaml_data = file_utils.ReadFileContents(
                    feature_flags_config_path)
            except file_utils.Error as e:
                logging.warning(
                    'Unable to read feature flags config [%s]: %s. Please '
                    'ensure that this path is readable.',
                    feature_flags_config_path, e)

    if yaml_data:
        return FeatureFlagsConfig(yaml_data, account_id, project_id)
    return None
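
IsFeatureFlagsConfigStale is gcloud-internal; a plausible mtime-based sketch (the threshold here is an assumed value, not the real one):

import os
import time


def is_stale(path, max_age_seconds=30 * 60):
    # Treat a missing or unreadable file the same as an expired one.
    try:
        return time.time() - os.path.getmtime(path) > max_age_seconds
    except OSError:
        return True
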
Example #24
    def Run(self, args):
        kube_client = kube_util.KubernetesClient(
            gke_uri=getattr(args, 'gke_uri', None),
            gke_cluster=getattr(args, 'gke_cluster', None),
            kubeconfig=getattr(args, 'kubeconfig', None),
            context=getattr(args, 'context', None),
            public_issuer_url=getattr(args, 'public_issuer_url', None),
            enable_workload_identity=getattr(args, 'enable_workload_identity',
                                             False),
        )
        kube_util.ValidateClusterIdentifierFlags(kube_client, args)

        yaml_string = files.ReadFileContents(
            args.config) if args.config is not None else _default_cr()

        _validate_cr(yaml_string)

        _apply_cr_to_membership_cluster(kube_client, yaml_string)

        log.status.Print('Added CloudRun CR')
Example #25
    def GetMessage(self, args):
        """Get error message.

    Args:
      args: the arguments for the command

    Returns:
      error_message read from error file or provided inline

    Raises:
      CannotOpenFileError: When there is a problem with reading the file
    """
        error_message = ''
        if args.message_file:
            try:
                error_message = files.ReadFileContents(args.message_file)
            except files.Error as e:
                raise exceptions.CannotOpenFileError(args.message_file, e)
        elif args.message:
            error_message = args.message
        return error_message
Example #26

def GetMessageFromFile(filepath, message):
  """Returns a message populated from the JSON or YAML file.

  Args:
    filepath: str, A local path to an object specification in JSON or YAML
      format.
    message: messages.Message, The message class to populate from the file.
  """
  file_contents = files.ReadFileContents(filepath)

  try:
    yaml_obj = yaml.load(file_contents)
    json_str = json.dumps(yaml_obj)
  except yaml.YAMLParseError:
    json_str = file_contents

  try:
    return encoding.JsonToMessage(message, json_str)
  except Exception as e:
    raise exceptions.InvalidInputError('Unable to parse file [{}]: {}.'.format(
        filepath, e))
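
The YAML-then-JSON detour works because JSON documents are essentially valid YAML, so either input format ends up as a JSON string. A dependency-light sketch using PyYAML directly (gcloud's yaml wrapper adds its own error types on top):

import json

import yaml  # PyYAML


def file_contents_to_json(file_contents):
    try:
        return json.dumps(yaml.safe_load(file_contents))
    except yaml.YAMLError:
        # Not parseable as YAML; pass the raw contents through unchanged.
        return file_contents
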
Example #27
def _ValidateAndMergeArgInputs(args):
    """Turn args.inputs and args.inputs_from_file dicts into a single dict.

  Args:
    args: The parsed command-line arguments

  Returns:
    A dict that is the merge of args.inputs and args.inputs_from_file
  Raises:
    files.Error
  """

    is_local_file = {}

    # If no inputs from file, then no validation or merge needed
    if not args.inputs_from_file:
        return args.inputs, is_local_file

    # Initialize the merged dictionary
    arg_inputs = {}

    if args.inputs:
        # Validate args.inputs and args.inputs-from-file do not overlap
        overlap = set(args.inputs.keys()).intersection(
            set(args.inputs_from_file.keys()))
        if overlap:
            raise exceptions.GenomicsError(
                '--{0} and --{1} may not specify overlapping values: {2}'.
                format('inputs', 'inputs-from-file', ', '.join(overlap)))

        # Add the args.inputs
        arg_inputs.update(args.inputs)

    # Read up the inputs-from-file and add the values from the file
    for key, value in six.iteritems(args.inputs_from_file):
        arg_inputs[key] = files.ReadFileContents(value)
        is_local_file[key] = True

    return arg_inputs, is_local_file
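
The interesting part is the overlap guard; in isolation it is just a set intersection (a hypothetical standalone version):

def check_no_overlap(inputs, inputs_from_file):
    overlap = set(inputs) & set(inputs_from_file)
    if overlap:
        raise ValueError(
            '--inputs and --inputs-from-file may not specify overlapping '
            'values: {0}'.format(', '.join(sorted(overlap))))
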
Example #28

def _GetIAMAuthHeaders():
    """Returns the IAM authorization headers to be used."""
    headers = []

    authority_selector = properties.VALUES.auth.authority_selector.Get()
    if authority_selector:
        headers.append((IAM_AUTHORITY_SELECTOR_HEADER, authority_selector))

    authorization_token = None
    authorization_token_file = (
        properties.VALUES.auth.authorization_token_file.Get())
    if authorization_token_file:
        try:
            authorization_token = files.ReadFileContents(
                authorization_token_file)
        except files.Error as e:
            raise Error(e)

    if authorization_token:
        headers.append(
            (IAM_AUTHORIZATION_TOKEN_HEADER, authorization_token.strip()))
    return headers
Example #29
def GenerateApiMap(base_dir, root_dir, api_config):
    """Create an apis_map.py file in the given root_dir with for given api_config.

  Args:
      base_dir: str, Path of directory for the project.
      root_dir: str, Path of the map file location within the project.
      api_config: regeneration config for all apis.
  """

    api_def_filename, _ = os.path.splitext(api_def.__file__)
    api_def_source = files.ReadFileContents(api_def_filename + '.py')

    tpl = template.Template(
        filename=os.path.join(os.path.dirname(__file__), 'template.tpl'))
    api_map_file = os.path.join(base_dir, root_dir, 'apis_map.py')
    logging.debug('Generating api map at %s', api_map_file)
    api_map = _MakeApiMap(root_dir.replace('/', '.'), api_config)
    logging.debug('Creating following api map %s', api_map)
    with files.FileWriter(api_map_file) as apis_map_file:
        ctx = runtime.Context(apis_map_file,
                              api_def_source=api_def_source,
                              apis_map=api_map)
        tpl.render_context(ctx)
Example #30
def _GetDockerignoreExclusions(upload_dir, gen_files):
    """Helper function to read the .dockerignore on disk or in generated files.

  Args:
    upload_dir: the path to the root directory.
    gen_files: dict of filename to contents of generated files.

  Returns:
    Set of exclusion expressions from the dockerignore file.
  """
    dockerignore = os.path.join(upload_dir, '.dockerignore')
    exclude = set()
    ignore_contents = None
    if os.path.exists(dockerignore):
        ignore_contents = files.ReadFileContents(dockerignore)
    else:
        ignore_contents = gen_files.get('.dockerignore')
    if ignore_contents:
        # Read the exclusions from the dockerignore, filtering out blank lines.
        exclude = set(filter(bool, ignore_contents.splitlines()))
        # Remove paths that shouldn't be excluded on the client.
        exclude -= set(BLACKLISTED_DOCKERIGNORE_PATHS)
    return exclude
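
Concretely, the parsing step keeps every non-blank line and then drops the entries the client must always upload (BLACKLISTED_DOCKERIGNORE_PATHS is gcloud-internal; 'Dockerfile' and '.dockerignore' are assumed members for this sketch):

def parse_dockerignore(contents, always_upload=('Dockerfile', '.dockerignore')):
    exclude = set(filter(bool, contents.splitlines()))
    return exclude - set(always_upload)


print(parse_dockerignore('node_modules\n\nDockerfile\n*.log\n'))
# e.g. {'node_modules', '*.log'} (set order varies)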