def _GetRefData(self, path):
    """Loads the YAML data from the given reference.

    A YAML reference must refer to a YAML file and an attribute within that
    file to extract.

    Args:
      path: str, The path of the YAML file to import. It must be in the
        form of: package.module:attribute.attribute, where the module path is
        separated from the sub attributes within the YAML by a ':'.

    Raises:
      LayoutException: If the given module or attribute cannot be loaded.

    Returns:
      The referenced YAML data.
    """
    pieces = path.split(':')
    if len(pieces) != 2:
        raise LayoutException(
            'Invalid Yaml reference: [{}]. References must be in the format: '
            'path(.path)+:attribute(.attribute)*'.format(path))
    module_path, attribute_path = pieces

    # The yaml file lives relative to the directory containing the
    # googlecloudsdk package; dotted module segments map to directories.
    sdk_root = os.path.dirname(os.path.dirname(googlecloudsdk.__file__))
    ref_file = os.path.join(sdk_root, *module_path.split('.')) + '.yaml'
    try:
        contents = pkg_resources.GetData(ref_file)
    except IOError as e:
        raise LayoutException(
            'Failed to load Yaml reference file [{}]: {}'.format(ref_file, e))

    return self._GetAttribute(yaml.load(contents), attribute_path, ref_file)
def LoadCommonType(impl_paths,
                   path,
                   release_track,
                   construction_id,
                   is_command,
                   yaml_command_translator=None):
    """Loads a calliope command or group from a file.

    Args:
      impl_paths: [str], A list of file paths to the command implementation for
        this group or command.
      path: [str], A list of group names that got us down to this command group
        with respect to the CLI itself.  This path should be used for things
        like error reporting when a specific element in the tree needs to be
        referenced.
      release_track: ReleaseTrack, The release track that we should load.
      construction_id: str, A unique identifier for the CLILoader that is
        being constructed.
      is_command: bool, True if we are loading a command, False to load a
        group.
      yaml_command_translator: YamlCommandTranslator, An instance of a
        translator to use to load the yaml data.

    Raises:
      CommandLoadFailure: If the command is invalid and cannot be loaded.

    Returns:
      The base._Common class for the command or group.
    """
    # Delegate implementation discovery to _GetAllImplementations instead of
    # duplicating its loop here; the two bodies previously repeated the same
    # yaml/python loading logic and could silently drift apart.
    implementations = _GetAllImplementations(impl_paths, path, construction_id,
                                             is_command,
                                             yaml_command_translator)
    # Pick the single implementation matching the requested release track and
    # instantiate it (the extractor returns a factory function).
    return _ExtractReleaseTrackImplementation(impl_paths[0], release_track,
                                              implementations)()
def _GetAllImplementations(impl_paths, path, construction_id, is_command,
                           yaml_command_translator):
    """Gets all the release track command implementations.

    Can load both python and yaml modules.

    Args:
      impl_paths: [str], A list of file paths to the command implementation
        for this group or command.
      path: [str], A list of group names that got us down to this command
        group with respect to the CLI itself.  This path should be used for
        things like error reporting when a specific element in the tree needs
        to be referenced.
      construction_id: str, A unique identifier for the CLILoader that is
        being constructed.
      is_command: bool, True if we are loading a command, False to load a
        group.
      yaml_command_translator: YamlCommandTranslator, An instance of a
        translator to use to load the yaml data.

    Raises:
      CommandLoadFailure: If the command is invalid and cannot be loaded.

    Returns:
      [(func->base._Common, [base.ReleaseTrack])], A list of tuples that can
      be passed to _ExtractReleaseTrackImplementation. Each item in this list
      represents a command implementation. The first element is a function
      that returns the implementation, and the second element is a list of
      release tracks it is valid for.
    """
    results = []
    for impl_path in impl_paths:
        if not impl_path.endswith('.yaml'):
            # Python implementation: import the module and scan its members.
            module = _GetModuleFromPath(impl_path, path, construction_id)
            results.extend(
                _ImplementationsFromModule(module.__file__,
                                           module.__dict__.values(),
                                           is_command=is_command))
            continue
        # Yaml implementation: only commands (not groups) may be yaml-based.
        if not is_command:
            raise CommandLoadFailure(
                '.'.join(path),
                Exception('Command groups cannot be implemented in yaml'))
        data = yaml.load(pkg_resources.GetData(impl_path),
                         Loader=CreateYamlLoader(impl_path))
        results.extend(
            _ImplementationsFromYaml(path, data, yaml_command_translator))
    return results
# "Esempio n. 4" ("Example no. 4") — stray snippet-separator text (plus a
# vote count) left over from the code listing this file was extracted from;
# converted to a comment so the file remains valid Python.
def _CopyDefaultDockerfile(runtime, dst, cleanup):
    """Copies default Dockerfile for a given runtime into destination directory.

    Args:
      runtime: str, Runtime that we're looking for the Dockerfile for.
      dst: str, Directory path where to copy the Dockerfile.
      cleanup: bool, If true, delete the file on gcloud exit.

    Raises:
      InternalError: if there is no directory with default Dockerfiles.
      NoDefaultDockerfileError: if there is no default Dockerfile for a given
          runtime.
    """
    log.info('Looking for the default %s for runtime [%s]', config.DOCKERFILE,
             runtime)
    canonical = _GetCanonicalRuntime(runtime)
    dockerfiles_dir = _GetDefaultDockerfilesDir()
    src = os.path.join(dockerfiles_dir, canonical + '_app', config.DOCKERFILE)

    try:
        contents = pkg_resources.GetData(src)
    except IOError:
        # No packaged Dockerfile for this runtime; report the supported set.
        raise NoDefaultDockerfileError(
            'No default {dockerfile} for runtime [{runtime}] in the SDK. '
            'Use one of the supported runtimes: [{supported}].'.format(
                dockerfile=config.DOCKERFILE,
                runtime=canonical,
                supported='|'.join(_ListSupportedRuntimes(dockerfiles_dir))))

    log.info(
        '%s for runtime [%s] is found in %s. Copying it into application '
        'directory.', config.DOCKERFILE, canonical, dockerfiles_dir)

    with open(os.path.join(dst, os.path.basename(src)), 'w') as dst_file:
        dst_file.write(contents)

    if cleanup:
        # Remove the generated Dockerfile when gcloud exits.
        atexit.register(Clean, os.path.join(dst, config.DOCKERFILE))
# "Esempio n. 5" ("Example no. 5") — stray snippet-separator text (plus a
# vote count) left over from the code listing this file was extracted from;
# converted to a comment so the file remains valid Python.
def FindOrCopyDockerfile(runtime, dst, cleanup=True):
    """Copies default Dockerfile for a given runtime into destination directory.

    Default Dockerfile for runtime is used if there is no user provided
    dockerfile in the destination directory.

    Args:
      runtime: str, Runtime that we're looking for the Dockerfile for.
      dst: str, Directory path where to check for and copy to the Dockerfile.
      cleanup: bool, If true, delete the file on gcloud exit.

    Raises:
      IOError: raised by pkg_resources.GetData if the Dockerfile doesn't exist
        in the expected location.

    Returns:
      callable(), A function to be called to clean up the generated Dockerfile.
    """
    log.info('Looking for the %s in %s', config.DOCKERFILE, dst)
    dockerfile_dst = os.path.join(dst, config.DOCKERFILE)
    if os.path.exists(dockerfile_dst):
        # The user supplied their own Dockerfile; nothing to copy or clean up.
        log.info('Using %s found in %s', config.DOCKERFILE, dst)
        return lambda: None

    log.info('Looking for the default %s for runtime [%s]', config.DOCKERFILE,
             runtime)
    canonical = _GetCanonicalRuntime(runtime)
    defaults_dir = GetGCloudDockerfilesDir()
    src = os.path.join(defaults_dir, canonical + '_app', config.DOCKERFILE)
    contents = pkg_resources.GetData(src)
    log.info(
        '%s for runtime [%s] is found in %s. Copying it into application '
        'directory.', config.DOCKERFILE, canonical, defaults_dir)
    with open(os.path.join(dst, os.path.basename(src)), 'w') as out_file:
        out_file.write(contents)

    if not cleanup:
        return lambda: None
    # Remove the generated Dockerfile when gcloud exits, and also hand the
    # caller a cleanup callable for earlier removal.
    atexit.register(Clean, dockerfile_dst)
    return lambda: Clean(dockerfile_dst)
def CreateYamlLoader(impl_path):
    """Creates a custom yaml loader that handles includes from common data.

  Args:
    impl_path: str, The path to the file we are loading data from.

  Returns:
    yaml.Loader, A yaml loader to use.
  """
    # TODO(b/64147277) Allow for importing from other places.
    # Shared data for every yaml command in this directory lives in a sibling
    # __init__.yaml, if one exists.
    common_file_path = os.path.join(os.path.dirname(impl_path),
                                    '__init__.yaml')
    common_data = None
    if os.path.exists(common_file_path):
        common_data = yaml.load(pkg_resources.GetData(common_file_path))

    class Loader(yaml.Loader):
        """A custom yaml loader.

    It adds 2 different import capabilities. Assuming __init__.yaml has the
    contents:

    foo:
      a: b
      c: d

    baz:
      - e: f
      - g: h

    The first uses a custom constructor to insert data into your current file,
    so:

    bar: !COMMON foo.a

    results in:

    bar: b

    The second mechanism overrides construct_mapping and construct_sequence to
    post process the data and replace the merge macro with keys from the other
    file. We can't use the custom constructor for this as well because the
    merge key type in yaml is processed before custom constructors which makes
    importing and merging not possible. So:

    bar:
      _COMMON_: foo
      i: j

    results in:

    bar:
      a: b
      c: d
      i: j

    This can also be used to merge list contexts, so:

    bar:
      - _COMMON_baz
      - i: j

    results in:

    bar:
      - e: f
      - g: h
      - i: j

    You may also use the !REF and _REF_ directives in the same way. Instead of
    pulling from the common file, they can pull from an arbitrary yaml file
    somewhere in the googlecloudsdk tree. The syntax looks like:

    bar: !REF googlecloudsdk.foo.bar:a.b.c

    This will load googlecloudsdk/foo/bar.yaml and from that file return the
    a.b.c nested attribute.
    """

        # Scalar-tag macros (resolved by custom constructors) and merge-key
        # macros (resolved by the construct_mapping/construct_sequence
        # overrides below).
        INCLUDE_COMMON_MACRO = '!COMMON'
        MERGE_COMMON_MACRO = '_COMMON_'
        INCLUDE_REF_MACRO = '!REF'
        MERGE_REF_MACRO = '_REF_'

        def __init__(self, stream):
            super(Loader, self).__init__(stream)

        def construct_mapping(self, *args, **kwargs):
            # Resolve _COMMON_ merges first, then _REF_ merges, on every
            # mapping yaml constructs.
            data = super(Loader, self).construct_mapping(*args, **kwargs)
            data = self._ConstructMappingHelper(Loader.MERGE_COMMON_MACRO,
                                                self._GetCommonData, data)
            return self._ConstructMappingHelper(Loader.MERGE_REF_MACRO,
                                                self._GetRefData, data)

        def _ConstructMappingHelper(self, macro, source_func, data):
            # The macro key's value is a comma-separated list of attribute
            # paths to pull in via source_func.
            attribute_path = data.pop(macro, None)
            if not attribute_path:
                return data

            modified_data = {}
            for path in attribute_path.split(','):
                modified_data.update(source_func(path))
            # Add the explicit data last so it can override the imports.
            modified_data.update(data)
            return modified_data

        def construct_sequence(self, *args, **kwargs):
            # Same two-pass resolution as construct_mapping, for lists.
            data = super(Loader, self).construct_sequence(*args, **kwargs)
            data = self._ConstructSequenceHelper(Loader.MERGE_COMMON_MACRO,
                                                 self._GetCommonData, data)
            return self._ConstructSequenceHelper(Loader.MERGE_REF_MACRO,
                                                 self._GetRefData, data)

        def _ConstructSequenceHelper(self, macro, source_func, data):
            # Splice imported list items in place of '_COMMON_x'/'_REF_x'
            # string entries; other items pass through unchanged.
            # NOTE: basestring — this module targets Python 2.
            new_list = []
            for i in data:
                if isinstance(i, basestring) and i.startswith(macro):
                    attribute_path = i[len(macro):]
                    for path in attribute_path.split(','):
                        new_list.extend(source_func(path))
                else:
                    new_list.append(i)
            return new_list

        def IncludeCommon(self, node):
            # Constructor for the !COMMON scalar tag.
            attribute_path = self.construct_scalar(node)
            return self._GetCommonData(attribute_path)

        def IncludeRef(self, node):
            # Constructor for the !REF scalar tag.
            attribute_path = self.construct_scalar(node)
            return self._GetRefData(attribute_path)

        def _GetCommonData(self, attribute_path):
            # Pull an attribute out of the sibling __init__.yaml data loaded
            # in the enclosing closure; error if no such file existed.
            if not common_data:
                raise LayoutException(
                    'Command [{}] references [common command] data but it does not '
                    'exist.'.format(impl_path))
            return self._GetAttribute(common_data, attribute_path,
                                      'common command')

        def _GetRefData(self, path):
            """Loads the YAML data from the given reference.

      A YAML reference must refer to a YAML file and an attribute within that
      file to extract.

      Args:
        path: str, The path of the YAML file to import. It must be in the
          form of: package.module:attribute.attribute, where the module path is
          separated from the sub attributes within the YAML by a ':'.

      Raises:
        LayoutException: If the given module or attribute cannot be loaded.

      Returns:
        The referenced YAML data.
      """
            # References are resolved relative to the directory containing
            # the googlecloudsdk package.
            root = os.path.dirname(os.path.dirname(googlecloudsdk.__file__))
            parts = path.split(':')
            if len(parts) != 2:
                raise LayoutException(
                    'Invalid Yaml reference: [{}]. References must be in the format: '
                    'path(.path)+:attribute(.attribute)*'.format(path))
            yaml_path = os.path.join(root, *parts[0].split('.'))
            yaml_path += '.yaml'
            try:
                data = yaml.load(pkg_resources.GetData(yaml_path))
            except IOError as e:
                raise LayoutException(
                    'Failed to load Yaml reference file [{}]: {}'.format(
                        yaml_path, e))

            return self._GetAttribute(data, parts[1], yaml_path)

        def _GetAttribute(self, data, attribute_path, location):
            # Walk a dotted attribute path through nested dicts; any missing
            # (or falsy) step is treated as an error.
            value = data
            for attribute in attribute_path.split('.'):
                value = value.get(attribute, None)
                if not value:
                    raise LayoutException(
                        'Command [{}] references [{}] data attribute [{}] in '
                        'path [{}] but it does not exist.'.format(
                            impl_path, location, attribute, attribute_path))
            return value

    # Register the scalar-tag constructors on the loader class so yaml
    # invokes them for !COMMON and !REF nodes.
    Loader.add_constructor(Loader.INCLUDE_COMMON_MACRO, Loader.IncludeCommon)
    Loader.add_constructor(Loader.INCLUDE_REF_MACRO, Loader.IncludeRef)
    return Loader
# NOTE(review): this redefines CreateYamlLoader; at module level it shadows
# the earlier definition above (this older variant lacks !REF/_REF_ support).
# Confirm which version is intended to survive.
def CreateYamlLoader(impl_path):
    """Creates a custom yaml loader that handles includes from common data.

  Args:
    impl_path: str, The path to the file we are loading data from.

  Returns:
    yaml.Loader, A yaml loader to use.
  """
    # TODO(b/64147277) Allow for importing from other places.
    # Shared data for every yaml command in this directory lives in a sibling
    # __init__.yaml, if one exists.
    common_file_path = os.path.join(os.path.dirname(impl_path),
                                    '__init__.yaml')
    common_data = None
    if os.path.exists(common_file_path):
        common_data = yaml.load(pkg_resources.GetData(common_file_path))

    class Loader(yaml.Loader):
        """A custom yaml loader.

    It adds 2 different import capabilities. Assuming __init__.yaml has the
    contents:

    foo:
      a: b
      c: d

    baz:
      - e: f
      - g: h

    The first uses a custom constructor to insert data into your current file,
    so:

    bar: !COMMON foo.a

    results in:

    bar: b

    The second mechanism overrides construct_mapping and construct_sequence to
    post process the data and replace the merge macro with keys from the other
    file. We can't use the custom constructor for this as well because the
    merge key type in yaml is processed before custom constructors which makes
    importing and merging not possible. So:

    bar:
      _COMMON_: foo
      i: j

    results in:

    bar:
      a: b
      c: d
      i: j

    This can also be used to merge list contexts, so:

    bar:
      - _COMMON_baz
      - i: j

    results in:

    bar:
      - e: f
      - g: h
      - i: j
    """

        # Scalar-tag macro (custom constructor) and merge-key macro
        # (construct_mapping/construct_sequence overrides).
        INCLUDE_MACRO = '!COMMON'
        MERGE_MACRO = '_COMMON_'

        def __init__(self, stream):
            super(Loader, self).__init__(stream)

        def construct_mapping(self, *args, **kwargs):
            # Replace a '_COMMON_: a,b' key with the referenced mappings.
            # NOTE(review): unlike the newer variant above, imported keys are
            # merged AFTER the explicit ones, so imports can override explicit
            # keys here — confirm this is intended.
            data = super(Loader, self).construct_mapping(*args, **kwargs)
            attribute_path = data.pop(Loader.MERGE_MACRO, None)
            if attribute_path:
                for path in attribute_path.split(','):
                    data.update(self._GetData(path))
            return data

        def construct_sequence(self, *args, **kwargs):
            # Splice imported list items in place of '_COMMON_x' string
            # entries; other items pass through unchanged.
            # NOTE: basestring — this module targets Python 2.
            data = super(Loader, self).construct_sequence(*args, **kwargs)
            new_list = []
            for i in data:
                if isinstance(i, basestring) and i.startswith(
                        Loader.MERGE_MACRO):
                    attribute_path = i[len(Loader.MERGE_MACRO):]
                    for path in attribute_path.split(','):
                        new_list.extend(self._GetData(path))
                else:
                    new_list.append(i)
            return new_list

        def include(self, node):
            # Constructor for the !COMMON scalar tag.
            attribute_path = self.construct_scalar(node)
            return self._GetData(attribute_path)

        def _GetData(self, attribute_path):
            # Walk a dotted attribute path through the common __init__.yaml
            # data; any missing (or falsy) step is treated as an error.
            if not common_data:
                raise LayoutException(
                    'Command [{}] references common command data but it does not exist.'
                    .format(impl_path))
            value = common_data
            for attribute in attribute_path.split('.'):
                value = value.get(attribute, None)
                if not value:
                    raise LayoutException(
                        'Command [{}] references common command data attribute [{}] in '
                        'path [{}] but it does not exist.'.format(
                            impl_path, attribute, attribute_path))
            return value

    # Register the scalar-tag constructor so yaml invokes it for !COMMON.
    Loader.add_constructor(Loader.INCLUDE_MACRO, Loader.include)
    return Loader