Esempio n. 1
0
def _GetYamlImports(import_object, globbing_enabled=False):
    """Extract the import section of a file.

  If the glob_imports config is set to true, expand any globs (e.g. *.jinja).
  Named imports cannot be used with globs that expand to more than one file.
  If globbing is disabled or a glob pattern does not expand to match any files,
  importer will use the literal string as the file path.

  Args:
    import_object: The object in which to look for imports.
    globbing_enabled: If true, will resolve glob patterns dynamically.

  Returns:
    A list of dictionary objects, containing the keys 'path' and 'name' for each
    file to import. If no name was found, we populate it with the value of path.

  Raises:
   ConfigError: If we cannot read the file, the yaml is malformed, or
       the import object does not contain a 'path' field.
  """
    # Globs can only be resolved relative to a local parent directory; URLs
    # have no local filesystem context, so parent_dir stays None for them.
    parent_dir = None
    if not _IsUrl(import_object.full_path):
        parent_dir = os.path.dirname(os.path.abspath(import_object.full_path))
    content = import_object.GetContent()
    # NOTE(review): presumably `yaml` is the SDK's own wrapper whose load() is
    # safe by default; if this is PyYAML, an explicit Loader should be passed
    # -- confirm.
    yaml_content = yaml.load(content)
    imports = []
    if yaml_content and IMPORTS in yaml_content:
        raw_imports = yaml_content[IMPORTS]
        # Validate the yaml imports, and make sure the optional name is set.
        for i in raw_imports:
            if PATH not in i:
                raise exceptions.ConfigError(
                    'Missing required field %s in import in file %s.' %
                    (PATH, import_object.full_path))
            glob_matches = []
            # Only expand globs if config set and the path is a local fs reference.
            if globbing_enabled and parent_dir and not _IsUrl(i[PATH]):
                # Set our working dir to the import_object's for resolving globs.
                with files.ChDir(parent_dir):
                    # TODO(b/111880973): Replace with gcloud glob supporting ** wildcards.
                    glob_matches = glob.glob(i[PATH])
                    glob_matches = _SanitizeWindowsPathsGlobs(glob_matches)
                # Multiple file case: each match is imported under its own
                # path as its name; a user-supplied name would be ambiguous.
                if len(glob_matches) > 1:
                    if NAME in i:
                        raise exceptions.ConfigError((
                            'Cannot use import name %s for path glob in file %s that'
                            ' matches multiple objects.') %
                                                     (i[NAME],
                                                      import_object.full_path))
                    imports.extend([{NAME: g, PATH: g} for g in glob_matches])
                    continue
            # Single file case. (URL, discrete file, or single glob match)
            if len(glob_matches) == 1:
                i[PATH] = glob_matches[0]
            # Populate the name field.
            if NAME not in i:
                i[NAME] = i[PATH]
            imports.append(i)
    return imports
Esempio n. 2
0
    def CopyTarballToGCS(self, storage_client, gcs_object):
        """Upload a gzipped tarball of this snapshot to a GCS object.

    Args:
      storage_client: storage_api.StorageClient, The storage client to use for
                      uploading.
      gcs_object: storage.objects Resource, The GCS object to write.

    Returns:
      storage_v1_messages.Object, The written GCS object.
    """
        with files.ChDir(self.src_dir):
            with files.TemporaryDirectory() as staging_dir:
                # Build the tarball in a scratch directory, then upload.
                tarball_path = os.path.join(staging_dir, 'file.tgz')
                tarball = self._MakeTarball(tarball_path)
                tarball.close()
                ignore_file_path = os.path.join(self.src_dir,
                                                gcloudignore.IGNORE_FILE_NAME)
                if self.any_files_ignored:
                    if os.path.exists(ignore_file_path):
                        log.info('Using gcloudignore file [{}]'.format(
                            ignore_file_path))
                    else:
                        log.status.Print(
                            _IGNORED_FILE_MESSAGE.format(
                                log_file=log.GetLogFilePath()))
                upload_message = (
                    'Uploading tarball of [{src_dir}] to '
                    '[gs://{bucket}/{object}]\n'.format(
                        src_dir=self.src_dir,
                        bucket=gcs_object.bucket,
                        object=gcs_object.object))
                log.status.write(upload_message)
                return storage_client.CopyFileToGCS(tarball_path, gcs_object)
Esempio n. 3
0
    def CompilePythonFiles(self):
        """Attempts to compile all the python files into .pyc files.

    This does not raise exceptions if compiling a given file fails.
    """
        # Some bundled code is python3-only and not valid python2 syntax, so
        # it must be skipped when compiling under python2.
        exclusion_rx = None
        if six.PY2:
            exclusion_rx = re.compile('(httplib2/python3'
                                      '|platform/bq/third_party/yaml/lib3)')

        # py_compile chokes on unicode paths and self.sdk_root may contain
        # unicode chars; compiling via relative paths sidesteps the issue.
        with file_utils.ChDir(self.sdk_root):
            target_dirs = [
                os.path.join('bin', 'bootstrapping'),
                os.path.join('data', 'cli'),
                'lib',
                'platform',
            ]
            for target in target_dirs:
                # quiet=2: in python 2.7 this value is merely truthy (acts as
                # quiet=True); from python 3.5 on, 1 hides the files being
                # processed and 2 suppresses all output.
                compileall.compile_dir(target,
                                       rx=exclusion_rx,
                                       quiet=2,
                                       force=True)
Esempio n. 4
0
    def CopyTarballToGCS(self, storage_client, gcs_object):
        """Upload a gzipped tarball of this snapshot's files to GCS.

    Args:
      storage_client: storage.Client, The storage client to use for uploading.
      gcs_object: storage.objects Resource, The GCS object to write.

    Returns:
      storage_v1_messages.Object, The written GCS object.
    """
        with files.ChDir(self.src_dir):
            with files.TemporaryDirectory() as staging_dir:
                tarball_path = os.path.join(staging_dir, 'file.tgz')
                # Archive every tracked file; the with-statement closes the
                # tarball when done.
                with tarfile.open(tarball_path, mode='w:gz') as tarball:
                    for relative_path in self.files:
                        tarball.add(relative_path)
                        log.debug('Added [%s]\n', relative_path)

                log.status.write(
                    'Uploading tarball of [{src_dir}] to '
                    '[gs://{bucket}/{object}]\n'.format(
                        src_dir=self.src_dir,
                        bucket=gcs_object.bucket,
                        object=gcs_object.object))
                bucket_ref = storage_util.BucketReference.FromBucketUrl(
                    gcs_object.bucket)
                return storage_client.CopyFileToGCS(bucket_ref, tarball_path,
                                                    gcs_object.object)
 def _RunTest(self, paths, gitignore=None):
   """Check that our upload list matches what `git add` would pick up."""
   self._TouchFiles(paths)
   if gitignore is not None:
     self.Touch(self.temp_path, '.gitignore', contents=gitignore)
   # Ask git (dry-run) which files it would track; that is the expectation.
   with files.ChDir(self.temp_path):
     self._GitInit()
     expected_files = self._GitAddDryRun()
   actual_files = self._RunListFilesForUpload(self.temp_path)
   self.assertEqual(set(actual_files), set(expected_files))
 def testHttpTriggerNoSource(self):
     """Deploy the quickstart example from local source without --source."""
     generated_name = self._GenerateFunctionName()
     # Deploy from the function's own directory so no source argument is
     # needed.
     with files.ChDir(self.function_path):
         deploy_ctx = self._DeployFunction('--trigger-http',
                                           name=generated_name,
                                           runtime='nodejs10')
         with deploy_ctx as function_name:
             self.Run('functions describe {}'.format(function_name))
             self.AssertOutputContains(function_name)
             self.Run('functions call {}'.format(function_name))
             self.AssertOutputContains('Hello World!')
Esempio n. 7
0
    def CompilePythonFiles(self):
        """Attempts to compile all the python files into .pyc files.

    This does not raise exceptions if compiling a given file fails.
    """
        # Some python code shipped in the SDK is not 2 + 3 compatible.
        # Create exclusion patterns to avoid compilation errors.
        # This is pretty hacky, ideally we would have this information in the
        # component metadata and derive the exclusion patterns from that.
        # However, this is an ok short-term solution until we have bundled python.
        if six.PY2:
            regex_exclusion = re.compile('(httplib2/python3'
                                         '|platform/bq/third_party/yaml/lib3)')
        else:
            # NOTE(review): this branch assumes the interpreter is python 3,
            # so version_info[1] is the 3.x minor version -- confirm before
            # any future major-version bump.
            # Do not compile anything on python 3.4.x
            if sys.version_info[1] == 4:
                regex_exclusion = re.compile('.*')
            elif sys.version_info[1] >= 7:
                # Sources known not to byte-compile under python 3.7+.
                regex_exclusion = re.compile(
                    '(kubernetes/utils/create_from_yaml.py'
                    '|platform/google_appengine'
                    '|gslib/vendored/boto/boto/iam/connection.py'
                    '|gslib/vendored/boto/tests/'
                    '|third_party/.*/python2/'
                    '|third_party/yaml/[a-z]*.py'
                    '|third_party/yaml/lib2/'
                    '|third_party/appengine/'
                    '|third_party/fancy_urllib/'
                    '|platform/bq/third_party/gflags'
                    '|platform/ext-runtime/nodejs/test/'
                    '|platform/gsutil/third_party/apitools/ez_setup'
                    '|platform/gsutil/third_party/crcmod_osx/crcmod/test)')
            else:
                regex_exclusion = None

        # The self.sdk_root pathname could contain unicode chars and py_compile
        # chokes on unicode paths. Using relative paths from self.sdk_root works
        # around the problem.
        with file_utils.ChDir(self.sdk_root):
            to_compile = [
                os.path.join('bin', 'bootstrapping'),
                os.path.join('data', 'cli'),
                'lib',
                'platform',
            ]
            for d in to_compile:
                # Using 2 for quiet, in python 2.7 this value is used as a bool in the
                # implementation and bool(2) is True. Starting in python 3.5 this
                # parameter was changed to a multilevel value, where 1 hides files
                # being processed and 2 suppresses output.
                compileall.compile_dir(d,
                                       rx=regex_exclusion,
                                       quiet=2,
                                       force=True)
    def Deploy(self, service, new_version, code_bucket_ref, image,
               all_services):
        """Deploy the given service.

    Performs all deployment steps for the given service (if applicable):
    * Enable endpoints (for beta deployments)
    * Build and push the Docker image (Flex only, if image_url not provided)
    * Upload files (non-hermetic deployments)
    * Create the new version
    * Promote the version to receive all traffic (if --promote given (default))
    * Stop the previous version (if new version promoted and
      --stop-previous-version given (default))

    Args:
      service: yaml_parsing.ServiceYamlInfo, service configuration to be
        deployed
      new_version: version_util.Version describing where to deploy the service
      code_bucket_ref: cloud_storage.BucketReference where the service's files
        have been uploaded
      image: str or None, the URL for the Docker image to be deployed (if image
        already exists).
      all_services: dict of service ID to service_util.Service objects
        corresponding to all pre-existing services (used to determine how to
        promote this version to receive all traffic, if applicable).
    """
        log.status.Print(
            'Beginning deployment of service [{service}]...'.format(
                service=new_version.service))

        # If staging produced a staged app.yaml, deploy from its directory;
        # otherwise deploy from the current working directory.
        with self.stager.Stage(service.file, service.runtime,
                               service.env) as app_yaml:
            if app_yaml:
                app_dir = os.path.dirname(app_yaml)
            else:
                app_dir = os.getcwd()
            with files.ChDir(app_dir):
                endpoints_info = self._PossiblyConfigureEndpoints(
                    service, new_version)
                # May replace `image` with a freshly built/pushed image URL.
                image = self._PossiblyBuildAndPush(new_version, service, image,
                                                   code_bucket_ref)
                manifest = _UploadFiles(service, code_bucket_ref)

                # Actually create the new version of the service.
                message = 'Updating service [{service}]'.format(
                    service=new_version.service)
                with progress_tracker.ProgressTracker(message):
                    self.api_client.DeployService(new_version.service,
                                                  new_version.id, service,
                                                  manifest, image,
                                                  endpoints_info)
                    metrics.CustomTimedEvent(metric_names.DEPLOY_API)
                    self._PossiblyPromote(all_services, new_version)
Esempio n. 9
0
 def testMakeTarball_BrokenSymlink(self):
   """A broken symlink must be left out of the tarball."""
   project_dir = self.CreateTempDir('project')  # Directory to snapshot.
   self._writeFile(os.path.join(project_dir, 'Dockerfile'), 'empty')
   os.symlink(os.path.join(project_dir, 'does-not-exist'),
              os.path.join(project_dir, 'link'))
   with files.ChDir(project_dir):
     with files.TemporaryDirectory() as scratch:
       tar_path = os.path.join(scratch, 'file.tgz')
       tarball = snapshot.Snapshot(project_dir)._MakeTarball(tar_path)
       # Only the Dockerfile should have been archived.
       self.assertEqual(len(tarball.getmembers()), 1)
       self.assertEqual(tarball.getmember('Dockerfile').size, 5)
       # Delete the broken symlink, it breaks test cleanup.
       os.remove('link')
       tarball.close()
Esempio n. 10
0
 def testMakeTarball_gcloudignore(self):
   """Files matched by .gcloudignore must be left out of the tarball."""
   project_dir = self.CreateTempDir('project')  # Directory to snapshot.
   self._writeFile(os.path.join(project_dir, 'Dockerfile'), 'empty')
   self._writeFile(os.path.join(project_dir, 'file_to_ignore'), 'empty')
   self._writeFile(os.path.join(project_dir, '.gcloudignore'),
                   '.*\nfile_to_ignore')
   with files.ChDir(project_dir):
     with files.TemporaryDirectory() as scratch:
       tar_path = os.path.join(scratch, 'file.tgz')
       tarball = snapshot.Snapshot(project_dir)._MakeTarball(tar_path)
       # Only the Dockerfile survives; dotfiles and file_to_ignore are
       # excluded by the ignore rules.
       self.assertEqual(len(tarball.getmembers()), 1)
       self.assertEqual(tarball.getmember('Dockerfile').size, 5)
       tarball.close()
Esempio n. 11
0
 def testMakeTarball_NestedDir(self):
   """A file in a nested directory brings its parent dirs into the tar."""
   project_dir = self.CreateTempDir('project')  # Directory to snapshot.
   self._writeFile(os.path.join(project_dir, 'path', 'to', 'Dockerfile'),
                   'empty')
   with files.ChDir(project_dir):
     with files.TemporaryDirectory() as scratch:
       tar_path = os.path.join(scratch, 'file.tgz')
       tarball = snapshot.Snapshot(project_dir)._MakeTarball(tar_path)
       # Expect the file plus the two directory entries above it.
       self.assertEqual(len(tarball.getmembers()), 3)
       self.assertEqual(tarball.getmember('path/to/Dockerfile').size, 5)
       self.assertTrue(tarball.getmember('path').isdir())
       self.assertTrue(tarball.getmember('path/to').isdir())
       tarball.close()
Esempio n. 12
0
 def testMakeTarball_Symlink(self):
   """A valid symlink is archived as a link entry with no payload."""
   project_dir = self.CreateTempDir('project')  # Directory to snapshot.
   self._writeFile(os.path.join(project_dir, 'Dockerfile'), 'empty')
   os.symlink(os.path.join(project_dir, 'Dockerfile'),
              os.path.join(project_dir, 'link'))
   with files.ChDir(project_dir):
     with files.TemporaryDirectory() as scratch:
       tar_path = os.path.join(scratch, 'file.tgz')
       tarball = snapshot.Snapshot(project_dir)._MakeTarball(tar_path)
       self.assertEqual(len(tarball.getmembers()), 2)
       self.assertEqual(tarball.getmember('Dockerfile').size, 5)
       # Symlink members carry no data of their own.
       self.assertEqual(tarball.getmember('link').size, 0)
       tarball.close()
Esempio n. 13
0
def _APIProxyArchive(name, revision, basepath, target_url):
    """Creates a simple API proxy config archive.

  Args:
    name: the name of the API proxy to be configured.
    revision: the API proxy revision to be configured.
    basepath: the path where users would send requests to the API proxy.
    target_url: the URL to which the API proxy should send requests.

  Yields:
    a temporary ZIP archive of the proxy configuration which will be deleted
    upon exiting the context.
  """
    # Values destined for XML attributes vs. element text need different
    # escaping.
    format_params = {
        "name": saxutils.quoteattr(name),
        "revision": saxutils.quoteattr(six.text_type(revision)),
        "basepath": saxutils.escape(basepath),
        "target_url": saxutils.escape(target_url)
    }
    with files.TemporaryDirectory() as archive_dir:
        with files.ChDir(archive_dir):
            # Lay out the apiproxy/ config tree expected inside the archive.
            files.MakeDir("apiproxy")
            manifest_filename = os.path.join("apiproxy", name + ".xml")
            with open(manifest_filename, "w") as top_xml:
                top_xml.write(
                    _API_REVISION_XML_TEMPLATE.format(**format_params))

            proxies_dir = os.path.join("apiproxy", "proxies")
            proxies_filename = os.path.join(proxies_dir, "default.xml")
            files.MakeDir(proxies_dir)
            with open(proxies_filename, "w") as proxy_xml:
                proxy_xml.write(_PROXY_XML_TEMPLATE.format(**format_params))

            targets_dir = os.path.join("apiproxy", "targets")
            targets_filename = os.path.join(targets_dir, "default.xml")
            files.MakeDir(targets_dir)
            with open(targets_filename, "w") as target_xml:
                target_xml.write(_TARGET_XML_TEMPLATE.format(**format_params))

            # Use a with-statement so the archive file handle is closed even
            # if building the ZIP raises; the previous try/finally only
            # covered the yield, leaking the fd when an earlier step failed.
            with open("config.zip", "wb+") as archive_file:
                with zipfile.ZipFile(archive_file, "w") as archive:
                    archive.write(manifest_filename)
                    archive.write(proxies_filename)
                    archive.write(targets_filename)
                # Rewind so the caller can read the archive from the start.
                archive_file.flush()
                archive_file.seek(0)
                yield archive_file
Esempio n. 14
0
 def testMakeTarball(self):
   """A single file is archived with uid/gid zeroed out."""
   project_dir = self.CreateTempDir('project')  # Directory to snapshot.
   self._writeFile(os.path.join(project_dir, 'Dockerfile'), 'empty')
   with files.ChDir(project_dir):
     with files.TemporaryDirectory() as scratch:
       tar_path = os.path.join(scratch, 'file.tgz')
       tarball = snapshot.Snapshot(project_dir)._MakeTarball(tar_path)
       self.assertEqual(len(tarball.getmembers()), 1)
       member = tarball.getmember('Dockerfile')
       self.assertEqual(member.size, 5)
       # Ownership in the archive is expected to be normalized to 0/0.
       self.assertEqual(member.uid, 0)
       self.assertEqual(member.gid, 0)
       tarball.close()
Esempio n. 15
0
 def testMakeTarball_EmptyDir(self):
   """An empty directory is archived and keeps its permission bits."""
   project_dir = self.CreateTempDir('project')  # Directory to snapshot.
   os.mkdir(os.path.join(project_dir, 'emptydir'))
   os.chmod(os.path.join(project_dir, 'emptydir'), 0o777)
   self._writeFile(os.path.join(project_dir, 'Dockerfile'), 'empty')
   with files.ChDir(project_dir):
     with files.TemporaryDirectory() as scratch:
       tar_path = os.path.join(scratch, 'file.tgz')
       tarball = snapshot.Snapshot(project_dir)._MakeTarball(tar_path)
       self.assertEqual(len(tarball.getmembers()), 2)
       self.assertEqual(tarball.getmember('Dockerfile').size, 5)
       self.assertTrue(tarball.getmember('emptydir').isdir())
       # Compare only the rwx bits for owner/group/other.
       permission_mask = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO
       self.assertEqual(tarball.getmember('emptydir').mode & permission_mask,
                        0o777)
       tarball.close()
Esempio n. 16
0
    def CompilePythonFiles(self):
        """Attempts to compile all the python files into .pyc files.

    This does not raise exceptions if compiling a given file fails.
    """
        # py_compile chokes on unicode paths and self.sdk_root may contain
        # unicode chars; compiling via relative paths avoids the problem.
        with file_utils.ChDir(self.sdk_root):
            for target in [
                    os.path.join('bin', 'bootstrapping'),
                    'lib',
                    'platform',
            ]:
                compileall.compile_dir(
                    console_attr.DecodeFromInput(target), quiet=True)
Esempio n. 17
0
def Update(cli):
  """Create or overwrite static completion table.

  Args:
    cli: Calliope CLI object for generating the completion table.
  """
  # Regenerate the table and overwrite its source file.
  with open(_TablePath(), 'w') as table_file:
    completion_table = CompletionTableGenerator(cli).Walk(hidden=False)
    table_file.write('table=')
    pprint(completion_table, table_file)
  # py_compile chokes on unicode paths and _TableDirPath() could contain
  # unicode chars; compiling relative to that directory works around it.
  with files.ChDir(_TableDirPath()):
    # Pre-compile the table source so later loads are fast.
    compileall.compile_dir('.', quiet=True)
Esempio n. 18
0
    def CopyTarballToGCS(self,
                         storage_client,
                         gcs_object,
                         ignore_file=None,
                         hide_logs=False):
        """Copy a tarball of the snapshot to GCS.

    Args:
      storage_client: storage_api.StorageClient, The storage client to use for
        uploading.
      gcs_object: storage.objects Resource, The GCS object to write.
      ignore_file: Override .gcloudignore file to specify skip files.
      hide_logs: boolean, not print the status message if the flag is true.

    Returns:
      storage_v1_messages.Object, The written GCS object.
    """
        with metrics.RecordDuration(metric_names.UPLOAD_SOURCE):
            with files.ChDir(self.src_dir):
                with files.TemporaryDirectory() as tmp:
                    # Build the tarball in a scratch directory, then upload.
                    archive_path = os.path.join(tmp, 'file.tgz')
                    tf = self._MakeTarball(archive_path)
                    tf.close()
                    # Prefer the caller-supplied ignore file over the default.
                    ignore_file_path = os.path.join(
                        self.src_dir, ignore_file
                        or gcloudignore.IGNORE_FILE_NAME)
                    if self.any_files_ignored:
                        if os.path.exists(ignore_file_path):
                            log.info('Using ignore file [{}]'.format(
                                ignore_file_path))
                        elif not hide_logs:
                            # Files were skipped without an ignore file on
                            # disk; tell the user where to find the details.
                            log.status.Print(
                                _IGNORED_FILE_MESSAGE.format(
                                    log_file=log.GetLogFilePath()))
                    if not hide_logs:
                        log.status.write(
                            'Uploading tarball of [{src_dir}] to '
                            '[gs://{bucket}/{object}]\n'.format(
                                src_dir=self.src_dir,
                                bucket=gcs_object.bucket,
                                object=gcs_object.object,
                            ), )
                    return storage_client.CopyFileToGCS(
                        archive_path, gcs_object)
Esempio n. 19
0
    def CompilePythonFiles(self):
        """Attempts to compile all the python files into .pyc files.

    This does not raise exceptions if compiling a given file fails.
    """
        # py_compile chokes on unicode paths and self.sdk_root may contain
        # unicode chars; compiling via relative paths avoids the problem.
        with file_utils.ChDir(self.sdk_root):
            # rx skips the unused Python3 directory vendored with gsutil's
            # copy of httplib2.
            python3_rx = re.compile('python3')
            for target in [
                    os.path.join('bin', 'bootstrapping'),
                    'lib',
                    'platform',
            ]:
                compileall.compile_dir(
                    encoding.Decode(target), rx=python3_rx, quiet=True)
Esempio n. 20
0
    def _RunCloneAndAssert(self, name, project):
        """Clone the shared test repo and verify git works inside the clone."""
        target_dir = os.path.join(self.temp_path, 'tmp-' + name)
        self.Run([
            'source', 'repos', 'clone', 'do-not-delete-gcloud-tests-repo',
            target_dir, '--project', project
        ])
        self.AssertOutputEquals('', normalize_space=True)

        with files.ChDir(target_dir):
            status_proc = subprocess.Popen(['git', 'status'])
            status_proc.communicate()
            self.assertEqual(0, status_proc.returncode)

            # Switch to a bogus account so git must authenticate on its own.
            properties.VALUES.core.account.Set(
                'some-other-nonexistant account')
            pull_proc = subprocess.Popen(['git', 'pull'])
            pull_proc.communicate()
            self.assertEqual(0, pull_proc.returncode)
    def Run(self, args):
        """Run a custom training job locally inside a Docker container.

        Optionally builds a training image from the user's local package
        first, then runs it and cleans up generated Python caches.

        Args:
          args: argparse.Namespace of local-run flags; reads
            local_package_path, script, python_module, executor_image_uri,
            base_image, requirements, extra_packages, extra_dirs,
            output_image_uri, gpu, service_account_key_file and args.
        """
        args = validation.ValidateLocalRunArgs(args)

        with files.ChDir(args.local_package_path):
            log.status.Print('Package is set to {}.'.format(
                args.local_package_path))
            # Prefer an explicit executor image; fall back to the base image.
            executable_image = args.executor_image_uri or args.base_image

            if args.script:
                # A script was given: build a new image wrapping it on top of
                # the executable image, and run that instead.
                # TODO(b/176214485): Consider including the image id in build result.
                built_image = docker_builder.BuildImage(
                    base_image=executable_image,
                    host_workdir=args.local_package_path,
                    main_script=args.script,
                    python_module=args.python_module,
                    requirements=args.requirements,
                    extra_packages=args.extra_packages,
                    extra_dirs=args.extra_dirs,
                    output_image_name=args.output_image_uri)
                executable_image = built_image.name
                log.status.Print('A training image is built.')

            log.status.Print('Starting to run ...')
            docker_runner.RunContainer(
                image_name=executable_image,
                enable_gpu=args.gpu,
                service_account_key=args.service_account_key_file,
                user_args=args.args)

            log.out.Print(
                'A local run is finished successfully using custom image: {}.'.
                format(executable_image))

            # Clean generated cache in the directory containing the script
            # (or the package root when no script was given).
            # NOTE(review): ClearPyCache presumably removes Python bytecode
            # caches under cache_dir -- confirm in local_util.
            cache_dir, _ = os.path.split(
                os.path.join(args.local_package_path, args.script or ''))
            if local_util.ClearPyCache(cache_dir):
                log.status.Print(
                    'Cleaned Python cache from directory: {}'.format(
                        cache_dir))
Esempio n. 22
0
    def CompilePythonFiles(self):
        """Attempts to compile all the python files into .pyc files.

    This does not raise exceptions if compiling a given file fails.
    """
        # py_compile chokes on unicode paths and self.sdk_root may contain
        # unicode chars; compiling via relative paths avoids the problem.
        with file_utils.ChDir(self.sdk_root):
            # rx skips the unused Python3 directory vendored with gsutil's
            # copy of httplib2. quiet=2: in python 2.7 the value is merely
            # truthy (bool(2) is True); from python 3.5 on, 1 hides the files
            # being processed and 2 suppresses all output.
            python3_rx = re.compile('python3')
            for target in [
                    os.path.join('bin', 'bootstrapping'),
                    os.path.join('data', 'cli'),
                    'lib',
                    'platform',
            ]:
                compileall.compile_dir(target, rx=python3_rx, quiet=2)
Esempio n. 23
0
 def testRequiresImageLocalAppYaml(self):
   """A dart VM-runtime app.yaml parsed locally should require an image."""
   self.WriteVmRuntime('app.yaml', 'dart')
   with files.ChDir(self.temp_path):
     service_info = yaml_parsing.ServiceYamlInfo.FromFile('app.yaml')
     self.AssertModule(service_info, 'default', env=app_env.MANAGED_VMS)
     self.assertTrue(service_info.RequiresImage())
    def Clone(self, destination_path, dry_run=False):
        """Clone a git repository into a gcloud workspace.

    If the resulting clone does not have a .gcloud directory, create one. Also,
    sets the credential.helper to use the gcloud credential helper.

    Args:
      destination_path: str, The relative path for the repository clone.
      dry_run: bool, If true do not run but print commands instead.

    Returns:
      str, The absolute path of cloned repository, or None if a correctly
          configured clone already exists at the destination.

    Raises:
      CannotInitRepositoryException: If there is already a file or directory in
          the way of creating this repository.
      CannotFetchRepositoryException: If there is a problem fetching the
          repository from the remote host, or if the repository is otherwise
          misconfigured.
    """
        abs_repository_path = os.path.abspath(destination_path)
        if os.path.exists(abs_repository_path):
            CheckGitVersion(
            )  # Do this here, before we start running git commands
            # First check if it's already the repository we're looking for.
            with files.ChDir(abs_repository_path) as _:
                try:
                    output = subprocess.check_output(
                        ['git', 'remote', 'show', 'origin'])
                except subprocess.CalledProcessError:
                    raise CannotFetchRepositoryException(
                        'Repository in [{path}] is misconfigured.'.format(
                            path=abs_repository_path))
                # NOTE(review): check_output returns bytes on python 3; this
                # assumes _ORIGIN_URL_RE matches that (bytes pattern or
                # python 2 only) -- confirm.
                output_match = _ORIGIN_URL_RE.search(output)
                if not output_match or output_match.group('url') != self._uri:
                    raise CannotInitRepositoryException((
                        'Repository [{url}] cannot be cloned to [{path}]: there'
                        ' is something already there.').format(
                            url=self._uri, path=abs_repository_path))
                else:
                    # Repository exists and is correctly configured: abort.
                    log.err.Print((
                        'Repository in [{path}] already exists and maps to [{uri}].'
                        .format(path=abs_repository_path, uri=self._uri)))
                    return None

        # Nothing is there, make a brand new repository.
        try:
            if (self._uri.startswith('https://code.google.com')
                    or self._uri.startswith(
                        'https://source.developers.google.com')):

                # If this is a Google-hosted repo, clone with the cred helper.
                try:
                    CheckGitVersion(_HELPER_MIN)
                except GitVersionException:
                    # Old git cannot use the credential helper: warn the user
                    # how to authenticate manually and clone without it.
                    log.warn(
                        textwrap.dedent("""\
              You are cloning a Google-hosted repository with a version of git
              older than 1.7.9. If you upgrade to 1.7.9 or later, gcloud can
              handle authentication to this repository. Otherwise, to
              authenticate, use your Google account and the password found by
              running the following command.
               $ gcloud auth print-refresh-token
              """))
                    cmd = ['git', 'clone', self._uri, abs_repository_path]
                else:
                    cmd = [
                        'git', 'clone', self._uri, abs_repository_path,
                        '--config', 'credential.helper="{0}"'.format(
                            _GetCredentialHelper())
                    ]
                self._RunCommand(cmd, dry_run)
            else:
                # Otherwise, just do a simple clone. We do this clone, without the
                # credential helper, because a user may have already set a default
                # credential helper that would know the repo's auth info.
                subprocess.check_call(
                    ['git', 'clone', self._uri, abs_repository_path])
        except subprocess.CalledProcessError as e:
            raise CannotFetchRepositoryException(e)
        return abs_repository_path
Esempio n. 25
0
  def CloneGitRepository(self, repository_url, repository_path):
    """Clone a git repository into a gcloud workspace.

    If the resulting clone does not have a .gcloud directory, create one. Also,
    sets the credential.helper to use the gcloud credential helper.

    Args:
      repository_url: str, The URL of the repository to clone.
      repository_path: str, The relative path from the root of the workspace to
          the repository clone.

    Raises:
      InvalidWorkspaceException: If workspace_dir_path is not a workspace.
      CannotInitRepositoryException: If there is already a file or directory in
          the way of creating this repository.
      CannotFetchRepositoryException: If there is a problem fetching the
          repository from the remote host, or if the repository is otherwise
          misconfigured.
    """
    abs_repository_path = os.path.join(self.root_directory, repository_path)

    if os.path.exists(abs_repository_path):
      # Something already lives at the target path. Inspect its git remote to
      # decide whether it is the clone we were asked for.
      with files.ChDir(abs_repository_path) as _:
        try:
          remote_info = compat26.subprocess.check_output(
              ['git', 'remote', 'show', 'origin'])
        except subprocess.CalledProcessError:
          raise CannotFetchRepositoryException(
              'Repository in [{path}] is misconfigured.'.format(
                  path=abs_repository_path))
        origin_match = _ORIGIN_URL_RE.search(remote_info)
        if origin_match and origin_match.group('url') == repository_url:
          # Repository exists and is correctly configured: abort.
          log.err.Print(
              ('Repository in [{path}] exists and is correctly configured.'
               .format(path=abs_repository_path)))
          return
        # The directory holds something else entirely; refuse to clobber it.
        raise CannotInitRepositoryException(
            ('Repository [{url}] cannot be cloned to [{path}]: there'
             ' is something already there.').format(
                 url=repository_url,
                 path=os.path.join(self.root_directory, repository_path)))

    # Nothing is there, make a brand new repository.
    is_google_hosted = repository_url.startswith(
        ('https://code.google.com', 'https://source.developers.google.com'))
    try:
      if is_google_hosted:
        # If this is a Google-hosted repo, clone with the cred helper.
        try:
          CheckGitVersion(_HELPER_MIN)
        except GitVersionException:
          # Old git cannot use the credential helper; warn and clone plainly.
          log.warn(textwrap.dedent("""\
              You are cloning a Google-hosted repository with a version of git
              older than 1.7.9. If you upgrade to 1.7.9 or later, gcloud can
              handle authentication to this repository. Otherwise, to
              authenticate, use your Google account and the password found by
              running the following command.
               $ gcloud auth print-refresh-token
              """))
          subprocess.check_call(
              ['git', 'clone', repository_url, abs_repository_path])
        else:
          if (platforms.OperatingSystem.Current() ==
              platforms.OperatingSystem.WINDOWS):
            helper_name = 'gcloud.cmd'
          else:
            helper_name = 'gcloud.sh'
          subprocess.check_call(
              ['git', 'clone', repository_url, abs_repository_path,
               '--config', 'credential.helper=%s' % helper_name])
      else:
        # Otherwise, just do a simple clone. We do this clone, without the
        # credential helper, because a user may have already set a default
        # credential helper that would know the repo's auth info.
        subprocess.check_call(
            ['git', 'clone', repository_url, abs_repository_path])
    except subprocess.CalledProcessError as e:
      raise CannotFetchRepositoryException(e)
Esempio n. 26
0
def CompileAll(directory):
    """Byte-compiles every Python file under *directory*, recursively.

    py_compile cannot handle unicode paths, so instead of handing it the
    (possibly unicode) absolute directory, we change into the directory and
    compile everything via relative paths.
    """
    with files.ChDir(directory):
        compileall.compile_dir('.', quiet=True)
Esempio n. 27
0
 def testBuildPackages_RelativePath(self):
     # Building with a package_dir given relative to the current working
     # directory should still produce the expected sdist.
     relative_package_dir = os.path.basename(self.package_dir)
     with files.ChDir(self.package_root):
         self._RunExpectingPackages(['trainer-0.0.0.tar.gz'],
                                    package_dir=relative_package_dir)
Esempio n. 28
0
def _DownloadAndInstallPython():
    """Downloads and runs the macOS Python installer.

    Runs curl / tar / the system installer in sequence inside a temporary
    directory, stopping at the first failing step.

    Returns:
        list of str, error messages; empty if every step succeeded.
    """
    with files.TemporaryDirectory() as tempdir:
        with files.ChDir(tempdir):
            curl_args = ['curl', '--silent', '-O', MACOS_PYTHON_URL]
            if execution_utils.Exec(curl_args, no_exit=True) != 0:
                return ['Failed to download Python installer']
            if execution_utils.Exec(['tar', '-xf', MACOS_PYTHON],
                                    no_exit=True) != 0:
                return ['Failed to extract Python installer']
            # NOTE(review): the installer package name is hard-coded here;
            # confirm it stays in sync with PYTHON_VERSION/MACOS_PYTHON_URL.
            installer_args = [
                'sudo', 'installer', '-target', '/', '-pkg',
                './python-3.7.9-macosx10.9.pkg'
            ]
            if execution_utils.Exec(installer_args, no_exit=True) != 0:
                return ['Installer failed.']
    return []


def _SetUpVirtualEnv(python_to_use):
    """Creates or updates the gcloud virtualenv, then enables it."""
    if os.path.isdir(config.Paths().virtualenv_dir):
        _CLI.Execute(['config', 'virtualenv', 'update'])
    else:
        _CLI.Execute([
            'config', 'virtualenv', 'create', '--python-to-use',
            python_to_use
        ])
    _CLI.Execute(['config', 'virtualenv', 'enable'])


def MaybeInstallPythonOnMac():
    """Optionally install Python on Mac machines.

    No-op unless running on Intel (x86_64, non-M1) macOS. Otherwise prompts
    the user and, on consent, downloads/runs the Python installer if needed,
    points CLOUDSDK_PYTHON at the new interpreter, and sets up the gcloud
    virtual environment. Prints the collected errors if installation fails.
    """
    # Guard clauses: only applies to Intel macOS.
    if platforms.OperatingSystem.Current() != platforms.OperatingSystem.MACOSX:
        return
    if platforms.Architecture.Current() != platforms.Architecture.x86_64:
        return
    if platforms.Platform.IsActuallyM1ArmArchitecture():
        return

    print(
        '\nGoogle Cloud CLI works best with Python {} and certain modules.\n'.
        format(PYTHON_VERSION))

    already_have_python_version = os.path.isdir(MACOS_PYTHON_INSTALL_PATH)
    if already_have_python_version:
        prompt = ('Python {} installation detected, install recommended'
                  ' modules?'.format(PYTHON_VERSION))
    else:
        prompt = 'Download and run Python {} installer?'.format(PYTHON_VERSION)
    if not console_io.PromptContinue(prompt_string=prompt, default=True):
        return

    install_errors = []
    if not already_have_python_version:
        print('Running Python {} installer, you may be prompted for sudo '
              'password...'.format(PYTHON_VERSION))
        install_errors = _DownloadAndInstallPython()

    if install_errors:
        print('Failed to install Python. Errors \n\n{}'.format(
            '\n*'.join(install_errors)))
        return

    python_to_use = '{}/bin/python3'.format(MACOS_PYTHON_INSTALL_PATH)
    # Make the freshly installed interpreter the one gcloud itself uses.
    os.environ['CLOUDSDK_PYTHON'] = python_to_use
    print('Setting up virtual environment')
    _SetUpVirtualEnv(python_to_use)