Example #1
 def _ValidateArgs(self, args, compute_client):
     instances_flags.ValidateNicFlags(args)
     instances_flags.ValidateNetworkTierArgs(args)
     daisy_utils.ValidateZone(args, compute_client)
     try:
         args.source_uri = daisy_utils.MakeGcsUri(args.source_uri)
     except resources.UnknownCollectionException:
         raise exceptions.InvalidArgumentException(
             'source-uri',
             'must be a path to an object or a directory in Cloud Storage')
Example #2
    def Run(self, args):
        compute_holder = base_classes.ComputeApiHolder(self.ReleaseTrack())

        self._ValidateInstanceName(args)
        self._CheckForExistingInstances(args.instance_name,
                                        compute_holder.client)

        instances_flags.ValidateNicFlags(args)
        instances_flags.ValidateNetworkTierArgs(args)

        log.warning('Importing OVF. This may take 40 minutes for smaller OVFs '
                    'and up to a couple of hours for larger OVFs.')

        machine_type = None
        if args.machine_type or args.custom_cpu or args.custom_memory:
            machine_type = instance_utils.InterpretMachineType(
                machine_type=args.machine_type,
                custom_cpu=args.custom_cpu,
                custom_memory=args.custom_memory,
                ext=getattr(args, 'custom_extensions', None),
                vm_type=getattr(args, 'custom_vm_type', None))

        try:
            source_uri = daisy_utils.MakeGcsUri(args.source_uri)
        except resources.UnknownCollectionException:
            raise exceptions.InvalidArgumentException(
                'source-uri',
                'must be a path to an object or a directory in Google Cloud Storage'
            )

        return daisy_utils.RunOVFImportBuild(
            args=args,
            compute_client=compute_holder.client,
            instance_name=args.instance_name,
            source_uri=source_uri,
            no_guest_environment=not args.guest_environment,
            can_ip_forward=args.can_ip_forward,
            deletion_protection=args.deletion_protection,
            description=args.description,
            labels=args.labels,
            machine_type=machine_type,
            network=args.network,
            network_tier=args.network_tier,
            subnet=args.subnet,
            private_network_ip=args.private_network_ip,
            no_restart_on_failure=not args.restart_on_failure,
            os=args.os,
            tags=args.tags,
            zone=properties.VALUES.compute.zone.Get(),
            project=args.project,
            output_filter=_OUTPUT_FILTER,
            compute_release_track=self.ReleaseTrack().id.lower()
            if self.ReleaseTrack() else None)
Example #3
    def Run(self, args):
        compute_holder = base_classes.ComputeApiHolder(self.ReleaseTrack())

        self._ValidateInstanceNames(args)
        self._CheckForExistingInstances(args.instance_names,
                                        compute_holder.client)

        instances_flags.ValidateNicFlags(args)
        instances_flags.ValidateNetworkTierArgs(args)

        log.warning('Importing OVF. This may take 40 minutes for smaller OVFs '
                    'and up to a couple of hours for larger OVFs.')

        machine_type = instance_utils.InterpretMachineType(
            machine_type=args.machine_type,
            custom_cpu=args.custom_cpu,
            custom_memory=args.custom_memory,
            ext=getattr(args, 'custom_extensions', None))

        return daisy_utils.RunOVFImportBuild(
            args=args,
            instance_names=args.instance_names,
            source_uri=daisy_utils.MakeGcsUri(args.source_uri),
            no_guest_environment=not args.guest_environment,
            can_ip_forward=args.can_ip_forward,
            deletion_protection=args.deletion_protection,
            description=args.description,
            labels=args.labels,
            machine_type=machine_type,
            network=args.network,
            network_tier=args.network_tier,
            subnet=args.subnet,
            private_network_ip=args.private_network_ip,
            no_restart_on_failure=not args.restart_on_failure,
            os=args.os,
            tags=args.tags,
            zone=properties.VALUES.compute.zone.Get(),
            project=args.project,
            output_filter=_OUTPUT_FILTER)
Example #4
    def Run(self, args):
        compute_holder = self._GetComputeApiHolder()
        compute_client = compute_holder.client
        messages = compute_client.messages

        self._ValidateArgs(args, compute_client)

        log.warning('Importing OVF. This may take 40 minutes for smaller OVFs '
                    'and up to a couple of hours for larger OVFs.')

        machine_type = None
        if args.machine_type or args.custom_cpu or args.custom_memory:
            machine_type = instance_utils.InterpretMachineType(
                machine_type=args.machine_type,
                custom_cpu=args.custom_cpu,
                custom_memory=args.custom_memory,
                ext=getattr(args, 'custom_extensions', None),
                vm_type=getattr(args, 'custom_vm_type', None))

        try:
            source_uri = daisy_utils.MakeGcsUri(args.source_uri)
        except resources.UnknownCollectionException:
            raise exceptions.InvalidArgumentException(
                'source-uri',
                'must be a path to an object or a directory in Cloud Storage')

        # The value of the attribute 'guest_os_features' can be a list, None,
        # or the attribute may not be present at all.
        # We treat the cases where it is None or not present as if the list
        # of features were empty. We need the trailing `or ()` rather than
        # passing () as a default value to getattr() to handle the case where
        # args.guest_os_features is present but is None.
        guest_os_features = getattr(args, 'guest_os_features', None) or ()
        uefi_compatible = (
            messages.GuestOsFeature.TypeValueValuesEnum.UEFI_COMPATIBLE.name
            in guest_os_features)

        return daisy_utils.RunInstanceOVFImportBuild(
            args=args,
            compute_client=compute_client,
            instance_name=args.instance_name,
            source_uri=source_uri,
            no_guest_environment=not args.guest_environment,
            can_ip_forward=args.can_ip_forward,
            deletion_protection=args.deletion_protection,
            description=args.description,
            labels=args.labels,
            machine_type=machine_type,
            network=args.network,
            network_tier=args.network_tier,
            subnet=args.subnet,
            private_network_ip=args.private_network_ip,
            no_restart_on_failure=not args.restart_on_failure,
            os=args.os,
            byol=getattr(args, 'byol', False),
            uefi_compatible=uefi_compatible,
            tags=args.tags,
            zone=properties.VALUES.compute.zone.Get(),
            project=args.project,
            output_filter=_OUTPUT_FILTER,
            release_track=(self.ReleaseTrack().id.lower()
                           if self.ReleaseTrack() else None),
            hostname=getattr(args, 'hostname', None),
            no_address=getattr(args, 'no_address', False),
            compute_service_account=getattr(args, 'compute_service_account',
                                            ''),
            scopes=getattr(args, 'scopes', None),
            no_scopes=getattr(args, 'no_scopes', False),
            service_account=getattr(args, 'service_account', None),
            no_service_account=getattr(args, 'no_service_account', False),
        )
Example #5
 def __init__(self, storage_client, args):
     self.source_file_gcs_uri = daisy_utils.MakeGcsUri(args.source_file)
     super(ImportFromGSFileStager, self).__init__(storage_client, args)
Example #6
 def _MakeGcsUri(self, uri):
     return daisy_utils.MakeGcsUri(uri)
    def Run(self, args):
        compute_holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
        # Fail early if the requested image name is invalid or already exists.
        _CheckImageName(args.image_name)
        _CheckForExistingImage(args.image_name, compute_holder)

        storage_client = storage_api.StorageClient()
        daisy_bucket = daisy_utils.GetAndCreateDaisyBucket(
            storage_client=storage_client)
        image_uuid = uuid.uuid4()

        daisy_vars = ['image_name={}'.format(args.image_name)]
        if args.source_image:
            # If we're starting from an image, then we've already imported it.
            workflow = _IMPORT_FROM_IMAGE_WORKFLOW
            daisy_vars.append('translate_workflow={}'.format(
                _GetTranslateWorkflow(args)))
            ref = resources.REGISTRY.Parse(
                args.source_image,
                collection='compute.images',
                params={'project': properties.VALUES.core.project.GetOrFail})
            # source_name should be of the form 'global/images/image-name'.
            source_name = ref.RelativeName()[len(ref.Parent().RelativeName() +
                                                 '/'):]
            daisy_vars.append('source_image={}'.format(source_name))
        else:
            # If the file is an OVA file, print a warning.
            if args.source_file.endswith('.ova'):
                log.warning(
                    'The specified input file may contain more than one virtual disk. '
                    'Only the first vmdk disk will be imported.')
            elif (args.source_file.endswith('.tar.gz')
                  or args.source_file.endswith('.tgz')):
                raise exceptions.BadFileException(
                    '`gcloud compute images import` does not support compressed '
                    'archives. Please extract your image and try again.\n If you got '
                    'this file by exporting an image from Compute Engine (e.g. by '
                    'using `gcloud compute images export`) then you can instead use '
                    '`gcloud compute images create` to create your image from your '
                    '.tar.gz file.')

            # Get the image into the scratch bucket, wherever it is now.
            if _IsLocalFile(args.source_file):
                gcs_uri = _UploadToGcs(args.async_, args.source_file,
                                       daisy_bucket, image_uuid,
                                       storage_client)
            else:
                source_file = daisy_utils.MakeGcsUri(args.source_file)
                gcs_uri = _CopyToScratchBucket(source_file, image_uuid,
                                               storage_client, daisy_bucket)

            # Import and (maybe) translate from the scratch bucket.
            daisy_vars.append('source_disk_file={}'.format(gcs_uri))
            if args.data_disk:
                workflow = _IMPORT_WORKFLOW
            else:
                workflow = _IMPORT_AND_TRANSLATE_WORKFLOW
                daisy_vars.append('translate_workflow={}'.format(
                    _GetTranslateWorkflow(args)))

        self._ProcessAdditionalArgs(args, daisy_vars)

        # TODO(b/79591894): Once we've cleaned up the Argo output, replace this
        # warning message with a ProgressTracker spinner.
        log.warning('Importing image. This may take up to 2 hours.')
        return daisy_utils.RunDaisyBuild(args,
                                         workflow,
                                         ','.join(daisy_vars),
                                         daisy_bucket=daisy_bucket,
                                         user_zone=args.zone,
                                         output_filter=_OUTPUT_FILTER)
 def testMakeGcsUriErrorOnNotGcsUri(self):
     with self.AssertRaisesExceptionMatches(
             resources.InvalidResourceException,
             r'could not parse resource [http://google.com]: unknown API host'
     ):
         daisy_utils.MakeGcsUri('http://google.com')
 def testMakeGcsUriBucketTrailingSlash(self):
     uri = 'gs://bucket/'
     result = daisy_utils.MakeGcsUri(uri)
     self.assertEqual(uri, result)
 def testMakeGcsUriBucket(self):
     uri = 'gs://bucket'
     result = daisy_utils.MakeGcsUri(uri)
     self.assertEqual(uri + '/', result)
 def testMakeGcsUriObject(self):
     uri = 'gs://bucket/file/a'
     result = daisy_utils.MakeGcsUri(uri)
     self.assertEqual(uri, result)
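
Taken together, the tests above outline the contract of daisy_utils.MakeGcsUri: a bare bucket URI gains a trailing slash, an object URI is returned unchanged, and a non-GCS URI is rejected by the underlying resource parser. The sketch below is illustrative only: it assumes the usual googlecloudsdk module paths, and normalize_source_uri is a hypothetical helper that mirrors how the Run() methods above convert a parse failure into a user-facing error.

 from googlecloudsdk.api_lib.compute import daisy_utils
 from googlecloudsdk.calliope import exceptions
 from googlecloudsdk.core import resources


 def normalize_source_uri(source_uri):
     # Hypothetical wrapper: delegate to MakeGcsUri and surface an
     # UnknownCollectionException as an InvalidArgumentException, the same
     # way the OVF import Run() methods above do for --source-uri.
     try:
         return daisy_utils.MakeGcsUri(source_uri)
     except resources.UnknownCollectionException:
         raise exceptions.InvalidArgumentException(
             'source-uri',
             'must be a path to an object or a directory in Cloud Storage')


 # Expected behavior, per the tests above:
 #   normalize_source_uri('gs://bucket')        -> 'gs://bucket/'
 #   normalize_source_uri('gs://bucket/')       -> 'gs://bucket/'
 #   normalize_source_uri('gs://bucket/file/a') -> 'gs://bucket/file/a'
 #   'http://google.com' raises resources.InvalidResourceException from the
 #   parser (see testMakeGcsUriErrorOnNotGcsUri); this wrapper does not
 #   translate that exception.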