Example #1
    def Run(self, args):
        log.warning('Importing image, this may take up to 1 hour.')

        storage_client = storage_api.StorageClient()
        daisy_bucket = daisy_utils.GetAndCreateDaisyBucket(
            storage_client=storage_client)

        # Copy image from source-uri to daisy scratch bucket
        image_file = os.path.basename(args.source_uri)
        dest_name = '{0}-{1}'.format(uuid.uuid4(), image_file)
        dest_path = 'gs://{0}/tmpimage/{1}'.format(daisy_bucket, dest_name)
        src_object = resources.REGISTRY.Parse(args.source_uri,
                                              collection='storage.objects')
        dest_object = resources.REGISTRY.Parse(dest_path,
                                               collection='storage.objects')
        log.status.write('\nCopying [{0}] to [{1}]\n'.format(
            args.source_uri, dest_path))
        storage_client.Rewrite(src_object, dest_object)

        variables = """source_disk_file={0},disk_size=50g,image_name={1}""".format(
            dest_path, args.image_name)

        tags = ['gce-daisy-image-import']
        return daisy_utils.RunDaisyBuild(args,
                                         _WORKFLOW,
                                         variables,
                                         daisy_bucket=daisy_bucket,
                                         tags=tags)
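
Aside: the scratch-bucket path and the Daisy variables string in Example #1 are plain string handling, so the pattern can be checked standalone. A minimal, self-contained sketch with made-up bucket and image names:

import os
import uuid

source_uri = 'gs://my-bucket/disks/image.vmdk'  # placeholder source
daisy_bucket = 'my-daisy-scratch-bucket'        # placeholder bucket name

# Mirrors the dest_path construction above.
image_file = os.path.basename(source_uri)
dest_name = '{0}-{1}'.format(uuid.uuid4(), image_file)
dest_path = 'gs://{0}/tmpimage/{1}'.format(daisy_bucket, dest_name)

variables = 'source_disk_file={0},disk_size=50g,image_name={1}'.format(
    dest_path, 'my-image')
print(variables)
# -> source_disk_file=gs://my-daisy-scratch-bucket/tmpimage/<uuid>-image.vmdk,disk_size=50g,image_name=my-image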
Example #2
  def Run(self, args):
    storage_client = storage_api.StorageClient()
    daisy_bucket = daisy_utils.GetAndCreateDaisyBucket(
        storage_client=storage_client)
    image_uuid = uuid.uuid4()

    daisy_vars = ['image_name={}'.format(args.image_name)]
    if args.source_image:
      # If we're starting from an image, then we've already imported it.
      workflow = _IMPORT_FROM_IMAGE_WORKFLOW
      daisy_vars.append(
          'translate_workflow={}'.format(_GetTranslateWorkflow(args)))
      ref = resources.REGISTRY.Parse(
          args.source_image,
          collection='compute.images',
          params={'project': properties.VALUES.core.project.GetOrFail})
      # source_name should be of the form 'global/images/image-name'.
      source_name = ref.RelativeName()[len(ref.Parent().RelativeName() + '/'):]
      daisy_vars.append('source_image={}'.format(source_name))
    else:
      # If the file is an OVA file, print a warning.
      if args.source_file.endswith('.ova'):
        log.warning('The specified input file may contain more than one '
                    'virtual disk. Only the first vmdk disk will be '
                    'imported. ')
      elif (args.source_file.endswith('.tar.gz')
            or args.source_file.endswith('.tgz')):
        raise exceptions.BadFileException(
            '"gcloud compute images import" does not support compressed '
            'archives. Please extract your image and try again.\n If you got '
            'this file by exporting an image from Compute Engine (e.g. by '
            'using "gcloud compute images export") then you can instead use '
            '"gcloud compute images create" to create your image from your '
            '.tar.gz file.')

      # Get the image into the scratch bucket, wherever it is now.
      if _IsLocalFile(args.source_file):
        # NOTE: 'async' became a reserved word in Python 3; newer gcloud
        # releases rename this flag's dest (e.g. accessed as args.async_).
        gcs_uri = _UploadToGcs(args.async, args.source_file,
                               daisy_bucket, image_uuid)
      else:
        source_file = _MakeGcsUri(args.source_file)
        gcs_uri = _CopyToScratchBucket(source_file, image_uuid,
                                       storage_client, daisy_bucket)

      # Import and (maybe) translate from the scratch bucket.
      daisy_vars.append('source_disk_file={}'.format(gcs_uri))
      if args.data_disk:
        workflow = _IMPORT_WORKFLOW
      else:
        workflow = _IMPORT_AND_TRANSLATE_WORKFLOW
        daisy_vars.append(
            'translate_workflow={}'.format(_GetTranslateWorkflow(args)))

    # TODO(b/79591894): Once we've cleaned up the Argo output, replace this
    # warning message with a ProgressTracker spinner.
    log.warning('Importing image. This may take up to 2 hours.')
    return daisy_utils.RunDaisyBuild(args, workflow, ','.join(daisy_vars),
                                     daisy_bucket=daisy_bucket,
                                     user_zone=args.zone)
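
Aside: the source_name slicing in Examples #2 through #4 is easiest to follow with plain strings. A self-contained sketch with placeholder resource names:

# ref.RelativeName() and ref.Parent().RelativeName(), as placeholder strings.
relative_name = 'projects/my-project/global/images/my-image'
parent_name = 'projects/my-project'

# Strip the parent prefix plus its trailing '/', leaving the
# 'global/images/image-name' form the workflow expects.
source_name = relative_name[len(parent_name + '/'):]
print(source_name)  # -> global/images/my-image

# The collected daisy_vars are handed to Daisy as one comma-separated string.
daisy_vars = ['image_name=my-image', 'source_image=' + source_name]
print(','.join(daisy_vars))
# -> image_name=my-image,source_image=global/images/my-image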
Example #3
    def Run(self, args):
        log.warning('Importing image, this may take up to 1 hour.')

        storage_client = storage_api.StorageClient()
        daisy_bucket = daisy_utils.GetAndCreateDaisyBucket(
            storage_client=storage_client)
        image_uuid = uuid.uuid4()

        daisy_vars = ['image_name={}'.format(args.image_name)]
        if args.source_image:
            # If we're starting from an image, then we've already imported it.
            workflow = _IMPORT_FROM_IMAGE_WORKFLOW
            daisy_vars.append('translate_workflow={}'.format(
                _GetTranslateWorkflow(args)))
            ref = resources.REGISTRY.Parse(
                args.source_image,
                collection='compute.images',
                params={'project': properties.VALUES.core.project.GetOrFail})
            # source_name should be of the form 'global/images/image-name'.
            source_name = ref.RelativeName()[len(ref.Parent().RelativeName() +
                                                 '/'):]
            daisy_vars.append('source_image={}'.format(source_name))
        else:
            # If the file is an OVA file, print a warning.
            if args.source_file.endswith('.ova'):
                log.warning(
                    'The specified input file may contain more than one '
                    'virtual disk. Only the first vmdk disk will be '
                    'imported. ')

            # Get the image into the scratch bucket, wherever it is now.
            if _IsLocalFile(args.source_file):
                gcs_uri = _UploadToGcs(args.async, args.source_file,
                                       daisy_bucket, image_uuid)
            else:
                source_file = _MakeGcsUri(args.source_file)
                gcs_uri = _CopyToScratchBucket(source_file, image_uuid,
                                               storage_client, daisy_bucket)

            # Import and (maybe) translate from the scratch bucket.
            daisy_vars.append('source_disk_file={}'.format(gcs_uri))
            if args.data_disk:
                workflow = _IMPORT_WORKFLOW
            else:
                workflow = _IMPORT_AND_TRANSLATE_WORKFLOW
                daisy_vars.append('translate_workflow={}'.format(
                    _GetTranslateWorkflow(args)))

        return daisy_utils.RunDaisyBuild(args,
                                         workflow,
                                         ','.join(daisy_vars),
                                         daisy_bucket=daisy_bucket,
                                         user_zone=args.zone)
Example #4
    def Run(self, args):
        log.warning('Importing image, this may take up to 1 hour.')

        storage_client = storage_api.StorageClient()
        daisy_bucket = daisy_utils.GetAndCreateDaisyBucket(
            storage_client=storage_client)
        image_uuid = uuid.uuid4()

        variables = ['image_name={}'.format(args.image_name)]
        if args.source_image:
            # If we're starting from an image, then we've already imported it.
            workflow = '{0}{1}'.format(_WORKFLOW_DIR,
                                       _GetTranslateWorkflow(args))
            ref = resources.REGISTRY.Parse(
                args.source_image,
                collection='compute.images',
                params={'project': properties.VALUES.core.project.GetOrFail})
            # source_name should be of the form 'global/images/image-name'.
            source_name = ref.RelativeName()[len(ref.Parent().RelativeName() +
                                                 '/'):]
            variables.append('source_image={}'.format(source_name))
        else:
            # Get the image into the scratch bucket, wherever it is now.
            if _IsLocalFile(args.source_file):
                gcs_uri = _UploadToGcs(args.async, args.source_file,
                                       daisy_bucket, image_uuid)
            else:
                source_file = _MakeGcsUri(args.source_file)
                gcs_uri = _CopyToScratchBucket(source_file, image_uuid,
                                               storage_client, daisy_bucket)

            # Import and (maybe) translate from the scratch bucket.
            variables.append('source_disk_file={}'.format(gcs_uri))
            if args.translate:
                workflow = _IMPORT_AND_TRANSLATE_WORKFLOW
                variables.append('translate_workflow={}'.format(
                    _GetTranslateWorkflow(args)))
            else:
                workflow = _IMPORT_WORKFLOW

        return daisy_utils.RunDaisyBuild(args,
                                         workflow,
                                         ','.join(variables),
                                         daisy_bucket=daisy_bucket)
Example #5
  def _CreateDaisyBucket(self):
    # Create a Daisy bucket in the same region as the source file in GS.
    self.daisy_bucket = daisy_utils.GetAndCreateDaisyBucket(
        storage_client=self.storage_client,
        bucket_location=self.storage_client.GetBucketLocationForFile(
            self.source_file_gcs_uri))
Example #6
  def _CreateDaisyBucket(self):
    # Create the Daisy bucket in the default GS location (US multi-regional).
    # This is the default behavior for every import type except import from a
    # file already in GS.
    self.daisy_bucket = daisy_utils.GetAndCreateDaisyBucket(
        storage_client=self.storage_client)
Example #7
  def _GetDaisyBucket(self, args):
    storage_client = storage_api.StorageClient()
    return daisy_utils.GetAndCreateDaisyBucket(
        storage_client=storage_client,
        bucket_location=storage_client.GetBucketLocationForFile(
            args.destination_uri))
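
Aside: Examples #5 through #7 differ only in whether a bucket_location is passed to GetAndCreateDaisyBucket. A hedged sketch folding both cases into one helper; source_file_gcs_uri is a hypothetical parameter, and the import paths are assumptions that may vary between googlecloudsdk releases:

# Sketch only: the module paths below are assumptions based on the snippets
# above and may not match every googlecloudsdk release.
from googlecloudsdk.api_lib.compute import daisy_utils
from googlecloudsdk.api_lib.storage import storage_api


def _get_daisy_bucket(source_file_gcs_uri=None):
  """Returns a Daisy scratch bucket, colocated with the source when known."""
  storage_client = storage_api.StorageClient()
  if source_file_gcs_uri:
    # Source already lives in GCS: colocate the scratch bucket with it,
    # as in Examples #5 and #7.
    return daisy_utils.GetAndCreateDaisyBucket(
        storage_client=storage_client,
        bucket_location=storage_client.GetBucketLocationForFile(
            source_file_gcs_uri))
  # Otherwise use the default bucket location, as in Example #6.
  return daisy_utils.GetAndCreateDaisyBucket(storage_client=storage_client)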