def Run(self, args):
  """Validates the requested image name, stages sources, and runs the import build."""
  api_holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
  # Fail early if the requested image name is invalid or already exists.
  _CheckImageName(args.image_name)
  _CheckForExistingImage(args.image_name, api_holder)

  stager = _CreateImportStager(storage_api.StorageClient(), args)
  daisy_vars, workflow = stager.Stage()
  self._ProcessAdditionalArgs(args, daisy_vars)

  # TODO(b/79591894): Once we've cleaned up the Argo output, replace this
  # warning message with a ProgressTracker spinner.
  log.warning('Importing image. This may take up to 2 hours.')
  return daisy_utils.RunDaisyBuild(
      args,
      workflow,
      ','.join(daisy_vars),
      tags=['gce-daisy-image-import'],
      daisy_bucket=stager.GetDaisyBucket(),
      user_zone=properties.VALUES.compute.zone.Get(),
      output_filter=_OUTPUT_FILTER,
      service_account_roles=self._GetServiceAccountRoles())
def Run(self, args):
  """Resolves the source image and runs the Daisy export build."""
  holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
  registry = holder.resources
  expander = image_utils.ImageExpander(holder.client, registry)
  expanded, _ = image_expander_result = (
      expander.ExpandImageFlag(
          user_project=properties.VALUES.core.project.GetOrFail(),
          image=args.image,
          image_family=args.image_family,
          image_project=args.image_project,
          return_image_resource=False),
      None)
  image_ref = registry.Parse(expanded[0], collection='compute.images')

  # Daisy variables are passed as a single comma-separated key=value string.
  parts = ['source_image={0},destination={1}'.format(
      image_ref.RelativeName(), args.destination_uri)]
  if args.export_format:
    workflow = _EXTERNAL_WORKFLOW
    parts.append('format={0}'.format(args.export_format.lower()))
  else:
    workflow = _DEFAULT_WORKFLOW
  if args.network:
    parts.append(
        'export_network=global/networks/{0}'.format(args.network.lower()))

  return daisy_utils.RunDaisyBuild(
      args,
      workflow,
      ','.join(parts),
      tags=['gce-daisy-image-export'],
      output_filter=_OUTPUT_FILTER)
def Run(self, args):
  """Copies the source file into the Daisy scratch bucket and runs the import build.

  Args:
    args: argparse.Namespace with source_uri and image_name.

  Returns:
    The result of the Daisy (Argo) cloud build.
  """
  # Fix: log.warn is deprecated; sibling commands in this file use log.warning.
  log.warning('Importing image, this may take up to 1 hour.')
  storage_client = storage_api.StorageClient()
  daisy_bucket = daisy_utils.GetAndCreateDaisyBucket(
      storage_client=storage_client)

  # Copy image from source-uri to daisy scratch bucket; the uuid prefix keeps
  # concurrent imports of identically-named files from colliding.
  image_file = os.path.basename(args.source_uri)
  dest_name = '{0}-{1}'.format(uuid.uuid4(), image_file)
  dest_path = 'gs://{0}/tmpimage/{1}'.format(daisy_bucket, dest_name)
  src_object = resources.REGISTRY.Parse(args.source_uri,
                                        collection='storage.objects')
  dest_object = resources.REGISTRY.Parse(dest_path,
                                         collection='storage.objects')
  log.status.write('\nCopying [{0}] to [{1}]\n'.format(
      args.source_uri, dest_path))
  storage_client.Rewrite(src_object, dest_object)

  variables = 'source_disk_file={0},disk_size=50g,image_name={1}'.format(
      dest_path, args.image_name)
  return daisy_utils.RunDaisyBuild(args, _WORKFLOW, variables,
                                   daisy_bucket=daisy_bucket,
                                   tags=['gce-daisy-image-import'])
def Run(self, args):
  """Stages the import source and runs the Daisy import (and translate) build.

  Either imports an existing Compute Engine image (args.source_image) or
  uploads/copies a disk file into the Daisy scratch bucket first
  (args.source_file), then runs the appropriate Daisy workflow.

  Args:
    args: argparse.Namespace from the import command.

  Returns:
    The result of the Daisy (Argo) cloud build.

  Raises:
    exceptions.BadFileException: if the source file is a compressed archive.
  """
  storage_client = storage_api.StorageClient()
  daisy_bucket = daisy_utils.GetAndCreateDaisyBucket(
      storage_client=storage_client)
  image_uuid = uuid.uuid4()

  daisy_vars = ['image_name={}'.format(args.image_name)]
  if args.source_image:
    # If we're starting from an image, then we've already imported it.
    workflow = _IMPORT_FROM_IMAGE_WORKFLOW
    daisy_vars.append(
        'translate_workflow={}'.format(_GetTranslateWorkflow(args)))
    ref = resources.REGISTRY.Parse(
        args.source_image,
        collection='compute.images',
        params={'project': properties.VALUES.core.project.GetOrFail})
    # source_name should be of the form 'global/images/image-name'.
    source_name = ref.RelativeName()[len(ref.Parent().RelativeName() + '/'):]
    daisy_vars.append('source_image={}'.format(source_name))
  else:
    # If the file is an OVA file, print a warning.
    if args.source_file.endswith('.ova'):
      log.warning('The specified input file may contain more than one '
                  'virtual disk. Only the first vmdk disk will be '
                  'imported. ')
    elif (args.source_file.endswith('.tar.gz') or
          args.source_file.endswith('.tgz')):
      raise exceptions.BadFileException(
          '"gcloud compute images import" does not support compressed '
          'archives. Please extract your image and try again.\n If you got '
          'this file by exporting an image from Compute Engine (e.g. by '
          'using "gcloud compute images export") then you can instead use '
          '"gcloud compute images create" to create your image from your '
          '.tar.gz file.')

    # Get the image into the scratch bucket, wherever it is now.
    if _IsLocalFile(args.source_file):
      # Fix: 'async' became a reserved keyword in Python 3.7, so the argparse
      # attribute must be read via getattr; behavior is unchanged.
      gcs_uri = _UploadToGcs(getattr(args, 'async'), args.source_file,
                             daisy_bucket, image_uuid)
    else:
      source_file = _MakeGcsUri(args.source_file)
      gcs_uri = _CopyToScratchBucket(source_file, image_uuid,
                                     storage_client, daisy_bucket)

    # Import and (maybe) translate from the scratch bucket.
    daisy_vars.append('source_disk_file={}'.format(gcs_uri))
    if args.data_disk:
      workflow = _IMPORT_WORKFLOW
    else:
      workflow = _IMPORT_AND_TRANSLATE_WORKFLOW
      daisy_vars.append(
          'translate_workflow={}'.format(_GetTranslateWorkflow(args)))

  # TODO(b/79591894): Once we've cleaned up the Argo output, replace this
  # warning message with a ProgressTracker spinner.
  log.warning('Importing image. This may take up to 2 hours.')
  return daisy_utils.RunDaisyBuild(args, workflow, ','.join(daisy_vars),
                                   daisy_bucket=daisy_bucket,
                                   user_zone=args.zone)
def Run(self, args):
  """Runs the image-translate workflow selected by --os or --custom-workflow."""
  # --os picks a canned workflow; otherwise the user supplies their own.
  chosen = _OS_CHOICES[args.os] if args.os else args.custom_workflow
  variables = 'source_image=global/images/{0},image_name={1}'.format(
      args.source_image, args.destination_image)
  return daisy_utils.RunDaisyBuild(
      args, '../workflows/image_import/{0}'.format(chosen), variables)
def Run(self, args):
  """Stages the import source and runs the Daisy import (and translate) build.

  Args:
    args: argparse.Namespace from the import command.

  Returns:
    The result of the Daisy (Argo) cloud build.
  """
  log.warning('Importing image, this may take up to 1 hour.')
  storage_client = storage_api.StorageClient()
  daisy_bucket = daisy_utils.GetAndCreateDaisyBucket(
      storage_client=storage_client)
  image_uuid = uuid.uuid4()

  daisy_vars = ['image_name={}'.format(args.image_name)]
  if args.source_image:
    # If we're starting from an image, then we've already imported it.
    workflow = _IMPORT_FROM_IMAGE_WORKFLOW
    daisy_vars.append('translate_workflow={}'.format(
        _GetTranslateWorkflow(args)))
    ref = resources.REGISTRY.Parse(
        args.source_image,
        collection='compute.images',
        params={'project': properties.VALUES.core.project.GetOrFail})
    # source_name should be of the form 'global/images/image-name'.
    source_name = ref.RelativeName()[len(ref.Parent().RelativeName() + '/'):]
    daisy_vars.append('source_image={}'.format(source_name))
  else:
    # If the file is an OVA file, print a warning.
    if args.source_file.endswith('.ova'):
      log.warning(
          'The specified input file may contain more than one '
          'virtual disk. Only the first vmdk disk will be '
          'imported. ')

    # Get the image into the scratch bucket, wherever it is now.
    if _IsLocalFile(args.source_file):
      # Fix: 'async' became a reserved keyword in Python 3.7, so the argparse
      # attribute must be read via getattr; behavior is unchanged.
      gcs_uri = _UploadToGcs(getattr(args, 'async'), args.source_file,
                             daisy_bucket, image_uuid)
    else:
      source_file = _MakeGcsUri(args.source_file)
      gcs_uri = _CopyToScratchBucket(source_file, image_uuid,
                                     storage_client, daisy_bucket)

    # Import and (maybe) translate from the scratch bucket.
    daisy_vars.append('source_disk_file={}'.format(gcs_uri))
    if args.data_disk:
      workflow = _IMPORT_WORKFLOW
    else:
      workflow = _IMPORT_AND_TRANSLATE_WORKFLOW
      daisy_vars.append('translate_workflow={}'.format(
          _GetTranslateWorkflow(args)))

  return daisy_utils.RunDaisyBuild(args, workflow, ','.join(daisy_vars),
                                   daisy_bucket=daisy_bucket,
                                   user_zone=args.zone)
def Run(self, args):
  """Stages the import source and runs the Daisy import build.

  Args:
    args: argparse.Namespace from the import command.

  Returns:
    The result of the Daisy (Argo) cloud build.
  """
  log.warning('Importing image, this may take up to 1 hour.')
  storage_client = storage_api.StorageClient()
  daisy_bucket = daisy_utils.GetAndCreateDaisyBucket(
      storage_client=storage_client)
  image_uuid = uuid.uuid4()

  variables = ['image_name={}'.format(args.image_name)]
  if args.source_image:
    # If we're starting from an image, then we've already imported it.
    workflow = '{0}{1}'.format(_WORKFLOW_DIR, _GetTranslateWorkflow(args))
    ref = resources.REGISTRY.Parse(
        args.source_image,
        collection='compute.images',
        params={'project': properties.VALUES.core.project.GetOrFail})
    # source_name should be of the form 'global/images/image-name'.
    source_name = ref.RelativeName()[len(ref.Parent().RelativeName() + '/'):]
    variables.append('source_image={}'.format(source_name))
  else:
    # Get the image into the scratch bucket, wherever it is now.
    if _IsLocalFile(args.source_file):
      # Fix: 'async' became a reserved keyword in Python 3.7, so the argparse
      # attribute must be read via getattr; behavior is unchanged.
      gcs_uri = _UploadToGcs(getattr(args, 'async'), args.source_file,
                             daisy_bucket, image_uuid)
    else:
      source_file = _MakeGcsUri(args.source_file)
      gcs_uri = _CopyToScratchBucket(source_file, image_uuid,
                                     storage_client, daisy_bucket)

    # Import and (maybe) translate from the scratch bucket.
    variables.append('source_disk_file={}'.format(gcs_uri))
    if args.translate:
      workflow = _IMPORT_AND_TRANSLATE_WORKFLOW
      variables.append('translate_workflow={}'.format(
          _GetTranslateWorkflow(args)))
    else:
      workflow = _IMPORT_WORKFLOW

  return daisy_utils.RunDaisyBuild(args, workflow, ','.join(variables),
                                   daisy_bucket=daisy_bucket)
def _RunImageImport(self, args, import_stager, import_metadata, tags):
  """Run actual image import.

  Args:
    args: list of str, CLI args that might contain network/subnet args.
    import_stager: BaseImportStager, to do actual stage steps.
    import_metadata: list of str, contains metadata used by import. It can be
      daisy vars or import wrapper args.
    tags: A list of strings for adding tags to the Argo build.

  Returns:
    A cloud build that executes importing.
  """
  workflow = import_stager.GetDaisyWorkflow()
  metadata_csv = ','.join(import_metadata)
  zone = properties.VALUES.compute.zone.Get()
  return daisy_utils.RunDaisyBuild(
      args,
      workflow,
      metadata_csv,
      tags=tags,
      daisy_bucket=import_stager.GetDaisyBucket(),
      user_zone=zone,
      output_filter=_OUTPUT_FILTER)
def Run(self, args):
  """Resolves the disk image and runs the Daisy export build."""
  holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
  image_ref = Export.DISK_IMAGE_ARG.ResolveAsResource(
      args,
      holder.resources,
      scope_lister=compute_flags.GetDefaultScopeLister(holder.client))

  # Daisy variables are passed as a single comma-separated key=value string.
  parts = ['source_image={0},destination={1}'.format(
      image_ref.RelativeName(), args.destination_uri)]
  if args.export_format:
    workflow = _EXTERNAL_WORKFLOW
    parts.append('format={0}'.format(args.export_format.lower()))
  else:
    workflow = _DEFAULT_WORKFLOW

  return daisy_utils.RunDaisyBuild(
      args, workflow, ','.join(parts), tags=['gce-daisy-image-export'])
def _RunImageExport(self, args, tags):
  """Builds the export variables and runs the Daisy export build."""
  source_image = self._GetSourceImage(args.image, args.image_family,
                                      args.image_project)

  # Daisy variables are passed as a single comma-separated key=value string.
  parts = ['source_image={0},destination={1}'.format(source_image,
                                                     args.destination_uri)]
  if args.export_format:
    workflow = _EXTERNAL_WORKFLOW
    parts.append('format={0}'.format(args.export_format.lower()))
  else:
    workflow = _DEFAULT_WORKFLOW
  variables = self._ProcessNetworkArgs(args, ','.join(parts))

  return daisy_utils.RunDaisyBuild(
      args,
      workflow,
      variables,
      tags=tags,
      user_zone=properties.VALUES.compute.zone.Get(),
      output_filter=_OUTPUT_FILTER,
      daisy_bucket=self._GetDaisyBucket(args))
def Run(self, args):
  """Runs a Daisy build importing args.source_uri as image args.image_name.

  Args:
    args: argparse.Namespace with source_uri and image_name.

  Returns:
    The result of the Daisy (Argo) cloud build.
  """
  variables = 'source_disk_file={0},disk_size=50g,image_name={1}'.format(
      args.source_uri, args.image_name)
  # Fix: log.warn is deprecated; sibling commands in this file use log.warning.
  log.warning('Importing image, this may take up to 1 hour.')
  return daisy_utils.RunDaisyBuild(args, _WORKFLOW, variables)