def Delete(env_ref, target, gcs_subdir):
  """Deletes objects in a folder of an environment's bucket.

  gsutil deletes directory marker objects even when told to delete just the
  directory's contents, so we need to check that it exists and create it if
  it doesn't. A better alternative will be to use the storage API to list
  objects by prefix and implement deletion ourselves.

  Args:
    env_ref: googlecloudsdk.core.resources.Resource, Resource representing
        the Environment in whose corresponding bucket to delete objects.
    target: str, the path within the gcs_subdir directory in the bucket to
        delete.
    gcs_subdir: str, subdir of the Cloud Storage bucket in which to delete.
        Should not contain slashes, for example "dags".
  """
  bucket = _GetStorageBucket(env_ref)
  object_ref = storage_util.ObjectReference(
      bucket, posixpath.join(gcs_subdir, target))
  try:
    exit_code = storage_util.RunGsutilCommand(
        'rm',
        command_args=['-r', object_ref.ToUrl()],
        run_concurrent=True,
        out_func=log.out.write,
        err_func=log.err.write)
  except (execution_utils.PermissionError,
          execution_utils.InvalidCommandError) as e:
    raise command_util.GsutilError(six.text_type(e))
  if exit_code:
    raise command_util.GsutilError('gsutil returned non-zero status code.')
  # Recreate the subdir marker object in case gsutil removed it.
  _EnsureSubdirExists(bucket, gcs_subdir)
def _ImportGsutil(gcs_bucket, source, destination):
  """Imports files and directories into a bucket.

  Args:
    gcs_bucket: the bucket into which to copy.
    destination: str, bucket-relative destination path.
    source: str, local path or URL to copy recursively.

  Raises:
    command_util.GsutilError: the gsutil invocation failed or exited
        with a non-zero status.
  """
  dest_ref = storage_util.ObjectReference(gcs_bucket, destination)
  copy_args = ['-r', source, dest_ref.ToUrl()]
  try:
    exit_code = storage_util.RunGsutilCommand(
        'cp',
        command_args=copy_args,
        run_concurrent=True,
        out_func=log.out.write,
        err_func=log.err.write)
  except (execution_utils.PermissionError,
          execution_utils.InvalidCommandError) as e:
    raise command_util.GsutilError(six.text_type(e))
  if exit_code:
    raise command_util.GsutilError('gsutil returned non-zero status code.')
def Export(env_ref, sources, destination, release_track=base.ReleaseTrack.GA):
  """Exports files and directories from an environment's Cloud Storage bucket.

  Args:
    env_ref: googlecloudsdk.core.resources.Resource, Resource representing
        the Environment whose bucket from which to export.
    sources: [str], a list of bucket-relative paths from which to export
        files. Directory sources are imported recursively; the directory
        itself will be present in the destination bucket. Can also include
        wildcards.
    destination: str, existing local directory or path to a Cloud Storage
        bucket or directory object to which to export. Must have a single
        trailing slash but no leading slash. For example, 'dir/foo/bar/'.
    release_track: base.ReleaseTrack, the release track of command. Will
        dictate which Composer client library will be used.

  Returns:
    None

  Raises:
    command_util.Error: if the storage bucket could not be retrieved or a
        non-Cloud Storage destination that is not a local directory was
        provided.
    command_util.GsutilError: the gsutil command failed
  """
  bucket = _GetStorageBucket(env_ref, release_track=release_track)
  source_urls = [
      storage_util.ObjectReference(bucket, path).ToUrl() for path in sources
  ]
  if destination.startswith('gs://'):
    # Normalize to exactly one trailing slash on the bucket destination.
    destination = posixpath.join(destination.strip(posixpath.sep), '')
  elif not os.path.isdir(destination):
    raise command_util.Error('Destination for export must be a directory.')
  try:
    exit_code = storage_util.RunGsutilCommand(
        'cp',
        command_args=['-r'] + source_urls + [destination],
        run_concurrent=True,
        out_func=log.out.write,
        err_func=log.err.write)
  except (execution_utils.PermissionError,
          execution_utils.InvalidCommandError) as e:
    raise command_util.GsutilError(six.text_type(e))
  if exit_code:
    raise command_util.GsutilError('gsutil returned non-zero status code.')
def _DeleteGsutil(gcs_bucket, target, gcs_subdir):
  """Deletes objects in a folder of an environment's bucket with gsutil.

  Args:
    gcs_bucket: the bucket from which to delete.
    target: str, the path within gcs_subdir to delete recursively.
    gcs_subdir: str, subdir of the bucket in which to delete.

  Raises:
    command_util.GsutilError: the gsutil invocation failed or exited
        with a non-zero status.
  """
  object_ref = storage_util.ObjectReference.FromBucketRef(
      gcs_bucket, _JoinPaths(gcs_subdir, target, gsutil_path=True))
  try:
    exit_code = storage_util.RunGsutilCommand(
        'rm',
        command_args=['-r', object_ref.ToUrl()],
        run_concurrent=True,
        out_func=log.out.write,
        err_func=log.err.write)
  except (execution_utils.PermissionError,
          execution_utils.InvalidCommandError) as e:
    raise command_util.GsutilError(six.text_type(e))
  if exit_code:
    raise command_util.GsutilError('gsutil returned non-zero status code.')
def _ExportGsutil(gcs_bucket, source, destination):
  """Exports files and directories from an environment's GCS bucket.

  Args:
    gcs_bucket: the bucket from which to export.
    source: str, bucket-relative path to export recursively.
    destination: str, local directory or 'gs://' destination.

  Raises:
    command_util.Error: a non-Cloud Storage destination is not a local
        directory.
    command_util.GsutilError: the gsutil invocation failed or exited
        with a non-zero status.
  """
  src_ref = storage_util.ObjectReference.FromBucketRef(gcs_bucket, source)
  if destination.startswith('gs://'):
    # Normalize to exactly one trailing slash on the bucket destination.
    destination = _JoinPaths(
        destination.strip(posixpath.sep), '', gsutil_path=True)
  elif not os.path.isdir(destination):
    raise command_util.Error('Destination for export must be a directory.')
  try:
    exit_code = storage_util.RunGsutilCommand(
        'cp',
        command_args=['-r', src_ref.ToUrl(), destination],
        run_concurrent=True,
        out_func=log.out.write,
        err_func=log.err.write)
  except (execution_utils.PermissionError,
          execution_utils.InvalidCommandError) as e:
    raise command_util.GsutilError(six.text_type(e))
  if exit_code:
    raise command_util.GsutilError('gsutil returned non-zero status code.')
def Import(env_ref, sources, destination, release_track=base.ReleaseTrack.GA):
  """Imports files and directories into a bucket.

  Args:
    env_ref: googlecloudsdk.core.resources.Resource, Resource representing
        the Environment whose bucket into which to import.
    sources: [str], a list of paths from which to import files into the
        environment's bucket. Directory sources are imported recursively; the
        directory itself will be present in the destination bucket. Must
        contain at least one non-empty value.
    destination: str, subdir of the Cloud Storage bucket into which to import
        `sources`. Must have a single trailing slash but no leading slash.
        For example, 'data/foo/bar/'.
    release_track: base.ReleaseTrack, the release track of command. Will
        dictate which Composer client library will be used.

  Returns:
    None

  Raises:
    command_util.Error: if the storage bucket could not be retrieved
    command_util.GsutilError: the gsutil command failed
  """
  bucket = _GetStorageBucket(env_ref, release_track=release_track)
  dest_ref = storage_util.ObjectReference(bucket, destination)
  copy_args = ['-r'] + sources + [dest_ref.ToUrl()]
  try:
    exit_code = storage_util.RunGsutilCommand(
        'cp',
        command_args=copy_args,
        run_concurrent=True,
        out_func=log.out.write,
        err_func=log.err.write)
  except (execution_utils.PermissionError,
          execution_utils.InvalidCommandError) as e:
    raise command_util.GsutilError(six.text_type(e))
  if exit_code:
    raise command_util.GsutilError('gsutil returned non-zero status code.')