def GetBucketACLs(args: 'argparse.Namespace') -> None:
  """Retrieve the Access Controls for a GCS bucket.

  Args:
    args (argparse.Namespace): Arguments from ArgumentParser.
  """
  gcs = gcp_storage.GoogleCloudStorage(args.project)
  acls = gcs.GetBucketACLs(args.path)
  # Log each role together with its comma-separated member list.
  for role, members in acls.items():
    logger.info('{0:s}: {1:s}'.format(role, ', '.join(members)))
def DeleteObject(args: 'argparse.Namespace') -> None:
  """Deletes an object in GCS.

  Args:
    args (argparse.Namespace): Arguments from ArgumentParser.
  """
  gcs = gcp_storage.GoogleCloudStorage(args.project)
  gcs.DeleteObject(args.path)
  # Use the module logger instead of print() for consistency with the
  # other CLI handlers in this file.
  logger.info('Object deleted.')
def GetBucketSize(args: 'argparse.Namespace') -> None:
  """Get the size of a GCS bucket.

  Args:
    args (argparse.Namespace): Arguments from ArgumentParser.
  """
  gcs = gcp_storage.GoogleCloudStorage(args.project)
  sizes = gcs.GetBucketSize(args.path)
  # Report the size (in bytes) of every object found in the bucket.
  for name, size in sizes.items():
    logger.info('{0:s}: {1:d}b'.format(name, size))
def CreateBucket(args: 'argparse.Namespace') -> None:
  """Create a bucket in a GCP project.

  Args:
    args (argparse.Namespace): Arguments from ArgumentParser.
  """
  gcs = gcp_storage.GoogleCloudStorage(args.project)
  result = gcs.CreateBucket(args.name, labels={'created_by': 'cfu'})
  bucket_id = result.get('id', 'ID not found')
  self_link = result.get('selfLink', 'No link')
  logger.info('{0:s} : {1:s}'.format(bucket_id, self_link))
def storage(self) -> storage_module.GoogleCloudStorage: """Get a GoogleCloudStorage object for the project. Returns: GoogleCloudLog: Object that represents Google Cloud Logging. """ if self._storage: return self._storage self._storage = storage_module.GoogleCloudStorage(self.project_id) return self._storage
def ListBuckets(args: 'argparse.Namespace') -> None:
  """List the buckets in a GCP project.

  Args:
    args (argparse.Namespace): Arguments from ArgumentParser.
  """
  gcs = gcp_storage.GoogleCloudStorage(args.project)
  for bucket in gcs.ListBuckets():
    bucket_id = bucket.get('id', 'ID not found')
    self_link = bucket.get('selfLink', 'No link')
    logger.info('{0:s} : {1:s}'.format(bucket_id, self_link))
def ListBucketObjects(args: 'argparse.Namespace') -> None:
  """List the objects in a GCS bucket.

  Args:
    args (argparse.Namespace): Arguments from ArgumentParser.
  """
  gcs = gcp_storage.GoogleCloudStorage(args.project)
  for entry in gcs.ListBucketObjects(args.path):
    object_id = entry.get('id', 'ID not found')
    size = entry.get('size', 'Unknown size')
    content_type = entry.get('contentType', 'Unknown Content-Type')
    logger.info('{0:s} {1:s}b [{2:s}]'.format(object_id, size, content_type))
def GetGCSObjectMetadata(args: 'argparse.Namespace') -> None:
  """List the details of an object in a GCS bucket.

  Args:
    args (argparse.Namespace): Arguments from ArgumentParser.
  """
  gcs = gcp_storage.GoogleCloudStorage(args.project)
  results = gcs.GetObjectMetadata(args.path)
  # A 'storage#objects' kind means a list response; otherwise it is the
  # metadata of a single object.
  if results.get('kind') == 'storage#objects':
    for item in results.get('items', []):
      for key, value in item.items():
        # Use !s (str conversion): metadata values are not guaranteed to
        # be strings (e.g. nested dicts or booleans), and ':s' would
        # raise TypeError on them.
        logger.info('{0:s}: {1!s}'.format(key, value))
      logger.info('---------')
  else:
    for key, value in results.items():
      logger.info('{0:s}: {1!s}'.format(key, value))
def GCSToS3(self, project_id: str, gcs_path: str, s3_path: str) -> None:
  """Copy an object in GCS to an S3 bucket.

  Downloads the GCS object to a local temporary file, uploads it to the
  target S3 bucket (creating the bucket if necessary), then removes the
  local copy.

  Args:
    project_id (str): Google Cloud project ID.
    gcs_path (str): File path to the source GCS object.
        Ex: gs://bucket/folder/obj
    s3_path (str): Path to the target S3 bucket. Ex: s3://test/bucket

  Raises:
    ResourceCreationError: If the object couldn't be uploaded.
  """
  gcs = gcp_storage.GoogleCloudStorage(project_id)
  # Normalize both paths so callers may omit the scheme prefixes.
  if not s3_path.startswith('s3://'):
    s3_path = 's3://' + s3_path
  if not gcs_path.startswith('gs://'):
    gcs_path = 'gs://' + gcs_path
  object_md = gcs.GetObjectMetadata(gcs_path)
  logger.warning(
      'This will download {0:s}b to a local'
      ' temporary directory before uploading it to S3.'.format(
          object_md.get('size', 'Error')))
  localcopy = gcs.GetObject(gcs_path)
  try:
    self.CreateBucket(gcp_storage.SplitStoragePath(s3_path)[0])
  except errors.ResourceCreationError as exception:
    if 'already exists' in exception.message:
      logger.info('Target bucket already exists. Reusing.')
    else:
      # Bare raise preserves the original traceback.
      raise
  try:
    self.Put(s3_path, localcopy)
  finally:
    # Always remove the temporary local copy, even if the upload failed,
    # so failed transfers do not leak large files on disk.
    logger.info('Attempting to delete local (temporary) copy')
    os.unlink(localcopy)
  logger.info('Done')
'projects/fake-target-project/logs/GCEGuestAgent', 'projects/fake-target-project/logs/OSConfigAgent' ] FAKE_LOG_ENTRIES = [{ 'logName': 'test_log', 'timestamp': '123456789', 'textPayload': 'insert.compute.create' }, { 'logName': 'test_log', 'timestamp': '123456789', 'textPayload': 'insert.compute.create' }] FAKE_NEXT_PAGE_TOKEN = 'abcdefg1234567' FAKE_GCS = gcp_storage.GoogleCloudStorage('fake-target-project') FAKE_GCB = gcp_build.GoogleCloudBuild('fake-target-project') FAKE_MONITORING = gcp_monitoring.GoogleCloudMonitoring('fake-target-project') # Mock struct to mimic GCP's API responses MOCK_INSTANCES_AGGREGATED = { # See https://cloud.google.com/compute/docs/reference/rest/v1/instances # /aggregatedList for complete structure 'items': { 0: { 'instances': [{ 'name': FAKE_INSTANCE.name, 'zone': '/' + FAKE_INSTANCE.zone }] } }