import json
import logging

import create_client


def main():
    """List the transfer operations associated with a transfer job."""
    logging.getLogger().setLevel(logging.DEBUG)
    transfer_service_client = create_client.create_transfer_client()
    # PROJECT_ID and JOB_NAME must be defined elsewhere in this sample.
    result = check_operation(transfer_service_client, PROJECT_ID, JOB_NAME)
    logging.info('Result of transferOperations/list: %s',
                 json.dumps(result, indent=4, sort_keys=True))
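# main() above relies on a check_operation helper that isn't shown in this
# section. A minimal sketch, assuming the discovery-based client and the
# transferOperations.list JSON filter format; treat the exact filter keys as
# assumptions to verify against the API reference.
def check_operation(transfer_service_client, project_id, job_name):
    """List the transfer operations for one job in one project."""
    filter_string = (
        '{{"project_id": "{project_id}", '
        '"job_names": ["{job_name}"]}}').format(
            project_id=project_id, job_name=job_name)
    return transfer_service_client.transferOperations().list(
        name='transferOperations', filter=filter_string).execute()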
import create_client


def test_create_client(job_filter: str):
    client = create_client.create_transfer_client()
    # A simple test to prove a usable client has been created.
    # The output isn't relevant - just that a valid API call can be made.
    # We expect an error to be raised if this operation fails.
    client.list_transfer_jobs({'filter': job_filter, 'page_size': 1})
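# The create_client module used above isn't shown here. A minimal sketch,
# assuming the google-cloud-storage-transfer library, which the
# list_transfer_jobs call above implies:
from google.cloud import storage_transfer


def create_transfer_client():
    return storage_transfer.StorageTransferServiceClient()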
def main(): """Create a one-off transfer from Amazon S3 to GCS.""" logging.getLogger().setLevel(logging.DEBUG) transfer_service_client = create_client.create_transfer_client() # Edit this template with desired parameters. # Specify times below using US Pacific Time Zone. transfer_job = ''' { "description": "YOUR DESCRIPTION", "status": "ENABLED", "projectId": "YOUR_PROJECT_ID", "schedule": { "scheduleStartDate": { "day": 1, "month": 1, "year": 2015 }, "scheduleEndDate": { "day": 1, "month": 1, "year": 2015 }, "startTimeOfDay": { "hours": 0, "minutes": 0 } }, "transferSpec": { "awsS3DataSource": { "bucketName": "YOUR_SOURCE_BUCKET", "awsAccessKey": { "accessKeyId": "YOUR_ACCESS_KEY_ID", "secretAccessKey": "YOUR_SECRET_ACCESS_KEY" } }, "gcsDataSink": { "bucketName": "YOUR_SINK_BUCKET" } } } ''' result = transfer_service_client.transferJobs().create(body=json.loads( transfer_job)).execute() logging.info('Returned transferJob: %s', json.dumps(result, indent=4))
def main(): """Create a one-off transfer from Amazon S3 to GCS.""" logging.getLogger().setLevel(logging.DEBUG) transfer_service_client = create_client.create_transfer_client() # Edit this template with desired parameters. # Specify times below using US Pacific Time Zone. transfer_job = ''' { "description": "YOUR DESCRIPTION", "status": "ENABLED", "projectId": "YOUR_PROJECT_ID", "schedule": { "scheduleStartDate": { "day": 1, "month": 1, "year": 2015 }, "scheduleEndDate": { "day": 1, "month": 1, "year": 2015 }, "startTimeOfDay": { "hours": 0, "minutes": 0 } }, "transferSpec": { "awsS3DataSource": { "bucketName": "YOUR_SOURCE_BUCKET", "awsAccessKey": { "accessKeyId": "YOUR_ACCESS_KEY_ID", "secretAccessKey": "YOUR_SECRET_ACCESS_KEY" } }, "gcsDataSink": { "bucketName": "YOUR_SINK_BUCKET" } } } ''' result = transfer_service_client.transferJobs().create( body=json.loads(transfer_job)).execute() logging.info('Returned transferJob: %s', json.dumps(result, indent=4))
def main(): """Transfer from standard Cloud Storage to Cloud Storage Nearline.""" logging.getLogger().setLevel(logging.DEBUG) transfer_service_client = create_client.create_transfer_client() # Edit this template with desired parameters. # Specify times below using US Pacific Time Zone. transfer_job = ''' { "description": "YOUR DESCRIPTION", "status": "ENABLED", "projectId": "YOUR_PROJECT_ID", "schedule": { "scheduleStartDate": { "day": 1, "month": 1, "year": 2015 }, "startTimeOfDay": { "hours": 1, "minutes": 1 } }, "transferSpec": { "gcsDataSource": { "bucketName": "YOUR_SOURCE_BUCKET" }, "gcsDataSink": { "bucketName": "YOUR_SINK_BUCKET" }, "objectConditions": { "minTimeElapsedSinceLastModification": "2592000s" }, "transferOptions": { "deleteObjectsFromSourceAfterTransfer": true } } } ''' result = transfer_service_client.transferJobs().create( body=json.loads(transfer_job)).execute() logging.info('Returned transferJob: %s', json.dumps(result, indent=4))
# Method of a unittest.TestCase subclass; a sketch of the enclosing fixture
# follows below.
def test_create_client(self):
    create_client.create_transfer_client()
    self.mock_discovery.assert_called_with(
        'storagetransfer', 'v1', credentials=self.mock_credentials)
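# The fixture that supplies mock_discovery and mock_credentials isn't shown.
# A minimal sketch, assuming create_client builds the discovery-based client
# via googleapiclient's discovery.build and google.auth.default; the patch
# targets are assumptions about create_client's imports. The test method
# above would be defined on this class.
import unittest
from unittest import mock


class CreateClientTestCase(unittest.TestCase):
    def setUp(self):
        # Replace discovery.build as imported inside create_client.
        self.discovery_patcher = mock.patch('create_client.discovery.build')
        self.mock_discovery = self.discovery_patcher.start()
        self.addCleanup(self.discovery_patcher.stop)

        # google.auth.default returns a (credentials, project) tuple.
        self.auth_patcher = mock.patch('google.auth.default')
        self.mock_credentials = mock.Mock()
        self.auth_patcher.start().return_value = (self.mock_credentials, None)
        self.addCleanup(self.auth_patcher.stop)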