Example #1
0
    def test_execute(self, mock_hook):
        """Verify execute() starts an import and polls its operation to completion.

        The Datastore hook is mocked so no GCP calls occur; the test asserts
        the hook is built with the operator's connection and that both hook
        methods receive the operator's kwargs.
        """
        # The hook first returns an operation handle, then reports it done.
        mock_hook.return_value.import_from_storage_bucket.return_value = {
            "name": OPERATION_ID
        }
        mock_hook.return_value.poll_operation_until_done.return_value = {
            "metadata": {
                "common": {
                    "state": "SUCCESSFUL"
                }
            }
        }

        op = CloudDatastoreImportEntitiesOperator(
            task_id="test_task",
            datastore_conn_id=CONN_ID,
            project_id=PROJECT_ID,
            bucket=BUCKET,
            file=FILE,
        )
        op.execute({})

        mock_hook.assert_called_once_with(CONN_ID, None)
        mock_hook.return_value.import_from_storage_bucket.assert_called_once_with(
            project_id=PROJECT_ID,
            bucket=BUCKET,
            file=FILE,
            entity_filter=None,
            labels=None,
            namespace=None,
        )

        # BUG FIX: the original read
        # ``export_to_storage_bucketassert_called_once_with(OPERATION_ID, 10)``
        # -- a missing dot made it a plain call on a Mock attribute (which
        # always succeeds, so nothing was asserted), and it named the export
        # method in an import test. The second call to verify is the poll.
        mock_hook.return_value.poll_operation_until_done.assert_called_once_with(
            OPERATION_ID, 10)
Example #2
0
        tags=["example"],
) as dag:
    # [START how_to_export_task]
    # Export all Datastore entities of GCP_PROJECT_ID into the GCS bucket.
    export_task = CloudDatastoreExportEntitiesOperator(
        task_id="export_task",
        bucket=BUCKET,
        project_id=GCP_PROJECT_ID,
        overwrite_existing=True,
    )
    # [END how_to_export_task]

    # [START how_to_import_task]
    # Re-import the entities just exported. The export task's XCom holds the
    # operation response; its outputUrl is a gs://bucket/path URI, so
    # split('/')[2] is the bucket name and elements [3:] form the object path.
    import_task = CloudDatastoreImportEntitiesOperator(
        task_id="import_task",
        bucket=
        "{{ task_instance.xcom_pull('export_task')['response']['outputUrl'].split('/')[2] }}",
        file=
        "{{ '/'.join(task_instance.xcom_pull('export_task')['response']['outputUrl'].split('/')[3:]) }}",
        project_id=GCP_PROJECT_ID,
    )
    # [END how_to_import_task]

    # The import may only run after the export has produced its output.
    export_task >> import_task

# [START how_to_keys_def]
KEYS = [{
    "partitionId": {
        "projectId": GCP_PROJECT_ID,
        "namespaceId": ""
    },
    "path": {
        "kind": "airflow"
Example #3
0
from airflow.utils import dates

# Project and bucket are overridable via environment variables for system tests.
GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project")
BUCKET = os.environ.get("GCP_DATASTORE_BUCKET", "datastore-system-test")

# Start date in the past so the scheduler can trigger the DAG immediately.
default_args = {"start_date": dates.days_ago(1)}

# Example DAG: export Datastore entities to GCS, then import them back.
with models.DAG(
        "example_gcp_datastore",
        default_args=default_args,
        schedule_interval=None,  # Override to match your needs
        tags=['example'],
) as dag:
    # Dump every entity in the project into the configured GCS bucket.
    export_task = CloudDatastoreExportEntitiesOperator(
        task_id="export_task",
        bucket=BUCKET,
        project_id=GCP_PROJECT_ID,
        overwrite_existing=True,
    )

    # The export's XCom response carries a gs://bucket/path outputUrl:
    # element [2] of the '/'-split is the bucket, the rest is the object path.
    output_bucket = "{{ task_instance.xcom_pull('export_task')['response']['outputUrl'].split('/')[2] }}"
    output_file = "{{ '/'.join(task_instance.xcom_pull('export_task')['response']['outputUrl'].split('/')[3:]) }}"

    # Load the just-exported entities back into the same project.
    import_task = CloudDatastoreImportEntitiesOperator(
        task_id="import_task",
        bucket=output_bucket,
        file=output_file,
        project_id=GCP_PROJECT_ID)

    export_task >> import_task