Example #1
    def test_enums_match_schema(self):
        with runner(self.default_config()) as r:
            r.migrate_up_to('head')

        # Fetch enum values from the migrated database
        migration_enums = self.fetch_all_enums()

        # Tear down and recreate the local postgres instance so the schema
        # can be loaded into a fresh database
        local_postgres_helpers.restore_local_env_vars(self.overridden_env_vars)
        local_postgres_helpers.stop_and_clear_on_disk_postgresql_database(
            self.db_dir)

        self.db_dir = local_postgres_helpers.start_on_disk_postgresql_database()
        self.overridden_env_vars = (
            local_postgres_helpers.update_local_sqlalchemy_postgres_env_vars())

        # Load the schema definitions directly (without running migrations)
        # into the fresh database
        local_postgres_helpers.use_on_disk_postgresql_database(
            SQLAlchemyEngineManager.declarative_method_for_schema(
                self.schema_type))

        # Fetch enum values from the schema-loaded database
        schema_enums = self.fetch_all_enums()

        # Assert that they all match
        self.assertEqual(len(migration_enums), len(schema_enums))
        for enum_name, migration_values in migration_enums.items():
            schema_values = schema_enums[enum_name]
            self.assertEqual(len(migration_values),
                             len(schema_values),
                             msg=f'{enum_name} lengths differ')
            self.assertEqual(len(migration_values),
                             len(migration_values.intersection(schema_values)),
                             msg=f'{enum_name} values differ')
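
The fetch_all_enums helper used above is not shown in this example. As a rough sketch, assuming it has access to a SQLAlchemy engine and returns a mapping from enum type name to the set of that type's labels (the real helper in the test class may be implemented differently), it could read the postgres catalog tables directly:

# Hypothetical sketch of fetch_all_enums; not taken from the original project.
# Reads every enum type and its labels from the postgres catalog and returns
# a dict of {enum_name: {labels}}.
from collections import defaultdict
from typing import Dict, Set

def fetch_all_enums(engine) -> Dict[str, Set[str]]:
    enums: Dict[str, Set[str]] = defaultdict(set)
    with engine.connect() as conn:
        rows = conn.execute(
            "SELECT t.typname, e.enumlabel "
            "FROM pg_type t JOIN pg_enum e ON t.oid = e.enumtypid"
        )
        for enum_name, enum_label in rows:
            enums[enum_name].add(enum_label)
    return dict(enums)
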
Example #2
import logging
from typing import List


def import_gcs_csv_to_cloud_sql(destination_table: str, gcs_uri: GcsfsFilePath,
                                columns: List[str]) -> None:
    """Imports a GCS CSV into Cloud SQL by creating a temporary table, importing the
    CSV into it, and then swapping it in for the destination table."""
    engine = SQLAlchemyEngineManager.get_engine_for_schema_base(
        SQLAlchemyEngineManager.declarative_method_for_schema(
            SchemaType.CASE_TRIAGE))
    if engine is None:
        raise RuntimeError("Could not create postgres sqlalchemy engine")

    # Drop old temporary table if it exists
    tmp_table_name = f"tmp__{destination_table}"
    with engine.connect() as conn:
        conn.execute(f"DROP TABLE IF EXISTS {tmp_table_name}")

    # Create temporary table
    with engine.connect() as conn:
        conn.execute(
            f"CREATE TABLE {tmp_table_name} AS TABLE {destination_table} WITH NO DATA"
        )

    try:
        # Start actual Cloud SQL import
        logging.info("Starting import from GCS URI: %s", gcs_uri)
        logging.info("Starting import to destination table: %s",
                     destination_table)
        logging.info("Starting import using columns: %s", columns)
        cloud_sql_client = CloudSQLClientImpl()
        instance_name = SQLAlchemyEngineManager.get_stripped_cloudsql_instance_id(
            SchemaType.CASE_TRIAGE)
        if instance_name is None:
            raise ValueError("Could not find instance name.")
        operation_id = cloud_sql_client.import_gcs_csv(
            instance_name=instance_name,
            table_name=tmp_table_name,
            gcs_uri=gcs_uri,
            columns=columns,
        )
        if operation_id is None:
            raise RuntimeError(
                "Cloud SQL import operation was not started successfully.")

        operation_succeeded = cloud_sql_client.wait_until_operation_completed(
            operation_id)

        if not operation_succeeded:
            raise RuntimeError("Cloud SQL import failed.")
    except Exception as e:
        logging.warning(
            "Dropping newly created table due to raised exception.")
        # The connections opened above were closed when their `with` blocks
        # exited, so open a new one to clean up the temporary table.
        with engine.connect() as conn:
            conn.execute(f"DROP TABLE {tmp_table_name}")
        raise e

    # Swap in new table
    old_table_name = f"old__{destination_table}"
    with engine.begin() as conn:
        conn.execute(
            f"ALTER TABLE {destination_table} RENAME TO {old_table_name}")
        conn.execute(
            f"ALTER TABLE {tmp_table_name} RENAME TO {destination_table}")
        conn.execute(f"DROP TABLE {old_table_name}")