Example #1
0
    def eventstream(*, dataset: Dataset):
        """Handle a single raw event payload posted over HTTP.

        Decodes the request body as a versioned JSON record, wraps it in a
        synthetic Kafka message, and routes it to either the consumer or the
        replacer worker depending on the record type, flushing immediately.
        """
        payload = json.loads(http_request.data)

        protocol_version = payload[0]
        if protocol_version != 2:
            raise RuntimeError("Unsupported protocol version: %s" % payload)

        # Fabricate a Kafka message envelope so the same worker code paths
        # used by the streaming consumers can process this HTTP payload.
        message: Message[KafkaPayload] = Message(
            Partition(Topic("topic"), 0),
            0,
            KafkaPayload(None, http_request.data, []),
            datetime.now(),
        )

        action = payload[1]

        storage = dataset.get_writable_storage()
        assert storage is not None

        # Imports are deferred so each worker module is only loaded when
        # its record type is actually seen.
        if action == "insert":
            from snuba.consumer import ConsumerWorker

            worker = ConsumerWorker(storage, metrics=metrics)
        else:
            from snuba.replacer import ReplacerWorker

            worker = ReplacerWorker(storage, metrics=metrics)

        processed = worker.process_message(message)
        if processed is not None:
            # Flush a single-element batch right away; there is no batching
            # window for the HTTP ingestion path.
            worker.flush_batch([processed])

        return ("ok", 200, {"Content-Type": "text/plain"})
Example #2
0
File: factory.py  Project: ruezetle/snuba
def enforce_table_writer(dataset: Dataset) -> TableWriter:
    """Return the table writer of *dataset*'s writable storage.

    Raises:
        AssertionError: if the dataset has no writable storage.
        NOTE(review): ``assert`` is stripped under ``python -O``; kept here
        because callers may already rely on AssertionError.
    """
    writable_storage = dataset.get_writable_storage()

    # Bug fix: the original message rendered as "Dataset<X> does not ..."
    # with no space after the word "Dataset".
    assert (
        writable_storage is not None
    ), f"Dataset {dataset} does not have a writable storage."
    return writable_storage.get_table_writer()
Example #3
0
File: migrate.py  Project: ruezetle/snuba
def run(conn: Client, dataset: Dataset) -> None:
    """Run ``_run_schema`` for every schema belonging to *dataset*.

    Collects the write schema of the writable storage (when one exists)
    plus the read schema of every storage, then migrates each in order.
    """
    schemas: MutableSequence[Schema] = []

    writable_storage = dataset.get_writable_storage()
    if writable_storage:
        schemas.append(writable_storage.get_table_writer().get_schema())

    schemas.extend(
        storage.get_schemas().get_read_schema()
        for storage in dataset.get_all_storages()
    )

    for schema in schemas:
        _run_schema(conn, schema)
Example #4
0
def test_no_schema_diffs(dataset: Dataset) -> None:
    """Check the live ClickHouse table matches the dataset's declared schema."""
    from snuba.migrations.parse_schema import get_local_schema

    storage = dataset.get_writable_storage()
    if not storage:
        # Read-only datasets have nothing to diff against.
        pytest.skip(f"{dataset!r} has no writable storage")

    connection = storage.get_cluster().get_query_connection(
        ClickhouseClientSettings.MIGRATE
    )
    declared_schema = storage.get_table_writer().get_schema()
    live_schema = get_local_schema(
        connection, declared_schema.get_local_table_name()
    )

    assert not declared_schema.get_column_differences(live_schema)