def test_write_batch(collection):
    """write_batch should persist every document in the batch."""
    count = 30
    docs = [{"id": n, "value": n} for n in range(count)]

    f.write_batch(collection, "id", docs, batch_size=5)

    stored = list(f.get_collection(collection).stream())
    assert len(stored) == count
def process_stations_response(response_text: str,
                              response_elapsed: float) -> bool:
    """Import the fetched station CSV if its content changed.

    Returns True when an update was written, False otherwise.
    """
    year = get_phenoyear()
    csv_string = _clean_station_csv(response_text)
    fingerprint = _get_hash(str(year) + csv_string)
    if _load_hash("stations") == fingerprint:
        log.info("Station file did not change.")
        return False
    rows = csv.DictReader(io.StringIO(csv_string), delimiter=";")
    stations = _get_individuals_dicts(year, rows)
    log.info("Update %i stations fetched in %s", len(stations),
             response_elapsed)
    write_batch("individuals", "id", stations, merge=True)
    # hashing the year together with the CSV triggers a re-import in a new phenoyear
    _set_hash("stations", str(year) + csv_string)
    return True
def process_observations_response(response_text: str,
                                  response_elapsed: float) -> bool:
    """Import the fetched observation CSV if its content changed.

    Returns True when an update was written, False otherwise.
    """
    if _load_hash("observations") == _get_hash(response_text):
        log.info("Observations file did not change.")
        return False
    rows = csv.DictReader(io.StringIO(response_text), delimiter=";")
    observations = _get_observations_dicts(rows)
    log.info(
        "Update %i observations fetched in %s",
        len(observations),
        response_elapsed,
    )
    # write observations
    write_batch("observations", "id", observations, merge=True)
    # update stations
    _update_station_species(_get_station_species(observations))
    _set_hash("observations", response_text)
    return True
def test_delete_batch(collection):
    """delete_batch should remove exactly the documents matching the filter."""
    properties = 3
    properties_size = 10

    docs = [
        {"id": prop * properties_size + idx, "property": prop}
        for prop in range(properties)
        for idx in range(properties_size)
    ]
    f.write_batch(collection, "id", docs)

    total = properties * properties_size
    assert len(list(f.get_collection(collection).stream())) == total

    f.delete_batch(collection, "property", "==", 2, batch_size=3)

    remaining = list(f.get_collection(collection).stream())
    assert len(remaining) == (properties - 1) * properties_size, f.docs2str(remaining)
    for doc in remaining:
        data = doc.to_dict()
        assert data.get("property") is not None
        assert data["property"] != 2
# Example #5
def write_individuals(individuals: List[dict], key: str) -> None:
    """Persist *individuals* to the "individuals" collection, keyed by *key*."""
    write_batch("individuals", key, individuals)