Example #1
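These snippets are excerpted from the maggma and jobflow test suites and omit their module-level imports. A reconstructed header that the examples below assume (the names are the standard ones from those projects):

import boto3
import pytest
from maggma.stores import MemoryStore, S3Store
from moto import mock_s3
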
def test_keys():
    with mock_s3():
        conn = boto3.client("s3")
        conn.create_bucket(Bucket="bucket1")
        index = MemoryStore("index", key=1)
        with pytest.raises(AssertionError, match=r"Since we are.*"):
            store = S3Store(index, "bucket1", s3_workers=4, key=1)
        index = MemoryStore("index", key="key1")
        with pytest.warns(UserWarning, match=r"The desired S3Store.*$"):
            store = S3Store(index, "bucket1", s3_workers=4, key="key2")
        store.connect()
        store.update({"key1": "mp-1", "data": "1234"})
        with pytest.raises(KeyError):
            store.update({"key2": "mp-2", "data": "1234"})
        assert store.key == store.index.key == "key1"
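
A follow-up sketch, for contrast with the mismatched-key construction above: when the store key matches the index key, neither the AssertionError nor the UserWarning is triggered.

index = MemoryStore("index", key="key1")
store = S3Store(index, "bucket1", key="key1")  # keys agree, so no warning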
Example #2
def test_additional(memory_store):
    from copy import deepcopy

    import boto3
    from maggma.stores import MemoryStore, S3Store
    from moto import mock_s3

    from jobflow import JobStore

    with mock_s3():
        conn = boto3.resource("s3", region_name="us-east-1")
        conn.create_bucket(Bucket="bucket1")
        index = MemoryStore("index", key="blob_uuid")
        s3_store = S3Store(index, "bucket1", key="blob_uuid")
        store = JobStore(
            memory_store,
            additional_stores={
                "data": deepcopy(memory_store),
                "data_s3": s3_store
            },
        )

        with store as s:
            assert s
            assert s.name == "JobStore-mem://memory_db"
            assert s._collection is not None
            assert s.additional_stores["data_s3"].searchable_fields == [
                "job_uuid",
                "job_index",
            ]
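
Here additional_stores lets JobStore route selected job data to separate backends (a second in-memory store and an S3Store) while memory_store holds the main job documents; the final assertion shows that JobStore configures the S3 store to index the job_uuid and job_index fields.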
Example #3
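In addition to the common header above, this fixture (and the later test_newer_in) uses datetime:

from datetime import datetime
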
def s3store():
    with mock_s3():
        conn = boto3.client("s3")
        conn.create_bucket(Bucket="bucket1")

        index = MemoryStore("index")
        store = S3Store(index, "bucket1")
        store.connect()

        store.update(
            [
                {
                    "task_id": "mp-1",
                    "data": "asd",
                    store.last_updated_field: datetime.utcnow(),
                }
            ]
        )
        store.update(
            [
                {
                    "task_id": "mp-3",
                    "data": "sdf",
                    store.last_updated_field: datetime.utcnow(),
                }
            ]
        )

        yield store
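
A usage sketch for this fixture: maggma stores expose query_one/query, so a test can read back one of the documents written above.

def test_query(s3store):
    doc = s3store.query_one({"task_id": "mp-1"})
    assert doc["data"] == "asd"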
Example #4
def test_no_bucket():
    with mock_s3():
        conn = boto3.client("s3")
        conn.create_bucket(Bucket="bucket1")

        index = MemoryStore("index")
        store = S3Store(index, "bucket2")
        with pytest.raises(RuntimeError, match=r".*Bucket not present.*"):
            store.connect()
Example #5
def s3store_multi():
    with mock_s3():
        conn = boto3.client("s3")
        conn.create_bucket(Bucket="bucket1")

        index = MemoryStore("index")
        store = S3Store(index, "bucket1", s3_workers=4)
        store.connect()

        yield store
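
s3_workers sets the number of threads S3Store uses for parallel writes; the fixture is otherwise identical to the plain s3store fixture above.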
Example #6
def s3store_w_subdir():
    with mock_s3():
        conn = boto3.client("s3")
        conn.create_bucket(Bucket="bucket1")

        index = MemoryStore("index'")
        store = S3Store(index, "bucket1", sub_dir="subdir1")
        store.connect()

        yield store
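
With sub_dir set, the store prefixes object keys inside the bucket (an object for "mp-1" should land under "subdir1/mp-1"), so several logical stores can share one bucket. A minimal sketch against this fixture:

def test_subdir_round_trip(s3store_w_subdir):
    s3store_w_subdir.update([{"task_id": "mp-1", "data": "asd"}])
    assert s3store_w_subdir.query_one({"task_id": "mp-1"})["data"] == "asd"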
Example #7
def test_get_session(s3store):
    index = MemoryStore("index")
    store = S3Store(
        index,
        "bucket1",
        s3_profile={
            "aws_access_key_id": "ACCESS_KEY",
            "aws_secret_access_key": "SECRET_KEY",
        },
    )
    assert store._get_session().get_credentials().access_key == "ACCESS_KEY"
    assert store._get_session().get_credentials().secret_key == "SECRET_KEY"
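
s3_profile accepts a dict of explicit credentials, as here; recent maggma versions also accept the name of a profile from the local AWS config. A sketch assuming a profile called "my-profile" exists:

store = S3Store(index, "bucket1", s3_profile="my-profile")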
Example #8
def test_newer_in(s3store):
    with mock_s3():
        tic = datetime(2018, 4, 12, 16)
        tic2 = datetime.utcnow()
        conn = boto3.client("s3")
        conn.create_bucket(Bucket="bucket")

        index_old = MemoryStore("index_old")
        old_store = S3Store(index_old, "bucket")
        old_store.connect()
        old_store.update([{"task_id": "mp-1", "last_updated": tic}])
        old_store.update([{"task_id": "mp-2", "last_updated": tic}])

        index_new = MemoryStore("index_new")
        new_store = S3Store(index_new, "bucket")
        new_store.connect()
        new_store.update([{"task_id": "mp-1", "last_updated": tic2}])
        new_store.update([{"task_id": "mp-2", "last_updated": tic2}])

        assert len(old_store.newer_in(new_store)) == 2
        assert len(new_store.newer_in(old_store)) == 0

        assert len(old_store.newer_in(new_store.index)) == 2
        assert len(new_store.newer_in(old_store.index)) == 0
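
newer_in compares documents by the store's last_updated field, so every document in new_store (stamped with the current time) is newer than its tic-stamped counterpart in old_store, while the reverse comparison finds nothing.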
Example #9
    def _get_s3_store(self, store_name):
        """
        Add a maggma store to this object for storage of large chunk data
        The maggma store will be stored to self.maggma_store[store_name]

        For aws store, all documents will be stored to the same bucket
        and the store_name will double as the sub_dir name.

        Args:
            store_name: correspond to the the key within calcs_reversed.0 that will be stored
        """
        if self.host_uri is not None:
            index_store_ = MongoURIStore(
                uri=self.host_uri,
                database=self.db_name,
                collection_name=f"{self.maggma_store_prefix}_{store_name}_index",
                key="fs_id",
            )
        else:
            index_store_ = MongoStore(
                database=self.db_name,
                collection_name=f"{self.maggma_store_prefix}_{store_name}_index",
                host=self.host,
                port=self.port,
                username=self.user,
                password=self.password,
                key="fs_id",
            )

        store = S3Store(
            index=index_store_,
            sub_dir=f"{self.maggma_store_prefix}_{store_name}",
            key="fs_id",
            **self._maggma_store_kwargs,
        )

        return store
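
A hypothetical usage sketch (calc_db and the store name "chgcar" are illustrative, not from the source):

store = calc_db._get_s3_store("chgcar")  # index collection and sub_dir derive from the name
store.connect()
store.update([{"fs_id": "some-id", "data": "..."}])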
Example #10
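Beyond the common header, this fixture serializes documents by hand and needs zlib, msgpack, and monty's msgpack encoder (which maggma itself uses):

import zlib

import msgpack
from monty.msgpack import default
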
def s3store():
    with mock_s3():
        conn = boto3.client("s3")
        conn.create_bucket(Bucket="bucket1")

        index = MemoryStore("index'")
        store = S3Store(index, "bucket1")
        store.connect()

        check_doc = {"task_id": "mp-1", "data": "asd"}
        store.index.update([{"task_id": "mp-1"}])
        store.s3_bucket.put_object(
            Key="mp-1", Body=msgpack.packb(check_doc, default=default)
        )

        check_doc2 = {"task_id": "mp-3", "data": "sdf"}
        store.index.update([{"task_id": "mp-3", "compression": "zlib"}])
        store.s3_bucket.put_object(
            Key="mp-3", Body=zlib.compress(msgpack.packb(check_doc2, default=default))
        )

        yield store
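
On read, S3Store consults the index to decide how to decode each object, so the zlib-compressed document should come back transparently:

def test_read_back(s3store):
    assert s3store.query_one({"task_id": "mp-3"})["data"] == "sdf"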
Example #11
def test_bad_import(mocker):
    mocker.patch("maggma.stores.aws.boto3", None)
    with pytest.raises(RuntimeError):
        index = MemoryStore("index")
        S3Store(index, "bucket1")
Example #12
File: app.py Project: hhaoyan/api
    s3_bs_index = MongoURIStore(
        uri=f"mongodb+srv://{db_uri}",
        database="mp_core",
        key="fs_id",
        collection_name="s3_bandstructure_index",
    )

    s3_dos_index = MongoURIStore(
        uri=f"mongodb+srv://{db_uri}",
        database="mp_core",
        key="fs_id",
        collection_name="s3_dos_index",
    )

    s3_bs = S3Store(
        index=s3_bs_index,
        bucket="mp-bandstructures",
        compress=True,
        key="fs_id",
        searchable_fields=["task_id", "fs_id"],
    )

    s3_dos = S3Store(
        index=s3_dos_index,
        bucket="mp-dos",
        compress=True,
        key="fs_id",
        searchable_fields=["task_id", "fs_id"],
    )

    s3_chgcar_index = MongoURIStore(
        uri=f"mongodb+srv://{db_uri}",
        database="mp_core",
        key="fs_id",
        collection_name="s3_chgcar_index",
    )
Example #13
    dos_store = MongoURIStore(
        uri=f"mongodb+srv://{db_uri}",
        database="mp_core",
        key="task_id",
        collection_name="dos",
    )

    s3_dos_index = MongoURIStore(
        uri=f"mongodb+srv://{db_uri}",
        database="mp_core",
        key="task_id",
        collection_name="s3_dos_index",
    )

    s3_bs = S3Store(index=s3_bs_index, bucket="mp-bandstructures", compress=True)

    s3_dos = S3Store(index=s3_dos_index, bucket="mp-dos", compress=True)

else:
    materials_store = loadfn(materials_store_json)
    task_store = loadfn(task_store_json)
    thermo_store = loadfn(thermo_store_json)
    dielectric_piezo_store = loadfn(dielectric_piezo_store_json)
    magnetism_store = loadfn(magnetism_store_json)
    phonon_bs_store = loadfn(phonon_bs_store_json)
    phonon_img_store = loadfn(phonon_img_store_json)
    eos_store = loadfn(eos_store_json)
    similarity_store = loadfn(similarity_store_json)
    xas_store = loadfn(xas_store_json)