示例#1
0
class TestMaterials(BuilderTest):
    """Integration tests for MaterialsBuilder against a scratch collection."""

    def setUp(self):
        # Fresh, empty materials target for every test; the task source
        # collection is provided by BuilderTest.
        self.materials = MongoStore("emmet_test", "materials")
        self.materials.connect()
        self.materials.collection.drop()

        self.mbuilder = MaterialsBuilder(
            self.tasks, self.materials, mat_prefix="", chunk_size=1)

    def test_get_items(self):
        groups = list(self.mbuilder.get_items())
        formulas = {group[0]["formula_pretty"] for group in groups}

        # The fixture task set spans 12 chemical systems / 197 tasks total.
        self.assertEqual(len(groups), 12)
        self.assertEqual(len(formulas), 12)
        self.assertEqual(len(list(chain.from_iterable(groups))), 197)
        self.assertIn("Sr", formulas)
        self.assertIn("Hf", formulas)
        self.assertIn("O2", formulas)
        self.assertNotIn("H", formulas)

    def test_process_item(self):
        # Expected number of materials produced per chemical system.
        expected = {"Sr": 7, "Hf": 4, "O": 6, "O-Sr": 5, "Hf-O-Sr": 13}
        for chemsys, n_mats in expected.items():
            task_group = list(self.tasks.query(criteria={"chemsys": chemsys}))
            produced = self.mbuilder.process_item(task_group)
            self.assertEqual(len(produced), n_mats)

    def test_update_targets(self):
        sr_tasks = list(self.tasks.query(criteria={"chemsys": "Sr"}))
        produced = self.mbuilder.process_item(sr_tasks)
        self.assertEqual(len(produced), 7)

        self.mbuilder.update_targets([produced])
        # All 7 materials land in the target, one doc per task_id.
        self.assertEqual(len(self.materials.distinct("task_id")), 7)
        self.assertEqual(len(list(self.materials.query())), 7)

    def tearDown(self):
        self.materials.collection.drop()
示例#2
0
class TestMaterials(BuilderTest):
    """Integration tests for MLStructuresBuilder against a scratch collection."""

    def setUp(self):
        # Fresh ML-structures target, keyed on entry_id so each ionic step
        # gets its own document.
        self.ml_strucs = MongoStore("emmet_test", "ml_strucs", key="entry_id")
        self.ml_strucs.connect()
        self.ml_strucs.collection.drop()

        self.mlbuilder = MLStructuresBuilder(
            self.tasks,
            self.ml_strucs,
            task_types=("Structure Optimization", "Static"))

    def test_get_items(self):
        items = list(self.mlbuilder.get_items())
        formulas = {doc["formula_pretty"] for doc in items}

        # One item per task: 197 tasks across 12 distinct formulas.
        self.assertEqual(len(items), 197)
        self.assertEqual(len(formulas), 12)
        self.assertIn("Sr", formulas)
        self.assertIn("Hf", formulas)
        self.assertIn("O2", formulas)
        self.assertNotIn("H", formulas)

    def test_process_item(self):
        for task in self.tasks.query():
            structures = self.mlbuilder.process_item(task)
            ttype = task_type(get(task, 'input.incar'))
            wanted = any(t in ttype for t in self.mlbuilder.task_types)
            if wanted:
                # One structure per ionic step across all calcs of the task.
                n_steps = sum(
                    len(calc["output"]["ionic_steps"])
                    for calc in task["calcs_reversed"])
                self.assertEqual(len(structures), n_steps)
            else:
                # Tasks outside the requested task types yield nothing.
                self.assertEqual(len(structures), 0)

    def test_update_targets(self):
        for task in self.tasks.query():
            self.mlbuilder.update_targets([self.mlbuilder.process_item(task)])
        self.assertEqual(len(self.ml_strucs.distinct("task_id")), 102)
        self.assertEqual(len(list(self.ml_strucs.query())), 1012)

    def tearDown(self):
        self.ml_strucs.collection.drop()
示例#3
0
class ElasticAnalysisBuilderTest(unittest.TestCase):
    """Tests for ElasticAnalysisBuilder: end-to-end run plus its helpers."""

    # Fix: setUp/tearDown were decorated with @classmethod while taking
    # ``self`` — unittest still invoked them per test, but ``self`` was bound
    # to the class, so fixture state (stores, profiler) leaked onto the class
    # object and was shared across tests.  They are per-test fixtures, so the
    # decorators are removed.
    def setUp(self):
        # Set up test db, set up mpsft, etc.
        self.test_tasks = MongoStore("test_emmet", "tasks")
        self.test_tasks.connect()
        docs = loadfn(test_tasks, cls=None)
        self.test_tasks.update(docs)
        self.test_elasticity = MongoStore("test_emmet", "elasticity")
        self.test_elasticity.connect()
        if PROFILE_MODE:
            self.pr = cProfile.Profile()
            self.pr.enable()
            print("\n<<<---")

    def tearDown(self):
        if not DEBUG_MODE:
            self.test_elasticity.collection.drop()
            self.test_tasks.collection.drop()
        if PROFILE_MODE:
            p = Stats(self.pr)
            p.strip_dirs()
            p.sort_stats('cumtime')
            p.print_stats()
            print("\n--->>>")

    def test_builder(self):
        """Every item processes truthy and the NaN3 doc has expected values."""
        ec_builder = ElasticAnalysisBuilder(self.test_tasks,
                                            self.test_elasticity,
                                            incremental=False)
        ec_builder.connect()
        for t in ec_builder.get_items():
            processed = ec_builder.process_item(t)
            self.assertTrue(bool(processed))
        runner = Runner([ec_builder])
        runner.run()
        # Test warnings
        doc = ec_builder.elasticity.query_one(
            criteria={"pretty_formula": "NaN3"})
        self.assertEqual(doc['warnings'], None)
        self.assertAlmostEqual(doc['compliance_tensor'][0][0], 41.576072, 6)

    def test_grouping_functions(self):
        docs1 = list(
            self.test_tasks.query(criteria={"formula_pretty": "NaN3"}))
        docs_grouped1 = group_by_parent_lattice(docs1)
        self.assertEqual(len(docs_grouped1), 1)
        grouped_by_opt = group_deformations_by_optimization_task(docs1)
        self.assertEqual(len(grouped_by_opt), 1)
        docs2 = self.test_tasks.query(
            criteria={"task_label": "elastic deformation"})
        # Smoke test only: grouping must also accept a raw cursor.
        sgroup2 = group_by_parent_lattice(docs2)

    def test_get_distinct_rotations(self):
        """Distinct rotations of conventional Si cover all 48 symmetry ops."""
        struct = PymatgenTest.get_structure("Si")
        conv = SpacegroupAnalyzer(struct).get_conventional_standard_structure()
        rots = get_distinct_rotations(conv)
        ops = SpacegroupAnalyzer(conv).get_symmetry_operations()
        for op in ops:
            self.assertTrue(
                any(np.allclose(op.rotation_matrix, r) for r in rots))
        self.assertEqual(len(rots), 48)

    def test_process_elastic_calcs(self):
        test_struct = PymatgenTest.get_structure('Sn')  # use cubic test struct
        dss = DeformedStructureSet(test_struct)

        # Construct test task set: one optimization task plus one deformation
        # task per deformed structure, with a synthetic stress = 5 * strain.
        opt_task = {
            "output": {
                "structure": test_struct.as_dict()
            },
            "input": {
                "structure": test_struct.as_dict()
            }
        }
        defo_tasks = []
        for n, (struct, defo) in enumerate(zip(dss, dss.deformations)):
            strain = defo.green_lagrange_strain
            defo_task = {
                "output": {
                    "structure": struct.as_dict(),
                    "stress": (strain * 5).tolist()
                },
                "input": None,
                "task_id": n,
                "completed_at": datetime.utcnow()
            }
            defo_task.update({
                "transmuter": {
                    "transformation_params": [{
                        "deformation": defo
                    }]
                }
            })
            defo_tasks.append(defo_task)

        # Drop one task so exactly one calc must be derived by symmetry.
        defo_tasks.pop(0)
        explicit, derived = process_elastic_calcs(opt_task, defo_tasks)
        self.assertEqual(len(explicit), 23)
        self.assertEqual(len(derived), 1)

    def test_process_elastic_calcs_toec(self):
        # Test TOEC (third-order elastic constant) tasks
        test_struct = PymatgenTest.get_structure('Sn')  # use cubic test struct
        strain_states = get_default_strain_states(3)
        # Default stencil in atomate, this maybe shouldn't be hard-coded
        stencil = np.linspace(-0.075, 0.075, 7)
        strains = [
            Strain.from_voigt(s * np.array(strain_state))
            for s, strain_state in product(stencil, strain_states)
        ]
        strains = [s for s in strains if not np.allclose(s, 0)]
        sym_reduced = symmetry_reduce(strains, test_struct)
        opt_task = {
            "output": {
                "structure": test_struct.as_dict()
            },
            "input": {
                "structure": test_struct.as_dict()
            }
        }
        defo_tasks = []
        for n, strain in enumerate(sym_reduced):
            defo = strain.get_deformation_matrix()
            new_struct = defo.apply_to_structure(test_struct)
            defo_task = {
                "output": {
                    "structure": new_struct.as_dict(),
                    "stress": (strain * 5).tolist()
                },
                "input": None,
                "task_id": n,
                "completed_at": datetime.utcnow()
            }
            defo_task.update({
                "transmuter": {
                    "transformation_params": [{
                        "deformation": defo
                    }]
                }
            })
            defo_tasks.append(defo_task)
        explicit, derived = process_elastic_calcs(opt_task, defo_tasks)
        # Only symmetry-reduced strains are explicit; the rest are derived.
        self.assertEqual(len(explicit), len(sym_reduced))
        self.assertEqual(len(derived), len(strains) - len(sym_reduced))
        for calc in derived:
            self.assertTrue(
                np.allclose(calc['strain'], calc['cauchy_stress'] / -0.5))
示例#4
0
class ElasticAggregateBuilderTest(unittest.TestCase):
    """Tests for ElasticAggregateBuilder, which merges per-calculation
    elasticity documents (grouped by material) into an aggregated collection.
    """

    def setUp(self):
        # Empty aggregated collection
        self.test_elasticity_agg = MongoStore("test_emmet", "elasticity_agg")
        self.test_elasticity_agg.connect()

        # Generate test materials collection: four known structures with
        # zeroed magnetic moments, ids mp-0 .. mp-3.
        self.test_materials = MongoStore("test_emmet", "materials")
        self.test_materials.connect()
        mat_docs = []
        for n, formula in enumerate(['Si', 'BaNiO3', 'Li2O2', 'TiO2']):
            structure = PymatgenTest.get_structure(formula)
            structure.add_site_property("magmoms", [0.0] * len(structure))
            mat_docs.append({
                "task_id": "mp-{}".format(n),
                "structure": structure.as_dict(),
                "pretty_formula": formula
            })
        self.test_materials.update(mat_docs, update_lu=False)

        # Create elasticity collection and add docs, keyed on the
        # optimization task so two docs for the same formula can coexist.
        self.test_elasticity = MongoStore("test_emmet",
                                          "elasticity",
                                          key="optimization_task_id")
        self.test_elasticity.connect()

        # Synthetic cubic elastic tensor for Si.
        si = PymatgenTest.get_structure("Si")
        si.add_site_property("magmoms", [0.0] * len(si))
        et = ElasticTensor.from_voigt([[50, 25, 25, 0, 0, 0],
                                       [25, 50, 25, 0, 0, 0],
                                       [25, 25, 50, 0, 0, 0],
                                       [0, 0, 0, 75, 0,
                                        0], [0, 0, 0, 0, 75, 0],
                                       [0, 0, 0, 0, 0, 75]])
        doc = {
            "input_structure": si.copy().as_dict(),
            "order": 2,
            "magnetic_type": "non-magnetic",
            "optimization_task_id": "mp-1",
            "last_updated": datetime.utcnow(),
            "completed_at": datetime.utcnow(),
            "optimized_structure": si.copy().as_dict(),
            "pretty_formula": "Si",
            "state": "successful"
        }
        doc['elastic_tensor'] = et.voigt
        doc.update(et.property_dict)
        self.test_elasticity.update([doc])
        # Insert second doc with diff params (perturbed structure, new id)
        # so aggregation sees two fits for the same material.
        si.perturb(0.005)
        doc.update({
            "optimized_structure": si.copy().as_dict(),
            "updated_at": datetime.utcnow(),
            "optimization_task_id": "mp-5"
        })
        self.test_elasticity.update([doc])
        self.builder = self.get_a_new_builder()

    def tearDown(self):
        # Keep collections around for inspection when DEBUG_MODE is set.
        if not DEBUG_MODE:
            self.test_elasticity.collection.drop()
            self.test_elasticity_agg.collection.drop()
            self.test_materials.collection.drop()

    def test_materials_aggregator(self):
        """Grouping a matching structure by material id yields one group."""
        materials_dict = generate_formula_dict(self.test_materials)
        docs = []
        grouped_by_mpid = group_by_material_id(
            materials_dict['Si'],
            [{
                'structure': PymatgenTest.get_structure('Si').as_dict(),
                'magnetic_type': "non-magnetic"
            }])
        self.assertEqual(len(grouped_by_mpid), 1)
        materials_dict = generate_formula_dict(self.test_materials)

    def test_get_items(self):
        iterator = self.builder.get_items()
        for item in iterator:
            self.assertIsNotNone(item)

    def test_process_items(self):
        # Both Si elasticity docs should aggregate into a single processed
        # document containing two elastic fits.
        docs = list(
            self.test_elasticity.query(criteria={"pretty_formula": "Si"}))
        formula_dict = generate_formula_dict(self.test_materials)
        processed = self.builder.process_item((docs, formula_dict['Si']))
        self.assertEqual(len(processed), 1)
        self.assertEqual(len(processed[0]['all_elastic_fits']), 2)

    def test_update_targets(self):
        # Smoke test: process everything and write it to the target store.
        processed = [
            self.builder.process_item(item)
            for item in self.builder.get_items()
        ]
        self.builder.update_targets(processed)

    def test_aggregation(self):
        # End-to-end run should leave at least one aggregated document.
        runner = Runner([self.builder])
        runner.run()
        all_agg_docs = list(self.test_elasticity_agg.query())
        self.assertTrue(bool(all_agg_docs))

    def get_a_new_builder(self):
        """Construct a fresh builder wired to this test's three stores."""
        return ElasticAggregateBuilder(self.test_elasticity,
                                       self.test_materials,
                                       self.test_elasticity_agg)
示例#5
0
class TestCopyBuilder(TestCase):
    """End-to-end tests for CopyBuilder copying docs between two MongoStores."""

    @classmethod
    def setUpClass(cls):
        # One throwaway database for the whole test class.
        cls.dbname = "test_" + uuid4().hex
        s = MongoStore(cls.dbname, "test")
        s.connect()
        cls.client = s.collection.database.client

    @classmethod
    def tearDownClass(cls):
        cls.client.drop_database(cls.dbname)

    def setUp(self):
        # 20 "old" docs at time tic; 10 "new" docs one second later (toc)
        # sharing the first 10 keys, so incremental copies pick up only those.
        tic = datetime.now()
        toc = tic + timedelta(seconds=1)
        keys = list(range(20))
        self.old_docs = [{"lu": tic, "k": k, "v": "old"} for k in keys]
        self.new_docs = [{"lu": toc, "k": k, "v": "new"} for k in keys[:10]]
        kwargs = dict(key="k", lu_field="lu")
        self.source = MongoStore(self.dbname, "source", **kwargs)
        self.target = MongoStore(self.dbname, "target", **kwargs)
        self.builder = CopyBuilder(self.source, self.target)
        self.source.connect()
        self.source.collection.create_index("lu")
        self.target.connect()
        self.target.collection.create_index("lu")
        self.target.collection.create_index("k")

    def tearDown(self):
        self.source.collection.drop()
        self.target.collection.drop()

    def test_get_items(self):
        self.source.collection.insert_many(self.old_docs)
        self.assertEqual(len(list(self.builder.get_items())),
                         len(self.old_docs))
        # Once the target is caught up, nothing is left to copy.
        self.target.collection.insert_many(self.old_docs)
        self.assertEqual(len(list(self.builder.get_items())), 0)
        # Newer source docs (by lu) become pending again.
        self.source.update(self.new_docs, update_lu=False)
        self.assertEqual(len(list(self.builder.get_items())),
                         len(self.new_docs))

    def test_process_item(self):
        # CopyBuilder's process step is the identity.
        self.source.collection.insert_many(self.old_docs)
        items = list(self.builder.get_items())
        self.assertCountEqual(items, map(self.builder.process_item, items))

    def test_update_targets(self):
        self.source.collection.insert_many(self.old_docs)
        self.source.update(self.new_docs, update_lu=False)
        self.target.collection.insert_many(self.old_docs)
        items = list(map(self.builder.process_item, self.builder.get_items()))
        self.builder.update_targets(items)
        # Keys 0-9 were refreshed; keys 10-19 keep their old values.
        self.assertEqual(self.target.query_one(criteria={"k": 0})["v"], "new")
        self.assertEqual(self.target.query_one(criteria={"k": 10})["v"], "old")

    def test_confirm_lu_field_index(self):
        # Without an index on the lu field, get_items must refuse to run.
        self.source.collection.drop_index("lu_1")
        with self.assertRaises(Exception) as cm:
            self.builder.get_items()
        self.assertTrue(cm.exception.args[0].startswith("Need index"))
        self.source.collection.create_index("lu")

    def test_runner(self):
        self.source.collection.insert_many(self.old_docs)
        self.source.update(self.new_docs, update_lu=False)
        self.target.collection.insert_many(self.old_docs)
        runner = Runner([self.builder])
        runner.run()
        self.assertEqual(self.target.query_one(criteria={"k": 0})["v"], "new")
        self.assertEqual(self.target.query_one(criteria={"k": 10})["v"], "old")

    def test_query(self):
        self.builder.query = {"k": {"$gt": 5}}
        self.source.collection.insert_many(self.old_docs)
        self.source.update(self.new_docs, update_lu=False)
        runner = Runner([self.builder])
        runner.run()
        all_docs = list(self.target.query(criteria={}))
        self.assertEqual(len(all_docs), 14)
        # Fix: was assertTrue(min(...), 6) — assertTrue(expr, msg) treated 6
        # as the failure *message*, so the intended "minimum key is 6" check
        # never ran.
        self.assertEqual(min(d['k'] for d in all_docs), 6)
示例#6
0
class TestCopyBuilder(TestCase):
    """Tests for CopyBuilder: incremental copy, orphan deletion, queries."""

    @classmethod
    def setUpClass(cls):
        # One throwaway database for the whole test class.
        cls.dbname = "test_" + uuid4().hex
        s = MongoStore(cls.dbname, "test")
        s.connect()
        cls.client = s.collection.database.client

    @classmethod
    def tearDownClass(cls):
        cls.client.drop_database(cls.dbname)

    def setUp(self):
        # 20 "old" docs at time tic; 10 "new" docs one second later (toc)
        # sharing the first 10 keys.
        tic = datetime.now()
        toc = tic + timedelta(seconds=1)
        keys = list(range(20))
        self.old_docs = [{"lu": tic, "k": k, "v": "old"} for k in keys]
        self.new_docs = [{"lu": toc, "k": k, "v": "new"} for k in keys[:10]]
        kwargs = dict(key="k", lu_field="lu")
        self.source = MongoStore(self.dbname, "source", **kwargs)
        self.target = MongoStore(self.dbname, "target", **kwargs)
        self.builder = CopyBuilder(self.source, self.target)

        self.source.connect()
        self.source.ensure_index(self.source.key)
        self.source.ensure_index(self.source.lu_field)

        self.target.connect()
        self.target.ensure_index(self.target.key)
        self.target.ensure_index(self.target.lu_field)

    def tearDown(self):
        self.source.collection.drop()
        self.target.collection.drop()

    def test_get_items(self):
        self.source.collection.insert_many(self.old_docs)
        self.assertEqual(len(list(self.builder.get_items())),
                         len(self.old_docs))
        # Once the target is caught up, nothing is left to copy.
        self.target.collection.insert_many(self.old_docs)
        self.assertEqual(len(list(self.builder.get_items())), 0)
        # Newer source docs (by lu) become pending again.
        self.source.update(self.new_docs, update_lu=False)
        self.assertEqual(len(list(self.builder.get_items())),
                         len(self.new_docs))

    def test_process_item(self):
        # CopyBuilder's process step is the identity.
        self.source.collection.insert_many(self.old_docs)
        items = list(self.builder.get_items())
        self.assertCountEqual(items, map(self.builder.process_item, items))

    def test_update_targets(self):
        self.source.collection.insert_many(self.old_docs)
        self.source.update(self.new_docs, update_lu=False)
        self.target.collection.insert_many(self.old_docs)
        items = list(map(self.builder.process_item, self.builder.get_items()))
        self.builder.update_targets(items)
        self.assertEqual(self.target.query_one(criteria={"k": 0})["v"], "new")
        self.assertEqual(self.target.query_one(criteria={"k": 10})["v"], "old")

    @unittest.skip(
        "Have to refactor how we force read-only so a warning will get thrown")
    def test_index_warning(self):
        """Should log warning when recommended store indexes are not present."""
        self.source.collection.drop_index([(self.source.key, 1)])
        with self.assertLogs(level=logging.WARNING) as cm:
            list(self.builder.get_items())
        self.assertIn("Ensure indices", "\n".join(cm.output))

    def test_run(self):
        self.source.collection.insert_many(self.old_docs)
        self.source.update(self.new_docs, update_lu=False)
        self.target.collection.insert_many(self.old_docs)
        self.builder.run()
        self.assertEqual(self.target.query_one(criteria={"k": 0})["v"], "new")
        self.assertEqual(self.target.query_one(criteria={"k": 10})["v"], "old")

    def test_query(self):
        self.builder.query = {"k": {"$gt": 5}}
        self.source.collection.insert_many(self.old_docs)
        self.source.update(self.new_docs, update_lu=False)
        self.builder.run()
        all_docs = list(self.target.query(criteria={}))
        self.assertEqual(len(all_docs), 14)
        # Fix: was assertTrue(min(...), 6) — assertTrue(expr, msg) treated 6
        # as the failure *message*, so the intended "minimum key is 6" check
        # never ran.
        self.assertEqual(min(d['k'] for d in all_docs), 6)

    def test_delete_orphans(self):
        self.builder = CopyBuilder(self.source,
                                   self.target,
                                   delete_orphans=True)
        self.source.collection.insert_many(self.old_docs)
        self.source.update(self.new_docs, update_lu=False)
        self.target.collection.insert_many(self.old_docs)

        # Remove keys 0-4 from the source; the run should purge them from
        # the target as orphans.
        deletion_criteria = {"k": {"$in": list(range(5))}}
        self.source.collection.delete_many(deletion_criteria)
        self.builder.run()

        self.assertEqual(
            self.target.collection.count_documents(deletion_criteria), 0)
        self.assertEqual(self.target.query_one(criteria={"k": 5})["v"], "new")
        self.assertEqual(self.target.query_one(criteria={"k": 10})["v"], "old")

    def test_incremental_false(self):
        tic = datetime.now()
        toc = tic + timedelta(seconds=1)
        keys = list(range(20))
        earlier = [{"lu": tic, "k": k, "v": "val"} for k in keys]
        later = [{"lu": toc, "k": k, "v": "val"} for k in keys]
        self.source.collection.insert_many(earlier)
        self.target.collection.insert_many(later)
        query = {"k": {"$gt": 5}}
        self.builder = CopyBuilder(self.source,
                                   self.target,
                                   incremental=False,
                                   query=query)
        self.builder.run()
        docs = sorted(self.target.query(), key=lambda d: d["k"])
        # Fix: the original passed a *generator* to assertTrue —
        # ``assertTrue(all(d["lu"] == tic) for d in ...)`` — which is always
        # truthy and never evaluated, so nothing was asserted.  The split
        # index was also off by one: the query k > 5 copies keys 6-19, so
        # docs[:6] (keys 0-5) keep the later timestamp and docs[6:] get the
        # earlier one.
        self.assertTrue(all(d["lu"] == tic for d in docs[6:]))
        self.assertTrue(all(d["lu"] == toc for d in docs[:6]))
示例#7
0
class TestThermo(BuilderTest):
    """Integration tests for ThermoBuilder fed by MaterialsBuilder output."""

    def setUp(self):
        self.materials = MongoStore("emmet_test", "materials")
        self.thermo = MongoStore("emmet_test", "thermo")
        self.materials.connect()
        self.thermo.connect()

        self.mbuilder = MaterialsBuilder(
            self.tasks, self.materials, mat_prefix="", chunk_size=1)
        self.tbuilder = ThermoBuilder(
            self.materials, self.thermo, chunk_size=1)
        # Populate the materials collection before every test.
        Runner([self.mbuilder]).run()

    def _sr_entries(self):
        """Return the entries for the Sr-only system via a filtered builder."""
        builder = ThermoBuilder(self.materials,
                                self.thermo,
                                query={"elements": ["Sr"]},
                                chunk_size=1)
        return list(builder.get_items())[0]

    def test_get_entries(self):
        # Expected entry counts per chemical system in the test data.
        expected = {"Sr": 7, "Hf": 4, "O": 6, "Hf-O-Sr": 44, "Sr-Hf": 11}
        for chemsys, count in expected.items():
            self.assertEqual(len(self.tbuilder.get_entries(chemsys)), count)

    def test_get_items(self):
        self.thermo.collection.drop()
        systems = list(self.tbuilder.get_items())
        # Everything collapses into the single Hf-O-Sr super-system.
        self.assertEqual(len(systems), 1)
        self.assertEqual(len(systems[0]), 44)

    def test_process_item(self):
        entries = self._sr_entries()
        self.assertEqual(len(entries), 7)

        docs = self.tbuilder.process_item(entries)
        by_hull = sorted(docs, key=lambda d: d['thermo']['e_above_hull'])
        # mp-76 is the ground state on the Sr hull.
        self.assertEqual(by_hull[0]["task_id"], "mp-76")

    def test_update_targets(self):
        self.thermo.collection.drop()

        entries = self._sr_entries()
        self.assertEqual(len(entries), 7)

        docs = self.tbuilder.process_item(entries)
        self.tbuilder.update_targets([docs])
        self.assertEqual(len(list(self.thermo.query())), len(docs))

    def tearDown(self):
        self.materials.collection.drop()
        self.thermo.collection.drop()
示例#8
0
def get_ent_from_db(
    elec_store: MongoStore,
    material_store: MongoStore,
    tasks_store: MongoStore,
    batt_id: Union[str, int] = None,
    task_id: Union[str, int] = None,
    get_aeccar: bool = False,
    working_ion: str = "Li",
    add_fields: list = None,
    get_initial: bool = False,
):
    """
    Get the migration path information in the form of a ComputedEntryGraph
    object from the an atomate data stack

    The algorithm gets all tasks with structures that are valid (i.e. matches a
    base structure) and generates a migration pathway object using all possible
    relaxed working ion positions found in that set. Since each material entry
    might contain multiple calculations with different cell sizes, this will
    have to work at the task level. Need to group tasks together based on the
    cell size of the base material

    Note that SPGlib is some times inconsistent when it comes to the getting
    the number of symmetry operations for a given structure. Sometimes
    structures that are the same using StructureMatcher.fit will have
    different number of symmetry operation. As such we will check the number
    of operations for each base structure in a given family of structures
    and take the case with the highest number symmetry operations In cases
    where AECCAR is required, only the tasks with AECCARs will have this data.

    Args:

        elec_store: Electrode documents one per each similar group of
            insertion materials, can also use any db that contains a
        material_ids list with topotactic structures
        material_store: Material documenets one per each similar structure (
            multiple tasks)
        tasks_store: Task documents one per each VASP calculation
        batt_id: battery id to lookup in a database.
        task_id: if battery id is not provided then look up a materials id.
        get_aeccar: If True, only find base tasks with the charge density stored
        working_ion: Name of the working ion. Defaults to 'Li'.
        add_fields: Take these fields from the task_documents and store them
            in ComputedStructureEntry
        get_initial: Store the initial structure of a calculation

    Returns:
        (all_ents_base, all_ents_insert): lists of ComputedStructureEntry for
        tasks without and with the working ion, respectively.
    """

    # Ids may be stored as str or int depending on the database; sample one
    # document from each store so the lookups below can cast consistently.
    task_ids_type = type(material_store.query_one({})["task_ids"][0])
    material_ids_type = type(elec_store.query_one({})["material_ids"][0])

    logger.debug(material_ids_type)

    def get_task_ids_from_batt_id(b_id):
        """Battery id -> material ids -> flat list of all linked task ids."""
        mat_ids = list(
            map(task_ids_type,
                elec_store.query_one({"battid": b_id})["material_ids"]))
        logger.debug(f"mat_ids : {mat_ids}")
        l_task_ids = [
            imat["task_ids"]
            for imat in material_store.query({"task_ids": {
                "$in": mat_ids
            }})
        ]
        l_task_ids = list(chain.from_iterable(l_task_ids))
        logger.debug(f"l_task_ids : {l_task_ids}")
        return l_task_ids

    def get_batt_ids_from_task_id(t_id):
        """Task id -> sibling material task ids -> electrode material ids ->
        full set of task ids for every material in that electrode group."""
        # 1) Tasks of the material that owns t_id.
        l_task_ids = [
            c0["task_ids"]
            for c0 in material_store.query({"task_ids": {
                "$in": [int(t_id)]
            }})
        ]
        l_task_ids = list(chain.from_iterable(l_task_ids))
        l_task_ids = list(map(material_ids_type, l_task_ids))
        logger.debug(f"l_task_ids : {l_task_ids}")
        # 2) Material ids of every electrode containing one of those tasks.
        l_mat_ids = [
            c0["material_ids"]
            for c0 in elec_store.query({"material_ids": {
                "$in": l_task_ids
            }})
        ]
        l_mat_ids = list(chain.from_iterable(l_mat_ids))
        l_mat_ids = list(map(task_ids_type, l_mat_ids))
        logger.debug(f"l_mat_ids : {l_mat_ids}")
        # 3) All task ids of those materials.
        l_task_ids = [
            c0["task_ids"]
            for c0 in material_store.query({"task_ids": {
                "$in": l_mat_ids
            }})
        ]
        l_task_ids = list(chain.from_iterable(l_task_ids))
        logger.debug(f"l_task_ids : {l_task_ids}")
        return l_task_ids

    def get_entry(task_doc,
                  base_with_aeccar=False,
                  add_fields=None,
                  get_initial=None):
        """Build a ComputedStructureEntry from a task document, optionally
        attaching AECCAR data, selected task fields, and the initial structure.
        """
        # we don't really need to think about compatibility for now if just
        # want to make a code that automate NEB calculations
        tmp_struct = Structure.from_dict(task_doc["output"]["structure"])
        # NOTE(review): "rung_type" looks like a typo for "run_type", but it
        # is a runtime dict key that downstream consumers may rely on —
        # confirm before renaming.
        settings_dict = dict(
            potcar_spec=task_doc["calcs_reversed"][0]["input"]["potcar_spec"],
            rung_type=task_doc["calcs_reversed"][0]["run_type"],
        )
        if "is_hubbard" in task_doc["calcs_reversed"][0].keys():
            settings_dict["hubbards"] = task_doc["calcs_reversed"][0][
                "hubbards"]
            # NOTE(review): stored as a 1-tuple, not a bare bool — verify
            # this is intentional for whatever reads "is_hubbard" later.
            settings_dict["is_hubbard"] = (
                task_doc["calcs_reversed"][0]["is_hubbard"], )

        entry = ComputedStructureEntry(
            structure=tmp_struct,
            energy=task_doc["output"]["energy"],
            parameters=settings_dict,
            entry_id=task_doc["task_id"],
        )
        if base_with_aeccar:
            logger.debug("test")
            aec_id = tasks_store.query_one({"task_id":
                                            entry.entry_id})["task_id"]
            aeccar = get_aeccar_from_store(tasks_store, aec_id)
            entry.data.update({"aeccar": aeccar})

        if add_fields:
            for field in add_fields:
                if field in task_doc:
                    entry.data.update({field: task_doc[field]})
        if get_initial:
            entry.data.update(
                {"initial_structure": task_doc["input"]["structure"]})

        return entry

    # Require a single base entry and multiple inserted entries to populate
    # the migration pathways

    # getting a full list of task ids
    # Batt_id -> material_id -> all task_ids
    # task_id -> mat_ids -> Batt_ids -> material_id -> all task_ids
    if batt_id:
        all_tasks = get_task_ids_from_batt_id(batt_id)
    else:
        all_tasks = get_batt_ids_from_task_id(task_id)
    # get_all the structures
    # Base entries: tasks that do NOT contain the working ion.
    if get_aeccar:
        all_ents_base = [
            get_entry(
                c0,
                base_with_aeccar=True,
                add_fields=add_fields,
                get_initial=get_initial,
            ) for c0 in tasks_store.query({
                "task_id": {
                    "$in": all_tasks
                },
                "elements": {
                    "$nin": [working_ion]
                },
                "calcs_reversed.0.aeccar0_fs_id": {
                    "$exists": 1
                },
            })
        ]
    else:
        # NOTE(review): this branch does not forward add_fields/get_initial,
        # unlike the aeccar branch and the inserted-entry loop below —
        # confirm whether that asymmetry is intentional.
        all_ents_base = [
            get_entry(c0) for c0 in tasks_store.query({
                "task_id": {
                    "$in": all_tasks
                },
                "elements": {
                    "$nin": [working_ion]
                }
            })
        ]
    logger.debug(f"Number of base entries: {len(all_ents_base)}")

    # Inserted entries: tasks that DO contain the working ion.
    all_ents_insert = [
        get_entry(c0, add_fields=add_fields, get_initial=get_initial)
        for c0 in tasks_store.query({
            "task_id": {
                "$in": all_tasks
            },
            "elements": {
                "$in": [working_ion]
            }
        })
    ]
    logger.debug(f"Number of inserted entries: {len(all_ents_insert)}")
    tmp = [f"{itr.name}({itr.entry_id})" for itr in all_ents_insert]
    logger.debug(f"{tmp}")
    return all_ents_base, all_ents_insert