Example 1
    def get_structures_with_filter(
            self, optimade_filter: str) -> Dict[str, Structure]:
        """
        Get structures satisfying a given OPTIMADE filter.

        Args:
            optimade_filter: An OPTIMADE-compliant filter

        Returns: Dict of Structure objects keyed by that database's id system
        """

        # Request only the fields needed to build structures from the response
        fields = "response_fields=lattice_vectors,cartesian_site_positions,species,species_at_sites"

        # `fields` already includes the "response_fields=" key, so append it as-is
        url = f"{self.resource}/v1/structures?filter={optimade_filter}&{fields}"

        json = self.session.get(url, timeout=self._timeout).json()

        structures = self._get_structures_from_resource(json)

        if "next" in json["links"] and json["links"]["next"]:
            pbar = PBar(total=json["meta"].get("data_returned"))
            while "next" in json["links"] and json["links"]["next"]:
                json = self.session.get(json["links"]["next"],
                                        timeout=self._timeout).json()
                structures.update(self._get_structures_from_resource(json))
                pbar.update(len(structures))

        return structures
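The pagination above follows the OPTIMADE convention: each response carries a `links.next` URL (possibly as an object with an `href` key) and a `meta.data_returned` count. Below is a minimal, self-contained sketch of the same pattern using `requests` directly; the function name and the provider URL in the trailing comment are placeholders, not part of the original class.

import requests

FIELDS = "lattice_vectors,cartesian_site_positions,species,species_at_sites"

def fetch_all_structure_data(base_url, optimade_filter, timeout=10):
    """Collect the raw "data" entries from every page of an OPTIMADE structures query."""
    # First page: let requests handle URL encoding of the filter string
    payload = requests.get(
        f"{base_url}/v1/structures",
        params={"filter": optimade_filter, "response_fields": FIELDS},
        timeout=timeout,
    ).json()
    results = list(payload.get("data", []))

    # Follow "links.next" until the provider reports no further pages;
    # "next" may be a plain URL or a links object with an "href" key
    next_link = payload.get("links", {}).get("next")
    while next_link:
        if isinstance(next_link, dict):
            next_link = next_link.get("href")
        payload = requests.get(next_link, timeout=timeout).json()
        results.extend(payload.get("data", []))
        next_link = payload.get("links", {}).get("next")
    return results

# Example call against a hypothetical provider URL:
# fetch_all_structure_data("https://example-optimade-provider.org",
#                          'elements HAS ALL "Si","O" AND nelements=2')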
Example 2
    def get_snls_with_filter(
            self, optimade_filter: str) -> Dict[str, Dict[str, StructureNL]]:
        """
        Get structures satisfying a given OPTIMADE filter.

        Args:
            filter: An OPTIMADE-compliant filter

        Returns: Dict of Structures keyed by that database's id system
        """

        all_snls = {}

        for identifier, resource in self.resources.items():

            fields = "response_fields=lattice_vectors,cartesian_site_positions,species,species_at_sites"

            url = join(resource,
                       f"v1/structures?filter={optimade_filter}&{fields}")

            try:

                json = self._get_json(url)

                structures = self._get_snls_from_resource(
                    json, url, identifier)

                pbar = PBar(total=json["meta"].get("data_returned", 0),
                            desc=identifier,
                            initial=len(structures))

                # TODO: check spec for `more_data_available` boolean, may simplify this conditional
                if ("links" in json) and ("next" in json["links"]) and (
                        json["links"]["next"]):
                    while "next" in json["links"] and json["links"]["next"]:
                        next_link = json["links"]["next"]
                        if isinstance(next_link, dict) and "href" in next_link:
                            next_link = next_link["href"]
                        json = self._get_json(next_link)
                        additional_structures = self._get_snls_from_resource(
                            json, url, identifier)
                        structures.update(additional_structures)
                        pbar.update(len(additional_structures))

                if structures:

                    all_snls[identifier] = structures

            except Exception as exc:

                # TODO: manually inspect failures to either (a) correct a bug or (b) raise more appropriate error

                _logger.error(
                    f"Could not retrieve required information from provider {identifier} and url {url}: {exc}"
                )

        return all_snls
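This method queries every registered provider independently and deliberately isolates per-provider failures, so one unreachable database does not abort the whole query. A small sketch of that aggregation pattern is shown below, with `fetch_one` standing in for the JSON retrieval and SNL parsing done above; both names are illustrative, not part of the original class.

import logging

_logger = logging.getLogger(__name__)

def query_many_providers(resources, fetch_one):
    """Query several providers independently, keeping whatever succeeds.

    `resources` maps provider identifiers to base URLs, and `fetch_one` is any
    callable (identifier, url) -> dict of results.
    """
    collected = {}
    for identifier, url in resources.items():
        try:
            results = fetch_one(identifier, url)
        except Exception as exc:
            # A single failing provider should not abort the whole query
            _logger.error("Could not retrieve data from %s (%s): %s",
                          identifier, url, exc)
            continue
        if results:
            collected[identifier] = results
    return collected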
Example 3
    def _preprocess_pourbaix_entries(self, entries, nproc=None):
        """
        Generates multi-entries for the Pourbaix diagram.

        Args:
            entries ([PourbaixEntry]): list of PourbaixEntries to preprocess
                into MultiEntries
            nproc (int): number of processes to be used in parallel
                treatment of entry combos

        Returns:
            ([MultiEntry]) list of stable MultiEntry candidates

        """
        # Get composition
        tot_comp = Composition(self._elt_comp)

        min_entries, valid_facets = self._get_hull_in_nph_nphi_space(entries)

        combos = []
        # Enumerate all sub-combinations of entries on each stable facet,
        # up to self.dim + 1 entries at a time
        for facet in valid_facets:
            for i in range(1, self.dim + 2):
                these_combos = []
                for combo in itertools.combinations(facet, i):
                    these_entries = [min_entries[idx] for idx in combo]
                    these_combos.append(frozenset(these_entries))
                combos.append(these_combos)

        # Deduplicate the sub-combinations and convert each back to a list
        all_combos = set(itertools.chain.from_iterable(combos))
        all_combos = [list(combo) for combo in all_combos]

        multi_entries = []

        # Parallel processing of multi-entry generation
        if nproc is not None:
            f = partial(self.process_multientry, prod_comp=tot_comp)
            with Pool(nproc) as p:
                multi_entries = list(
                    PBar(p.imap(f, all_combos), total=len(all_combos)))
            multi_entries = list(filter(bool, multi_entries))
        else:
            # Serial processing of multi-entry generation
            for combo in PBar(all_combos):
                multi_entry = self.process_multientry(combo,
                                                      prod_comp=tot_comp)
                if multi_entry:
                    multi_entries.append(multi_entry)

        return multi_entries
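The `nproc` switch is a standard pattern: the same worker (here `process_multientry` bound to the product composition via `functools.partial`) is mapped either through a `multiprocessing.Pool` or a plain loop, and falsy results are filtered out. The sketch below reproduces that pattern with `tqdm` used directly as the progress bar (the original `PBar` appears to be a tqdm-style wrapper) and a made-up `_score` worker in place of `process_multientry`.

from functools import partial
from multiprocessing import Pool

from tqdm import tqdm


def _score(item, offset=0):
    # Stand-in worker for process_multientry: returns a result or None
    value = item + offset
    return value if value % 2 else None


def process_items(items, nproc=None, offset=10):
    """Apply the worker to every item, in parallel when nproc is given."""
    items = list(items)
    worker = partial(_score, offset=offset)
    if nproc is not None:
        # Parallel branch: imap keeps results in order and streams them to tqdm
        with Pool(nproc) as pool:
            results = list(tqdm(pool.imap(worker, items), total=len(items)))
        return [r for r in results if r]
    # Serial branch mirrors the parallel one, filtering falsy results
    return [r for r in (worker(item) for item in tqdm(items)) if r]


if __name__ == "__main__":
    print(process_items(range(20), nproc=2))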
Example 4
    def _generate_multielement_entries(self, entries, nproc=None):
        """
        Create entries for multi-element Pourbaix construction.

        This works by finding all possible linear combinations
        of entries that can result in the specified composition
        from the initialized comp_dict.

        Args:
            entries ([PourbaixEntry]): list of PourbaixEntries
                to process into MultiEntries
            nproc (int): number of processes to be used in parallel
                treatment of entry combos

        Returns:
            ([MultiEntry]) list of processed MultiEntry objects
        """

        N = len(self._elt_comp)  # No. of elements
        total_comp = Composition(self._elt_comp)

        # generate all combinations of compounds that have all elements
        entry_combos = [
            itertools.combinations(entries, j + 1) for j in range(N)
        ]
        entry_combos = itertools.chain.from_iterable(entry_combos)

        entry_combos = filter(lambda x: total_comp < MultiEntry(x).composition,
                              entry_combos)

        # Generate and filter entries
        processed_entries = []
        # `comb` is the binomial coefficient (e.g. math.comb or scipy.special.comb)
        total = sum(comb(len(entries), j + 1) for j in range(N))
        if total > 1e6:
            warnings.warn(
                f"Your Pourbaix diagram includes {total} entry combinations and may take a long time to generate."
            )

        # Parallel processing of multi-entry generation
        if nproc is not None:
            f = partial(self.process_multientry, prod_comp=total_comp)
            with Pool(nproc) as p:
                processed_entries = list(
                    PBar(p.imap(f, entry_combos), total=total))
            processed_entries = list(filter(bool, processed_entries))
        # Serial processing of multi-entry generation
        else:
            for entry_combo in entry_combos:
                processed_entry = self.process_multientry(
                    entry_combo, total_comp)
                if processed_entry is not None:
                    processed_entries.append(processed_entry)

        return processed_entries
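The `total > 1e6` warning exists because the number of candidate combinations grows combinatorially: it is the sum of binomial coefficients C(len(entries), j + 1) for j up to the number of elements. A quick illustration with a hypothetical helper:

from math import comb


def n_entry_combinations(n_entries, n_elements):
    """Number of non-empty entry combinations of size 1 .. n_elements."""
    return sum(comb(n_entries, j + 1) for j in range(n_elements))


# 60 entries in a 3-element system already gives
# 60 + 1770 + 34220 = 36050 candidate combinations to screen
print(n_entry_combinations(60, 3))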