Example #1
File: core.py  Project: xhqu1981/MPWorks
from matgendb.builders.util import get_builder_log
from base import SNLGroupBaseChecker
from init_plotly import categories
from mpworks.snl_utils.mpsnl import MPStructureNL
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer

_log = get_builder_log("snl_group_checks")

class SNLGroupCrossChecker(SNLGroupBaseChecker):
    """cross-check all SNL Groups via StructureMatcher.fit of their canonical SNLs"""
    def process_item(self, item, index):
        nrow, ncol, snlgroups = super(SNLGroupCrossChecker, self).process_item(item, index)
        for idx,primary_id in enumerate(item['snlgroup_ids'][:-1]):
            cat_key = ''
            local_mismatch_dict = dict((k,[]) for k in categories[self.checker_name])
            primary_group = snlgroups[primary_id]
            composition, primary_sg_num = primary_group.canonical_snl.snlgroup_key.split('--')
            for secondary_id in item['snlgroup_ids'][idx+1:]:
                secondary_group = snlgroups[secondary_id]
                secondary_sg_num = secondary_group.canonical_snl.snlgroup_key.split('--')[1]
                if not self._matcher.fit(
                    primary_group.canonical_structure,
                    secondary_group.canonical_structure
                ): continue
                cat_key = 'same SGs' if primary_sg_num == secondary_sg_num else 'diff. SGs'
                local_mismatch_dict[cat_key].append('(%d,%d)' % (primary_id, secondary_id))
            if cat_key:
                _log.info('(%d) %r', self._counter_total.value, local_mismatch_dict)
            self._increase_counter(nrow, ncol, local_mismatch_dict)

class SNLGroupIcsdChecker(SNLGroupBaseChecker):
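
The process_item loop above compares every unordered pair of snlgroup_ids and records the pairs whose canonical structures match. A minimal sketch of that pairwise pattern with pymatgen's StructureMatcher (the cross_check helper and its argument are illustrative, not part of MPWorks):

from itertools import combinations
from pymatgen.analysis.structure_matcher import StructureMatcher

def cross_check(structures):
    """Return index pairs (i, j) whose structures are equivalent under fit()."""
    matcher = StructureMatcher()
    return [(i, j)
            for (i, s1), (j, s2) in combinations(enumerate(structures), 2)
            if matcher.fit(s1, s2)]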
Example #2
"""
Copies from one collection to another.
With the optional incremental feature, only records added since the last run are
copied, so running twice in immediate succession makes the second run a no-op.

To run:

mgbuild
"""
__author__ = 'Dan Gunter <*****@*****.**>'
__date__ = '4/22/14'

from matgendb.builders import core
from matgendb.builders import util
from matgendb.query_engine import QueryEngine

_log = util.get_builder_log("copy")

class CopyBuilder(core.Builder):
    """Copy from one MongoDB collection to another.
    """
    def __init__(self, *args, **kwargs):
        self._target_coll = None
        core.Builder.__init__(self, *args, **kwargs)

    def get_items(self, source=None, target=None, crit=None):
        """Copy records from source to target collection.

        :param source: Input collection
        :type source: QueryEngine
        :param target: Output collection
        :type target: QueryEngine
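
For context, the per-record copy step such a builder performs can be sketched as an upsert keyed on _id using pymongo's replace_one; this process_item body is an assumption for illustration (the snippet above does not show the real implementation):

    def process_item(self, item):
        """Upsert one source document into the target collection (sketch)."""
        doc = dict(item)  # copy the cursor document before writing it out
        self._target_coll.replace_one({"_id": doc["_id"]}, doc, upsert=True)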
Example #3
from abc import ABCMeta, abstractmethod
import copy
import logging
import multiprocessing
try:
    import Queue
except ImportError:
    import queue as Queue
import traceback
# local
from matgendb.builders import schema, util
from matgendb import util as dbutil

## Logging

_log = util.get_builder_log("core")

## Exceptions


class BuildError(Exception):
    def __init__(self, who, why):
        errmsg = "Builder {} failed: {}".format(who, why)
        Exception.__init__(self, errmsg)

## Versioning (experimental)

DB_VERSION = 1

## Functions
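
BuildError simply prefixes the failing builder's name onto the reason it is given; a short usage sketch (the builder name and message below are made up):

try:
    raise BuildError("CopyBuilder", "target collection not set")
except BuildError as err:
    _log.error("%s", err)  # logs "Builder CopyBuilder failed: target collection not set"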
Example #4
"""
Build a derived collection with the maximum
value from each 'group' defined in the source
collection.
"""
__author__ = 'Dan Gunter <*****@*****.**>'
__date__ = '5/21/14'

from matgendb.builders import core
from matgendb.builders import util
from matgendb.query_engine import QueryEngine

_log = util.get_builder_log("incr")


class MaxValueBuilder(core.Builder):
    """Example of incremental builder that requires
    some custom logic for incremental case.
    """
    def get_items(self, source=None, target=None):
        """Get all records from source collection to add to target.

        :param source: Input collection
        :type source: QueryEngine
        :param target: Output collection
        :type target: QueryEngine
        """
        self._groups = self.shared_dict()
        self._target_coll = target.collection
        self._src = source
        return source.query()
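
get_items only stashes a shared dict for the per-group maxima and returns the source cursor; the reduction itself would live in process_item. A hedged sketch of that step (the 'group' and 'value' field names are assumptions):

    def process_item(self, item):
        """Track the running maximum value seen for this item's group (sketch)."""
        grp, val = item["group"], item["value"]
        cur = self._groups.get(grp)
        if cur is None or val > cur:
            self._groups[grp] = val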
Example #5
# system
from abc import ABCMeta, abstractmethod
import copy
import logging
import multiprocessing
import Queue
import traceback
# local
from matgendb.builders import schema, util
from matgendb import util as dbutil
import six

## Logging

_log = util.get_builder_log("core")

## Exceptions


class BuildError(Exception):
    def __init__(self, who, why):
        errmsg = "Builder {} failed: {}".format(who, why)
        Exception.__init__(self, errmsg)

## Versioning (experimental)

DB_VERSION = 1

## Functions
Example #6
    print(mark.update().as_dict())

"""
__author__ = 'Dan Gunter <*****@*****.**>'
__date__ = '4/11/14'

from abc import abstractmethod, ABCMeta
import pymongo
from enum import Enum
from matgendb.query_engine import QueryEngine
from matgendb.builders import util as bld_util
import six

# Logging

_log = bld_util.get_builder_log("incr")

# Exceptions


class DBError(Exception):
    """Generic database error.
    """
    pass


class NoTrackingCollection(Exception):
    """Raised if no tracking collection is present,
    but some operation is requested on that collection.
    """
    pass
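
NoTrackingCollection marks operations that need the tracking collection when none is configured; a hedged guard sketch (the latest_mark helper and its tracker argument are hypothetical, not part of matgendb):

def latest_mark(tracker):
    """Return the newest tracking document, or raise if tracking is disabled."""
    if tracker is None:
        raise NoTrackingCollection("no tracking collection configured")
    return tracker.find_one(sort=[("_id", pymongo.DESCENDING)])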