Example No. 1
def test_pickelable_tinydb_can_be_pickled_and_unpickled():
    """PickleableTinyDB should be able to be pickled and unpickled."""
    test_dict = {'test_key': ['test', 'values']}
    db = PickleableTinyDB(storage=MemoryStorage)
    db.insert(test_dict)
    db = pickle.loads(pickle.dumps(db))
    assert db.search(where('test_key').exists())[0] == test_dict
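For context, here is a minimal sketch of what a pickle-friendly TinyDB subclass could look like. This is an illustrative assumption, not necessarily the project's actual implementation: the idea is to serialize only the raw documents and rebuild a fresh in-memory database on unpickling.

from tinydb import TinyDB
from tinydb.storages import MemoryStorage


class PickleableTinyDB(TinyDB):
    """TinyDB backed by MemoryStorage that can round-trip through pickle (sketch)."""

    def __getstate__(self):
        # Keep only the raw documents; storage and table objects are not picklable.
        return {'docs': [dict(doc) for doc in self.all()]}

    def __setstate__(self, state):
        # Rebuild a fresh in-memory database and re-insert the saved documents.
        self.__init__(storage=MemoryStorage)
        for doc in state['docs']:
            self.insert(doc)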
Example No. 2
def load_datasets(dataset_filenames):
    """
    Create a PickleableTinyDB with the data from a list of filenames.

    Parameters
    ----------
    dataset_filenames : [str]
        List of filenames to load as datasets

    Returns
    -------
    PickleableTinyDB
    """
    ds_database = PickleableTinyDB(storage=MemoryStorage)
    for fname in dataset_filenames:
        with open(fname) as file_:
            try:
                d = json.load(file_)
                check_dataset(d)
                ds_database.insert(clean_dataset(d))
            except ValueError as e:
                raise ValueError('JSON Error in {}: {}'.format(fname, e))
            except DatasetError as e:
                raise DatasetError('Dataset Error in {}: {}'.format(fname, e))
    return ds_database
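A hypothetical usage example (the directory name is an assumption): gather the dataset JSON files with glob and load them into an in-memory database.

from glob import glob

# 'input-data' is a hypothetical directory containing dataset JSON files.
dataset_files = sorted(glob('input-data/**/*.json', recursive=True))
datasets = load_datasets(dataset_files)
print(f'Loaded {len(datasets)} datasets')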
Example No. 3
def load_datasets(dataset_filenames, include_disabled=False) -> PickleableTinyDB:
    """
    Create a PickleableTinyDB with the data from a list of filenames.

    Parameters
    ----------
    dataset_filenames : [str]
        List of filenames to load as datasets
    include_disabled : bool, optional
        If True, also load datasets that are marked as disabled.

    Returns
    -------
    PickleableTinyDB
    """
    ds_database = PickleableTinyDB(storage=MemoryStorage)
    for fname in dataset_filenames:
        with open(fname) as file_:
            try:
                d = json.load(file_)
                if not include_disabled and d.get('disabled', False):
                    # The dataset is disabled and not included
                    continue
                check_dataset(d)
                ds_database.insert(clean_dataset(d))
            except ValueError as e:
                raise ValueError('JSON Error in {}: {}'.format(fname, e))
            except DatasetError as e:
                raise DatasetError('Dataset Error in {}: {}'.format(fname, e))
    return ds_database
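Hypothetical usage of the include_disabled flag added in this variant: passing include_disabled=True loads every file, even those marked "disabled": true, for example to audit which datasets have been switched off. The file list is the same hypothetical glob as in the previous snippet.

from glob import glob

all_files = sorted(glob('input-data/**/*.json', recursive=True))  # hypothetical path
active_db = load_datasets(all_files)                         # skips disabled datasets
full_db = load_datasets(all_files, include_disabled=True)    # keeps everything
print(f'{len(full_db) - len(active_db)} datasets are currently disabled')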
Example No. 4
def test_dataplot_plots_binary_equilibria_types():
    """Dataplot should be able to reproduce a single boundary (null) equilibria, a tieline, and a 3 phase equilibria for a binary"""
    ds = PickleableTinyDB(storage=MemoryStorage)
    ds.insert(A_B_DATASET_BINARY_PHASE_EQUILIBRIA)

    comps = ['A', 'B']
    phases = ["PHASE_1", "PHASE_2", "PHASE_3"]
    conds = {v.P: 101325, v.T: (0, 400, 40), v.X('B'): (0, 1, 0.01)}

    ax = dataplot(comps, phases, conds, ds)
    ax.set_xlim(0, 1)
    ax.set_ylim(0, 400)
Example No. 5
def test_get_data_for_a_minimal_example():
    """Given a dataset and the congfiguration pertaining to that dataset, we should find the values."""
    SAMPLE_DATASET = {
        "components": ["CU", "MG", "VA"],
        "phases": ["LAVES_C15"],
        "solver": {
            "mode":
            "manual",
            "sublattice_site_ratios": [2, 1],
            "sublattice_configurations": [["CU", "MG"], ["MG", "CU"],
                                          ["MG", "MG"], ["CU", "CU"]]
        },
        "conditions": {
            "P": 101325,
            "T": 298.15
        },
        "output": "HM_FORM",
        "values": [[[-15720, 34720, 7000, 15500]]]
    }
    datasets = PickleableTinyDB(storage=MemoryStorage)
    datasets.insert(SAMPLE_DATASET)
    comps = ['CU', 'MG', 'VA']
    phase_name = 'LAVES_C15'
    configuration = ('MG', 'CU')
    symmetry = None
    desired_props = ['HM_FORM']

    # The following lines replace "get_data" in a more functional form
    solver_qry = (tinydb.where('solver').test(
        symmetry_filter, configuration,
        recursive_tuplify(symmetry) if symmetry else symmetry))
    desired_data = get_prop_data(comps,
                                 phase_name,
                                 desired_props,
                                 datasets,
                                 additional_query=solver_qry)
    desired_data = filter_configurations(desired_data, configuration, symmetry)
    desired_data = filter_temperatures(desired_data)

    assert len(desired_data) == 1
    desired_data = desired_data[0]
    assert desired_data['components'] == comps
    assert desired_data['phases'][0] == phase_name
    assert desired_data['solver']['sublattice_site_ratios'] == [2, 1]
    assert desired_data['solver']['sublattice_configurations'] == (('MG', 'CU'),)
    assert desired_data['conditions']['P'] == 101325
    assert desired_data['conditions']['T'] == 298.15
    assert desired_data['output'] == 'HM_FORM'
    assert desired_data['values'] == np.array([[[34720.0]]])
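For comparison with Example No. 6 below, the functional steps above could be composed into a helper with the same signature as get_data. This is only a sketch of that composition, not necessarily the library's actual implementation.

def get_data(comps, phase_name, configuration, symmetry, datasets, desired_props):
    # Build the solver query, fetch the matching property data, then narrow it
    # down by configuration and temperature, exactly as in the steps above.
    solver_qry = tinydb.where('solver').test(
        symmetry_filter, configuration,
        recursive_tuplify(symmetry) if symmetry else symmetry)
    data = get_prop_data(comps, phase_name, desired_props, datasets,
                         additional_query=solver_qry)
    data = filter_configurations(data, configuration, symmetry)
    return filter_temperatures(data)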
Example No. 6
def test_get_data_for_a_minimal_example():
    """Given a dataset and the congfiguration pertaining to that dataset, we should find the values."""
    SAMPLE_DATASET = {
        "components": ["CU", "MG", "VA"],
        "phases": ["LAVES_C15"],
        "solver": {
            "mode":
            "manual",
            "sublattice_site_ratios": [2, 1],
            "sublattice_configurations": [["CU", "MG"], ["MG", "CU"],
                                          ["MG", "MG"], ["CU", "CU"]]
        },
        "conditions": {
            "P": 101325,
            "T": 298.15
        },
        "output": "HM_FORM",
        "values": [[[-15720, 34720, 7000, 15500]]]
    }
    datasets = PickleableTinyDB(storage=MemoryStorage)
    datasets.insert(SAMPLE_DATASET)
    comps = ['CU', 'MG', 'VA']
    phase_name = 'LAVES_C15'
    configuration = ('MG', 'CU')
    symmetry = None
    desired_props = ['HM_FORM']

    desired_data = get_data(comps, phase_name, configuration, symmetry,
                            datasets, desired_props)
    assert len(desired_data) == 1
    desired_data = desired_data[0]
    assert desired_data['components'] == comps
    assert desired_data['phases'][0] == phase_name
    assert desired_data['solver']['sublattice_site_ratios'] == [2, 1]
    assert desired_data['solver']['sublattice_configurations'] == (('MG', 'CU'),)
    assert desired_data['conditions']['P'] == 101325
    assert desired_data['conditions']['T'] == 298.15
    assert desired_data['output'] == 'HM_FORM'
    assert desired_data['values'] == np.array([[[34720.0]]])
Example No. 7
def test_weighting_invariance():
    """Test that weights do not affect model selection using perfect L0 and L1 cases."""
    phase_models = {
        "components": ["AL", "B"],
        "phases": {
            "ALPHA": {
                "sublattice_model": [["AL", "B"]],
                "sublattice_site_ratios": [1]
            }
        }
    }

    L0_data = {
        "components": ["AL", "B"],
        "phases": ["ALPHA"],
        "solver": {
            "sublattice_site_ratios": [1],
            "sublattice_occupancies": [[[0.5, 0.5]]],
            "sublattice_configurations": [[["AL", "B"]]],
            "mode": "manual"
        },
        "conditions": {
            "P": 101325,
            "T": 298.15
        },
        "output": "HM_MIX",
        "values": [[[-1000]]]
    }

    L1_data = {
        "components": ["AL", "B"],
        "phases": ["ALPHA"],
        "solver": {
            "sublattice_site_ratios": [1],
            "sublattice_occupancies": [[[0.25, 0.75]], [[0.5, 0.5]],
                                       [[0.75, 0.25]]],
            "sublattice_configurations": [[["AL", "B"]], [["AL", "B"]],
                                          [["AL", "B"]]],
            "mode":
            "manual"
        },
        "conditions": {
            "P": 101325,
            "T": 298.15
        },
        "output": "HM_MIX",
        "values": [[[-1000.0, 0, 1000.0]]]
    }

    # Perfect L0, no weight
    datasets_db = PickleableTinyDB(storage=MemoryStorage)
    datasets_db.insert(L0_data)
    dbf = generate_parameters(phase_models, datasets_db, 'SGTE91', 'linear')
    datasets_db.close()
    params = dbf._parameters.search(where('parameter_type') == 'L')
    print([f"L{p['parameter_order']}: {p['parameter']}" for p in params])
    print({
        str(p['parameter']): dbf.symbols[str(p['parameter'])]
        for p in params
    })
    assert len(params) == 1
    assert dbf.symbols['VV0000'] == -4000

    # Perfect L0, with weight
    datasets_db = PickleableTinyDB(storage=MemoryStorage)
    L0_data['weight'] = 0.1  # lower weight
    datasets_db.insert(L0_data)
    dbf = generate_parameters(phase_models, datasets_db, 'SGTE91', 'linear')
    datasets_db.close()
    params = dbf._parameters.search(where('parameter_type') == 'L')
    print([f"L{p['parameter_order']}: {p['parameter']}" for p in params])
    print({
        str(p['parameter']): dbf.symbols[str(p['parameter'])]
        for p in params
    })
    assert len(params) == 1
    assert dbf.symbols['VV0000'] == -4000

    # Perfect L1, no weight
    datasets_db = PickleableTinyDB(storage=MemoryStorage)
    datasets_db.insert(L1_data)
    dbf = generate_parameters(phase_models, datasets_db, 'SGTE91', 'linear')
    datasets_db.close()
    params = dbf._parameters.search(where('parameter_type') == 'L')
    print([f"L{p['parameter_order']}: {p['parameter']}" for p in params])
    print({
        str(p['parameter']): dbf.symbols[str(p['parameter'])]
        for p in params
    })
    assert len(params) == 2
    assert np.isclose(dbf.symbols['VV0000'], 1000 * 32 / 3)  # L1
    assert np.isclose(dbf.symbols['VV0001'], 0)  # L0

    # Perfect L1, with weight
    datasets_db = PickleableTinyDB(storage=MemoryStorage)
    L1_data['weight'] = 0.1  # lower weight
    datasets_db.insert(L1_data)
    dbf = generate_parameters(phase_models, datasets_db, 'SGTE91', 'linear')
    datasets_db.close()
    params = dbf._parameters.search(where('parameter_type') == 'L')
    print([f"L{p['parameter_order']}: {p['parameter']}" for p in params])
    print({
        str(p['parameter']): dbf.symbols[str(p['parameter'])]
        for p in params
    })
    # TODO: sometimes the presence of L0 terms can be flaky
    # assert len(params) == 2
    assert np.isclose(dbf.symbols['VV0000'], 1000 * 32 / 3)  # L1
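As a sanity check on the asserted values, assuming the standard single-sublattice Redlich-Kister excess model G_ex = y_A * y_B * (L0 + (y_A - y_B) * L1), the perfect-L0 and perfect-L1 datasets above pin down the parameters by simple arithmetic.

import numpy as np

# Perfect L0 dataset: one point at y = (0.5, 0.5) with HM_MIX = -1000,
# so 0.5 * 0.5 * L0 = -1000  =>  L0 = -4000 (matches VV0000 above).
assert np.isclose(-1000 / (0.5 * 0.5), -4000)

# Perfect L1 dataset: the midpoint (0.5, 0.5) with HM_MIX = 0 forces L0 = 0,
# and the point y = (0.75, 0.25) with HM_MIX = +1000 gives
# 0.75 * 0.25 * (0.75 - 0.25) * L1 = 1000  =>  L1 = 1000 * 32 / 3.
assert np.isclose(1000 / (0.75 * 0.25 * (0.75 - 0.25)), 1000 * 32 / 3)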