Example 1: pytest fixture that copies a small Result file into a temporary directory and restricts it to the increment(s) at time 20.0 using the older pick API.
def default(tmp_path, reference_dir):
    """Small Result file in temp location for modification."""
    fname = '12grains6x7x8_tensionY.hdf5'
    shutil.copy(os.path.join(reference_dir, fname), tmp_path)
    f = Result(os.path.join(tmp_path, fname))
    f.pick('times', 20.0)
    return f
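As background for the fixtures in this collection, here is a minimal, self-contained sketch of the pytest fixture pattern they rely on. The fixture and test names are hypothetical, and the fixture returns a plain path instead of a damask Result object so the sketch carries no external dependencies:

import pytest

@pytest.fixture
def small_result(tmp_path):
    # Hypothetical stand-in for the fixtures above: build a path inside the
    # pytest-provided temporary directory instead of copying a real HDF5 file.
    return tmp_path / '12grains6x7x8_tensionY.hdf5'

def test_uses_fixture(small_result):
    # pytest injects the fixture's return value by matching the parameter name.
    assert small_result.suffix == '.hdf5'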
Example 2: test of Result.save_VTK that polls until the written .vtr file's MD5 hash stops changing, then compares it against a stored reference hash.
 def test_vtk(self, request, tmp_path, ref_path, update,
              patch_execution_stamp, patch_datetime_now, output, fname,
              inc):
     result = Result(ref_path / fname).view('increments', inc)
     os.chdir(tmp_path)
     result.save_VTK(output)
     # Name of the VTK file written by save_VTK for this increment.
     fname = fname.split('.')[0] + f'_inc{(inc if type(inc) == int else inc[0]):0>2}.vtr'
     # Poll until the file exists and its MD5 hash stops changing between
     # checks, i.e. until the file appears to be completely written.
     last = ''
     for i in range(10):
         if os.path.isfile(tmp_path / fname):
             with open(fname) as f:
                 cur = hashlib.md5(f.read().encode()).hexdigest()
                 if cur == last:
                     break
                 else:
                     last = cur
         time.sleep(.5)
     if update:
         with open((ref_path / 'save_VTK' /
                    request.node.name).with_suffix('.md5'), 'w') as f:
             f.write(cur)
     with open((ref_path / 'save_VTK' /
                request.node.name).with_suffix('.md5')) as f:
         assert cur == f.read()
Example 3: test of Result.get that applies a sequence of views and compares the returned data against a bz2-compressed pickled reference.
    def test_get(self, update, request, ref_path, view, output, flatten,
                 prune):
        result = Result(ref_path / '4grains2x4x3_compressionY.hdf5')
        for key, value in view.items():
            result = result.view(key, value)

        fname = request.node.name
        cur = result.get(output, flatten, prune)
        if update:
            with bz2.BZ2File((ref_path / 'get' / fname).with_suffix('.pbz2'),
                             'w') as f:
                pickle.dump(cur, f)

        with bz2.BZ2File((ref_path / 'get' / fname).with_suffix('.pbz2')) as f:
            ref = pickle.load(f)
            assert cur is None if ref is None else dict_equal(cur, ref)
Example 4: test of Result.export_VTK (the newer API) that rewrites the VTK comments to a fixed string before hashing the written .vti file.
 def test_vtk(self, request, tmp_path, ref_path, update,
              patch_execution_stamp, patch_datetime_now, output, fname,
              inc):
     result = Result(ref_path / fname).view(increments=inc)
     os.chdir(tmp_path)
     result.export_VTK(output, parallel=False)
     # Name of the VTK image file written by export_VTK for this increment.
     fname = fname.split('.')[0] + f'_inc{(inc if type(inc) == int else inc[0]):0>2}.vti'
     # Overwrite the file's comments with a fixed string so that its MD5 hash
     # is reproducible across runs.
     v = VTK.load(tmp_path / fname)
     v.set_comments('n/a')
     v.save(tmp_path / fname, parallel=False)
     with open(fname) as f:
         cur = hashlib.md5(f.read().encode()).hexdigest()
     if update:
         with open((ref_path / 'export_VTK' /
                    request.node.name).with_suffix('.md5'), 'w') as f:
             f.write(cur + '\n')
     with open((ref_path / 'export_VTK' /
                request.node.name).with_suffix('.md5')) as f:
         assert cur == f.read().strip('\n')
Example 5: wrapper around read_HDF5_file that optionally exports VTK visualisations of selected increments into a viz subdirectory.
def read_damask_hdf5_file(hdf5_path, incremental_data, operations=None, visualise=None):

    out = read_HDF5_file(hdf5_path, incremental_data, operations=operations)

    if visualise is not None:

        if visualise is True:
            visualise = {}

        os.mkdir('viz')
        with working_directory('viz'):

            from damask import Result

            result = Result(hdf5_path)

            incs = visualise.pop('increments', None)
            if incs:
                if not isinstance(incs, list):
                    incs = [incs]
                # Convert negative increment indices to absolute positions.
                incs_normed = []
                for i in incs:
                    if i >= 0:
                        i_normed = i
                    else:
                        i_normed = len(result.increments) + i
                    incs_normed.append(i_normed)
                result.pick('increments', incs_normed)
            result.to_vtk(**visualise)

    return out
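For illustration, a hedged sketch of how read_damask_hdf5_file might be called with the visualise option; the file name, dataset name, and dataset path are assumptions rather than values taken from the surrounding examples. Any keys left in visualise after increments is popped are forwarded to Result.to_vtk.

# Hypothetical call (illustrative arguments): extract one dataset and also
# write VTK files for the first and last increments into the 'viz' subdirectory.
out = read_damask_hdf5_file(
    'geom_load.hdf5',
    incremental_data=[{'name': 'def_grad', 'path': 'constituent/1_Al/generic/F'}],
    visualise={'increments': [0, -1]},  # negative indices count back from the last increment
)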
Example 6: Marc check that nodal and point displacements can be added to the undeformed coordinates and that there are more nodal than point coordinates.
 def test_marc_coordinates(self, ref_path):
     result = Result(ref_path / 'check_compile_job1.hdf5').view(
         'increments', -1)
     c_n = result.coordinates0_node + result.get('u_n')
     c_p = result.coordinates0_point + result.get('u_p')
     assert len(c_n) > len(c_p)
Example 7: smoke test exporting VTK output from a Marc result file.
 def test_vtk_marc(self, tmp_path, ref_path, mode, output):
     os.chdir(tmp_path)
     result = Result(ref_path / 'check_compile_job1.hdf5')
     result.save_VTK(output, mode)
Example 8: pytest fixture providing a single-phase Result file copied into a temporary directory.
def single_phase(tmp_path, ref_path):
    """Single phase Result file in temp location for modification."""
    fname = '6grains6x7x8_single_phase_tensionY.hdf5'
    shutil.copy(ref_path / fname, tmp_path)
    return Result(tmp_path / fname)
Example 9: variant of the fixture from Example 1 that uses pathlib-style paths and the newer view API.
def default(tmp_path, ref_path):
    """Small Result file in temp location for modification."""
    fname = '12grains6x7x8_tensionY.hdf5'
    shutil.copy(ref_path / fname, tmp_path)
    f = Result(tmp_path / fname)
    return f.view('times', 20.0)
Example 10: read_HDF5_file, which applies Result/DADF5 post-processing operations and extracts incremental datasets from a DAMASK HDF5 file.
def read_HDF5_file(hdf5_path, incremental_data, operations=None):
    """Operate on and extract data from an HDF5 file generated by a DAMASK run.

    Parameters
    ----------
    hdf5_path : Path or str
        Path to the HDF5 file.
    incremental_data : list of dict
        List of incremental data to extract from the HDF5 file. This is a list of dicts
        with the following keys:
            name: str
                The name by which the quantity will be stored in the output dict.
            path: str
                The HDF5 "path" to the dataset.
            transforms : list of dict, optional
                If specified, this is a list of dicts, each with the following keys:
                    sum_along_axes : int, optional
                        If specified, take the sum of the array along this axis.
                    mean_along_axes : int, optional
                        If specified, take the mean of the array along this axis.
            increments : int, optional
                Increment selection passed through to `get_HDF5_incremental_quantity`;
                defaults to 1.
    operations : list of dict, optional
        List of methods to invoke on the DADF5 object. This is a list of dicts with the
        following keys:
            name : str
                The name of the DADF5 method.
            args : dict
                Parameter names and their values to pass to the DADF5 method. This
                assumes all DADF5 method parameters are of positional-or-keyword type.
            opts : dict
                Additional options; currently only the `add_Mises` key is acted upon.

    Returns
    -------
    volume_element_response : dict
        Dict with keys determined by the `incremental_data` list.

    """

    # Newer DAMASK versions expose the Result class; fall back to the older
    # DADF5 class if Result is not available.
    try:
        from damask import Result
        sim_data = Result(hdf5_path)
    except ImportError:
        from damask import DADF5
        sim_data = DADF5(hdf5_path)

    for op in operations or []:
        func = getattr(sim_data, op['name'], None)
        if not func:
            raise AttributeError(
                f'The Result object has no attribute: {op["name"]}.')
        else:
            func(**op['args'])

        # Deal with specific options:
        if op['opts'].get('add_Mises', {}):

            if op["name"] == 'add_Cauchy':
                label = 'sigma'

            elif op["name"] == 'add_strain_tensor':
                # Include defaults from `DADF5.add_strain_tensor`:
                t = op['args'].get('t', 'U')
                m = op['args'].get('m', 0)
                F = op['args'].get('F', 'F')
                label = f'epsilon_{t}^{m}({F})'

            else:
                msg = (
                    f'Operation "{op["name"]}" is not compatible with option '
                    f'"add_Mises".')
                raise ValueError(msg)

            sim_data.add_Mises(label)

    volume_element_response = {}
    for inc_dat_spec in incremental_data:
        inc_dat = get_HDF5_incremental_quantity(
            hdf5_path=hdf5_path,
            dat_path=inc_dat_spec['path'],
            transforms=inc_dat_spec.get('transforms'),
            increments=inc_dat_spec.get('increments', 1),
        )
        volume_element_response.update({
            inc_dat_spec['name']: {
                'data': inc_dat,
                'meta': {
                    'path': inc_dat_spec['path'],
                    'transforms': inc_dat_spec.get('transforms'),
                    'increments': inc_dat_spec.get('increments', 1),
                },
            }
        })

    return volume_element_response
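To make the expected shapes of incremental_data and operations concrete, here is a hedged usage sketch of read_HDF5_file; the HDF5 file name, dataset path, and output key are illustrative assumptions. Each entry of the returned dict holds the extracted values under 'data' and the originating specification under 'meta'.

# Hypothetical call (illustrative arguments).
incremental_data = [
    {
        'name': 'vol_avg_stress',                  # key in the returned dict
        'path': 'constituent/1_Al/generic/sigma',  # HDF5 "path" to the dataset
        'transforms': [{'mean_along_axes': 0}],    # average over the first axis
    },
]
operations = [
    # Add the Cauchy stress and, via the add_Mises option, its von Mises equivalent.
    {'name': 'add_Cauchy', 'args': {'P': 'P', 'F': 'F'}, 'opts': {'add_Mises': True}},
]
response = read_HDF5_file('geom_load.hdf5', incremental_data, operations=operations)
print(response['vol_avg_stress']['data'])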
Example 11: test of Result.export_setup, called twice to exercise the overwrite flag.
 def test_export_setup(self, ref_path, tmp_path, fname, output, overwrite):
     os.chdir(tmp_path)
     r = Result(ref_path / fname)
     r.export_setup(output, overwrite)
     r.export_setup(output, overwrite)
Example 12: test that add_curl('F') raises NotImplementedError for this result file.
 def test_add_generic_grid_invalid(self, ref_path):
     result = Result(ref_path / '4grains2x4x3_compressionY.hdf5')
     with pytest.raises(NotImplementedError):
         result.add_curl('F')
Example 13: variant of the single-phase fixture from Example 8 that uses os.path.join with a reference_dir path.
def single_phase(tmp_path, reference_dir):
    """Single phase Result file in temp location for modification."""
    fname = '6grains6x7x8_single_phase_tensionY.hdf5'
    shutil.copy(os.path.join(reference_dir, fname), tmp_path)
    return Result(os.path.join(tmp_path, fname))