Code example #1
def test_generate_group():
    # test 1, generate a group
    parse_dir = os.path.join(test_source_dir, 'TwoDSubduction', 'parse')
    _test_prm_file = os.path.join(parse_dir, 'base.prm')
    _config_file = os.path.join(parse_dir, 'base_config_group.json')
    _odir = os.path.join(test_dir, 'test_group')
    if os.path.isdir(_odir):
        # remove older files
        rmtree(_odir)
    os.mkdir(_odir)
    # Read files
    with open(_test_prm_file, 'r') as fin:
        _inputs = Parse.ParseFromDealiiInput(fin)
    with open(_config_file, 'r') as fin:
        _json_inputs = json.load(fin)
    # Initial test group
    MyGroup = Parse.GROUP_CASE(TwoDSubduction.MYCASE, _inputs, _json_inputs)
    # Call __call__ to generate cases
    _extra = {'T660': 1663.0, 'P660': 21e9, 'LowerV': 1.5e-6}  # extra configuration
    _operations = ['LowerMantle', "MeshRefinement"]  # operations to do
    parse_operations = TwoDSubduction.MY_PARSE_OPERATIONS()
    MyGroup(parse_operations, _odir, operations=_operations, extra=_extra)
    # Assertions
    _case_names = ['ULV1.000e+02testIAR6', 'ULV1.000e+02testIAR8', 'ULV3.000e+01testIAR6', 'ULV3.000e+01testIAR8']
    for _case_name in _case_names:
        _case_dir = os.path.join(_odir, _case_name)  # e.g. 'ULV1.000e+02testIAR6'
        _prm_file = os.path.join(_case_dir, 'case.prm')
        assert(os.path.isfile(_prm_file))
Code example #2
def test_change_disc_values():
    test_file = os.path.join(os.path.dirname(__file__), 'fixtures', 'parse_test.prm')
    assert(os.access(test_file, os.R_OK))
    with open(test_file, 'r') as fin:
        inputs = Parse.ParseFromDealiiInput(fin)
    _config = {'names': [['End time'], ['Material model', 'Visco Plastic', 'Reset corner viscosity constant']],
               'values': ['80.0e6', '1e21']}
    Parse.ChangeDiscValues(inputs, _config['names'], _config['values'])  # call function
    assert(inputs['End time'] == '80.0e6')
    assert(inputs['Material model']['Visco Plastic']['Reset corner viscosity constant'] == '1e21')
    pass
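
The call above relies on Parse.ChangeDiscValues walking the nested input dictionary along each key path in _config['names'] and assigning the matching entry of _config['values']. A minimal sketch of that nested-key update (illustrative only, not the actual Parse.ChangeDiscValues implementation):

def change_disc_values_sketch(inputs, names, values):
    # assign values[i] at the nested key path names[i]
    for name_path, value in zip(names, values):
        entry = inputs
        for key in name_path[:-1]:
            entry = entry[key]          # descend into subsections
        entry[name_path[-1]] = value    # set the leaf parameter

With the config from the test, the first path sets inputs['End time'] directly, while the second descends two levels before setting 'Reset corner viscosity constant'.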
Code example #3
def test_get_snaps_steps():
    case_dir = os.path.join(test_source_dir, 'foo')
    
    # call function for graphical outputs
    snaps, times, steps = Parse.GetSnapsSteps(case_dir)
    # assertions
    assert(snaps == [6, 7, 8, 9])
    assert(times == [0.0, 100000.0, 200000.0, 300000.0])
    assert(steps == [0, 104, 231, 373])
    
    # call function for particle outputs
    snaps, times, steps = Parse.GetSnapsSteps(case_dir, 'particle')
    # assertions
    assert(snaps == [0, 1])
    assert(times == [0.0, 2e5])
    assert(steps == [0, 231])
Code example #4
def test_parse_from_file():
    # test_file = 'fixtures/parse_test.prm'
    test_file = os.path.join(os.path.dirname(__file__), 'fixtures', 'parse_test.prm')
    assert(os.access(test_file, os.R_OK))
    with open(test_file, 'r') as fin:
        inputs = Parse.ParseFromDealiiInput(fin)
    assert(inputs['Dimension'] == '2')
    assert(inputs['Use years in output instead of seconds'] == 'true')
    assert(inputs['End time'] == '40.0e6')
    assert(inputs['Additional shared libraries']
           == '/home/lochy/ASPECT_PROJECT/aspect_plugins/subduction_temperature2d/libsubduction_temperature2d.so, /home/lochy/ASPECT_PROJECT/aspect_plugins/prescribe_field/libprescribed_temperature.so')
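
The assertions above also document the shape of what ParseFromDealiiInput returns: every deal.II parameter ends up as a string, and subsections become nested dictionaries (compare the nested access in test_change_disc_values). A rough sketch of the expected structure, assuming the usual deal.II 'set Key = value' / 'subsection Name ... end' syntax in fixtures/parse_test.prm (the fixture itself is not reproduced here):

# hypothetical illustration of the parsed structure, not the full fixture content
inputs_like = {
    'Dimension': '2',
    'Use years in output instead of seconds': 'true',
    'End time': '40.0e6',
    'Material model': {
        'Visco Plastic': {
            # further 'set' entries of this subsection appear here as strings
        },
    },
}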
Code example #5
def SlabMorph(case_dir, kwargs={}):
    """
    Slab morphology
    Inputs:
        case_dir(str): directory of case
        kwargs(dict): options
    """
    case_output_dir = os.path.join(case_dir, 'output')
    case_morph_dir = os.path.join(case_output_dir, 'slab_morphs')

    # Initiation
    Visit_Xyz = VISIT_XYZ()

    # a header for interpreting file format
    # note that 'col' starts from 0
    header = {
        'x': {
            'col': 1,
            'unit': 'm'
        },
        'y': {
            'col': 2,
            'unit': 'm'
        },
        'id': {
            'col': 4
        }
    }

    # depth range
    # this is for computing dip angles with different ranges
    depth_ranges = kwargs.get('depth_ranges',
                              [[0, 100e3], [100e3, 400e3], [400e3, 6371e3]])
    my_assert(
        type(depth_ranges) == list, TypeError, "depth_ranges must be a list")

    # remove older results
    ofile = os.path.join(case_output_dir, 'slab_morph')
    if os.path.isfile(ofile):
        os.remove(ofile)

    # loop over every snapshot and call the function
    snaps, times, _ = Parse.GetSnapsSteps(case_dir, 'particle')

    for i in snaps:
        visit_xyz_file = os.path.join(case_morph_dir,
                                      'visit_particles_%06d.xyz' % i)
        Visit_Xyz(visit_xyz_file,
                  header=header,
                  ofile=ofile,
                  depth_ranges=depth_ranges,
                  time=times[i])
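
A usage sketch for SlabMorph, assuming the case directory already contains output/slab_morphs/visit_particles_*.xyz files ('some_case_dir' below is a placeholder, not a path from this project):

# hypothetical call; override the default depth ranges used for the dip-angle computation
SlabMorph('some_case_dir',
          {'depth_ranges': [[0, 200e3], [200e3, 660e3]]})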
Code example #6
def test_visit_options():
    case_dir = os.path.join(test_source_dir, 'foo')

    # initiate 
    Visit_Options = Parse.VISIT_OPTIONS(case_dir)

    # call the Interpret function
    Visit_Options.Interpret()

    # compare to standard
    json_file = os.path.join(case_dir, 'odict_std.json')
    with open(json_file, 'r') as fin:
        odict_std = json.load(fin)

    assert(Visit_Options.odict == odict_std)
    pass
Code example #7
def test_parse_to_new_case():
    # test usage 1:
    #   step a: read a parameter file
    #   step b: initiate a CASE class with parameters
    #   step c: call the class function UpdatePrmDict explicitly to update parameters
    #   step d: call the class __call__ function with method='manual' and filename=some_file
    test_file = os.path.join(os.path.dirname(__file__), 'fixtures', 'parse_test.prm')
    assert(os.access(test_file, os.R_OK))
    with open(test_file, 'r') as fin:
        inputs = Parse.ParseFromDealiiInput(fin)
    Case = Parse.CASE(inputs)
    _names = [['End time'], ['Material model', 'Visco Plastic', 'Reset corner viscosity constant']]
    _values = ['80.0e6', '1e21']
    Case.UpdatePrmDict(_names, _values)
    assert(Case.idict['End time'] == '80.0e6')
    assert(Case.idict['Material model']['Visco Plastic']['Reset corner viscosity constant'] == '1e21')
    # call the CASE __call__ function to generate an output file
    _ofile = os.path.join(_test_dir, 'foo.prm')
    if os.path.isfile(_ofile):
        # remove older file
        os.remove(_ofile)
    parse_operations = Parse.PARSE_OPERATIONS()
    Case(parse_operations, method='manual', filename=_ofile)
    assert(os.path.isfile(_ofile))
    # test usage 2:
    #   step a: read a parameter file
    #   step b: initiate a CASE class with parameters and a config dictionary
    #   step c: call the class __call__ function with method='auto'
    _odir = os.path.join('.test', 'test_case_by_auto')
    _ofile = os.path.join(_odir, 'case.prm')
    if os.path.isdir(_odir):
        # remove older directory
        rmtree(_odir)
    test_file = os.path.join(os.path.dirname(__file__), 'fixtures', 'parse_test.prm')
    assert(os.access(test_file, os.R_OK))
    with open(test_file, 'r') as fin:
        inputs = Parse.ParseFromDealiiInput(fin)
    Case = Parse.CASE(inputs, config={})
    parse_operations = Parse.PARSE_OPERATIONS()
    Case(parse_operations, dirname='.test', basename='test_case_by_auto')
    assert(os.path.isfile(_ofile))
Code example #8
def test_get_sub_cases():
    case_dirs = Parse.GetSubCases(test_source_dir)
    assert(case_dirs == 
           ['/home/lochy/ASPECT_PROJECT/aspectLib/tests/integration/fixtures/parse/foo1',
           '/home/lochy/ASPECT_PROJECT/aspectLib/tests/integration/fixtures/parse/foo'])
Code example #9
def main():
    '''
    main function of this module
    Inputs:
        sys.argv[1](str):
            command
        sys.argv[2:](str):
            options
    '''
    _commend = sys.argv[1]
    # parse options
    parser = argparse.ArgumentParser(description='TwoDSubduction Project')
    parser.add_argument('-b',
                        '--base_file',
                        type=str,
                        default='./files/TwoDSubduction/base.prm',
                        help='Filename for base file')
    parser.add_argument('-U',
                        '--use_basename_as_base_file',
                        type=int,
                        default=1,
                        help='Whether we use basename as base file')
    parser.add_argument('-j',
                        '--json_file',
                        type=str,
                        default='./config_case.json',
                        help='Filename for json file')
    parser.add_argument('-o',
                        '--output_dir',
                        type=str,
                        default='../TwoDSubduction/',
                        help='Directory for output')
    parser.add_argument(
        '-e',
        '--operations_file',
        type=str,
        default=None,
        help=
        'A file that has a list of operations, if not given, do all the available operations'
    )
    parser.add_argument('-i',
                        '--input_dir',
                        type=str,
                        default=shilofue_DIR,
                        help='A directory that contains the input')
    parser.add_argument('-s', '--step', type=int, default=0, help='timestep')
    parser.add_argument('-ex',
                        '--extension',
                        type=str,
                        default='png',
                        help='extension for output')
    _options = []
    try:
        _options = sys.argv[2:]
    except IndexError:
        pass
    arg = parser.parse_args(_options)

    # execute command
    if _commend == 'create_group':
        # create a group of cases
        # example usage:
        #    python -m shilofue.TwoDSubduction create_group -j config_group.json 2>&1 > .temp
        # read files
        # read configuration
        with open(arg.json_file, 'r') as fin:
            _config = json.load(fin)
        _base_name = _config.get('basename', '')
        # read base file
        if arg.use_basename_as_base_file == 1:
            _filename = './files/TwoDSubduction/%s.prm' % _base_name
        else:
            _filename = arg.base_file
        with open(_filename, 'r') as fin:
            _inputs = Parse.ParseFromDealiiInput(fin)
        if not os.path.isdir(arg.output_dir):
            os.mkdir(arg.output_dir)
            print('Now we create a group of cases:')  # screen output
        else:
            print('Now we update a group of cases:')  # screen output

        # create a directory under the name of the group
        _group_name = _config.get('name', 'foo')
        _odir = os.path.join(arg.output_dir, _group_name)
        # By default, we don't update
        update_ = _config.get('update', 0)
        if not update_:
            my_assert(
                os.path.isdir(_odir) is False, ValueError,
                'Going to update a pre-existing group, but update is not included in the options'
            )
        if not os.path.isdir(_odir):
            os.mkdir(_odir)

        # initiate a class instance
        MyGroup = Parse.GROUP_CASE(MYCASE, _inputs, _config)
        # call __call__ function to generate
        _extra = _config.get('extra', {})
        # add an entry for parse_operations
        parse_operations = MY_PARSE_OPERATIONS()
        _case_names = MyGroup(parse_operations,
                              _odir,
                              extra=_extra,
                              basename=_base_name,
                              update=update_)
        # generate auto.md
        # check if there is already a preexisting group
        Parse.AutoMarkdownGroup(_group_name, _config, dirname=_odir)
        for _case_name in _case_names:
            _case_dir = os.path.join(_odir, _case_name)
            _case_json_file = os.path.join(_case_dir, 'config.json')
            with open(_case_json_file, 'r') as fin:
                _case_config = json.load(fin)
            Parse.AutoMarkdownCase(_case_name, _case_config, dirname=_case_dir)
        print(_group_name)
        for _case_name in _case_names:
            # output to screen
            print(_case_name)

    elif _commend == 'create':
        print('Now we create a single case:')  # screen output
        # create a case
        # read files
        # read configuration
        with open(arg.json_file, 'r') as fin:
            _config = json.load(fin)
        _base_name = _config.get('basename', '')
        # read base file
        if arg.use_basename_as_base_file == 1:
            _filename = './files/TwoDSubduction/%s.prm' % _base_name
        else:
            _filename = arg.base_file
        with open(_filename, 'r') as fin:
            _inputs = Parse.ParseFromDealiiInput(fin)
        if not os.path.isdir(arg.output_dir):
            os.mkdir(arg.output_dir)
        # Initiate a case
        MyCase = MYCASE(_inputs,
                        config=_config['config'],
                        test=_config['test'])
        # call __call__ function to generate
        _extra = _config.get('extra', {})
        # also add extra files
        _extra_files = _config.get('extra_file', {})
        # add an entry for parse_operations
        parse_operations = MY_PARSE_OPERATIONS()
        _case_name = MyCase(parse_operations,
                            dirname=arg.output_dir,
                            extra=_extra,
                            basename=_base_name,
                            extra_file=_extra_files)
        # generate markdown file
        _case_dir = os.path.join(arg.output_dir, _case_name)
        Parse.AutoMarkdownCase(_case_name, _config, dirname=_case_dir)
        print(_case_name)
        # check this group exists
        my_assert(os.path.isdir(arg.output_dir), FileExistsError,
                  "%s doesn't exist" % arg.output_dir)
        # initiate class instance (future)
        # MyCase = MYCASE(_inputs, config=_config['config'], test=_config['test'])
        # call function to return case names
        # check that these cases exist

        pass

    elif _commend == 'update_docs':
        # update the contents of the mkdocs
        # example usage:
        #   python -m shilofue.TwoDSubduction update_docs -o /home/lochy/ASPECT_PROJECT/TwoDSubduction -j post_process.json
        _project_dir = arg.output_dir
        _project_dict = Parse.UpdateProjectJson(
            _project_dir)  # update project json file

        # load options for post_process
        # load the project level configuration as default
        project_pp_json = os.path.join(ASPECT_LAB_DIR, 'files', project,
                                       'post_process.json')
        with open(project_pp_json, 'r') as fin:
            pdict = json.load(fin)
        # load explicitly defined parameters
        with open(arg.json_file, 'r') as fin:
            pdict1 = json.load(fin)
        pdict.update(pdict1)

        # append analysis
        analysis_file = os.path.join(ASPECT_LAB_DIR, 'analysis.json')
        if os.path.isfile(analysis_file):
            with open(analysis_file, 'r') as fin:
                analysis_dict = json.load(fin)
        else:
            analysis_dict = {}

        # update docs
        docs_dict = pdict.get('docs', {})
        imgs = docs_dict.get('imgs', [])
        Doc.UpdateProjectDoc(_project_dict,
                             _project_dir,
                             images=imgs,
                             analysis=analysis_dict)

    elif _commend == 'update':
        # update a case
        # example usage:
        #   python -m shilofue.TwoDSubduction update -o /home/lochy/ASPECT_PROJECT/TwoDSubduction -j post_process.json
        _project_dir = arg.output_dir
        _project_dict = Parse.UpdateProjectJson(
            _project_dir)  # update project json file

        # load options for post_process
        # load the project level configuration as default
        project_pp_json = os.path.join(ASPECT_LAB_DIR, 'files', project,
                                       'post_process.json')
        with open(project_pp_json, 'r') as fin:
            pdict = json.load(fin)
        # load explicitly defined parameters
        with open(arg.json_file, 'r') as fin:
            pdict1 = json.load(fin)
        # update every entry in pdict1
        for key, value in pdict1.items():
            if type(value) == dict:
                try:
                    _ = pdict[key]
                    pdict[key].update(value)
                except KeyError:
                    pdict[key] = value
            else:
                pdict[key] = value

        # update auto.md file for every case
        Parse.UpdateProjectMd(_project_dict, _project_dir)

        # plot figures for every case
        # get sub cases
        pp_source_dirs = pdict.get('dirs', [])
        _format = pdict.get('py_format', 'png')
        for pp_source_dir_base in pp_source_dirs:
            pp_source_dir = os.path.join(_project_dir, pp_source_dir_base)
            pp_case_dirs = Parse.GetSubCases(pp_source_dir)
            Plot.ProjectPlot(pp_case_dirs, _format, update=False, pdict=pdict)
            # deal with project defined plots
            ProjectPlot(pp_case_dirs, _format, update=False, pdict=pdict)

    elif _commend == 'plot_newton_solver_step':
        # Plot one step from Newton solver
        # use -i option as input and -o option as output dir
        # example usage:
        #   python -m shilofue.TwoDSubduction plot_newton_solver_step -i tests/integration/fixtures/test-plot/newton_solver -o .test -s 1 --extension pdf
        filein = arg.input_dir
        output_dir = arg.output_dir
        step = arg.step
        extension = arg.extension
        ofile_route = os.path.join(output_dir,
                                   'NewtonSolverStep.%s' % extension)
        # plot newton solver output
        NewtonSolverStep = Plot.NEWTON_SOLVER_PLOT('NewtonSolverStep')
        # plot the given step
        NewtonSolverStep.GetStep(step)
        NewtonSolverStep(filein, fileout=ofile_route)
        pass

    elif _commend == 'plot_newton_solver':
        # plot the whole history outputs from Newton solver
        # use -i option as input and -o option as output dir
        # example usages:
        #   python -m shilofue.TwoDSubduction plot_newton_solver -i tests/integration/fixtures/test-plot/newton_solver -o .test
        filein = arg.input_dir
        output_dir = arg.output_dir
        step = arg.step
        ofile_route = os.path.join(output_dir, 'NewtonSolver.pdf')
        # plot newton solver output
        NewtonSolverStep = Plot.NEWTON_SOLVER_PLOT('NewtonSolver')
        # plot the whole history output
        NewtonSolverStep(filein, fileout=ofile_route)
        pass

    elif _commend == 'plot_machine_time':
        # plot the machine time output
        # use -i option as input and -o option as output dir
        # example usages:
        #   python -m shilofue.TwoDSubduction plot_machine_time -i tests/integration/fixtures/test-plot/machine_time -o .test
        filein = arg.input_dir
        output_dir = arg.output_dir
        ofile = os.path.join(output_dir, 'MachineTime.pdf')
        # plot machine time output
        MachineTime = Plot.MACHINE_TIME_PLOT('MachineTime')
        MachineTime(filein, fileout=ofile)
        pass

    elif _commend == 'plot_slab_morph':
        # plot the slab morph output
        # use -i option as input and -o option as output dir
        # example usages:
        #   python -m shilofue.TwoDSubduction plot_slab_morph
        #       -i /home/lochy/ASPECT_PROJECT/TwoDSubduction/non_linear26/cr80w5ULV3.000e+01/output/slab_morph
        #       -o /home/lochy/ASPECT_PROJECT/TwoDSubduction/non_linear26/cr80w5ULV3.000e+01/img
        filein = arg.input_dir
        output_dir = arg.output_dir
        ofile = os.path.join(output_dir, 'slab_morph.png')
        # Init the UnitConvert class
        UnitConvert = UNITCONVERT()
        # Get options
        project_pp_json = os.path.join(ASPECT_LAB_DIR, 'files',
                                       'TwoDSubduction', 'post_process.json')
        with open(project_pp_json, 'r') as fin:
            pdict = json.load(fin)
        plot_options = pdict.get('slab_morph', {})
        Slab_morph_plot = SLAB_MORPH_PLOT('slab_morph',
                                          unit_convert=UnitConvert,
                                          options=plot_options)
        # plot
        Slab_morph_plot(filein, fileout=ofile)

    elif _commend == 'process_slab_morph':
        # process slab morphology from visit particle output
        # generate a file that can be used for plot
        # example usages:
        # python -m shilofue.TwoDSubduction process_slab_morph -i
        #   /home/lochy/ASPECT_PROJECT/TwoDSubduction/non_linear26/cr80w5ULV3.000e+01 -j post_process.json
        case_dir = arg.input_dir
        # process slab morph with extra options
        with open(arg.json_file, 'r') as fin:
            dict_in = json.load(fin)
            extra_options = dict_in.get('slab_morph', {})
        try:
            SlabMorph(case_dir, extra_options)
        except FileNotFoundError:
            warnings.warn(
                'process_slab_morph: file existence requirements are not met')

    elif _commend == 'plot_slab_morph_case':
        # plot the slab morph output for a case
        # first generate slab_morph output
        case_dir = arg.input_dir
        # process slab morph with extra options
        with open(arg.json_file, 'r') as fin:
            dict_in = json.load(fin)
            extra_options = dict_in.get('slab_morph', {})
        try:
            SlabMorph(case_dir, extra_options)
        except FileNotFoundError:
            warnings.warn(
                'process_slab_morph: file existence requirements are not met')
        # then plot the slab morph figure
        filein = os.path.join(case_dir, 'output', 'slab_morph')
        output_dir = os.path.join(case_dir, 'img')
        ofile = os.path.join(output_dir, 'slab_morph.png')
        # Init the UnitConvert class
        UnitConvert = UNITCONVERT()
        # Get options
        project_pp_json = os.path.join(ASPECT_LAB_DIR, 'files',
                                       'TwoDSubduction', 'post_process.json')
        with open(project_pp_json, 'r') as fin:
            pdict = json.load(fin)
        plot_options = pdict.get('slab_morph', {})
        Slab_morph_plot = SLAB_MORPH_PLOT('slab_morph',
                                          unit_convert=UnitConvert,
                                          options=plot_options)
        # plot
        Slab_morph_plot(filein, fileout=ofile)

    elif _commend == 'plot':
        # future
        # plot something
        pass

    elif _commend == 'visit_options':
        # output bash options to a file that could be
        # read by bash script
        # initiate class object
        case_dir = arg.input_dir

        Visit_Options = VISIT_OPTIONS(case_dir)

        # load extra options
        if arg.json_file == './config_case.json':
            # no json file is given
            extra_options = {}
        else:
            with open(arg.json_file, 'r') as fin:
                dict_in = json.load(fin)
                extra_options = dict_in.get('visit', {})

        # call function
        ofile = os.path.join(ASPECT_LAB_DIR, 'visit_keys_values')
        Visit_Options(ofile, extra_options)
        pass

    elif _commend == 'plot_test_results':
        # plot the result of tests
        # example:
        # python -m shilofue.TwoDSubduction plot_test_results -i
        #  /home/lochy/softwares/aspect/build_TwoDSubduction/tests/ -o $TwoDSubduction_DIR/test_results
        source_dir = arg.input_dir
        # todo
        PlotTestResults(source_dir, output_dir=arg.output_dir)

    else:
        raise ValueError('Command %s is not available.' % _commend)
Code example #10
 def LowerMantle0(self, Inputs, _config):
     """
     calculate flow law parameters when the phase transition only happens in the mantle composition
     """
     # parse from input
     jump = _config['upper_lower_viscosity']
     T = _config['T660']
     P = _config['P660']
     V1 = _config['LowerV']
     visco_plastic = Inputs["Material model"]['Visco Plastic']
     prefactors_for_diffusion_creep = Parse.COMPOSITION(
         visco_plastic["Prefactors for diffusion creep"])
     grain_size = float(visco_plastic["Grain size"])
     grain_size_exponents_for_diffusion_creep = Parse.COMPOSITION(
         visco_plastic["Grain size exponents for diffusion creep"])
     activation_energies_for_diffusion_creep = Parse.COMPOSITION(
         visco_plastic["Activation energies for diffusion creep"])
     activation_volumes_for_diffusion_creep = Parse.COMPOSITION(
         visco_plastic["Activation volumes for diffusion creep"])
     # call GetLowerMantleRheology to derive parameters for lower mantle flow law
     backgroud_upper_mantle_diffusion = {}
     backgroud_upper_mantle_diffusion[
         'A'] = prefactors_for_diffusion_creep.data['background'][0]
     backgroud_upper_mantle_diffusion['d'] = grain_size
     backgroud_upper_mantle_diffusion['n'] = 1.0
     backgroud_upper_mantle_diffusion[
         'm'] = grain_size_exponents_for_diffusion_creep.data['background'][
             0]
     backgroud_upper_mantle_diffusion[
         'E'] = activation_energies_for_diffusion_creep.data['background'][
             0]
     backgroud_upper_mantle_diffusion[
         'V'] = activation_volumes_for_diffusion_creep.data['background'][0]
     backgroud_lower_mantle_diffusion = Rheology.GetLowerMantleRheology(
         backgroud_upper_mantle_diffusion, jump, T, P, V1=V1, strategy='A')
     # future: add in choice of phases
     prefactors_for_diffusion_creep.data['background'] = [
         backgroud_upper_mantle_diffusion['A'],
         backgroud_lower_mantle_diffusion['A']
     ]
     grain_size_exponents_for_diffusion_creep.data['background'] = [
         backgroud_upper_mantle_diffusion['m'],
         backgroud_lower_mantle_diffusion['m']
     ]
     activation_energies_for_diffusion_creep.data['background'] = [
         backgroud_upper_mantle_diffusion['E'],
         backgroud_lower_mantle_diffusion['E']
     ]
     activation_volumes_for_diffusion_creep.data['background'] = [
         backgroud_upper_mantle_diffusion['V'],
         backgroud_lower_mantle_diffusion['V']
     ]
     # parse back
     visco_plastic[
         "Prefactors for diffusion creep"] = prefactors_for_diffusion_creep.parse_back(
         )
     visco_plastic[
         "Grain size exponents for diffusion creep"] = grain_size_exponents_for_diffusion_creep.parse_back(
         )
     visco_plastic[
         "Activation energies for diffusion creep"] = activation_energies_for_diffusion_creep.parse_back(
         )
     visco_plastic[
         "Activation volumes for diffusion creep"] = activation_volumes_for_diffusion_creep.parse_back(
         )
     return Inputs
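
The excerpt above only collects the flow-law parameters and delegates the actual conversion to Rheology.GetLowerMantleRheology, whose formula is not shown here. As an assumption for orientation only, a diffusion-creep viscosity of the usual form eta = 0.5 / A * d^m * exp((E + P*V) / (R*T)) is consistent with how A, d, m, E, V are gathered, and strategy 'A' then presumably rescales the prefactor so that the viscosity just below 660 km equals jump times the upper-mantle value at (P660, T660) with the new activation volume V1. A hedged sketch of that idea (not the library's code):

import numpy as np

R = 8.314  # gas constant, J / (mol K)

def diffusion_creep_viscosity(A, d, m, E, V, P, T):
    # assumed diffusion-creep form (n = 1): eta = 0.5 / A * d^m * exp((E + P V) / (R T))
    return 0.5 / A * d**m * np.exp((E + P * V) / (R * T))

def lower_mantle_prefactor_sketch(um, jump, T, P, V1):
    # um: dict with keys 'A', 'd', 'm', 'E', 'V' for the upper-mantle background
    eta_um = diffusion_creep_viscosity(um['A'], um['d'], um['m'], um['E'], um['V'], P, T)
    eta_lm = jump * eta_um
    # invert the viscosity formula for A, keeping d, m, E and switching to V1
    return 0.5 / eta_lm * um['d']**um['m'] * np.exp((um['E'] + P * V1) / (R * T))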
Code example #11
def test_generate_case():
    # Test 1: generate case with Lower Mantle viscosity
    parse_dir = os.path.join(test_source_dir, 'TwoDSubduction', 'parse')
    _test_prm_file = os.path.join(parse_dir, 'base.prm')
    _config_file = os.path.join(parse_dir, 'base_config.json')
    _case_dir = os.path.join('.test', 'ULV3.000e+01')  # case name is 'ULV3.000e+01'
    if os.path.isdir(_case_dir):
        # remove older files
        rmtree(_case_dir)
    _prm_file = os.path.join(_case_dir, 'case.prm')
    _standard_prm_file = os.path.join(parse_dir, 'standard_base.prm')
    # Read files
    with open(_test_prm_file, 'r') as fin:
        _inputs = Parse.ParseFromDealiiInput(fin)
    with open(_config_file, 'r') as fin:
        _config = json.load(fin)
    # Initiate Class MyCase
    MyCase = TwoDSubduction.MYCASE(_inputs, config=_config)
    # call __call__ function
    _extra = {'T660': 1663.0, 'P660': 21e9, 'LowerV': 1.5e-6}  # extra configuration
    # add operations
    parse_operations = TwoDSubduction.MY_PARSE_OPERATIONS()
    MyCase(parse_operations, dirname='.test', extra=_extra)
    # Assertions
    assert(os.path.isfile(_prm_file))
    assert(filecmp.cmp(_standard_prm_file, _prm_file))
    
    # Test 2: generate case with Lower Mantle viscosity and changed mesh_refinement
    _test_prm_file = os.path.join(parse_dir, 'base.prm')
    _config_file = os.path.join(parse_dir, 'base_config_1.json')
    _case_dir = os.path.join('.test', 'ULV1.000e+02testIAR8')  # case name is 'ULV1.000e+02testIAR8'
    if os.path.isdir(_case_dir):
        # remove older files
        rmtree(_case_dir)
    _prm_file = os.path.join(_case_dir, 'case.prm')
    _standard_prm_file = os.path.join(parse_dir, 'standard_base_1.prm')
    # Read files
    with open(_test_prm_file, 'r') as fin:
        _inputs = Parse.ParseFromDealiiInput(fin)
    with open(_config_file, 'r') as fin:
        _json_inputs = json.load(fin)
        _config = _json_inputs['config']
        _test = _json_inputs.get('test', {})
    # Initiate Class MyCase
    MyCase = TwoDSubduction.MYCASE(_inputs, config=_config, test=_test)
    # call __call__ function
    _extra = {'T660': 1663.0, 'P660': 21e9, 'LowerV': 1.5e-6}  # extra configuration
    parse_operations = TwoDSubduction.MY_PARSE_OPERATIONS()
    MyCase(parse_operations, dirname='.test', extra=_extra)
    # Assertions
    assert(os.path.isfile(_prm_file))
    assert(filecmp.cmp(_standard_prm_file, _prm_file))
    
    # Test 3: generate case with non_linear rheology and test the solver
    _test_prm_file = os.path.join(parse_dir, 'non_linear_1e18.prm')
    _config_file = os.path.join(parse_dir, 'non_linear_1e18_config.json')
    _case_dir = os.path.join('.test', 'ULV3.000e+01testC4.000e-01MLT9.000e-01NST1.000e-04SBR10')
    if os.path.isdir(_case_dir):
        # remove older files
        rmtree(_case_dir)
    _prm_file = os.path.join(_case_dir, 'case.prm')
    _standard_prm_file = os.path.join(parse_dir, 'standard_non_linear_1e18.prm')
    # Read files
    with open(_test_prm_file, 'r') as fin:
        _inputs = Parse.ParseFromDealiiInput(fin)
    with open(_config_file, 'r') as fin:
        _json_inputs = json.load(fin)
        _config = _json_inputs['config']
        _test = _json_inputs.get('test', {})
        _extra = _json_inputs.get('extra', {})
    # Initiate Class MyCase
    MyCase = TwoDSubduction.MYCASE(_inputs, config=_config, test=_test, extra=_extra)
    # call __call__ function
    parse_operations = TwoDSubduction.MY_PARSE_OPERATIONS()
    MyCase(parse_operations, dirname='.test', extra=_extra)
    # Assertions
    assert(os.path.isfile(_prm_file))
    assert(filecmp.cmp(_standard_prm_file, _prm_file))
    
    # Test 4: generate case with phase transitions on all compositions and an eclogite transition of the crustal layer
    _test_prm_file = os.path.join(parse_dir, 'crust_terminate.prm')
    _config_file = os.path.join(parse_dir, 'crust_terminate_config.json')
    _case_dir = os.path.join('.test', 'crust_terminateULV1.000e+01')
    if os.path.isdir(_case_dir):
        # remove older files
        rmtree(_case_dir)
    _prm_file = os.path.join(_case_dir, 'case.prm')
    _standard_prm_file = os.path.join(parse_dir, 'crust_terminate_standard.prm')
    # Read files
    with open(_test_prm_file, 'r') as fin:
        _inputs = Parse.ParseFromDealiiInput(fin)
    with open(_config_file, 'r') as fin:
        _json_inputs = json.load(fin)
        _config = _json_inputs['config']
        _test = _json_inputs.get('test', {})
        _extra = _json_inputs.get('extra', {})
    # Initiate Class MyCase
    MyCase = TwoDSubduction.MYCASE(_inputs, config=_config, test=_test, extra=_extra)
    # call __call__ function
    parse_operations = TwoDSubduction.MY_PARSE_OPERATIONS()
    MyCase(parse_operations, basename="crust_terminate", dirname='.test', extra=_extra)
    # Assertions
    assert(os.path.isfile(_prm_file))
    assert(filecmp.cmp(_standard_prm_file, _prm_file))