def generate_doxygen(doxydoc, verbose):
    """
    Generate the Doxygen documentation (In html) for the Python and the sources

    @param doxydoc name of the doxygen folder to use (doxydocs or doxypydocs)
    @param verbose If True display doxygen listing
    """
    root = CFGS.get_root()

    # The sources documentation needs the f77 -> f90 converter to be built
    if doxydoc == "doxydocs":
        converter_path = path.join(root, 'optionals', 'addons', 'to_f90')
        converter_exe = path.join(converter_path, 'f77_to_f90')
        if not path.exists(converter_exe):
            raise TelemacException(
                "You need to compile the converter in :\n"+converter_path)

    # Run doxygen on the Doxyfile of the requested documentation folder
    doxy_file = path.join(root, 'documentation', doxydoc, 'Doxyfile')

    compile_doxygen(doxy_file, verbose)
# Beispiel #2
def get_report_path(report_name, type_valid):
    """
    Build report full path
    $HOMETEL/[report_name]_[config]_[version]_[type_valid]_[date].csv
    where:
    - report_name is the one given as argument
    - config is the name of the configuration for which the validation is run
    - version is the version of the code
    - type_valid is the one given as argument
    - date is the data at which the report is written

    @param report_name (str) Name given to the report
    @param type_valid (str) Type of validation (notebook, examples...)

    @returns (str) The path
    """
    # Version defaults to 'trunk' when absent from the configuration
    version = CFGS.configs[CFGS.cfgname].get('version', 'trunk')
    # Time stamp of the moment the report name is built
    stamp = ttime.strftime("%Y-%m-%d-%Hh%Mmin%Ss",
                           ttime.localtime(ttime.time()))

    full_report_name = "{}_{}_{}_{}_{}.csv".format(
        report_name, CFGS.cfgname, version, type_valid, stamp)

    return path.join(CFGS.get_root(), full_report_name)
def hide_root(string):
    """
    Replace the path to sources of telemac by <root>

    @param string (string) The string in which to hide root

    @return (string) The updated string
    """
    root = CFGS.get_root()
    return string.replace(root, "<root>")
def run_validation_notebooks(options, report, xcpts):
    """
    Run validation of the notebooks

    @param options (ArgumentParser) Options of the script
    @param report (Report) Contains execution time
    @param xcpts (Message) Error handler
    """
    root = CFGS.get_root()

    if options.args != []:
        # Files to run given in arguments
        nb_files = options.args
    else:
        # Looking through notebook folder for notebook to run
        nb_files = []

        for dirpath, _, ffiles in walk(path.join(root, 'notebooks')):
            # Skipping jupyter temporary folders (whole directory at once,
            # no need to test it again for every file it contains)
            if '.ipynb' in dirpath:
                continue
            for ffile in ffiles:
                # If we have a notebook
                if '.ipynb' in ffile:
                    nb_files.append(path.join(dirpath, ffile))

    # Removing excluded notebooks
    if options.nb_exclude is not None:
        options.nb_exclude = options.nb_exclude.strip("'")
        excludes = options.nb_exclude.split(',')
        # Iterate over a copy so removal does not disturb the loop
        for nb_file in list(nb_files):
            for exclude in excludes:
                if nb_file.endswith(exclude + ".ipynb"):
                    print("  ~> Excluding: ", nb_file)
                    nb_files.remove(nb_file)
                    # Already removed; no need to test remaining patterns
                    break

    # Run notebook validation
    n_nb = len(nb_files)
    for i, nb_file in enumerate(sorted(nb_files)):
        print('Validation <{}/{}> ~> Running notebook {} in {}'\
              .format(i+1, n_nb, path.basename(nb_file), path.dirname(nb_file)))
        try:
            start = time.time()
            run_notebook(nb_file,
                         options.nb_timeout,
                         update_nb=options.nb_update)
            end = time.time()
            report.add_notebook(nb_file, end - start, True)
        except Exception as exc:
            if options.bypass:
                # Best-effort mode: record the failure and keep going
                report.add_notebook(nb_file, 0.0, False)
                xcpts.add_messages([{'name': nb_file, 'msg': str(exc)}])
            else:
                raise exc
# Beispiel #5
def get_dico(module):
    """
    Returns path of the dictionary for a given module

    @param module (str) name of a telemac-mascaret module

    @returns (str) the path of <root>/sources/<module>/<module>.dico
    """
    # Imported locally so the module can be loaded without a configuration
    from config import CFGS
    if CFGS is None:
        # Fixed typo in the message ("wors" -> "works")
        raise TelemacException(\
                "This function only works if a configuration is set")
    return path.join(CFGS.get_root(), 'sources', module, module+'.dico')
# Beispiel #6
def generate_doxygen(doxydoc, verbose):
    """
    Generate the Doxygen documentation (In html) for the Python and the sources

    @param doxydoc name of the doxygen folder to use (doxydocs or doxypydocs)
    @param verbose If True display doxygen listing
    """
    # Location of the Doxyfile driving the compilation
    doxy_dir = path.join(CFGS.get_root(), 'documentation', doxydoc)
    doxy_file = path.join(doxy_dir, 'Doxyfile')

    # The Python scripts documentation is filtered through doxypypy
    if doxydoc == "doxypydocs":
        try:
            import doxypypy
        except ImportError:
            raise TelemacException("doxypypy is mandatory to compile doxygen "
                                   "for Telemac scripts")

    compile_doxygen(doxy_file, verbose)
def run_validation_python(cfg, options, report, xcpts):
    """
    Run validation for vnv Python scripts

    @param cfg (Dict) Configuration information
    @param options (ArgumentParser) List of arguments
    @param report (Report) Time of actions
    @param xcpts () Error handler
    """
    # Building list of files to run
    if options.args != []:
        # Scripts given explicitly on the command line
        list_files = [path.abspath(ffile) for ffile in options.args]
    else:
        # Looping on all folders to find the scripts to execute
        list_files = []
        # Loop on modules
        for code_name in cfg['VALIDATION']:
            val_root = cfg['val_root']
            dirpath, dirnames, _ = next(walk(path.join(val_root, code_name)))
            for ddir in dirnames:
                _, _, filenames = next(walk(path.join(dirpath, ddir)))
                for fle in filenames:
                    # Only vnv_*.py files are validation scripts
                    # (renamed 'root' -> 'base' to avoid shadowing the
                    # CFGS root used below)
                    base, ext = path.splitext(path.basename(fle))
                    if ext == '.py' and base[0:4] == 'vnv_':
                        # check rank and tag
                        file_name = path.join(dirpath, ddir, fle)
                        if check_python_rank_tags(file_name, options):
                            list_files.append(file_name)

    n_files = len(list_files)
    root = CFGS.get_root()
    for ifile, py_file in enumerate(sorted(list_files)):
        if options.cleanup or options.full_cleanup:
            # Cleanup mode only removes working directories
            clean_vnv_working_dir(py_file, full=options.full_cleanup)
        else:
            print('\n\nValidation < {}/{} > of {}'\
                  .format(ifile+1, n_files, py_file.replace(root, '<root>')))
            run_python(py_file, options, report, xcpts)
def run_validation_python_mpi(cfg, options, report, xcpts):
    """
    Run validation for vnv Python scripts Normale mode

    @param cfg (Dict) Configuration information
    @param options (ArgumentParser) List of arguments
    @param report (Report) Time of actions
    @param xcpts () Error handler
    """
    # Building list of files to run
    list_files = get_list_python_files(cfg, options)

    root = CFGS.get_root()
    n_files = len(list_files)
    for ifile, py_file in enumerate(sorted(list_files)):
        # Cleanup mode only removes working directories, nothing is run
        if options.cleanup or options.full_cleanup:
            clean_vnv_working_dir(py_file, full=options.full_cleanup)
            continue
        print('\n\nValidation < {}/{} > of {}'\
              .format(ifile+1, n_files, py_file.replace(root, '<root>')))
        run_python(py_file, options, report, xcpts)
# Beispiel #9
def main():
    """
    Main function of gretel.

    Builds the command line interface, loads the telemac-mascaret
    configuration, gathers the merge parameters either from a
    GRETEL.PAR parameter file or from the individual options, then
    runs gretel (the step recombining partitioned result files
    after a parallel run).
    """
    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~ Reads config file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print('\n\nLoading Options and Configurations\n' + 72 * '~' + '\n')
    parser = argparse.ArgumentParser(\
            description='Run the merging step (gretel)')
    # Adds the common configuration options shared by the telemac scripts
    parser = add_config_argument(parser)
    parser.add_argument(\
              "--input-file",
              dest="input_file",
              default='',
              help="Name of gretel parameter file (GRETEL.PAR). "\
                     "This option will surcharge all the others")
    parser.add_argument(\
              "--geo-file",
              dest="geo_file",
              default='T2DGEO',
              help="Name of the geometry file associated with the "\
                     "file to be merged")
    parser.add_argument(\
              "--geo-file-format",
              dest="geo_file_fmt",
              default='SERAFIN',
              help="Format of the geometry file(SERAFIN,SERAFIND or MED), "\
                     "default is SERAFIN")
    parser.add_argument(\
              "--res-file",
              dest="res_file",
              default='T2DRES',
              help="Name of the file to be merged")
    parser.add_argument(\
              "--res-file-format",
              dest="res_file_fmt",
              default='SERAFIN',
              help="Format of the geometry file(SERAFIN,SERAFIND or MED), "\
                     "default is SERAFIN")
    parser.add_argument(\
              "--bnd-file",
              dest="bnd_file",
              default='T2DCLI',
              help="Name of the boundary file")
    parser.add_argument(\
              "--ncsize",
              dest="ncsize",
              default=8,
              help="Number of partitions (should be equal to number of "\
                      "parallel processors), default is 8")
    parser.add_argument(\
              "--nplan",
              dest="nplan",
              default=0,
              help="Number of horizontal levels ,default is 0")
    args = parser.parse_args()

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Environment ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # Loads the systel configuration pointed to by the common options
    update_config(args)

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Works for all configurations unless specified ~~~~~~~~~~~~~~~

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Reporting errors ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Works for all configurations unless specified ~~~~~~~~~~~~~~~
    CFGS.compute_execution_info()

    # A parameter file overrides every individual option; it is read as
    # one value per line, in the fixed order below.
    # NOTE(review): values read from the file stay strings (e.g. ncsize,
    # nplan) whereas the argparse defaults are ints — run_gretel is
    # presumably tolerant of both; confirm before changing.
    if args.input_file != "":
        with open(args.input_file, 'r') as f:
            geo_file = f.readline().strip('\n')
            geo_file_fmt = f.readline().strip('\n')
            bnd = f.readline().strip('\n')
            res_file = f.readline().strip('\n')
            res_file_fmt = f.readline().strip('\n')
            ncsize = f.readline().strip('\n')
            nplan = f.readline().strip('\n')
    else:
        geo_file = args.geo_file
        geo_file_fmt = args.geo_file_fmt
        bnd = args.bnd_file
        res_file = args.res_file
        res_file_fmt = args.res_file_fmt
        ncsize = args.ncsize
        nplan = args.nplan

    # Getting gretel command from the binaries of the current configuration
    pbin = path.join(CFGS.get_root(), 'builds', CFGS.cfgname, 'bin')
    grecmd = get_gretel_cmd(pbin, CFGS.configs[CFGS.cfgname])
    # Running the merge of the partitioned result files

    run_gretel(grecmd, res_file, res_file_fmt, geo_file, geo_file_fmt, bnd,
               ncsize, nplan, False)

    print('\n\nMy work is done\n\n')
    sys.exit(0)
def main():
    """
    Main program for the compilation of the documentation of
    the telemac-mascaret system.

    Compiles, depending on the options, the validation / reference /
    user / release / theory documentation of each module plus the
    miscellaneous documentation (guides, doxygen, notebooks).
    """
# <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
# ~~ Reads config file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print('\n\nLoading Options and Configurations\n'+'~'*72+'\n')
    parser = ArgumentParser(
        formatter_class=RawDescriptionHelpFormatter,
        description=('''\n
By Default all the documentation are generated\n
use the options --validation/reference/user/release/theory to compile only one
        '''))
    parser = add_config_argument(parser)
    parser.add_argument(
        "-v", "--verbose", action="store_true",
        dest="verbose", default=False,
        help="Will display listing for all commands")
    parser.add_argument(
        "-m", "--modules",
        dest="modules", default='',
        help="specify the list modules (, separated), default is all of them")
    parser.add_argument(
        "-M", "--misc",
        dest="misc", default='',
        help="specify the list of misc documentation (, separated) to compile, "
             "default is all of them")
    parser.add_argument(
        "--validation", action="store_true",
        dest="validation", default=False,
        help="Will generate the validation documentation")
    parser.add_argument(
        "--case-list",
        dest="case_list", default='',
        help="List of cas to include in the validation documentation"
             "separated by ',' (default all of them)")
    parser.add_argument(
        "--reference", action="store_true",
        dest="reference", default=False,
        help="Will generate the reference documentation")
    parser.add_argument(
        "--user", action="store_true",
        dest="user", default=False,
        help="Will generate the user documentation")
    parser.add_argument(
        "--release", action="store_true",
        dest="release_note", default=False,
        help="Will generate the release note")
    parser.add_argument(
        "--theory", action="store_true",
        dest="theory_guide", default=False,
        help="Will generate the theory guide")
    parser.add_argument(
        "--clean", action="store_true",
        dest="cleanup", default=False,
        help="Will remove all temporary file generated by pdflatex")
    parser.add_argument(
        "--fullclean", action="store_true",
        dest="fullcleanup", default=False,
        help="Same as clean but removes the pdf as well")

# <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
# ~~~~ Environment ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    options = parser.parse_args()
    update_config(options)

# <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
# ~~~~ Works for all configurations unless specified ~~~~~~~~~~~~~~~
# <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
# ~~~~ Compile the validation documentation
    # When no specific documentation is requested everything is compiled
    doall = not (options.validation or options.user or options.reference
                 or options.release_note or options.theory_guide)
    cfg = CFGS.configs[CFGS.cfgname]
    # still in lower case
    root = CFGS.get_root()
    # Get what is to be compiled
    # By default everything if something is defined compiling only that
    if options.modules != '':
        module_list = options.modules.split(',')
    else:
        # all modules
        module_list = ['artemis', 'stbtel', 'sisyphe', 'postel3d',
                       'telemac2d', 'telemac3d', 'tomawac', 'waqtel',
                       'telapy', 'mascaret', 'gaia', 'nestor', 'khione']
    # NOTE: giving --misc compiles only the misc documentation, any
    # -m/--modules value is discarded in that case
    if options.misc != '':
        misc_list = options.misc.split(',')
        module_list = []
    else:
        # all docs
        misc_list = ['developer_guide', 'software_quality_plan',
                     'TelemacDocTemplate', 'git_guide',
                     'doxypydocs']
        # If a module was specified or a specific documentation for modules
        # not compiling Misc documentation
        if options.modules != '' or not doall:
            misc_list = []

    CFGS.compute_vnv_info()

    # Get version in config if it exist use trunk otherwise
    version = cfg.get('version', 'trunk')

    # Initialise output message
    output_mess = '\n\n'
    # Look on all the modules for the documentation
    for code_name in module_list:
        print('\nCompilation of the documentation for ' + code_name
              + '\n'+'~'*72)
        # list of what to do for the module
        todo = []
        if options.validation or doall:
            if code_name not in ['telapy', 'mascaret']:
                # Building Validation LaTeX file
                doc_dir = path.join(root, 'documentation',
                                    code_name, 'validation')
                chdir(doc_dir)
                if options.case_list != '':
                    list_of_case = options.case_list.split(',')
                else:
                    list_of_case = list(cfg['VALIDATION'][code_name].keys())
                    list_of_case.remove('path')
                skiped_case = \
                    create_case_list_file(
                        doc_dir,
                        cfg['VALIDATION'][code_name]['path'],
                        list_of_case,
                        options.cleanup or options.fullcleanup)
                for case in skiped_case:
                    output_mess += r'   - /!\ Missing LaTeX file for ' + \
                                   case+'\n'
                todo.append('validation')
        if options.reference or doall:
            if code_name not in ['telapy', 'mascaret', 'nestor']:
                # Path to the dictionary
                dictionary = path.join(root, 'sources', code_name,
                                       code_name+'.dico')
                # Path to latex File
                latex_file = path.join(root, 'documentation',
                                       code_name, 'reference',
                                       'latex', 'Corpus.tex')
                # English only for now
                lng = '2'
                # Path to bin directory
                exe_path = path.join(\
                        root, 'builds', CFGS.cfgname,
                        'bin', 'damocles'+cfg['SYSTEM']['sfx_exe'])
                generate_ref_from_dict(\
                        exe_path, dictionary, latex_file, lng,
                        options.cleanup or options.fullcleanup,
                        options.verbose)
                todo.append('reference')
        if options.user or doall:
            if code_name not in ['mascaret']:
                # Normal Compilation of a LaTeX file
                todo.append('user')
        if options.theory_guide or doall:
            # theory guide only available for telemac3d
            if code_name in ['telemac3d', 'mascaret', 'waqtel']:
                todo.append('theory_guide')
        for doc_type in todo:
            print('\n     ~> Compilation of the {} documentation'\
                  .format(doc_type))
            doc_dir = path.join(root, 'documentation',
                                code_name, doc_type)
            chdir(doc_dir)
            # Check if the file exist
            if path.exists(path.join(doc_dir,
                                     code_name + "_" + doc_type + ".tex")):
                compile_doc(doc_dir, code_name+'_'+doc_type,
                            version,
                            options.cleanup, options.fullcleanup,
                            options.verbose)
            else:
                raise TelemacException(\
                        "   - Error for {} {}, {}.tex "
                        "not found ".format(code_name,
                                            path.basename(doc_dir),
                                            code_name+"_"+doc_type))
            if not (options.cleanup or options.fullcleanup):
                output_mess += '   - Created %s_%s_%s.pdf\n' % \
                              (code_name, doc_type, version)
    # List of the other documentation
    print('\nCompilation of the documentation for Misc'
          + '\n'+'~'*72)
    for doc in misc_list:
        print('\n     ~> Compilation of the {} documentation'.format(doc))
        doc_dir = path.join(root, 'documentation',
                            'Misc', doc)

        if doc == 'notebook':
            notebook_dir = path.join(root, 'notebooks')
            generate_notebook_pdf(doc_dir, notebook_dir)
        elif doc in ['doxydocs', 'doxypydocs']:
            generate_doxygen(doc, options.verbose)
        else:
            chdir(doc_dir)
            if path.exists(path.join(doc_dir, doc + ".tex")):
                compile_doc(doc_dir, doc,
                            version,
                            options.cleanup, options.fullcleanup,
                            options.verbose)
            else:
                raise TelemacException(\
                        "   - Error in {}, {}.tex "
                        "not found ".format(path.basename(doc_dir), doc))
        # Only the LaTeX documentations above produce a pdf, and only
        # when not in cleanup mode.  (Fixed: the original tested
        # 'notebooks' instead of 'notebook' and used 'or', which
        # reported a created pdf for doxygen docs and during cleanup.)
        if not (options.cleanup or options.fullcleanup) and \
           doc not in ['notebook', 'doxydocs', 'doxypydocs']:
            output_mess += '   - Created %s_%s.pdf\n' % \
                          (doc, version)

    print(output_mess)
    print('\n\n'+'~'*72)

# <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
# ~~~~ Jenkins' success message ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print('\n\nMy work is done\n\n')

    sys.exit(0)
def main(modules, example, nncsize, clean):
    """
    Main function.

    Runs each telapy example case both through the api and through the
    classical launcher, then compares the two result files, writing a
    PASSED/FAILED summary into ValidationTelApy.log.

    @param modules (list) Modules for which to run the validation
    @param example (str) A single test case to run ('' means all of them)
    @param nncsize (int) Number of parallel processors requested
    @param clean (bool) If True remove the tmp folder of each case
    """
    # Running main function
    root_dir = CFGS.get_root()

    # Start from a fresh log file
    if path.exists('ValidationTelApy.log'):
        remove('ValidationTelApy.log')
    # Context manager so the log file is closed even if a case crashes
    # (the original opened it and never closed it)
    with open('ValidationTelApy.log', 'a') as fichier:
        fichier.write("-----Listing Validation telapy-------\n")

        seq_only = {}
        skip_test = {}
        # Specification for each module
        for module in MODULE_HANDLED:
            seq_only[module] = []
            skip_test[module] = []

        # Sequential only test cases
        seq_only['telemac2d'].append('t2d_hydraulic_jump_v1p0.cas')
        seq_only['telemac2d'].append('t2d_hydraulic_jump_v2p0.cas')
        seq_only['telemac2d'].append('t2d_wesel.cas')
        seq_only['telemac2d'].append('t2d_wesel_pos.cas')
        seq_only['telemac2d'].append('t2d_delwaq.cas')
        seq_only['telemac2d'].append('t2d_ruptmoui.cas')
        seq_only['telemac2d'].append('t2d_triangular_shelf.cas')
        seq_only['telemac2d'].append('t2d_island.cas')
        seq_only['telemac2d'].append('t2d_tide-jmj_real_gen.cas')
        seq_only['telemac2d'].append('t2d_tide-jmj_type_gen.cas')
        seq_only['telemac2d'].append('t2d_dambreak_v1p0.cas')

        seq_only['telemac3d'].append('t3d_delwaq.cas')
        seq_only['telemac3d'].append('t3d_pluie.cas')
        seq_only['telemac3d'].append('t3d_tide-jmj_real_gen.cas')

        seq_only['artemis'].append('none')

        seq_only['tomawac'].append('tom_turning_wind.cas')
        seq_only['tomawac'].append('tom_manche.cas')
        seq_only['tomawac'].append('tom_manchelim.cas')
        # Test case that can not work with api

        # Using homere_adj not handle by api
        skip_test['telemac2d'].append('estimation')
        # Reruning telemac from homere not handled by api
        skip_test['telemac2d'].append('convergence')
        # Case that are not run by validation
        skip_test['telemac2d'].append('t2d_tide-jmj_type_med.cas')
        skip_test['telemac2d'].append('t2d_tide-ES_real.cas')

        # Non telemac3d case in folder
        skip_test['telemac3d'].append('t2d_canal.cas')
        skip_test['telemac3d'].append('p3d_amr.cas')
        skip_test['telemac3d'].append('p3d_bump.cas')
        skip_test['telemac3d'].append('p3d_canal.cas')
        skip_test['telemac3d'].append('p3d_cooper.cas')
        skip_test['telemac3d'].append('p3d_depot.cas')
        skip_test['telemac3d'].append('p3d_flume_slope.cas')
        skip_test['telemac3d'].append('p3d_gouttedo.cas')
        skip_test['telemac3d'].append('p3d_lock-hydro.cas')
        skip_test['telemac3d'].append('p3d_lock-nonhydro.cas')
        skip_test['telemac3d'].append('p3d_nonlinearwave.cas')
        skip_test['telemac3d'].append('p3d_piledepon.cas')
        skip_test['telemac3d'].append('p3d_piledepon-nonhydro.cas')
        skip_test['telemac3d'].append('p3d_pluie.cas')
        skip_test['telemac3d'].append('p3d_rouse.cas')
        skip_test['telemac3d'].append('p3d_stratification.cas')
        skip_test['telemac3d'].append('p3d_tetra.cas')
        skip_test['telemac3d'].append('p3d_vent.cas')
        skip_test['telemac3d'].append('p3d_V.cas')
        # Coupling test case
        skip_test['telemac3d'].append('depot')
        skip_test['telemac3d'].append('heat_exchange')

        # Artemis animated test case
        skip_test['artemis'].append('art_bj78_animated.cas')
        skip_test['artemis'].append('art_creocean_animated.cas')
        skip_test['artemis'].append('art_creocean_2.cas')
        skip_test['artemis'].append('art_creocean.cas')

        # Tomawac coupled test cases
        skip_test['tomawac'].append('3Dcoupling')

        for module in modules:
            fichier.write("-- For module " + module + "\n")
            module_dir = path.join(root_dir, 'examples', module)
            list_test_case = []
            if example != '':
                list_test_case.append(example)
            else:
                list_test_case = sorted(listdir(module_dir))

            # Sequential only test_case
            for i, test_case in enumerate(list_test_case):
                if test_case in skip_test[module]:
                    continue
                case_dir = path.join(module_dir, test_case)
                tmp_dir = path.join(case_dir, 'tmp')
                print("<" + str(i + 1) + "/" + str(len(list_test_case)) +
                      '> ' + str(test_case))
                fichier.write('Running test case ' + test_case + '\n')
                list_file = copy_file_to_tmp.copy_file_to_tmp(\
                        case_dir, tmp_dir, \
                        module, root_dir, skip_test[module])

                chdir(tmp_dir)

                for cas, fortran in list_file:
                    #
                    # Running Telemac based on telapy
                    #
                    if cas in skip_test[module]:
                        continue
                    # Get results names
                    res_file = get_result_file_name.get_result_file_name(
                        module, cas)
                    api_res_file = res_file + '_api'

                    # Running in sequential mode
                    # if the case does not run in parallel
                    if cas in seq_only[module]:
                        ncsize = 1
                    else:
                        ncsize = nncsize
                    passed_api = run_telemac_api.run_telemac_api(
                        module, cas, ncsize, fortran)

                    if passed_api:
                        shutil.move(res_file, api_res_file)
                    # Running Telemac classical way
                    #
                    passed_normal = run_telemac_normal.run_telemac_normal(
                        module, cas, ncsize)

                    #
                    # Result comparison between api and
                    #  classical Telemac computation
                    #
                    if not passed_normal:
                        fichier.write('   Normal run crashed\n')
                    if not passed_api:
                        fichier.write('   Api run crashed\n')
                    if not passed_api or not passed_normal:
                        fichier.write(
                            str(cas) + '                       FAILED' + '\n')
                        continue
                    if not path.exists(res_file):
                        fichier.write('   Missing ' + res_file + "\n")
                        fichier.write(
                            str(cas) + '                       FAILED' + '\n')
                        continue
                    if not path.exists(api_res_file):
                        fichier.write('   Missing ' + api_res_file + "\n")
                        fichier.write(
                            str(cas) + '                       FAILED' + '\n')
                        continue
                    compare = cmp(res_file, api_res_file)

                    if compare:
                        fichier.write(
                            str(cas) + '                       PASSED' + '\n')
                    else:
                        fichier.write(
                            str(cas) + '                       FAILED' + '\n')

                if clean:
                    chdir(module_dir + sep + test_case)
                    shutil.rmtree(module_dir + sep + test_case + sep + 'tmp')

            fichier.write('my work is done ' + '\n')
# Beispiel #12
def main():
    """
    Main function of partel.py.

    Builds the command line interface, loads the telemac-mascaret
    configuration, gathers the partitioning parameters either from a
    PARTEL.PAR parameter file or from the individual options, then
    runs partel (the mesh partitioning step before a parallel run).
    """
    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~ Reads config file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print('\n\nLoading Options and Configurations\n' + 72 * '~' + '\n')
    parser = argparse.ArgumentParser(
        description='Run the partitionning step (partel)')
    # Adds the common configuration options shared by the telemac scripts
    parser = add_config_argument(parser)
    parser.add_argument("--input-file",
                        dest="input_file",
                        default='',
                        help="Name of partel parameter file (PARTEL.PAR)."
                        " This option will surcharge all the others")
    parser.add_argument("--file",
                        dest="geo_file",
                        default='T2DGEO',
                        help="Name of the file to be partitionned")
    parser.add_argument(
        "--file-format",
        dest="geo_file_fmt",
        default='SERAFIN',
        help="Format of the geometry file(SERAFIN,SERAFIND or MED), "
        "default is SERAFIN")
    parser.add_argument(
        "--bnd-file",
        dest="bnd_file",
        default='T2DCLI',
        help="Name of the boundary file associated to the mesh file, "
        "default is T2DCLI")
    parser.add_argument(
        "--ncsize",
        dest="ncsize",
        default=8,
        help="Number of partitions (should be equal to number of "
        "parallel processors), default is 8")
    parser.add_argument(
        "--section-name",
        dest="section_file",
        default='',
        help="Name of the section file, default no section file")
    parser.add_argument("--zone-name",
                        dest="zone_file",
                        default='',
                        help="Name of the zone file, default no zone file")
    parser.add_argument("--weir-name",
                        dest="weir_file",
                        default='',
                        help="Name of the weir file, default no weir file")
    parser.add_argument(
        "--partitioning-method",
        dest="part_method",
        default=1,
        help="Method used for the partitionning (1:metis, 2:scotch)")
    parser.add_argument("--concat",
                        dest="concat",
                        action="store_true",
                        default=False,
                        help="If true concatenate partel output")
    parser.add_argument(
        "--mpi",
        dest="mpi",
        action="store_true",
        default=False,
        help="Run partel as executable (note using command given in systel.cfg)"
    )
    args = parser.parse_args()

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Environment ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # Loads the systel configuration pointed to by the common options
    update_config(args)

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Works for all configurations unless specified ~~~~~~~~~~~~~~~
    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Reporting errors ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Works for all configurations unless specified ~~~~~~~~~~~~~~~
    CFGS.compute_execution_info()

    # A parameter file overrides every individual option; it is read as
    # one value per line, in the fixed order below.  Two lines are read
    # and discarded — presumably reserved fields of the PARTEL.PAR
    # layout; confirm against the partel Fortran reader before changing.
    # NOTE(review): values read from the file stay strings (e.g. ncsize)
    # whereas the argparse defaults are ints — run_partel is presumably
    # tolerant of both.
    if args.input_file != "":
        with open(args.input_file, 'r') as f:
            geo_file = f.readline().strip('\n')
            geo_file_fmt = f.readline().strip('\n')
            bnd_file = f.readline().strip('\n')
            ncsize = f.readline().strip('\n')
            part_method = f.readline().strip('\n')
            section_file = f.readline().strip('\n')
            zone_file = f.readline().strip('\n')
            weir_file = f.readline().strip('\n')
            _ = f.readline().strip('\n')
            _ = f.readline().strip('\n')
            concat = f.readline().strip('\n')
    else:
        # Command line options; concat is passed down as 'YES'/'NO'
        concat = 'YES' if args.concat else 'NO'
        geo_file = args.geo_file
        geo_file_fmt = args.geo_file_fmt
        bnd_file = args.bnd_file
        ncsize = args.ncsize
        section_file = args.section_file
        zone_file = args.zone_file
        weir_file = args.weir_file
        part_method = args.part_method

    # Getting partel command from configuration
    pbin = path.join(CFGS.get_root(), 'builds', CFGS.cfgname, 'bin')
    if args.mpi:
        # Call the partel executable directly with redirected stdin/stdout
        exe_ext = CFGS.configs[CFGS.cfgname]['SYSTEM']['sfx_exe']
        parcmd = path.join(pbin, 'partel'+exe_ext+\
                                 ' < <partel.par> >> <partel.log>')
    else:
        # Use the partel command defined in systel.cfg
        parcmd = get_partel_cmd(pbin, CFGS.configs[CFGS.cfgname], '')
    # Running the partitioning step

    run_partel(parcmd, geo_file, geo_file_fmt, bnd_file, ncsize, False,
               section_file, zone_file, weir_file, geo_file, geo_file_fmt,
               part_method, concat)

    print('\n\nMy work is done\n\n')
    sys.exit(0)
def main(module=None):
    """
    @brief Main function of the runcode.py module

    Parses the command line (asking for the module as a positional
    argument when none is given), loads the TELEMAC configuration, and
    runs every CAS file listed on the command line through run_study.
    Raises a TelemacException at the end if any run collected errors.

    @param module (string): the name of the module to run (
      available modules are: telemac2d, telemac3d, artemis, tomawac,
      sisyphe, artemis, postel3d, ...)

    @return None
    """

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Reads config file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print('\n\nLoading Options and Configurations\n' + 72 * '~' + '\n')
    # When called without a module, the module is an extra positional
    # argument; otherwise it is preset as a parser default.
    if module is None:
        parser = ArgumentParser(
            formatter_class=RawDescriptionHelpFormatter,
            description=('''\n
runcode is the execution launcher for all TELEMAC modules.\n
where module can be:\n
    mascaret     the 1D hydrodyanmic / tracer / water quality solver
    telemac2d    the 2D hydrodyanmic / tracer / water quality solver
    telemac3d    the 3D hydrodynamic / tracer / water quality solver
    artemis      the phase resolving wave solver
    tomawac      the 3rd generation wave transformation solver
    sisyphe      the sediment transport and geomorphogical solver
    stbtel       a pre-processor for the modules
    postel3d     a post-processor for telemac3d
            '''),
            usage=' (--help for help)\n---------\n        =>  '
            '%(prog)s module [options] casfile(s)\n---------',
            epilog=('''\nexamples:\n---------
1:     => runcode.py telemac2d -s t2d.cas
---------'''))
        parser.add_argument("module",
                            default=None,
                            choices=[
                                'telemac2d', 'telemac3d', 'artemis', 'tomawac',
                                'stbtel', 'postel3d', 'sisyphe', 'partel',
                                'estel3d', 'mascaret'
                            ])
    else:
        parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter,
                                description=('''\n
%(prog)s is one of the execution launcher for the TELEMAC system.
            '''),
                                epilog=('''\nexamples:\n---------
1:     => %(prog)s -s t2d.cas
---------'''))
        parser.set_defaults(module=module)

    # Common runcode options (workdir, split/merge/run, ncsize, ...)
    parser = add_runcode_argument(parser, module=module)
    # Arguments
    parser.add_argument("args", metavar='cas file(s)', nargs="+")

    options = parser.parse_args()

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Environment ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    update_config(options)

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ banners ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    svn_banner(CFGS.get_root())

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Works for one configuration only ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # Checking if symlink is available
    if options.use_link and not check_sym_link(options.use_link):
        raise TelemacException(\
                '\nThe symlink option is only '
                'available on Linux systems. '
                'Remove the option and try again')

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Reads command line arguments ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    code_name = options.module
    cas_files = options.args

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Works for only one configuration ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # bypass errors and carries on
    options.bypass = False
    # --split/--merge/--run need a persistent work directory to share
    # files between the separate invocations
    if options.split or options.merge or options.run:
        if options.w_dir == '':
            raise TelemacException(\
                    '\nPlease use option -w (--workdirectory)'
                    ' with either of the options '
                    '--split, --run or --merge\n')
    # parsing for proper naming
    CFGS.compute_execution_info()
    cfg = CFGS.configs[CFGS.cfgname]

    print('\n\nRunning your CAS file(s) for:\n' + '~' * 72 + '\n')
    CFGS.light_dump()
    if options.w_dir != '':
        print('     +> directory        ' + options.w_dir)
        # NOTE(review): an explicit work directory disables the use of an
        # automatic temporary directory
        options.tmpdirectory = False
    print('\n\n' + '~' * 72 + '\n')

    # >>> Check wether the config has been compiled for the runcode
    if options.compileonly:
        cfg['REBUILD'] = 1
    if code_name not in cfg['MODULES']:
        raise TelemacException(\
                '\nThe code requested is not installed '
                'on this system : {}\n'.format(code_name))

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Reporting errors ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    xcpts = Messages()

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Run the Code from the CAS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    for cas_file in cas_files:
        run_study(cas_file, code_name, options)

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Reporting errors ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    if xcpts.not_empty():
        raise TelemacException(\
                '\n\nHummm ... I could not complete '
                'my work.\n{}{}'.format('~'*72, xcpts.except_messages()))


    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Jenkins' success message ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print('\n\nMy work is done\n\n')
    sys.exit(0)
def main():
    """
    Main function of validateTELEMAC.

    Reads the command line options, runs the requested validation pass
    (notebooks or examples, either through MPI or slurm), writes the
    report if asked, and exits non-zero when any error was collected.
    """
    # Handle input arguments and load the configuration
    options = set_parser()
    update_config(options)
    version = CFGS.configs[CFGS.cfgname].get('version', 'trunk')

    # Banner
    svn_banner(CFGS.get_root(), version)

    # Symbolic links are only supported on Linux
    if options.use_link and not check_sym_link(options.use_link):
        raise TelemacException(
            '\nThe symlink option is only available on Linux systems. '
            'Remove the option and try again')

    # Force a windowless matplotlib backend (Jenkins has no X server)
    if options.vnv_post:
        import matplotlib.pyplot as plt

        plt.switch_backend('agg')

    # Error collector
    xcpts = Messages()

    # Report object (one line per validation action)
    type_valid = 'notebooks' if options.notebook else 'examples'
    report = Report(options.report_name, type_valid)

    # Run the validation itself
    cfg = config_corrections(options, CFGS.cfgname)
    if options.notebook:
        run_validation_notebooks(options, report, xcpts)
    elif options.vnv_mode == 'slurm':
        run_validation_python_slurm(cfg, options, report, xcpts)
    else:
        run_validation_python_mpi(cfg, options, report, xcpts)

    # Write the report to disk only when a name was given
    if options.report_name != '':
        report.write()

    # Report collected errors and set the exit status accordingly
    if xcpts.not_empty():
        print('\n\nHummm ... I could not complete my work.\n'
              + '~' * 72 + '\n' + xcpts.except_messages())
        sys.exit(1)
    print('\n\nMy work is done\n\n')
    sys.exit(0)
def run_validation_python_slurm(cfg, options, report, xcpts):
    """
    Run validation for vnv Python scripts in slurm mode.

    The work is done in several passes:
    1. submit every job (vnv pre + run steps),
    2. poll slurm until every job succeeded, failed or timed out,
    3. for hpc_cmdexec configurations, run an extra merge pass,
    4. run the check + post-treatment pass on the jobs that finished.

    @param cfg (Dict) Configuration information
    @param options (ArgumentParser) List of arguments
    @param report (Report) Time of actions
    @param xcpts (Messages) Error handler
    """
    # Building list of files to run
    list_files = get_list_python_files(cfg, options)

    if list_files == []:
        print("Nothing to run (check tags and rank)")
        return

    n_files = len(list_files)
    root = CFGS.get_root()

    # Cleanup only: wipe the working directories and stop there
    if options.cleanup or options.full_cleanup:
        for py_file in sorted(list_files):
            clean_vnv_working_dir(py_file, full=options.full_cleanup)

        return

    # Making sure that the file is not there before first run
    jobid_file = options.id_log
    if path.exists(jobid_file):
        remove(jobid_file)

    # ~> First submission run
    options.vnv_pre = True
    options.vnv_run = True
    options.vnv_post = False
    options.vnv_check = False
    options.bypass = True

    if options.hpc_queue == '':
        raise TelemacException(
            "Option --queue is mandatory with --vnv-mode=slurm")

    print("  ~> Submission part")
    for ifile, py_file in enumerate(sorted(list_files)):
        print('\n\nValidation < {}/{} > of {}'\
              .format(ifile+1, n_files, py_file.replace(root, '<root>')))
        run_python(py_file, options, report, xcpts)

    # Removing from list files all the ones that crashed in the first
    # submission run
    run_list_files = list_files.copy()
    for error in xcpts.messages:
        if error['name'] in run_list_files:
            run_list_files.remove(error['name'])

    # ~> Waiting for jobs to finish

    jobs = {}
    jobs_ini = {}
    run_times = {}
    crashed = {'failed': [], 'timeout': []}

    # Bug fix: these must be initialised before the branch below,
    # otherwise the polling loop and the elapsed-time computation hit a
    # NameError when no job was submitted (missing job id file).
    prev_len = 0
    # Waiting time between each check in second
    wait_time = 10
    start_time = time.time()

    # File is generated by the first run
    # In case no run was launched in the previous command
    if not path.exists(jobid_file):
        actual_len = 0
    else:
        # Building dictionary of jobs:
        with open(jobid_file, 'r') as f:
            for line in f.readlines():
                job_id, action_path = line.split(';')
                if job_id == '':
                    raise TelemacException(\
                       "Error in the job id file. "\
                       "Generated by hpc_runcode in systel.cfg:\n{}"
                       .format(jobid_file))
                jobs[job_id] = action_path.strip('\n')

        jobs_ini.update(jobs)

        # Check job status
        print("  ~> Waiting for completion")
        actual_len = len(jobs)

        # Give slurm some time to actually start the jobs
        time.sleep(60)

    while actual_len != 0:
        # Only printing remaining jobs if there was some changes
        if prev_len != actual_len:
            print("Remaining jobs: ", len(jobs))
        t1 = time.time()
        for job_id in list(jobs.keys()):
            state = check_job_slurm(job_id)
            # Job crashed
            if state == 'failed':
                crashed['failed'].append(jobs[job_id])
                del jobs[job_id]
            # job timed out
            elif state == 'timeout':
                crashed['timeout'].append(jobs[job_id])
                del jobs[job_id]
            # Job is done
            elif state == 'success':
                run_time = get_job_time_slurm(job_id)
                run_times[jobs[job_id]] = run_time
                del jobs[job_id]
            # Otherwise job is still running
        t2 = time.time()
        # Only wait if the loop was done in less than wait_time
        if (t2 - t1) < wait_time:
            time.sleep(wait_time)
        # Update info on len
        prev_len = actual_len
        actual_len = len(jobs)

    elapsed_time = time.time() - start_time
    time_str = time.strftime("%H:%M:%S", time.gmtime(elapsed_time))

    print("Waited {} for jobs to complete".format(time_str))

    # Adding run times to the report
    for py_file, run_time in run_times.items():
        # Getting absolute name but same as in jobs
        tmp = py_file.split(sep)
        # Mascaret does not have a temporary folder
        # So not splitting at the same index
        if 'mascaret' in tmp or 'courlis' in tmp:
            abs_py_file = sep.join(tmp[:-1]) + ".py"
            action = tmp[-1]
        else:
            abs_py_file = sep.join(tmp[:-2]) + ".py"
            action = tmp[-2]

        rank = report.values[abs_py_file]['pre']['rank']

        report.add_action(abs_py_file, rank, action, run_time, True)

    # Building new list of files (without the ones that crashed)
    new_list_files = []
    for py_file in run_list_files:
        # Extract folder of validation from script name (minus extension)
        py_folder, _ = path.splitext(py_file)
        failed_action = crashed['timeout'] + crashed['failed']

        # Check if that folder is in one of the cases that crashed
        failed = False
        for action in failed_action:
            if py_folder + sep in action:
                failed = True
                break
        # If it is next file
        if failed:
            continue

        new_list_files.append(py_file)

    # Adding exception for all the run that crashed
    for crash_type, failed in crashed.items():
        if failed != []:
            for fail in failed:
                xcpts.add_messages([{
                    'name': fail,
                    'msg': 'The job {}'.format(crash_type)
                }])

    print("  ~> Displaying listing of all runs")
    # Displaying listings (before merging because merging will remove
    # the temporary folder)
    for ddir in jobs_ini.values():
        run_dir = ddir.replace('\n', '')
        print('\n\nListing for {}:'\
              .format(run_dir.replace(path.realpath(root), '<root>')))
        # If cmdexec hpc mode listing is in the temporary folder
        if 'hpc_cmdexec' in cfg:
            for ffile in listdir(run_dir):
                if ffile[:4] == 'tmp_' and \
                   path.isdir(path.join(run_dir, ffile)):
                    run_dir = path.join(run_dir, ffile)
                    break

        for ffile in listdir(run_dir):
            if ffile.endswith(".out"):
                with open(path.join(run_dir, ffile), 'r',
                          encoding='utf-8') as f:
                    print(f.read())

    # If we are in hpc_cmdexec configuration (only out_telemac is in the
    # batch job), run one more pass to do the merge step
    if 'hpc_cmdexec' in cfg:
        print("  ~> Merging part")
        options.vnv_pre = True
        options.vnv_run = True
        options.vnv_post = False
        options.vnv_check = False
        options.bypass = True
        options.merge = True
        options.split = False
        options.run = False
        options.compileonly = False
        # Running only on jobs that finished
        for ifile, py_file in enumerate(sorted(new_list_files)):
            print('\n\nValidation < {}/{} > of {}'\
                  .format(ifile+1, n_files, py_file.replace(root, '<root>')))
            run_python(py_file, options, report, xcpts)

    # Second run
    options.vnv_pre = True
    options.vnv_run = False
    options.vnv_post = True
    options.vnv_check = True
    options.bypass = True
    print("  ~> Check + Post-traitment")
    # Running only on jobs that finished
    for ifile, py_file in enumerate(sorted(new_list_files)):
        print('\n\nValidation < {}/{} > of {}'\
              .format(ifile+1, n_files, py_file.replace(root, '<root>')))
        run_python(py_file, options, report, xcpts, time_from_report=True)
# Beispiel #16
# 0
# (NOTE: the two lines above are pagination artifacts from the code
# scraping source — commented out so the file stays parseable.)
def main():
    """
    Main function of compactTELEMAC.

    Builds archives of the TELEMAC system:
    - with --src, a single source-only archive (configuration
      independent),
    - with --examples, a single archive of the examples (configuration
      independent),
    - otherwise, one archive per configuration containing the builds,
      scripts, sources and configs directories.
    """
    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Reads config file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print('\n\nLoading Options and Configurations\n' + 72 * '~' + '\n')
    parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter,
                            description=('''\n\
Compact the TELEMAC system files, into various archived:\n
1. archiving sources if necessary
2. archiving examples if necessary
3. archiving binaries if necessary
4. ...
        '''),
                            usage=' (--help for help)\n---------\n       =>'
                            '  %(prog)s [options] \n---------')
    parser = add_config_argument(parser)
    parser.add_argument(
        "-a",
        "--archive-name",
        metavar="archive name",
        dest="archive_name",
        default='',
        help="specify the archive name, default is taken as the config name")
    parser.add_argument(
        "-m",
        "--modules",
        metavar="modules",
        dest="modules",
        default='',
        help="specify the list modules, default is taken from config file")
    parser.add_argument(
        "--src",
        action="store_true",
        dest="src_only",
        default=False,
        help="create a zip containing only the sources i.e. the "
        "bare minimum to use telemac-mascaret")
    # Bug fix: the help text used to be a copy/paste of the --src one;
    # dest renamed to snake_case (internal to this function).
    parser.add_argument(
        "--examples",
        action="store_true",
        dest="examples_only",
        default=False,
        help="create a zip containing only the examples "
        "of telemac-mascaret")
    options = parser.parse_args()

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Environment ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    update_config(options)
    root_dir = CFGS.get_root()

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ banners ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    svn_banner(root_dir)

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Works for only one common root and zipper ~~~~~~~~~~~~~~~~~~~
    CFGS.compute_zip_info()
    cfg = CFGS.configs[CFGS.cfgname]
    version = cfg['version']
    zip_ext = cfg['ZIPPER']

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ srcOnlly is independent of config ~~~~~~~~~~~~~~~~~~~~~~~~~~~
    if options.src_only:
        # ~~> create the archive directory
        if options.archive_name != '':
            archive_name = options.archive_name
        else:
            archive_name = 'otm_' + version + '-src'
        print('\n\nArchive ' + archive_name + '\n' + '~' * 72 + '\n')
        tmp_dir = path.join(root_dir, archive_name)
        if path.exists(tmp_dir):
            remove_directories(tmp_dir)
        create_directories(tmp_dir)
        # ~~> copy the content of the following dirs into the archive directory
        dirs = ['optionals', 'scripts', 'sources', 'documentation', 'configs']
        for pid in dirs:
            input_path = path.join(root_dir, pid)
            output_path = input_path.replace(root_dir, tmp_dir)
            copytree(input_path,
                     output_path,
                     ignore=ignore_patterns('.svn', '*.pyc'))
            print('    +> ' + input_path)
        # ~~> copy the following files into the archive directory
        files = ['NEWS.txt', 'README.txt']
        for pid in files:
            input_path = path.join(root_dir, pid)
            output_path = input_path.replace(root_dir, tmp_dir)
            copy_file(input_path, output_path)
            print('    +> ' + input_path)
        # ~~> prepare an empty diretory for future builds
        pid = path.join(root_dir, 'builds')
        output_path = pid.replace(root_dir, tmp_dir)
        create_directories(output_path)
        # ~~> zipping the archive directory
        print('\n... now packaging ' + archive_name)
        tel_zip(archive_name, tmp_dir, zip_ext)
        # ~~> cleaning the archive directory
        print('\n... now cleaning ')
        remove_directories(tmp_dir)

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ examples only is independent of config ~~~~~~~~~~~~~~~~~~~~~~
    elif options.examples_only:
        # ~~> create the archive directory
        if options.archive_name != '':
            archive_name = options.archive_name
        else:
            archive_name = 'otm_' + version + '-examples'
        print('\n\nArchive ' + archive_name + '\n' + '~' * 72 + '\n')
        tmp_dir = path.join(root_dir, archive_name)
        if path.exists(tmp_dir):
            remove_directories(tmp_dir)
        create_directories(tmp_dir)
        # ~~> copy the content of the following dir into the archive directory
        dirs = ['examples']
        for pid in dirs:
            input_path = path.join(root_dir, pid)
            output_path = input_path.replace(root_dir, tmp_dir)
            copytree(input_path,
                     output_path,
                     ignore=ignore_patterns('.svn', '*.pyc'))
            print('    +> ' + input_path)
        # ~~> zipping the archive directory
        print('\n... now packaging ' + archive_name)
        tel_zip(archive_name, tmp_dir, zip_ext)
        # ~~> cleaning the archive directory
        print('\n... now cleaning ')
        remove_directories(tmp_dir)

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Works for all configurations unless specified ~~~~~~~~~~~~~~~
    else:
        for cfgname in CFGS.configs:
            # Bug fix: point CFGS and cfg at the configuration being
            # archived; previously the loop kept reusing the configuration
            # selected at startup for every cfgname (compare the same loop
            # in compileTELEMAC's main).
            CFGS.cfgname = cfgname
            cfg = CFGS.configs[cfgname]
            if options.modules != '':
                cfg['modules'] = \
                    options.modules.replace(',', ' ')\
                                   .replace(';', ' ').replace('.', ' ')
            # parsing for proper naming
            CFGS.compute_compact_info()
            CFGS.light_dump()

            # ~~ Scans all source files to build a relation database ~~~~~~~~~~~
            if cfg['MODULES'] == {}:
                raise TelemacException(\
                        '\nNot able to find any modules within'
                        'your root directory {}\n'.format(cfg['root']))

            # ~~> create the archive directory
            if options.archive_name != '':
                archive_name = options.archive_name
            else:
                archive_name = 'otm_' + version + '-builds-' + cfgname
            print('\n\nArchive ' + archive_name + '\n' + '~' * 72 + '\n')
            tmp_dir = path.join(root_dir, archive_name)
            if path.exists(tmp_dir):
                remove_directories(tmp_dir)
            create_directories(tmp_dir)
            # ~~> copy the content of the following dir
            # into the archive directory
            dirs = ['builds' + sep + cfgname, 'scripts', 'sources', 'configs']
            for pid in dirs:
                input_path = path.join(root_dir, pid)
                output_path = input_path.replace(root_dir, tmp_dir)
                copytree(input_path,
                         output_path,
                         ignore=ignore_patterns('.svn', '*.pyc'))
                print('    +> ' + input_path)
            # ~~> zipping the archive directory
            print('\n... now packaging ' + cfgname)
            tel_zip(cfgname, tmp_dir, cfg['ZIPPER'])
            # ~~> cleaning the archive directory
            print('\n... now cleaning ')
            remove_directories(tmp_dir)


    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Jenkins' success message ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print('\n\nMy work is done\n\n')

    sys.exit(0)
# Beispiel #17
# 0
# (NOTE: the two lines above are pagination artifacts from the code
# scraping source — commented out so the file stays parseable.)
def run_mascaret():
    """
    Main function that runs the mascaret executable in the current folder

    Parses the command line, prepares the mascaret input files from the
    given CAS file, then executes the mascaret binary of the active
    configuration and reports collected errors.
    """
    # ~~ Read config file and command line arguments ~~
    print('\n\nLoading Options and Configurations\n' + '~' * 72 + '\n')
    parser = ArgumentParser(
        formatter_class=RawDescriptionHelpFormatter,
        description=('''\n\
Run the mascaret executable in the current folder, given a CAS file.
        '''))
    parser.add_argument("args", nargs='*')
    # ~~> Environment
    parser = add_config_argument(parser)
    parser.add_argument(
        "-s", "--sortiefile", action="store_true", dest="sortie_file",
        default=False,
        help="specify whether there is a sortie file, default is no")
    parser.add_argument(
        "-b", "--bypass", action="store_true",
        dest="bypass", default=False,
        help="will bypass execution failures and try to carry on "
             "(final report at the end)")
    options = parser.parse_args()

    # ~~ Environment ~~
    update_config(options)

    # ~~ Banner ~~
    svn_banner(CFGS.get_root())

    # A CAS file is mandatory
    if len(options.args) < 1:
        print('\nThe name of the CAS file is required\n')
        parser.print_help()
        sys.exit(1)

    cas_file = options.args[0]

    # Error collector
    xcpts = Messages()

    # still in lower case
    # parsing for proper naming (works for one configuration only)
    CFGS.compute_execution_info()
    cfg = CFGS.configs[CFGS.cfgname]

    # Build the input files mascaret expects in the current folder
    create_mascaret_files(cfg, cas_file)

    # Run the executable of the active configuration
    exe_path = path.join(cfg['root'], 'builds', CFGS.cfgname, 'bin',
                         'mascaret' + cfg['sfx_exe'])
    _, return_code = xcpts.run_cmd(exe_path, options.bypass)

    # ~~ Reporting errors ~~
    if xcpts.not_empty() or return_code != 0:
        print('\n\nHummm ... I could not complete my work.\n' + '~' * 72
              + xcpts.except_messages())
        sys.exit(1)
    # ~~ Jenkins' success message ~~
    print('\n\nMy work is done\n\n')
    sys.exit(0)
def main():
    """
    Main function of compileTELEMAC

    For each active configuration: optionally cleans the build folders,
    rescans the sources if asked, then (re)compiles the requested
    modules and, when the configuration asks for it, the API.
    """

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~ Reads config file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print('\n\nLoading Options and Configurations\n' + 72 * '~' + '\n')
    parser = ArgumentParser(
        formatter_class=RawDescriptionHelpFormatter,
        description=('''\n
Compile the TELEMAC system:\n
1. rescan the tree dependencies if necessary
2. check which files need re-compilation
3. create object files, libraries, executable, and other binaries
    depending on your configuration settings
Work with all active configurations.
        '''))

    parser = add_config_argument(parser)
    parser.add_argument(
        "-m", "--modules", metavar="modules",
        dest="modules", default='',
        help="specify the list modules . separated, default is taken from config file")
    parser.add_argument(
        "-b", "--bypass", action="store_true",
        dest="bypass", default=False,
        help="will bypass execution failures and try to carry on "
             "(final report at the end)")
    parser.add_argument(
        "--rescan", action="store_true",
        dest="rescan", default=False,
        help="will redo the scan of sources for an update of "
             "all the cmdf files")
    parser.add_argument(
        "--clean", action="store_true",
        dest="cleanup", default=False,
        help="will erase all object, executable libraries from folder "
             "on the selected configs/modules")
    parser.add_argument(
        "-j", type=int,
        dest="ncsize", default=0,
        help="set the number of core used for the parallel "
             "compilation of objects")
    parser.add_argument(
        "-v", "--verbose", action="store_true",
        dest="verbose", default=False,
        help="If given will print every command")
    options = parser.parse_args()

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Environment ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    update_config(options)

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ banners ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    svn_banner(CFGS.get_root())

    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    # ~~~~ Separately dealing with rescan for all configs ? ~~~~~~~~~~~~
    for cfgname in CFGS.configs:
        # Setting configuration name
        CFGS.cfgname = cfgname
        # Getting configuration
        cfg = CFGS.configs[cfgname]

        if options.cleanup:
            CFGS.clean_install(cfgname)

        print('\n\n' + '\n'.join(banner(cfgname)))
        print('Scanning the source code for:\n' + '~' * 72 + '\n')

        # NOTE(review): a rescan is forced here only when both --clean and
        # --rescan are given — confirm this is intended over a plain
        # options.rescan
        compute_config(options.cleanup and options.rescan,
                       options.bypass)
        CFGS.light_dump()
        # Only if we ask for a scan
        if options.rescan:
            update_cmdf(options.bypass, options.cleanup, options.verbose)

        # /!\ multiple configurations will now generate multiple rescan
        # (because of tags and adds, specific to some configurations)
        compute_config(False, options.bypass)

        # ~~ Scans all cmdf files found in all modules ~~~~~~~~~~~~~~~~~~~~~
        # Specifying what module to compile
        if options.modules == '':
            modules = []
        else:
            modules = options.modules.split(".")
        compile_cmdf(options.ncsize, modules, options.verbose)

        # Compiling api if asked for
        cfg_opt = cfg.get('options', [])
        if 'api' in cfg_opt:
            compile_api_files(silent=not options.verbose,
                              static='static' in cfg_opt,
                              hermes_only='hermes_only' in cfg_opt)

    # ~~~~ Jenkins' success message ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print('\n\nMy work is done\n\n')
    sys.exit(0)
def scan_xcas(fle):
    """
    @brief : read the xml file to extract the list of input file
    :param fle: xcas file of mascaret computation
    :return: list of file needed for computation
    """
    needed = []
    # The parameters all live under the first child of the document root
    params = ET.parse(fle).getroot()[0]

    # Geometry file
    geometry = params.find('parametresGeometrieReseau').find('geometrie')
    needed.append(geometry.find('fichier').text)

    # Hydraulic laws
    for law in params.find('parametresLoisHydrauliques').find('lois'):
        needed.append(law.find('donnees').find('fichier').text)

    # Initial water line (only when explicitly activated)
    water_line = params.find('parametresConditionsInitiales').find('ligneEau')
    if water_line.find('LigEauInit').text == 'true':
        needed.append(water_line.find('fichLigEau').text)

    # Storage areas ("casier")
    casier = params.find('parametresCasier')
    if casier is not None:
        needed.append(casier.find('fichierGeomCasiers').text)

    # Tracer / water quality data
    root_tracer = params.find('parametresTraceur')
    if root_tracer is not None:
        conc_init = root_tracer.find(
            'parametresConcentrationsInitialesTraceur')
        if conc_init is not None:
            needed.append(conc_init.find('fichConcInit').text)

        water_quality = root_tracer.find('parametresNumeriquesQualiteEau')
        if water_quality is not None:
            needed.append(water_quality.find('fichParamPhysiqueTracer').text)
            needed.append(water_quality.find('fichMeteoTracer').text)

        tracer_laws = root_tracer.find('parametresLoisTraceur')\
                                 .find('loisTracer')
        for law in tracer_laws:
            needed.append(law.find('fichier').text)

    # Courlis sediment computation: the Courlis steering file itself plus
    # the geometry it references
    general = params.find('parametresGeneraux')
    if general.find('optionCourlis') is not None:
        needed.append(general.find('fichierMotCleCourlis').text)

        casfile = general.find('fichierMotCleCourlis').text
        print(casfile)
        dicofile = path.join(CFGS.get_root(), "sources", "mascaret", "data",
                             "dico_Courlis.txt")

        cas = TelemacCas(casfile, dicofile)
        geo_courlis = cas.get('FICHIER DE GEOMETRIE COURLIS')
        needed.append(geo_courlis)

    return needed
def main():
    """
    Main function of doxygenTELEMAC.

    Loads the TELEMAC configuration, scans every source file of the
    selected modules to insert Doxygen headers, runs the doxygen
    executable inside the documentation directory and post-processes
    the generated HTML pages.

    Exits the interpreter with status 0 on success.

    @raises Exception if the doxygen executable cannot be found or
        returns a non-zero exit code.
    """
    bypass = False  # /!\ Temporary bypass for subroutine within programs

# <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
# ~~~~ Reads config file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print('\n\nLoading Options and Configurations\n'+'~'*72+'\n')
    parser = ArgumentParser(
        formatter_class=RawDescriptionHelpFormatter,
        description=('''\n
Generate the DOXYGEN documentation of the whole TELEMAC system.
        '''),
        usage=' (--help for help)\n---------\n       =>  '
              '%(prog)s [options] \n---------')
    parser = add_config_argument(parser)
    parser.add_argument(
        "-d", "--doxydir",
        dest="doxyDir", default='',
        help="specify the root, default is taken from config file")
    parser.add_argument(
        "-m", "--modules",
        dest="modules", default='',
        help="specify the list modules, default is taken from config file")
    options = parser.parse_args()

# <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
# ~~~~ Environment ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    update_config(options)

# <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
# ~~~~ banners ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    svn_banner(CFGS.get_root())

# <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
# ~~~~ Works for only one configuration ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    cfg = CFGS.configs[CFGS.cfgname]

    # Module list from the command line overrides the configuration;
    # separators ',', ';' and '.' are all accepted (still in lower case).
    if options.modules != '':
        cfg['modules'] = options.modules.replace(',', ' ')\
                                        .replace(';', ' ')\
                                        .replace('.', ' ')
    # Output directory: user supplied, or <root>/documentation/<cfgname>
    if options.doxyDir == '':
        cfg.update({'doxydocs': path.join(cfg['root'],
                                          'documentation',
                                          CFGS.cfgname)})
    else:
        cfg.update({'doxydocs': options.doxyDir})
    if not path.isdir(cfg['doxydocs']):
        create_directories(cfg['doxydocs'])
    # parsing for proper naming
    CFGS.compute_doxy_info()
    print('\n\nScanning the source code for:\n'+'~'*72+'\n')
    CFGS.light_dump()

    # ~~ Scans all source files to build a relation database ~~
    fic, _, _, _, _, _, _, racine = scan_sources(CFGS.cfgname, cfg, bypass)

    # ~~ Scan all source files to update Doxygen ~~~~~~~~~~~~~~~~
    for mod in fic:
        print('\nCreating the DOXYGEN headers for ' + mod + '\n'+'~'*72+'\n')
        for ifile in fic[mod]:

            # ~~ Read the content of the source file ~~~~~~~~~~~~
            ilines = get_file_content(ifile)
            # ~~ Update its Doxygen content ~~~~~~~~~~~~~~~~~~~~~
            olines = create_doxygen(ifile, ilines, mod, racine)
            # ~~ Make sure the destination exists ~~~~~~~~~~~~~~~
            ofile = ifile.replace(cfg['root'], cfg['doxydocs'])
            create_directories(path.dirname(ofile))
            # ~~ Write the content of the source file ~~~~~~~~~~~
            put_file_content(ofile, olines)

    # ~~ Run Doxygen ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print('\nNow running DOXYGEN within ' + cfg['doxydocs'] + '\n'+'~'*72+'\n')
    chdir(cfg['doxydocs'])
    if not path.exists(cfg['cmd_doxygen']):
        raise Exception('Do not know where to find {}\n '
                        '... you can correct this through the key '
                        'cmd_doxygen in your configuration file'
                        ''.format(cfg['cmd_doxygen']))
    # A non-zero return code from doxygen is a hard failure; give the
    # user an actionable message instead of a bare Exception.
    if sp.call([cfg['cmd_doxygen']]):
        raise Exception('Doxygen failed while running {} in {}'
                        ''.format(cfg['cmd_doxygen'], cfg['doxydocs']))

    # ~~ Scan all HTML files and replace template in phases
    replace_doxygen(path.join(cfg['doxydocs'], 'html'))


# <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
# ~~~~ Jenkins' success message ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print('\n\nMy work is done\n\n')

    sys.exit(0)
# Beispiel #21
# 0
def main():
    """
    Main program for the execution of damocles.

    Parses the command line, loads the TELEMAC configuration and, for
    each requested module, drives the damocles executable to:
      - dump a dictionary reordered by rubrique (--dump),
      - dump a dictionary reordered by index (--dump2),
      - generate the eficas catalogue files (--eficas),
      - generate the LaTeX corpus of the reference manual (--latex).

    Exits the interpreter with status 0 on success.
    """
    #   ~~ Reads config file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print('\n\nLoading Options and Configurations\n' + '~' * 72 + '\n')
    parser = ArgumentParser()
    parser = add_config_argument(parser)
    parser.add_argument(
        "-m", "--modules",
        dest="modules",
        default='',
        help="specify the list modules, default is "
             "taken from config file")
    parser.add_argument(
        "--dump",
        action="store_true",
        dest="dump",
        default=False,
        help="Will dump a reordered dictionary by rubrique")
    parser.add_argument(
        "--dump2",
        action="store_true",
        dest="dump2",
        default=False,
        help="Will dump a reordered dictionary by index")
    parser.add_argument(
        "--eficas",
        action="store_true",
        dest="eficas",
        default=False,
        help="Will generate the eficas Catalogue from the dictionary")
    parser.add_argument(
        "--latex",
        action="store_true",
        dest="latex",
        default=False,
        help="Will generate the LaTeX file for the reference manual")

    #   ~~~~ Environment ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    args = parser.parse_args()

    #   ~~~~ Works for all configurations unless specified ~~~~~~~~~~~~~~~
    update_config(args)
    cfg = CFGS.configs[CFGS.cfgname]
    # Defining which modules to use (default: the whole system).
    # Fixed: '==' instead of 'is' -- identity comparison against a
    # string literal is implementation-dependent and raises a
    # SyntaxWarning on CPython >= 3.8.
    if args.modules == '':
        module_list = [
            'artemis', 'postel3d', 'stbtel', 'sisyphe', 'telemac2d',
            'telemac3d', 'tomawac', 'waqtel', 'gaia', 'khione'
        ]
    else:
        module_list = args.modules.split(';')
    # Identify Root value
    CFGS.compute_vnv_info()
    root = CFGS.get_root()
    # Path of the damocles executable built for the current configuration
    exe_path = path.join(root, 'builds', CFGS.cfgname,
                         'bin', 'damocles' +
                         cfg['SYSTEM']['sfx_exe'])
    # Looping on all modules
    for module in module_list:
        module_path = path.join(root, 'sources', module)
        if args.dump:
            input_dict = path.join(module_path, module + ".dico")
            output_dict = path.join(module_path, module + "2.dico")
            gen_dump(exe_path, input_dict, output_dict)

        if args.dump2:
            input_dict = path.join(module_path, module + ".dico")
            output_dict = path.join(module_path, module + "2.dico")
            gen_dump2(exe_path, input_dict, output_dict)

        if args.eficas:
            # Creating eficas folder and __init__ if it does not exist
            eficas_path = path.join(root, 'scripts', 'python3', 'eficas')
            if not path.exists(eficas_path):
                mkdir(eficas_path)
                with open(path.join(eficas_path, '__init__.py'), 'w') as fobj:
                    # Fixed: each header line terminated by '\n' and a
                    # well-formed PEP 263 coding cookie (the original
                    # fused both lines into one invalid line).
                    fobj.write("#! /usr/bin/env python\n")
                    fobj.write("# -*- coding: utf-8 -*-\n")

            input_dict = path.join(module_path, module + ".dico")
            input_dep = path.join(module_path, module + ".dico.dep")
            fancy_module = module
            cata_name = path.join(eficas_path, fancy_module + "_cata_auto.py")
            enum_name = path.join(eficas_path, fancy_module + "_enum_auto.py")
            ts_path = eficas_path
            gen_cata(module.upper(), exe_path, input_dict, input_dep,
                     cata_name, enum_name, ts_path + path.sep)

        if args.latex:
            input_dict = path.join(module_path, module + ".dico")
            latex_name = path.join(root, 'documentation', module, 'reference',
                                   'latex', 'Corpus.tex')
            # English only
            lng = '2'
            gen_latex(exe_path, input_dict, latex_name, lng)

    print('\n\n' + '~' * 72)

    #   ~~~~ Jenkins' success message ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print('\n\nMy work is done\n\n')

    sys.exit(0)