Example 1
def _create_opl_mod_text(tdf, output):
    """
    Generate the OPL .mod schema declarations for a TicDatFactory.

    :param tdf: A TicDatFactory defining the schema. Every table must have
                primary key fields; generator and generic tables are rejected.
    :param output: truthy when generating the solution-side schema — output
                   tables are initialized to ``{}`` and a
                   ``writeOutputToFile()`` helper is appended; input tables
                   are declared with ``...`` (populated from the .dat file)
    :return: a string consistent with the OPL .mod format
    """
    verify(not find_case_space_duplicates(tdf),
           "There are case space duplicate field names in the schema.")
    verify(not tdf.generator_tables,
           "Input schema error - doesn't work with generator tables.")
    verify(
        not tdf.generic_tables,
        "Input schema error - doesn't work with generic tables. (not yet - will \
            add ASAP as needed) ")
    # rename any fields that collide with OPL reserved words
    tdf = _fix_fields_with_opl_keywords(tdf)
    rtn = ''
    dict_tables = {t for t, pk in tdf.primary_key_fields.items() if pk}
    verify(
        set(dict_tables) == set(tdf.all_tables),
        "not yet handling non-PK tables of any sort")

    prepend = getattr(tdf, "opl_prepend", "")

    def _get_type(data_types, table, field, is_pk=False):
        # OPL only distinguishes float vs string here; fields without a
        # declared data type default to string for PKs and float otherwise.
        try:
            return "float" if data_types[table][field].number_allowed else "string"
        except KeyError:
            return "string" if is_pk else "float"

    def get_table_as_mod_text(tdf, tbn, output):
        rtn = ''
        sig = '{}' if output else '...'
        if len(tdf.primary_key_fields[tbn]) == 1 and len(tdf.data_fields[tbn]) == 0:
            # single-PK table with no data fields -> a plain set of scalars
            rtn = "{" + _get_type(tdf.data_types, tbn, tdf.primary_key_fields[tbn][0], True) + "} " + \
                  prepend + tbn + " = " + sig + ";\n\n"
        else:
            # otherwise declare a tuple type, then a set of that tuple;
            # field names are lowercased with spaces replaced by underscores
            rtn += "tuple " + prepend + tbn + "_type\n{"
            for pk in tdf.primary_key_fields[tbn]:
                pk_m = pk.replace(' ', '_').lower()
                rtn += "\n\tkey " + _get_type(tdf.data_types, tbn, pk, True) + " " + pk_m + ";"
            for df in tdf.data_fields[tbn]:
                df_m = df.replace(' ', '_').lower()
                rtn += "\n\t" + _get_type(tdf.data_types, tbn, df) + " " + df_m + ";"
            rtn += "\n};\n\n{" + prepend + tbn + "_type} " + prepend + tbn + "=" + sig + ";\n\n"
        return rtn

    # sort the table names: iterating the bare set produced .mod text whose
    # table order varied from run to run
    for t in sorted(dict_tables):
        rtn += get_table_as_mod_text(tdf, t, output)

    if output:
        # Append a writeOutputToFile() helper that dumps every solution table
        # to results.dat, which the solve wrapper reads back afterwards.
        rtn += '\nexecute {\n\tfunction writeOutputToFile() {'
        rtn += '\n\t\tvar ofile = new IloOplOutputFile("results.dat");'
        for t in sorted(dict_tables):
            rtn += '\n\tofile.writeln("' + prepend + t + ' = ", ' + prepend + t + ');'
        rtn += '\n\t}\t\n}'
    return rtn
Example 2
def create_lingo_mod_text(tdf):
    """
    Generate a Lingo .lng string from a TicDat object for diagnostic purposes

    :param tdf: A TicDatFactory defining the input schema

    :return: A string consistent with the Lingo .lng input format
    """
    verify(not find_case_space_duplicates(tdf),
           "There are case space duplicate field names in the schema.")
    verify(not tdf.generator_tables,
           "Input schema error - doesn't work with generator tables.")
    verify(
        not tdf.generic_tables,
        "Input schema error - doesn't work with generic tables. (not yet - will \
            add ASAP as needed) ")
    rtn = 'sets:\n'
    dict_tables = {t for t, pk in tdf.primary_key_fields.items() if pk}
    verify(
        set(dict_tables) == set(tdf.all_tables),
        "not yet handling non-PK tables of any sort")

    prepend = getattr(tdf, "lingo_prepend", "")

    def get_table_as_mod_text(tdf, tbn):
        # Emit one "sets:" entry of the form  name(parents...): fields;
        p_tbn = prepend + tbn
        rtn = p_tbn
        if len(tdf.primary_key_fields[tbn]) > 1:
            # A multi-field-PK table is declared as a derived set over its
            # parent tables, so every PK field must map to exactly one
            # foreign key into a parent table.
            fkr = []
            for pk in tdf.primary_key_fields[tbn]:
                fk = [k for k in tdf.foreign_keys
                      if k.native_table == tbn and k.mapping.native_field == pk]
                verify(
                    len(fk) == 1,
                    "Table '%s' needs to fully link its primary key fields to parent tables via"
                    " foreign keys." % tbn)
                fkr.append(prepend + fk[0].foreign_table)
            rtn += '(' + ','.join(fkr) + ')'
        rtn += ':'
        # data fields become attributes named <table>_<field>, lowercased
        # with spaces replaced by underscores
        fields = [p_tbn + '_' + df.replace(' ', '_').lower()
                  for df in tdf.data_fields[tbn]]
        rtn += ','.join(fields)
        rtn += ';\n'
        return rtn

    # _sorted_tables yields tables in an order where parents precede children
    for t in _sorted_tables(tdf):
        rtn += get_table_as_mod_text(tdf, t)
    rtn += 'endsets'
    return rtn
Example 3
def opl_run(mod_file, input_tdf, input_dat, soln_tdf, infinity=INFINITY, oplrun_path=None):
    """
    solve an optimization problem using an OPL .mod file
    :param mod_file: An OPL .mod file.
    :param input_tdf: A TicDatFactory defining the input schema
    :param input_dat: A TicDat object consistent with input_tdf
    :param soln_tdf: A TicDatFactory defining the solution schema
    :param infinity: A number used to represent infinity in OPL
    :param oplrun_path: A path to the oplrun executable; if omitted it is
                        read from oplrun_path.txt next to this module
    :return: a TicDat object consistent with soln_tdf, or None if no solution found
    """
    verify(os.path.isfile(mod_file), "mod_file %s is not a valid file."%mod_file)
    verify(not find_case_space_duplicates(input_tdf), "There are case space duplicate field names in the input schema.")
    verify(not find_case_space_duplicates(soln_tdf), "There are case space duplicate field names in the solution schema.")
    # input and solution table names (after prepending) must not collide,
    # since both schemas end up in the same OPL model namespace
    verify(len({input_tdf.opl_prepend + t for t in input_tdf.all_tables}.union(
               {soln_tdf.opl_prepend + t for t in soln_tdf.all_tables})) ==
           len(input_tdf.all_tables) + len(soln_tdf.all_tables),
           "There are colliding input and solution table names.\nSet opl_prepend so " +
           "as to insure the input and solution table names are effectively distinct.")
    msg  = []
    verify(input_tdf.good_tic_dat_object(input_dat, msg.append),
           "tic_dat not a good object for the input_tdf factory : %s"%"\n".join(msg))
    # rename fields colliding with OPL keywords, then rebuild the data under
    # the adjusted schemas (round-trip through a json dict)
    orig_input_tdf, orig_soln_tdf = input_tdf, soln_tdf
    input_tdf = _fix_fields_with_opl_keywords(input_tdf)
    soln_tdf = _fix_fields_with_opl_keywords(soln_tdf)
    input_dat = input_tdf.TicDat(**make_json_dict(orig_input_tdf, input_dat))
    assert input_tdf.good_tic_dat_object(input_dat)
    mod_file_name = os.path.basename(mod_file)[:-4]
    # the user's .mod file must include the autogenerated schema files and
    # call the generated writeOutputToFile() routine
    with open(mod_file, "r") as f:
        mod = f.read()
        assert 'writeOutputToFile()' in mod
        assert ("ticdat_" + mod_file_name + ".mod") in mod
        assert ("ticdat_" + mod_file_name + "_output.mod") in mod
    working_dir = os.path.abspath(os.path.dirname(mod_file))
    if tu.development_deployed_environment:
        # in a deployed environment, work in a fresh uuid-named scratch dir
        working_dir = os.path.join(working_dir, "oplticdat_%s"%uuid.uuid4())
        shutil.rmtree(working_dir, ignore_errors = True)
        os.mkdir(working_dir)
        working_dir = os.path.abspath(working_dir)
        _ = os.path.join(working_dir, os.path.basename(mod_file))
        shutil.copy(mod_file, _)
        mod_file = _
    datfile = os.path.join(working_dir, "temp.dat")
    output_txt = os.path.join(working_dir, "output.txt")
    results_dat = os.path.join(working_dir, "results.dat")
    # remove any stale results so a leftover file can't masquerade as a solve
    if os.path.isfile(results_dat):
        os.remove(results_dat)
    with open(datfile, "w") as f:
        f.write(create_opl_text(input_tdf, input_dat, infinity))
    verify(os.path.isfile(datfile), "Could not create temp.dat")
    with open(os.path.join(working_dir, "ticdat_"+mod_file_name+".mod"), "w") as f:
        f.write("/* Autogenerated input file, created by opl.py on " + time.asctime() + " */\n")
        f.write(create_opl_mod_text(orig_input_tdf))
    with open(os.path.join(working_dir,"ticdat_"+mod_file_name+"_output.mod"), "w") as f:
        f.write("/* Autogenerated output file, created by opl.py on " + time.asctime() + " */\n")
        f.write(create_opl_mod_output_text(orig_soln_tdf))
    if not oplrun_path:
        verify_str = "need to either pass oplrun_path argument or run oplrun_setup.py"
        if tu.development_deployed_environment:
            verify_str = "could not find oplrun. Make sure the Application Type is set correctly"
        verify(os.path.isfile(os.path.join(_code_dir(),"oplrun_path.txt")), verify_str)
        with open(os.path.join(_code_dir(),"oplrun_path.txt"),"r") as f:
            oplrun_path = f.read().strip()
    verify(os.path.isfile(oplrun_path), "%s not a valid path to oplrun"%oplrun_path)
    if tu.development_deployed_environment:
        # oplrun needs its sibling shared libraries on LD_LIBRARY_PATH
        if "LD_LIBRARY_PATH" not in os.environ.keys():
            os.environ["LD_LIBRARY_PATH"] = os.path.abspath(os.path.join(oplrun_path,'..'))
        elif not oplrun_path in os.environ["LD_LIBRARY_PATH"]:
            os.environ["LD_LIBRARY_PATH"] = os.path.abspath(os.path.join(oplrun_path,'..')) + \
                                            ":" + os.environ["LD_LIBRARY_PATH"]
    try:
        output = subprocess.check_output([oplrun_path, mod_file, datfile], stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as err:
        if tu.development_deployed_environment:
            # err.output is bytes; str() it before concatenating (matches
            # lingo_run) — the bare concat raised TypeError on Python 3,
            # hiding the real oplrun failure
            raise Exception("oplrun failed to complete: " + str(err.output))
        output = err.output
    with open(output_txt, "w") as f:
        f.write(str(output))
    if not os.path.isfile(results_dat):
        print("%s is not a valid file. A solution was likely not generated. Check 'output.txt' for details."%results_dat)
        return None
    with open(results_dat, "r") as f:
        output = f.read()
    # undo the keyword-driven field renames before handing the text back
    soln_tdf = _unfix_fields_with_opl_keywords(soln_tdf)
    return read_opl_text(soln_tdf, output, False)
Example 4
def lingo_run(lng_file, input_tdf, input_dat, soln_tdf, infinity=INFINITY, runlingo_path=None):
    """
    solve an optimization problem using an Lingo .lng file
    :param lng_file: An Lingo .lng file.
    :param input_tdf: A TicDatFactory defining the input schema
    :param input_dat: A TicDat object consistent with input_tdf
    :param soln_tdf: A TicDatFactory defining the solution variables
    :param infinity: A number used to represent infinity in Lingo
    :param runlingo_path: A path to the runlingo executable
    :return: a TicDat object consistent with soln_tdf, or None if no solution found
    """
    verify(os.path.isfile(lng_file), "lng_file %s is not a valid file."%lng_file)
    verify(not find_case_space_duplicates(input_tdf),
           "There are case space duplicate field names in the input schema.")
    verify(not find_case_space_duplicates(soln_tdf),
           "There are case space duplicate field names in the solution schema.")
    # input and solution table names (after prepending) must be pairwise
    # distinct, since both schemas share the Lingo model namespace
    verify(len({input_tdf.lingo_prepend + t for t in input_tdf.all_tables}.union(
               {soln_tdf.lingo_prepend + t for t in soln_tdf.all_tables})) ==
           len(input_tdf.all_tables) + len(soln_tdf.all_tables),
           "There are colliding input and solution table names.\nSet lingo_prepend so " +
           "as to insure the input and solution table names are effectively distinct.")
    msg = []
    verify(input_tdf.good_tic_dat_object(input_dat, msg.append),
           "tic_dat not a good object for the input_tdf factory : %s"%"\n".join(msg))
    # map data to a space/case-normalized form for Lingo; the inverse of
    # this mapping is applied to the solution at the end
    mapping = _try_create_space_case_mapping(input_tdf, input_dat)
    verify("failures" not in mapping, "The following case-space mapping data collisions were found.\n%s"%
                                       mapping.get("failures"))
    input_dat = _apply_space_case_mapping(input_tdf, input_dat, {v:k for k,v in mapping["mapping"].items()})
    orig_input_tdf, orig_soln_tdf = input_tdf, soln_tdf
    # normalize the data by round-tripping it through a json dict
    input_dat = input_tdf.TicDat(**make_json_dict(orig_input_tdf, input_dat))
    assert input_tdf.good_tic_dat_object(input_dat)
    lng_file_name = os.path.basename(lng_file)[:-4]
    # the user's .lng file must include the three autogenerated companion files
    with open(lng_file, "r") as f:
        lng = f.read()
        assert ("ticdat_" + lng_file_name + ".lng") in lng
        assert ("ticdat_" + lng_file_name + "_output.ldt") in lng
        assert ("ticdat_" + lng_file_name + ".ldt") in lng
    working_dir = os.path.abspath(os.path.dirname(lng_file))
    if tu.development_deployed_environment:
        # in a deployed environment, work in a fresh uuid-named scratch dir
        working_dir = os.path.join(working_dir, "lingoticdat_%s"%uuid.uuid4())
        shutil.rmtree(working_dir, ignore_errors = True)
        os.mkdir(working_dir)
        working_dir = os.path.abspath(working_dir)
        _ = os.path.join(working_dir, os.path.basename(lng_file))
        shutil.copy(lng_file, _)
        lng_file = _
    commandsfile = os.path.join(working_dir, "ticdat_"+lng_file_name+".ltf")
    ldtfile = os.path.join(working_dir, "ticdat_"+lng_file_name+".ldt")
    output_txt = os.path.join(working_dir, "output.txt")
    soln_tables = {t for t, pk in soln_tdf.primary_key_fields.items() if pk}
    results = []
    # each solution table is written by Lingo to its own <table>.ldt file;
    # delete any stale copies so leftovers can't masquerade as a solve
    for tbn in soln_tables:
        fn = os.path.join(working_dir, tbn+".ldt")
        if os.path.isfile(fn):
            os.remove(fn)
        results.append(fn)
    with open(ldtfile, "w") as f:
        f.write(create_lingo_text(input_tdf, input_dat, infinity))
    verify(os.path.isfile(ldtfile), "Could not create ticdat_" + lng_file_name+".ldt")
    with open(os.path.join(working_dir, "ticdat_"+lng_file_name+".lng"), "w") as f:
        f.write("! Autogenerated input file, created by lingo.py on " + time.asctime() + " ;\n")
        f.write(create_lingo_mod_text(orig_input_tdf))
    with open(os.path.join(working_dir,"ticdat_"+lng_file_name+"_output.ldt"), "w") as f:
        f.write("! Autogenerated output file, created by lingo.py on " + time.asctime() + " ;\n")
        f.write(create_lingo_output_text(orig_soln_tdf))
    # runlingo is driven by a .ltf command script: load the model, solve, quit
    commands = [
        "! Autogenerated commands file, created by lingo.py on " + time.asctime() + " ;",
        "TAKE " + lng_file,
        "GO",
        "QUIT"
    ]
    with open(commandsfile, "w") as f:
        f.write("\n".join(commands))
    if not runlingo_path:
        # resolution order: explicit argument, TICDAT_LINGO_PATH env var,
        # then the runlingo_path.txt file written by lingo_run_setup.py
        if 'TICDAT_LINGO_PATH' in os.environ:
            runlingo_path = os.environ['TICDAT_LINGO_PATH']
        else:
            verify(os.path.isfile(os.path.join(_code_dir(),"runlingo_path.txt")),
               "need to either pass runlingo_path argument or run lingo_run_setup.py")
            with open(os.path.join(_code_dir(),"runlingo_path.txt"),"r") as f:
                runlingo_path = f.read().strip()
    verify(os.path.isfile(runlingo_path), "%s not a valid path to runlingo"%runlingo_path)
    output = ''
    try:
        output = subprocess.check_output([runlingo_path, commandsfile], stderr=subprocess.STDOUT, cwd=working_dir)
    except subprocess.CalledProcessError as err:
        # outside a deployed environment a failed solve is not fatal here;
        # the missing result files below will report it
        if tu.development_deployed_environment:
            raise Exception("runlingo failed to complete: " + str(err.output))
    with open(output_txt, "w") as f:
        f.write(str(output))
    output_data = {}
    # collect each solution table's .ldt output; any missing file means no
    # solution was produced
    for i in zip(soln_tables,results):
        if not os.path.isfile(i[1]):
            print("%s is not a valid file. A solution was likely not generated. Check 'output.txt' for details."%i[1])
            return None
        with open(i[1], "r") as f:
            output_data[i[0]] = f.read()
    rtn = read_lingo_text(soln_tdf, output_data)
    # restore the original space/case field values in the solution
    return _apply_space_case_mapping(soln_tdf, rtn, mapping["mapping"])