예제 #1
0
def runExperiment(filter, removedWordFile, modelfile, resultFile, vocabFile):
    """Train, persist and evaluate a Naive Bayes text model for one filter.

    Reads the 2018 data as training input, filters the vocabulary, writes
    the kept/removed word lists, smooths and serializes the model, then
    classifies the 2019 test data and returns the classification output.

    Args:
        filter: filtering rule handed to Processing.filterAndRemove.
        removedWordFile: output path for the removed-word list.
        modelfile: output path for the serialized model.
        resultFile: output path for the classification results.
        vocabFile: output path for the retained vocabulary.
    """
    # Training data: [0] -> vocabulary counts, [1] -> per-category counts.
    # (fileToRead, types, smoothingValue come from module scope.)
    data = FileReader.getData(fileToRead, types, 2018)
    vocabDict = data[0]
    catDict = data[1]

    # Work on an alphabetically ordered copy of the vocabulary.
    orderedVocab = OrderedDict(sorted(vocabDict.items()))

    # Apply the word filter and record what was dropped.
    removedWords = Processing.filterAndRemove(orderedVocab, catDict, filter)
    Utility.outputListToFile(orderedVocab.keys(), vocabFile)
    Utility.outputListToFile(removedWords, removedWordFile)

    # NOTE: smoothing is applied to the original (unordered) vocabDict,
    # mirroring the original behavior.
    Processing.smoothAllCategories(vocabDict, catDict, smoothingValue)

    ModelGenerator.outputModelToFile(modelfile, orderedVocab, catDict,
                                     smoothingValue)

    # Round-trip the model through disk, then classify the test set.
    model = FileReader.getModel(modelfile)
    testData = FileReader.getTestData(fileToRead, 2019)
    return ModelGenerator.outputBaysianClassification(resultFile, testData,
                                                      model, types,
                                                      orderedVocab, catDict)
예제 #2
0
def main():
    """Generate model/criteria/mapper source files for every table.

    For each name in the module-level ``tables``, derives a CamelCase model
    name and — depending on the ``switch`` flags (0: model, 1: criteria,
    2: mapper) — emits the corresponding source files, then appends the
    matching import lines to the package's ``__init__.py``.
    """
    # '%s' placeholders are filled in later by the generators with the
    # model name. (The original literals carried a pointless f-prefix with
    # no substitution fields; plain literals are byte-identical values.)
    model_path = os.path.join(directory, "%s.py")
    criteria_path = os.path.join(directory, "%sCriteria.py")
    mapper_path = os.path.join(directory, "%sMapper.py")
    for table_name in tables:
        # snake_case -> CamelCase, e.g. user_account -> UserAccount.
        model_name = ''.join(
            [word[0].upper() + word[1:] for word in table_name.split('_')])
        if switch[0]:
            ModelGenerator.generate(
                mysql=mysql,
                table_name=table_name,
                model_name=model_name,
                model_path=model_path,
            )
        if switch[1]:
            CriteriaGenerator.generate(
                model_name=model_name,
                model_path=model_path,
                criteria_path=criteria_path,
            )
        if switch[2]:
            MapperGenerator.generate(table_name=table_name,
                                     model_name=model_name,
                                     mapper_path=mapper_path)
        # Append (never overwrite) package exports for the new classes.
        with open(os.path.join(directory, '__init__.py'), 'a') as f:
            if switch[0]:
                f.write(f"from .{model_name} import {model_name}\n")
            if switch[1]:
                f.write(
                    f"from .{model_name}Criteria import {model_name}Criteria\n"
                )
            if switch[2]:
                f.write(
                    f"from .{model_name}Mapper import {model_name}Mapper\n")
예제 #3
0
def RuntTest(seed):
    """Solve one random small-world network with branch-and-bound and CPLEX.

    Seeds the shared random generator, builds a four-world small-world
    network, runs the branch-and-bound solver, then builds the complete
    routing/strain model and solves it directly with CPLEX, printing both
    results.
    """

    # Seed the shared random generator so the run is reproducible.
    net.NetworkGraph.SetRandomSeed(seed)

    # Bounds for flow strains and arc capacities.
    FLOW_BOUNDS = (0.001, 3)
    CAPACITY_BOUNDS = (0.3, 1)

    # Small-world network layout: (nodes, edges, external edges) per world.
    world_shapes = [(3, 8, 3), (6, 4, 2), (4, 6, 1), (1, 3, 1)]
    world_init_data = [{
        'NodesNumber': nodes,
        'EdgesNumber': edges,
        'ExternalEdgesNumber': externals
    } for nodes, edges, externals in world_shapes]
    network = net.NetworkGraph.GenerateSmallWorld(world_init_data,
                                                  *CAPACITY_BOUNDS)
    network.GenerateRandomSrcDst(4)

    # Branch-and-bound solve.
    bnb_result = bb.BranchAndBoundSolverD(network, FLOW_BOUNDS).Solve(log=True)
    print('BnB:')
    print(bnb_result)

    # Pull network parameters for the model generator.
    node_list = network.GetNodeList()
    flow_list = network.GetFlowList()
    src_dst = network.GetSrcDstDict()
    arc_list = network.GetArcList()
    capacity = network.GetCapacityParam()

    # Complete model: quadratic objective + route + linear capacity.
    generator = mg.RsModelGenerator(mg.QuadObjectiveGenerator(),
                                    mg.RouteConstraintsGenerator(),
                                    mg.LinearCapacityConstraintsGenerator())
    rs_model = generator.CreateCompletRsModel(flow_list, src_dst, node_list,
                                              arc_list, capacity, FLOW_BOUNDS)

    # Direct CPLEX solve of the concrete model.
    solver_result = sm.CplexSolver().Solve(rs_model.cmodel)
    print("SOLVER:")
    print(
        f"OBJECTIVE: {solver_result['Objective']}, TIME: {solver_result['Time']}"
    )
예제 #4
0
def EstimateObjectvie(rs_model, preserve_feasibility=True):
    """Estimate the objective via per-flow shortest-path routes.

    Solves a route-constraints-only relaxation that minimizes the total
    number of arcs used (shortest routes), then either recovers a feasible
    strain assignment for those routes, or fixes each flow's strain to the
    minimum capacity along its route.

    Args:
        rs_model: model wrapper exposing ``cmodel`` (concrete Pyomo model)
            and ``init_data`` used to instantiate abstract models.
        preserve_feasibility: when True, return the feasible solution
            recovered with Ipopt; when False, return the capacity-limited
            estimate (may be infeasible for the full model).

    Returns:
        A dict with 'Objective', 'Strain', 'Route' and 'Time' keys, or
        None when the shortest-path model cannot be solved.
    """

    #copy model for less interference with the caller's concrete model
    cmodel = cp.deepcopy(rs_model.cmodel)

    #create and solve shortest path model (route constraints only)
    sp_amodel = mg.RsModelGenerator(
        mg.RouteConstraintsGenerator()).CreateAbstractModel()

    def ObjectiveShortestPathRule(model):
        # Total arc usage over all flows: minimizing it yields the
        # shortest route per flow.
        return sum(model.FlowRoute[flow, arc] for flow in model.Flows
                   for arc in model.Arcs)

    sp_amodel.RouteObj = pyo.Objective(rule=ObjectiveShortestPathRule,
                                       sense=pyo.minimize)
    sp_cmodel = sp_amodel.create_instance(data=rs_model.init_data)
    sp_solver = sm.GlpkSolver()
    sp_solution = sp_solver.Solve(sp_cmodel, False)
    if sp_solution == False:
        return None
    # Per-flow route indicators: sp_route[flow][arc] in {0, 1}.
    # NOTE(review): list indexing by ``flow`` below assumes Flows is
    # 0..n-1 — confirm against the model generator.
    sp_route = [{
        arc: pyo.value(sp_cmodel.FlowRoute[flow, arc])
        for arc in sp_cmodel.Arcs
    } for flow in sp_cmodel.Flows]

    #find feasible flow strains (includes the SP solve time in the total)
    if preserve_feasibility:
        feasible_solution = RecoverFeasibleStrain(rs_model, sp_route,
                                                  sm.IpoptSolver())
        feasible_solution['Time'] += sp_solver.time
        return feasible_solution

    #find maximum flow: fix the routes, then cap every flow strain at the
    #smallest capacity along its active arcs
    minimum_capacity = defaultdict(lambda: float('inf'))
    for flow, node_s, node_d in cmodel.FlowRoute:
        route_val = sp_route[flow][(node_s, node_d)]
        cmodel.FlowRoute[(flow, node_s, node_d)].fix(route_val)
        # tolerance: values within 1e-3 of 1 count as "arc on the route"
        if route_val >= (1 - 1e-3):
            minimum_capacity[flow] = min(
                [minimum_capacity[flow], cmodel.Capacity[node_s, node_d]])
    for flow in cmodel.FlowStrain:
        cmodel.FlowStrain[flow].fix(minimum_capacity[flow])
    obj_val, strain_val, route_val = sm.isolver.ISolver.ExtractSolution(cmodel)
    max_solution = {
        'Objective': obj_val,
        'Strain': strain_val,
        'Route': route_val,
        'Time': sp_solver.time
    }
    return max_solution
예제 #5
0
 def __init__(self, network, flow_bounds):
     """Build the complete routing/strain model for the given network.

     Args:
         network: graph object exposing GetNodeList/GetFlowList/
             GetSrcDstDict/GetArcList/GetCapacityParam.
         flow_bounds: (lower, upper) bounds for the flow strains.
     """
     self.log = True
     self.BranchingCutsName = 'BranchingCuts'
     self.required_precision = 1e-4
     # Pull the network parameters required by the model generator.
     nodes = network.GetNodeList()
     flows = network.GetFlowList()
     src_dst = network.GetSrcDstDict()
     arcs = network.GetArcList()
     capacities = network.GetCapacityParam()
     # Quadratic objective + route + nonlinear capacity constraints.
     generator = mg.RsModelGenerator(
         mg.QuadObjectiveGenerator(), mg.RouteConstraintsGenerator(),
         mg.NonlinearCapacityConstraintsGenerator())
     self.rs_model = generator.CreateCompletRsModel(flows, src_dst, nodes,
                                                    arcs, capacities,
                                                    flow_bounds)
예제 #6
0
def RecoverFeasibleStrain(rs_model, routes, solver):
    """Recover an optimal and feasible solution for the given routes.

    Builds a recovery model with the routes fixed to the supplied values
    and the same objective family as the original model, then solves it.

    Args:
        rs_model: model wrapper exposing ``amodel``, ``cmodel`` and
            ``init_data``.
        routes: list (one dict per flow) mapping arc -> route value.
        solver: solver object with a ``Solve(cmodel)`` method.

    Returns:
        The solver's result dict extended with a 'Cmodel' key, or None if
        the solve failed.

    Raises:
        ValueError: if the original model's objective suffix is not one of
            'Linear', 'Quadratic' or 'Logarithmic'. (Previously an unknown
            name fell through to an unhelpful NameError.)
    """

    #update route keys to the flat (flow, *arc) format of FlowRoute
    routes_updated = {}
    for indx, route in enumerate(routes):
        ru = {(indx, *k): v for k, v in route.items()}
        routes_updated.update(ru)

    #copy model for less interference with the caller's abstract model
    amodel = cp.deepcopy(rs_model.amodel)

    #create basic recovery model (nonlinear capacity constraints only)
    rec_amodel = mg.RsModelGenerator(
        mg.NonlinearCapacityConstraintsGenerator()).CreateAbstractModel()

    #attach the same objective family as the original model
    objective_generators = {
        'Linear': mg.objmk.LinearObjectiveGenerator,
        'Quadratic': mg.objmk.QuadObjectiveGenerator,
        'Logarithmic': mg.objmk.LogObjectiveGenerator,
    }
    objective_name = amodel.Suffix[amodel.Obj]
    try:
        obj_maker = objective_generators[objective_name]()
    except KeyError:
        raise ValueError(f'Unknown objective type: {objective_name!r}')
    obj_maker(rec_amodel)

    #create concrete instance and carry over the flow bound values
    rec_cmodel = rec_amodel.create_instance(data=rs_model.init_data)
    rec_cmodel.FlowUbMax = rs_model.cmodel.FlowUbMax.value
    rec_cmodel.FlowLbMin = rs_model.cmodel.FlowLbMin.value

    #fix the route variables to the supplied routes
    for indx in routes_updated:
        rec_cmodel.FlowRoute[indx].fix(routes_updated[indx])

    #solve recovery model
    ret_val = solver.Solve(rec_cmodel)

    #add the generated model to the output for downstream inspection
    if ret_val is not None:
        ret_val['Cmodel'] = rec_cmodel

    return ret_val
예제 #7
0
def deserialize(json_str, destdb):
    """Deserialize a JSON array of records into model instances.

    Args:
        json_str: JSON array of ``{'class': ..., 'fields': {...}}`` items.
        destdb: destination database name passed to
            ModelGenerator.getModel.

    Returns:
        A list of instantiated model objects with their fields assigned.
    """
    json_objs = json.loads(json_str)
    objs = []

    for obj in json_objs:
        # Look up the model class by name and instantiate it.
        o = ModelGenerator.getModel(destdb, obj['class'])()
        # .items() instead of the Python-2-only .iteritems(): works on
        # both Python 2 and 3 with identical iteration behavior.
        for f, v in obj['fields'].items():
            # Assign straight into the instance dict (bypasses any
            # descriptors), mirroring the original behavior.
            o.__dict__[f] = v

        objs.append(o)

    return objs
예제 #8
0
def json2schema(schema_json, commit = True, destdb = None):
    """
    Creates Database, Table, and Column objects as needed to satisfy the incoming schema.
    If the table is already present, assume we are updating: delete all columns and recreate from the schema.
    Unless commit is false, call the required sql to create the incoming tables in the destination database.

    schema_json: JSON string {dbname: {tablename: {columnname: columntype}}}.
    commit: when True, execute the generated DDL against the destination db.
    destdb: when given, overrides every database name in the schema.
    """

    schema = json.loads(schema_json)

    # .items() replaces the Python-2-only .iteritems() throughout so the
    # function also runs on Python 3; iteration behavior is unchanged.
    for dbname, table_schema in schema.items():
        if destdb:
            dbname = destdb

        # Get or create the Database row (EAFP).
        try:
            db = Database.objects.get(name=dbname)
        except Database.DoesNotExist:
            db = Database(name=dbname)
            db.save()

        for tablename, column_schema in table_schema.items():
            try:
                # Existing table: treat as an update — drop all columns.
                table = Table.objects.get(db=db, name=tablename)
                for column in Column.objects.filter(table=table):
                    column.delete()
            except Table.DoesNotExist:
                table = Table(db=db, name=tablename)
                table.save()

            # Recreate the columns from the incoming schema.
            for columnname, columntype in column_schema.items():
                column = Column(table=table, name=columnname, type=columntype)
                column.save()

            if commit:
                # Emit and execute the DDL in the destination database.
                model = ModelGenerator.getModel(dbname, tablename)
                cursor = connections[dbname].cursor()
                for sql in ModelGenerator.getSQL(model):
                    cursor.execute(sql)
    return None
예제 #9
0
# Flask application bootstrap: creates the app and the SQLAlchemy handle,
# then builds the admin interface with one view per generated model.
from flask import Flask
from os import path
# NOTE(review): the flask.ext.* namespace is deprecated and removed in
# modern Flask; today these would be flask_admin / flask_sqlalchemy —
# confirm the pinned Flask version before upgrading.
from flask.ext.admin import Admin
from flask.ext.sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config.from_object('config')

db = SQLAlchemy(app)

# Star imports bring in the admin views, the document models and
# ModelGenerator used below.
from views import *
from documents import *
from ModelGenerator import *

modelGenerator = ModelGenerator(app, db)

# Login-protected admin with Home/Login/Logout entry points.
init_login()
admin = Admin(app, name = 'Admin', index_view=LoginView(name='Login', category='Home'))
admin.add_view(LoggedUserIndexView(name='Home', endpoint='home', category='Home'))
admin.add_view(LogoutView(name='Logout', endpoint='logout', category='Home'))

# compile() presumably yields key -> (model, view-class) tuples; register
# one admin view per generated model — TODO confirm against ModelGenerator.
for key, tupple in  modelGenerator.compile().items():
	admin.add_view(tupple[1](tupple[0], db.session))

'''

admin.add_view(RaceView(Race, db.session))
admin.add_view(HighlightView(Highlight, db.session))
'''
예제 #10
0
import InputExtractor as IE
import ModelGenerator as MG
import pandas as pd

if __name__ == '__main__':

    # Step 1: collect the columns (words + songs) from the dataset.
    words = []
    songs = []

    path = r'H:\Workshop\Machine Learning and AI\Datasets\spotify\jsons'
    data = IE.InputExtractor(path)
    songs, words = data.Extract_Input()
    print("Total songs: " + str(len(songs)))
    print("Total words: " + str(len(words)))

    # Step 2: build the pandas DataFrame.

    model = MG.ModelGenerator(songs, words, path)
    df = model.CreateFrame()


    # Step 3: build the neural network (not implemented here).

def RunTest(seed):
    """Run LB and UB objective estimates on one random small-world network.

    Generates a reproducible network from ``seed``, builds the complete
    routing/strain model, then runs mp.EstimateObjectvie twice: once
    without feasibility preservation (lower bound) and once with it
    (upper bound), printing each result and its constraint violations.
    """

    #describe run
    print('#######################################################')
    print(f'Seed is equal: {seed}')

    #define flow bounds
    FLOW_BOUNDS = (0.001, 3)
    CAPACITY_BOUNDS = (0.3, 1)

    #random network (seeded for reproducibility)
    net.NetworkGraph.SetRandomSeed(seed)

    #Network small
    world_init_data = [ {'NodesNumber': 3, 'EdgesNumber': 8,  'ExternalEdgesNumber': 3 }, 
                        {'NodesNumber': 12, 'EdgesNumber': 32, 'ExternalEdgesNumber': 3 },
                        {'NodesNumber': 6, 'EdgesNumber': 22,  'ExternalEdgesNumber': 2 },
                         {'NodesNumber': 4, 'EdgesNumber': 12,  'ExternalEdgesNumber': 2 }
                        ]
    network = net.NetworkGraph.GenerateSmallWorld(world_init_data, *CAPACITY_BOUNDS)
    network.GenerateRandomSrcDst(12)

    #get network params
    n_list = network.GetNodeList()
    f_list = network.GetFlowList()
    sd_dict = network.GetSrcDstDict()
    a_list = network.GetArcList()
    c_dict = network.GetCapacityParam()

    nmg = mg.RsModelGenerator(mg.QuadObjectiveGenerator(), mg.RouteConstraintsGenerator(), mg.NonlinearCapacityConstraintsGenerator())
    rs_model = nmg.CreateCompletRsModel(f_list, sd_dict, n_list, a_list, c_dict, FLOW_BOUNDS)

    #lower bound: estimate without preserving feasibility
    start_time_lb = t.perf_counter()
    estimate_result_lb = mp.EstimateObjectvie(rs_model, False)
    elapsed_time_lb = t.perf_counter() - start_time_lb
    _report_estimate('LB', rs_model, estimate_result_lb, elapsed_time_lb)

    #upper bound: estimate with feasibility preserved
    start_time_ub = t.perf_counter()
    estimate_result_ub = mp.EstimateObjectvie(rs_model, True)
    elapsed_time_ub = t.perf_counter() - start_time_ub
    _report_estimate('UB', rs_model, estimate_result_ub, elapsed_time_ub)


def _report_estimate(label, rs_model, estimate_result, elapsed_time):
    """Print one estimate's objective/time plus its constraint violations.

    Factors out the reporting block previously duplicated verbatim for the
    LB and UB runs; output is byte-identical to the original.
    """
    objective = estimate_result['Objective']
    strains = estimate_result['Strain']
    routes = estimate_result['Route']
    solve_time = estimate_result['Time']
    print('###################!RESULTS!#############################')
    print(f'{label}:\nObjective: {objective}, Time: {solve_time}, Total time: {elapsed_time}')
    violations = mp.FindConstraintsViolation(rs_model.cmodel, strains, routes)
    cc_vn = violations['capacity_constraints'][1]
    rc_vn = violations['route_constraints'][1]
    if cc_vn == 0 and rc_vn == 0:
        print('Feasible')
    else:
        print(f'Capacity constraint violations number: {cc_vn}')
        print(f'Route constraint violations number: {rc_vn}')
    print('__________________________________________________________')
예제 #12
0
def RunTest(seed, network_size, formulation, decomposition=0, coordination=0):
    """Run one decomposition/coordination experiment on a random network.

    Args:
        seed: random seed for reproducible network generation.
        network_size: 0 -> medium network, 1 -> large network.
        formulation: 0 original, 1 linearized constraints, 2 alternative.
        decomposition: 0 undecomposed, 1 by demands, 2 by subnetworks,
            3 variable separating.
        coordination: 0 unrelaxed, 1 proximal cutting planes,
            2 subgradient level evaluation.

    Raises:
        ValueError: if ``formulation`` is not 0, 1 or 2 (previously an
            invalid value fell through to a NameError on ``nmg``).
    """

    #describe run
    network_description = ['Medium', 'Large']
    formulations_description = [
        'Original', 'Linearized Constraints', 'Alternative'
    ]
    decomposition_description = [
        'Solve undecomposed', 'Demands', 'Subnetworks', 'Variable separating'
    ]
    coordination_description = [
        'Solve unrelaxed', 'Proximal cutting planes',
        'Subgradient level evaluation'
    ]
    print('#######################################################')
    print(f'Seed is equal: {seed}')
    print(f'Network size: {network_description[network_size]}')
    print(f'Formulation: {formulations_description[formulation]}')
    print(f'Decomposition: {decomposition_description[decomposition]}')
    print(f'Coordination: {coordination_description[coordination]}')

    #define flow bounds
    FLOW_BOUNDS = (0.001, 3)
    CAPACITY_BOUNDS = (0.3, 1)

    #random network (seeded for reproducibility)
    net.NetworkGraph.SetRandomSeed(seed)

    networks = [None, None]

    #Network medium
    world_init_data = [{
        'NodesNumber': 3,
        'EdgesNumber': 8,
        'ExternalEdgesNumber': 3
    }, {
        'NodesNumber': 12,
        'EdgesNumber': 32,
        'ExternalEdgesNumber': 3
    }, {
        'NodesNumber': 6,
        'EdgesNumber': 22,
        'ExternalEdgesNumber': 2
    }, {
        'NodesNumber': 4,
        'EdgesNumber': 12,
        'ExternalEdgesNumber': 2
    }]
    networks[0] = net.NetworkGraph.GenerateSmallWorld(world_init_data,
                                                      *CAPACITY_BOUNDS)
    networks[0].GenerateRandomSrcDst(12)

    #Network large
    world_init_data = [
        {
            'NodesNumber': 3,
            'EdgesNumber': 8,
            'ExternalEdgesNumber': 3
        },
        {
            'NodesNumber': 12,
            'EdgesNumber': 32,
            'ExternalEdgesNumber': 3
        },
        {
            'NodesNumber': 6,
            'EdgesNumber': 22,
            'ExternalEdgesNumber': 2
        },
        {
            'NodesNumber': 4,
            'EdgesNumber': 12,
            'ExternalEdgesNumber': 2
        },
        {
            'NodesNumber': 9,
            'EdgesNumber': 16,
            'ExternalEdgesNumber': 1
        },
        {
            'NodesNumber': 8,
            'EdgesNumber': 20,
            'ExternalEdgesNumber': 2
        },
        {
            'NodesNumber': 4,
            'EdgesNumber': 12,
            'ExternalEdgesNumber': 1
        },
        {
            'NodesNumber': 3,
            'EdgesNumber': 6,
            'ExternalEdgesNumber': 1
        },
    ]
    networks[1] = net.NetworkGraph.GenerateSmallWorld(world_init_data,
                                                      *CAPACITY_BOUNDS)
    networks[1].GenerateRandomSrcDst(32)

    network = networks[network_size]

    #get network params
    n_list = network.GetNodeList()
    f_list = network.GetFlowList()
    sd_dict = network.GetSrcDstDict()
    a_list = network.GetArcList()
    c_dict = network.GetCapacityParam()

    #select formulation (elif chain + explicit failure instead of silently
    #leaving nmg unbound for invalid values)
    if formulation == 0:
        nmg = mg.RsModelGenerator(mg.QuadObjectiveGenerator(),
                                  mg.RouteConstraintsGenerator(),
                                  mg.NonlinearCapacityConstraintsGenerator())
    elif formulation == 1:
        nmg = mg.RsModelGenerator(mg.QuadObjectiveGenerator(),
                                  mg.RouteConstraintsGenerator(),
                                  mg.LinearCapacityConstraintsGenerator())
    elif formulation == 2:
        nmg = mg.RsModelGenerator(mg.QuadObjectiveGenerator(),
                                  mg.ReformulatedConstraintsGenerator())
    else:
        raise ValueError(f'Unknown formulation: {formulation!r}')

    rs_model = nmg.CreateCompletRsModel(f_list, sd_dict, n_list, a_list,
                                        c_dict, FLOW_BOUNDS)

    #initialize solvers
    opt_solver = sm.CplexSolver()
    mstr_solver = sm.IpoptSolver()
    rec_solver = sm.IpoptSolver()

    #select coordinator (None means solve unrelaxed)
    coordinator = None
    if coordination == 1:
        coordinator = cr.CoordinatorCuttingPlaneProximal(lm_min=-4.6,
                                                         lm_max=6.6)
    elif coordination == 2:
        coordinator = cr.CoordinatorGradient(
            step_rule=cr.gradstep.ObjectiveLevelStepRule(3.2, 1.4))

    #select decomposer (None means solve undecomposed)
    decomposer = None
    decomposed_solvers = []
    if decomposition == 1:
        decomposed_solvers = [opt_solver for _ in f_list]
        decomposer = dec.FlowDecomposer(rs_model, coordinator)
    elif decomposition == 2:
        decomposed_solvers = [opt_solver for _ in world_init_data]
        decomposer = dec.SubnetsDecomposer(rs_model, coordinator,
                                           network.GetWorldNodes())
    elif decomposition == 3:
        decomposed_solvers = [opt_solver, opt_solver]
        decomposer = dec.SepVarDecomposer(rs_model, coordinator)

    #deduce run parameter: decomposition requires both a decomposer and a
    #coordinator; otherwise the original model is solved directly
    run_decomposition = (coordination != 0) and (decomposition != 0)
    run_original = not run_decomposition

    #run computation
    tf.RunTest(network,
               rs_model,
               decomposer, {
                   'Original': opt_solver,
                   'Recovered': rec_solver,
                   'Master': mstr_solver,
                   'Decomposed': decomposed_solvers
               },
               solve_original=run_original,
               solve_decomposed=run_decomposition,
               max_iter=25,
               validate_feasability=True,
               recover_feasible=True,
               draw_progress=True,
               draw_solution=False)