Example #1
def create_db():
    config.clear()
    tempdir = tempfile.mkdtemp()
    config["LOG_FILE"] = os.path.join(tempdir, "oar.log")
    config["DB_TYPE"] = "Pg"
    config["DB_PORT"] = "5432"
    config["DB_BASE_NAME"] = "oar"
    config["DB_BASE_PASSWD"] = "oar"
    config["DB_BASE_LOGIN"] = "******"
    config["DB_HOSTNAME"] = "localhost"
Example #2
def bataar(database_mode, socket, node_size, scheduler_policy, types, scheduler_delay, redis_port, verbose):
    """Adaptor to Batsim Simulator."""
    #    import pdb; pdb.set_trace()
    if database_mode == "memory":
        config.clear()
        config.update(BATSIM_DEFAULT_CONFIG)

    print("Starting simulation...")
    print("Scheduler Policy:", scheduler_policy)
    print("Scheduler delay:", scheduler_delay)
    scheduler = BatSched(scheduler_policy, types, scheduler_delay, node_size, database_mode)
    bs = Batsim(scheduler, validatingmachine=None, server_address=socket, verbose=verbose, redis_port=redis_port)

    bs.start()

    if __name__ != "__main__" and database_mode == "memory":
        # If used oar.lib.tools' functions are used after we need to undo monkeypatching.
        # Main use case is suite testing evaluation
        restore_oar_lib_tools()
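bataar() reads like a command-line entry point; one way to expose it is through a click command, sketched below. The option names, defaults and help strings are assumptions derived from the function signature, not taken from the project itself:

import click


@click.command()
@click.option("--database-mode", default="memory", help="'memory' or 'no-db' (assumed values)")
@click.option("--socket", default="tcp://*:28000", help="Batsim server address (illustrative default)")
@click.option("--node-size", default=0, type=int)
@click.option("--scheduler-policy", default="BASIC")
@click.option("--types", default="")
@click.option("--scheduler-delay", default=5.0, type=float)
@click.option("--redis-port", default=6379, type=int)
@click.option("--verbose", is_flag=True)
def cli(database_mode, socket, node_size, scheduler_policy, types,
        scheduler_delay, redis_port, verbose):
    # Thin wrapper: all of the work happens in bataar() from Example #2.
    bataar(database_mode, socket, node_size, scheduler_policy, types,
           scheduler_delay, redis_port, verbose)


if __name__ == "__main__":
    cli()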
Example #3
def create_db():
    config.clear()
    tempdir = tempfile.mkdtemp()
    config["LOG_FILE"] = os.path.join(tempdir, 'oar.log')
    config['DB_TYPE'] = 'Pg'
    config['DB_PORT'] = '5432'
    config['DB_BASE_NAME'] = 'oar'
    config['DB_BASE_PASSWD'] = 'oar'
    config['DB_BASE_LOGIN'] = '******'
    config['DB_HOSTNAME'] = 'localhost'

    db.create_all()
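In a test suite, create_db() can serve as the setup half of a fixture. A minimal sketch, assuming a pytest-based suite and the module-level db object used in these examples (the fixture name and scope are illustrative):

import pytest


@pytest.fixture(scope="session")
def pg_database():
    create_db()  # configure the Pg connection and create all tables
    yield db     # hand the shared db object to the tests
    db.close()   # teardown, mirroring Example #4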
Example #4
def setup_config(request):
    config.clear()
    config.update(DEFAULT_CONFIG.copy())
    tempdir = tempfile.mkdtemp()
    config["LOG_FILE"] = os.path.join(tempdir, 'oar.log')

    db_type = os.environ.get('DB_TYPE', 'memory')
    os.environ.setdefault('DB_TYPE', db_type)

    if db_type not in ('memory', 'sqlite', 'postgresql'):
        pytest.exit("Unsupported database '%s'" % db_type)

    if db_type == "sqlite":
        config['DB_BASE_FILE'] = os.path.join(tempdir, 'db.sqlite')
        config['DB_TYPE'] = 'sqlite'
    elif db_type == "memory":
        config['DB_TYPE'] = 'sqlite'
        config['DB_BASE_FILE'] = ':memory:'
    else:
        config['DB_TYPE'] = 'Pg'
        config['DB_PORT'] = '5432'
        config['DB_BASE_NAME'] = 'oar'
        config['DB_BASE_PASSWD'] = 'oar'
        config['DB_BASE_LOGIN'] = '******'
        config['DB_BASE_PASSWD_RO'] = 'oar_ro'
        config['DB_BASE_LOGIN_RO'] = 'oar_ro'
        config['DB_HOSTNAME'] = 'localhost'

    def dump_configuration(filename):
        with open(filename, 'w', encoding='utf-8') as fd:
            for key, value in six.iteritems(config):
                if not key.startswith('SQLALCHEMY_'):
                    fd.write("%s=%s\n" % (key, str(value)))

    dump_configuration('/etc/oar/oar.conf')
    db.metadata.drop_all(bind=db.engine)
    db.create_all(bind=db.engine)
    kw = {"nullable": True}
    db.op.add_column('resources', db.Column('core', db.Integer, **kw))
    db.op.add_column('resources', db.Column('cpu', db.Integer, **kw))
    db.op.add_column('resources', db.Column('host', db.String(255), **kw))
    db.op.add_column('resources', db.Column('mem', db.Integer, **kw))
    db.reflect()
    yield
    db.close()
    shutil.rmtree(tempdir)
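setup_config() is written as a generator, yielding between setup and teardown, so it can be wrapped as a pytest fixture, typically in a conftest.py. The fixture name and the sample test below are illustrative assumptions:

import pytest


@pytest.fixture()
def oar_env(request):
    # Delegate to setup_config(); its teardown runs after the test finishes.
    yield from setup_config(request)


def test_db_type_is_configured(oar_env):
    # Every branch of setup_config() normalizes DB_TYPE to 'sqlite' or 'Pg'.
    assert config["DB_TYPE"] in ("sqlite", "Pg")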
Example #5
def bataar(wkp_filename, database_mode, socket, node_size, scheduler_policy, types, scheduler_delay):
    if database_mode == 'memory':
        config.clear()
        config.update(BATSIM_DEFAULT_CONFIG)

    assign = False
    assign_func = None
    find = False
    find_func = None

    add_1h = False  # controls addition of one extra hierarchy level in the resource request
    add_mld = False  # controls addition of one extra moldable instance in the resource request

    sp = scheduler_policy
    if sp == 'BASIC' or sp == '0':
        print("BASIC scheduler_policy selected")
        # default
        pass
    elif sp == 'BEST_EFFORT_CONTIGUOUS' or sp == '1':
        print("BEST_EFFORT_CONTIGUOUS scheduler_policy selected")
        find = True
        find_func = getattr(oar.kao.advanced_scheduling, 'find_contiguous_1h')
        assign = True
        assign_func = getattr(oar.kao.advanced_scheduling, 'assign_one_time_find')
    elif sp == 'CONTIGUOUS' or sp == '2':
        print("CONTIGUOUS scheduler_policy selected")
        find = True
        find_func = getattr(oar.kao.advanced_scheduling, 'find_contiguous_1h')
    elif sp == 'BEST_EFFORT_LOCAL' or sp == '3':
        print("BEST_EFFORT_LOCAL scheduler_policy selected")
        add_1h = True
        add_mld = True
    elif sp == 'LOCAL' or sp == '4':
        print("LOCAL scheduler_policy selected")
        add_1h = True

    #
    # Load workload
    #

    json_jobs, nb_res = load_json_workload_profile(wkp_filename)

    print("nb_res:", nb_res)

    if types:
        types_array = types.split(',')
        for type_value in types_array:
            t, v = type_value.split('=')
            if t == "assign":
                print("type assign with function: ", v)
                assign = True
                assign_func = getattr(oar.kao.advanced_scheduling, 'assign_' + v)
            if t == "find":
                print("type find with function: ", v)
                find = True
                find_func = getattr(oar.kao.advanced_scheduling, 'find_' + v)

    if database_mode == 'no-db':
        #
        # generate ResourceSet
        #

        hy_resource_id = [[(i, i)] for i in range(1, nb_res + 1)]
        hierarchy = {'resource_id': hy_resource_id}
        if node_size > 0:
            node_id = [[(node_size * i, node_size * (i + 1) - 1)] for i in range(nb_res // node_size)]
            hierarchy['node'] = node_id

        print('hierarchy: ', hierarchy)

        res_set = ResourceSetSimu(
            rid_i2o=range(nb_res+1),
            rid_o2i=range(nb_res+1),
            roid_itvs=[(1, nb_res)],
            hierarchy=hierarchy,
            available_upto={2147483600: [(1, nb_res)]}
        )

        #
        # prepare jobs
        #
        mld_id = 1
        print("Genererate jobs")

        for j in json_jobs:
            jid = int(j['id'])
            walltime = int(math.ceil(float(j["walltime"])))
            rqb = [([('resource_id', j['res'])], [(1, nb_res)])]
            rqbh = [([('node', 1), ('resource_id', j['res'])], [(1, nb_res)])]

            if add_1h:
                if add_mld:
                    mld_res_rqts = [(mld_id, walltime, rqbh), (mld_id+1, walltime, rqb)]
                    mld_id += 2
                else:
                    mld_res_rqts = [(mld_id, walltime, rqbh)]
                    mld_id += 1
            else:
                if add_mld:
                    mld_res_rqts = [(mld_id, walltime, rqb), (mld_id+1, walltime, rqb)]
                    mld_id += 2
                else:
                    mld_res_rqts = [(mld_id, walltime, rqb)]
                    mld_id += 1

            jobs[jid] = JobSimu(id=jid,
                                state="Waiting",
                                queue="test",
                                start_time=0,
                                walltime=0,
                                types={},
                                res_set=[],
                                moldable_id=0,
                                mld_res_rqts=mld_res_rqts,
                                run_time=0,
                                deps=[],
                                key_cache={},
                                ts=False, ph=0,
                                assign=assign, assign_func=assign_func,
                                find=find, find_func=find_func)

            # print("jobs: ", jid, " mld_res_rqts: ", mld_res_rqts)
        # import pdb; pdb.set_trace()
        BatSched(res_set, jobs, 'simu', {}, scheduler_delay, socket).run()

    elif database_mode == 'memory':

        global offset_idx
        offset_idx = 1
        monkeypatch_oar_lib_tools()
        db_initialization(nb_res)

        #
        # prepare jobs
        #
        db_jid2s_jid = {}
        print("Prepare jobs")
        for i, j in enumerate(json_jobs):
            jid = int(j["id"])
            walltime = int(math.ceil(float(j["walltime"])))
            jobs[jid] = JobSimu(id=jid,
                                state="Waiting",
                                queue="test",
                                start_time=0,
                                walltime=0,
                                moldable_id=0,
                                mld_res_rqts=[(jid, walltime,
                                               [([("resource_id", j["res"])],
                                                 [(1, nb_res - 0)])])],
                                run_time=0,
                                db_jid=i + 1,
                                assign=False,
                                find=False)

            insert_job(res=[(walltime, [('resource_id=' + str(j["res"]), "")])],
                       state='Hold', properties='', user='')
            db_jid2s_jid[i + 1] = jid

        db.flush()  # TO REMOVE ???
        BatSched([], jobs, 'batsim-db', db_jid2s_jid, scheduler_delay, socket).run()

        if __name__ != '__main__':
            # If oar.lib.tools' functions are used afterwards, we need to undo the
            # monkeypatching. The main use case is test-suite evaluation.
            restore_oar_lib_tools()
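The jobs loaded by load_json_workload_profile() are consumed above through three keys per entry: "id", "walltime" and "res", plus a resource count nb_res returned alongside them. An illustrative workload profile consistent with that usage (the surrounding structure and any field beyond those mentioned are assumptions):

example_workload = {
    "nb_res": 4,  # total number of simulated resources (assumed field name)
    "jobs": [
        {"id": 1, "walltime": 3600.0, "res": 2},  # 2 resources for at most 1 hour
        {"id": 2, "walltime": 1800.0, "res": 4},  # 4 resources for at most 30 minutes
    ],
}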