Example #1
def set_affinity(ppid):
    """ Attemts to set the affinity of the caller process to the CPU
    specified in options.CPUID. CPUID must be initialized, e.g., using
    load_process_affinities.

    Args:
        ppid: A string containing the name of the caller process. Must 
              be either 'rcvloop','sendloop','parser' or 'main'.
        
    """
    pid = os.getpid()
    cpu = options.CPUID.get(ppid, None)
    if not cpu:
        return

    try:
        import affinity

        # CPUID contains the CPU number for each process id
        # (e.g. parser:2). The pid of the current process is pinned to
        # the corresponding CPU
        affinity.set_process_affinity_mask(pid, cpu)
    except (ImportError, KeyError):
        DEBUG("could not set CPU affinity")

    DEBUG("CPU=%d:\t process [%s] (PID=%d)" % (int(np.log2(cpu)), ppid, pid))
Example #2
def run(idx):
    affinity.set_process_affinity_mask(os.getpid(), 1 << idx)

    env = lmdb.open('/ram/dbtest', map_size=1048576 * 1024,
        metasync=False, sync=False, map_async=True)

    k = list(keys)
    random.shuffle(k)
    k = k[:1000]

    while 1:
        with env.begin() as txn:
            nextkey = iter(k).next
            try:
                while 1:
                    hash(txn.get(nextkey()))
            except StopIteration:
                pass
            arr[idx] += len(k)

        samp = random.sample(keys, int(len(k) / 10))
        with env.begin(write=True) as txn:
            for sk in samp:
                txn.put(sk, sk+sk)
        arrw[idx] += len(samp)
Example #3
def set_affinity(PID, target_affinity):
    try:
        affinity.set_process_affinity_mask(PID, target_affinity)
    except:
        print("Unknown error")
        exit()
    print("New, optimized CPU affinity for {} applied".format(pName))
Example #4
def main():
    from suite import benchmarks
    # GitRepo wants exactly 7 character hash?
    if args.base_commit:
        args.base_commit = args.base_commit[:7]
    if args.target_commit:
        args.target_commit = args.target_commit[:7]

    if not args.log_file:
        args.log_file = os.path.abspath(
            os.path.join(REPO_PATH, 'vb_suite.log'))

    saved_dir = os.path.curdir
    if args.outdf:
        # not bullet-proof but enough for us
        args.outdf = os.path.realpath(args.outdf)

    if args.log_file:
        # not bullet-proof but enough for us
        args.log_file = os.path.realpath(args.log_file)

    random.seed(args.seed)
    np.random.seed(args.seed)

    try:
        import affinity
        affinity.set_process_affinity_mask(0,args.affinity)
        assert affinity.get_process_affinity_mask(0) == args.affinity
        print("CPU affinity set to %d" % args.affinity)
    except ImportError:
        print("Warning: The 'affinity' module is not available.")
        time.sleep(2)

    print("\n")
    prprint("LOG_FILE = %s" % args.log_file)
    if args.outdf:
        prprint("PICKE_FILE = %s" % args.outdf)

    print("\n")

    # move away from the pandas root dit, to avoid possible import
    # surprises
    os.chdir(os.path.dirname(os.path.abspath(__file__)))

    benchmarks = [x for x in benchmarks if re.search(args.regex,x.name)]

    for b in benchmarks:
        b.repeat = args.repeats
        if args.ncalls:
            b.ncalls = args.ncalls

    if benchmarks:
        if args.head:
            profile_head(benchmarks)
        else:
            profile_comparative(benchmarks)
    else:
        print( "No matching benchmarks")

    os.chdir(saved_dir)
Example #5
def migration():
    id1=1
    id2=2
    thread1 = thread("Thread" + str(id1), id1)
    print(str(thread1.thread_name) + " " + str(thread1.thread_ID))

    thread2 = thread("Thread" + str(id2), id2)
    print(str(thread2.thread_name) + " " + str(thread2.thread_ID))
    pid = 0
    aff1 = affinity.get_process_affinity_mask(pid)
    print("Thread1 is eligible to run on:", aff1)

    affinity.set_process_affinity_mask(0, 100)
    print("CPU affinity mask is modified for process id %s" % pid)
    # pid = 0
    aff = affinity.get_process_affinity_mask(pid)
    print("Now, process for th1 is eligible to run on:", aff)

    pid = 0
    aff2 = affinity.get_process_affinity_mask(pid)
    print("Thread2 is eligible to run on:", aff2)

    affinity.set_process_affinity_mask(0, 45)
    print("CPU affinity mask is modified for process id %s" % pid)
    # pid = 0
    aff2 = affinity.get_process_affinity_mask(pid)
    print("Now, process for th2 is eligible to run on:", aff2)

    thread1.start()
    thread2.start()
    thread1.join()
    thread2.join()
Example #6
def fun(i,sParaOfshareMemory,hResultOfshareMemo):
    print('process{} is running'.format(i+1))
    af.set_process_affinity_mask(os.getpid(), pow(2, i))  # bind this process to CPU core i
    sSUBsList,dealFunc,sPara=sParaOfshareMemory
    sSub=sSUBsList[i]
    print('{} is ready to do dealfunc'.format(i+1))
    dealFunc(sSub,sPara,hResultOfshareMemo)
    print('process{} done'.format(i+1))
    return 0
Example #7
    def affinity_cpu(cls, started_process, cpuId):
        p_name = started_process.name
        p_pid = started_process.pid
        if cpuId > 0:
#             cpuId_usage = cls.get_cpu_usage(cpuId)
#             if cpuId_usage(balabala) > 0.8:
#                 print "cpu utilization is %r, more than 80%, \
#                 better binding process %r (pid %r) to another cpu " %(cpuId_usage, p_pid, bin(cpuId))
            affinity.set_process_affinity_mask(p_pid, cpuId)
            print "process: %r (pid %r) is running on processor (after binding): %r" % (p_name, p_pid, bin(cpuId))
        else:
            print "process: %r (pid %r) could not set CPU affinity (%r), continuing..." % (p_name, p_pid, bin(cpuId))
Example #8
def train_individual_cpu(mean_list, sigma_list, pool, env, ARGS, refer_batch,
                         seed):
    jobs = []
    for idx, mean in enumerate(mean_list):
        sigma = sigma_list[idx]
        model = build_model(ARGS)
        #seed = [np.random.randint(1,1000000) for i in range(ARGS.population_size)]
        # create multiprocessing jobs
        for k_id in range(ARGS.population_size):
            # jobs.append(pool.apply_async(
            #             get_reward_atari,
            #             (model,mean,sigma,env,seed[k_id],ARGS,refer_batch,None,False,False,)
            #         ))
            import affinity
            p = mp.Process(target=get_reward_atari,
                           args=(
                               model,
                               mean,
                               sigma,
                               env,
                               seed[k_id],
                               ARGS,
                               refer_batch,
                               None,
                               False,
                               False,
                           ))
            p.start()
            cpurank = affinity.get_process_affinity_mask(p.pid)
            affinity.set_process_affinity_mask(p.pid, cpurank)
            p.join()

    rewards_list, frame_list, models_list, noops_list,detail_rewards_list = [],[],[],[],[]
    rewards, frames, models, noops, detail_rewards = [], [], [], [], []

    # get reward(evaluate)
    for idx, j in enumerate(jobs):
        rewards.append(j.get()[0])
        frames.append(j.get()[1])
        models.append(j.get()[2])
        noops.append(j.get()[3])
        detail_rewards.append(j.get()[4])
    for i in range(ARGS.lam):
        mu = ARGS.population_size
        rewards_list.append(rewards[i * mu:(i + 1) * mu])
        frame_list.append(frames[i * mu:(i + 1) * mu])
        models_list.append(models[i * mu:(i + 1) * mu])
        noops_list.append(noops[i * mu:(i + 1) * mu])
        detail_rewards_list.append(detail_rewards[i * mu:(i + 1) * mu])
    frame_count = np.sum(np.array(frame_list))
    return rewards_list, frame_count, models_list, noops_list, detail_rewards_list
Example #9
def train_parallel_cpu(mean_list, sigma_list, pool, env, ARGS, refer_batch,
                       seed):
    """Evaluates all offsprings of all populations in parallel by population seperately."""
    rewards_list,frame_list,models_list,noops_list,detail_rewards_list= [],[],[],[],[]
    for idx, mean in enumerate(mean_list):
        sigma = sigma_list[idx]
        jobs = []
        model = build_model(ARGS)

        #seed = [np.random.randint(1,1000000) for i in range(ARGS.population_size)]
        # create multiprocessing jobs of population
        for k_id in range(ARGS.population_size):
            #jobs.append(pool.apply_async(
            #            get_reward_atari,
            #            (model,mean,sigma,env,seed[k_id],ARGS,refer_batch,None,False,False,)
            #        ))
            p = mp.Process(target=get_reward_atari,
                           args=(
                               model,
                               mean,
                               sigma,
                               env,
                               seed[k_id],
                               ARGS,
                               refer_batch,
                               None,
                               False,
                               False,
                           ))
            p.start()
            cpurank = affinity.get_process_affinity_mask(p.pid)
            affinity.set_process_affinity_mask(p.pid, cpurank)
            p.join()

        rewards, frames, models, noops, detail_rewards = [], [], [], [], []
        for j in jobs:
            rewards.append(j.get()[0])
            frames.append(j.get()[1])
            models.append(j.get()[2])
            noops.append(j.get()[3])
            detail_rewards.append(j.get()[4])
        rewards_list.append(rewards)
        frame_list.append(frames)
        models_list.append(models)
        noops_list.append(noops)
        detail_rewards_list.append(detail_rewards)

    frame_count = np.sum(np.array(frame_list))
    return rewards_list, frame_count, models_list, noops_list, detail_rewards_list
Example #10
def run(idx):
    affinity.set_process_affinity_mask(os.getpid(), 1 << idx)

    env = open_env()
    k = list(keys)
    random.shuffle(k)
    k = k[:1000]

    while 1:
        with env.begin() as txn:
            nextkey = iter(k).next
            try:
                while 1:
                    hash(txn.get(nextkey()))
            except StopIteration:
                pass
            arr[idx] += len(k)
Example #11
def run(idx):
    if affinity:
        affinity.set_process_affinity_mask(os.getpid(), 1 << idx)

    env = open_env()
    k = list(keys)
    random.shuffle(k)
    k = k[:1000]

    while 1:
        with env.begin() as txn:
            nextkey = iter(k).next
            try:
                while 1:
                    hash(txn.get(nextkey()))
            except StopIteration:
                pass
            arr[idx] += len(k)
Example #12
 def run():
     # make sure that we run the reactor with the sentinel log context,
     # otherwise other PreserveLoggingContext instances will get confused
     # and complain when they see the logcontext arbitrarily swapping
     # between the sentinel and `run` logcontexts.
     with PreserveLoggingContext():
         logger.info("Running")
         if cpu_affinity is not None:
             if not affinity:
                 quit_with_error(
                     "Missing package 'affinity' required for cpu_affinity\n"
                     "option\n\n"
                     "Install by running:\n\n"
                     "   pip install affinity\n\n")
             logger.info("Setting CPU affinity to %s" % cpu_affinity)
             affinity.set_process_affinity_mask(0, cpu_affinity)
         change_resource_limit(soft_file_limit)
         if gc_thresholds:
             gc.set_threshold(*gc_thresholds)
         reactor.run()
Example #13
 def run():
     # make sure that we run the reactor with the sentinel log context,
     # otherwise other PreserveLoggingContext instances will get confused
     # and complain when they see the logcontext arbitrarily swapping
     # between the sentinel and `run` logcontexts.
     with PreserveLoggingContext():
         logger.info("Running")
         if cpu_affinity is not None:
             if not affinity:
                 quit_with_error(
                     "Missing package 'affinity' required for cpu_affinity\n"
                     "option\n\n"
                     "Install by running:\n\n"
                     "   pip install affinity\n\n"
                 )
             logger.info("Setting CPU affinity to %s" % cpu_affinity)
             affinity.set_process_affinity_mask(0, cpu_affinity)
         change_resource_limit(soft_file_limit)
         if gc_thresholds:
             gc.set_threshold(*gc_thresholds)
         reactor.run()
Example #14
def set_processor_affinity(mask, pid=None):
    """Sets the process_affinity to the given cores, either for the current process or the given pid. mask can be an affinity mask or list of cores. Returns success"""
    mask = _create_affinity_mask(mask)
    pid = pid or 0
    previous_mask = affinity.set_process_affinity_mask(pid, mask)
    current_mask = affinity.get_process_affinity_mask(pid)
    current_mask_str = ", ".join(str(i) for i in _affinity_mask_to_list(current_mask))
    if current_mask != mask:
        request_mask_str = ", ".join(str(i) for i in _affinity_mask_to_list(mask))
        logger.warning("Set process affinity for pid %d to cores %s unsuccessful: actually set to %s", pid, request_mask_str, current_mask_str)
        return False
    else:
        logger.info("Set process affinity for pid %d to cores %s", pid, current_mask_str)
        return True
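The helpers _create_affinity_mask and _affinity_mask_to_list used above are not shown in this example. A plausible sketch, assuming an affinity mask is an integer bit field and a core list holds zero-based core indices:

def _create_affinity_mask(mask_or_cores):
    # Accept either an integer mask or an iterable of core indices
    # (an assumption based on the docstring) and return an integer bit mask.
    if isinstance(mask_or_cores, int):
        return mask_or_cores
    mask = 0
    for core in mask_or_cores:
        mask |= 1 << core
    return mask

def _affinity_mask_to_list(mask):
    # Expand an integer bit mask into the sorted list of core indices it covers.
    return [i for i in range(mask.bit_length()) if mask & (1 << i)]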
Example #15
 def setAffinity():
    if affinitymask: 
       set_process_affinity_mask(os.getpid(),affinitymask)
Example #16
    pass

print "robot-sandbox (development version) starting..."
print " "

import cellulose  # py2exe does not find it
import cellulose.extra  # py2exe does not find it
import cellulose.extra.restrictions  # py2exe does not find it
import pprint

import sys
import os

try:
    import affinity
    affinity.set_process_affinity_mask(os.getpid(), 1)
except:
    print "module affinity not found"

print "cwd: ", os.getcwd()
(path, file) = os.path.split(os.getcwd())

#print path
#print file

if file != 'sim-files':
    print "abs path: ", os.path.abspath(__file__)
    [basepath, file] = os.path.split(os.path.abspath(__file__))
    libpath = os.path.join(basepath, "dist", "library.zip")
    print "base folder:", basepath
    print "library zip:", libpath
Example #17
            kp, kd = det.detectAndCompute(frm, None)
            frmc = cv2.cvtColor(frm.copy(), cv2.COLOR_GRAY2BGR)
            fp.plotFPoints(frmc, kp, rad=3)
            fkp = '%s_%s.kp' % (fname, tt)
            fimgOut = '%s_%s_kp.png' % (fname, tt)
            cv2.imwrite(fimgOut, frmc)
            fp.kp2file(fkp, kp, kd)
            print "--> %s : %s" % (tt, fkp)


#############################
if __name__ == '__main__':
    #
    try:
        print affinity.get_process_affinity_mask(0)
        affinity.set_process_affinity_mask(0, 2**mp.cpu_count() - 1)
    except:
        pass
    #
    cap = ph.VideoCSVReader(fidx)
    cap.printInfo()
    taskManager = fp.TaskManager()
    for tt in listDet:
        print tt
        det = fp.getDetByName(tt)
        if det == None:
            print 'ERROR: incorrect detector [%s], exit...' % tt
            sys.exit(1)
        ret = True
        numFrames = cap.getNumFrames()
        for ii in xrange(numFrames):
Example #18
    def config(self, config, servername=None, dbconfig=None,
               memconfig=None, redis_config=None, masterconf=None,
               model_default_config=None, model_config=None, msdk_config=None):
        """配置服务器"""
        GlobalObject().json_config = config
        GlobalObject().json_model_config = model_default_config
        GlobalObject().json_model_config = model_config
        netport = config.get('netport')  # client connection
        webport = config.get('webport')  # HTTP connection
        rootport = config.get('rootport')  # root node configuration
        self.remoteportlist = config.get('remoteport', [])  # remote node configuration list
        if not servername:
            servername = config.get('name')  # server name
        logpath = config.get('log')  # log
        hasdb = config.get('db')  # database connection
        # hasmem = config.get('mem')  # memcached connection
        hasredis = config.get('redis')  # redis connection
        app = config.get('app')  # entry module name
        cpuid = config.get('cpu')  # CPU to bind
        mreload = config.get('reload')  # module name to reload
        self.servername = servername
        if servername == 'net':
            time.sleep(6)

        if logpath:
            log_init(logpath)  # log handling

        if netport:
            self.netfactory = LiberateFactory()
            netservice = services.CommandService("netservice")
            self.netfactory.addServiceChannel(netservice)
            reactor.listenTCP(netport, self.netfactory)

        if webport:
            self.webroot = Flask("master")
            GlobalObject().webroot = self.webroot
            self.webroot.debug = True
            # reactor.listenTCP(webport, self.webroot)
            reactor.listenWSGI(webport, self.webroot)

        if rootport:
            self.root = PBRoot()
            rootservice = services.Service("rootservice")
            self.root.addServiceChannel(rootservice)
            reactor.listenTCP(rootport, BilateralFactory(self.root))

        for cnf in self.remoteportlist:
            rname = cnf.get('rootname')
            self.remote[rname] = RemoteObject(self.servername)

        if hasdb and dbconfig:
            # logger.info(str(dbconfig))
            dbpool.initPool(**dbconfig)

        if hasredis and redis_config:
            connection_setting = redis_config.get('urls')
            redis_manager.connection_setup(connection_setting)

        if cpuid:
            affinity.set_process_affinity_mask(os.getpid(), cpuid)
        GlobalObject().config(netfactory=self.netfactory, root=self.root,
                              remote=self.remote)
        if app:
            __import__(app)
        if mreload:
            _path_list = mreload.split(".")
            GlobalObject().reloadmodule = __import__(mreload, fromlist=_path_list[:1])
        GlobalObject().remote_connect = self.remote_connect

        if masterconf:
            masterport = masterconf.get('rootport')
            masterhost = masterconf.get('roothost')
            self.master_remote = RemoteObject(servername)
            addr = ('localhost', masterport) if not masterhost else (masterhost, masterport)
            self.master_remote.connect(addr)
            GlobalObject().masterremote = self.master_remote

        if msdk_config:
            #zone_id = msdk_config.get("zone_id")
            host = msdk_config.get("host")
            #pay_host = msdk_config.get("pay_host")
            goods_host = msdk_config.get("buy_goods_host")
            valid_host = msdk_config.get("valid_host")
            qq_appid = msdk_config.get("qq_appid")
            qq_appkey = msdk_config.get("qq_appkey")
            wx_appid = msdk_config.get("wx_appid")
            wx_appkey = msdk_config.get("wx_appkey")
            log = logger_sdk.new_log('TxApi')
            GlobalObject().msdk = Msdk(host, qq_appid, qq_appkey, wx_appid, wx_appkey, log=log)
            GlobalObject().pay = MidasApi(host, goods_host, valid_host, log=log)
        import admin
Example #19
    def config(self,
               config,
               servername=None,
               dbconfig=None,
               memconfig=None,
               masterconf=None):
        '''Configure the server.
        '''
        GlobalObject().json_config = config
        GlobalObject().remote_connect = self.remote_connect
        netport = config.get('netport')  # client connection
        webport = config.get('webport')  # HTTP connection
        rootport = config.get('rootport')  # root node configuration
        self.remoteportlist = config.get('remoteport', [])  # remote node configuration list
        if not servername:
            servername = config.get('name')  # server name
        logpath = config.get('log')  # log
        hasdb = config.get('db')  # database connection
        hasmem = config.get('mem')  # memcached connection
        app = config.get('app')  # entry module name
        cpuid = config.get('cpu')  # CPU to bind
        mreload = config.get('reload')  # module name to reload
        self.servername = servername

        if netport:
            self.netfactory = LiberateFactory()
            netservice = services.CommandService("netservice")
            self.netfactory.addServiceChannel(netservice)
            reactor.listenTCP(netport, self.netfactory)

        if webport:
            self.webroot = Flask("servername")
            GlobalObject().webroot = self.webroot
            reactor.listenWSGI(webport, self.webroot)

        if rootport:
            self.root = PBRoot()
            rootservice = services.Service("rootservice")
            self.root.addServiceChannel(rootservice)
            reactor.listenTCP(rootport, BilateralFactory(self.root))

        for cnf in self.remoteportlist:
            rname = cnf.get('rootname')
            self.remote[rname] = RemoteObject(self.servername)

        if hasdb and dbconfig:
            if dbconfig.has_key("user") and dbconfig.has_key(
                    "host") and dbconfig.has_key("host"):
                dbpool.initPool({"default": dbconfig})
            else:
                dbpool.initPool(dbconfig)

        if hasmem and memconfig:
            urls = memconfig.get('urls')
            #             hostname = str(memconfig.get('hostname'))
            memcached_connect(urls)
            from gfirefly.dbentrust.util import M2DB_PORT, M2DB_HOST, ToDBAddress
            ToDBAddress().setToDBHost(memconfig.get("pubhost", M2DB_HOST))
            ToDBAddress().setToDBPort(memconfig.get("pubport", M2DB_PORT))

        if logpath:
            log.addObserver(loogoo(logpath))  # log handling
        log.startLogging(sys.stdout)

        if cpuid:
            affinity.set_process_affinity_mask(os.getpid(), cpuid)
        GlobalObject().config(netfactory=self.netfactory,
                              root=self.root,
                              remote=self.remote)

        if app:
            __import__(app)
        if mreload:
            _path_list = mreload.split(".")
            GlobalObject().reloadmodule = __import__(mreload,
                                                     fromlist=_path_list[:1])

        if masterconf:
            masterport = masterconf.get('rootport')
            masterhost = masterconf.get('roothost')
            self.master_remote = RemoteObject(servername)
            GlobalObject().masterremote = self.master_remote
            import admin
            addr = ('localhost',
                    masterport) if not masterhost else (masterhost, masterport)
            self.master_remote.connect(addr)
Example #20
def main():
    from suite import benchmarks
    # GitRepo wants exactly 7 character hash?
    if args.base_commit:
        args.base_commit = args.base_commit[:7]
    if args.target_commit:
        args.target_commit = args.target_commit[:7]

    if not args.log_file:
        args.log_file = os.path.abspath(
            os.path.join(REPO_PATH, 'vb_suite.log'))

    saved_dir = os.path.curdir
    if args.outdf:
        # not bullet-proof but enough for us
        args.outdf = os.path.realpath(args.outdf)

    if args.log_file:
        # not bullet-proof but enough for us
        args.log_file = os.path.realpath(args.log_file)

    random.seed(args.seed)
    np.random.seed(args.seed)

    try:
        import affinity
        affinity.set_process_affinity_mask(0,args.affinity)
        assert affinity.get_process_affinity_mask(0) == args.affinity
        print("CPU affinity set to %d" % args.affinity)
    except ImportError:
        import warnings
        print("\n\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"+
                      "The 'affinity' module is not available, results may be unreliable\n" +
                      "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n"
            )
        time.sleep(2)

    print("\n")
    prprint("LOG_FILE = %s" % args.log_file)
    if args.outdf:
        prprint("PICKE_FILE = %s" % args.outdf)

    print("\n")

    # move away from the pandas root dit, to avoid possible import
    # surprises
    os.chdir(os.path.dirname(os.path.abspath(__file__)))

    benchmarks = [x for x in benchmarks if re.search(args.regex,x.name)]

    for b in benchmarks:
        b.repeat = args.repeats
        if args.ncalls:
            b.ncalls = args.ncalls

    if benchmarks:
        if args.head:
            profile_head(benchmarks)
        else:
            profile_comparative(benchmarks)
    else:
        print( "No matching benchmarks")

    os.chdir(saved_dir)
Example #21
def main():
    from suite import benchmarks
    # GitRepo wants exactly 7 character hash?
    if args.base_commit:
        args.base_commit = args.base_commit[:7]
    if args.target_commit:
        args.target_commit = args.target_commit[:7]

    if not args.log_file:
        args.log_file = os.path.abspath(os.path.join(REPO_PATH,
                                                     'vb_suite.log'))

    saved_dir = os.path.curdir
    if args.outdf:
        # not bullet-proof but enough for us
        args.outdf = os.path.realpath(args.outdf)

    if args.log_file:
        # not bullet-proof but enough for us
        args.log_file = os.path.realpath(args.log_file)

    random.seed(args.seed)
    np.random.seed(args.seed)

    affinity_set = False

    # try psutil first since it is more commonly present and better
    # maintained.  Some people experienced problems with affinity package
    # (see https://code.google.com/p/psutil/issues/detail?id=238 for more references)
    try:
        import psutil
        if hasattr(psutil.Process, 'set_cpu_affinity'):
            psutil.Process(os.getpid()).set_cpu_affinity([args.affinity])
            affinity_set = True
    except ImportError:
        pass

    if not affinity_set:
        try:
            import affinity
            affinity.set_process_affinity_mask(0, args.affinity)
            assert affinity.get_process_affinity_mask(0) == args.affinity
            affinity_set = True
        except ImportError:
            pass

    if not affinity_set:
        import warnings
        warnings.warn(
            "\n\n"
            "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
            "The 'affinity' or 'psutil' >= 0.5.0 modules are not available, results may be unreliable\n"
            "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n"
        )
        time.sleep(2)
    else:
        print("CPU affinity set to %d" % args.affinity)

    print("\n")
    prprint("LOG_FILE = %s" % args.log_file)
    if args.outdf:
        prprint("PICKE_FILE = %s" % args.outdf)

    print("\n")

    # move away from the pandas root dit, to avoid possible import
    # surprises
    os.chdir(os.path.dirname(os.path.abspath(__file__)))

    benchmarks = [x for x in benchmarks if re.search(args.regex, x.name)]

    for b in benchmarks:
        b.repeat = args.repeats
        if args.ncalls:
            b.ncalls = args.ncalls

    if benchmarks:
        if args.head:
            profile_head(benchmarks)
        else:
            profile_comparative(benchmarks)
    else:
        print("No matching benchmarks")

    os.chdir(saved_dir)
Example #22
import os
import sys
import traceback
import json
import logging
import logging.config
import argparse
from StringIO import StringIO
from multiprocessing import cpu_count
from multiprocessing import RLock
from multiprocessing import Pool
from twhere.exprmodels import experiment
from twhere.config import Configuration

try:
    import affinity
    affinity.set_process_affinity_mask(os.getpid(), (1 << cpu_count()) - 1)
except:  # pylint: disable-msg=W0702
    print >>sys.stderr, 'WARN: Fail on setting CPU affinity, check cpu loads!'

CITY = dict(zip(['NY', 'CH', 'LA', 'SF'],
                ['27485069891a7938',
                 '1d9a5370a355ab0c',
                 '3b77caf94bfc81fe',
                 '5a110d312052166f']))

LOGGING_CONF = {'version': 1,
                'formatters': {
                'simple': {'format':
                           "%(asctime)s %(process)d %(name)s "
                           "[%(levelname)s] %(message)s"}
                },
Example #23
You need to have matplotlib installed for this to work.
"""

import affinity
import argparse
from collections import defaultdict
import csv
import itertools
import multiprocessing
import os.path
import pickle

import numpy as np

# necessary to do this after importing numpy to take advantage of
# multiple cores on unix
affinity.set_process_affinity_mask(0, 2**multiprocessing.cpu_count() - 1)

import matplotlib

# Improve cross-system compatability for writing PDFs with this call.
# Note, it must be called before pyplot and backend_pdf are imported.
matplotlib.use('Agg')

import matplotlib.pyplot as plt
import matplotlib.backends.backend_pdf as pdf

import accuracy_model_util

# Must be in PYTHONPATH
import knowledge_state
import accuracy_model
Example #24
            kp,kd=det.detectAndCompute(frm, None)
            frmc=cv2.cvtColor(frm.copy(), cv2.COLOR_GRAY2BGR)
            fp.plotFPoints(frmc, kp, rad=3)
            fkp='%s_%s.kp' % (fname, tt)
            fimgOut='%s_%s_kp.png' % (fname, tt)
            cv2.imwrite(fimgOut, frmc)
            fp.kp2file(fkp, kp,kd)
            print "--> %s : %s" % (tt, fkp)


#############################
if __name__=='__main__':
    #
    try:
        print affinity.get_process_affinity_mask(0)
        affinity.set_process_affinity_mask(0,2**mp.cpu_count()-1)
    except:
        pass
    #
    cap=ph.VideoCSVReader(fidx)
    cap.printInfo()
    taskManager=fp.TaskManager()
    for tt in listDet:
        print tt
        det=fp.getDetByName(tt)
        if det==None:
            print 'ERROR: incorrect detector [%s], exit...' % tt
            sys.exit(1)
        ret=True
        numFrames=cap.getNumFrames()
        for ii in xrange(numFrames):
Example #25
    def config(self,
               config,
               servername=None,
               dbconfig=None,
               memconfig=None,
               redis_config=None,
               masterconf=None,
               model_default_config=None,
               model_config=None,
               msdk_config=None):
        """配置服务器"""
        GlobalObject().json_config = config
        GlobalObject().json_model_config = model_default_config
        GlobalObject().json_model_config = model_config
        netport = config.get('netport')  # client connection
        webport = config.get('webport')  # HTTP connection
        rootport = config.get('rootport')  # root node configuration
        self.remoteportlist = config.get('remoteport', [])  # remote node configuration list
        if not servername:
            servername = config.get('name')  # server name
        logpath = config.get('log')  # log
        hasdb = config.get('db')  # database connection
        # hasmem = config.get('mem')  # memcached connection
        hasredis = config.get('redis')  # redis connection
        app = config.get('app')  # entry module name
        cpuid = config.get('cpu')  # CPU to bind
        mreload = config.get('reload')  # module name to reload
        self.servername = servername
        #if servername == 'net':
        #time.sleep(6)
        #if servername == 'gate':
        #time.sleep(16)

        if logpath:
            log_init(logpath)  # log handling

        if netport:
            self.netfactory = LiberateFactory()
            netservice = services.CommandService("netservice")
            self.netfactory.addServiceChannel(netservice)
            reactor.listenTCP(netport, self.netfactory)

        if webport:
            self.webroot = Flask("master")
            GlobalObject().webroot = self.webroot
            self.webroot.debug = True
            # reactor.listenTCP(webport, self.webroot)
            reactor.listenWSGI(webport, self.webroot)

        if rootport:
            self.root = PBRoot()
            rootservice = services.Service("rootservice")
            self.root.addServiceChannel(rootservice)
            reactor.listenTCP(rootport, BilateralFactory(self.root))

        for cnf in self.remoteportlist:
            rname = cnf.get('rootname')
            self.remote[rname] = RemoteObject(self.servername)

        if hasdb and dbconfig:
            # logger.info(str(dbconfig))
            dbpool.initPool(**dbconfig)

        if hasredis and redis_config:
            connection_setting = redis_config.get('urls')
            redis_manager.connection_setup(connection_setting)

        if cpuid:
            affinity.set_process_affinity_mask(os.getpid(), cpuid)
        GlobalObject().config(netfactory=self.netfactory,
                              root=self.root,
                              remote=self.remote)
        if app:
            __import__(app)
        if mreload:
            _path_list = mreload.split(".")
            GlobalObject().reloadmodule = __import__(mreload,
                                                     fromlist=_path_list[:1])
        GlobalObject().remote_connect = self.remote_connect

        if masterconf:
            masterport = masterconf.get('rootport')
            masterhost = masterconf.get('roothost')
            self.master_remote = RemoteObject(servername)
            addr = ('localhost',
                    masterport) if not masterhost else (masterhost, masterport)
            self.master_remote.connect(addr)
            GlobalObject().masterremote = self.master_remote

        if msdk_config:
            #zone_id = msdk_config.get("zone_id")
            host = msdk_config.get("host")
            #pay_host = msdk_config.get("pay_host")
            goods_host = msdk_config.get("buy_goods_host")
            valid_host = msdk_config.get("valid_host")
            qq_appid = msdk_config.get("qq_appid")
            qq_appkey = msdk_config.get("qq_appkey")
            wx_appid = msdk_config.get("wx_appid")
            wx_appkey = msdk_config.get("wx_appkey")
            log = logger_sdk.new_log('TxApi')
            GlobalObject().msdk = Msdk(host,
                                       qq_appid,
                                       qq_appkey,
                                       wx_appid,
                                       wx_appkey,
                                       log=log)
            GlobalObject().pay = MidasApi(host,
                                          goods_host,
                                          valid_host,
                                          log=log)
        import admin
Example #26
 def setAffinity():
     if affinitymask:
         set_process_affinity_mask(os.getpid(), affinitymask)
Example #27

print "robot-sandbox (development version) starting..."
print " "

import cellulose    # py2exe does not find it
import cellulose.extra    # py2exe does not find it
import cellulose.extra.restrictions    # py2exe does not find it
import pprint

import sys
import os

try:
    import affinity
    affinity.set_process_affinity_mask(os.getpid(),1)
except:
    print "module affinity not found"

print "cwd: ", os.getcwd()
(path,file) = os.path.split(os.getcwd())

#print path
#print file

if file != 'sim-files':
    print "abs path: ", os.path.abspath(__file__)
    [basepath,file] = os.path.split(os.path.abspath(__file__))
    libpath = os.path.join(basepath, "dist", "library.zip")
    print "base folder:", basepath
    print "library zip:", libpath
Example #28
def main():
    from suite import benchmarks
    # GitRepo wants exactly 7 character hash?
    if args.base_commit:
        args.base_commit = args.base_commit[:7]
    if args.target_commit:
        args.target_commit = args.target_commit[:7]

    if not args.log_file:
        args.log_file = os.path.abspath(os.path.join(REPO_PATH,
                                                     'vb_suite.log'))

    saved_dir = os.path.curdir
    if args.outdf:
        # not bullet-proof but enough for us
        args.outdf = os.path.realpath(args.outdf)

    if args.log_file:
        # not bullet-proof but enough for us
        args.log_file = os.path.realpath(args.log_file)

    random.seed(args.seed)
    np.random.seed(args.seed)

    if args.base_pickle and args.target_pickle:
        baseline_res = prep_pickle_for_total(pd.load(args.base_pickle))
        target_res = prep_pickle_for_total(pd.load(args.target_pickle))

        report_comparative(target_res, baseline_res)
        sys.exit(0)

    if args.affinity is not None:
        try:
            import affinity

            affinity.set_process_affinity_mask(0, args.affinity)
            assert affinity.get_process_affinity_mask(0) == args.affinity
            print("CPU affinity set to %d" % args.affinity)
        except ImportError:
            print(
                "-a/--afinity specified, but the 'affinity' module is not available, aborting.\n"
            )
            sys.exit(1)

    print("\n")
    prprint("LOG_FILE = %s" % args.log_file)
    if args.outdf:
        prprint("PICKE_FILE = %s" % args.outdf)

    print("\n")

    # move away from the pandas root dit, to avoid possible import
    # surprises
    os.chdir(os.path.dirname(os.path.abspath(__file__)))

    benchmarks = [x for x in benchmarks if re.search(args.regex, x.name)]

    for b in benchmarks:
        b.repeat = args.repeats
        if args.ncalls:
            b.ncalls = args.ncalls

    if benchmarks:
        if args.head:
            profile_head(benchmarks)
        else:
            profile_comparative(benchmarks)
    else:
        print("No matching benchmarks")

    os.chdir(saved_dir)
Example #29
import os
import sys
import traceback
import json
import logging
import logging.config
import argparse
from StringIO import StringIO
from multiprocessing import cpu_count
from multiprocessing import RLock
from multiprocessing import Pool
from twhere.exprmodels import experiment
from twhere.config import Configuration

try:
    import affinity
    affinity.set_process_affinity_mask(os.getpid(), (1 << cpu_count()) - 1)
except:  # pylint: disable-msg=W0702
    print >> sys.stderr, 'WARN: Fail on setting CPU affinity, check cpu loads!'

CITY = dict(
    zip(['NY', 'CH', 'LA', 'SF'], [
        '27485069891a7938', '1d9a5370a355ab0c', '3b77caf94bfc81fe',
        '5a110d312052166f'
    ]))

LOGGING_CONF = {
    'version': 1,
    'formatters': {
        'simple': {
            'format':
            "%(asctime)s %(process)d %(name)s "
Example #30
    def set_config(self):
        """
        Initialize the node service configuration.
        :return:
        """
        config = Config().config
        ser_cfg = config.get("servers", {}).get(self.servername)
        if not ser_cfg:
            raise ValueError
        mem_cfg = config.get("cache")
        master_cfg = config.get("master")
        db_cfg = config.get("db")

        GlobalObject().json_config = ser_cfg
        netport = ser_cfg.get('netport')  # client connection
        webport = ser_cfg.get('webport')  # HTTP connection
        rootport = ser_cfg.get('rootport')  # root node configuration
        wsport = ser_cfg.get("wsport")  # WebSocket port
        self.remoteportlist = ser_cfg.get('remoteport', [])  # remote node configuration list
        logpath = ser_cfg.get('log')  # log
        hasdb = ser_cfg.get('db')  # database connection
        hasmem = ser_cfg.get('mem')  # memcached connection
        app = ser_cfg.get('app')  # entry module name
        cpuid = ser_cfg.get('cpu')  # CPU to bind
        mreload = ser_cfg.get('reload')  # module name to reload

        if master_cfg:
            masterport = master_cfg.get('rootport')
            masterhost = master_cfg.get('roothost')
            self.master_remote = RemoteObject(self.servername)
            addr = ('localhost',
                    masterport) if not masterhost else (masterhost, masterport)
            self.master_remote.connect(addr)
            GlobalObject().masterremote = self.master_remote

        if netport:
            self.netfactory = LiberateFactory()
            netservice = services.CommandService("netservice")
            self.netfactory.addServiceChannel(netservice)
            reactor.listenTCP(netport, self.netfactory)

        if webport:
            self.webroot = vhost.NameVirtualHost()
            GlobalObject().webroot = self.webroot
            reactor.listenTCP(webport, DelaySite(self.webroot))

        if rootport:
            self.root = PBRoot()
            rootservice = services.Service("rootservice")
            self.root.addServiceChannel(rootservice)
            reactor.listenTCP(rootport, BilateralFactory(self.root))

        if wsport:
            self.ws = WsFactory(wsport)
            wsservice = services.CommandService("wsservice")
            self.ws.addServiceChannel(wsservice)
            reactor.listenTCP(wsport, self.ws)

        for cnf in self.remoteportlist:
            rname = cnf.get('rootname')
            self.remote[rname] = RemoteObject(self.servername)

        if hasdb and db_cfg:
            log.msg(str(db_cfg))
            dbpool.initPool(**db_cfg)

        if hasmem and mem_cfg:
            urls = mem_cfg.get('urls')
            hostname = str(mem_cfg.get('hostname'))
            mclient.connect(urls, hostname)

        if logpath:
            log.addObserver(loogoo(logpath))  # log handling
        log.startLogging(sys.stdout)

        if cpuid:
            affinity.set_process_affinity_mask(os.getpid(), cpuid)

        GlobalObject().config(netfactory=self.netfactory,
                              root=self.root,
                              remote=self.remote)
        GlobalObject().server = self

        if app:
            __import__(app)
        if mreload:
            _path_list = mreload.split(".")
            GlobalObject().reloadmodule = __import__(mreload,
                                                     fromlist=_path_list[:1])
        GlobalObject().remote_connect = self.remote_connect
        import admin
Example #31
    def config(self, config, servername=None, dbconfig=None,
               memconfig=None, redis_config=None, masterconf=None, model_default_config=None,
               model_config=None):
        """配置服务器"""
        GlobalObject().json_config = config
        GlobalObject().json_model_config = model_default_config
        GlobalObject().json_model_config = model_config
        netport = config.get('netport')  # client connection
        webport = config.get('webport')  # HTTP connection
        rootport = config.get('rootport')  # root node configuration
        self.remoteportlist = config.get('remoteport', [])  # remote node configuration list
        if not servername:
            servername = config.get('name')  # server name
        logpath = config.get('log')  # log
        hasdb = config.get('db')  # database connection
        hasmem = config.get('mem')  # memcached connection
        hasredis = config.get('redis')  # redis connection
        app = config.get('app')  # entry module name
        cpuid = config.get('cpu')  # CPU to bind
        mreload = config.get('reload')  # module name to reload
        self.servername = servername

        if logpath:
            log_init(logpath)  # log handling

        if netport:
            self.netfactory = LiberateFactory()
            netservice = services.CommandService("netservice")
            self.netfactory.addServiceChannel(netservice)
            reactor.listenTCP(netport, self.netfactory)

        if webport:
            self.webroot = Flask("master")
            GlobalObject().webroot = self.webroot
            self.webroot.debug = True
            # reactor.listenTCP(webport, self.webroot)
            reactor.listenWSGI(webport, self.webroot)

        if rootport:
            self.root = PBRoot()
            rootservice = services.Service("rootservice")
            self.root.addServiceChannel(rootservice)
            reactor.listenTCP(rootport, BilateralFactory(self.root))

        for cnf in self.remoteportlist:
            rname = cnf.get('rootname')
            self.remote[rname] = RemoteObject(self.servername)

        if hasdb and dbconfig:
            # logger.info(str(dbconfig))
            dbpool.initPool(**dbconfig)

        if hasmem and memconfig:
            urls = memconfig.get('urls')
            hostname = str(memconfig.get('hostname'))
            mclient.connect(urls, hostname)

        if hasredis and redis_config:
            host = redis_config.get("host")
            port = redis_config.get("port")
            db = redis_config.get("db", 0)
            redis_client.connect(host, port, db)

        if cpuid:
            affinity.set_process_affinity_mask(os.getpid(), cpuid)
        GlobalObject().config(netfactory=self.netfactory, root=self.root,
                              remote=self.remote)
        if app:
            __import__(app)
        if mreload:
            _path_list = mreload.split(".")
            GlobalObject().reloadmodule = __import__(mreload, fromlist=_path_list[:1])
        GlobalObject().remote_connect = self.remote_connect

        if masterconf:
            masterport = masterconf.get('rootport')
            masterhost = masterconf.get('roothost')
            self.master_remote = RemoteObject(servername)
            addr = ('localhost', masterport) if not masterhost else (masterhost, masterport)
            self.master_remote.connect(addr)
            GlobalObject().masterremote = self.master_remote

        import admin
Example #32
except:
    print >> sys.stderr, 'Error: arguments'
    sys.exit()
if len(repeats) == 1: repeats *= 2
elif len(repeats) > 2 or any(t <= 0 for t in repeats):
    print >> sys.stderr, 'Error: arguments'
    sys.exit()
comp_repeats, decomp_repeats = repeats

# Lock to a single core (reduces context switches, picks highest affinity bit)
# Only available if the affinity module has been installed
try:
    import affinity
    mask, i = affinity.get_process_affinity_mask(os.getpid()), -1
    while mask: mask >>= 1; i += 1
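    # i is now the index of the highest set bit in the original mask;
    # 1 << (i - (i & 1)) rounds that index down to an even core number before re-pinning.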
    affinity.set_process_affinity_mask(os.getpid(), 1 << (i-(i&1)))
except: pass

# Attempt to increase the priority to very high
try:
    import win32api, win32process
    win32process.SetPriorityClass(-1, win32process.HIGH_PRIORITY_CLASS)
    win32process.SetPriorityClass(-1, win32process.REALTIME_PRIORITY_CLASS)
except:
    try:
        while True: os.nice(-1)
    except: pass

nfiles = 0
full_size = 0
compressed_size = 0
Example #33
    def config(self,
               config,
               servername=None,
               dbconfig=None,
               memconfig=None,
               masterconf=None):
        '''Configure the server.
        '''
        GlobalObject().json_config = config
        netport = config.get('netport')  # client connection
        webport = config.get('webport')  # HTTP connection
        rootport = config.get('rootport')  # root node configuration
        self.remoteportlist = config.get('remoteport', [])  # remote node configuration list
        if not servername:
            servername = config.get('name')  # server name
        logpath = config.get('log')  # log
        hasdb = config.get('db')  # database connection
        hasmem = config.get('mem')  # memcached connection
        app = config.get('app')  # entry module name
        cpuid = config.get('cpu')  # CPU to bind
        mreload = config.get('reload')  # module name to reload
        self.servername = servername
        if masterconf:
            masterport = masterconf.get('rootport')
            masterhost = masterconf.get('roothost')
            self.master_remote = RemoteObject(servername, "master")
            addr = ('localhost',
                    masterport) if not masterhost else (masterhost, masterport)
            self.master_remote.connect(addr)
            GlobalObject().masterremote = self.master_remote

        if netport:
            self.netfactory = LiberateFactory()
            netservice = services.CommandService("netservice")
            self.netfactory.addServiceChannel(netservice)
            reactor.listenTCP(netport, self.netfactory)

        if webport:
            self.webroot = vhost.NameVirtualHost()
            GlobalObject().webroot = self.webroot
            reactor.listenTCP(webport, DelaySite(self.webroot))

        if rootport:
            self.root = PBRoot()
            rootservice = services.Service("rootservice")
            self.root.addServiceChannel(rootservice)
            reactor.listenTCP(rootport, BilateralFactory(self.root))

        for cnf in self.remoteportlist:
            rname = cnf.get('rootname')
            self.remote[rname] = RemoteObject(self.servername, rname)

        if hasdb and dbconfig:
            log.msg(str(dbconfig))
            dbpool.initPool(**dbconfig)

        if hasmem and memconfig:
            urls = memconfig.get('urls')
            hostname = str(memconfig.get('hostname'))
            mclient.connect(urls, hostname)

        if logpath:
            log.addObserver(loogoo(logpath))  # log handling
        log.startLogging(sys.stdout)

        if cpuid:
            affinity.set_process_affinity_mask(os.getpid(), cpuid)
        GlobalObject().config(netfactory=self.netfactory,
                              root=self.root,
                              remote=self.remote)
        if app:
            __import__(app)
        if mreload:
            _path_list = mreload.split(".")
            GlobalObject().reloadmodule = __import__(mreload,
                                                     fromlist=_path_list[:1])
        GlobalObject().remote_connect = self.remote_connect
        import admin
Example #34
def main():
    from suite import benchmarks
    # GitRepo wants exactly 7 character hash?
    if args.base_commit:
        args.base_commit = args.base_commit[:7]
    if args.target_commit:
        args.target_commit = args.target_commit[:7]

    if not args.log_file:
        args.log_file = os.path.abspath(
            os.path.join(REPO_PATH, 'vb_suite.log'))

    saved_dir = os.path.curdir
    if args.outdf:
        # not bullet-proof but enough for us
        args.outdf = os.path.realpath(args.outdf)

    if args.log_file:
        # not bullet-proof but enough for us
        args.log_file = os.path.realpath(args.log_file)

    random.seed(args.seed)
    np.random.seed(args.seed)

    affinity_set = False

    # try psutil first since it is more commonly present and better
    # maintained.  Some people experienced problems with affinity package
    # (see https://code.google.com/p/psutil/issues/detail?id=238 for more references)
    try:
        import psutil
        if hasattr(psutil.Process, 'set_cpu_affinity'):
            psutil.Process(os.getpid()).set_cpu_affinity([args.affinity])
            affinity_set = True
    except ImportError:
        pass

    if not affinity_set:
        try:
            import affinity
            affinity.set_process_affinity_mask(0, args.affinity)
            assert affinity.get_process_affinity_mask(0) == args.affinity
            affinity_set = True
        except ImportError:
            pass

    if not affinity_set:
        import warnings
        warnings.warn("\n\n"
              "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
              "The 'affinity' or 'psutil' >= 0.5.0 modules are not available, results may be unreliable\n"
              "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n"
            )
        time.sleep(2)
    else:
        print("CPU affinity set to %d" % args.affinity)

    print("\n")
    prprint("LOG_FILE = %s" % args.log_file)
    if args.outdf:
        prprint("PICKE_FILE = %s" % args.outdf)

    print("\n")

    # move away from the pandas root dit, to avoid possible import
    # surprises
    os.chdir(os.path.dirname(os.path.abspath(__file__)))

    benchmarks = [x for x in benchmarks if re.search(args.regex,x.name)]

    for b in benchmarks:
        b.repeat = args.repeats
        if args.ncalls:
            b.ncalls = args.ncalls

    if benchmarks:
        if args.head:
            profile_head(benchmarks)
        else:
            profile_comparative(benchmarks)
    else:
        print( "No matching benchmarks")

    os.chdir(saved_dir)
Example #35
                cpu_status['job'].status = 'fail'
                cpu_status['job'].failure_count += 1
                cpu_status['job'].error = cpu_status['process'].stderr.read() + '\n'
                cpu_status['job'].save()
                self.__setitem__(cpu, self.CPU_IDLE_FLAG)

    def start_job(self, job):
        # Find the first unused CPU
        for cpu, pid in self.items():
            if pid == self.CPU_IDLE_FLAG:
                break
        # cpu is the processor to use
        popen_args_tuple = tuple(job.configuration.run_script.split(' '))
        popen_args_tuple += (job.input_file,)
        #logger.debug('Job Popen args: %s' % ' '.join(popen_args_tuple))
        try:
            job.status = 'processing'
            self.__setitem__(cpu, {
                'job': job,
                'process': subprocess.Popen(popen_args_tuple, stderr=subprocess.PIPE)})
        except Exception, e:
            job.status = 'startfail'
            job.error = str(e)
            job.failure_count += 1
        else:
            mask = 2**(cpu)
            affinity.set_process_affinity_mask(self.__getitem__(cpu)['process'].pid, mask)
        finally:
            job.save()

Example #36
                self.__setitem__(cpu, self.CPU_IDLE_FLAG)

    def start_job(self, job):
        # Find the first unused CPU
        for cpu, pid in self.items():
            if pid == self.CPU_IDLE_FLAG:
                break
        # cpu is the processor to use
        popen_args_tuple = tuple(job.configuration.run_script.split(' '))
        popen_args_tuple += (job.input_file, )
        #logger.debug('Job Popen args: %s' % ' '.join(popen_args_tuple))
        try:
            job.status = 'processing'
            self.__setitem__(
                cpu, {
                    'job':
                    job,
                    'process':
                    subprocess.Popen(popen_args_tuple, stderr=subprocess.PIPE)
                })
        except Exception, e:
            job.status = 'startfail'
            job.error = str(e)
            job.failure_count += 1
        else:
            mask = 2**(cpu)
            affinity.set_process_affinity_mask(
                self.__getitem__(cpu)['process'].pid, mask)
        finally:
            job.save()
Example #37
def set_affinity():
    affinity.set_process_affinity_mask(os.getpid(), 0xFFFFFFFF)   
Example #38
import argparse
import datetime
import multiprocessing
import os
import shutil
import sys

from mirt import mirt_train_EM, generate_predictions, score
from mirt import visualize, adaptive_pretest, generate_responses
from train_util import model_training_util

# Necessary on some systems to make sure all cores are used. If not all
# cores are being used and you'd like a speedup, pip install affinity
try:
    import affinity
    affinity.set_process_affinity_mask(0, 2 ** multiprocessing.cpu_count() - 1)
except NotImplementedError:
    pass
except ImportError:
    sys.stderr.write('If you find that not all cores are being '
                     'used, try installing affinity.\n')


def get_command_line_arguments(arguments=None):
    """Gets command line arguments passed in when called, or
    can be called from within a program.

    Parses input from the command line into options for running
    the MIRT model. For more fine-grained options, look at
    mirt_train_EM.py
    """
Example #39
    def config(self, config, servername=None, dbconfig=None,
                memconfig=None, masterconf=None):
        '''Configure the server.
        '''
        GlobalObject().json_config = config
        GlobalObject().remote_connect = self.remote_connect
        netport = config.get('netport')  # client connection
        webport = config.get('webport')  # HTTP connection
        rootport = config.get('rootport')  # root node configuration
        self.remoteportlist = config.get('remoteport', [])  # remote node configuration list
        if not servername:
            servername = config.get('name')  # server name
        logpath = config.get('log')  # log
        hasdb = config.get('db')  # database connection
        hasmem = config.get('mem')  # memcached connection
        app = config.get('app')  # entry module name
        cpuid = config.get('cpu')  # CPU to bind
        mreload = config.get('reload')  # module name to reload
        self.servername = servername
            
        if netport:
            self.netfactory = LiberateFactory()
            netservice = services.CommandService("netservice")
            self.netfactory.addServiceChannel(netservice)
            reactor.listenTCP(netport,self.netfactory)
            
        if webport:
            self.webroot = Flask("servername")
            GlobalObject().webroot = self.webroot
            reactor.listenWSGI(webport, self.webroot)
            
        if rootport:
            self.root = PBRoot()
            rootservice = services.Service("rootservice")
            self.root.addServiceChannel(rootservice)
            reactor.listenTCP(rootport, BilateralFactory(self.root))
            
        for cnf in self.remoteportlist:
            rname = cnf.get('rootname')
            self.remote[rname] = RemoteObject(self.servername)
            
        if hasdb and dbconfig:
            if dbconfig.has_key("user") and dbconfig.has_key("host") and dbconfig.has_key("host"):
                dbpool.initPool({"default":dbconfig})
            else:
                dbpool.initPool(dbconfig)
            
        if hasmem and memconfig:
            urls = memconfig.get('urls')
#             hostname = str(memconfig.get('hostname'))
            memcached_connect(urls)
            from gfirefly.dbentrust.util import M2DB_PORT,M2DB_HOST,ToDBAddress
            ToDBAddress().setToDBHost(memconfig.get("pubhost",M2DB_HOST))
            ToDBAddress().setToDBPort(memconfig.get("pubport",M2DB_PORT))
            
        if logpath:
            log.addObserver(loogoo(logpath))  # log handling
        log.startLogging(sys.stdout)
        
        if cpuid:
            affinity.set_process_affinity_mask(os.getpid(), cpuid)
        GlobalObject().config(netfactory = self.netfactory, root=self.root,
                    remote = self.remote)
        
        if app:
            __import__(app)
        if mreload:
            _path_list = mreload.split(".")
            GlobalObject().reloadmodule = __import__(mreload,fromlist=_path_list[:1])
        
        if masterconf:
            masterport = masterconf.get('rootport')
            masterhost = masterconf.get('roothost')
            self.master_remote = RemoteObject(servername)
            GlobalObject().masterremote = self.master_remote
            import admin
            addr = ('localhost',masterport) if not masterhost else (masterhost,masterport)
            self.master_remote.connect(addr)        
Example #40
 def config(self, config, servername=None, dbconfig=None,
             memconfig=None, masterconf=None):
     '''Configure the server.
     '''
     GlobalObject().json_config = config
     netport = config.get('netport')  # client connection
     webport = config.get('webport')  # HTTP connection
     rootport = config.get('rootport')  # root node configuration
     self.remoteportlist = config.get('remoteport', [])  # remote node configuration list
     if not servername:
         servername = config.get('name')  # server name
     logpath = config.get('log')  # log
     hasdb = config.get('db')  # database connection
     hasmem = config.get('mem')  # memcached connection
     app = config.get('app')  # entry module name
     cpuid = config.get('cpu')  # CPU to bind
     mreload = config.get('reload')  # module name to reload
     self.servername = servername
     if masterconf:
         masterport = masterconf.get('rootport')
         masterhost = masterconf.get('roothost')
         self.master_remote = RemoteObject(servername)
         addr = ('localhost',masterport) if not masterhost else (masterhost,masterport)
         self.master_remote.connect(addr)
         GlobalObject().masterremote = self.master_remote
         
     if netport:
         self.netfactory = LiberateFactory()
         netservice = services.CommandService("netservice")
         self.netfactory.addServiceChannel(netservice)
         reactor.listenTCP(netport,self.netfactory)
         
     if webport:
         self.webroot = vhost.NameVirtualHost()
         GlobalObject().webroot = self.webroot
         reactor.listenTCP(webport, DelaySite(self.webroot))
         
     if rootport:
         self.root = PBRoot()
         rootservice = services.Service("rootservice")
         self.root.addServiceChannel(rootservice)
         reactor.listenTCP(rootport, BilateralFactory(self.root))
         
     for cnf in self.remoteportlist:
         rname = cnf.get('rootname')
         self.remote[rname] = RemoteObject(self.servername)
         
     if hasdb and dbconfig:
         log.msg(str(dbconfig))
         dbpool.initPool(**dbconfig)
         
     if hasmem and memconfig:
         urls = memconfig.get('urls')
         hostname = str(memconfig.get('hostname'))
         mclient.connect(urls, hostname)
         
     if logpath:
         log.addObserver(loogoo(logpath))  # log handling
     log.startLogging(sys.stdout)
     
     if cpuid:
         affinity.set_process_affinity_mask(os.getpid(), cpuid)
     GlobalObject().config(netfactory = self.netfactory, root=self.root,
                 remote = self.remote)
     if app:
         __import__(app)
     if mreload:
         GlobalObject().reloadmodule = __import__(mreload)
     GlobalObject().remote_connect = self.remote_connect
     import admin
Example #41
# coding: UTF-8
import os
import sys
import affinity
from config.url import urls
sys.path.insert(0, './lib/webpy')
import web

app = web.application(urls, globals())

if __name__ == "__main__":
    pid = os.getpid()
    print affinity.get_process_affinity_mask(pid)
    affinity.set_process_affinity_mask(pid, 2)
    print affinity.get_process_affinity_mask(pid)
    app.run()