def __init__(self, sess, S_INFO, S_LEN, A_DIM, LR_RATE=1e-4, ID=1):
    """Build the actor-network graph (TF1.x graph mode).

    Creates the policy network, a numerically-safe softmax output, a
    categorical cross-entropy loss with an entropy-regularization term,
    and the Adam training op.

    sess    -- tf.Session used elsewhere to run the graph
    S_INFO  -- number of state-feature rows (stored for create_network)
    S_LEN   -- history length of each state feature
    A_DIM   -- number of discrete actions (size of the output layer)
    LR_RATE -- Adam learning rate
    ID      -- actor id, used to namespace this actor's variables
    """
    self.pool_ = pool.pool()
    self.sess = sess
    self.S_INFO = S_INFO
    self.S_LEN = S_LEN
    self.A_DIM = A_DIM
    # per-actor scope name so multiple actors can coexist in one graph
    self.s_name = 'actor/' + str(ID)
    self.inputs, self.out = self.create_network()
    # clip probabilities away from 0 and 1 so tf.log below stays finite
    self.real_out = tf.clip_by_value(self.out, 1e-4, 1. - 1e-4)
    # target action distribution fed in at train time
    self.y_ = tf.placeholder(shape=[None, A_DIM], dtype=tf.float32)
    # you can use any loss you want
    # self.core_net_loss = tflearn.objectives.mean_square(
    #     self.real_out, self.y_) + 1e-3 * tf.reduce_sum(tf.multiply(self.real_out, tf.log(self.real_out)))
    # self.core_net_loss = -tf.reduce_sum(self.y_ * tf.log(self.real_out)) + 1e-3 * tf.reduce_sum(tf.multiply(self.real_out, tf.log(self.real_out)))
    # Loss = cross-entropy + 1e-3 * sum(p*log p); the second term is
    # negative entropy, so minimizing it pushes the policy toward higher
    # entropy (encourages exploration).
    self.core_net_loss = tflearn.objectives.categorical_crossentropy(
        self.real_out, self.y_) + 1e-3 * tf.reduce_sum(tf.multiply(self.real_out, tf.log(self.real_out)))
    self.core_train_op = tf.train.AdamOptimizer(
        learning_rate=LR_RATE).minimize(self.core_net_loss)
    self.saver = tf.train.Saver()  # save neural net parameters
def __init__(self, spaces, packed=False, nch=pool.PROCESSES):
    """Start an asynchronous parallel intersection of space groups.

    spaces -- iterable of space-lists; each space-list is one job unit.
              When packed is False the spaces are packed with
              pack_matrix first.
    packed -- True if the caller already packed the spaces.
    nch    -- number of chunks / worker processes to split the work into.

    The async result handle is stored in self.result.
    """
    if not packed:
        # Materialize with list(): on Python 3 `map` returns a lazy
        # iterator, which would break len()/sort(key=len) on the chunk
        # elements below. (Identical behavior on Python 2.)
        spaces = [list(map(pack_matrix, L)) for L in spaces]
    self.nch = nch
    spaces_div = list(nchunks(spaces, nch))
    # Within each chunk, process the shortest space-lists first.
    for subl in spaces_div:
        subl.sort(key=len)
    self.result = pool.pool().map_async(intersect_worker, spaces_div)
def pool(self, action, pool_name, the_rest, act=True):
    """Dispatch a pool command.

    action    -- "new" (create/extend), "add" (extend existing only) or
                 "join" (merge other pools in); anything else is an error.
    pool_name -- name of the pool being operated on.
    the_rest  -- space-separated arguments: IP addresses for new/add,
                 pool names for join.
    act       -- when False, skip the mutation (verification path, TODO).

    Returns the affected pool object.
    """
    if action not in ("new", "add", "join"):
        return self.error(act)

    args = the_rest.split(" ")
    working_pool = self._controller.get_pool(pool_name)

    if action == "join":
        if act:
            # Merge every named pool into working_pool.
            for other_name in args:
                other = self._controller.get_pool(other_name)
                if other is None:
                    raise Exception  # TODO: pool not found
                working_pool.join(other)
        else:
            pass  # TODO: asserts verification stuff
    else:
        # "new" or "add": make sure the pool exists, then add addresses.
        if working_pool is None:
            if action == "add":
                raise Exception  # TODO: pool not defined
            working_pool = pool(pool_name)
        if act:
            for ip_addr in args:
                working_pool.add(ip_addr)
        else:
            pass  # TODO: asserts verification stuff

    return working_pool  # TODO: asserts want something else returned
def load(self):
    """Load the problem description from self.path.

    File layout:
      line 1:            R S U P M  (rows, slots/row, unavailable slots,
                                     pools, servers)
      next U lines:      "r s"  -- a disabled slot
      next M lines:      "s c"  -- a server's size and capacity

    Populates self.rows, self.pools and self.tmpServ (sorted).
    """
    # `with` guarantees the file is closed even if a line fails to parse
    # (the original left the handle open on any exception).
    with open(self.path) as f:
        self.R, self.S, self.U, self.P, self.M = (
            int(tok) for tok in f.readline().split(" "))
        self.rows = [row.row(self.S) for _ in range(self.R)]
        self.pools = [pool.pool(i) for i in range(self.P)]
        for _ in range(self.U):  # mark unavailable slots
            r, s = f.readline().split(" ")
            self.disabledslot(int(r), int(s))
        for i in range(self.M):  # stage servers; i is the server id
            s, c = f.readline().split(" ")
            self.tmpServ.append(server.server(int(s), int(c), i))
    # Single multi-key sort, equivalent to the original pair of stable
    # sorts (size first, then capacity): primary key is CAPACITY,
    # size breaks ties — the old comment had the order backwards.
    self.tmpServ.sort(key=operator.attrgetter('capacity', 'size'))
def parallel_cohens(self):
    """
    NS: Add some parallel processing to the cohensSimplify procedure
    _cohensSimplify is a common process used in image() and kernel()
    Return a tuple of modified matrix M, image data c and kernel data d.
    """
    # Python 2 code (print >> syntax); indices are 1-based as in the
    # surrounding matrix API (getRow/setRow).
    print >>sys.stderr, "Parallel Cohens simplify %dx%d" % (self.row, self.column)
    M = self.copy()
    c = [0] * (M.row + 1)      # c[j] = pivot column claimed by row j (0 = free)
    d = [-1] * (M.column + 1)  # d[k] = pivot row for column k (0 = no pivot)
    # Rows are kept in packed form so they can be shipped to pool workers;
    # assumes pack_vector/unpack_vector round-trip a row — TODO confirm.
    Mpr = [pack_vector(M.getRow(i)) for i in range1(M.row)]
    for k in range(1, M.column + 1):
        # Find a free pivot row j with a nonzero entry in column k.
        # NOTE(review): Mpr[j-1][0][k-1] reads entry k of the packed row;
        # depends on pack_vector's layout — confirm.
        for j in range(1, M.row + 1):
            if not c[j] and Mpr[j - 1][0][k - 1]:
                break
        else:
            # not found j such that m(j, k)!=0 and c[j]==0
            d[k] = 0
            continue
        # Normalize pivot row j: scale so that entry k becomes -1.
        Mj = unpack_vector(Mpr[j - 1])
        top = -ring.inverse(Mj[k])
        Mj[k] = -self.coeff_ring.one
        for s in range(k + 1, M.column + 1):
            Mj[s] = top * Mj[s]
        Mjp = pack_vector(Mj)
        Mpr[j - 1] = Mjp
        # Eliminate column k from every other row in parallel; the job
        # list skips the pivot row j itself.
        work = [(Mpr[i - 1], Mjp, k) for i in range(1, M.row + 1) if i != j]
        result = pool.pool().map(cohens_worker, work)
        # Scatter results back; when the counter reaches the pivot row j,
        # step past it (row j was excluded from `work`).
        i = 1
        for v in result:
            if i != j:
                Mpr[i - 1] = v
            else:
                i += 1
                Mpr[i - 1] = v
            i += 1
        c[j] = k
        d[k] = j
    for i in range(1, M.row + 1):
        M.setRow(i, unpack_vector(Mpr[i - 1]))
    return (M, c, d)
from flask import Flask, request, make_response, jsonify, send_file, session
from core import zju
from wx import sign
from pool import pool

app = Flask(__name__)
# shared session pool for the whole app
sess_pool = pool()


def jsonres(res, status_code=200):
    """Wrap `res` as a JSON response with a permissive CORS header."""
    response = make_response(jsonify(res), status_code)
    response.headers['Access-Control-Allow-Origin'] = '*'
    return response


# Whitelist of accounts read once at import time from ./allow.account,
# one account per line (trailing newlines stripped).
allow = []
with open('./allow.account', 'r') as f:
    allow = list(f.readlines())
allow = [i.strip('\n') for i in allow]


@app.route('/', methods=['GET'])
def hello():
    # trivial liveness endpoint
    return 'xixi~'


@app.route('/signature', methods=['GET'])
def signature():
    url = request.args.get('url')
    # NOTE(review): this function is truncated in the visible chunk —
    # the try-block continues past the end of this excerpt.
    try:
        obj = sign()
def parallel_weaksim(As, Bs=None):
    """Launch pairwise weak-similarity jobs for As[i] vs Bs[i].

    Bs defaults to As (self-comparison). Each pair is packed with
    pack_matrix and handed to the worker pool; returns the AsyncResult
    from map_async.
    """
    if Bs is None:
        Bs = As
    jobs = []
    for A, B in zip(As, Bs):
        jobs.append((pack_matrix(A), pack_matrix(B)))
    return pool.pool().map_async(weaksim_worker, jobs)
import random
import math
import unittest
from point import point
from pool import pool
from dbscan import *
from test import *

# Build the point pool (100 points per pool(100) — presumably random;
# see the pool module) and run DBSCAN over the gathered points.
m_pool = pool(100)
m_gather = m_pool.getgather()
print('Init DONE')
DBSCAN(10, 5, m_gather)
print('Function DONE')

# NOTE(review): m_clusters is assumed to come from one of the
# star-imports above (dbscan/test) — confirm.
for cluster in m_clusters:
    print("NEW CLUSTER---------------------")
    print(len(cluster))
    for pt in cluster:
        print(pt.x, " ", pt.y, " ", pt.r)

# Points DBSCAN flagged as noise.
print("NOISE---------------------")
for pt in [g for g in m_gather if g.is_noise]:
    print(pt.x, " ", pt.y, " ", pt.r)

print("\n\n**********TEST************")
runner = unittest.TextTestRunner(verbosity=2)
runner.run(unittest.TestLoader().loadTestsFromTestCase(TestDBScan))