Exemplo n.º 1
0
    def get(self, param_id):
        """
        Update geocast parameters.

        Each tuning knob is read from the query string, falling back to the
        value currently held in Params (or the module-level globals).  The
        parsed values are published back onto the Params class, and the
        WorkerPSD index is optionally rebuilt when rebuild=1 is passed.
        """
        global datasets, tree, all_data
        global eps, percent, com_range, mar, arf, utl, heuristic, subcell, localness, constraint
        dataset = self.get_argument("dataset", default=Params.DATASET)
        eps = self.get_argument("eps", default=eps)
        percent = self.get_argument("percent", default=Params.PercentGrid)
        com_range = self.get_argument("range", default=Params.NETWORK_DIAMETER)

        # geocast parameters
        mar = self.get_argument("mar", default=Params.MAR)
        arf = self.get_argument("arf", default=Params.AR_FUNCTION)
        utl = self.get_argument("utl", default=Params.U)
        heuristic = self.get_argument("heuristic",
                                      default=Params.COST_FUNCTION)
        subcell = self.get_argument("subcell",
                                    default=Params.PARTIAL_CELL_SELECTION)
        localness = self.get_argument("localness",
                                      default=Params.CUSTOMIZED_GRANULARITY)
        constraint = self.get_argument("constraint",
                                       default=Params.CONSTRAINT_INFERENCE)

        # Publish the parsed values as class-level Params state.
        Params.DATASET = dataset
        Params.Eps = float(eps)
        Params.PercentGrid = float(percent)
        # NOTE(review): divided by 1000 -- presumably the query value is in
        # meters and NETWORK_DIAMETER is kept in kilometers; confirm units.
        Params.NETWORK_DIAMETER = float(com_range) / 1000.0
        Params.MAR = float(mar)
        Params.AR_FUNCTION = arf
        Params.U = float(utl)
        Params.COST_FUNCTION = heuristic
        # Flags may arrive either as the query string "true" or as the boolean
        # default taken from Params, hence the double comparison.
        Params.PARTIAL_CELL_SELECTION = (subcell == "true" or subcell == True)
        Params.CUSTOMIZED_GRANULARITY = (localness == "true"
                                         or localness == True)
        Params.CONSTRAINT_INFERENCE = constraint == "true"
        print "Update parameters ... "
        print Params.DATASET, Params.Eps, Params.PercentGrid, Params.NETWORK_DIAMETER, Params.MAR, Params.AR_FUNCTION, Params.U, Params.COST_FUNCTION, Params.PARTIAL_CELL_SELECTION, Params.CUSTOMIZED_GRANULARITY

        # workerPSD parameters
        rebuild = self.get_argument("rebuild", default=0)
        rebuild = int(rebuild)
        if rebuild == 1:
            # Re-read the current dataset and rebuild the spatial index.
            print "Reading data ... " + dataset
            data = data_readin()
            p = Params(1000)
            print "Creating WorkerPSD..."
            tree = Grid_adaptive(data, p)
            tree.buildIndex()
            bounds = np.array([[Params.x_min, Params.y_min],
                               [Params.x_max, Params.y_max]])
            all_data[dataset] = (tree, bounds, p.NDATA)
            print "Created WorkerPSD..." + dataset

        self.write(
            json.dumps({"status": "update successfully"}, sort_keys=True))
    def __init__(self, data, param):
        """Initialize the adaptive grid, then derive the level-1 grid size.

        The cell count self.m is the domain's shorter side divided by
        2 * Params.MTD, rounded down.
        """
        Grid_adaptive.__init__(self, data, param)

        # Extent of the domain along each axis, measured from the
        # lower-left corner.
        height = distance(Params.LOW[0], Params.LOW[1],
                          Params.LOW[0], Params.HIGH[1])
        width = distance(Params.LOW[0], Params.LOW[1],
                         Params.HIGH[0], Params.LOW[1])

        # Base the level-1 cell count on the shorter side.
        shorter = min(height, width)
        self.m = int(math.floor(shorter / (2 * Params.MTD)))
        logging.debug("Grid_adaptive_localness: Level 1 size: %d" % self.m)
Exemplo n.º 3
0
 def get(self):
     """Drop all cached PSD indexes and rebuild one for every dataset."""
     global tree, eps, all_data, datasets
     print "Reset data``"
     all_data = {}
     for dataset in datasets:
         # Presumably data_readin() picks up the file selected via
         # Params.DATASET -- confirm against its definition.
         Params.DATASET = dataset
         data = data_readin()
         p = Params(1000)
         eps = p.Eps
         tree = Grid_adaptive(data, p)
         tree.buildIndex()
         bounds = np.array([[Params.x_min, Params.y_min], [Params.x_max, Params.y_max]])
         all_data[dataset] = (tree, bounds, p.NDATA)
Exemplo n.º 4
0
    def __init__(self, data, param):
        """Set up the adaptive grid and compute the level-1 grid size.

        self.m = floor(min(side lengths) / (2 * Params.MTD)).
        """
        Grid_adaptive.__init__(self, data, param)

        # Side lengths of the bounding domain, both measured from the
        # lower-left corner (Params.LOW).
        extents = (
            distance(Params.LOW[0], Params.LOW[1],
                     Params.LOW[0], Params.HIGH[1]),
            distance(Params.LOW[0], Params.LOW[1],
                     Params.HIGH[0], Params.LOW[1]),
        )
        self.m = int(math.floor(min(extents) / (2 * Params.MTD)))
        logging.debug("Grid_adaptive_localness: Level 1 size: %d" % self.m)
Exemplo n.º 5
0
 def get(self):
     global tree, eps, all_data, datasets
     print "Reset data``"
     all_data = {}
     for dataset in datasets:
         Params.DATASET = dataset
         data = data_readin()
         p = Params(1000)
         eps = p.Eps
         tree = Grid_adaptive(data, p)
         tree.buildIndex()
         bounds = np.array([[Params.x_min, Params.y_min],
                            [Params.x_max, Params.y_max]])
         all_data[dataset] = (tree, bounds, p.NDATA)
Exemplo n.º 6
0
    def post(self, param_id):
        """
        Update geocast parameters
        """
        global datasets, tree, all_data
        global eps, percent, com_range, mar, arf, utl, heuristic, subcell, localness, constraint
        workers = tornado.escape.json_decode(self.request.body)

        # print simplejson.dumps(workers)

        # np.fromiter(json.loads(workers),dtype)

        # save data to a file
        # tmp_workers_file = "../../dataset/tmp_workers_file.dat"

        # data = np.genfromtxt("../../dataset/yelp.dat",unpack = True)

        print "Start updating worker locations"
        i = 0
        all_workers = []
        for worker in workers:
            i += 1
            if i % 1000 == 0:
                print "Updated ", i, " workers"
            pair = [worker['k'], worker['B']]
            all_workers.append(pair)
            data = np.array(all_workers)
            np.savetxt('../../dataset/update.txt', data, delimiter='\t')
            data = data.transpose()
        Params.NDIM, Params.NDATA = data.shape[0], data.shape[1]

        Params.LOW, Params.HIGH = np.amin(data, axis=1), np.amax(data, axis=1)
        print Params.NDIM, Params.NDATA
        print Params.LOW, Params.HIGH

        p = Params(1000)
        print "Creating WorkerPSD..."
        dataset = self.get_argument("dataset", default=Params.DATASET)
        Params.DATASET = dataset
        p.select_dataset()
        print dataset
        tree = Grid_adaptive(data, p)
        tree.buildIndex()
        bounds = np.array([[Params.x_min, Params.y_min],
                           [Params.x_max, Params.y_max]])
        print bounds
        all_data[dataset] = (tree, bounds, p.NDATA)

        self.write(
            json.dumps({"status": "update successfully"}, sort_keys=True))
Exemplo n.º 7
0
    def get(self, param_id):
        """
        Update geocast parameters.

        Each tuning knob is read from the query string, falling back to the
        value currently held in Params (or the module-level globals).  The
        parsed values are published back onto the Params class, and the
        WorkerPSD index is optionally rebuilt when rebuild=1 is passed.
        """
        global datasets, tree, all_data
        global eps, percent, com_range, mar, arf, utl, heuristic, subcell, localness, constraint
        dataset = self.get_argument("dataset", default=Params.DATASET)
        eps = self.get_argument("eps", default=eps)
        percent = self.get_argument("percent", default=Params.PercentGrid)
        com_range = self.get_argument("range", default=Params.NETWORK_DIAMETER)

        # geocast parameters
        mar = self.get_argument("mar", default=Params.MAR)
        arf = self.get_argument("arf", default=Params.AR_FUNCTION)
        utl = self.get_argument("utl", default=Params.U)
        heuristic = self.get_argument("heuristic", default=Params.COST_FUNCTION)
        subcell = self.get_argument("subcell", default=Params.PARTIAL_CELL_SELECTION)
        localness = self.get_argument("localness", default=Params.CUSTOMIZED_GRANULARITY)
        constraint = self.get_argument("constraint", default=Params.CONSTRAINT_INFERENCE)

        # Publish the parsed values as class-level Params state.
        Params.DATASET = dataset
        Params.Eps = float(eps)
        Params.PercentGrid = float(percent)
        # NOTE(review): divided by 1000 -- presumably the query value is in
        # meters and NETWORK_DIAMETER is kept in kilometers; confirm units.
        Params.NETWORK_DIAMETER = float(com_range) / 1000.0
        Params.MAR = float(mar)
        Params.AR_FUNCTION = arf
        Params.U = float(utl)
        Params.COST_FUNCTION = heuristic
        # Flags may arrive either as the query string "true" or as the boolean
        # default taken from Params, hence the double comparison.
        Params.PARTIAL_CELL_SELECTION = (subcell == "true" or subcell == True)
        Params.CUSTOMIZED_GRANULARITY = (localness == "true" or localness == True)
        Params.CONSTRAINT_INFERENCE = constraint == "true"
        print "Update parameters ... "
        print Params.DATASET, Params.Eps, Params.PercentGrid, Params.NETWORK_DIAMETER, Params.MAR, Params.AR_FUNCTION, Params.U, Params.COST_FUNCTION, Params.PARTIAL_CELL_SELECTION, Params.CUSTOMIZED_GRANULARITY

        # workerPSD parameters
        rebuild = self.get_argument("rebuild", default=0)
        rebuild = int(rebuild)
        if rebuild == 1:
            # Re-read the current dataset and rebuild the spatial index.
            print "Reading data ... " + dataset
            data = data_readin()
            p = Params(1000)
            print "Creating WorkerPSD..."
            tree = Grid_adaptive(data, p)
            tree.buildIndex()
            bounds = np.array([[Params.x_min, Params.y_min], [Params.x_max, Params.y_max]])
            all_data[dataset] = (tree, bounds, p.NDATA)
            print "Created WorkerPSD..." + dataset

        self.write(
            json.dumps({"status": "update successfully"}, sort_keys=True))
Exemplo n.º 8
0
    def post(self, param_id):
        """
        Update geocast parameters
        """
        global datasets, tree, all_data
        global eps, percent, com_range, mar, arf, utl, heuristic, subcell, localness, constraint
        workers = tornado.escape.json_decode(self.request.body)

        # print simplejson.dumps(workers)

        # np.fromiter(json.loads(workers),dtype)

        # save data to a file
        # tmp_workers_file = "../../dataset/tmp_workers_file.dat"

        # data = np.genfromtxt("../../dataset/yelp.dat",unpack = True)

        print "Start updating worker locations"
        i = 0
        all_workers = []
        for worker in workers:
            i += 1
            if i % 1000 == 0:
                print "Updated ", i, " workers"
            pair = [worker['k'], worker['B']]
            all_workers.append(pair)
            data = np.array(all_workers)
            np.savetxt('../../dataset/update.txt', data, delimiter='\t')
            data = data.transpose()
        Params.NDIM, Params.NDATA = data.shape[0], data.shape[1]

        Params.LOW, Params.HIGH = np.amin(data, axis=1), np.amax(data, axis=1)
        print Params.NDIM, Params.NDATA
        print Params.LOW, Params.HIGH

        p = Params(1000)
        print "Creating WorkerPSD..."
        dataset = self.get_argument("dataset", default=Params.DATASET)
        Params.DATASET = dataset
        p.select_dataset()
        print dataset
        tree = Grid_adaptive(data, p)
        tree.buildIndex()
        bounds = np.array([[Params.x_min, Params.y_min], [Params.x_max, Params.y_max]])
        print bounds
        all_data[dataset] = (tree, bounds, p.NDATA)

        self.write(
            json.dumps({"status": "update successfully"}, sort_keys=True))
Exemplo n.º 9
0
 def initialize(self):
     """
     Hook for subclass initialization
     A dictionary passed as the third argument of a url spec will be 
     supplied as keyword arguments to initialize().
     """
     global tree, eps, all_data, datasets
     # Lazily build the PSD index for every dataset on the first request;
     # subsequent requests reuse the cached all_data entries.
     if len(all_data) == 0:
         for dataset in datasets:
             Params.DATASET = dataset
             data = data_readin()
             p = Params(1000)
             eps = p.Eps
             tree = Grid_adaptive(data, p)
             tree.buildIndex()
             bounds = np.array([[Params.x_min, Params.y_min], [Params.x_max, Params.y_max]])
             all_data[dataset] = (tree, bounds, p.NDATA)
Exemplo n.º 10
0
 def initialize(self):
     """
     Hook for subclass initialization.

     Lazily populates the per-dataset index cache the first time any
     handler is initialized; later calls find the cache non-empty and
     return immediately.
     """
     global tree, eps, all_data, datasets
     if all_data:
         return
     for name in datasets:
         Params.DATASET = name
         raw = data_readin()
         p = Params(1000)
         eps = p.Eps
         tree = Grid_adaptive(raw, p)
         tree.buildIndex()
         box = np.array([[Params.x_min, Params.y_min],
                         [Params.x_max, Params.y_max]])
         all_data[name] = (tree, box, p.NDATA)
Exemplo n.º 11
0
    def post(self):
        global all_data, datasets, datasets2, boundaries, MTDs, worker_counts, all_datafiles, pearson_skewness, areas, spearman_skewness

        fileinfo = self.request.files['dataset'][0]
        print "fileinfo is", fileinfo
        fname = fileinfo['filename']
        fname = os.path.splitext(fname)[0]
        # cname = str(uuid.uuid4()) + extn
        cname = fname
        fh = open(__UPLOADS__ + cname, 'w')
        fh.write(fileinfo['body'])

        # update variables
        datasets.append(fname)
        datasets2.append(fname)
        all_datafiles[fname] = fname

        Params.DATASET = fname + '.dat'
        data = data_readin()
        p = Params(1000)
        eps = p.Eps
        tree = Grid_adaptive(data, p)
        tree.buildIndex()
        bounds = np.array([[Params.LOW[0], Params.LOW[1]],
                           [Params.HIGH[0], Params.HIGH[1]]])

        MTDs.append(Params.MTD)
        worker_counts.append(p.NDATA)
        pearson_skewness.append(0)
        areas.append(0)
        spearman_skewness.append(0)
        boundaries.append(
            str(Params.LOW[0]) + "," + str(Params.LOW[1]) + "," +
            str(Params.HIGH[0]) + "," + str(Params.HIGH[1]))

        all_data[fname] = (tree, bounds, data)

        self.finish(Params.DATASET +
                    " is uploaded to the server. Its PSD is constructed.")
Exemplo n.º 12
0
    def post(self):
        global all_data, datasets, datasets2, boundaries, MTDs, worker_counts, all_datafiles, pearson_skewness, areas, spearman_skewness

        fileinfo = self.request.files['dataset'][0]
        print "fileinfo is", fileinfo
        fname = fileinfo['filename']
        fname = os.path.splitext(fname)[0]
        # cname = str(uuid.uuid4()) + extn
        cname = fname
        fh = open(__UPLOADS__ + cname, 'w')
        fh.write(fileinfo['body'])

        # update variables
        datasets.append(fname)
        datasets2.append(fname)
        all_datafiles[fname] = fname

        Params.DATASET = fname + '.dat'
        data = data_readin()
        p = Params(1000)
        eps = p.Eps
        tree = Grid_adaptive(data, p)
        tree.buildIndex()
        bounds = np.array([[Params.LOW[0], Params.LOW[1]], [Params.HIGH[0], Params.HIGH[1]]])

        MTDs.append(Params.MTD)
        worker_counts.append(p.NDATA)
        pearson_skewness.append(0)
        areas.append(0)
        spearman_skewness.append(0)
        boundaries.append(
            str(Params.LOW[0]) + "," + str(Params.LOW[1]) + "," + str(Params.HIGH[0]) + "," + str(Params.HIGH[1]))

        all_data[fname] = (tree, bounds, data)

        self.finish(Params.DATASET + " is uploaded to the server. Its PSD is constructed.")
Exemplo n.º 13
0
 def run_Grid_adaptive(self, param):
     """Build a Grid_adaptive index over self.data, time the build, and
     run the standard query workload against it."""
     logging.debug('building Grid_adaptive...')
     tree = Grid_adaptive(self.data, param)
     start = time.clock()
     tree.buildIndex()
     if Params.CONSTRAINT_INFERENCE:
         tree.adjustConsistency()
     end = time.clock()
     # BUG FIX: '%.2d' formatted the elapsed time as a zero-padded integer,
     # discarding the fractional seconds; '%.2f' reports it correctly.
     # NOTE(review): time.clock() is removed in Python 3.8+; prefer
     # time.perf_counter() if this code is ever ported to Python 3.
     logging.info('[T] Grid_adaptive building time: %.2f ' % (end - start))
     return self.query(tree, "Grid_adaptive")