Example #1
    def __init__(self, mDataProcessor=None):

        print "Initiallizing......"

        if mDataProcessor is None:
            dP = dataProcessor()
        else:
            dP = mDataProcessor

        self.__originData = dP.gridData
        self.__data = dP.dataFrame
        self.__hashSizeList = [10, 11, 12, 13, 14, 15]
        self.__traceToCheckList = [15, 250, 480, 690, 900]

        self.__hashProcessors = {}
        inputDim = len(self.__originData[0])

        # for each hash size
        for size in self.__hashSizeList:
            # to test a single size only, use: for size in [10]:
            lsh = ls.LSHash(size, inputDim)
            # index every trace vector into this hash table
            for index in range(dP.getTraceSum()):
                input_point = self.__data.iloc[:, index].tolist()
                lsh.index(
                    input_point,
                    extraData(
                        index, self.__originData[2][index][1],
                        self.__originData[2][index][2]))  # extra data: trace index plus its start and end times
            # store each LSH processor keyed by its hash size
            self.__hashProcessors[size] = lsh

        print "Initialization Success!"
Example #2
def localise_to_geo(bbox_values, exact_values, threshold, alpha, conj_m, d):
    result = []

    geo_dict = {exact.place: [] for exact in exact_values + bbox_values}
    bbox_dict = {exact.place: [] for exact in exact_values + bbox_values}

    print("now put tweets into place buckets")
    for value in tqdm(exact_values):
        geo_dict[value.place].append(value)
    for value in tqdm(bbox_values):
        bbox_dict[value.place].append(value)
    print("tweets have been allocated")

    coord_dict = {}

    print("creating indexes...")
    for place in tqdm(bbox_dict):
        print("work with place %s" % place)
        lsh = lshash.LSHash(6, conj_m.shape[1])
        for tweet in geo_dict[place]:
            lsh.index(np.array(conj_m[d[tweet.id]], dtype=int),
                      extra_data=tweet.id)
            if tweet.coordinates is None:
                print(tweet.id)  # flag an exact tweet that lacks coordinates
            coord_dict[tweet.id] = tweet
        print("index for %s is ready. Starting localisation" % place)
        for bbox in tqdm(bbox_dict[place]):
            inside_tweets = bbox_dict[bbox.place]
            if len(inside_tweets) < 3: continue
            cs = lsh.query(np.array(conj_m[d[bbox.id]], dtype=int),
                           num_results=10,
                           distance_func='euclidean')
            cs2 = []
            for m in cs:
                # keep (tweet id, distance) pairs under the distance
                # threshold; m is ((input_point, extra_data), distance)
                if m[1] < threshold:
                    cs2.append([m[0][1], m[1]])

            points = []
            for idx in cs2:
                exact = coord_dict[idx[0]]

                tdelta = (exact.time - bbox.time).total_seconds() / timedelta(
                    minutes=1).total_seconds()
                # second threshold, by time: tdelta is in minutes,
                # so skip matches more than a week apart
                if tdelta > 60 * 24 * 7: continue
                points.append((exact.coordinates, 1 - tdelta / (60 * 24 * 7),
                               1 - idx[1] / 10.0))
            if len(points) < 3: continue
            x0 = np.sum([
                x[0][0] * (alpha * x[1] + (1 - alpha) * x[2]) for x in points
            ])
            y0 = np.sum([
                x[0][1] * (alpha * x[1] + (1 - alpha) * x[2]) for x in points
            ])
            m0 = np.sum([alpha * x[1] + (1 - alpha) * x[2] for x in points])
            bbox.coordinates = [x0 / m0, y0 / m0]
            result.append(bbox)

    return result
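
The final coordinate above is a weighted centroid: each neighbour contributes weight alpha * w_time + (1 - alpha) * w_dist, where w_time = 1 - tdelta / (minutes in a week) and w_dist = 1 - distance / 10. A standalone check of that arithmetic; the sample points and alpha value are made up for illustration.

alpha = 0.5
# (coordinates, time weight, distance weight), as built in localise_to_geo
points = [((10.0, 20.0), 0.9, 0.8),
          ((12.0, 22.0), 0.5, 0.6),
          ((11.0, 21.0), 0.7, 0.7)]
weights = [alpha * w_t + (1 - alpha) * w_d for _, w_t, w_d in points]
x0 = sum(c[0] * w for (c, _, _), w in zip(points, weights))
y0 = sum(c[1] * w for (c, _, _), w in zip(points, weights))
m0 = sum(weights)
print([x0 / m0, y0 / m0])  # the weighted centroid assigned to the bbox tweet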
Example #3
def codify_frames(frames, num_bits):

    temp = frames[0].split()
    num_features = len(temp)
    # Initializing hash
    lsh = lshash.LSHash(num_bits, num_features)
    # Getting plane of first and unique hash table
    plane = lsh.uniform_planes[0]
    bin_frames = []

    for i in frames:
        # Extracting features as float list
        features = list(map(float, i.split()))
        # _hash projects the features onto the hyperplanes and
        # returns a num_bits-long binary string
        bin_frames.append(lsh._hash(plane, features))

    return bin_frames
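
A hypothetical usage sketch: codify two made-up frames of three space-separated features each. The exact bits depend on the randomly drawn hyperplanes.

frames = ["0.1 0.5 -0.3", "0.2 0.4 -0.1"]
codes = codify_frames(frames, num_bits=8)
print(codes)  # two 8-character binary strings, e.g. '10110010'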
Example #4
def localise_to_bbox(unloc, loc, threshold, alpha, conj_m, d):

    new_col = []

    lsh = lshash.LSHash(6, conj_m.shape[1])

    bbox_dict = {}

    for bbox in loc:
        lsh.index(conj_m[d[bbox.id]], extra_data=bbox.id)
        bbox_dict[bbox.id] = bbox

    for tweet in tqdm(unloc):
        cs = lsh.query(conj_m[d[tweet.id]],
                       num_results=10,
                       distance_func='cosine')
        points = []
        boxes = []
        cs2 = []
        for m in cs:
            # keep (bbox id, distance) pairs under the distance threshold
            if m[1] < threshold:
                cs2.append([m[0][1], m[1]])
        for idx in cs2:
            bbox = bbox_dict[idx[0]]

            tdelta = (bbox.time - tweet.time).total_seconds() / timedelta(
                minutes=1).total_seconds()
            # second threshold, by time: tdelta is in minutes,
            # so skip neighbours more than a week apart
            if tdelta > 60 * 24 * 7: continue

            # the small epsilon keeps the inverse weights below finite
            points += [(x, tdelta + 0.0001, idx[1] + 0.0001)
                       for x in bbox.bounding_box]
            boxes.append(bbox.bounding_box)
        # avoid division by zero when no neighbour passed the thresholds
        if not points: continue
        x0 = np.sum(
            [x[0][0] * (alpha / x[1] + (1 - alpha) / x[2]) for x in points])
        y0 = np.sum(
            [x[0][1] * (alpha / x[1] + (1 - alpha) / x[2]) for x in points])
        m0 = np.sum([alpha / x[1] + (1 - alpha) / x[2] for x in points])
        coord_res = Point([x0 / m0, y0 / m0])
        for box in boxes:
            pol = Polygon(box)
            if pol.contains(coord_res):
                tweet.bounding_box = box
                new_col.append(tweet)
                break
    return new_col
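
The containment test at the end relies on point-in-polygon geometry; Point and Polygon here are presumably the shapely.geometry classes. A minimal standalone sketch of that check, with made-up box coordinates:

from shapely.geometry import Point, Polygon

box = [(0.0, 0.0), (0.0, 2.0), (2.0, 2.0), (2.0, 0.0)]
centroid = Point([1.0, 1.0])
print(Polygon(box).contains(centroid))  # True: this box is assigned to the tweet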
Example #5
def codify_frames(frames, num_bits):

    num_features = frames.shape[1]
    # Initializing hash
    lsh = lshash.LSHash(num_bits, num_features)
    # Getting plane of first and unique hash table
    plane = lsh.uniform_planes[0]
    bin_frames = []

    # centre the frames before hashing
    frames = frames - np.mean(frames, 0)

    # give the last feature ten times the weight in every hyperplane
    for i in range(len(plane)):
        plane[i][-1] *= 10

    for i in frames:
        # convert the numpy row to a plain float list
        features = i.tolist()
        bin_frames.append(lsh._hash(plane, features))

    return bin_frames
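
A hypothetical usage sketch for this variant, which takes a numeric matrix instead of strings; the random frames are made up for illustration.

import numpy as np

frames = np.random.rand(4, 3)  # four frames, three features each
codes = codify_frames(frames, num_bits=8)
print(codes)  # four 8-character binary strings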
Example #6
    def __init__(self, udp_thread):
        dispatcher.connect(self.dispatcher_receive,
                           signal=UDPSERVER_SIGNAL,
                           sender=UDPSERVER_SENDER)
        # initialize dlib's face detector (HOG-based) and then create
        # the facial landmark predictor
        self.detector = dlib.get_frontal_face_detector()
        self.predictor = dlib.shape_predictor(
            "shape_predictor_68_face_landmarks.dat")
        self.camera_index = 0
        self.cap = cv2.VideoCapture(self.camera_index)
        self.count = 0
        self.old_time = 0
        self.dimensions = 26
        self.reference_scale_multiplier = 5
        self.lsh = lshash.LSHash(256, self.dimensions)  # 256-bit hashes over 26-dimensional face features
        self.extra_data = ""
        self.save_face = False
        self.save_file_name = "face_data3.json"
        self.imageScale = 0.25

        self.udp_thread = udp_thread
        self.faceDetection()
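
A hypothetical sketch (not from the original class) of how the LSH table above could be used: index a face's 26-dimensional feature vector under a label, then look up the closest known face. feature_vector and label are stand-ins for values computed inside faceDetection(), which is not shown here.

    def remember_face(self, feature_vector, label):
        # store the feature vector with its label as extra data
        self.lsh.index(feature_vector, extra_data=label)

    def recognise_face(self, feature_vector):
        # lshash returns ((input_point, extra_data), distance) tuples
        matches = self.lsh.query(feature_vector, num_results=1)
        if matches:
            (point, label), distance = matches[0]
            return label, distance
        return None, None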