Code Example #1
 def evaluate(self):
     """Load results, or calculate them if not available."""
     results = Vessel("predictions.dat")
     if "y_" not in results.keys or results.scientific_name != self.scientific_name:
         self.y_ = self.cnn.model.predict(self.X)
         results.y = self.y
         results.y_ = self.y_
         results.scientific_name = self.scientific_name
         results.save()
     self.y_ = results.y_
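This snippet leans on Vessel acting as a small persistent namespace: constructing it with a filename reloads any attributes saved earlier, and save() writes them back to disk. A minimal standalone sketch of the same load-or-compute caching idiom, assuming only the keys property and attribute access used above (model and X are placeholders):

    cache = Vessel("predictions.dat")
    if "y_" in cache.keys:
        # Reuse previously saved predictions.
        predictions = cache.y_
    else:
        # Otherwise compute them and persist for next time.
        predictions = model.predict(X)
        cache.y_ = predictions
        cache.save()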
Code Example #2
 def classify_tiles(self):
     """Classify all the tiles."""
     scores = {}
     for scientific_name in tqdm(self.target_species):
         cnn = CNN(scientific_name, do_load_model=True)
         scores[scientific_name] = cnn.model.predict(self.X)
     self.scores = scores
     cm = Vessel("confusion_matrix.dat")
     cm.scores = scores
     cm.save()
Code Example #3
def find_all_images(tree, imgs, truth_locations, truths):
    """Find all images that contain each ground truth location."""
    v = Vessel("ground_truth.dat")
    for idx, truth in tqdm(enumerate(truths)):
        loc = [truth_locations[idx, :]]
        truth["images"] = []
        _, candidates = tree.query(loc, k=100)
        for candidate in candidates[0]:
            if in_image(loc[0], imgs[candidate]["image_loc"]):
                truth["images"].append(imgs[candidate]["_id"])
            else:
                v.truths = truths
                v.save()
                break
Code Example #4
def process_images(tile_size=100,
                   step_size=100,
                   max_number_images=20,
                   dt=DELTA_TIME):
    """Process multiple images."""
    crs = []
    vfs = []
    weights = []
    image_numbers = []
    print("Processing Images.")
    for image_number in tqdm(np.arange(1, max_number_images)):
        img1 = load_image_num(image_number, FOLDER_NAME)
        img2 = load_image_num(image_number + FRAME_INTERVAL, FOLDER_NAME)
        cr, vf, sims = estimate_velocity_field(img1, img2, tile_size,
                                               step_size, dt)
        crs.append(cr)
        vfs.append(vf)
        weights.append(sims)
        image_numbers.append(image_number)

    # Apply a Kalman filter to each tile's velocity series, then compute the smoothed estimates.
    crs = np.array(crs)
    vfs = np.array(vfs)
    kalman_vfs = np.zeros_like(vfs)
    smoothed_kalman_vfs = np.zeros_like(vfs)
    print("Applying filter.")
    n_tiles = len(crs[0])
    for index in tqdm(np.arange(n_tiles)):
        tile_field_series = [vf[index] for vf in vfs]
        tile_weight_series = [w[index] for w in weights]
        k_vx, k_vy = kalman(tile_field_series)
        new_k = np.array(tuple(zip(k_vy, k_vx)))  # stack into an (n_frames, 2) array (np.matrix is deprecated)
        new_u = smooth_kalman(k_vx, k_vy, tile_weight_series)
        for seq in range(len(kalman_vfs)):
            kalman_vfs[seq][index] = new_k[seq]
            smoothed_kalman_vfs[seq][index] = new_u[seq]

    # Save this data.
    v = Vessel("fields_kalman.dat")
    v.crs = crs
    v.vfs = kalman_vfs
    v.image_numbers = image_numbers
    v.save()

    v = Vessel("fields_smoothed_kalman.dat")
    v.crs = crs
    v.vfs = smoothed_kalman_vfs
    v.image_numbers = image_numbers
    v.save()
Code Example #5
 def load_targets(self):
     """Load example targets."""
     y = []
     X = []
     print("> Assembling the data.")
     for scientific_name in tqdm(self.target_species):
         annotations = get_specified_target(scientific_name,
                                            nb_annotations=100)
         for annotation in tqdm(annotations):
             X_ = extract_tiles_from_annotation(annotation, 10)
             X.extend(X_)
             y_ = [scientific_name] * len(X_)
             y.extend(y_)
     self.X = X
     self.y = y
     print("> Assembly complete.")
     c = Vessel("confusion_matrix.dat")
     c.X = X
     c.y = y
     c.save()
Code Example #6
def create_label_maps(path_to_annotations, path_to_maps):
    """Find species to integer (and inverse) maps."""
    # Find all unique species.
    v = Vessel(path_to_annotations)
    unique_species = set()
    for img in v.annotated_images:
        for annotation in img["annotations"]:
            if "plant" not in annotation.keys():
                continue
            unique_species.add(annotation["plant"])
    unique_species = sorted(list(unique_species))

    # Build label maps.
    label_map = {}
    label_map_inverse = {}
    for itr, plant_name in enumerate(unique_species):
        label_map_inverse[plant_name] = itr
        label_map[itr] = plant_name

    maps = Vessel(path_to_maps)
    maps.plant_to_id = label_map_inverse
    maps.id_to_plant = label_map
    maps.save()
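Downstream code can presumably reload these maps the same way they were written. A short sketch of that round trip, assuming path_to_maps points at the file produced above:

    maps = Vessel(path_to_maps)
    plant_name = maps.id_to_plant[0]          # integer id -> species name
    assert maps.plant_to_id[plant_name] == 0  # the two maps are inverses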
Code Example #7
    v = Vessel("targets.dat")

    # X — images; y — class identities (one-hot vectors).
    v.X = {}
    v.y = {}

    targets = sorted(glob("data/*"))
    v.targets = {}
    for target in targets:
        target_name = target.split("/")[-1]
        v.targets[target_name] = glob(f"{target}/*")
    target_names = list(v.targets.keys())
    v.target_names = target_names

    # Now generate training/test data.
    for itr, target in enumerate(tqdm(v.target_names)):
        v.X[target] = []
        v.y[target] = []
        paths_to_images = v.targets[target]
        for path_to_image in paths_to_images:
            # Open image and resize it appropriately.
            image = imread(path_to_image)
            image_ = resize_image(image)
            # Add standardized images and class labels.
            v.X[target].append(image_)
            v.y[target].append(one_hot(itr, len(v.target_names)))
        # Convert to numpy arrays
        v.X[target] = np.array(v.X[target])
        v.y[target] = np.array(v.y[target])
    v.save()
Code Example #8
File: stash.py Project: lightscalar/livestream
from collections import deque
from datetime import datetime

import numpy as np

from vessel import Vessel  # assumed import path for the Vessel helper used below


class Stash(object):
    """Store data and filter it."""
    def __init__(
        self,
        nb_taps: int = 5,
        demand_uniqueness: bool = True,
        do_filter: bool = True,
        save_data: bool = False,
    ):
        self.do_filter = do_filter
        self.save_data = save_data
        self.demand_uniqueness = demand_uniqueness
        self.M = M = nb_taps
        self.p = p = int((M - 1) / 2)
        self.q = p + 1

        # These vectors hold the time/values being added to the stash.
        self.x = deque([], maxlen=1000)
        self.t = deque([], maxlen=1000)

        # These deques hold the filtered version of t/x; we never sample from them directly.
        self.t_ = deque([], maxlen=1000)
        self.x_ = deque([], maxlen=1000)
        self.x_prev = 0

        # These variables are the filtered version from which we sample.  We
        # have two versions because, depending on how quickly we're sampling
        # from the object, we may exhaust the data needed for the moving
        # average filter.
        self.t_filtered = deque([], maxlen=1000)
        self.x_filtered = deque([], maxlen=1000)
        if self.save_data:
            datestring = datetime.now().strftime("%Y.%m.%d.%H.%M")
            self.store = Vessel(f"data/{datestring}.dat")
            self.store.t = []
            self.store.x = []

    def add(self, t, x):
        """Add new point."""
        if self.demand_uniqueness:
            # Cannot add two successive identical values.
            if len(self.x) > 0:
                if self.x[-1] != x:
                    self.t.append(t)
                    self.x.append(x)
                    self.save_to_store(t, x)
            else:
                self.t.append(t)
                self.x.append(x)
                self.save_to_store(t, x)
        else:
            self.t.append(t)
            self.x.append(x)
            self.save_to_store(t, x)
        if len(self.x) >= self.M and self.do_filter:
            self.filter()

    def save_to_store(self, t, x):
        if self.save_data:
            self.store.t.append(t)
            self.store.x.append(x)
            if np.mod(len(self.store.t), 1000) == 0:
                # Save every 1000 samples.
                self.store.save()

    def filter(self):
        """Super efficient moving average filter."""
        M, p, q = self.M, self.p, self.q
        x = self.x
        # Slide the centered window one sample forward: add the newest sample
        # and drop the oldest one, reusing the previous average.
        idx = len(self.x) - (p + 1)
        x_ = self.x_prev + (x[idx + p] - x[idx - q]) / M
        self.t_.append(self.t[idx])
        self.t_filtered.append(self.t[idx])
        self.x_.append(x_)
        self.x_filtered.append(x_)
        self.x_prev = x_

    @property
    def sample(self):
        """Return first observed pair (t, x), still in queue."""
        if self.do_filter:
            if len(self.t_filtered) > 0:
                yield self.t_filtered.popleft(), self.x_filtered.popleft()
            else:
                yield None, None
        else:  # let's not filter
            if len(self.t) > 0:
                yield self.t.popleft(), self.x.popleft()
            else:
                yield None, None
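A quick usage sketch of the Stash class, assuming the (t, x) pairs come from some acquisition loop (the literal values below are placeholders):

    stash = Stash(nb_taps=5, do_filter=True, save_data=False)
    for t, x in [(0.0, 1.0), (0.1, 1.2), (0.2, 0.9), (0.3, 1.1), (0.4, 1.0), (0.5, 1.3)]:
        stash.add(t, x)
    # The sample property yields the oldest filtered pair, or (None, None) if none is ready.
    t_f, x_f = next(stash.sample)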