Example #1
def gauss_smooth(mask, FILTER_SIZE):
    SIGMA = 0.3 * (
        (FILTER_SIZE - 1) * 0.5 - 1) + 0.8  #0.3*(FILTER_SIZE-1) + 0.8
    smoother = Smoother({'data': mask}, FILTER_SIZE, SIGMA)
    new_mask = smoother.get_output()

    return new_mask
Example #2
 def test_draw_black(self):
     s = Smoother([[0]])
     pb = PathBuilder()
     s._draw_black(pb, 20, 30, BLACK)
     pb.optimize()
     self.assertEqual(pb.generate_paths(), [[(20, 30), (30, 30), (30, 40),
                                             (20, 40)]])
Example #3
 def test_draw_black2(self):
     s = Smoother([[0]])
     pb = PathBuilder()
     s._draw_black(pb, 0, 0, BLACK | NW | NE | SE | SW)
     pb.optimize()
     self.assertEqual(pb.generate_paths(), [[(0, 3), (3, 0), (7, 0),
                                             (10, 3), (10, 7), (7, 10),
                                             (3, 10), (0, 7)]])
Example #4
 def test_draw_white2(self):
     s = Smoother([[0]])
     pb = PathBuilder()
     s._draw_white(pb, 0, 0, NW | NE | SE | SW)
     pb.optimize()
     self.assertEqual(pb.generate_paths(),
                      [[(0, 0), (10, 0), (10, 10),
                        (0, 10)], [(2, 5), (5, 8), (8, 5), (5, 2)]])
Example #5
class Peer(visualizer.Peer):
    def __init__(self, *args):
        visualizer.Peer.__init__(self, *args)
        self.departure_position = None
        self.smoothed_branching_position = Smoother()
        self.segments = {}
        hue = random.uniform(0, 1)
        self.color = Vector3d(*(colorsys.hsv_to_rgb(hue, 0.35, 1)))

    def add_segment(self, segment):
        if self.departure_position is None:
            self.departure_position = segment.departure_position
        segment.peer = self
        segment.gathered = False
        self.segments[segment.id] = segment

    def update(self):
        for segment in self.segments.values():
            if not segment.gathered and not segment.is_playing():
                segment.f.gatherer.add(segment)
                segment.gathered = True

        # materialize the ids first so entries can safely be deleted from the dict below
        outdated = [segment_id for segment_id in self.segments
                    if self.segments[segment_id].outdated()]
        for segment_id in outdated:
            segment = self.segments[segment_id]
            del self.segments[segment_id]
        self.update_branching_position()

    def update_branching_position(self):
        if len(self.segments) == 0:
            self.smoothed_branching_position.reset()
        else:
            average_target_position = \
                sum([segment.target_position() for segment in self.segments.values()]) / \
                len(self.segments)
            new_branching_position = self.departure_position*0.4 + average_target_position*0.6
            self.smoothed_branching_position.smooth(
                new_branching_position, self.visualizer.time_increment)

    def draw(self):
        if len(self.segments) > 0:
            for segment in self.segments.values():
                segment.draw_playing()
            for segment in self.segments.values():
                self.set_color(0)
                segment.draw_curve()

    def set_color(self, relative_age):
        if GREYSCALE:
            glColor3f(1 - CURVE_OPACITY,
                      1 - CURVE_OPACITY,
                      1 - CURVE_OPACITY)
        else:
            self.visualizer.set_color(self.color)
Example #6
class Peer(visualizer.Peer):
    def __init__(self, *args):
        visualizer.Peer.__init__(self, *args)
        self.departure_position = None
        self.smoothed_branching_position = Smoother()
        self.segments = {}
        self.rightward = random.choice([True, False])
        if self.rightward:
            x = 0
        else:
            x = self.visualizer.width
        self.position = Vector2d(
            x,
            CURVE_MARGIN_Y * self.visualizer.height + \
                random.uniform(0, (1-CURVE_MARGIN_Y*2) * self.visualizer.height))

    def add_segment(self, segment):
        if self.departure_position is None:
            self.departure_position = segment.departure_position
        segment.peer = self
        segment.gathered = False
        self.segments[segment.id] = segment

    def update(self):
        for segment in self.segments.values():
            if not segment.gathered and not segment.is_playing():
                self.visualizer.gather(segment)
                segment.gathered = True

        # materialize the ids first so entries can safely be deleted from the dict below
        outdated = [segment_id for segment_id in self.segments
                    if self.segments[segment_id].outdated()]
        for segment_id in outdated:
            segment = self.segments[segment_id]
            del self.segments[segment_id]
        self.update_branching_position()

    def update_branching_position(self):
        if len(self.segments) == 0:
            self.smoothed_branching_position.reset()
        else:
            average_target_position = \
                sum([segment.target_position() for segment in self.segments.values()]) / \
                len(self.segments)
            new_branching_position = self.departure_position * RELATIVE_BRANCHING_POSITION \
                + average_target_position * (1-RELATIVE_BRANCHING_POSITION)
            self.smoothed_branching_position.smooth(
                new_branching_position, self.visualizer.time_increment)

    def draw(self):
        if len(self.segments) > 0:
            for segment in self.segments.values():
                segment.draw_playing()
Example #7
    def gauss_smooth(self, mask, FILTER_SIZE):
        '''
        A TensorFlow Gaussian smoothing function.
        Args:
            mask: a 'Tensor' to be smoothed
            FILTER_SIZE: spatial size of the Gaussian filter
        Output:
            a Gaussian-smoothed 'Tensor' of the same size as 'mask'
        '''
        SIGMA = 0.3 * (
            (FILTER_SIZE - 1) * 0.5 - 1) + 0.8  #0.3*(FILTER_SIZE-1) + 0.8
        smoother = Smoother({'data': mask}, FILTER_SIZE, SIGMA)
        new_mask = smoother.get_output()

        return new_mask
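
Note: the SIGMA formula above is the same heuristic OpenCV uses to derive a default sigma from a given kernel size. As a minimal NumPy sketch (independent of the Smoother class, for illustration only), this is the 1-D Gaussian kernel such a sigma implies; gaussian_kernel_1d is a hypothetical helper, not part of any example above:

import numpy as np

def gaussian_kernel_1d(filter_size):
    # same sigma heuristic as in gauss_smooth above
    sigma = 0.3 * ((filter_size - 1) * 0.5 - 1) + 0.8
    # symmetric sample points centered on zero
    x = np.arange(filter_size) - (filter_size - 1) / 2.0
    kernel = np.exp(-(x ** 2) / (2 * sigma ** 2))
    return kernel / kernel.sum()  # normalize so the weights sum to 1

# gaussian_kernel_1d(5) is approximately [0.071, 0.244, 0.370, 0.244, 0.071]
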
Example #8
class Segment(waves.Segment, heat_map.Segment):
    def __init__(self, *args):
        waves.Segment.__init__(self, *args)
        heat_map.Segment.__init__(self, *args)
        self._amp_smoother = Smoother(response_factor=2.5)

    def relative_size(self):
        age = self.age()
        if age > (self.duration - self._fade_time):
            return 1 - sigmoid(1 - (self.duration - age) / self._fade_time)
        else:
            self._amp_smoother.smooth(
                max([abs(value) for value in self.waveform]),
                self.visualizer.time_increment)
            return sigmoid(pow(max(self._amp_smoother.value(), 0), 0.25))
Example #9
    def __init__(self, tr_log, pieces, args):
        visualizer.Visualizer.__init__(self, args)
        AncestryPlotter.__init__(self, tr_log.total_file_size(), tr_log.lastchunktime(), args)

        if args.unfold == BACKWARD:
            for piece in pieces:
                self.add_piece(piece["id"], piece["t"], piece["begin"], piece["end"])
        elif args.unfold == FORWARD:
            self._remaining_pieces = copy.copy(pieces)

        self._autozoom = (args.geometry == CIRCLE and self.args.autozoom)
        if self._autozoom:
            self._max_pxy = 0
            self._zoom_smoother = Smoother()

        if args.node_style == CIRCLE:
            self._node_plot_method = self._draw_node_circle
            self._nodes = {}
        else:
            self._node_plot_method = None

        if args.node_size_envelope:
            attack, decay, sustain = args.node_size_envelope.split(",")
            self._node_size_envelope = AdsrEnvelope(
                attack, decay, sustain, args.node_size_envelope_slope)
        else:
            self._node_size_envelope = None

        if args.sway_envelope:
            attack, decay, sustain = args.sway_envelope.split(",")
            self._sway_envelope = AdsrEnvelope(attack, decay, sustain)
        else:
            self._sway_envelope = None
Example #10
 def __init__(self, *args):
     visualizer.Peer.__init__(self, *args)
     self.departure_position = None
     self.smoothed_branching_position = Smoother()
     self.segments = {}
     hue = random.uniform(0, 1)
     self.color = Vector3d(*(colorsys.hsv_to_rgb(hue, 0.35, 1)))
Example #11
File: blur.py Project: zhcv/summary
def smooth():
    input_image = tf.placeholder(tf.float32, shape=[1, None, None, 3])
    smoother = Smoother({'data': input_image}, FILTER_SIZE, SIGMA)
    smoothed_image = smoother.get_output()

    init = tf.global_variables_initializer()
    with tf.Session() as sess:
        sess.run(init)
        image = Image.open(FLAGS.image_path)
        image = np.array(image, dtype=np.float32)
        # reshape to NHWC with 3 channels to match the placeholder shape
        image = image.reshape((1, image.shape[0], image.shape[1], 3))
        smoothed = sess.run(smoothed_image,
                            feed_dict={input_image: image})
        smoothed = smoothed / np.max(smoothed)

        out_image = np.squeeze(smoothed)
        out_image = Image.fromarray(np.squeeze(np.uint8(out_image * 255)))
Example #12
 def test_getitem(self):
     s = Smoother([[0, 1], [1, 0], [1, 1]])
     self.assertEqual(s.width, 2)
     self.assertEqual(s.height, 3)
     self.assertFalse(s[(0, 0)])
     self.assertTrue(s[(1, 2)])
     self.assertFalse(s[(1, -1)])
     self.assertFalse(s[(-1, 1)])
     self.assertFalse(s[(2, 1)])
Example #13
    def __init__(self, tr_log, pieces, args):
        visualizer.Visualizer.__init__(self, args)
        AncestryPlotter.__init__(self, tr_log.total_file_size(), tr_log.lastchunktime(), args)
        self._unfold_function = getattr(self, "_unfold_%s" % args.unfold)

        self._pre_render()

        self._autozoom = (args.geometry == CIRCLE and self.args.autozoom)
        if self._autozoom:
            self._max_pxy = 0
            self._zoom_smoother = Smoother()
Example #14
def smooth():

    Image_Placeholder = tf.placeholder( tf.float32, shape = [1, None, None, 3])
    smoother = Smoother({'data':Image_Placeholder}, FILTER_SIZE, SIGMA)
    smoothed_image = smoother.get_output()

    init = tf.initialize_all_variables()

    with tf.Session() as sess:
        sess.run(init)
        image = Image.open(FLAGS.image_path)
        image = np.array(image, dtype = np.float32)
        image = image.reshape((1, image.shape[0], image.shape[1], 3))
        smoothed = sess.run(smoothed_image,
                             feed_dict = {Image_Placeholder: image})
        smoothed = smoothed / np.max(smoothed)
        out_image = np.squeeze(smoothed)

        out_image = Image.fromarray(np.squeeze(np.uint8(out_image * 255)))
        out_image.show()
Example #15
 def __init__(
         self, 
         id, 
         bins=[0, .25, .5, .75, 1], 
         prob=[.25, .25, .25, .25],
         editable_cols=['bin-start', 'bin-end', 'pdf', 'cdf'], 
         datatable={},
         row_addable=False,
         scalable=False,
         smoother=False, 
         *args, **kwargs
     ):
     super().__init__(id, *args, **kwargs)
     self.bins = bins
     self.prob = prob
     self.editable_cols = editable_cols
     self.datatable = datatable
     self.row_addable = row_addable
     self.scalable = scalable
     self.smoother = smoother
     # underlying distribution if using smoother
     self._dist = Smoother()
Example #16
 def __init__(self, *args):
     visualizer.Peer.__init__(self, *args)
     self.departure_position = None
     self.smoothed_branching_position = Smoother()
     self.segments = {}
     self.rightward = random.choice([True, False])
     if self.rightward:
         x = 0
     else:
         x = self.visualizer.width
     self.position = Vector2d(
         x,
         CURVE_MARGIN_Y * self.visualizer.height + \
             random.uniform(0, (1-CURVE_MARGIN_Y*2) * self.visualizer.height))
Example #17
def main(filepath, start, end, alpha):
    print(f'Start\t\n')

    print(f'{filepath}\t{start}\t{end}\t{alpha}')

    smoother = Smoother(filepath, start, end, alpha)

    if start is None and end is None and alpha is None:
        smoother.plot(isRaw=True).savefig('Raw.png')
    else:
        smoother.plot(isRaw=False, alpha=alpha, start=start, end=end).savefig(
            f'smoothed_from_{start}_to_{end}_with_{alpha}.png')
    print(f'End\t\n')
    return 0
Example #18
class SmoothingLimiter(OriginalLimiter):
    def __init__(self, priority, limit_rate_model, proxy_model):
        OriginalLimiter.__init__(self, priority, limit_rate_model, proxy_model)
        self.smooth_released = Smoother(2)
        self.smooth_rate_limit = Smoother(2)
        self.rate_set = False

    def update_rate(self, params):
        OriginalLimiter.update_rate(self, params)
        if not self.rate_set:
            self.rate_set = True
            self.smooth_rate_limit.reset(self.rate)
        else:
            self.smooth_rate_limit.set_total(params.time, self.rate)

    def update_limit(self, params):
        self.limit = 2.0 * (self.smooth_rate_limit.smooth_total(params.time) -
                            self.smooth_released.smooth_rate(params.time))

    def can_start(self, params):
        return params.num_started + params.count <= self.limit

    def update_budget(self, params):
        self.smooth_released.add_delta(params.time, params.num_started)
Example #19
class Ancestry(visualizer.Visualizer, AncestryPlotter):
    def __init__(self, tr_log, pieces, args):
        visualizer.Visualizer.__init__(self, args)
        AncestryPlotter.__init__(self, tr_log.total_file_size(), tr_log.lastchunktime(), args)

        if args.unfold == BACKWARD:
            for piece in pieces:
                self.add_piece(piece["id"], piece["t"], piece["begin"], piece["end"])
        elif args.unfold == FORWARD:
            self._remaining_pieces = copy.copy(pieces)

        self._autozoom = (args.geometry == CIRCLE and self.args.autozoom)
        if self._autozoom:
            self._max_pxy = 0
            self._zoom_smoother = Smoother()

        if args.node_style == CIRCLE:
            self._node_plot_method = self._draw_node_circle
            self._nodes = {}
        else:
            self._node_plot_method = None

        if args.node_size_envelope:
            attack, decay, sustain = args.node_size_envelope.split(",")
            self._node_size_envelope = AdsrEnvelope(
                attack, decay, sustain, args.node_size_envelope_slope)
        else:
            self._node_size_envelope = None

        if args.sway_envelope:
            attack, decay, sustain = args.sway_envelope.split(",")
            self._sway_envelope = AdsrEnvelope(attack, decay, sustain)
        else:
            self._sway_envelope = None


    @staticmethod
    def add_parser_arguments(parser):
        AncestryPlotter.add_parser_arguments(parser)
        visualizer.Visualizer.add_parser_arguments(parser)
        parser.add_argument("-z", dest="timefactor", type=float, default=1.0)

    def InitGL(self):
        visualizer.Visualizer.InitGL(self)
        glClearColor(0.0, 0.0, 0.0, 0.0)

        if self.args.node_style == CIRCLE:
            self._node_circle_lists = []
            for n in range(0, NODE_SIZE_PRECISION):
                display_list = self.new_display_list_id()
                self._node_circle_lists.append(display_list)
                glNewList(display_list, GL_COMPILE)
                self._render_node_circle(0, 0, n)
                glEndList()

    def ReSizeGLScene(self, width, height):
        visualizer.Visualizer.ReSizeGLScene(self, width, height)
        self._size = min(width, height) - 2*MARGIN
        AncestryPlotter.set_size(self, self._size, self._size)

    def render(self):
        glTranslatef(MARGIN + (self.width - self._size)/2, MARGIN, 0)
        glLineWidth(self.args.line_width)
        glEnable(GL_LINE_SMOOTH)
        glHint(GL_LINE_SMOOTH_HINT, GL_NICEST)
        glEnable(GL_BLEND)
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
        glColor3f(1,1,1)

        if self.args.unfold == BACKWARD:
            self._cursor_t = self._duration - self._adjusted_current_time() % self._duration
        elif self.args.unfold == FORWARD:
            if self.args.ff:
                self._add_oldest_remaining_piece()
            else:
                while (len(self._remaining_pieces) > 0 and
                       self._remaining_pieces[0]["t"] <= self._adjusted_current_time()):
                    self._add_oldest_remaining_piece()

        if self._autozoom:
            self._zoom = self._zoom_smoother.value()
            if self._zoom is None:
                self._zoom = 0.0
        else:
            self._zoom = 1.0

        self.plot()

        if self._autozoom:
            if self._max_pxy == 0:
                zoom = 0.5
            else:
                zoom = 0.5 + self._cursor_t/self._duration * 0.5 / self._max_pxy
            self._zoom_smoother.smooth(zoom, self.time_increment)

    def _add_oldest_remaining_piece(self):
        piece = self._remaining_pieces.pop(0)
        self.add_piece(piece["id"], piece["t"], piece["begin"], piece["end"])

    def finished(self):
        return self.current_time() > (self._duration / self.args.timefactor + SUSTAIN_TIME)

    def _adjusted_current_time(self):
        return self.current_time() * self.args.timefactor

    def _follow_piece(self, piece, child=None):
        self._update_and_draw_node(piece, piece.t, (piece.begin + piece.end) / 2)

        if len(piece.growth) > 0:
            path = [(piece.t,
                    (piece.begin + piece.end) / 2)]
            for older_version in reversed(piece.growth):
                if self.args.unfold == FORWARD or self._cursor_t < older_version.t:
                    path.append((older_version.t,
                                 (older_version.begin + older_version.end) / 2))
            self.draw_path(piece, path)
            self._update_and_draw_node(piece, path[-1][0], path[-1][1])

        for parent in piece.parents.values():
            if self.args.unfold == FORWARD or self._cursor_t < parent.t:
                self._connect_generations(parent, piece, child)
                self._follow_piece(parent, piece)
            else:
                if self.args.unfold == BACKWARD:
                    t = self._cursor_t - pow(self._cursor_t - parent.t, 0.7)
                else:
                    t = self._cursor_t
                self._connect_generations(parent, piece, child, t)
                self._update_and_draw_node(parent, t, (parent.begin + parent.end) / 2)

    def _rect_position(self, t, byte_pos):
        x = float(byte_pos) / self._total_size * self._width
        y = (1 - t / self._duration) * self._height
        return Vector2d(x, y)

    def _circle_position(self, t, byte_pos):
        angle = float(byte_pos) / self._total_size * 2*math.pi
        rel_t = 1 - t / self._duration
        px = rel_t * math.cos(angle)
        py = rel_t * math.sin(angle)
        x = self._width / 2 + (px * self._zoom) * self._width / 2
        y = self._height / 2 + (py * self._zoom) * self._height / 2
        if self._autozoom:
            self._max_pxy = max([self._max_pxy, abs(px), abs(py)])
        return Vector2d(x, y)

    def draw_path(self, piece, points):
        if self.args.sway:
            piece_sway_magnitude = self._sway_magnitude(piece)
        glBegin(GL_LINE_STRIP)
        n = 0
        for (t, b) in points:
            x, y = self._position(t, b)
            if self.args.sway:
                magnitude = (1 - float(n) / len(points)) * piece_sway_magnitude
                x += piece.sway.sway.x * magnitude * self._size
                y += piece.sway.sway.y * magnitude * self._size
                n += 1
            glVertex2f(x, y)
        glEnd()

    def draw_curve(self, x1, y1, x2, y2):
        control_points = [
            Vector2d(x1, y1),
            Vector2d(x1 + (x2 - x1) * 0.3, y1),
            Vector2d(x1 + (x2 - x1) * 0.7, y2),
            Vector2d(x2, y2)
            ]
        bezier = make_bezier([(p.x, p.y) for p in control_points])
        points = bezier(CURVE_PRECISION)
        glBegin(GL_LINE_STRIP)
        for x, y in points:
            glVertex2f(x, y)
        glEnd()

    def draw_line(self, x1, y1, x2, y2):
        glBegin(GL_LINES)
        glVertex2f(x1, y1)
        glVertex2f(x2, y2)
        glEnd()

    def _update_and_draw_node(self, piece, t, b):
        if self.args.sway:
            self._update_sway(piece)
        if self._node_plot_method:
            self._node_plot_method(piece, t, b)

    def _update_sway(self, piece):
        if not hasattr(piece, "sway"):
            piece.sway = Sway(self.args.sway_magnitude)
        piece.sway.update(self.time_increment)

    def _draw_node_circle(self, piece, t, b):
        age = self._age(piece)
        size = self._node_size(age)
        cx, cy = self._position(t, b)
        if self.args.sway:
            piece_sway_magnitude = self._sway_magnitude(piece)
            cx += piece.sway.sway.x * piece_sway_magnitude * self._size
            cy += piece.sway.sway.y * piece_sway_magnitude * self._size
        glPushMatrix()
        glTranslatef(cx, cy, 0)
        glCallList(self._node_circle_lists[size])
        glPopMatrix()

    def _age(self, piece):
        try:
            appearance_time = piece.appearance_time
        except AttributeError:
            appearance_time = piece.appearance_time = self._adjusted_current_time()
        return self._adjusted_current_time() - appearance_time

    def _node_size(self, age):
        if self._node_size_envelope:
            return int(self._node_size_envelope.value(age) * (NODE_SIZE_PRECISION-1))
        else:
            return NODE_SIZE_PRECISION-1

    def _sway_magnitude(self, piece):
        age = self._age(piece)
        if self._sway_envelope:
            return self._sway_envelope.value(age)
        else:
            return 1

    def _render_node_circle(self, cx, cy, size):
        # glColor3f(0,0,0)
        # self._render_filled_circle(cx, cy, size)
        # glColor3f(1,1,1)
        # self._render_circle_outline(cx, cy, size)

        # glColor3f(1,1,1)
        # self._render_filled_circle(cx, cy, size)

        glColor3f(1,1,1)
        glEnable(GL_POINT_SMOOTH)
        radius = max(self.args.node_size * self.width * size / (NODE_SIZE_PRECISION-1), 0.1)
        glPointSize(radius * 2)
        glBegin(GL_POINTS)
        glVertex2f(cx, cy)
        glEnd()

    def _render_filled_circle(self, cx, cy, size):
        glBegin(GL_TRIANGLE_FAN)
        glVertex2f(cx, cy)
        angle = 0
        radius = self.args.node_size * self.width * size / (NODE_SIZE_PRECISION-1)
        while angle < 2*math.pi:
            x = cx + math.cos(angle) * radius
            y = cy + math.sin(angle) * radius
            glVertex2f(x, y)
            angle += 0.1
        x = cx + math.cos(0) * radius
        y = cy + math.sin(0) * radius
        glVertex2f(x, y)
        glEnd()

    def _render_circle_outline(self, cx, cy, size):
        glBegin(GL_LINE_STRIP)
        angle = 0
        radius = self.args.node_size * self.width * size / (NODE_SIZE_PRECISION-1)
        while angle < 2*math.pi:
            x = cx + math.cos(angle) * radius
            y = cy + math.sin(angle) * radius
            glVertex2f(x, y)
            angle += 0.1
        glEnd()
Example #20
class Table(Base):
    """
    Tabular distribution elicitation.

    Parameters and attributes
    -------------------------
    id : str, default
        Distribution identifier.

    bins : list of scalars, default=[0, .25, .5, .75, 1]
        List of 'break points' for the bins. The first bin starts at 
        `bins[0]`. The last bin ends at `bins[-1]`.

    prob : list of scalars, default=[.25, .25, .25, .25]
        Probability density function. This is the amount of probability mass
        in each bin. Must sum to 1 and `len(prob)` must be `len(bins)-1`.

    datatable : dict, default={}
        Keyword arguments for the datatable associated with the table
        distribution. See <https://dash.plotly.com/datatable>.

    row_addable : bool, default=False
        Indicates whether the forecaster can add rows.

    scalable : bool, default=False
        Provides a scaling function for the table bins.

    smoother : bool, default=False
        Indicates whether to use a smoother for interpolation. See 
        <https://dsbowen.github.io/smoother/>.

    \*args, \*\*kwargs : 
        Arguments and keyword arguments passed to `super().__init__`.
    """
    def __init__(
            self, 
            id, 
            bins=[0, .25, .5, .75, 1], 
            prob=[.25, .25, .25, .25],
            editable_cols=['bin-start', 'bin-end', 'pdf', 'cdf'], 
            datatable={},
            row_addable=False,
            scalable=False,
            smoother=False, 
            *args, **kwargs
        ):
        super().__init__(id, *args, **kwargs)
        self.bins = bins
        self.prob = prob
        self.editable_cols = editable_cols
        self.datatable = datatable
        self.row_addable = row_addable
        self.scalable = scalable
        self.smoother = smoother
        # underlying distribution if using smoother
        self._dist = Smoother()

    def to_plotly_json(self):
        return {
            'props': {
                'children': self.elicitation(
                    self.bins, 
                    self.prob, 
                    self.editable_cols,
                    self.datatable, 
                    self.row_addable,
                    self.scalable
                )
            },
            'type': 'Div',
            'namespace': 'dash_html_components'
        }

    def elicitation(
            self, 
            bins=[0, .25, .5, .75, 1], 
            prob=[.25, .25, .25, .25], 
            editable_cols=['bin-start', 'bin-end', 'pdf', 'cdf'],
            datatable={}, 
            row_addable=False,
            scalable=False
        ):
        """
        Parameters
        ----------
        bins : list of scalars or numpy.array, default=[0, .25, .5, .75, 1]

        prob : list of scalars or numpy.array, default=[.25, .25, .25, .25]

        datatable : dict, default={}

        row_addable : bool, default=False

        scalable : bool, default=False

        Returns
        -------
        elicitation elements : list of dash elements
            Dash elements used to elicit the distribution.
        """
        def gen_formgroup(label, type, value):
            id = self.get_id(self.id, type)
            return dbc.FormGroup([
                dbc.Label(label, html_for=id, width=6),
                dbc.Col([
                    dbc.Input(
                        id=id, 
                        value=value,
                        type='number', 
                        style={'text-align': 'right'}
                    )
                ], width=6)
            ], row=True)

        return [
            # hidden state div
            html.Div(
                self.dump(),
                id=self.get_id(self.id, 'state'),
                style={'display': 'none'}
            ),
            html.Div([
                gen_formgroup('Lower bound', 'lb', self.bins[0]),
                gen_formgroup('Upper bound', 'ub', self.bins[-1]),
                dbc.Button(
                    'Rescale',
                    id=self.get_id(self.id, 'rescale'),
                    color='primary',
                    style={'margin-bottom': '1em'}
                ),
            ], style={} if scalable else {'display': 'none'}),
            dash_table.DataTable(
                id=self.get_id(self.id, 'table'),
                columns=self.get_columns(editable_cols),
                data=self.get_data(bins, prob),
                **datatable
            ),
            html.Div([
                html.Br(),
                dbc.Button(
                    'Add row',
                    id=self.get_id(self.id, 'row-add'),
                    color='primary',
                )
            ], style={} if self.row_addable else {'display': 'none'})
        ]

    def get_columns(
            self, editable_cols=['bin-start', 'bin-end', 'pdf', 'cdf']
        ):
        """
        Returns
        -------
        columns : list of dict
            List of dictionaries specifying the datatable columns. See
            <https://dash.plotly.com/datatable>.
        """
        format = Format(scheme=Scheme.fixed, precision=2)
        cols = [
            {
                'id': 'bin-start', 
                'name': 'Bin start', 
                'type': 'numeric'
            },
            {
                'id': 'bin-end', 
                'name': 'Bin end', 
                'type': 'numeric'
            },
            {
                'id': 'pdf', 
                'name': 'Probability', 
                'type': 'numeric',
                'format': format
            },
            {
                'id': 'cdf', 
                'name': 'Probability (cum)', 
                'type': 'numeric',
                'format': format
            }
        ]
        for col in cols:
            col['editable'] = col['id'] in editable_cols
        return cols

    def get_data(self, bins=None, prob=None):
        """
        Parameters
        ----------
        bins : list of scalars or numpy.array or None, default=None
            If `None`, use `self.bins`.

        prob : list of scalars or numpy.array or None, default=None
            If `None`, use `self.prob`.

        Returns
        -------
        records : list of dict
            Datatable data in records format.
        """
        def get_record(bin_start, bin_end, pdf_i, cdf_i):
            return {
                'bin-start': bin_start, 
                'bin-end': bin_end, 
                'pdf': 100*pdf_i,
                'cdf': 100*cdf_i
            }

        bins = self.bins if bins is None else bins
        pdf = self.prob if prob is None else prob
        cdf = np.cumsum(pdf)
        assert len(bins)-1 == len(pdf)
        return [
            get_record(*args) for args in zip(bins[:-1], bins[1:], pdf, cdf)
        ]

    @classmethod
    def register_callbacks(cls, app):
        """
        Register dash callbacks for table distributions.

        Parameters
        ----------
        app : dash.Dash
            App with which to register callbacks.
        """
        @app.callback(
            [
                Output(cls.get_id(MATCH, 'state'), 'children'),
                Output(cls.get_id(MATCH, 'table'), 'data')
            ],
            [
                Input(cls.get_id(MATCH, 'table'), 'data_timestamp'),
                Input(cls.get_id(MATCH, 'rescale'), 'n_clicks'),
                Input(cls.get_id(MATCH, 'row-add'), 'n_clicks')
            ],
            [
                State(cls.get_id(MATCH, 'state'), 'children'),
                State(cls.get_id(MATCH, 'lb'), 'value'),
                State(cls.get_id(MATCH, 'ub'), 'value'),
                State(cls.get_id(MATCH, 'table'), 'data')
            ]
        )
        def update_table_state(
                _, rescale, add_row, table_state, lb, ub, data
            ):
            trigger_ids = get_trigger_ids(dash.callback_context)
            table = cls.load(table_state)
            table._handle_rescale(rescale, lb, ub, trigger_ids)
            table._handle_data_updates(data, trigger_ids)
            table._handle_row_add(add_row, trigger_ids)
            return table.dump(), table.get_data()

    def fit(self, bins=None, prob=None, derivative=2):
        """
        Fit the smoother given mass constraints.

        Parameters
        ----------
        bins : list of scalars or numpy.array
            Ordered list of bin break points. If `None`, use `self.bins`.

        prob : list of scalars or numpy.array
            Probability density function. This is the amount of probability mass
            in each bin. Must sum to 1 and `len(prob)` should be `len(bins)-1`.
            If `None`, use `self.prob`.

        derivative : int, default=2
            Derivative of the smoothing function to penalize, e.g. `2` means
            the smoother will minimize the mean square second derivative.

        Returns
        -------
        self
        """
        bins = np.array(self.bins if bins is None else bins)
        pdf = self.prob if prob is None else prob
        # 0-1 scaling; ensures consistent smoother fitting at different scales
        loc, scale = bins[0], bins[-1] - bins[0]
        bins = (bins - loc) / scale
        # fit smoother
        params = zip(bins[:-1], bins[1:], pdf)
        self._dist.fit(
            0, 1, [MassConstraint(lb, ub, mass) for lb, ub, mass in params],
            DerivativeObjective(derivative)
        )
        # restore to original scale
        self._dist.x = scale * self._dist.x + loc
        return self

    def dump(self):
        """
        Dump the table distribution state dictionary in JSON format.

        Returns
        -------
        state : dict, JSON
        """
        return json.dumps({
            'cls': self.__class__.__name__,
            'id': self.id,
            'bins': self.bins,
            'prob': self.prob,
            'datatable': self.datatable,
            'editable_cols': self.editable_cols,
            'row_addable': self.row_addable,
            'scalable': self.scalable,
            'smoother': self.smoother,
            'x': list(self._dist.x),
            '_f_x': list(self._dist._f_x)
        })

    @classmethod
    def load(cls, state_dict):
        """
        Load a table distribution from its state dictionary.

        Parameters
        ----------
        state_dict : dict
            Output of `Table.dump`.

        Returns
        -------
        table : `Table`
        """
        state = json.loads(state_dict)
        table = cls(
            id=state['id'],
            bins=state['bins'],
            prob=state['prob'],
            datatable=state['datatable'],
            editable_cols=state['editable_cols'],
            row_addable=state['row_addable'],
            scalable=state['scalable'],
            smoother=state['smoother']
        )
        table._dist.x = np.array(state['x'])
        table._dist._f_x = np.array(state['_f_x'])
        return table

    def _handle_rescale(self, rescale, lb, ub, trigger_ids):
        """
        Helper method for the callback that rescales the table bins.
        """
        def rescale_f(x):
            x = np.array(x)
            return (ub-lb) * (x-curr_lb) / (curr_ub - curr_lb) + lb

        if rescale and self.get_id(self.id, 'rescale') in trigger_ids:
            curr_lb, curr_ub = self.bins[0], self.bins[-1]
            self.bins = list(rescale_f(self.bins))
            self._dist.x = rescale_f(self._dist.x)

    def _handle_data_updates(self, data, trigger_ids):
        """
        Helper method for the table-state callback; handles updates to the
        data in the datatable.
        """
        def handle_row_delete():
            """
            Handle a row being deleted.
            """
            i = get_deleted_row(data, prev_data)
            pdf_i = self.prob.pop(i)
            if i < len(self.prob):
                self.prob[i] += pdf_i
            handle_bin_update()

        def handle_data_update():
            """
            Handle data updates.
            """
            # Strictly speaking, it should be sufficient to handle updates for
            # only the changed column. But it's often useful to check that the
            # columns are consistent because of asynchronous updating.
            _, changed_col = get_changed_cell(data, prev_data)
            handle_bin_update(end_updated=changed_col=='bin-end')
            if changed_col == 'pdf':
                self.prob = [d['pdf']/100. for d in data]
            else:
                cdf = np.insert([d['cdf'] for d in data], 0, 0)
                self.prob = list(np.diff(cdf)/100.)

        def handle_bin_update(end_updated=True):
            """
            Handle bin updates.
            """
            bin_start = [d['bin-start'] for d in data]
            bin_end = [d['bin-end'] for d in data]
            self.bins = (
                bin_start[:1] + bin_end if end_updated 
                else bin_start + bin_end[-1:]
            )

        if self.get_id(self.id, 'table') not in trigger_ids:
            return
        prev_data = self.get_data()
        if len(data) < len(prev_data):
            handle_row_delete()
        else:
            handle_data_update()
        if self.smoother:
            try:
                self.fit()
            except:
                pass
        return self

    def _handle_row_add(self, add_row, trigger_ids):
        """
        Helper method for the table-state callback; handles adding rows.
        """
        if add_row and self.get_id(self.id, 'row-add') in trigger_ids:
            self.bins.append(self.bins[-1])
            self.prob.append(0)

    def pdf(self, x):
        if self.smoother:
            return self._dist.pdf(x)
        if x < self.bins[0] or self.bins[-1] <= x:
            return 0
        params = zip(self.bins[:-1], self.bins[1:], self.prob)
        for bin_start, bin_end, pdf in params:
            if bin_start < x <= bin_end:
                return pdf / (bin_end - bin_start)

    def cdf(self, x):
        if self.smoother:
            return self._dist.cdf(x)
        if x <= self.bins[0]:
            return 0
        if x >= self.bins[-1]:
            return 1
        cdf = 0
        params = zip(self.bins[:-1], self.bins[1:], self.prob)
        for bin_start, bin_end, pdf in params:
            if bin_start < x <= bin_end:
                return cdf + pdf * (x-bin_start) / (bin_end - bin_start)
            cdf += pdf

    def pdf_plot(self, **kwargs):
        """
        Parameters
        ----------
        \*\*kwargs :
            Keyword arguments for `go.Scatter`.

        Returns
        -------
        scatter : go.Scatter.
            Scatter plot of the pdf.
        """
        name = kwargs.pop('name', self.id)
        if self.smoother:
            return go.Scatter(
                x=self._dist.x, y=self._dist.f_x, name=name, **kwargs
            )
        heights = np.array(self.prob) / np.diff(self.bins)
        x, y = [self.bins[0]], [heights[0]]
        values = zip(self.bins[1:], heights[:-1], heights[1:])
        for x_i, height_prev, height_curr in values:
            x += [x_i, x_i]
            y += [height_prev, height_curr]
        x.append(self.bins[-1])
        y.append(heights[-1])
        return go.Scatter(x=x, y=y, name=name, **kwargs)

    def cdf_plot(self, **kwargs):
        """
        Parameters
        ----------
        \*\*kwargs :
            Keyword arguments for `go.Scatter`.

        Returns
        -------
        scatter : go.Scatter
            Scatter plot of the cdf.
        """
        name = kwargs.pop('name', self.id)
        if self.smoother:
            return go.Scatter(
                x=self._dist.x, y=self._dist.F_x, name=name, **kwargs
            )
        F_x = np.insert(np.cumsum(self.prob), 0, 0)
        return go.Scatter(x=self.bins, y=F_x, name=name, **kwargs)

    def bar_plot(self, **kwargs):
        """
        Parameters
        ----------
        \*\*kwargs :
            Keyword arguments for `go.Bar`.

        Returns
        -------
        bar plot : go.Bar
            Bar plot of the pdf in the datatable.
        """
        name = kwargs.pop('name', self.id)
        return go.Bar(
            x=(np.array(self.bins[1:]) + np.array(self.bins[:-1])) / 2.,
            y=np.array(self.prob) / np.diff(self.bins),
            width=np.diff(self.bins),
            name=name,
            **kwargs
        )
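
For reference, when the smoother is disabled the Table treats prob[i] as the probability mass in the interval [bins[i], bins[i+1]], i.e. a piecewise-uniform distribution. Below is a minimal standalone sketch of that pdf/cdf logic using the constructor defaults; it mirrors, but is not copied from, Table.pdf and Table.cdf:

bins = [0, .25, .5, .75, 1]   # bin break points (Table defaults)
prob = [.25, .25, .25, .25]   # probability mass per bin, sums to 1

def pdf(x):
    # piecewise-uniform density: mass / bin width inside a bin, 0 outside
    for lo, hi, mass in zip(bins[:-1], bins[1:], prob):
        if lo <= x < hi:
            return mass / (hi - lo)
    return 0.0

def cdf(x):
    # accumulate the mass of completed bins plus a linear share of the current bin
    total = 0.0
    for lo, hi, mass in zip(bins[:-1], bins[1:], prob):
        if x >= hi:
            total += mass
        elif x > lo:
            total += mass * (x - lo) / (hi - lo)
    return min(total, 1.0)

print(pdf(0.3), cdf(0.3))  # 1.0 0.3 (these defaults amount to a uniform distribution on [0, 1])
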
Example #21
 def test_vectorize_origin(self):
     s = Smoother([[0, 1], [1, 0]])
     s.smooth()
     self.assertEqual(s.vectorize(10, -10), [[(10, 3), (23, -10), (30, -10),
                                              (30, -3), (17, 10),
                                              (10, 10)]])
Example #22
 def vectorize(self, smooth=True):
     s = Smoother(self._bitmap())
     if smooth:
         s.smooth()
     return s.vectorize(MARGIN, -self.font.bdf[b'FONT_DESCENT'] * SCALE)
Example #23
 def __init__(self, *args):
     waves.Segment.__init__(self, *args)
     heat_map.Segment.__init__(self, *args)
     self._amp_smoother = Smoother(response_factor=2.5)
Example #24
def get_smoothed_sent_dist(address):
    sent_dist = sent_distributions.find_one({'address': address})
    smitty = Smoother(sent_dist['sent_distribution'], 'total')
    smitty.smooth()
    smoothed_dist = smitty.to_objects()
    return smoothed_dist
Example #25
 def test_draw_white(self):
     s = Smoother([[0]])
     pb = PathBuilder()
     s._draw_white(pb, 20, 30, NW)
     pb.optimize()
     self.assertEqual(pb.generate_paths(), [[(20, 30), (27, 30), (20, 37)]])
Example #26
def get_smoothed_sent_dist(address):
  sent_dist = sent_distributions.find_one({'address': address})
  smitty = Smoother(sent_dist['sent_distribution'], 'total')
  smitty.smooth()
  smoothed_dist = smitty.to_objects()
  return smoothed_dist
Example #27
    def test_smooth(self):
        s1 = Smoother([[0, 1], [1, 0]])
        s1.smooth()
        self.assertEqual(s1._bmp,
                         [[0 | SE, 1 | NW | SE], [1 | NW | SE, 0 | NW]])

        s2 = Smoother([[1, 0, 0], [1, 0, 0], [1, 1, 1]])
        s2.smooth()
        self.assertEqual(s2._bmp, [[1, 0, 0], [1, 0, 0], [1, 1, 1]])

        s3 = Smoother([[0, 1, 1, 0], [1, 0, 0, 1], [1, 0, 0, 1], [0, 1, 1, 0]])
        s3.smooth()
        self.assertEqual(s3._bmp, [[0 | SE, 1 | NW, 1 | NE, 0 | SW],
                                   [1 | NW, 0 | NW, 0 | NE, 1 | NE],
                                   [1 | SW, 0 | SW, 0 | SE, 1 | SE],
                                   [0 | NE, 1 | SW, 1 | SE, 0 | NW]])
Example #28
class Ancestry(visualizer.Visualizer, AncestryPlotter):
    def __init__(self, tr_log, pieces, args):
        visualizer.Visualizer.__init__(self, args)
        AncestryPlotter.__init__(self, tr_log.total_file_size(), tr_log.lastchunktime(), args)
        self._unfold_function = getattr(self, "_unfold_%s" % args.unfold)

        # keep the pieces so _pre_render() can iterate over them
        self._pieces = pieces
        self._pre_render()

        self._autozoom = (args.geometry == CIRCLE and self.args.autozoom)
        if self._autozoom:
            self._max_pxy = 0
            self._zoom_smoother = Smoother()

    @staticmethod
    def add_parser_arguments(parser):
        AncestryPlotter.add_parser_arguments(parser)
        visualizer.Visualizer.add_parser_arguments(parser)
        parser.add_argument("-z", dest="timefactor", type=float, default=1.0)

    def _pre_render(self):
        print "pre-rendering..."
        self._timeline = []
        for piece in pieces:
            self.add_piece(piece["id"], piece["t"], piece["begin"], piece["end"])
            frame = {"t": piece["t"],
                     "tracker": copy.deepcopy(self._tracker),
                     "num_pieces": self._num_pieces}
            self._timeline.append(frame)
        print "ok"

    def InitGL(self):
        visualizer.Visualizer.InitGL(self)
        glClearColor(0.0, 0.0, 0.0, 0.0)

    def ReSizeGLScene(self, width, height):
        visualizer.Visualizer.ReSizeGLScene(self, width, height)
        self._size = min(width, height) - 2*MARGIN
        AncestryPlotter.set_size(self, self._size, self._size)

    def render(self):
        glTranslatef(MARGIN + (self.width - self._size)/2, MARGIN, 0)
        glLineWidth(LINE_WIDTH * self.width)
        glEnable(GL_LINE_SMOOTH)
        glHint(GL_LINE_SMOOTH_HINT, GL_NICEST)
        glEnable(GL_BLEND)
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
        glColor3f(1,1,1)

        self._unfold_function(self.current_time() * self.args.timefactor)
        frame = self._frame_at_cursor()
        self._activate_frame(frame)

        if self._autozoom:
            self._zoom = self._zoom_smoother.value()
            if self._zoom is None:
                self._zoom = 0.0
        else:
            self._zoom = 1.0

        self.plot()

        if self._autozoom:
            if self._max_pxy == 0:
                zoom = 0.5
            else:
                zoom = 0.5 + self._cursor_t / self._duration * 0.5 / self._max_pxy
            self._zoom_smoother.smooth(zoom, self.time_increment)
        
    def _unfold_backward(self, t):
        self._cursor_t = self._duration - t % self._duration

    def _unfold_forward(self, t):
        self._cursor_t = t % self._duration

    def _frame_at_cursor(self):
        for frame in self._timeline:
            if frame["t"] >= self._cursor_t:
                return frame
        return self._timeline[-1]

    def _activate_frame(self, frame):
        self._num_pieces = frame["num_pieces"]
        self._tracker = frame["tracker"]

    # def _follow_piece(self, piece):
    #     if len(piece.growth) > 0:
    #         path = [(piece.t,
    #                 (piece.begin + piece.end) / 2)]
    #         for older_version in reversed(piece.growth):
    #             path.append((older_version.t,
    #                          (older_version.begin + older_version.end) / 2))
    #         self.draw_path(path)

    #     for parent in piece.parents.values():
    #         if self.min_t < parent.t < self.max_t:
    #             self._connect_child_and_parent(
    #                 piece.t, (piece.begin + piece.end) / 2,
    #                 parent.t, (parent.begin + parent.end) / 2)
    #             self._follow_piece(parent)
    #         else:
    #             if self.args.unfold == BACKWARD:
    #                 t = self.cursor_t - pow(self.cursor_t - parent.t, 0.7)
    #             else:
    #                 t = self.cursor_t
    #             self._connect_child_and_parent(
    #                 piece.t, (piece.begin + piece.end) / 2,
    #                 t, (parent.begin + parent.end) / 2)

    def _rect_position(self, t, byte_pos):
        x = float(byte_pos) / self._total_size * self._width
        y = (1 - t / self._duration) * self._height
        return x, y

    def _circle_position(self, t, byte_pos):
        angle = float(byte_pos) / self._total_size * 2*math.pi
        rel_t = 1 - t / self._duration
        px = rel_t * math.cos(angle)
        py = rel_t * math.sin(angle)
        x = self._width / 2 + (px * self._zoom) * self._width / 2
        y = self._height / 2 + (py * self._zoom) * self._height / 2
        if self._autozoom:
            self._max_pxy = max([self._max_pxy, abs(px), abs(py)])
        return x, y

    def draw_path(self, points):
        glBegin(GL_LINE_STRIP)
        for (t, b) in points:
            x, y = self._position(t, b)
            glVertex2f(x, y)
        glEnd()

    def draw_curve(self, x1, y1, x2, y2):
        control_points = [
            Vector2d(x1, y1),
            Vector2d(x1 + (x2 - x1) * 0.3, y1),
            Vector2d(x1 + (x2 - x1) * 0.7, y2),
            Vector2d(x2, y2)
            ]
        bezier = make_bezier([(p.x, p.y) for p in control_points])
        points = bezier(CURVE_PRECISION)
        glBegin(GL_LINE_STRIP)
        for x, y in points:
            glVertex2f(x, y)
        glEnd()
Example #29
 def test_vectorize(self):
     s = Smoother([[0, 1], [1, 0]])
     s.smooth()
     self.assertEqual(s.vectorize(), [[(0, 13), (13, 0), (20, 0), (20, 7),
                                       (7, 20), (0, 20)]])
Example #30
 def __init__(self, priority, limit_rate_model, proxy_model):
     OriginalLimiter.__init__(self, priority, limit_rate_model, proxy_model)
     self.smooth_released = Smoother(2)
     self.smooth_rate_limit = Smoother(2)
     self.rate_set = False