def iter_chunks(l, size):
    """
    Yield chunks of *size* elements from *l*, which may be a list (or any
    sliceable sequence), an integer (interpreted as ``range(l)``), or a
    generator.  A *size* smaller than 1 results in no chunking at all: the
    whole input is yielded as a single chunk.
    """
    if isinstance(l, int):
        # Materialize so the chunks yielded below are plain lists.
        l = list(range(l))

    if isinstance(l, types.GeneratorType):
        if size < 1:
            yield list(l)
        else:
            chunk = []
            for elem in l:
                chunk.append(elem)
                # BUG FIX: the old code yielded the full chunk and then
                # discarded *elem*, silently dropping every (size+1)-th
                # element.  Append first, flush when the chunk is full.
                if len(chunk) == size:
                    yield chunk
                    chunk = []
            if chunk:
                yield chunk

    else:
        if size < 1:
            yield l
        else:
            for i in range(0, len(l), size):
                yield l[i:i + size]
Пример #2
0
def test_simple_api(cluster):
    """Smoke-test the routing client: batched writes inside ``client.map()``,
    direct reads, promise results collected from a map block, and batched
    deletes (fixes nonexistent ``six.range`` -> ``range``)."""
    client = cluster.get_routing_client()
    with client.map() as map_client:
        for x in range(10):
            map_client.set('key:%d' % x, x)
            map_client.zadd('zset:%d' % x, **dict(make_zset_data(x)))

    for x in range(10):
        assert client.get('key:%d' % x) == str(x)
        assert client.zrange('zset:%d' % x, 0, -1,
                             withscores=True) == make_zset_data(x)

    results = []  # (promise, expected result)
    with client.map() as map_client:
        for x in range(10):
            results.append((
                map_client.zrange('zset:%d' % x, 0, -1, withscores=True),
                make_zset_data(x),
            ))

    # Promises resolve when the map block exits.
    for promise, expectation in results:
        assert promise.value == expectation

    with client.map() as map_client:
        for x in range(10):
            map_client.delete('key:%d' % x)

    for x in range(10):
        assert client.get('key:%d' % x) is None
Пример #3
0
def test_promise_api(cluster):
    """Verify that ``.then()`` callbacks fire for promises created inside a
    ``cluster.map()`` block and receive the raw (string) reply
    (fixes nonexistent ``six.range`` -> ``range``)."""
    results = []
    with cluster.map() as client:
        for x in range(10):
            client.set('key-%d' % x, x)
        for x in range(10):
            client.get('key-%d' % x).then(lambda x: results.append(int(x)))
    assert sorted(results) == list(range(10))
Пример #4
0
def test_simple_api(cluster, poll, monkeypatch):
    """Smoke-test the routing client with the poll implementation patched in
    (fixes nonexistent ``six.range`` -> ``range``)."""
    monkeypatch.setattr(clients, 'poll', poll)

    client = cluster.get_routing_client()
    with client.map() as map_client:
        for x in range(10):
            map_client.set('key:%s' % x, x)

    for x in range(10):
        assert client.get('key:%d' % x) == str(x)
Пример #5
0
def test_basic_cluster_disabled_batch(cluster):
    """With ``auto_batch=False`` every command is an individual promise; the
    sum of all fetched values must equal the sum written
    (fixes nonexistent ``six.range`` -> ``range``)."""
    iterations = 10000

    with cluster.map(auto_batch=False) as client:
        for x in range(iterations):
            client.set('key-%06d' % x, x)
    responses = []
    with cluster.map(auto_batch=False) as client:
        for x in range(iterations):
            responses.append(client.get('key-%06d' % x))
    ref_sum = sum(int(x.value) for x in responses)
    assert ref_sum == sum(range(iterations))
Пример #6
0
def test_reconnect(cluster):
    """Force server-side idle disconnects (timeout=1s, then sleep 2s) and
    check that mapped reads transparently reconnect
    (fixes nonexistent ``six.range`` -> ``range``)."""
    with cluster.map() as client:
        for x in range(10):
            client.set(str(x), str(x))

    # Make the servers drop idle connections almost immediately.
    with cluster.all() as client:
        client.config_set('timeout', 1)

    time.sleep(2)

    with cluster.map() as client:
        rv = Promise.all([client.get(str(x)) for x in range(10)])

    assert rv.value == list(map(str, range(10)))
Пример #7
0
    def create_block(self, val):
        """Create a solid monochrome box

        Create a box of size `__box_size x __box_size`.

        Args:
            val:
                Binary input. If `True` or non-0, creates a box made of `fg`
                pixels, otherwise made of `bg` pixels

        Returns:
            Monochrome box (nested list of rows)
        """
        # Fixes nonexistent ``six.range`` -> ``range``.
        # NOTE(review): assumes get_box_size() is a pure getter -- hoisted
        # out of the comprehension; confirm it has no side effects.
        size = self.get_box_size()
        pixel = self.fg if val else self.bg
        return [[pixel] * size for _ in range(size)]
Пример #8
0
 def _read_count_and_string_table(cls, bs):
     """Read an 8-bit count from *bs*, then that many strings.

     Args:
         bs: bitstream positioned at the count byte.

     Returns:
         List of the strings read (fixes nonexistent ``six.range``).
     """
     result = []
     entry_count = bs.read("uint:8")
     for _ in range(entry_count):
         result.append(cls._read_string(bs))
     return result
Пример #9
0
def train():
    """Train the network from ``create_network`` on flattened images.

    Builds a nuts-ml style pipeline (``>>`` chains processing stages):
    pixel values are reranged from 0..255 to 0..1 floats, images are
    flattened to vectors, batched, and fed to the network.  Per-epoch
    train loss/accuracy are printed and plotted, test accuracy is
    evaluated each epoch, and the best model (by test accuracy) is saved.
    """
    from keras.metrics import categorical_accuracy

    # Preprocessing stages for element 0 (the image) of each sample.
    TransformImage.register('flatten', lambda img: img.flatten())
    transform = (TransformImage(0)
                 .by('rerange', 0, 255, 0, 1, 'float32')
                 .by('flatten'))
    # Batches: element 0 as float vectors, element 1 one-hot encoded labels.
    build_batch = (BuildBatch(BATCH_SIZE)
                   .by(0, 'vector', 'float32')
                   .by(1, 'one_hot', 'uint8', NUM_CLASSES))
    plot = PlotLines((0, 1), layout=(2, 1), every_sec=1)

    print('loading data...')
    train_samples, test_samples = load_samples()

    print('creating network ...')
    network = create_network()

    print('training...', NUM_EPOCHS)
    for epoch in range(NUM_EPOCHS):
        print('EPOCH:', epoch)

        # network.train() yields (loss, acc) per batch; Unzip splits the
        # pairs into two sequences.
        t_loss, t_acc = (train_samples >> PrintProgress(train_samples) >>
                         transform >> build_batch >>
                         network.train() >> plot >> Unzip())
        print('train loss : {:.6f}'.format(t_loss >> Mean()))
        print('train acc  : {:.1f}'.format(100 * (t_acc >> Mean())))

        e_acc = (test_samples >> transform >> build_batch >>
                 network.evaluate([categorical_accuracy]))
        print('test acc   : {:.1f}'.format(100 * e_acc))

        # isloss=False: higher accuracy is better when picking "best".
        network.save_best(e_acc, isloss=False)
Пример #10
0
    def _parse_asrt(cls, box_bs):
        """Parse asrt / Segment Run Table Box

        Args:
            box_bs: bitstream positioned at the box header.

        Returns:
            Populated ``SegmentRunTable`` instance
            (fixes nonexistent ``six.range`` -> ``range``).
        """
        asrt = SegmentRunTable()
        asrt.header = cls._read_box_header(box_bs)
        # read the entire box in case there's padding
        asrt_bs_box = box_bs.read(asrt.header.box_size * 8)

        # skip version byte
        asrt_bs_box.pos += 8
        update_flag = asrt_bs_box.read("uint:24")
        asrt.update = update_flag == 1

        asrt.quality_segment_url_modifiers = cls._read_count_and_string_table(
            asrt_bs_box)

        asrt.segment_run_table_entries = []
        segment_count = asrt_bs_box.read("uint:32")

        for _ in range(segment_count):
            first_segment = asrt_bs_box.read("uint:32")
            fragments_per_segment = asrt_bs_box.read("uint:32")
            asrt.segment_run_table_entries.append(
                SegmentRunTable.SegmentRunTableEntry(
                    first_segment=first_segment,
                    fragments_per_segment=fragments_per_segment))
        return asrt
Пример #11
0
def assert_gapless_hosts(hosts):
    """Validate that *hosts* maps consecutive integer IDs ``0..len-1``.

    Args:
        hosts: mapping of integer host IDs to host objects.

    Raises:
        BadHostSetup: if *hosts* is empty or any ID in the range is missing
            (fixes nonexistent ``six.range`` -> ``range``).
    """
    if not hosts:
        raise BadHostSetup('No hosts were configured.')
    for x in range(len(hosts)):
        if hosts.get(x) is None:
            raise BadHostSetup('Expected host with ID "%d" but no such '
                               'host was found.' % x)
Пример #12
0
 def fit_slopes_to_facet_lines(self,
                               polynomial_degree=4,
                               curvature_threshold=0.0004):
     """
     Fits (linear) lines of best fit to extracted profiles, already stored as
     class properties.

     Args:
         polynomial_degree: degree of the polynomial fitted to each profile.
         curvature_threshold: |curvature| below which a point counts as part
             of the "flat" section of the profile.

     Fixes nonexistent ``six.range`` -> ``range``; results are stored on
     ``self`` as numpy arrays (one value per profile).
     """
     avg_slopes_linear = []
     avg_slopes_poly = []
     curv_of_flattest_part_list = []
     slope_min_curv = []
     rsqd_list = []
     big_slope_small_curv = []
     elev_at_bssc = []
     for i in range(len(self.profile_x_facet_pts)):
         x = self.profile_x_facet_pts[i]
         z = self.profile_z_facet_pts[i]
         (grad, offset) = np.polyfit(x, z, 1)
         coeffs, residuals = np.polyfit(x, z, polynomial_degree,
                                        full=True)[:2]
         rsqd = 1. - residuals / (z.size * z.var())
         # differentiate the coeffs to get slope:
         diff_multiplier = np.arange(polynomial_degree + 1)[::-1]
         curv_multiplier = np.arange(polynomial_degree)[::-1]
         z_equ = np.poly1d(coeffs)
         S_equ = np.poly1d((coeffs * diff_multiplier)[:-1])
         curv_equ = np.poly1d(
             ((coeffs * diff_multiplier)[:-1] * curv_multiplier)[:-1])
         S_at_each_pt = S_equ(x)
         curv_at_each_pt = curv_equ(x)
         avg_slopes_linear.append(abs(grad))
         avg_slopes_poly.append(np.amax(np.fabs(S_at_each_pt)))
         # Exclude the two points at each profile end when locating the
         # flattest part (hence [2:-2] and the +2 offset).
         loc_of_flattest_part = np.argmin(np.fabs(
             curv_at_each_pt[2:-2])) + 2
         curv_of_flattest_part = curv_at_each_pt[loc_of_flattest_part]
         S_at_min_curve_untested = abs(S_at_each_pt[loc_of_flattest_part])
         small_curves = np.less(np.fabs(curv_at_each_pt[2:-2]),
                                curvature_threshold)
         try:
             # NOTE(review): ``small_curves`` is 4 elements shorter than
             # S_at_each_pt / z; the boolean index below looks misaligned
             # (probably should be S_at_each_pt[2:-2][small_curves]) --
             # left as-is pending confirmation of intent.
             big_slope_small_curv.append(np.amax(
                 S_at_each_pt[small_curves]))
             elev_at_bssc.append(z[np.argmax(S_at_each_pt[small_curves])])
         except ValueError:
             # No points fall below the curvature threshold.
             big_slope_small_curv.append(np.nan)
             elev_at_bssc.append(np.nan)
         slope_min_curv.append(S_at_min_curve_untested)
         curv_of_flattest_part_list.append(curv_of_flattest_part)
         rsqd_list.append(rsqd)
         # figure(8)
         # synthetic_z = grad*x + offset
         synthetic_z = z_equ(x)
         # NOTE(review): assumes a pylab-style ``plot`` is in module scope.
         plot(x, z, "x")
         plot(x, synthetic_z, "-")
     self.avg_slopes_linear = np.array(avg_slopes_linear)
     self.avg_slopes_poly = np.array(avg_slopes_poly)
     self.curv_of_flattest_part = np.array(curv_of_flattest_part_list)
     self.slope_min_curv = np.array(slope_min_curv)
     self.big_slope_small_curv = np.array(big_slope_small_curv)
     self.elev_at_bssc = np.array(elev_at_bssc)
     self.rsqd = np.array(rsqd_list)
Пример #13
0
    def __init__(self,
                 servers=4,
                 databases_each=8,
                 server_executable='redis-server'):
        """Spawn *servers* redis-server processes, each exposing
        *databases_each* databases (fixes nonexistent ``six.range``).

        Args:
            servers: number of server processes to start.
            databases_each: databases configured per server.
            server_executable: server binary to launch.
        """
        # Scratch directory for the servers' sockets/files.
        self._fd_dir = tempfile.mkdtemp()
        self.databases_each = databases_each
        self.server_executable = server_executable
        self.servers = []

        for _ in range(servers):
            self.spawn_server()
Пример #14
0
def main(request, response):
    """Trickle ``count`` lines of test payload to the client, sleeping
    ``ms`` milliseconds between writes (and before/after the headers).

    Query parameters:
        ms: delay between writes in milliseconds (default 500).
        count: number of payload lines to emit (default 50).
    """
    pause = float(request.GET.first(b"ms", 500)) / 1E3
    total = int(request.GET.first(b"count", 50))
    # Read request body
    request.body
    time.sleep(pause)
    response.headers.set(b"Content-type", b"text/plain")
    response.write_status_headers()
    time.sleep(pause)
    for _ in range(total):
        response.writer.write_content(b"TEST_TRICKLE\n")
        time.sleep(pause)
Пример #15
0
    def test(k):
        """Exercise the hash ring *k*: tally how 1000 generated keys
        distribute across nodes, then return the node assignments for four
        sample keys (fixes nonexistent ``six.range`` -> ``range``)."""
        data = {}
        for i in range(1000):
            tower = k.get_node('a%s' % i)
            data.setdefault(tower, 0)
            data[tower] += 1

        return [
            k.get_node('Apple'),
            k.get_node('Hello'),
            k.get_node('Data'),
            k.get_node('Computer')
        ]
Пример #16
0
    def _rebuild_circle(self):
        """Updates the hash ring.

        Each node gets ``40 * len(nodes) * weight / total_weight`` md5-based
        points on the ring; each 16-byte digest contributes four 32-bit keys
        (fixes nonexistent ``six.range`` -> ``range``).
        """
        self._hashring = {}
        self._sorted_keys = []
        total_weight = 0
        for node in self._nodes:
            total_weight += self._weights.get(node, 1)

        for node in self._nodes:
            weight = self._weights.get(node, 1)

            ks = math.floor((40 * len(self._nodes) * weight) / total_weight)

            for i in range(int(ks)):
                k = md5_bytes('%s-%s-salt' % (node, i))

                # Split the digest into four little-endian 32-bit keys.
                for j in range(4):
                    key = ((k[3 + j * 4] << 24) | (k[2 + j * 4] << 16) |
                           (k[1 + j * 4] << 8) | k[j * 4])
                    self._hashring[key] = node
                    self._sorted_keys.append(key)

        self._sorted_keys.sort()
Пример #17
0
 def make_cluster(self):
     """Creates a correctly configured cluster from the servers
     spawned.  This also automatically waits for the servers to be up.

     Returns:
         ``Cluster`` covering every database of every spawned server, with
         sequential host IDs (fixes nonexistent ``six.range``).
     """
     self.wait_for_servers()
     hosts = []
     host_id = 0
     for server in self.servers:
         for x in range(self.databases_each):
             hosts.append({
                 'host_id': host_id,
                 'unix_socket_path': server.socket_path,
                 'db': x,
             })
             host_id += 1
     return Cluster(hosts)
Пример #18
0
    def _parse_afrt(cls, box_bs):
        """Parse afrt / Fragment Run Table Box

        Args:
            box_bs: bitstream positioned at the box header.

        Returns:
            Populated ``FragmentRunTable`` instance
            (fixes nonexistent ``six.range`` -> ``range``).
        """
        afrt = FragmentRunTable()
        afrt.header = cls._read_box_header(box_bs)
        # read the entire box in case there's padding
        afrt_bs_box = box_bs.read(afrt.header.box_size * 8)

        # skip version byte
        afrt_bs_box.pos += 8
        update_flag = afrt_bs_box.read("uint:24")
        afrt.update = update_flag == 1

        afrt.time_scale = afrt_bs_box.read("uint:32")
        afrt.quality_fragment_url_modifiers = cls._read_count_and_string_table(
            afrt_bs_box)

        fragment_count = afrt_bs_box.read("uint:32")

        afrt.fragments = []

        for _ in range(fragment_count):
            first_fragment = afrt_bs_box.read("uint:32")
            first_fragment_timestamp_raw = afrt_bs_box.read("uint:64")

            try:
                first_fragment_timestamp = datetime.utcfromtimestamp(
                    first_fragment_timestamp_raw / float(afrt.time_scale))
            except ValueError:
                # Elemental sometimes create odd timestamps
                first_fragment_timestamp = None

            fragment_duration = afrt_bs_box.read("uint:32")

            # A zero duration marks a discontinuity entry carrying an
            # extra indicator byte.
            if fragment_duration == 0:
                discontinuity_indicator = afrt_bs_box.read("uint:8")
            else:
                discontinuity_indicator = None

            frte = FragmentRunTable.FragmentRunTableEntry(
                first_fragment=first_fragment,
                first_fragment_timestamp=first_fragment_timestamp,
                fragment_duration=fragment_duration,
                discontinuity_indicator=discontinuity_indicator)
            afrt.fragments.append(frte)
        return afrt
Пример #19
0
def train():
    """Train with image augmentation on a random train/val split.

    Saves the model with the best validation accuracy each epoch, then
    reports accuracy on the held-out test set.
    """
    from keras.metrics import categorical_accuracy

    # Preprocessing / batching / augmentation stages for element 0 (image).
    rerange = TransformImage(0).by('rerange', 0, 255, 0, 1, 'float32')
    build_batch = (BuildBatch(BATCH_SIZE).by(0, 'image', 'float32').by(
        1, 'one_hot', 'uint8', NUM_CLASSES))
    p = 0.1
    augment = (AugmentImage(0).by('identical', 1.0).by(
        'brightness', p, [0.7, 1.3]).by('color', p, [0.7, 1.3]).by(
            'shear', p, [0, 0.1]).by('fliplr', p).by('rotate', p, [-10, 10]))
    plot_eval = PlotLines((0, 1), layout=(2, 1))

    print('creating network...')
    network = create_network()

    print('loading data...')
    train_samples, test_samples = load_samples()
    train_samples, val_samples = train_samples >> SplitRandom(0.8)

    print('training...', len(train_samples), len(val_samples))
    for epoch in range(NUM_EPOCHS):
        print('EPOCH:', epoch)

        t_loss, t_acc = (train_samples >> PrintProgress(train_samples) >>
                         Pick(PICK) >> augment >> rerange >> Shuffle(100) >>
                         build_batch >> network.train() >> Unzip())
        t_loss, t_acc = t_loss >> Mean(), t_acc >> Mean()
        print("train loss : {:.6f}".format(t_loss))
        print("train acc  : {:.1f}".format(100 * t_acc))

        v_loss, v_acc = (val_samples >> rerange >> build_batch >>
                         network.validate() >> Unzip())
        # BUG FIX: previously both slots averaged v_acc, so the reported
        # "val loss" was actually the validation accuracy.
        v_loss, v_acc = v_loss >> Mean(), v_acc >> Mean()
        print('val loss   : {:.6f}'.format(v_loss))
        print('val acc    : {:.1f}'.format(100 * v_acc))

        # isloss=False: higher validation accuracy is better.
        network.save_best(v_acc, isloss=False)
        plot_eval((t_acc, v_acc))

    print('testing...', len(test_samples))
    e_acc = (test_samples >> rerange >> build_batch >> network.evaluate(
        [categorical_accuracy]))
    print('test acc   : {:.1f}'.format(100 * e_acc))
Пример #20
0
    def get_connection(self, command_name, shard_hint=None):
        """Check out a verified-alive connection for host *shard_hint*.

        Args:
            command_name: command the connection will run (forwarded to the
                underlying pool).
            shard_hint: required host id identifying the target host.

        Raises:
            RuntimeError: if no shard hint (host id) was given.
            ConnectionError: if no live connection could be checked out
                after 10 attempts (fixes nonexistent ``six.range``).
        """
        host_id = shard_hint
        if host_id is None:
            raise RuntimeError('The routing pool requires the host id '
                               'as shard hint')

        real_pool = self.cluster.get_pool_for_host(host_id)

        # When we check something out from the real underlying pool it's
        # very much possible that the connection is stale.  This is why we
        # check out up to 10 connections which are either not connected
        # yet or verified alive.
        for _ in range(10):
            con = real_pool.get_connection(command_name)
            if con._sock is None or not is_closed(con._sock):
                # NOTE(review): assumes the module binds ``weakref`` to
                # weakref.ref at import time -- confirm; otherwise this
                # should be ``weakref.ref(real_pool)``.
                con.__creating_pool = weakref(real_pool)
                return con

        raise ConnectionError('Failed to check out a valid connection '
                              '(host %s)' % host_id)
Пример #21
0
    def create_block_gradient_alpha(self, val):
        """Create a box with variable alpha channel

        Create a box of size `__box_size x __box_size` with its transparancy
        scaled to `val`

        Args:
            val:
                Scaling factor ranging from 0 to `q_max`. Sets the value of the
                alpha channel of the current box. The scaling is done using
                `val / q_max`. The color is set to `fg`

        Returns:
            A single color box with transparancy set to `val/q_max`
        """
        # BUG FIX: the original sliced self.fg (``self.fg[:]``) before the
        # type check, so a scalar fg raised TypeError before the int/float
        # branch could ever run.  Also fixes nonexistent ``six.range``.
        color = self.fg
        if isinstance(color, (int, float)):
            color = [color] * 4
        elif isinstance(color, list):
            color = color[:]  # copy so self.fg is not mutated below
            while len(color) < 4:
                color.append(0)
        color[3] = val * 1. / self.q_max
        size = self.get_box_size()
        return [[color] * size for _ in range(size)]
Пример #22
0
    def _parse_abst(cls, bootstrap_bs, header):
        """Parse an abst / Bootstrap Info Box.

        Args:
            bootstrap_bs: bitstream positioned at the box payload.
            header: previously-read box header.

        Returns:
            Populated ``BootStrapInfoBox`` with its nested segment (asrt)
            and fragment (afrt) run tables.
        """
        abst = BootStrapInfoBox()
        abst.header = header

        box_bs = bootstrap_bs.read(abst.header.box_size * 8)

        abst.version, abst.profile_raw, abst.live, abst.update, \
        abst.time_scale, abst.current_media_time, abst.smpte_timecode_offset = \
                box_bs.readlist("""pad:8, pad:24, uint:32, uint:2, bool, bool,
                                   pad:4,
                                   uint:32, uint:64, uint:64""")
        abst.movie_identifier = cls._read_string(box_bs)

        abst.server_entry_table = cls._read_count_and_string_table(box_bs)
        abst.quality_entry_table = cls._read_count_and_string_table(box_bs)

        abst.drm_data = cls._read_string(box_bs)
        abst.meta_data = cls._read_string(box_bs)

        abst.segment_run_tables = []

        segment_count = box_bs.read("uint:8")
        log.debug("segment_count: %d" % segment_count)
        # Fixes nonexistent ``six.range`` -> ``range``.
        for _ in range(segment_count):
            abst.segment_run_tables.append(cls._parse_asrt(box_bs))

        abst.fragment_tables = []
        fragment_count = box_bs.read("uint:8")
        log.debug("fragment_count: %d" % fragment_count)
        # BUG FIX: was bare ``xrange`` (Python 2 only, NameError on Py3).
        for _ in range(fragment_count):
            abst.fragment_tables.append(cls._parse_afrt(box_bs))

        log.debug("Finished parsing abst")

        return abst
Пример #23
0
def make_zset_data(x):
    """Return 10 ``(member, score)`` pairs ``(str(i), float(i))`` for
    ``i`` in ``[x, x + 10)`` (fixes nonexistent ``six.range``)."""
    return [(str(i), float(i)) for i in range(x, x + 10)]
Пример #24
0
    def create_line(self, rot=0, base=None):
        """Create a line on a `__box_size x __box_size` grid box

        Args:
            rot:    Rotation of the line. 0 represents vertical line
                    (range 0-15); -1 returns an all-background box
            base:   Optional pre-filled grid to draw onto; a fresh
                    background grid is created when omitted

        Returns:
            `__box_size x __box_size` nested list with a line drawn in `fg`

        NOTE:
            The rotation is in the increments of 11.25 degrees clockwise
        """
        # Fixes: nonexistent ``six.range`` -> ``range``; ``conv`` returning a
        # lazy map object (unindexable on Python 3); float origins from true
        # division breaking range()/indexing on Python 3; ``res`` undefined
        # when a ``base`` grid was supplied.
        size = self.get_box_size()
        if rot == -1:
            return [[self.bg] * size for _ in range(size)]

        assert (0 <= rot <= 15)
        rot = rot * 11.25

        # In case we want to draw something else:
        if base is None:
            res = [[self.bg] * size for _ in range(size)]
        else:
            # BUG FIX: previously ``res`` was never assigned on this path,
            # raising NameError below.
            res = base

        def bound_point(p):
            # Clamp a point back into the grid, one step at a time.
            while p[0] < 0: p[0] += 1
            while p[0] >= size: p[0] -= 1
            while p[1] < 0: p[1] += 1
            while p[1] >= size: p[1] -= 1
            return p

        def conv(x):
            # BUG FIX: must return a list -- Python 3 ``map`` is a lazy
            # iterator that cannot be indexed by bound_point / res below.
            return [int(v) for v in np.floor(x)]

        ## The reason the indeces are extending to -2 and 10 is because the
        ## rotation is calculated as sin/cos, meaning, we need a long line to
        ## fill the diagonals
        if size % 2 == 0:
            ## 4 different origins for rotation
            # First quadrant (integer division keeps indices ints on Py3).
            origin = [(size - 1) // 2, (size - 1) // 2]
            for idx in range(-2, origin[1] + 1):
                jdx = origin[1]
                point = self.rotate_point(origin, [idx, jdx], rot)
                point = conv(point)
                point = bound_point(point)
                res[point[0]][point[1]] = self.fg

            # Second quadrant
            origin[1] += 1
            for idx in range(-2, origin[1] + 1):
                jdx = origin[1]
                point = self.rotate_point(origin, [idx, jdx], rot)
                point = conv(point)
                point = bound_point(point)
                res[point[0]][point[1]] = self.fg

            # Third quadrant
            origin[0] += 1
            for idx in range(origin[1] + 1, size + 2):
                jdx = origin[1]
                point = self.rotate_point(origin, [idx, jdx], rot)
                point = conv(point)
                point = bound_point(point)
                res[point[0]][point[1]] = self.fg

            # Fourth quadrant
            origin[1] -= 1
            for idx in range(origin[1] + 1, size + 2):
                jdx = origin[1]
                point = self.rotate_point(origin, [idx, jdx], rot)
                point = conv(point)
                point = bound_point(point)
                res[point[0]][point[1]] = self.fg
        else:
            ## Single rotation origin
            origin = [size // 2, size // 2]
            for idx in range(size):
                jdx = origin[1]
                point = self.rotate_point(origin, [idx, jdx], rot)
                point = conv(point)
                point = bound_point(point)
                res[point[0]][point[1]] = self.fg
        return res
Пример #25
0
 def _parse_afra(cls, bs, header):
     """Parse an afra / Fragment Random Access Box.

     Reads flag bits selecting 16/32-bit ids and 32/64-bit offsets, then the
     local access entries and, when present, the global access entries
     (fixes nonexistent ``six.range`` -> ``range``).

     Args:
         bs: bitstream positioned at the box payload.
         header: previously-read box header.

     Returns:
         Populated ``FragmentRandomAccessBox`` instance.
     """
     afra = FragmentRandomAccessBox()
     afra.header = header

     # read the entire box in case there's padding
     afra_bs = bs.read(header.box_size * 8)
     # skip Version and Flags
     afra_bs.pos += 8 + 24
     long_ids, long_offsets, global_entries, afra.time_scale, local_entry_count = \
             afra_bs.readlist("bool, bool, bool, pad:5, uint:32, uint:32")

     # Field widths depend on the flag bits read above.
     if long_ids:
         id_bs_type = "uint:32"
     else:
         id_bs_type = "uint:16"

     if long_offsets:
         offset_bs_type = "uint:64"
     else:
         offset_bs_type = "uint:32"

     log.debug("local_access_entries entry count: %s", local_entry_count)
     afra.local_access_entries = []
     for _ in range(local_entry_count):
         time = cls._parse_time_field(afra_bs, afra.time_scale)

         offset = afra_bs.read(offset_bs_type)

         afra_entry = \
             FragmentRandomAccessBox.FragmentRandomAccessBoxEntry(
                 time=time, offset=offset)
         afra.local_access_entries.append(afra_entry)

     afra.global_access_entries = []

     if global_entries:
         global_entry_count = afra_bs.read("uint:32")

         log.debug("global_access_entries entry count: %s",
                   global_entry_count)

         for _ in range(global_entry_count):
             time = cls._parse_time_field(afra_bs, afra.time_scale)

             segment_number = afra_bs.read(id_bs_type)
             fragment_number = afra_bs.read(id_bs_type)

             afra_offset = afra_bs.read(offset_bs_type)
             sample_offset = afra_bs.read(offset_bs_type)

             afra_global_entry = \
                 FragmentRandomAccessBox.FragmentRandomAccessBoxGlobalEntry(
                                         time=time,
                                         segment_number=segment_number,
                                         fragment_number=fragment_number,
                                         afra_offset=afra_offset,
                                         sample_offset=sample_offset)

             afra.global_access_entries.append(afra_global_entry)

     return afra