Example #1
 def residual_unit(filter_size):
     return rcompose(
         ljuxt(
             rcompose(batch_normalization(), conv(filter_size, 3),
                      batch_normalization(), relu(),
                      conv(filter_size, 3), batch_normalization()),
             identity), add())
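Throughout these examples, funcy's rcompose chains functions left to right and ljuxt fans one input out to several functions, returning a list of results. A minimal sketch with plain numbers (standing in for Keras tensors) shows the branch-plus-shortcut shape of the residual unit above:

from funcy import rcompose, ljuxt, identity

# rcompose applies left to right: rcompose(f, g)(x) == g(f(x))
double = lambda x: x * 2
inc = lambda x: x + 1
assert rcompose(double, inc)(10) == 21  # inc(double(10))

# ljuxt feeds one input to several functions and returns a list,
# mirroring how the conv branch and the identity shortcut above
# are merged by a single add()
branches = ljuxt(double, identity)
assert branches(10) == [20, 10]
assert rcompose(branches, sum)(10) == 30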
Example #2
 def first_residual_unit(filter_size, stride_size):
     return rcompose(
         batch_normalization(), relu(),
         ljuxt(
             rcompose(conv(filter_size, 3, stride_size),
                      batch_normalization(), relu(),
                      conv(filter_size, 3, 1)),
             rcompose(conv(filter_size, 1, stride_size))), add())
Example #3
    def inception():
        u1 = rcompose(AveragePooling1D(pool_size=3, strides=1, padding='same'),
                      conv1D(48, 1))
        u2 = conv1D(48, 1)
        u3 = rcompose(conv1D(16, 1), conv1D(48, 3))
        u4 = rcompose(conv1D(16, 1), conv1D(48, 3), conv1D(48, 3))

        return rcompose(ljuxt(u1, u2, u3, u4), Concatenate(axis=2))
Example #4
	def	__residual_block(self, filter_size, stride_size, unit_size):
		return rcompose(
				self.__first_residual_unit(filter_size, stride_size),
				rcompose(
					*repeatedly(
						partial(self.__residual_unit, filter_size),
						unit_size-1
						)
					)
				)
Example #5
def message(bot, nodes, args):

    try:
        max = float(args['max']) if 'max' in args else float('inf')
        messages = args['messages']
        media = args.get('media_share')
        profile = args.get('profile')
        hashtag = args.get('hashtag')
    except Exception:
        bot.logger.error(
            'please add all necessary args, {} isn\'t enough'.format(args))
        return [], {}

    count = 0
    events = []

    def increment():
        nonlocal count
        count += 1
        return True

    def add_event(pair):
        node, text = pair
        events.append({
            'type': 'message',
            'metadata': bot.metadata,
            'args': {
                'message': text,
            },
            'node': {
                'type': 'user',
                'username': node.username,
            },
            'timestamp': int(datetime.utcnow().timestamp())
        })
        return node

    stop = raiser(StopIteration)

    listmap = rcompose(map, list)

    process = rcompose(
        lambda x: stop() if x and count >= max else x,
        lambda x: (x, listmap(choice, messages)),
        lambda pair: listmap(lambda m: send_message(bot, m, pair[0]), pair[1]),
        # lambda x: print(x) or x,
        lambda x: listmap(add_event, x),
        lambda x: x and x[0],
        lambda x: x and increment() and x,
    )

    result = map(process, nodes)
    result = filter(lambda x: x, result)

    return result, {'events': events}
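The stop = raiser(StopIteration) trick above caps a lazy map pipeline: raising StopIteration from inside the mapped function ends iteration for whoever consumes the map object (this works with map, though not inside generator functions, where PEP 479 turns it into RuntimeError). A stripped-down sketch of the same counting pattern:

from funcy import raiser, rcompose

def capped(items, max_items):
    count = 0
    stop = raiser(StopIteration)

    def increment(x):
        nonlocal count
        count += 1
        return x

    process = rcompose(
        # end the whole pipeline once the cap is reached
        lambda x: stop() if count >= max_items else x,
        increment,
    )
    return map(process, items)

assert list(capped(range(10), 3)) == [0, 1, 2]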
Example #6
	def	__residual_unit(self, filter_size):
		return rcompose(
				self.__ljuxt(
					rcompose(
						self.__batch_normalization(),
						self.__relu(),
						self.__conv(filter_size, 3),
						self.__batch_normalization(),
						self.__relu(),
						self.__conv(filter_size, 3)
						),
					identity
					),
				self.__add()
				)
Example #7
	def	__first_residual_unit(self, filter_size, stride_size):
		return rcompose(
				self.__batch_normalization(),
				self.__relu(),
				self.__ljuxt(
					rcompose(
						self.__conv(filter_size, 3, stride_size),
						self.__batch_normalization(),
						self.__relu(),
						self.__conv(filter_size, 3, 1)
						),
					rcompose(self.__conv(filter_size, 1, stride_size))
					),
				self.__add()
				)
Example #8
def followers(bot: Bot, nodes, args) -> List[User]:

    # bot.logger.debug('nodes at followers %s' % list(nodes)[:3])
    #
    # nodes = iter(list(nodes))
    amount = args.get('amount') or 1
    # query = args.get('query', {})

    pack_user = lambda item: User(**item)

    process = rcompose(
        lambda user: user.pk,
        lambda id: cycled_api_call(
            amount,
            bot,
            bot.api.user_followers,
            dict(
                user_id=id,
                **args.get('query', {}),
            ),
            'users',
        ),
        lambda gen: map(pack_user, gen),
    )

    result = mapcat(process, nodes)

    return result, {}
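mapcat here maps each node to an iterable of users and lazily flattens the results into one stream. A toy sketch of that flattening (the follower names are made up):

from funcy import mapcat

followers_of = lambda user_id: ('%s-follower-%d' % (user_id, i) for i in range(2))
flat = mapcat(followers_of, ['a', 'b'])
assert list(flat) == ['a-follower-0', 'a-follower-1', 'b-follower-0', 'b-follower-1']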
Example #9
def like(bot, nodes, args):
    max = float(args['max']) if 'max' in args else float('inf')

    count = 0

    def increment():
        nonlocal count
        count += 1
        return True

    stop = raiser(StopIteration)

    process = rcompose(
        lambda x: stop() if x and count >= max else x,
        # lambda node: node \
        #     if bot.suitable(node) \
        #     else tap(None,lambda: bot.logger.warn('{} not suitable'.format(node))),
        lambda node: like_media(node, bot=bot) \
            if node else None,
        lambda x: x and increment() and x,
    )

    liked = map(process, nodes)
    liked = filter(lambda x: x, liked)

    return liked, {}
Example #10
def main(youtube_id: str, bust_cache: bool = False) -> (
        Tuple[List[Tuple[float, float]], int, int]):
    """Read in the frames of the video, find the center of change.

    Writes out x,y positions to a csv, one row per frame.
    """
    path_data_fn = const.path_data_fn(youtube_id)

    frames = get_frames(youtube_id,
                        n_frames(youtube_id))
    first_frame = next(frames)
    # next() mutates the Iterable so we need to add the first frame back
    all_frames = itertools.chain((first_frame,), frames)

    (width, height) = (first_frame.width, first_frame.height)
    assert width > 0 and height > 0

    if not os.path.exists(path_data_fn) or bust_cache:
        def weighted_pos(im_bands: Tuple[Image.Image, ...]) -> Tuple[float, float]:
            return weighted_average_pos(im_bands, width, height)

        positions = fn.rcompose(
            fn.pairwise,
            fn.partial(fn.map, image_squared_difference),
            fn.partial(fn.map, weighted_pos),
            list
        )(all_frames)

        with open(path_data_fn, 'w') as f:
            csv.writer(f).writerows(positions)
        return (positions, width, height)
    else:
        with open(path_data_fn, 'r') as f:
            return ([(float(line[0]), float(line[1])) for line in csv.reader(f)], width, height)
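The heart of this function is the fn.rcompose(fn.pairwise, ...) pipeline: pairwise yields overlapping frame pairs, each pair becomes a squared-difference image, and each difference image becomes a weighted (x, y) position. The same pipeline shape on plain numbers, assuming funcy imported as fn:

import funcy as fn

# pairwise -> per-pair reduction -> per-result transform -> materialize,
# mirroring frames -> squared differences -> weighted positions -> list
successive_change = fn.rcompose(
    fn.pairwise,                                       # (a, b), (b, c), ...
    fn.partial(fn.map, lambda p: (p[1] - p[0]) ** 2),  # "squared difference"
    fn.partial(fn.map, abs),                           # stand-in for weighted_pos
    list,
)
assert successive_change([1, 4, 2]) == [9, 4]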
Example #11
def follow(bot: Bot, nodes, args):

    max = float(args['max']) if 'max' in args else float('inf')
    count = 0

    def increment():
        nonlocal count
        count += 1

    stop = raiser(StopIteration)

    process = rcompose(
        # lambda x: tap(x, lambda: bot.logger.warn('{}._data: \n{}'.format(x, unmask(x._data)))),
        lambda x: stop() if x and count >= max else x,
        # lambda node: node \
        #     if bot.suitable(node) \
        #     else tap(None,lambda: bot.logger.warn('{} not suitable'.format(node))),
        lambda node: follow_user(node, bot=bot) \
            if node else None,
        lambda x: tap(x, increment) if x else None,
    )

    followed = map(process, nodes)
    followed = filter(lambda x: x, followed)

    return followed, {}
Example #12
 def fire_module(filters_squeeze, filters_expand):
     return rcompose(
         BatchNormalization(), Activation('relu'),
         conv1D(filters_squeeze, 1),
         BatchNormalization(), Activation('relu'),
         ljuxt(conv1D(filters_expand // 2, 1),
               conv1D(filters_expand // 2, 3)), Concatenate())
Example #13
	def	__fire_module_with_shortcut(self, filters_squeeze, filters_expand):
		return rcompose(
				self.__ljuxt(
					self.__fire_module(filters_squeeze, filters_expand),
					identity
					),
				self.__add()
				)
Example #14
def create_volume_string_set(paths):
    """
    Returns a unique list of volume strings for a given list of host paths
    """
    f = funcy.rcompose(
        get_container_mount,
        lambda x: create_volume_string(x["host_dir"], x["container_dir"]))
    return list(funcy.distinct(map(f, paths)))
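The dedup relies on funcy.distinct, which keeps the first occurrence of each item and preserves order, so the volume list is stable across runs:

import funcy

assert list(funcy.distinct(['a:/x', 'b:/y', 'a:/x'])) == ['a:/x', 'b:/y']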
Example #16
 def fire_module(filters_squeeze, filters_expand):
     return rcompose(batch_normalization(),
                     relu(),
                     conv(filters_squeeze, 1),
                     batch_normalization(),
                     relu(),
                     ljuxt(conv(filters_expand // 2, 1),
                           conv(filters_expand // 2, 3)),
                     concatenate())
Example #17
def n_frames(youtube_id: str) -> int:
    # TODO(colin): somehow unite this with the filename in the const module
    extractor = r'%s_(\d+)\.png' % youtube_id
    return fn.rcompose(
        os.listdir,
        fn.partial(fn.filter, extractor),
        fn.partial(fn.map, extractor),
        fn.partial(fn.map, int),
        max)(const.cache_dir)
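This pipeline leans on funcy's extended function semantics: a regex string passed to fn.filter acts as a match predicate, and the same string passed to fn.map extracts the first capture group. A sketch on a hypothetical cache listing (the filenames are made up):

import funcy as fn

files = ['abc_1.png', 'abc_12.png', 'notes.txt']
extractor = r'abc_(\d+)\.png'

max_frame = fn.rcompose(
    fn.partial(fn.filter, extractor),  # regex as predicate: keep matching names
    fn.partial(fn.map, extractor),     # regex as mapper: yield the captured digits
    fn.partial(fn.map, int),
    max,
)
assert max_frame(files) == 12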
Example #18
def author(bot, nodes, args):

    process = rcompose(
        lambda media: media['user'],
        lambda data: User(**data),
    )

    result = map(process, nodes)

    return result, {}
Example #19
def process(data):
    return rcompose(
        clean_outliers,
        fill_na_train,
        fill_na_test,
        smooth_median_test,
        smooth,
        add_specified_features,
        generate_shifts,
    )(data)
Example #20
	def	make_graph(self, class_size):
		return rcompose(
				self.__conv(16, 3),
				self.__residual_block(16 * k, 1, n),
				self.__residual_block(32 * k, 2, n),
				self.__residual_block(64 * k, 2, n),
				self.__batch_normalization(),
				self.__relu(),
				self.__global_average_pooling(),
				self.__dense(class_size, 'softmax')
				)
Example #21
def get_container_path(host_directory, container_prefix_path="/tmp"):
    """
    Returns a container directory location under the given prefix path.
    This is deterministic and always returns the same path for the given host path.
    """
    import hashlib
    from hashids import Hashids
    digest = funcy.rcompose(
        lambda x: hashlib.md5(x.encode('utf-8')).hexdigest(),
        lambda x: int(x, base=16),
        Hashids(min_length=6).encode)
    return os.path.join(container_prefix_path, digest(host_directory))
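Since the pipeline is pure (md5 of the path, hex digest to int, Hashids encoding), the same host path always maps to the same container path. A usage sketch, assuming the function above and the hashids package are available:

p1 = get_container_path('/home/user/project')
p2 = get_container_path('/home/user/project')
assert p1 == p2 and p1.startswith('/tmp/')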
Example #22
def likers(bot, nodes, args) -> List[Media]:

    pack_user = lambda item: User(**item)
    amount = args.get('amount')

    process = rcompose(lambda media: media.pk,
                       lambda id: get_likers(id, bot, amount),
                       lambda gen: map(pack_user, gen))

    result = mapcat(process, nodes)

    return result, {}
Example #24
        def _fetch_all(self):
            # This thing appears in Django 1.9.
            # In Django 1.9 and 1.10 both calls mean the same.
            # Starting from Django 1.11, .iterator() uses chunked fetch,
            # while ._fetch_all() stays with the bare _iterable_class.
            if hasattr(self, '_iterable_class'):
                it = self._iterable_class(self)
            else:
                it = self.iterator()
            self._result_cache = lmap(rcompose(*self._mappers), it)

            # Fill in the rest
            base._fetch_all(self)
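The interesting line is lmap(rcompose(*self._mappers), it): an arbitrary list of row mappers is fused into one function and applied in registration order. A minimal sketch of that fusion outside Django (the mappers are made up):

from funcy import lmap, rcompose

mappers = [
    lambda row: dict(row, name=row['name'].strip()),
    lambda row: dict(row, name=row['name'].title()),
]
rows = [{'name': '  ada lovelace '}]
assert lmap(rcompose(*mappers), rows) == [{'name': 'Ada Lovelace'}]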
Example #25
	def	__fire_module(self, filters_squeeze, filters_expand):
		return rcompose(
				self.__batch_normalization(),
				self.__relu(),
				self.__conv(filters_squeeze, 1),
				self.__batch_normalization(),
				self.__relu(),
				self.__ljuxt(
					self.__conv(filters_expand // 2, 1),
					self.__conv(filters_expand // 2, 3)
					),
				self.__concatenate()
				)
Example #26
def process_raw_metrics(app, metrics, mappings):
    """
    Given a dictionary of raw metrics retrieved from a container output file, and an
    array of mappings for those metrics, convert the input dictionary of metrics
    using these mappings.
    """
    function_list = globals()

    def parse(mapping):
        return parse_metric(app, mapping, fetch_metric(metrics, mapping))

    create_key_value_dict = funcy.rcompose(
        partial(map, parse), dict, partial(funcy.select_values, funcy.notnone))

    return create_key_value_dict(mappings)
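The tail of the pipeline is a common funcy idiom: build a dict, then drop entries whose values are None with select_values(notnone, ...). A sketch with a toy parse (the names are hypothetical):

import funcy
from functools import partial

parse = lambda mapping: (mapping['name'], mapping.get('value'))  # value may be None

to_metrics = funcy.rcompose(
    partial(map, parse),
    dict,
    partial(funcy.select_values, funcy.notnone),  # drop None values
)
assert to_metrics([{'name': 'cpu', 'value': 0.5}, {'name': 'mem'}]) == {'cpu': 0.5}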
Example #27
def following(bot, nodes, args) -> List[User]:

    amount = args.get('amount') or 1

    pack_user = lambda item: User(**item)

    process = rcompose(
        lambda user: user.pk,
        lambda id: cycled_api_call(
            amount, bot, bot.api.user_following,
            dict(user_id=id, **args.get('query', {})), 'users'),
        lambda gen: map(pack_user, gen)
    )

    result = mapcat(process, nodes)

    return result, {}
Example #28
def hashtag_feed(bot: Bot, nodes, args) -> List[Media]:
    amount = args.get('amount') or 1

    pack_media = lambda data: Media(id=data['pk'], data=data)

    process = rcompose(
        lambda tag: tag.name,
        # lambda x: tap(x, lambda: print(bot.last)),
        lambda name: cycled_api_call(amount, bot, bot.api.feed_tag, (name,), 'items'),
        lambda items: map(pack_media, items),
    )

    result = mapcat(process, nodes)

    return result, {}
Example #29
def hashtag_stories(bot: Bot, nodes, args) -> List[Media]:
    amount = args.get('amount') or 1

    pack_story = lambda data: Story(**data)

    process = rcompose(
        lambda tag: tag.name,
        # lambda x: tap(x, lambda: print(bot.last)),
        lambda id: cycled_api_call(amount, bot, bot.api.feed_tag, id,
                                   ['story', 'items']),
        lambda items: map(pack_story, items),
    )

    result = mapcat(process, nodes)

    return result, {}
Example #30
def get_vlans_of_port(ip, port):
    try:
        child = telnet(ip)
        rslt = do_some(child, f'disp cu interface {port}')
        eth_trunk = re_find(r'eth-trunk \d+', rslt).replace(' ', '')
        rslt = do_some(child, 'disp cu interface filter user-vlan')
        close(child)
    except Exception as e:
        raise e
    rslt = rcompose(methodcaller('split', '#'),
                    autocurry(filter)(lambda x: re_test(eth_trunk, x, re.I)),
                    autocurry(mapcat)(lambda x: x.split('\r\n')),
                    autocurry(filter)('user-vlan'),
                    autocurry(map)(lambda x: x.strip()),
                    autocurry(map)(lambda x: _item_to_vlans(x)))(rslt)
    return merge(set(), *rslt)
Example #31
def text(bot, nodes, args):

    try:
        max = float(args['max']) if 'max' in args else float('inf')
        messages = args['messages']
    except Exception:
        bot.logger.error(
            'please add all necessary args, {} isn\'t enough'.format(args))
        return [], {}

    count = 0

    def increment():
        bot.total['texts'] += 1
        nonlocal count
        count += 1

    stop = raiser(StopIteration)

    return_if_suitable = lambda node: node \
        if bot.suitable(node, table='texted', specifier=str(messages)) \
        else tap(None,lambda: bot.logger.warning('{} not suitable'.format(node)))

    discard_if_reached_limit = lambda node: node \
        if not bot.reached_limit('texts') \
        else tap(None, lambda: bot.logger.error('reached texting daily limit'))

    send_msg_from_groups = lambda node: map(
            lambda msgs: send_message(bot, choice(msgs), node),
            messages) \
         if node else []

    process = rcompose(
        lambda x: stop() if x and count >= max else x,
        # return_if_suitable,
        discard_if_reached_limit,
        send_msg_from_groups,
        lambda arr: list(arr)[0] if arr else None,
        lambda x: tap(x, increment) if x else None,
    )

    result = map(process, nodes)
    result = filter(lambda x: x, result)

    return result, {}
Example #32
def comment(bot: Bot, nodes, args):

    try:
        max = float(args['max']) if 'max' in args else float('inf')
        comments = args['comments']
    except Exception:
        bot.logger.error(
            'please add all necessary args, {} isn\'t enough'.format(args))
        return [], {}

    count = 0

    def increment():
        nonlocal count
        count += 1

    stop = raiser(StopIteration)

    return_if_suitable = lambda node: node \
        if bot.suitable(node, table='commented', specifier=str(comments)) \
        else tap(None,lambda: bot.logger.warn('{} not suitable'.format(node)))

    discard_if_reached_limit = lambda node: node \
        if not bot.reached_limit('comments') \
        else tap(None, lambda: bot.logger.error('reached commenting daily limit'))

    # list() forces the lazy map so the comments are actually sent
    do_comment_from_groups = lambda node: list(map(
            lambda cmnts: do_comment(bot, choice(cmnts), node),
            comments)) and node \
         if node else None

    process = rcompose(
        lambda x: stop() if x and count >= max else x,
        # return_if_suitable,
        # discard_if_reached_limit,
        do_comment_from_groups,
        lambda x: tap(x, increment) if x else None,
    )

    result = map(process, nodes)
    result = filter(lambda x: x, result)

    return result, {}
Example #33
    def description(self):
        if self._description is None:
            name = self.name

            if name in SCORING_NAME_MAPPER:
                self._description = SCORING_NAME_MAPPER[name]
            else:
                # default formatting
                def upper_first(s):
                    return s[0].upper() + s[1:] if s is not None else s
                format = rcompose(
                    lambda s: s.split('_'),
                    partial(map, upper_first),
                    lambda l: ' '.join(l),
                )
                self._description = format(name)

        return self._description
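Applied to a scorer name such as mean_squared_error, the fallback pipeline splits on underscores, capitalizes each word, and rejoins with spaces. A self-contained sketch of the same composition:

from functools import partial
from funcy import rcompose

def upper_first(s):
    return s[0].upper() + s[1:] if s else s

format_name = rcompose(
    lambda s: s.split('_'),
    partial(map, upper_first),
    ' '.join,
)
assert format_name('mean_squared_error') == 'Mean Squared Error'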
Example #34
def geotag_stories(bot: Bot, nodes, args) -> List[Story]:

    pack_story = lambda data: Story(**data)
    amount = args.get('amount') or 1

    process = rcompose(
        lambda tag: tag.id,
        # lambda x: tap(x, lambda: print(x)),
        lambda id: cycled_api_call(amount, bot, bot.api.location_stories, id,
                                   ('story', 'items')),
        # lambda x: tap(x, lambda: print(next(x))),
        # lambda gen: islice(gen, amount),
        lambda items: map(pack_story, items),
    )

    result = mapcat(process, nodes)

    return result, {}
Example #35
File: T64.py Project: sjava/weihu
def get_groups(ip):
    def _get_desc(child, group):
        name = group['name']
        rslt = do_some(child, 'show run interface {name}'.format(name=name))
        desc = re_find(r'description\s(\S+ *\S*)', rslt)
        group['desc'] = desc
        if group['mode'] == 'active':
            group['mode'] = 'yes'
        return group

    try:
        child = telnet(ip)
        rslt = do_some(child, 'show run | in smartgroup [0-9]+')
        ff = rcompose(partial(map, lambda x: x.strip()),
                      distinct,
                      partial(map, r'(smartgroup\s\d+)\smode\s(\w+)'),
                      partial(map, lambda x: dict(name=x[0].replace(' ', ''), mode=x[1])))
        temp = ff(rslt.splitlines()[:-1])
        get_desc = partial(_get_desc, child)
        groups = lmap(get_desc, temp)
        close(child)
    except (pexpect.EOF, pexpect.TIMEOUT) as e:
        return ('fail', None, ip)
    return ('success', groups, ip)
Example #36
def make_boxes_from_frame_spec(min_frame: int, max_frame: int,
                               xspec: FrameSpec, yspec: FrameSpec,
                               video_width: int, video_height: int,
                               keyframes_only: bool = False) -> (
                                   FrameSpecOutput):
    key_frames_x = [0] + list(fn.sums([frame for _, frame in xspec]))[:-1]
    key_frames_y = [0] + list(fn.sums([frame for _, frame in yspec]))[:-1]

    all_keyframes = list(sorted(list(
        set(key_frames_x).union(set(key_frames_y)))))

    @fn.autocurry
    def key_frame_index(key_frames, frame):
        for ki, k in enumerate(key_frames):
            if k > frame:
                return ki - 1
        return len(key_frames) - 1

    @fn.autocurry
    def ensure_in_range(size, maxval, pos):
        if pos - padding/2 < 0:
            pos = padding/2
        elif pos - padding/2 + size > maxval:
            pos = maxval - size + padding/2
        return pos

    frame_pos_x_fn = fn.rcompose(
        fn.partial(fn.map,
                   key_frame_index(key_frames_x)),
        fn.partial(fn.map, lambda key_idx: int(round(xspec[key_idx][0]))),
        fn.partial(fn.map,
                   ensure_in_range(const.box_width, video_width)))

    frame_pos_y_fn = fn.rcompose(
        fn.partial(fn.map,
                   key_frame_index(key_frames_y)),
        fn.partial(fn.map, lambda key_idx: int(round(yspec[key_idx][0]))),
        fn.partial(fn.map,
                   ensure_in_range(const.box_height, video_height)))

    if keyframes_only:
        frame_pos_x = frame_pos_x_fn(all_keyframes)
        frame_pos_y = frame_pos_y_fn(all_keyframes)
        return [
            (float(frame) / len(range(min_frame, max_frame)),) +
            shared.tuple4(tuple(
                int(round(coord))
                for coord in (pos_x - padding/2,
                              pos_y - padding/2,
                              pos_x - padding/2 + const.box_width,
                              pos_y - padding/2 + const.box_height)
            )) for frame, pos_x, pos_y in zip(all_keyframes, frame_pos_x,
                                              frame_pos_y)]
    else:
        frame_pos_x = frame_pos_x_fn(range(min_frame, max_frame))
        frame_pos_y = frame_pos_y_fn(range(min_frame, max_frame))

        return [shared.tuple4(tuple(
            int(round(coord))
            for coord in (pos_x - padding/2,
                          pos_y - padding/2,
                          pos_x - padding/2 + const.box_width,
                          pos_y - padding/2 + const.box_height)
            )) for pos_x, pos_y in zip(frame_pos_x, frame_pos_y)]
Example #37
    cur.close()
    conn.close()


def run_sql_with_db(db, sql):

    with db as cur:
        cur.execute(sql)
        time.sleep(1)


if __name__ == '__main__':

    print('test get_pid_tid_pair')
    print(get_pid_tid_pair())
    run_in_thread(fy.rcompose(get_pid_tid_pair, print))
    run_in_process(fy.rcompose(get_pid_tid_pair, print))
    print()

    database = '/tmp/test_mt_mp.py'
    conn_f = fy.partial(sqlite3.connect, database)
    db = modb.Database(sqlite3, database)
    inc_sql = 'update counters set count = count + 1;'

    kwargs = {
        'host': 'localhost',
        'db': 'test',
    }
    conn_f = fy.partial(pymysql.connect, **kwargs)
    db = modb.Database(pymysql, **kwargs)
Example #38
 def __call__(self, addrs):
     pipeline = funcy.rcompose(*self.steps)
     return pipeline(addrs)()