def main(youtube_id: str, bust_cache: bool = False) -> (
        Tuple[List[Tuple[float, float]], int, int]):
    """Read in the frames of the video, find the center of change.

    Writes out x,y positions to a csv, one row per frame.
    """
    path_data_fn = const.path_data_fn(youtube_id)

    frames = get_frames(youtube_id,
                        n_frames(youtube_id))
    first_frame = next(frames)
    # next() consumes the first frame from the iterator, so chain it back on
    all_frames = itertools.chain((first_frame,), frames)

    (width, height) = (first_frame.width, first_frame.height)
    assert width > 0 and height > 0

    if not os.path.exists(path_data_fn) or bust_cache:
        def weighted_pos(im_bands: Tuple[Image.Image, ...]) -> Tuple[float, float]:
            return weighted_average_pos(im_bands, width, height)

        positions = fn.rcompose(
            fn.pairwise,
            fn.partial(fn.map, image_squared_difference),
            fn.partial(fn.map, weighted_pos),
            list
        )(all_frames)

        with open(path_data_fn, 'w') as f:
            csv.writer(f).writerows(positions)
        return (positions, width, height)
    else:
        with open(path_data_fn, 'r') as f:
            return ([(float(line[0]), float(line[1])) for line in csv.reader(f)], width, height)
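
A minimal, self-contained sketch of the composition idiom used above, assuming `fn` is the funcy library: `rcompose` chains the stages left to right, and `partial` pre-binds the mapping function so each stage becomes a plain unary callable.

import funcy as fn

pipeline = fn.rcompose(
    fn.pairwise,                                # (a, b, c) -> (a, b), (b, c)
    fn.partial(fn.map, lambda p: p[1] - p[0]),  # difference of each neighbouring pair
    list,
)
print(pipeline([1, 4, 9, 16]))  # [3, 5, 7]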
Example #2
def main(input_mp4, output_mat, noise, speaker, num_features,
         num_frames, verbose, fps, log_spec):
    my_features = partial(features, frames=num_frames, features=num_features,
                          fps=fps, use_mfcc=not log_spec)
    wav_to_features = compose(my_features, wav_read)
    mp4_to_features = compose(my_features, to_wav)

    train_classifiers2 = partial(train_classifiers, [noise] + list(speaker), 
                                 wav_to_features, LinearSVC, verbose=verbose)
    
    # Materialize the results so they can be iterated both by the verbose
    # report below and again by max().
    results = list(map(train_classifiers2, [AdaBoostClassifier, GaussianNB, LinearSVC,
                                            partial(KNeighborsClassifier, n_neighbors=20)]))

    if verbose:
        for name, (_, score) in zip(["Ada", "GNB", "LSVM", "NN"], results):
            print(name, score)
    
    classify, _ = max(results, key=lambda x: (x[1][0] + x[1][1])/2)
    

    mp4_features = mp4_to_features(input_mp4).T
    
    if verbose:
        savemat("all_features.mat", {"all_features": mp4_features})
    
    voice, person = classify(mp4_features)
    savemat(output_mat, {"voice": voice, "person": person})
Example #3
    def __init__(self,
                 node,
                 value,
                 relative=False,
                 immediatly=False,
                 target_type="Position",
                 *args,
                 **kwargs):
        parameter_name = "Target " + target_type
        self.node = node
        self.value = value
        self.relative = relative
        self.immediatly = immediatly

        set_target = partial(SdoWriteObject,
                             node=node,
                             parameter_name=parameter_name,
                             value=self.value)
        notify_new_target = partial(NotifyNewTarget,
                                    node=node,
                                    relative=self.relative,
                                    immediatly=self.immediatly)

        operations = [set_target, notify_new_target]

        super(SetTarget, self).__init__(node, operations, *args, **kwargs)
Example #4
    def awake(self):
        for gui_element in Component.__added_components__[GuiElement]:
            gui_element.fire_callbacks("register_manager", self)

        self.entity.register_callback("mousemotion",partial(self.mouse_callback, "mousemotion"))
        self.entity.register_callback("mousebuttonup",partial(self.mouse_callback, "mousebuttonup"))
        self.entity.register_callback("mousebuttondown",partial(self.mouse_callback, "mousebuttondown"))
Example #5
    def __init__(self, node, relative=False, immediatly=False, *args, **kwargs):
        self.node = node
        self.relative = relative
        self.immediatly = immediatly

        notify = partial(ChangeControlword,
                         node=self.node,
                         updates={
                             Can402ControlwordBits.new_set_point: True,
                             Can402ControlwordBits.abs_rel: self.relative,
                             Can402ControlwordBits.change_set_immediately: self.immediatly
                         },
                         timeout=self.node.atomic_timeout)

        reset = partial(ChangeControlword,
                        node=self.node,
                        updates={
                            Can402ControlwordBits.new_set_point: False
                        },
                        timeout=self.node.atomic_timeout)

        operations = [notify, reset]

        super(NotifyNewTarget, self).__init__(node, operations, *args, **kwargs)
Example #6
def n_frames(youtube_id: str) -> int:
    # TODO(colin): somehow unite this with the filename in the const module
    extractor = r'%s_(\d+)\.png' % youtube_id
    return fn.rcompose(
        os.listdir,
        fn.partial(fn.filter, extractor),
        fn.partial(fn.map, extractor),
        fn.partial(fn.map, int),
        max)(const.cache_dir)
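
n_frames leans on funcy's extended function semantics: a regex string passed to a filter-like function acts as a matching predicate, and the same string passed to a map-like function extracts its single capture group. A small illustration of that convention, assuming `fn` is funcy:

import funcy as fn

names = ['abc123_0.png', 'abc123_17.png', 'notes.txt']
extractor = r'abc123_(\d+)\.png'
latest = fn.rcompose(
    fn.partial(fn.filter, extractor),  # keep only matching filenames
    fn.partial(fn.map, extractor),     # pull out the captured frame index
    fn.partial(fn.map, int),
    max,
)(names)
print(latest)  # 17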
Example #7
    def test_fire_callbacks_pipeline(self,a,b,start_accum):
        e = Events()
        import operator
        # accum = add(a,start_accum)
        e.register_callback("pipeline", partial(operator.add,a))
        # accum = mul(b,accum)
        e.register_callback("pipeline", partial(operator.mul,b))
        # accum == b*(a+start_accum)

        assert e.fire_callbacks_pipeline("pipeline", start_accum) == b*(a+start_accum)
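
The test only makes sense if fire_callbacks_pipeline folds the accumulator through the registered callbacks in order; a rough sketch of that presumed behaviour (not the actual Events implementation) shows why the expected value is b*(a+start_accum):

import operator
from functools import partial, reduce

callbacks = [partial(operator.add, 2), partial(operator.mul, 3)]
result = reduce(lambda acc, cb: cb(acc), callbacks, 10)
print(result)  # 3 * (2 + 10) == 36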
Example #8
def plot_history(history):
    def plot_values_collection(title, values_collection):
        plot.clf()
        plot.title(title)
        for values in values_collection:
            plot.plot(values)
        plot.show()

    plot_values_collection(
        'loss', map(partial(getitem, history), ('loss', 'val_loss')))
    plot_values_collection('accuracy',
                           map(partial(getitem, history), ('acc', 'val_acc')))
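
The partial(getitem, history) idiom above turns the history dict into a lookup function that map can drive; a small standalone illustration:

from functools import partial
from operator import getitem

history = {'loss': [0.9, 0.5], 'val_loss': [1.0, 0.7]}
lookup = partial(getitem, history)
print(list(map(lookup, ('loss', 'val_loss'))))  # [[0.9, 0.5], [1.0, 0.7]]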
Example #9
 def ride_to_intervals(self, dic):
     return k(dic) @ (lambda ride: k(ride['RIDE']['INTERVALS']) @ partial(
         map, lambda interval: k(ride['RIDE']['SAMPLES']) @ partial(
             filter, lambda data: data['SECS'] >= interval['START'] and
             data['SECS'] < interval['STOP']) @ partial(
                 map, lambda data: {
                     'SECS': data.get('SECS'),
                     'WATTS': data.get('WATTS'),
                     'HR': data.get('HR')
                 }) @ list @ (lambda x:
                              (interval['NAME'], x)) @ 'end') @ dict @ 'end'
                      ) @ 'end'
Example #10
    def __init__(self, position=(0, 0), size=(0, 0), anchor=(0, 0), relative_position=False, snap_to_grid=None, *args, **kwargs):
        super(GuiElement, self).__init__(*args, **kwargs)
        self.position = position
        self.relative_position = relative_position
        self.size = size
        self.anchor = anchor
        self.snap_to_grid = snap_to_grid
        self.manager = None
        self.relative_gui_element = None
        self._always_fetch_mouse = False
        self.mouse_callbacks = []
        self.mouse_callbacks.append(("mousemotion", partial(self.mouse_callback, "mousemotion")))
        self.mouse_callbacks.append(("mousebuttonup", partial(self.mouse_callback, "mousebuttonup")))
        self.mouse_callbacks.append(("mousebuttondown", partial(self.mouse_callback, "mousebuttondown")))
Example #11
    def default(self, obj):
        ordered_attrs = pipe(
            partial(map, lambda attr: (attr, getattr(obj, attr))),
            partial(remove_values, isnone),
            partial(remove_values, all_fn(isa(list, dict), isempty)),
            partial(walk_values, iffy(isa(dict), sort_dict)),
            OrderedDict)

        if isinstance(obj, Context):
            return ordered_attrs(['key', 'operator', 'operand', 'match_all'])
        elif isinstance(obj, Binding):
            return ordered_attrs(['keys', 'command', 'args', 'context'])
        else:
            return super().default(obj)
Example #12
    def get_power_history(self):
        path = os.path.join(self.gc_folder_path, 'config/power.zones')
        with open(path, 'r') as fdesc:
            lines = fdesc.read().splitlines()

        return k(lines) \
         @ partial(map,lambda x: x.split(':')) @ flatten \
         @ partial(split_before, pred = lambda x: 'DEFAULTS' in x or '/' in x) \
         @ list @ partial(select, lambda x: x[0]!='DEFAULTS') \
         @ (lambda l: [{'dateTime': datetime.strptime(x[0],'%Y/%m/%d'),
            **( k(x[1:]) @ partial(map,lambda y:
             k(y.split('=')) @ partial(map, lambda i: i.strip()) @ tuple @ 'end'
                                                            if '=' in y else y)
                @ dict @ 'end' ) } for x in l ]) @ 'end'
Example #13
File: olt.py  Project: sjava/weihu
def add_infs():
    funcs = {'zte': Zte.get_infs, 'hw': Huawei.get_infs}
    get_infs = partial(_company, funcs)

    clear_log()
    nodes = graph.cypher.execute(
        'match (n:Olt) return n.ip as ip,n.company as company')
    olts = [dict(ip=x['ip'], company=x['company']) for x in nodes]
    pool = Pool(128)
    lock = Manager().Lock()
    _add_infs_p = partial(_add_infs, lock)
    list(pool.map(compose(_add_infs_p, get_infs), olts))
    pool.close()
    pool.join()
Example #14
def plot_history():
    def plot_values_collection(title, values_collection):
        plot.clf()
        plot.title(title)
        for values in values_collection:
            plot.plot(values)
        plot.show()

    with open('./results/history.pickle', 'rb') as f:
        history = pickle.load(f)

    print(last(history['val_acc']))

    plot_values_collection('loss',     map(partial(getitem, history), ('loss', 'val_loss')))
    plot_values_collection('accuracy', map(partial(getitem, history), ('acc',  'val_acc')))
Example #15
File: Zte.py  Project: sjava/weihu
def get_groups(ip):
    def _get_infs(record):
        name = re_find(r'(Smartgroup:\d+)', record)
        if name:
            name = name.lower().replace(':', '')
        infs = re_all(r'(x?gei_\d+/\d+/\d+)\s?selected', record)
        return dict(name=name, infs=infs)

    def _get_desc_mode(child, group):
        rslt = do_some(child, 'show run int {name}'.format(name=group['name']))
        desc = re_find(r'description\s+(\S+)', rslt)
        group['desc'] = desc
        rslt = do_some(child, 'show run int {inf}'.format(
            inf=group['infs'][0]))
        mode = re_find(r'smartgroup\s\d+\smode\s(\S+)', rslt)
        group['mode'] = mode
        return group

    try:
        child = telnet(ip)
        rslt = re.split(r'\r\n\s*\r\n', do_some(child, 'show lacp internal'))
        groups = thread_last(rslt,
                             (lmap, _get_infs),
                             (select, lambda x: x['name'] and x['infs']))
        lmap(partial(_get_desc_mode, child), groups)
        close(child)
    except (pexpect.EOF, pexpect.TIMEOUT) as e:
        return ('fail', None, ip)
    return ('success', groups, ip)
Example #16
    def _preprocess(self, bindings):
        return pipe(
            deepcopy,
            partial(lflatten, follow=isa(list, tuple, Keymap)),
            self._apply_common_context,
            self._apply_default_match_all
        )(bindings)
Example #17
def to_mean_weighted_tf_idf_topics(cluster_results, passage_to_topic_ids=None):
    """Converts cluster results into topics of term frequency-inverse document
    frequency within the results and then weights them by the cluster's given
    probability

        term = topic
        document = list of topics for a given passage

    Example:

        [('tpc:-12894305', 0.134725), ...]

    """
    def _weight_tf_idf_pairs(pair):
        cluster_result, tf_idf_pairs = pair
        return [(topic_id, cluster_result.probability * tf_idf)
                for topic_id, tf_idf in tf_idf_pairs]

    def _average_tf_idf_pairs(pair):
        topic_id, tf_idf_pairs = pair
        return [(topic_id, mean(list_map(second, tf_idf_pairs)))]

    return pipe(to_tf_idf_topics(cluster_results, passage_to_topic_ids),
                partial(zip, cluster_results), map(_weight_tf_idf_pairs), cat,
                groupby(first), lambda d: d.items(),
                map(_average_tf_idf_pairs), cat,
                list_map(lambda t: WeightedTfIdfTopic(*t)),
                sorted(key=attr('score'), reverse=True))
Example #18
def get_groups(ip):
    def _get_infs(record):
        name = re_find(r'(Smartgroup:\d+)', record)
        if name:
            name = name.lower().replace(':', '')
        infs = re_all(r'(x?gei_\d+/\d+/\d+)\s?selected', record)
        return dict(name=name, infs=infs)

    def _get_desc_mode(child, group):
        rslt = do_some(child, 'show run int {name}'.format(name=group['name']))
        desc = re_find(r'description\s+(\S+)', rslt)
        group['desc'] = desc
        rslt = do_some(child,
                       'show run int {inf}'.format(inf=group['infs'][0]))
        mode = re_find(r'smartgroup\s\d+\smode\s(\S+)', rslt)
        group['mode'] = mode
        return group

    try:
        child = telnet(ip)
        rslt = re.split(r'\r\n\s*\r\n', do_some(child, 'show lacp internal'))
        groups = thread_last(rslt, (lmap, _get_infs),
                             (select, lambda x: x['name'] and x['infs']))
        lmap(partial(_get_desc_mode, child), groups)
        close(child)
    except (pexpect.EOF, pexpect.TIMEOUT) as e:
        return ('fail', None, ip)
    return ('success', groups, ip)
Example #19
def get_infs(ip):
    def _get_info(child, inf):
        rslt = do_some(child, 'show int {inf}'.format(inf=inf))
        desc = re_find(r'Description\sis\s(\S+)', rslt)
        state = re_find(r'{inf}\sis\s(\S+\s?\S+),'.format(inf=inf), rslt)
        bw = re_find(r'BW\s(\d+)\sKbits', rslt)
        bw = int(bw or 0) / 1000
        inTraffic = re_find(r'seconds\sinput\srate\s?:\s+(\d+)\sBps', rslt)
        inTraffic = int(inTraffic or 0) * 8 / 1e6
        outTraffic = re_find(r'seconds\soutput\srate:\s+(\d+)\sBps', rslt)
        outTraffic = int(outTraffic or 0) * 8 / 1e6
        return dict(name=inf,
                    desc=desc,
                    state=state,
                    bw=bw,
                    inTraffic=inTraffic,
                    outTraffic=outTraffic)

    try:
        child = telnet(ip)
        rslt = do_some(child, 'show run | in interface', timeout=180)
        rslt = re_all(r'interface\s+(x?gei_\d+/\d+/\d+)', rslt)
        infs = lmap(partial(_get_info, child), rslt)
        close(child)
    except (pexpect.EOF, pexpect.TIMEOUT) as e:
        return ('fail', None, ip)
    return ('success', infs, ip)
Example #20
 def test_update(self, mocked_setup, mocked_pygame_event_get, mocked_pygame_key_get_pressed, mocked_pygame_mouse_get_pressed, 
                       mocked_pygame_mouse_get_pos, mocked_pygame_display_flip):
     mocked_setup = callback(mocked_setup)
     e = Entity()
     c = Pygame()
     e.add_component(c)
     c.screen = "foo"
     mocked_draw = mock.MagicMock()
     mocked_input = mock.MagicMock()
     e.register_callback("draw",mocked_draw)
     e.register_callback("input",mocked_input)
     def event_callback(type,event):
         assert type == event.type
         event_callback.called += 1
     for type in TestPygameComponent._event_types:
         e.register_callback(c.pygame_mappings[type],partial(event_callback, type))
     event_callback.called = 0
     e.fire_callbacks("update",0)
     assert event_callback.called == len(TestPygameComponent._event_types)
     mocked_input.assert_called_once_with(
         mocked_pygame_mouse_get_pos.return_value,
         mocked_pygame_mouse_get_pressed.return_value,
         mocked_pygame_key_get_pressed.return_value)
     mocked_draw.assert_called_once_with(c.screen)
     
     assert mocked_pygame_display_flip.called
     assert mocked_pygame_mouse_get_pos.called
     assert mocked_pygame_mouse_get_pressed.called
     assert mocked_pygame_key_get_pressed.called
Example #21
File: S85.py  Project: sjava/weihu
def get_traffics(ip, infs):
    def _get_traffic(child, inf):
        rslt = do_some(child, 'disp int {inf}'.format(inf=inf))
        state = re_find(r'{inf}\scurrent\sstate\s:\s?(\w+\s?\w+)'
                        .format(inf=inf), rslt).lower()
        bw = re_find(r'(\d+[MG])bps-speed mode', rslt)
        if bw is None:
            bw = 0
        elif 'M' in bw:
            bw = int(bw.replace('M', ''))
        else:
            bw = int(bw.replace('G', '')) * 1000
        inTraffic = int(re_find(
            r'\d+ seconds input:\s+\d+\spackets/sec\s(\d+)\sbits/sec', rslt)) / 1000000
        outTraffic = int(re_find(
            r'\d+ seconds output:\s+\d+\spackets/sec\s(\d+)\sbits/sec', rslt)) / 1000000
        infDict = dict(name=inf, state=state, bw=bw,
                       inTraffic=inTraffic, outTraffic=outTraffic)
        return infDict

    try:
        child = telnet(ip)
        rslt = lmap(partial(_get_traffic, child), infs)
        close(child)
    except (pexpect.EOF, pexpect.TIMEOUT) as e:
        return ('fail', None, ip)
    return ('success', rslt, ip)
Example #22
def collect_git_projects(workspace):
    '''Collect the git projects in `workspace`'''
    wqueue = queue.Queue()
    compose(lambda projects: list(wqueue.put(x) for x in projects),
            partial(filter_project_folders, EXCLUDES),
            get_git_projects)(workspace)
    return wqueue
Example #23
    def test_fire_callbacks_pipeline_once(self,a,b,start_accum):
        e = Events()
        import operator
        # accum = add(a,start_accum)
        e.register_callback("pipeline", partial(operator.add,a))
        # accum = mul(b,accum) 
        e.register_callback_once("pipeline", partial(operator.mul,b))

        # accum == b*(a+start_accum)

        assert e.fire_callbacks_pipeline("pipeline", start_accum) == b*(a+start_accum)

        # because multiplication was only once
        # accum == a+start_accum
        
        assert e.fire_callbacks_pipeline("pipeline", start_accum) == a+start_accum
Example #24
def main():
    (x_train, y_train), (x_validation, y_validation) = load_data()

    model = Model(*juxt(identity, computational_graph(y_train.shape[1]))(Input(shape=x_train.shape[1:])))
    model.compile(loss='categorical_crossentropy', optimizer=SGD(momentum=0.9), metrics=['accuracy'])

    model.summary()
    # plot_model(model, to_file='./results/model.png')

    train_data      = ImageDataGenerator(featurewise_center=True, featurewise_std_normalization=True, width_shift_range=0.125, height_shift_range=0.125, horizontal_flip=True)
    validation_data = ImageDataGenerator(featurewise_center=True, featurewise_std_normalization=True)

    for data in (train_data, validation_data):
        data.fit(x_train)  # In practice, a featurewise fit on x_validation would not really be feasible, I think.

    batch_size = 100
    epochs     = 200

    results = model.fit_generator(train_data.flow(x_train, y_train, batch_size=batch_size),
                                  steps_per_epoch=x_train.shape[0] // batch_size,
                                  epochs=epochs,
                                  callbacks=[LearningRateScheduler(partial(getitem, tuple(take(epochs, concat(repeat(0.01, 1), repeat(0.1, 99), repeat(0.01, 50), repeat(0.001))))))],
                                  validation_data=validation_data.flow(x_validation, y_validation, batch_size=batch_size),
                                  validation_steps=x_validation.shape[0] // batch_size)

    with open('./results/history.pickle', 'wb') as f:
        pickle.dump(results.history, f)

    save_model(model, './results/model.h5')

    del model
Example #25
    def contributor_logins(self, repos):
        contributors = funcy.map(self.github.rx_contributors, repos)

        return rx.Observable.concat(contributors) \
            .buffer_with_count(len(repos)) \
            .map(funcy.flatten) \
            .map(funcy.partial(funcy.pluck, 'login'))
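
funcy.partial(funcy.pluck, 'login') builds a mapper that extracts the 'login' field from every contributor dict in a batch; a minimal standalone sketch:

import funcy

extract_logins = funcy.partial(funcy.pluck, 'login')
contributors = [{'login': 'alice'}, {'login': 'bob'}]
print(list(extract_logins(contributors)))  # ['alice', 'bob']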
Example #26
File: S85.py  Project: sjava/weihu
def get_traffics(ip, infs):
    def _get_traffic(child, inf):
        rslt = do_some(child, 'disp int {inf}'.format(inf=inf))
        state = re_find(
            r'{inf}\scurrent\sstate\s:\s?(\w+\s?\w+)'.format(inf=inf),
            rslt).lower()
        bw = re_find(r'(\d+[MG])bps-speed mode', rslt)
        if bw is None:
            bw = 0
        elif 'M' in bw:
            bw = int(bw.replace('M', ''))
        else:
            bw = int(bw.replace('G', '')) * 1000
        inTraffic = int(
            re_find(r'\d+ seconds input:\s+\d+\spackets/sec\s(\d+)\sbits/sec',
                    rslt)) / 1000000
        outTraffic = int(
            re_find(r'\d+ seconds output:\s+\d+\spackets/sec\s(\d+)\sbits/sec',
                    rslt)) / 1000000
        infDict = dict(name=inf,
                       state=state,
                       bw=bw,
                       inTraffic=inTraffic,
                       outTraffic=outTraffic)
        return infDict

    try:
        child = telnet(ip)
        rslt = lmap(partial(_get_traffic, child), infs)
        close(child)
    except (pexpect.EOF, pexpect.TIMEOUT) as e:
        return ('fail', None, ip)
    return ('success', rslt, ip)
Example #27
    def __init__(self, node, *args, **kwargs):
        self.node = node

        shutdown = partial(ChangeState,
                           node=node,
                           command=Can402StateCommand.shutdown,
                           timeout=self.node.atomic_timeout)

        reset_communication = partial(
            AsyncSendAndAwait,
            node=node,
            send_msg_factory=partial(CanOpenMessageNmtCommand,
                                     self.node.canopen, self.node.node_id,
                                     Can301StateCommand.reset_communication),
            await_msg_predicate=lambda msg: type(msg) == CanOpenMessageNmtBootup,
            timeout=self.node.atomic_timeout)

        operations = [shutdown, reset_communication]

        super(Quit, self).__init__(node, operations, *args, **kwargs)
Example #28
    def get_sleep_info(self, base_date=None, end_date=None):
        list_dates = self.split_dates_in_Ns(base_date, end_date, 100)
        return k(list_dates) @ partial(map,
         lambda x: self.authd_client.time_series('sleep',
                base_date=x['base_date'], end_date=x['end_date'])['sleep']) \
         @ itertools.chain.from_iterable @ list @ self._sleep_struct \
         @ 'end'
Example #29
    def _collect_feature_info(self, candidate_feature_diffs):
        """Collect feature info

        Args:
            candidate_feature_diffs (List[git.diff.Diff]): list of Diffs
                corresponding to admissible file changes compared to
                comparison ref

        Returns:
            List[Tuple[Callable, str, str]]: list of tuple of importer,
                module name, and module path. The "importer" is a callable that
                returns a module
        """

        # the directory containing ballet.yml
        project_root = self.project.path

        # the directory containing the package
        try:
            package_path = self.project.package.__path__[0]
            package_root = pathlib.Path(package_path).parent
        except (AttributeError, IndexError):
            logger.debug("Couldn't get package root, will try to recover",
                         exc_info=True)
            package_root = project_root

        for diff in candidate_feature_diffs:
            path = diff.b_path
            relpath = project_root.joinpath(path).relative_to(package_root)
            modname = relpath_to_modname(relpath)
            modpath = project_root.joinpath(path)
            importer = partial(import_module_at_path, modname, modpath)
            yield importer, modname, modpath
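
The partial here freezes the module name and path into a zero-argument importer, so no module is loaded until the caller invokes it. The helper below is an illustrative stand-in for that idea, not necessarily how ballet's import_module_at_path is implemented:

import importlib.util
from functools import partial

def load_module_at_path(modname, modpath):
    # Hypothetical equivalent of the deferred importer: load a module from an
    # explicit file path only when this function is actually called.
    spec = importlib.util.spec_from_file_location(modname, str(modpath))
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module

importer = partial(load_module_at_path, 'my_feature', 'features/my_feature.py')
# module = importer()  # the file is only read and executed at this point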
Example #30
File: tools.py  Project: sjava/webapp
def get_vlans(ip, model):
    funcs = {'s85': S85.get_vlans,
             't64g': T64.get_vlans,
             's89': S89.get_vlans,
             's8905e': S8905E.get_vlans,
             's93': S93.get_vlans_a}
    _get_vlans = partial(_model, funcs)
    return _get_vlans(dict(ip=ip, model=model))
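
_model itself is not shown in these snippets; the partial(_model, funcs) idiom only makes sense if it dispatches on the record's model key, roughly like this hypothetical sketch:

from functools import partial

def _model(funcs, record):
    # Hypothetical dispatcher: pick the handler for this device model and
    # apply it to the device's IP address.
    return funcs[record['model']](record['ip'])

funcs = {'demo': lambda ip: ('success', [], ip)}
_get_vlans = partial(_model, funcs)
print(_get_vlans(dict(ip='10.0.0.1', model='demo')))  # ('success', [], '10.0.0.1')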
Example #31
def scrape_since_last_reading():

    # postgres client
    client = Postgres.get_client()
    cursor = client.cursor()
    inserter = partial(insert, cursor)
    # get the last time we fetched some data,
    # looking at the most recent result in the db
    query = ' '.join([
        'select time from scraped_chart',
        'order by time desc',
        'limit 1',
    ])
    cursor.execute(query)
    latest_fetch_time = cursor.fetchone()[0]
    latest_fetch_unix = time.mktime(latest_fetch_time.timetuple())

    # now get USD_BTC history
    btc_price_hist = coin_history('bitcoin')
    # and write that history to DB,
    btc_rows = marshall(btc_price_hist)
    # NOTE since latest fetch time?
    # recent_btc = btc_rows[btc_rows['time'] > latest_fetch_time]
    # [inserter(row) for _, row in recent_btc.iterrows()]
    [inserter(row) for _, row in btc_rows.iterrows()]
    client.commit()
    logger.debug('Scraped USD_BTC')

    # now, a poloniex client
    polo = Poloniex.get_client()
    # and a method for grabbing historical prices
    grab_historical_prices = partial(historical_prices_of, polo,
                                     btc_price_hist)
    # for each market,
    for market in polo.returnTicker():
        # fetch all the chart data since last fetch
        generator = grab_historical_prices(
            market,
            start=latest_fetch_unix,
            end=time.time(),
        )
        list(map(inserter, generator))
        client.commit()
        logger.debug(f'Scraped {market}')

    cursor.close()
Example #32
def get_vertical_cages(rows):
    transpose_coordinates = lambda t: (t[1], t[0])
    res = get_horizontal_cages(transpose(rows))
    res = walk_keys(transpose_coordinates, res)
    transpose_coordinates_lst = compose(list,
                                        partial(map, transpose_coordinates))
    res = walk_values(transpose_coordinates_lst, res)
    return res
Example #33
def ospf_check():
    clear_log()
    devices = [x.split(',')[0] for x in open(devicesFile)]
    pool = Pool(processor)
    lock = Manager().Lock()
    list(pool.map(partial(_inf_ospf_check, lock), devices))
    pool.close()
    pool.join()
Example #34
    def _test_robust_transformer_pipeline(self, input_types, bad_input_checks,
                                          catches):
        FragileTransformerPipeline3 = funcy.partial(FragileTransformerPipeline, 3)
        return self._test_robust_transformer(
            input_types,
            bad_input_checks,
            catches,
            transformer_maker=FragileTransformerPipeline3)
Example #35
def plot_history(history):
    legends = ["train loss", "test loss", "train accuracy", "test accuracy"]
    i = 0

    def plot_values_collection(title, values_collection):
        plot.clf()
        plot.title(title)
        for values in values_collection:
            plot.plot(values, label=legends.pop(0))
        plot.legend()
        plot.ylabel(title.split(' ')[0])
        plot.xlabel("Epochs")
        plot.show()

    plot_values_collection(
        'Loss', map(partial(getitem, history), ('loss', 'val_loss')))
    plot_values_collection('Accuracy',
                           map(partial(getitem, history), ('acc', 'val_acc')))
Example #36
def gen_range_query2d_data(draw):
    global MAX_LEN
    max_key = 2147483647 #2**31
    
    ixys = draw(st.lists(
        st.tuples(
            st.integers(min_value=1, max_value=MAX_LEN),
            st.integers(min_value=1, max_value=max_key),
            st.integers(min_value=1, max_value=max_key),
        ),
        min_size=2, max_size=MAX_LEN,
        unique_by=lambda ixy:ixy[0]
    ))
    
    nt_ixys = F.lmap(tup(Ixy), ixys)
    # min/max x
    max_x_ = max(map(prop('x'),nt_ixys))
    max_xv = min(max_x_ + max_x_ // 2, max_key)
    min_x_ = min(map(prop('x'),nt_ixys))
    min_xv = max(min_x_ - min_x_ // 2, 1)
    min_x, max_x = sorted([
        draw(st.integers(
            min_value=min_xv, max_value=max_xv)),
        draw(st.integers(
            min_value=min_xv, max_value=max_xv))
    ])
    # min/max y
    max_y_ = max(map(prop('y'),nt_ixys))
    max_yv = min(max_y_ + max_y_ // 2, max_key)
    min_y_ = min(map(prop('y'),nt_ixys))
    min_yv = max(min_y_ - min_y_ // 2, 1)
    min_y, max_y = sorted([
        draw(st.integers(
            min_value=min_yv, max_value=max_yv)),
        draw(st.integers(
            min_value=min_yv, max_value=max_yv))
    ])
    
    def included(ixy):
        return(min_x <= ixy.x <= max_x
           and min_y <= ixy.y <= max_y)
    #def excluded(ixy): return key(ixy) < min_key or max_key < key(ixy)
    includeds = go(
        filter(included,nt_ixys),
        F.partial(sorted, key=prop('x')),
        F.curry(F.lmap)(tuple))
    #excludeds = [tuple(ixy) for ixy in filter(excluded,nt_ixys)]
    
    ixy_map = F.zipdict(
        map(F.first, ixys), map(tup(Ixy), ixys))
    return dict(
        ixys=ixys, ixy_map=ixy_map,
        min_x=min_x, max_x=max_x,
        min_y=min_y, max_y=max_y,
        includeds=includeds
    )
Example #37
def get_bingfa(ip):
    def _get_users(child, slot):
        record = do_some(child, 'disp max-online slot {s}'.format(s=slot))
        users = re_find(r'Max online users since startup\s+:\s+(\d+)', record)
        users = int(users or 0)
        date = re_find(r'Time of max online users\s+:\s+(\d{4}-\d{2}-\d{2})',
                       record)
        return (slot, users, date)

    try:
        child = telnet(ip)
        rslt = do_some(child, 'disp dev | in BSU')
        ff = compose(partial(select, bool), partial(map, r'(\d+)\s+BSU'))
        slots = ff(rslt.split('\r\n'))
        maxUsers = lmap(partial(_get_users, child), slots)
        close(child)
    except (pexpect.EOF, pexpect.TIMEOUT) as e:
        return ('fail', None, ip)
    return ('success', maxUsers, ip)
Example #38
    def _get_datatable_options(self):
        options = super(SampleAnnotations, self)._get_datatable_options()
        options['columns'] = [
            col for col in options['columns']
            if not isinstance(col, tuple) or col[1] is not None
        ]
        options['columns'].extend(
            (self.get_source_title(src), None, partial(self.get_extra, src))
            for src in self.sources)
        return options
Example #39
def make_simple_feed_builder(
    istrain_str='is_train',
    sparse=None
):
    builder = funcy.partial(
        simple_feed_builder,
        istrain_str=istrain_str,
        sparse=sparse
    )
    return builder
Example #40
File: switch.py  Project: sjava/weihu
def add_power_info():
    funcs = {'S8508': S85.get_power_info,
             'S8505': S85.get_power_info,
             'T64G': T64.get_power_info,
             'S8905': S89.get_power_info,
             'S8905E': S8905E.get_power_info,
             'S9306': S93.get_power_info,
             'S9303': S93.get_power_info}
    get_power_info = partial(_model, funcs)
    #  clear_log()
    nodes = graph.cypher.execute(
        "match (s:Switch) where s.snmpState='normal' return s.ip as ip,s.model as model")
    switches = [dict(ip=x['ip'], model=x['model']) for x in nodes]
    pool = Pool(processor)
    lock = Manager().Lock()
    _ff = partial(_add_power_info, lock)
    list(pool.map(compose(_ff, get_power_info), switches))
    pool.close()
    pool.join()
Example #41
File: bras.py  Project: sjava/weihu
def add_bingfa():
    funcs = {'ME60': ME60.get_bingfa,
             'ME60-X16': ME60.get_bingfa,
             'M6000': M6k.get_bingfa}
    _get_bf = partial(_model, funcs)

    clear()
    nodes = graph.find('Bras')
    bras = [(x['ip'], x['model']) for x in nodes]
    lmap(compose(_add_bingfa, _get_bf), bras)
Example #42
File: switch.py  Project: sjava/weihu
def add_traffics():
    funcs = {'S8508': S85.get_traffics,
             'S8505': S85.get_traffics,
             'T64G': T64.get_traffics,
             'S8905': S89.get_traffics,
             'S8905E': S8905E.get_traffics,
             'S9306': S93.get_traffics,
             'S9303': S93.get_traffics}
    get_traffics = partial(_model, funcs)
    #  clear_log()
    nodes = graph.cypher.execute(
        "match (s:Switch)--(i:Inf) where s.snmpState='normal' return s.ip as ip,collect(i.name) as infs,s.model as model")
    switchs = [dict(ip=x['ip'], infs=x['infs'], model=x['model'])
               for x in nodes]
    pool = Pool(processor)
    lock = Manager().Lock()
    _ff = partial(_add_traffics, lock)
    list(pool.map(compose(_ff, get_traffics), switchs))
    pool.close()
    pool.join()
Example #43
File: e8cCheck.py  Project: sjava/weihu
def saveOnus_f(ip):
    mark, rslt = Zte.get_onus(ip)[:-1]
    if mark == 'success' and rslt:
        _ff = lambda x: walk(partial(merge, (ip, x[0])), x[1])
        rslt1 = lmapcat(_ff, rslt)
        with open(result_file, 'a') as frslt:
            for record in rslt1:
                ip, port, onuid, loid = record
                frslt.write("{ip},{port},{onuid},{loid}\n"
                            .format(ip=ip, port=port, onuid=onuid, loid=loid))
    with open(log_file, 'a') as flog:
        flog.write("{ip}:{mark}\n".format(ip=ip, mark=mark))
Example #44
def call_once(fn, *args, **kwargs):
    fn = partial(fn, *args, **kwargs)
    @boost_fn
    def call_once_impl():
        if not call_once_impl.called:
            call_once_impl.result = fn()
            call_once_impl.called = True
        return call_once_impl.result

    # Attach the `called` flag to the wrapper function
    call_once_impl.called = False
    return call_once_impl
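
Assuming boost_fn returns an ordinary callable wrapper (as the decorator usage suggests), call_once behaves like a lazily cached thunk; a short usage sketch with an illustrative target function:

calls = []

def expensive(x):
    calls.append(x)
    return x * 2

once = call_once(expensive, 21)
assert once() == 42
assert once() == 42       # second call returns the cached result
assert calls == [21]      # the underlying function ran exactly once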
Example #45
    def __init__(self, node, *args, **kwargs):
        self.node = node

        shutdown = partial(ChangeState,
                           node=node,
                           command=Can402StateCommand.shutdown,
                           timeout=self.node.atomic_timeout)

        reset_communication = partial(
            AsyncSendAndAwait,
            node=node,
            send_msg_factory=partial(CanOpenMessageNmtCommand,
                                     self.node.canopen, self.node.node_id,
                                     Can301StateCommand.reset_communication),
            await_msg_predicate=lambda msg:
            (type(msg) == CanOpenMessageNmtBootup),
            timeout=self.node.atomic_timeout)

        operations = [shutdown, reset_communication]

        super(Quit, self).__init__(node, operations, *args, **kwargs)
Example #46
    def __init__(self, node, value, relative=False, immediatly=False, target_type="Position", *args, **kwargs):
        parameter_name = "Target " + target_type
        self.node = node
        self.value = value
        self.relative = relative
        self.immediatly = immediatly

        set_target = partial(SdoWriteObject,
                             node=node,
                             parameter_name=parameter_name,
                             value=self.value)
        notify_new_target = partial(NotifyNewTarget,
                                    node=node,
                                    relative=self.relative,
                                    immediatly=self.immediatly)

        operations = [set_target, notify_new_target]

        super(SetTarget, self).__init__(node, operations, *args, **kwargs)
Example #47
File: fs.py  Project: ballet/ballet
def _synctree(
        src: pathlib.Path, dst: pathlib.Path,
        onexist: Callable[[pathlib.Path],
                          None]) -> List[Tuple[pathlib.Path, str]]:
    result = []
    cleanup = []
    try:
        for _root, dirnames, filenames in os.walk(src):
            root = pathlib.Path(_root)
            relative_dir = root.relative_to(src)

            for dirname in dirnames:
                dstdir = dst.joinpath(relative_dir, dirname)
                if dstdir.exists():
                    if not dstdir.is_dir():
                        raise BalletError
                else:
                    logger.debug(f'Making directory: {dstdir!s}')
                    dstdir.mkdir()
                    result.append((dstdir, 'dir'))
                    cleanup.append(partial(os.rmdir, dstdir))

            for filename in filenames:
                srcfile = root.joinpath(filename)
                dstfile = dst.joinpath(relative_dir, filename)
                if dstfile.exists():
                    onexist(dstfile)
                else:
                    logger.debug(f'Copying file to destination: {dstfile!s}')
                    copyfile(srcfile, dstfile)
                    result.append((dstfile, 'file'))
                    cleanup.append(partial(os.unlink, dstfile))

    except Exception:
        with suppress(Exception):
            for f in reversed(cleanup):
                f()
        raise

    return result
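
The cleanup list in _synctree is an undo stack: each partial freezes the call that reverses one filesystem effect, and on failure they run in reverse order. A stripped-down, self-contained sketch of the same pattern (paths here are illustrative only):

import os
import tempfile
from functools import partial

cleanup = []
workdir = tempfile.mkdtemp()
newdir = os.path.join(workdir, 'child')

os.mkdir(newdir)
cleanup.append(partial(os.rmdir, newdir))  # how to undo the mkdir

try:
    raise RuntimeError('simulated failure')
except RuntimeError:
    for undo in reversed(cleanup):
        undo()  # 'child' is removed again

os.rmdir(workdir)  # tidy up the temporary parent directory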
Example #48
File: ME60.py  Project: sjava/weihu
def get_bingfa(ip):
    def _get_users(child, slot):
        record = do_some(
            child, 'disp max-online slot {s}'.format(s=slot))
        users = re_find(
            r'Max online users since startup\s+:\s+(\d+)', record)
        users = int(users or 0)
        date = re_find(
            r'Time of max online users\s+:\s+(\d{4}-\d{2}-\d{2})', record)
        return (slot, users, date)

    try:
        child = telnet(ip)
        rslt = do_some(child, 'disp dev | in BSU')
        ff = compose(partial(select, bool),
                     partial(map, r'(\d+)\s+BSU'))
        slots = ff(rslt.split('\r\n'))
        maxUsers = lmap(partial(_get_users, child), slots)
        close(child)
    except (pexpect.EOF, pexpect.TIMEOUT) as e:
        return ('fail', None, ip)
    return ('success', maxUsers, ip)
Example #49
File: bras.py  Project: sjava/weihu
def add_bingfa():
    funcs = {
        'ME60': ME60.get_bingfa,
        'ME60-X16': ME60.get_bingfa,
        'M6000-S': M6k.get_bingfa,
        'M6000': M6k.get_bingfa
    }
    _get_bf = partial(_model, funcs)

    clear()
    nodes = graph.find('Bras')
    bras = [(x['ip'], x['model']) for x in nodes]
    lmap(compose(_add_bingfa, _get_bf), bras)
Example #50
File: switch.py  Project: sjava/weihu
def add_power_info():
    funcs = {
        'S8508': S85.get_power_info,
        'S8505': S85.get_power_info,
        'T64G': T64.get_power_info,
        'S8905': S89.get_power_info,
        'S8905E': S8905E.get_power_info,
        'S9306': S93.get_power_info,
        'S9303': S93.get_power_info
    }
    get_power_info = partial(_model, funcs)
    #  clear_log()
    nodes = graph.cypher.execute(
        "match (s:Switch) where s.snmpState='normal' return s.ip as ip,s.model as model"
    )
    switches = [dict(ip=x['ip'], model=x['model']) for x in nodes]
    pool = Pool(processor)
    lock = Manager().Lock()
    _ff = partial(_add_power_info, lock)
    list(pool.map(compose(_ff, get_power_info), switches))
    pool.close()
    pool.join()
Example #51
File: ME60.py  Project: sjava/weihu
def get_ip_pool(ip):
    def _get_sections(child, name):
        rslt = do_some(child, 'disp cu configuration ip-pool {name}'.format(name=name))
        sections = re_all(r'section \d+ (\S+) (\S+)', rslt)
        return sections
    try:
        child = telnet(ip)
        rslt = do_some(child, 'disp domain 163.js | in pool-name')
        poolNames = re_all(r'pool-name\s+:\s(\S+)', rslt)
        ips = lmapcat(partial(_get_sections, child), poolNames)
        close(child)
    except(pexpect.EOF, pexpect.TIMEOUT) as e:
        return ('fail', None, ip)
    return ('success', ips, ip)
Example #52
File: Zte.py  Project: sjava/weihu
def get_onus(ip):
    mark, ports = get_pon_ports(ip)[:-1]
    if mark == 'fail':
        return ('fail', None, ip)
    try:
        child = telnet(ip)
        gpo = partial(get_port_onus, child)
        rslt = lmap(gpo, ports)
        child.sendline('exit')
        child.close()
    except (pexpect.EOF, pexpect.TIMEOUT) as e:
        return ('fail', None, ip)
    rslt1 = filter(lambda x: bool(x[1]), rslt)
    return ('success', rslt1, ip)
Example #53
def check_node_paths(nodes, analyses, status):
    """
    Checks that all variable paths listed in the QC file are valid. Sets an error
    message in the status if not.
    """
    variables = var.get_variable_names(status[nodes]['thresholds'])
    f = funcy.partial(var.is_variable_path_valid, status[analyses])
    errors = set(funcy.remove(f, variables))

    if len(errors) > 0:
        status['error'] = generator_error_string(variable_error_message,
                                                 errors)

    return status
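
funcy.remove keeps the items for which the predicate is falsy, so the partial above collects exactly the variable paths that fail the validity check; a tiny illustration with a stand-in predicate:

import funcy

def is_valid(path):
    return path.startswith('analyses/')

errors = set(funcy.remove(is_valid, ['analyses/a', 'bogus/b']))
print(errors)  # {'bogus/b'}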
Example #54
File: T64.py  Project: sjava/weihu
def get_groups(ip):
    def _get_desc(child, group):
        name = group['name']
        rslt = do_some(child, 'show run interface {name}'.format(name=name))
        desc = re_find(r'description\s(\S+ *\S*)', rslt)
        group['desc'] = desc
        if group['mode'] == 'active':
            group['mode'] = 'yes'
        return group

    try:
        child = telnet(ip)
        rslt = do_some(child, 'show run | in smartgroup [0-9]+')
        ff = rcompose(partial(map, lambda x: x.strip()),
                      distinct,
                      partial(map, r'(smartgroup\s\d+)\smode\s(\w+)'),
                      partial(map, lambda x: dict(name=x[0].replace(' ', ''), mode=x[1])))
        temp = ff(rslt.splitlines()[:-1])
        get_desc = partial(_get_desc, child)
        groups = lmap(get_desc, temp)
        close(child)
    except (pexpect.EOF, pexpect.TIMEOUT) as e:
        return ('fail', None, ip)
    return ('success', groups, ip)
Example #55
File: bras.py  Project: sjava/weihu
def bingfa_check():
    funcs = {'ME60': ME60.get_bingfa,
             'ME60-X16': ME60.get_bingfa,
             'M6000': M6k.get_bingfa}
    _get_bf = partial(_model, funcs)

    clear()
    nodes = graph.find('Bras')
    bras = [(x['ip'], x['model']) for x in nodes]
    rslt = map(_get_bf, bras)
    with open(logFile, 'w') as flog, open(infoFile, 'w') as frslt:
        for mark, record, ip in rslt:
            flog.write('{ip}:{mark}\n'.format(ip=ip, mark=mark))
            for slot, user, date in record:
                frslt.write('{ip},{slot},{user},{date}\n'
                            .format(ip=ip, slot=slot, user=user, date=date))
Example #56
File: ME60.py  Project: sjava/weihu
def get_vlan_users(ip, inf):
    def _get_users(child, i):
        rslt = do_some(child, 'disp access-user interface {i} | in /'.format(i=i))
        users = re_all(r'(\d+)/', rslt)
        return users

    try:
        child = telnet(ip)
        infs = do_some(child, 'disp cu interface | in Eth-Trunk{inf}\.'.format(inf=inf))
        infs = re_all(r'interface (\S+)', infs)
        rslt = lmapcat(partial(_get_users, child), infs)
        close(child)
        rslt = count_by(int, rslt)
    except (pexpect.EOF, pexpect.TIMEOUT) as e:
        return ('fail', None, ip)
    return ('success', rslt, ip)
Example #57
File: M6k.py  Project: sjava/weihu
def get_vlan_users(ip, inf):
    def _get_users(child, i):
        rslt = do_some(child, 'show subscriber interface {i} | in external-vlan'.format(i=i))
        vlans = re_all(r'external-vlan\s+:(\d+)', rslt)
        return vlans

    try:
        child = telnet(ip)
        rslt = do_some(child, 'show running-config | in smartgroup{inf}\.'.format(inf=inf))
        infs = distinct(re_all(r'(smartgroup\S+)', rslt))
        vlans = lmapcat(partial(_get_users, child), infs)
        close(child)
        vlans = count_by(int, vlans)
    except (pexpect.EOF, pexpect.TIMEOUT) as e:
        return ('fail', None, ip)
    return ('success', vlans, ip)
Example #58
File: Huawei.py  Project: sjava/weihu
def get_groups(ip):
    def _get_group(child, group):
        rslt = do_some(
            child, 'disp link-aggregation {group}'.format(group=group))
        desc = re_find(r'description:(\S+)', rslt)
        mode = re_find(r'work mode:\s+(\S+)', rslt)
        temp = re_all(r'(\d+/\d+)\s+(\d\S+)', rslt)
        temp1 = lmapcat(lambda x: ['{0}/{1}'.format(x[0], y)
                                   for y in x[1].split(',')], temp)
        return dict(name=group, desc=desc, mode=mode, infs=temp1)

    try:
        child = telnet(ip)
        temp = re_all(r'(\d+/\d+/\d+)', do_some(child,
                                                'disp link-aggregation all'))
        groups = lmap(partial(_get_group, child), temp)
        close(child)
    except (pexpect.EOF, pexpect.TIMEOUT) as e:
        return ('fail', None, ip)
    return ('success', groups, ip)