def zscore(collection, iteratee=None):
    """Calculate the standard score assuming normal distribution.

    If iteratee is passed, each element of `collection` is passed through an
    iteratee before the standard score is computed.

    Args:
        collection (list|dict): Collection to process.
        iteratee (mixed, optional): Iteratee applied per iteration.

    Returns:
        list: Standard score for each element. (The previous docstring said
            ``float``, but a list is returned.)

    Example:

        >>> results = zscore([1, 2, 3])
        # [-1.224744871391589, 0.0, 1.224744871391589]

    .. versionadded:: 2.1.0
    """
    array = pyd.map_(collection, iteratee)
    avg = mean(array)
    sig = std_deviation(array)
    # NOTE: a zero standard deviation (all-equal input) raises
    # ZeroDivisionError, matching the original behavior.
    return [(item - avg) / sig for item in array]
async def on_define(self, context):
    """Build the action definition, listing ExclusiveArea resources as choices.

    Queries the API configuration for resources, keeps only those of type
    ``ExclusiveArea`` and exposes them as the selectable domain of the
    ``resource`` argument.

    Returns:
        dict: Action schema with ``name``, ``func_name`` and ``args``.
    """
    print('define action of ' + self.name)
    api_config = context.api_configuration
    result = await api_config.get_resources()
    resources = pydash.filter_(result, {'resource_type': 'ExclusiveArea'})
    # Build the alias/value options with a comprehension instead of abusing
    # pydash.map_ for its side effects.
    resource_list = [
        {'alias': resource['name'], 'value': resource['id']}
        for resource in resources
    ]
    # Guard against an empty resource list; the original raised IndexError.
    default_resource = resource_list[0] if resource_list else None
    return {
        'name': self.name,
        'func_name': self.func_name,
        'args': [
            {
                'type': 'string',
                'key': 'resource',
                'default': default_resource,
                'domain': resource_list,
                'options': {'user_input': False}
            },
            {
                'type': 'object',
                'key': 'quit',
                'default': {},
                'domain': [],
                'options': {'user_input': False}
            }
        ]
    }
def zscore(collection, callback=None):
    """Calculate the standard score assuming normal distribution.

    If callback is passed, each element of `collection` is passed through a
    callback before the standard score is computed.

    Args:
        collection (list|dict): Collection to process.
        callback (mixed, optional): Callback applied per iteration.

    Returns:
        list: Standard score for each element. (The previous docstring said
            ``float``, but a list is returned.)

    Example:

        >>> results = zscore([1, 2, 3])
        # [-1.224744871391589, 0.0, 1.224744871391589]

    .. versionadded:: 2.1.0
    """
    array = pyd.map_(collection, callback)
    ave = average(array)
    sig = sigma(array)
    # NOTE: sigma == 0 (all-equal input) raises ZeroDivisionError, as before.
    return [(item - ave) / sig for item in array]
def __init__(self, documents, data, train_params, model_params, rankings=None,
             pairs_to_flip=None, query_tok_to_doc_tok=None,
             normalized_score_lookup=None, use_bow_model=False, is_test=False,
             rel_vs_irrel=False, candidates=None, num_to_rank=1000):
    """Pairwise-ranking dataset initializer.

    Builds per-ranking pair counts (used for indexing into pairs) and, when
    requested, rel-vs-irrel candidate bookkeeping on top of the base dataset.

    Args:
        documents: Corpus documents passed through to the base class.
        data: Raw training data passed through to the base class.
        train_params: Training hyperparameters (reads num_to_drop_in_ranking,
            bin_rankings, use_variable_loss, use_weighted_loss,
            num_neg_samples, swap_labels).
        model_params: Model hyperparameters, forwarded to the base class.
        rankings: Optional ranking list; may be truncated below.
        pairs_to_flip: Optional set of pairs whose labels are flipped.
        rel_vs_irrel: When True, precompute rel/irrel pairs per query.
        candidates: Candidate pool used only when rel_vs_irrel is True.
        num_to_rank: Depth of the candidate ranking (default 1000).
    """
    self.num_to_drop_in_ranking = train_params.num_to_drop_in_ranking
    if self.num_to_drop_in_ranking > 0:
        # Dropping ranked items is only implemented for un-binned rankings.
        assert train_params.bin_rankings == 1, 'bin_rankings != 1 is not supported'
        rankings = _drop_next_n_from_ranking(self.num_to_drop_in_ranking,
                                             rankings)
    super().__init__(documents, data, train_params, model_params,
                     rankings=rankings,
                     query_tok_to_doc_tok=query_tok_to_doc_tok,
                     normalized_score_lookup=normalized_score_lookup,
                     use_bow_model=use_bow_model, is_test=is_test)
    self.use_variable_loss = train_params.use_variable_loss
    self.use_weighted_loss = train_params.use_weighted_loss
    self.bin_rankings = train_params.bin_rankings
    self.num_documents = len(documents)
    # No negative sampling at test time.
    self.num_neg_samples = train_params.num_neg_samples if not is_test else 0
    self.rankings_for_train = self.rankings
    if self.bin_rankings:
        # One pair per adjacent bin boundary; rankings shorter than the bin
        # count contribute no pairs.
        num_pairs_per_ranking = _.map_(
            self.rankings_for_train,
            lambda ranking: (len(ranking[1]) - 1) * self.bin_rankings
            if len(ranking) > self.bin_rankings else 0)
    else:
        # All ordered pairs within a ranking: n^2 // 2 - n.
        num_pairs_per_ranking = _.map_(
            self.rankings_for_train,
            lambda ranking: (len(ranking[1])**2) // 2 - len(ranking[1]))
    # Cumulative sums let __getitem__ map a flat pair index to its ranking.
    self.cumu_ranking_lengths = np.cumsum(num_pairs_per_ranking)
    self._num_pairs = None
    if self.bin_rankings:
        if self.bin_rankings != 1:
            raise NotImplementedError
        self._num_pos_pairs = _get_num_pos_pairs_with_bins(
            self.rankings_for_train, self.bin_rankings)
    else:
        self._num_pos_pairs = _get_num_pairs(self.rankings_for_train, 0)
    self.pairs_to_flip = pairs_to_flip
    self.rel_vs_irrel = rel_vs_irrel
    if self.rel_vs_irrel:
        self.candidates = candidates
        self.queries = [query for query, ranking in self.rankings]
        self.rel_irrel_by_query = _get_rel_irrel_by_query(
            self.rankings, self.candidates, num_to_rank)
    self.swap_labels = train_params.swap_labels
def pulse_global(message_groups: List[List[Dict]],
                 interval: int = 5) -> Union[List[Dict], None]:
    """Merge per-group message pulses into one normalized global pulse train.

    Each group is converted to a pulse train, ``None`` trains are dropped,
    all trains are zero-padded to a shared time window, summed element-wise,
    and the resulting rates are normalized to the maximum rate.

    Args:
        message_groups: One list of message dicts per group.
        interval: Pulse bucket width, forwarded to ``pulse``/``zero_pulses``.

    Returns:
        List of pulse dicts with normalized ``rate``; ``[]`` for empty input.
    """
    if not message_groups:
        return []
    pulses: List[List[Dict]] = _.map_(
        message_groups, lambda message: pulse(message, interval))

    def remove_nones(pulses, pulse):
        # Drop groups whose pulse() result was None.
        if pulse is None:
            return pulses
        return _.push(pulses, pulse)

    pulses = _.reduce_(pulses, remove_nones, [])

    def pulses_align(pulses):
        # Pad every train with zero pulses so all share the same window.
        earliest_time = _.min_by(
            pulses, lambda pulse: pulse[0].get('time'))[0].get('time')
        # BUG FIX: the global end time is the 'time' of the LAST entry of the
        # latest-ending train; the original indexed [0] (its first entry),
        # which made end_time precede some trains' own last pulse.
        latest_time = _.max_by(
            pulses, lambda pulse: pulse[-1].get('time'))[-1].get('time')
        return _.map_(
            pulses,
            lambda pulse: _.concat(
                zero_pulses(start_time=earliest_time,
                            end_time=pulse[0].get('time'),
                            interval=interval),
                pulse,
                zero_pulses(start_time=pulse[-1].get('time'),
                            end_time=latest_time,
                            interval=interval)))

    pulses = pulses_align(pulses)

    def collapser(collapsed_pulses, pulses):
        # Element-wise sum of rates across the aligned trains.
        if not collapsed_pulses:
            return pulses

        def message_adder(index):
            collapsed_pulse = collapsed_pulses[index]
            pulse = pulses[index]
            return _.assign(
                collapsed_pulse,
                {'rate': collapsed_pulse.get('rate') + pulse.get('rate')})

        return _.map_(_.range_(len(collapsed_pulses)), message_adder)

    pulse_clusters = _.reduce_(pulses, collapser, [])
    max_pulse_rate = _.max_by(pulse_clusters, 'rate').get('rate')

    def rate_normalizer(max_rate: int):
        return lambda pulse_dict: _.assign(
            pulse_dict, {'rate': pulse_dict.get('rate') / max_rate})

    return _.map_(pulse_clusters, rate_normalizer(max_pulse_rate))
async def on_define(self, context):
    """Build the action definition for a move-to-location action.

    Fetches the known locations from the API configuration and exposes them
    as the selectable domain for the ``destination`` argument, alongside a
    boolean ``goal_finishing`` flag and a ``finishing_timeout``.

    Returns:
        dict: Action schema with ``name``, ``func_name`` and ``args``.
    """
    print('define action of ' + self.name)
    domain_goal_finishing = [{
        'alias': 'True',
        'value': True
    }, {
        'alias': 'False',
        'value': False
    }]
    api_config = context.api_configuration
    result = await api_config.get_locations()
    # Build the destination options with a comprehension instead of abusing
    # pydash.map_ for its side effects.
    domain_destination = [
        {'alias': location['name'], 'value': location['id']}
        for location in result
    ]
    # Guard against an empty location list; the original raised IndexError.
    default_destination = domain_destination[0] if domain_destination else None
    return {
        'name': self.name,
        'func_name': self.func_name,
        'args': [{
            'key': 'destination',
            'options': {
                'min': 0,
                'user_input': False,
                'max': 0
            },
            'default': default_destination,
            'domain': domain_destination,
            'type': 'string'
        }, {
            'key': 'goal_finishing',
            'type': 'boolean',
            'default': domain_goal_finishing[0],
            'domain': domain_goal_finishing,
        }, {
            'key': 'finishing_timeout',
            'type': 'number',
            'default': 30,
            'domain': [0, 10, 20, 30, 40, 50, 60],
            'options': {
                'user_input': True,
                'min': 0,
                'max': 300
            }
        }]
    }
async def on_define(self, context):
    """Build the action definition for a resource-handling action.

    Fetches resources, keeps only ``ExclusiveArea`` entries for the
    ``resource_id`` choice list, and exposes a fixed ``resource_type`` enum
    plus a free-form ``params`` object.

    Returns:
        dict: Action schema with ``name``, ``func_name`` and ``args``.
    """
    print('define action of ' + self.name)
    api_config = context.api_configuration
    result = await api_config.get_resources()
    resources = pydash.filter_(result, {'resource_type': 'ExclusiveArea'})
    # Build the alias/value options with a comprehension instead of abusing
    # pydash.map_ for its side effects.
    resource_list = [
        {'alias': resource['name'], 'value': resource['id']}
        for resource in resources
    ]
    # Guard against an empty resource list; the original raised IndexError.
    default_resource = resource_list[0] if resource_list else None
    return {
        'name': self.name,
        'func_name': self.func_name,
        'args': [{
            'key': 'resource_id',
            'type': 'string',
            'default': default_resource,
            'domain': resource_list,
        }, {
            'key': 'resource_type',
            'type': 'string',
            'enum': ['ExclusiveArea', 'Autodoor', 'Teleporter'],
            'default': {
                'alias': 'ExclusiveArea',
                'value': 'ExclusiveArea'
            },
            'domain': [{
                'alias': 'ExclusiveArea',
                'value': 'ExclusiveArea'
            }, {
                'alias': 'Autodoor',
                'value': 'Autodoor'
            }, {
                'alias': 'Elevator',
                'value': 'Teleporter'
            }]
        }, {
            'key': 'params',
            'type': 'object',
            'default': {},
            'domain': {}
        }]
    }
def scale(array, maximum=1):
    """Scale list of value to a maximum number.

    Args:
        array (list): Numbers to scale.
        maximum (number): Maximum scale value.

    Returns:
        list: Scaled numbers.

    Example:

        >>> scale([1, 2, 3, 4])
        [0.25, 0.5, 0.75, 1.0]
        >>> scale([1, 2, 3, 4], 1)
        [0.25, 0.5, 0.75, 1.0]
        >>> scale([1, 2, 3, 4], 4)
        [1.0, 2.0, 3.0, 4.0]
        >>> scale([1, 2, 3, 4], 2)
        [0.5, 1.0, 1.5, 2.0]

    .. versionadded:: 2.1.0
    """
    array_max = max(array)
    # Hoist the loop-invariant ratio out of the loop and use a comprehension
    # instead of pyd.map_ with a lambda.
    factor = maximum / array_max
    return [item * factor for item in array]
def power(x, n):
    """Raise `x` to the `n` power.

    Numbers are raised directly; for a list, every element is raised.
    Any other input yields ``None``.

    Args:
        x (number): Base number.
        n (number): Exponent.

    Returns:
        number: Result of calculation.

    Example:

        >>> power(5, 2)
        25
        >>> power(12.5, 3)
        1953.125

    See Also:
        - :func:`power` (main definition)
        - :func:`pow_` (alias)

    .. versionadded:: 2.1.0
    """
    if pyd.is_number(x):
        return pow(x, n)
    if pyd.is_list(x):
        return [pow(item, n) for item in x]
    return None
def unzip_with(array, callback=None):
    """Like :func:`unzip`, but combines each regrouped column via `callback`.

    The callback is invoked with four arguments:
    ``(accumulator, value, index, group)``.

    Args:
        array (list): List to process.
        callback (callable, optional): Function to combine regrouped values.

    Returns:
        list: Unzipped list.

    Example:

        >>> from pydash import add
        >>> unzip_with([[1, 10, 100], [2, 20, 200]], add)
        [3, 30, 300]

    .. versionadded:: 3.3.0
    """
    if not array:
        return []
    regrouped = unzip(array)
    if callback is None:
        return regrouped

    def combine(group):
        # Fold each regrouped column down to a single value.
        return pyd.reduce_(group, callback, None)

    return pyd.map_(regrouped, combine)
def dark_chocolate(_path):
    """Convert every ``*.json`` file under ``_path`` to YOLO-format text.

    Each JSON file is folded with ``coco_mixer``; the resulting items'
    ``darkchocolate`` fields (id, cx, cy, w, h) are written one per line to
    the hardcoded output directory.

    Args:
        _path (str): Directory containing the input ``.json`` files.

    Returns:
        list: ``ingredients`` return values (side-effect driven, all None).
    """
    def ingredients(x, _path):
        with open(_path + '/' + x, 'r') as f:
            xoxo = _.reduce_right(json.load(f), coco_mixer)
        print(xoxo)
        # Use identity comparison with None (PEP 8) instead of "!= None".
        if len(xoxo) > 0 and xoxo[0] is not None:
            # NOTE: hardcoded, machine-specific output directory — should be
            # made configurable.
            with open(
                    "/home/anandita/Documents/acad/sem8/idp/test/yolo_format/"
                    + str(xoxo[0]['output']), 'w') as makefile:
                for item in xoxo:
                    # id, center_x, center_y, width, height — space separated.
                    fields = item['darkchocolate'][:5]
                    makefile.write(' '.join(str(value) for value in fields)
                                   + '\n')

    return _.map_([
        pos_json for pos_json in os.listdir(_path)
        if pos_json.endswith('.json')
    ], lambda x: ingredients(x, _path))
def task_reports(name, _id, endpoint, method="GET", source='report',
                 args=None, chain=None):
    """Resolve a scheduled report and dispatch it as a webhook task.

    Looks up the report referenced in ``args`` on the external scheduler; on
    success enriches ``args`` with the report's filters/component and queues
    :func:`task_webhook`; on failure queues :func:`task_deplete`.

    Args:
        name (str): Task name, forwarded to the webhook task.
        _id (str): Scheduler entity id.
        endpoint (str): Webhook endpoint path.
        method (str): HTTP method for the webhook call.
        source (str): Request source label.
        args (list, optional): ``{'key', 'value'}`` pairs. Defaults to ``{}``.
        chain (list, optional): Follow-up task chain. Defaults to ``[]``.

    Returns:
        dict: ``msg`` and the scheduler's ``status_code``.
    """
    # BUG FIX: the defaults were mutable (``args={}``, ``chain=[]``) and so
    # shared across calls; create them per call instead.
    if args is None:
        args = {}
    if chain is None:
        chain = []
    msg = "Task Report - %s" % _id
    normalize = from_pairs(map_(args, lambda i: [i['key'], i['value']]))
    query = json.dumps({
        '_id': normalize.get('report_id'),
        'owner._id': normalize.get('owner_user'),
        'active': True
    })
    resource = ExternalMaestroScheduler(_id) \
        .post_request(path="reports", body={'query': query})
    if resource.get_status() >= 400:
        msg = resource.get_error()
        task_deplete.delay(msg, _id)
    if resource.get_status() < 400:
        result = resource.get_results()
        msg = "Report success - %s" % _id
        if result.get('found', 0) == 1:
            objs = pick(result.get('items')[0], ['filters', 'component'])
            transformParams(args, objs)
            task_webhook.delay(name, _id, endpoint, source, method, args,
                               chain)
    return {'msg': msg, 'status_code': resource.get_status()}
def task_webhook(name, _id, endpoint, source=None, method="GET", args=None,
                 chain=None):
    """Fire a webhook request on the external scheduler and notify on success.

    Args:
        name (str): Task name used in the notification message.
        _id (str): Scheduler entity id.
        endpoint (str): Request path on the scheduler.
        source (str, optional): Request source label.
        method (str): HTTP verb; resolved to ``<verb>_request`` on the client.
        args (list, optional): ``{'key', 'value'}`` pairs forming the request
            body. Defaults to ``{}``.
        chain (list, optional): Follow-up task chain. Defaults to ``[]``.

    Returns:
        dict: ``notify_id`` on success, ``msg``/``deple_id`` on request error,
        or ``None`` when the request yielded no result.
    """
    # BUG FIX: the defaults were mutable (``args={}``, ``chain=[]``) and so
    # shared across calls; create them per call instead.
    if args is None:
        args = {}
    if chain is None:
        chain = []
    normalize = from_pairs(map_(args, lambda i: [i['key'], i['value']]))
    msg = "Scheduler run - %s" % (name)
    EREquester = ExternalMaestroScheduler(_id, source) \
        .set_headers(create_jwt())
    try:
        funcm = "%s_request" % method.lower()
        result = getattr(EREquester, funcm)(path=endpoint, body=normalize)\
            .get_results()
    except requests.exceptions.RequestException as error:
        deple_id = task_deplete.delay(str(error), _id)
        # BUG FIX: the original returned ``result`` here, but ``result`` is
        # unbound when the request raised — that was a NameError path.
        return {'msg': str(error), 'deple_id': deple_id}
    if result:
        notify_id = task_notify_event.delay(msg=msg,
                                            roles=EREquester.templateRoles(),
                                            description=result,
                                            status='success')
        call_chains(chain)
        return {'notify_id': notify_id}
def zscore(collection, iteratee=None):
    """Calculate the standard score assuming normal distribution.

    If iteratee is passed, each element of `collection` is passed through an
    iteratee before the standard score is computed.

    Args:
        collection (list|dict): Collection to process.
        iteratee (mixed, optional): Iteratee applied per iteration.

    Returns:
        list: Standard score for each element. (The previous docstring said
            ``float``, but a list is returned.)

    Example:

        >>> results = zscore([1, 2, 3])
        # [-1.224744871391589, 0.0, 1.224744871391589]

    .. versionadded:: 2.1.0
    """
    array = pyd.map_(collection, iteratee)
    avg = mean(array)
    sig = std_deviation(array)
    # NOTE: a zero standard deviation (all-equal input) raises
    # ZeroDivisionError, matching the original behavior.
    return [(item - avg) / sig for item in array]
def update_yield_strength_regression_data():
    """Load composed training data and start yield-strength regression.

    Fetches every row of ``composed_data``, extracts feature/target arrays
    with ``extract_x``/``extract_y`` and hands them to
    ``start_regression_learning``. Cursor and connection are now closed
    deterministically (the original leaked both).
    """
    conn = db.get_connection()
    try:
        cur = conn.cursor()
        try:
            sql_select = "SELECT * FROM rloveshhenko$mydbtest.composed_data"
            cur.execute(sql_select)
            data = cur.fetchall()
        finally:
            cur.close()
    finally:
        conn.close()
    x = np.array(map_(data, extract_x))
    y = np.array(map_(data, extract_y))
    data = {'x': x, 'y': y}  # 'ndarray' is not JSON serializable
    # writeDataToFile(data, 'regressionData.txt')
    start_regression_learning(data)
def join(array, separator=""):
    """Joins an iterable into a string using `separator` between each element.

    Args:
        array (iterable): Iterable to implode.
        separator (str, optional): Separator to using when joining. Defaults
            to ``''``.

    Returns:
        str: Joined string.

    Example:

        >>> join(['a', 'b', 'c']) == 'abc'
        True
        >>> join([1, 2, 3, 4], '&') == '1&2&3&4'
        True
        >>> join('abcdef', '-') == 'a-b-c-d-e-f'
        True

    See Also:
        - :func:`join` (main definition)
        - :func:`implode` (alias)

    .. versionadded:: 2.0.0

    .. versionchanged:: 3.0.0
        Modified :func:`implode` to have :func:`join` as main definition and
        :func:`implode` as alias.
    """
    # Stringify each element first, then glue with the stringified separator.
    parts = pyd.map_(array or (), pyd.to_string)
    return pyd.to_string(separator).join(parts)
def join(array, separator=''):
    """Joins an iterable into a string using `separator` between each element.

    Args:
        array (iterable): Iterable to implode.
        separator (str, optional): Separator to using when joining. Defaults
            to ``''``.

    Returns:
        str: Joined string.

    Example:

        >>> join(['a', 'b', 'c']) == 'abc'
        True
        >>> join([1, 2, 3, 4], '&') == '1&2&3&4'
        True
        >>> join('abcdef', '-') == 'a-b-c-d-e-f'
        True

    See Also:
        - :func:`join` (main definition)
        - :func:`implode` (alias)

    .. versionadded:: 2.0.0

    .. versionchanged:: 3.0.0
        Modified :func:`implode` to have :func:`join` as main definition and
        :func:`implode` as alias.
    """
    # An empty/None iterable joins to ''. Elements and separator are both
    # coerced through pyd.to_string before joining.
    glue = pyd.to_string(separator)
    return glue.join(pyd.map_(array or (), pyd.to_string))
def test_bigger_field(self) -> None:
    """Available cells for a 4x6 board with two decks placed."""
    deck = CellState.CELL_DECK.value
    field = [
        [0, 0, 0, 0, 0, 0],
        [0, deck, 0, 0, 0, 0],
        [0, 0, 0, 0, deck, 0],
        [0, 0, 0, 0, 0, 0],
    ]
    expected = [
        Coord(pos)
        for pos in [(0, 3), (0, 4), (0, 5), (3, 0), (3, 1), (3, 2)]
    ]
    self.assertEqual(expected,
                     sorted(get_available_cells(field, Coord((4, 6)))))
def round_(x, precision=0):
    """Round number to precision.

    Numbers are rounded directly; lists are rounded element-wise. Any other
    input yields ``None``.

    Args:
        x (number): Number to round.
        precision (int, optional): Rounding precision. Defaults to ``0``.

    Returns:
        int: Rounded number.

    Example:

        >>> round_(3.275) == 3.0
        True
        >>> round_(3.275, 1) == 3.3
        True

    See Also:
        - :func:`round_` (main definition)
        - :func:`curve` (alias)

    .. versionadded:: 2.1.0
    """
    rounder = pyd.partial_right(round, precision)
    if pyd.is_number(x):
        return rounder(x)
    if pyd.is_list(x):
        # Wrap in a lambda so pydash treats the partial as a unary iteratee.
        # pylint: disable=unnecessary-lambda
        return pyd.map_(x, lambda item: rounder(item))
    return None
def power(x, n):
    """Calculate exponentiation of `x` raised to the `n` power.

    Numbers are raised directly; lists are raised element-wise. Any other
    input yields ``None``.

    Args:
        x (number): Base number.
        n (number): Exponent.

    Returns:
        number: Result of calculation.

    Example:

        >>> power(5, 2)
        25
        >>> power(12.5, 3)
        1953.125

    .. versionadded:: 2.1.0

    .. versionchanged:: 4.0.0
        Removed alias ``pow_``.
    """
    if pyd.is_number(x):
        return pow(x, n)
    elif pyd.is_list(x):
        return pyd.map_(x, lambda item: pow(item, n))
    else:
        return None
def test_map_(case, expected, sort_results):
    """map_ produces the expected result (optionally order-insensitive)."""
    result = _.map_(*case)
    if sort_results:
        result = sorted(result)
    assert result == expected
async def most_active_by_channel_route(request: Request) -> Response:
    """Return the usernames of the 5 most active users in a channel.

    Responds 400 when no session token is present or the credentials are
    invalid; otherwise 200 with the username list.
    """
    user_session = await route_helpers.get_session_from_request(request)
    if user_session is None:
        return json_response({
            'status': 400,
            'data': [],
            'message': 'No authentication token provided',
            'errors': []
        })
    try:
        mattermost = client.Client(user_session)
    # BUG FIX: a bare ``except:`` also swallowed SystemExit/KeyboardInterrupt;
    # catch Exception instead.
    except Exception:
        return json_response({
            'status': 400,
            'data': {},
            'message': 'Invalid credentials',
            'errors': []
        })
    channel_id = request.match_info['channel_id']
    most_active_users = _.map_(most_active.most_active_n_users(
        mattermost.get_channel_messages(channel_id), 5
    ), lambda user_id: mattermost.get_username_by_id(user_id))
    return json_response({
        'status': 200,
        'data': most_active_users,
        'message': 'Success',
        'errors': []
    })
def check_fleet_config(
        fleet: Any,
        is_setup_stage=False) -> Tuple[bool, Optional[Dict[int, int]]]:
    """Validate the fleet configuration (one 4-deck ship, two 3-deck, etc.).

    Args:
        fleet: List of ships.
        is_setup_stage: When True, the missing-ship check is relaxed (the
            board is still being filled and the player has not yet placed
            every ship).

    Returns:
        (bool, dict | None):
        - fleet exactly matches the expected config -> (True, None)
        - extra ships present -> (False, None)
        - ships missing -> (is_setup_stage, missing-slot dict)
    """
    # Count ships by their length (number of decks).
    lengths = map(len, fleet)
    config = Counter(lengths)
    if config == SHIP_CONFIG:
        return True, None
    # Per-length difference between the expected and the actual counts.
    configs = group_by_keys((config, SHIP_CONFIG), 0)
    diff = py_.map_values(configs, lambda counts: counts[1] - counts[0])
    # Checking for extra ships (negative diff => too many of that length).
    extra_ships = any(py_.map_(list(diff.values()), lambda x: x < 0))
    if extra_ships:
        return False, None
    # Positive diff => that many ships of this length still missing.
    missing_ships = {k: v for k, v in diff.items() if v > 0}
    if missing_ships:
        return is_setup_stage, missing_ships
def update_displayed_options(self, selected_object, changed_variable):
    """Refresh the form when a dropdown selection changes.

    Rebuilds the parser lookup keys from the chain of dropdown values,
    then shows the argument fields / action buttons for the resolved
    callable, if any.

    NOTE(review): indentation reconstructed from a collapsed source line —
    confirm the nesting against the original file.
    """
    if 'is_callable' in selected_object and selected_object['is_callable']:
        # Highlight the dropdown that changed.
        self.dropdown_map[str(changed_variable)].config(
            font='TkDefaultFont 20')
        # Join the dropdown values into dotted lookup keys; the trailing
        # entries are trimmed differently depending on whether the last
        # dropdown already holds a value.
        if self.variables[-1].get():
            key = '.'.join(
                map_(self.variables,
                     lambda variable: variable.get())[:-1]).replace(
                         self.cli.entry_name, self.cli.root_module_name, 1)
            action_key = '.'.join(
                map_(self.variables,
                     lambda variable: variable.get())).replace(
                         self.cli.entry_name, self.cli.root_module_name, 1)
        else:
            key = '.'.join(
                map_(self.variables,
                     lambda variable: variable.get())[:-2]).replace(
                         self.cli.entry_name, self.cli.root_module_name, 1)
            action_key = '.'.join(
                map_(self.variables,
                     lambda variable: variable.get())[:-1]).replace(
                         self.cli.entry_name, self.cli.root_module_name, 1)
        if key in self.cli.parsers:
            parser = self.cli.parsers[key]
            last_selected_variable = self.getLastSelectedVariable()
            if 'callables' in parser and last_selected_variable in parser[
                    'callables']:
                callable_item = parser['callables'][
                    self.getLastSelectedVariable()]
                if self.variables[-1].get(
                ) or 'function_name' in selected_object:
                    # Introspect the target callable's parameters to build
                    # one input field per (non-self) parameter.
                    inspect_function = inspect.signature(
                        get(callable_item['class_ref'],
                            callable_item['function_name']))
                    self.run_command_button.pack()
                    self.copy_command_button.pack()
                    for key in inspect_function.parameters:
                        if key != 'self':
                            self.add_field_to_form(
                                key, inspect_function.parameters[key], parser)
            # Prefer the action-level parser when one exists.
            if action_key in self.cli.parsers:
                parser = self.cli.parsers[action_key]
            else:
                parser = parser['callables'][self.getLastSelectedVariable()]
            self.add_actions_to_form(selected_object)
            self.display_help_text(parser)
def add_dropdown_option(self, selected_object):
    """Append a dropdown populated with the object's choice names."""
    variable = tk.StringVar(self)
    variable.trace('w', self.update_options)
    choice_names = map_(selected_object["choices"],
                        lambda item: item['name'])
    option_menu = tk.OptionMenu(self, variable, *choice_names)
    self.dropdown_map[str(variable)] = option_menu
    self.variables.append(variable)
    option_menu.pack(side=tk.LEFT)
def getAzureLoadBalancerNatPool(self, token: str, subscription_id: str,
                                resource_group_name: str,
                                lb_name: str) -> list:
    """Return frontend ports of the load balancer's SSH inbound NAT rules.

    Args:
        token: Azure bearer token.
        subscription_id: Azure subscription id.
        resource_group_name: Resource group containing the load balancer.
        lb_name: Load balancer name.

    Returns:
        list: ``frontendPort`` of every inbound NAT rule whose name
        contains ``'ssh'``.
    """
    url = f'https://management.azure.com/subscriptions/{subscription_id}/resourceGroups/{resource_group_name}/providers/Microsoft.Network/loadBalancers/{lb_name}?api-version=2018-11-01'
    res = requests.get(url, headers={'Authorization': f'Bearer {token}'})
    # Use Response.json() so requests handles content decoding itself,
    # instead of json.loads(res.text).
    content = res.json()
    ssh_rules = pydash.filter_(content['properties']['inboundNatRules'],
                               lambda item: 'ssh' in item['name'])
    return pydash.map_(ssh_rules, 'properties.frontendPort')
async def turn_on_electrodes(self, payload, params):
    """Enable the control-board channels mapped to the given electrode ids.

    Args:
        payload: List of electrode ids to activate.
        params: Unused here.
    """
    # Get the three object from device-model
    scicad = MicropedeAsync(APPNAME, port=self.port, loop=self.loop)
    three_object = await scicad.get_state('device-model', 'three-object')
    active_electrodes = payload

    def active_filter(obj):
        # Keep only objects whose id is in the requested electrode list.
        return _.includes(active_electrodes, obj["id"])

    active_objects = _.filter_(three_object, active_filter)
    # Pluck each object's 'channel' and coerce to int channel indices.
    channels = _.map_(_.map_(active_objects, "channel"), int)
    max_channels = self.control_board.number_of_channels
    # One flag per board channel; 1 = on for every selected channel.
    channel_states = np.zeros(max_channels, dtype=int)
    channel_states[channels] = 1
    self.control_board.set_state_of_channels(channel_states)
    print(self.control_board.state_of_channels)
    await self.update_board_info()
def find_adjacent_cells(origin: Coord, cells: List[Coord],
                        only_orthogonal: bool = False) -> List[Coord]:
    """Return the cells from ``cells`` that neighbor ``origin``.

    :param origin: Cell whose neighbors are sought.
    :param cells: Candidate cells.
    :param only_orthogonal: When True, only the four orthogonal neighbors
        count; otherwise the full adjacent square does.
    :return: Neighboring cells, in their original order.
    """
    chosen_nearness = orthogonal if only_orthogonal else adjacent_square
    # A cell neighbors origin when its offset falls in the nearness set.
    return [cell for cell in cells if (cell - origin) in chosen_nearness]
def main():
    """Run an interactive two-player TicTacToe session on the console.

    Alternates turns until a player wins or the grid fills (draw). Input is
    read as ``row,column`` pairs from stdin.
    """
    print("Welcome to TicTacToe")
    player1 = Player(input("Enter name for player1: "), CROSS_SYMBOL)
    player2 = Player(input("Enter name for player2: "), NAUGHT_SYMBOL)
    game = TicTacToe(GRID_SIZE, player1, player2)
    game.print_grid()
    # Keep playing while the state machine is in one of the two turn states.
    while (game.state == TicTacToe.STATES.CROSS_TURN
           or game.state == TicTacToe.STATES.NAUGHT_TURN):
        if game.state == TicTacToe.STATES.CROSS_TURN:
            print("Player1 turn")
            while True:
                # Parse "row,column" into a pair of ints.
                marker = pydash.map_(
                    input("Input row, column to place marker in: ").split(
                        ","), int)
                if game.place_marker(CROSS_SYMBOL, marker[0], marker[1]):
                    game.player1.set_marker(marker)
                    game.player1.process_move()
                    game.state = TicTacToe.STATES.NAUGHT_TURN
                    if game.player1.is_winner():
                        game.state = TicTacToe.STATES.CROSS_WON
                        print("{} wins the game".format(game.player1.name))
                    break
                else:
                    # Re-prompt until the chosen cell is free.
                    print("Please input marker into empty cell")
        else:
            print("Player2 turn")
            while True:
                marker = pydash.map_(
                    input("Input row, column to place marker in: ").split(
                        ","), int)
                if game.place_marker(NAUGHT_SYMBOL, marker[0], marker[1]):
                    game.player2.set_marker(marker)
                    game.player2.process_move()
                    game.state = TicTacToe.STATES.CROSS_TURN
                    if game.player2.is_winner():
                        game.state = TicTacToe.STATES.NAUGHT_WON
                        print("{} wins the game".format(game.player2.name))
                    break
                else:
                    print("Please input marker into empty cell")
        # A full grid with no winner ends the game in a draw.
        if game.move_count == GRID_SIZE**2:
            game.state = TicTacToe.STATES.DRAW
            print("Game is draw!")
def process_raw_candidates(query_name_to_id, queries, document_title_to_id,
                           query_names, raw_ranking_candidates):
    """Convert title-based ranking candidates to id-based, keyed by query.

    Args:
        query_name_to_id: Maps a query name to its query id.
        queries: Query token sequences, indexed by query id.
        document_title_to_id: Maps a document title to its document id.
        query_names: Query names to keep from the raw candidates.
        raw_ranking_candidates: Maps query name -> list of document titles.

    Returns:
        dict: Maps a stringified query-token key to candidate document ids.
    """
    # Restrict to the requested query names.
    ranking_candidates = _.pick(raw_ranking_candidates, query_names)
    # Unknown titles map to document id 0.
    lookup_by_title = lambda title: document_title_to_id.get(title) or 0
    test_ranking_candidates = _.map_values(
        ranking_candidates,
        lambda candidate_names: _.map_(candidate_names, lookup_by_title))
    # Re-key by the query's token sequence, stringified with the surrounding
    # brackets stripped ([1:-1]).
    return _.map_keys(
        test_ranking_candidates,
        lambda ranking, query_name: str(queries[
            query_name_to_id[query_name]])[1:-1])
def convert_to_pivot(start_date, end_date, results):
    """Convert flat results into pivot form grouped by employee name.

    Args:
        start_date: Period start, forwarded to ``fill_empty_day_in_result``.
        end_date: Period end, forwarded likewise.
        results: Flat result records, each carrying ``employee.name``.

    Returns:
        list: One ``{'employee_name', 'results'}`` dict per employee, with
        empty days filled in.
    """
    grouped = group_by(results, 'employee.name')
    names = sorted_uniq(map_(results, 'employee.name'))
    # Build the pivot rows with a comprehension; the leftover debug print of
    # the intermediate list was removed.
    pivot = [{'employee_name': name, 'results': grouped[name]}
             for name in names]
    return ResultCtrl.fill_empty_day_in_result(start_date, end_date, pivot)
def allocate_first():
    """Try to allocate each suitable device in turn, stopping at the first
    success.

    Reads ``self``, ``suitable_devices`` and ``timeout_seconds`` from the
    enclosing scope (this is a closure defined inside a method).
    """
    def try_allocate(device):
        # Allocation failures surface as AssertionError; log and move on.
        try:
            return self.allocate(device, timeout_seconds=timeout_seconds)
        except AssertionError as error:
            self.logger.warning(
                f"{device.get('serial')}Allocation fails: {error}")
            return None

    # Each task is a zero-arg callable that allocates its bound device.
    tasks = map_(suitable_devices, lambda item: wrap(item, try_allocate))
    # NOTE(review): find() returns the first WRAPPER whose call was truthy,
    # not the allocation result itself — and the device is allocated again
    # if the caller invokes the returned function. Confirm callers expect
    # the callable rather than the allocated device.
    return find(tasks, lambda allocFunc: allocFunc())
async def perform_autodoor(self, context, target_autodoor):
    """Drive the worker through an autodoor: enter, open, pass, close.

    Sorts the door's align points by path distance from the worker, treats
    the nearest as the entry and the farthest as the exit, then sequences
    the move/open/pass/close/release steps.

    Raises:
        Exception: When the worker and the door are on different maps.

    Returns:
        bool: True on completion.
    """
    worker = context.blackboard.get_worker()
    worker_location = pydash.get(worker, 'type_specific.location')
    aligns = target_autodoor['aligns']
    if worker_location['map'] != target_autodoor['map']:
        print(
            'perform_autodoor : does not match worker\'s current map with target resources {}'
            .format({
                'worker_map': worker_location['map'],
                'resource_map': target_autodoor['map']
            }))
        raise Exception(
            'perform_autodoor : does not match worker\'s current map with target resources'
        )
    # Lazily create the actuator and its path planner on first use.
    if self._actuator is None:
        self._actuator = Actuator(context)
        await self._actuator.init_path_planner(context)

    # TODO: check whether this needs a Promise.all-style concurrent routine
    # (translated from the original Korean comment).
    def cb(point):
        # Annotate each align point with its path distance from the worker.
        path = self._actuator.path_planner.get_path(
            target_autodoor['map'], worker_location['pose2d'], point)
        point['distance'] = self._actuator.path_planner.get_distance(path)
        return point

    aligns = pydash.map_(aligns, cb)
    aligns = pydash.sort_by(aligns, 'distance')
    entry = aligns[0]
    # NOTE: shadows the builtin ``exit`` within this function.
    exit = aligns[len(aligns) - 1]
    autodoor_consumer = AutodoorConsumer()
    #1. move to entry point
    await self._actuator.moving(context, entry)
    #2. request door open
    await autodoor_consumer.request_open_autodoor(target_autodoor['id'],
                                                  context)
    #3. waiting autodoor open
    await autodoor_consumer.ensure_autodoor_opened(target_autodoor['id'],
                                                   context)
    #4. move to exit point
    await self._actuator.moving(context, exit)
    #5. close door
    await autodoor_consumer.request_close_autodoor(target_autodoor['id'],
                                                   context)
    await autodoor_consumer.ensure_autodoor_closed(target_autodoor['id'],
                                                   context)
    #6. release autodoor resource
    await context.api_configuration.return_resource(worker['id'],
                                                    target_autodoor['id'])
    return True
def get_yield_strength_to_composed_data():
    """Average each steel's yield strength and write it to composed_data.

    For every distinct ``main_info_id``, averages the ``sigma_t`` samples
    from mechanical_properties and UPDATEs composed_data with the result
    formatted to two decimals.

    Returns:
        bool: Always True on completion.
    """
    conn = db.get_connection()
    cur = conn.cursor()
    main_query_select = 'SELECT main_info_id, sortament, sigma_t FROM rloveshhenko$mydbtest.mechanical_properties WHERE sigma_t != " " and main_info_id in (SELECT id FROM mydbtest.main_info WHERE classification like "%Сталь%");'
    ids_query_select = 'SELECT distinct main_info_id FROM rloveshhenko$mydbtest.mechanical_properties WHERE sigma_t != " " and main_info_id in (SELECT id FROM mydbtest.main_info WHERE classification like "%Сталь%");'
    cur.execute(main_query_select)
    data = cur.fetchall()
    cur.execute(ids_query_select)
    ids = cur.fetchall()
    # Group the sigma_t samples by main_info_id.
    # NOTE: filtering all rows per id is O(n*m); fine for small tables.
    sigmas = map_(
        ids, lambda item: {
            'id': item['main_info_id'],
            'sigmas': map_(
                filter_(
                    data, lambda it: it['main_info_id'] == item[
                        'main_info_id']), lambda x: get(x, 'sigma_t'))
        })
    # Average each id's samples, formatted to two decimal places.
    new_sigmas = map_(
        sigmas, lambda item: {
            'id': item['id'],
            'sigma': format(
                reduce_(
                    item['sigmas'], lambda total, x: float(total) + float(
                        x) / len(item['sigmas']), 0), '.2f')
        })
    for item in new_sigmas:
        cur.execute(
            "UPDATE rloveshhenko$mydbtest.composed_data SET sigma_t = %s WHERE id = %s",
            (item['sigma'], item['id']))
        conn.commit()
    # NOTE(review): the connection itself is never closed — only the cursor;
    # confirm whether db.get_connection() pools connections.
    cur.close()
    return True
def rounder(func, x, precision):
    """Apply rounding function ``func`` to ``x`` at a decimal precision.

    Numbers are rounded directly; iterables element-wise. Inputs that are
    neither (or iterables whose elements can't be scaled) yield ``None``.
    """
    scale = pow(10, precision)

    def apply_rounding(item):
        # Shift by the precision factor, round, then shift back.
        return func(item * scale) / scale

    if pyd.is_number(x):
        return apply_rounding(x)
    if pyd.is_iterable(x):
        try:
            return pyd.map_(x, apply_rounding)
        except TypeError:
            return None
    return None
def mapcat(array, callback=None):
    """Map a callback to each element of a list and concatenate the results
    into a single list using :func:`cat`.

    Args:
        array (list): List to map and concatenate.
        callback (mixed): Callback to apply to each element.

    Returns:
        list: Mapped and concatenated list.

    Example:

        >>> mapcat(range(4), lambda x: list(range(x)))
        [0, 0, 1, 0, 1, 2]

    .. versionadded:: 2.0.0
    """
    mapped = pyd.map_(array, callback)
    return cat(*mapped)
def omit_by(obj, iteratee=None):
    """The opposite of :func:`pick_by`. This method creates an object composed
    of the string keyed properties of object that predicate doesn't return
    truthy for. The predicate is invoked with two arguments: ``(value, key)``.

    Args:
        obj (mixed): Object to process.
        iteratee (mixed, optional): Iteratee used to determine which
            properties to omit.

    Returns:
        dict: Results of omitting properties.

    Example:

        >>> omit_by({'a': 1, 'b': '2', 'c': 3}, lambda v: isinstance(v, int))
        {'b': '2'}

    .. versionadded:: 4.0.0

    .. versionchanged:: 4.2.0
        Support deep paths for `iteratee`.
    """
    if not callable(iteratee):
        # Non-callable iteratee: treat it as a collection of key paths to
        # remove from a copy of the object.
        paths = pyd.map_(iteratee, to_path)
        if any(len(path) > 1 for path in paths):
            # Deep paths mutate nested containers, so deep-clone first.
            cloned = clone_deep(obj)
        else:
            # Shallow paths only touch the top level; a shallow dict is enough.
            cloned = to_dict(obj)

        def _unset(obj, path):
            pyd.unset(obj, path)
            return obj

        ret = pyd.reduce_(paths, _unset, cloned)
    else:
        # Callable predicate: keep only entries it does NOT return truthy for.
        # argcount handles predicates taking (value,) or (value, key).
        argcount = getargcount(iteratee, maxargs=2)
        ret = {key: value for key, value in iterator(obj)
               if not callit(iteratee, value, key, argcount=argcount)}
    return ret
def create_label(
        y_col, x_col,
        title=None, y_title=None, x_title=None, legend_name=None):
    '''Create label dict for go.Layout with smart resolution'''
    # Fall back to the y column(s) when no legend name is given.
    legend_name = legend_name or y_col
    # Normalize every field to a list form.
    y_col_list, x_col_list, legend_name_list = (
        ps.map_([y_col, x_col, legend_name], util.cast_list))
    y_title = str(y_title or ','.join(y_col_list))
    x_title = str(x_title or ','.join(x_col_list))
    title = title or f'{y_title} vs {x_title}'
    return {
        'y_title': y_title,
        'x_title': x_title,
        'title': title,
        'y_col_list': y_col_list,
        'x_col_list': x_col_list,
        'legend_name_list': legend_name_list,
    }
def get_markets(self, main_market_filter=None):
    """
    Gets all the Bittrex markets and filters them based on the main market
    filter

    :param main_market_filter: Main market to filter on (ex: BTC, ETH, USDT)
    :type main_market_filter: str

    :return: All Bittrex markets (with filter applied, if any)
    :rtype : list
    """
    markets = self.Bittrex.get_markets()
    if not markets["success"]:
        logger.error("Failed to fetch Bittrex markets")
        # NOTE(review): exit() terminates the whole process from inside a
        # library method — consider raising an exception instead.
        exit()
    markets = markets["result"]
    if main_market_filter is not None:
        # Market names look like "BTC-LTC"; keep those in the main market.
        market_check = main_market_filter + "-"
        markets = py_.filter_(
            markets,
            lambda market: market_check in market["MarketName"])
    # Reduce each market record to its name.
    markets = py_.map_(markets, lambda market: market["MarketName"])
    return markets
def download_catalog_year(college_pages, auth):
    """Download catalog-year data for every major of every college.

    Mutates ``college_pages`` in place, adding a ``year`` list (and, on the
    fallback path, a ``MajorFile``) to each major entry.

    NOTE(review): indentation reconstructed from a collapsed source line —
    confirm the loop nesting against the original file.
    """
    # Create target URL
    parameters = copy.copy(URL_PARAMS)
    url = SOURCE_URL + '?' + urllib.parse.urlencode(parameters)
    parameters['changeMajor']='Next'
    parameters['call']='5'
    for college in COLLEGES:
        parameters['college']=college
        print('Downloading College',college)
        for idx,major in enumerate(college_pages[college]):
            print(' Getting Degree', major['name'])
            parameters['major']=major['name']
            url = SOURCE_URL + '?' + urllib.parse.urlencode(parameters)
            export_page=auth.get(url).content
            # NOTE(review): no parser argument — BeautifulSoup will emit a
            # warning and pick a platform-dependent default parser.
            soup = bs4.BeautifulSoup(export_page)
            # print(export_page)
            data=soup.select('option')
            # print(data)
            if len(data) > 0:
                # Year choices are offered as <option> elements.
                college_pages[college][idx]['year']=_.map_(
                    soup.select('option'),lambda x: _.strip_tags(x))
            else:
                # Fallback: scrape the hidden form fields out of the raw HTML.
                # print(soup.find('body table tbody'))
                decoded = export_page.decode()
                college_pages[college][idx]['MajorFile']=decoded.split(
                    'name=MajorFile value=')[1].split('>')[0]
                year=_.js_match(
                    _.js_match(
                        export_page,
                        '/[<input type=hidden name=year year="](\d{4})/'),
                    '/\d{4}/')
                college_pages[college][idx]['year']=year
            # if idx>5:
            #   return college_pages
            # print(college_pages[college][idx])
    return college_pages
def test_partial_as_callback():
    """A partial object works as a map_ iteratee, receiving each element."""
    func = _.partial(lambda offset, value, *args: value + offset, 5)
    case = [1, 2, 3]
    expected = [6, 7, 8]
    # BUG FIX: the original lacked ``assert`` — the comparison's result was
    # computed and discarded, so this test could never fail.
    assert _.map_(case, func) == expected
def test_zscore(case, expected):
    """zscore results, rounded to 3 places, match the expected scores."""
    rounded = _.map_(_.zscore(*case), lambda v: round(v, 3))
    assert rounded == expected
def test_annotated_iteratee():
    """map_ accepts a type-annotated function as its iteratee."""
    result = _.map_([1, 2], typed_function)
    assert result == [2, 3]
def __call__(self, *objs):
    """Call every stored function with ``*objs``; return the results list."""
    def invoke(func):
        return func(*objs)

    return pyd.map_(self.funcs, invoke)
def test_property_of(case, arg, expected):
    """property_of builds a getter usable as a map_ iteratee."""
    getter = _.property_of(case)
    assert _.map_(arg, getter) == expected
def test_iteratee(case, arg, expected):
    """iteratee resolves ``case`` into a callable usable with map_."""
    resolved = _.iteratee(case)
    mapped = _.map_(arg, resolved)
    assert mapped == expected
def test_deep_property(case, arg, expected):
    """deep_property builds a deep-path accessor usable with map_."""
    accessor = _.deep_property(case)
    assert _.map_(arg, accessor) == expected