Example #1
    def groupby(
        self,
        keys: Union[List[str], str],
        criteria: Optional[Dict] = None,
        properties: Union[Dict, List, None] = None,
        sort: Optional[Dict[str, Sort]] = None,
        skip: int = 0,
        limit: int = 0,
    ) -> Iterator[Tuple[Dict, List[Dict]]]:
        pipeline = self._get_pipeline(criteria=criteria,
                                      properties=properties,
                                      skip=skip,
                                      limit=limit)
        if not isinstance(keys, list):
            keys = [keys]
        group_id = {}  # type: Dict[str,Any]
        for key in keys:
            set_(group_id, key, "${}".format(key))
        pipeline.append(
            {"$group": {
                "_id": group_id,
                "docs": {
                    "$push": "$$ROOT"
                }
            }})

        agg = self._collection.aggregate(pipeline)

        for d in agg:
            yield d["_id"], d["docs"]
Example #2
def convert_diagnostic_v5():
    def pred(end_entry, step_num):
        # remove end entries that go to step 70 or 94,
        # except when that step is the next step
        goto_step = int(end_entry["Step"])

        goto_70_not_next = goto_step == 70 and step_num != 69
        goto_94_not_next = goto_step == 94 and step_num != 93

        return not (goto_70_not_next or goto_94_not_next)

    converter = MaccorToBiologicMb()
    ast = converter.load_maccor_ast(
        os.path.join(PROCEDURE_TEMPLATE_DIR, "diagnosticV5.000"))

    # set main looping value to 20
    set_(ast, "MaccorTestProcedure.ProcSteps.TestStep.68.Ends.EndEntry.Value",
         "20")
    filtered = converter.remove_end_entries_by_pred(ast, pred)

    # gotos for step 94 were in case of unsafe Voltage levels
    # we'll set them in the output seqs
    seqs = converter.maccor_ast_to_biologic_seqs(filtered)
    for seq in seqs:
        seq["E range min (V)"] = "2.000"
        seq["E range max (V)"] = "4.400"

    converter.biologic_seqs_to_protocol_file(
        seqs, os.path.join(BIOLOGIC_TEMPLATE_DIR, "diagnosticV5_70.mps"))
Example #3
    def __clear_sensitive_data(self, metadata: dict) -> dict:

        for item in self.__mask_targets:
            json_path_expression = parse(item.expression)
            matches = [
                str(match.full_path)
                for match in json_path_expression.find(metadata)
            ]
            for match in matches:
                target_value = pydash.get(metadata, match, default="")

                if item.target == MaskTargetsKeys.CREDIT_CARD.value:
                    mask_data = CustomLogger.__mask_credit_data(
                        self, target_value)
                    pydash.set_(metadata, match, mask_data)
                elif item.target == MaskTargetsKeys.BIN.value:
                    mask_data = CustomLogger.__mask_bin_data(
                        self, target_value)
                    pydash.set_(metadata, match, mask_data)
                elif item.target == MaskTargetsKeys.EXP_DATE.value:
                    pydash.set_(metadata, match, "XX")
                elif item.target == MaskTargetsKeys.CVV.value:
                    pydash.set_(metadata, match, "XXX")
                elif item.target == MaskTargetsKeys.SECRET_KEY.value:
                    mask_data = CustomLogger.__mask_secret_data(
                        self, target_value)
                    pydash.set_(metadata, match, mask_data)

        return metadata
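The masking round trip relies on str(match.full_path) yielding a dotted path that pydash can consume again. A standalone sketch of that interplay, assuming parse comes from jsonpath_ng as the snippet's usage suggests, with a hypothetical document layout:

import pydash
from jsonpath_ng import parse

data = {"payment": {"card": {"number": "4111111111111111"}}}  # hypothetical layout
for match in parse("payment.card.number").find(data):
    path = str(match.full_path)  # "payment.card.number"
    pydash.set_(data, path, "XXXX" + pydash.get(data, path)[-4:])
# data["payment"]["card"]["number"] == "XXXX1111"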
Example #4
    def remove_end_entries_by_pred(self, maccor_ast, pred):
        new_ast = copy.deepcopy(maccor_ast)
        steps = get(new_ast, "MaccorTestProcedure.ProcSteps.TestStep")
        if steps is None:
            print("Could not find any Maccor steps loaded")
            return maccor_ast

        for i, step in enumerate(steps):
            step_num = i + 1
            if get(step, "Ends.EndEntry") is None:
                continue
            elif isinstance(get(step, "Ends.EndEntry"), list):
                filtered = list(
                    filter(
                        lambda end_entry: pred(end_entry, step_num),
                        step["Ends"]["EndEntry"],
                    ))

                if len(filtered) == 0:
                    unset(step, "Ends.EndEntry")
                elif len(filtered) == 1:
                    set_(step, "Ends.EndEntry", filtered[0])
                else:
                    set_(step, "Ends.EndEntry", filtered)
            else:
                if not pred(get(step, "Ends.EndEntry"), step_num):
                    unset(step, "Ends.EndEntry")

        return new_ast
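The dict/list branching handles a quirk of XML-derived procedure files: a step with a single end condition typically parses to one dict, while several parse to a list. A hedged illustration of the two shapes, assuming xmltodict-style conversion:

step_single = {"Ends": {"EndEntry": {"Step": "70"}}}
step_multi = {"Ends": {"EndEntry": [{"Step": "70"}, {"Step": "94"}]}}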
Example #5
    def send_message(self,
                     topic,
                     msg=None,
                     retain=False,
                     qos=0,
                     dup=False,
                     timeout=DEFAULT_TIMEOUT):
        # avoid a shared mutable default argument
        if msg is None:
            msg = {}
        # asyncio.Future(loop=...) was removed in Python 3.10
        future = self.loop.create_future()
        if _.is_dict(msg) and _.get(msg, '__head__') is None:
            head = wrap_data(None, None, self.name, self.version)['__head__']
            _.set_(msg, '__head__', head)
        message = json.dumps(msg)

        _mid = None

        def on_publish(client, userdata, mid):
            if mid == _mid and not future.done():
                future.set_result('done')

        def on_timeout():
            if not future.done():
                future.set_exception(Exception(f'timeout {timeout}ms'))

        self.client.on_publish = self.safe(on_publish)
        # paho's publish() returns (rc, mid); naming the first element
        # "qos" shadowed the qos parameter above
        (rc, _mid) = self.client.publish(topic,
                                         payload=message,
                                         qos=qos,
                                         retain=retain)
        set_timeout(self.safe(on_timeout), timeout)

        return future
Example #6
def get_entities_from_sent(f1):
    entities = {}

    nlp = spacy.load('pt_core_news_sm')
    doc = nlp(f1)
    for ent in doc.ents:
        values = get(entities, str(ent.label_))
        if values is None:
            values = []
        values = push(values, str(ent))
        set_(entities, str(ent.label_), values)

    nlp = spacy.load('en_core_web_sm')
    doc = nlp(f1)
    for ent in doc.ents:
        if str(ent.label_) in ('TIME', 'ORDINAL', 'DATE',
                               'CARDINAL', 'MONEY', 'QUANTITY'):
            values = get(entities, str(ent.label_))
            if values is None:
                values = []
            values = push(values, str(ent))
            set_(entities, str(ent.label_), values)

    return entities
Example #7
def spec_from_config(experiment, config):
    '''Helper to create spec from config - variables in spec.'''
    spec = deepcopy(experiment.spec)
    spec.pop('search', None)
    for k, v in config.items():
        ps.set_(spec, k, v)
    return spec
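This works because ps.set_ interprets each flattened config key as a path into the spec. A small sketch with hypothetical key names:

import pydash as ps

spec = {'agent': {'algorithm': {'lr': 0.1}}}
ps.set_(spec, 'agent.algorithm.lr', 0.01)
# spec['agent']['algorithm']['lr'] == 0.01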
Example #8
 async def put_plugin(self, receiver, prop, val, timeout=DEFAULT_TIMEOUT):
     if not _.is_dict(val):
         msg = {}
         _.set_(msg, prop, val)
         val = msg
     result = await self.call_action(receiver, prop, val, 'put', timeout)
     return result
Example #9
def get_obj_pos_from_sent(f1):

    doc = nlp(f1)
    pos = {
        "ADJ": [],
        "ADP": [],
        "ADV": [],
        "AUX": [],
        "CONJ": [],
        "CCONJ": [],
        "DET": [],
        "INTJ": [],
        "NOUN": [],
        "NUM": [],
        "PART": [],
        "PRON": [],
        "PROPN": [],
        "PUNCT": [],
        "SCONJ": [],
        "SYM": [],
        "VERB": [],
        "X": []
    }
    for token in doc:
        tag = str(token.pos_)
        # accumulate onto the pre-initialized list for this POS tag;
        # resetting to [] each iteration would keep only the last word
        words = get(pos, tag)
        if words is None:
            words = []
        words = push(words, str(token.orth_).lower())
        set_(pos, tag, words)
    return pos
Example #10
File: screens.py Project: xuover/gs-quant
    def __set_up_parameters(self) -> dict:
        filter_to_parameter = {
            'face_value': 'face_value',
            'direction': 'direction',
            'gs_liquidity_score': 'liquidity_score',
            'gs_charge_bps': 'gs_charge_bps',
            'gs_charge_dollars': 'gs_charge_dollars',
            'modified_duration': 'duration',
            'yield_to_convention': 'yield_',
            'spread_to_benchmark': 'spread',
            'z_spread': 'z_spread',
            'g_spread': 'g_spread',
            'bval_mid_price': 'mid_price',
            'maturity': 'maturity',
            'amount_outstanding': 'amount_outstanding',
            'rating_standard_and_poors': 'rating',
            'seniority': 'seniority',
            'currency': 'currency',
            'sector': 'sector',
            'issue_date': 'issue_date'
        }

        parameters = {}
        for prop in TargetScreenParameters.properties():
            set_(parameters, prop,
                 get(self.__filters, filter_to_parameter[prop]))
        return parameters
Example #11
def purgeResource(dryrun, id, idToken, url, data=None):
    logging.debug(f'Remove: {url}, id: {id}, data: {data}')
    kwargs = {}
    if data is not None:
        pydash.set_(kwargs, 'data', json.dumps(data))

    # print for dry run
    if dryrun:
        if bool(kwargs):
            logging.warning(
                f'Remove: {url.split("/v1/",2)[1]}: item: {kwargs}')
        else:
            logging.warning(f'Delete: {url.split("/v1/",2)[1]}: {id}')
        return ''

    quotedId = urllib.parse.quote(id)
    url = f'{url}/{quotedId}'
    resp = requests.delete(
        f'{API_HOST}{url}',
        headers={
            'content-type': 'application/json',
            'idToken': idToken
        },
        **kwargs,
    )
    output = resp.status_code
    return output
Example #12
 def spec_from_config(self, config):
     '''Helper to create spec from config - variables in spec.'''
     spec = deepcopy(self.experiment.spec)
     spec.pop('search', None)
     for k, v in config.items():
         _.set_(spec, k, v)
     return spec
Example #13
    def connection_string(self, value):
        if isinstance(value, dict):
            dburl = str(URL(**value))
        else:
            dburl = str(value)

        _.set_(self._data, 'FLASK.SQLALCHEMY_DATABASE_URI', dburl)
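A usage sketch, assuming connection_string is a property setter (as its signature suggests) and SQLAlchemy's legacy URL constructor (newer versions use URL.create); all values are hypothetical:

config.connection_string = {
    'drivername': 'postgresql',
    'username': 'monitor',
    'password': 'secret',
    'host': 'localhost',
    'database': 'recordings',
}
# stores 'postgresql://monitor:secret@localhost/recordings'
# under FLASK.SQLALCHEMY_DATABASE_URI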
Example #14
def inject_config(spec, config):
    '''Inject flattened config into SLM Lab spec.'''
    spec = deepcopy(spec)
    spec.pop('search', None)
    for k, v in config.items():
        ps.set_(spec, k, v)
    return spec
Example #15
    def groupby(self, keys, criteria=None, properties=None, allow_disk_use=True, **kwargs):
        """
        Simple grouping function that will group documents
        by keys.

        Args:
            keys (list or string): fields to group documents
            criteria (dict): filter for documents to group
            properties (list): properties to return in grouped documents
            allow_disk_use (bool): whether to allow disk use in aggregation

        Returns:
            command cursor corresponding to grouped documents

            elements of the command cursor have the structure:
            {'_id': {"KEY_1": value_1, "KEY_2": value_2 ...,
             'docs': [list_of_documents corresponding to key values]}

        """
        pipeline = []
        if criteria is not None:
            pipeline.append({"$match": criteria})

        if properties is not None:
            pipeline.append({"$project": {p: 1 for p in properties}})

        if isinstance(keys, str):
            keys = [keys]

        group_id = {}
        for key in keys:
            set_(group_id, key, "${}".format(key))
        pipeline.append({"$group": {"_id": group_id, "docs": {"$push": "$$ROOT"}}})

        return self.collection.aggregate(pipeline, allowDiskUse=allow_disk_use)
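A usage sketch with hypothetical store and field names:

for group in store.groupby("state", properties=["state", "task_id"]):
    print(group["_id"], len(group["docs"]))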
Example #16
    async def moving(self,
                     context,
                     destination_pose,
                     semantic_location_id=None):

        UPDATE_INTERVAL = 500
        worker = context.blackboard.get_worker()
        worker_location = pydash.get(worker, 'type_specific.location')

        path = self.path_planner.get_path(worker_location['map'],
                                          worker_location['pose2d'],
                                          destination_pose)
        trajectory = self.path_planner.path_to_trajectory(
            path, 1, UPDATE_INTERVAL)

        print('starting to move robot along path')

        def rotate_nearby(cx, cy, x, y, angle):
            radians = (math.pi / 180) * angle
            cos = math.cos(radians)
            sin = math.sin(radians)
            nx = cos * (x - cx) + sin * (y - cy) + cx
            ny = cos * (y - cy) - sin * (x - cx) + cy
            return {'x': nx, 'y': ny}

        for point in trajectory:
            worker = context.blackboard.get_worker()
            updated_type_specific = worker['type_specific']

            if point.get('theta') is None:
                point['theta'] = pydash.get(
                    worker, 'type_specific.location.pose2d.theta')

            updated_type_specific['location'] = pydash.assign(
                {}, updated_type_specific['location'], {
                    'map': worker_location['map'],
                    'pose2d': point,
                    'semantic_location': None
                })

            #if config.get('action.move') == 'nearby' and idx == len(trajectory)-1:  # is this condition needed?

            context.blackboard.set_worker(
                {'type_specific': updated_type_specific})
            await context.blackboard.sync_worker()

            #print('moving...sleep')
            await asyncio.sleep(0.1)
            #print('moving...done sleep')

        updated_type_specific = context.blackboard.get_worker()['type_specific']
        pydash.set_(updated_type_specific, 'location.semantic_location',
                    semantic_location_id)
        context.blackboard.set_worker({'type_specific': updated_type_specific})
        await context.blackboard.sync_worker()
        return True
Example #17
    def test_creation(
        self, generic_temp_folder, data, cfg_extension
    ):  # TODO: toml 0.10.2 needed! toml has a bug otherwise

        generic_temp_folder = Path(generic_temp_folder)

        sample_dict = data["data"]
        to_be_raplaced_keys = data[
            "to_be_replaced"
        ]  # , _, _, to_be_raplaced_keys = data
        volatile_dict = copy.deepcopy(sample_dict)

        subtitutions_values = {}
        for k in to_be_raplaced_keys:
            random_name = k + f".{cfg_extension}"
            random_name = DirectiveAT.generate_directive_string("import", [random_name])
            subtitutions_values[random_name] = pydash.get(volatile_dict, k)

            pydash.set_(volatile_dict, k, random_name)

        output_cfg_filename = generic_temp_folder / f"out_config.{cfg_extension}"
        output_cfg_filename2 = generic_temp_folder / f"out_config2.{cfg_extension}"

        substitutions_values[str(output_cfg_filename)] = volatile_dict

        saved_cfgs = []
        for directive_value, d in substitutions_values.items():
            directive = DirectiveAT(value=directive_value)
            if directive.valid:
                output_filename = generic_temp_folder / directive.args[0]
            else:
                output_filename = generic_temp_folder / directive_value
            store_cfg(output_filename, d)
            saved_cfgs.append(output_filename)

        yconf = XConfig(output_cfg_filename)
        yconf.save_to(output_cfg_filename2)

        yconf_reloaded = XConfig(output_cfg_filename2)

        with pytest.raises(NotImplementedError):
            yconf.save_to(str(output_cfg_filename2) + "#IMPOSSIBLE.EXTENSION")

        assert not DeepDiff(yconf.to_dict(), sample_dict)
        assert not DeepDiff(yconf_reloaded.to_dict(), sample_dict)
        assert not DeepDiff(
            XConfig.from_dict(yconf_reloaded.to_dict()).to_dict(),
            yconf_reloaded.to_dict(),
        )
        assert len(XConfig.from_dict(yconf_reloaded.to_dict())) > len(
            sample_dict
        )  # YConf contains 2 more private keys!

        # remove last cfg file
        saved_cfgs[0].unlink()

        with pytest.raises(OSError):
            yconf = XConfig(output_cfg_filename)
Example #18
    def __init__(self, config_path=None):

        self._version = None
        self._data = {}
        dd = self._data
        dd['FLASK'] = {}

        _.merge(dd, yaml.safe_load(_get_default_yaml()))

        if config_path is None:
            config_path = os.path.join(cwd(), 'recordingmonitor.yml')
            log.info('Load configuration from DEFAULT path: %s', config_path)
        else:
            log.info('Load configuration from path: %s', config_path)

        self._config_path = config_path

        try:
            with open(config_path, 'r') as stream:
                file_conf = yaml.safe_load(stream)
                _.merge(dd, file_conf)

        except FileNotFoundError:
            log.debug(
                'There is NO config file, continue using defaults and env')
            print('There is NO config file, continue using defaults and env')

        # LOAD ENVIRONMENT:
        eover = self._gather_from_environment()
        for stg, val in eover.items():
            _.set_(dd, stg, val)

        # set default values for settings which could not be
        # initialized declaratively:
        self.apply_defaults('log.version', 1)
        self.apply_defaults('capture.paths.jobsRoot',
                            os.path.join(cwd(), 'tmp'))

        # list of debugging options
        self.apply_defaults('maintenance.rmdir', True)
        self.apply_defaults('capture.rmdir', True)

        # ----------------------------------------------------------------------
        # CONFIGURE LOGGING:
        # ----------------------------------------------------------------------
        logging.config.dictConfig(dd['log'])

        self._reset_frozendebug()

        if self.get('FLASK.DEBUG'):
            self._test_logging()

        # move connection string to expected place
        dbconfig = self.get('db.connection')
        if dbconfig is not None:
            self.connection_string = dbconfig

        log.debug('main database: %s', self.connection_string)
Example #19
 def build_pattern_tree(subtree, path=None):
     if path is None:
         path = []
     for key, value in subtree.items():
         if isinstance(value, dict):
             build_pattern_tree(value, path + [key])
         else:
             pydash.set_(self.pattern_tree, path + [key],
                         re.compile(value))
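pydash.set_ accepts the accumulated path as a list and creates the intermediate dicts itself, which keeps the recursion flat. A standalone sketch with hypothetical keys:

import pydash

tree = {}
pydash.set_(tree, ['binaries', 'linux', 'x64'], 'compiled-pattern')
# tree == {'binaries': {'linux': {'x64': 'compiled-pattern'}}}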
Example #20
 def set(self, key, value):
     """
     Set value for key
     :param key: String
     :param value: new value for key
     :return: value
     """
     set_(self._data, key, value)
     return value
Example #21
    async def _init_battery(self, worker_record):
        type_specific = worker_record['type_specific']
        pydash.set_(type_specific, 'battery',
                    {
                        'battery_level': 75,
                        'charging_status': 0
                    })

        return worker_record
Example #22
File: mongolike.py Project: FilipchukB/P1
    def groupby(
        self,
        keys: Union[List[str], str],
        criteria: Optional[Dict] = None,
        properties: Union[Dict, List, None] = None,
        sort: Optional[Dict[str, Union[Sort, int]]] = None,
        skip: int = 0,
        limit: int = 0,
    ) -> Iterator[Tuple[Dict, List[Dict]]]:
        """
        Simple grouping function that will group documents
        by keys.

        Args:
            keys: fields to group documents
            criteria: PyMongo filter for documents to search in
            properties: properties to return in grouped documents
            sort: Dictionary of sort order for fields. Keys are field names and
                values are 1 for ascending or -1 for descending.
            skip: number documents to skip
            limit: limit on total number of documents returned

        Returns:
            generator returning tuples of (key, list of docs)
        """
        pipeline = []
        if isinstance(keys, str):
            keys = [keys]

        if properties is None:
            properties = []
        if isinstance(properties, dict):
            properties = list(properties.keys())

        if criteria is not None:
            pipeline.append({"$match": criteria})

        if len(properties) > 0:
            pipeline.append({"$project": {p: 1 for p in properties + keys}})

        alpha = "abcdefghijklmnopqrstuvwxyz"
        group_id = {letter: f"${key}" for letter, key in zip(alpha, keys)}
        pipeline.append(
            {"$group": {
                "_id": group_id,
                "docs": {
                    "$push": "$$ROOT"
                }
            }})
        for d in self._collection.aggregate(pipeline, allowDiskUse=True):
            id_doc = {}  # type: Dict[str,Any]
            for letter, key in group_id.items():
                if has(d["_id"], letter):
                    set_(id_doc, key[1:], d["_id"][letter])
            yield (id_doc, d["docs"])
Example #23
    def list_version_files(self, version, return_unmatched_urls=False):
        if not isinstance(version, list):
            versions = [version]
        else:
            versions = version

        urls_to_parse = []

        for version in versions:
            version_url = urlparse.urljoin(self.starting_url,
                                           'v' + version + '/')
            urls_to_parse.append(version_url)

        responses = http_multiget(urls_to_parse)

        parsed_urls = pydash.flatten(
            [parse_response_for_urls(response) for (_, response) in responses])

        version_files = {}
        unmatched_urls = []

        for parsed_url in parsed_urls:

            pattern_match = self.pattern_tree.search(parsed_url)

            if pattern_match is not None:
                match, path = pattern_match
                file_type = path[0]

                if file_type == 'binaries':
                    url_version = match.group(1)
                    filename = match.group(2)
                    system = match.group(3)
                    extension = match.group(4)
                    pydash.set_(version_files,
                                [url_version, file_type, system, extension], {
                                    'filename': filename,
                                    'url': parsed_url
                                })
                else:
                    url_version = match.group(1)
                    filename = match.group(2)
                    extension = match.group(3)
                    pydash.set_(version_files,
                                [url_version, file_type, extension], {
                                    'filename': filename,
                                    'url': parsed_url
                                })

            else:
                unmatched_urls.append(parsed_url)
        if return_unmatched_urls:
            version_files['unmatched_urls'] = unmatched_urls
        return version_files
Example #24
    def inject_cluster_and_family(self):
        """Assign the cluster and family for the service/task using the `name` and `cluster` fields if set"""

        cluster_val = self.get_with_prefix('cluster')
        family_val = self.get_with_prefix('name')

        if cluster_val is None:
            cluster_val = self.get_with_prefix('name')

        set_(self.deploy_json, '.cluster', cluster_val)
        set_(self.deploy_json, '.family', family_val)
Example #25
def wrap_data(key, value, name, version):
    msg = {}
    if (_.is_object(value) and value is not None):
        msg = value
    else:
        msg[key] = value

    _.set_(msg, "__head__.plugin_name", name)
    _.set_(msg, "__head__.plugin_version", version)

    return msg
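A sketch of both branches' output, with hypothetical plugin values; note that a dict value is reused as the message body itself:

wrap_data('temp', 21.5, 'sensor', '1.0.0')
# -> {'temp': 21.5, '__head__': {'plugin_name': 'sensor',
#                                'plugin_version': '1.0.0'}}
wrap_data(None, {'temp': 21.5}, 'sensor', '1.0.0')  # dict becomes msg directly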
Example #26
 async def put_plugin(self, receiver, prop, val, timeout=DEFAULT_TIMEOUT):
     LABEL = f'<MicrodropPython::put_plugin::{receiver}>'
     try:
         if not isinstance(val, dict):
             msg = {}
             _.set_(msg, prop, val)
             val = msg
         await self.reset_client()
         return await self.call_action(receiver, prop, val, 'put', timeout)
     except Exception as err:
         raise self.dump_stack(LABEL, err)
Example #27
    def groupby(self, keys, criteria=None, properties=None, **kwargs):
        pipeline = self._get_pipeline(criteria=criteria, properties=properties)
        if not isinstance(keys, list):
            keys = [keys]
        group_id = {}
        for key in keys:
            set_(group_id, key, "${}".format(key))
        pipeline.append({"$group": {"_id": group_id, "docs": {"$push": "$$ROOT"}}})

        agg = self.collection.aggregate(pipeline, **kwargs)

        return agg
Example #28
 def replace_variable(self, old_value: str, new_value: str):
     """Replaces target variables with custom new value
     :param old_value: value to replace
     :type old_value: str
     :param new_value: new key value
     :type new_value: str
     """
     chunks = self.chunks_as_lists(discard_private_qualifiers=True)
     for k, v in chunks:
         p = Placeholder.from_string(v)
         if p is not None and p.is_valid():
             if old_value == p.name:
                 pydash.set_(self, k, p.cast(new_value))
Example #29
    def update_prop(self, path, value):
        """
        Update a prop inner value.

        Mostly used by element implementations but can be used for some low level updates.

        :param path: The nested path of the prop. If the prop is named ``A`` then reaching ``A.b.c`` would be
                    ``['A', 'b', 'c']``.
        :param value: The value to set in the nested prop path.
        """
        if isinstance(path, six.string_types):
            path = [path]
        pydash.set_(self.props, path, value)
        self.update_element(path=['props'] + path, action='set', data=value)
Example #30
    def apply_defaults(self, path, val):
        if path is None:
            path = ''

        try:
            subcfg = self[path]
        except KeyError:
            # key not found, create a new branch in config:
            subcfg = None

        if subcfg is None:
            _.set_(self._data, path, val)
        else:
            _.defaults_deep(subcfg, val)
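The split matters: set_ creates the branch when the key is missing entirely, while defaults_deep fills only absent leaves without clobbering existing settings. For example:

import pydash as _

cfg = {'log': {'level': 'INFO'}}
_.defaults_deep(cfg['log'], {'level': 'DEBUG', 'version': 1})
# cfg['log'] == {'level': 'INFO', 'version': 1}  # existing 'level' kept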
Example #31
def file_push(**kwargs):
    # data processing
    command_split = get(kwargs, "command_split")
    data_split = get(kwargs, "data_split")
    msg_json = get(kwargs, "msg_json")
    device_id = get(kwargs, "device_id")
    extra = get(kwargs, "extra")

    # targeted push
    if command_split[1] == "points":
        if fileModel(get(msg_json, "data", "")):
            send_data = {}

            set_(send_data, "deviceId", device_id)
            set_(send_data, "message", "fileUpdate")  # type 说明是模型更新了
            set_(send_data, "data", get(data_split, "1", ""))  # fileId
            set_(send_data, "timestamp", int(time.time()))

            waiting(extra)

            # {"deviceId": "1234567890", "message": "modelUpdate", "data": "N1334859111304531968", "timestamp": 1607966540}
            sender(send_data)
    # broadcast push
    elif command_split[1] == "all":
        if pushModel(get(msg_json, "data", "")):
            i = 0

    print("\n")
Example #32
def transpose(array):
    """Transpose the elements of `array`.

    Args:
        array (list): List to process.

    Returns:
        list: Transposed list.

    Example:

        >>> transpose([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
        [[1, 4, 7], [2, 5, 8], [3, 6, 9]]

    .. versionadded:: 2.1.0
    """
    trans = []

    for y, row in iterator(array):
        for x, col in iterator(row):
            trans = pyd.set_(trans, [x, y], col)

    return trans
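This works because set_ treats integer path segments as list indexes and pads gaps with None, so trans can start out as an empty list:

>>> pyd.set_([], [0, 1], 'x')
[[None, 'x']]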
Example #33
def config(args=None):
    '''
    Parameters
    ----------
    args : argparse.Namespace, optional
        Arguments as parsed by :func:`parse_args`.

    See also
    --------
    :func:`parse_args`

    Returns
    -------
    configobj.ConfigObj
        Parsed (and potentially modified) configuration.
    '''
    if args is None:
        args = parse_args()

    config = md.config.Config(args.config)

    if args.command == 'locate':
        print(config.filename)
    elif args.command == 'show':
        if args.get:
            data = pydash.get(config.data.dict(), args.get)
        else:
            data = config.data.dict()

        if args.json:
            # Output in JSON.
            json.dump(obj=data, fp=sys.stdout, indent=4)
        elif args.yaml:
            # Output in YAML format.
            print(yaml.dump(data, default_flow_style=False), end='')
        elif isinstance(data, dict):
            # Output in `ini` format.
            output = io.BytesIO()
            configobj.ConfigObj(data).write(output)
            print(output.getvalue().decode('utf-8'), end='')
        else:
            print(data)
    elif args.command == 'edit':
        for action_i in ('append', 'prepend', 'set', 'remove', 'remove_key'):
            if getattr(args, action_i):
                action = action_i
                break

        if action in ('append', 'prepend', 'set', 'remove'):
            # Unpack key and new value.
            key, new_value = getattr(args, action)

            # Look up existing value.
            config_value = pydash.get(config.data, key)

            if action == 'set':
                # Set a key to a string value.

                # Create dictionary structure containing only the specified key
                # and value.
                nested_value = pydash.set_({}, key, new_value)
                # Merge nested value into existing configuration structure.
                pydash.merge(config.data, nested_value)
            else:
                # Action is a list action.

                if config_value is None:
                    # Create dictionary structure containing only empty list for
                    # specified key.
                    config_value = []
                    nested_value = pydash.set_({}, key, config_value)
                    # Merge nested value into existing configuration structure.
                    pydash.merge(config.data, nested_value)
                elif not isinstance(config_value, list):
                    print('Value at %s is not a list.' % key, file=sys.stderr)
                    raise SystemExit(1)

                if new_value in config_value:
                    # Remove value even if we are appending or prepending to
                    # avoid duplicate values.
                    config_value.remove(new_value)

                if args.append:
                    config_value.append(new_value)
                elif args.prepend:
                    config_value.insert(0, new_value)
        elif action == 'remove_key':
            key = getattr(args, action)

            if pydash.get(config.data, key) is not None:
                # Key exists.

                # Split key into levels.
                # Use [negative lookbehind assertion][1] to only split on
                # non-escaped '.' characters.
                #
                # [1]: https://stackoverflow.com/a/21107911/345236
                levels = re.split(r'(?<!\\)\.', key)
                parents = levels[:-1]

                parent = config.data

                for parent_i in parents:
                    parent = parent[parent_i]

                # Delete key from deepest parent.
                del parent[levels[-1]]
        if args.dry_run:
            output = io.BytesIO()
            config.data.write(output)
            print(output.getvalue().decode('utf-8'), end='')
        else:
            config.save()
    return config
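The set_-then-merge idiom above builds the nested branch in an empty dict first so the merge grafts in only the one key. A standalone sketch:

import pydash

config_data = {'a': {'x': 0}}
nested = pydash.set_({}, 'a.b.c', 1)  # {'a': {'b': {'c': 1}}}
pydash.merge(config_data, nested)
# config_data == {'a': {'x': 0, 'b': {'c': 1}}}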
Example #34
def test_set_(case, expected):
    assert _.set_(*case) == expected
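A representative (case, expected) pair, assuming the usual pytest parametrization over positional-argument tuples:

# case = ({}, 'a.b', 1)  ->  expected == {'a': {'b': 1}}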