Example #1
1
def traverse_and_remove_path(obj, path=None, match="First"):
    """Return a copy of *obj* with the element addressed by *path* removed.

    Dicts are rebuilt as OrderedDicts and lists are rebuilt elementwise;
    scalar leaves are returned unchanged.  ``match`` tracks whether every
    path component so far has matched; the sentinel default ``"First"`` is
    truthy so the top-level call starts in the matching state.
    """
    if path is None:
        path = []
    if isinstance(obj, dict):
        rebuilt = OrderedDict()
        for key, child in obj.items():
            still_matching = bool(match) and len(path) > 0 and path[0] == key
            rebuilt[key] = traverse_and_remove_path(child, path=path[1:], match=still_matching)
        if match and len(path) == 1 and path[0] in rebuilt:
            # Final path component matched a key here: drop it.
            del rebuilt[path[0]]
        return rebuilt
    if isinstance(obj, list):
        rebuilt = []
        for index, child in enumerate(obj):
            still_matching = (
                bool(match)
                and len(path) >= 1
                and isinstance(path[0], int)
                and path[0] < len(obj)
                and index == path[0]
            )
            rebuilt.append(traverse_and_remove_path(child, path=path[1:], match=still_matching))
        if match and len(path) == 1 and isinstance(path[0], int) and path[0] < len(rebuilt):
            # Final path component is a valid index here: drop that element.
            rebuilt.pop(path[0])
        return rebuilt
    # Scalar leaf (str, int, float, ...): nothing to descend into.
    return obj
Example #2
1
def _get_sorteddict(object, dictwithhash=False):
    """Normalize *object* into a deterministically ordered structure.

    Dicts are sorted by key into OrderedDicts, sequences are rebuilt
    (tuples stay tuples), falsy members are dropped, floats are rendered
    with ten decimal places, and strings naming existing files are
    replaced by their content hash (or a ``(path, hash)`` pair when
    *dictwithhash* is true).
    """
    if isinstance(object, dict):
        out = OrderedDict()
        for key, val in sorted(object.items()):
            if val:
                out[key] = _get_sorteddict(val, dictwithhash)
    elif isinstance(object, (list, tuple)):
        out = [_get_sorteddict(item, dictwithhash) for item in object if item]
        if isinstance(object, tuple):
            out = tuple(out)
    elif isinstance(object, string_types) and os.path.isfile(object):
        # File path: represent by content hash so equality is content-based.
        digest = hash_infile(object)
        out = (object, digest) if dictwithhash else digest
    elif isinstance(object, float):
        # Fixed precision keeps float formatting stable across platforms.
        out = "%.10f" % object
    else:
        out = object
    return out
    def _serialize(self):
        """
        Serialize the sheet in a JSON-ready format.

        Output shape depends on the available columns: a flat key/value
        mapping, a dict-of-dicts keyed by the "key" column, or a plain
        list of row dicts when no "key" column exists.
        """
        if "key" in self._columns and "value" in self._columns:
            # Both well-known columns present: one entry per row.
            serialized = OrderedDict()
            for row in self:
                serialized[row["key"]] = row["value"]
            return serialized

        if "key" in self._columns:
            # Keyed rows: every other column becomes a nested entry.
            serialized = OrderedDict()
            for row in self:
                entry = OrderedDict()
                for column in self._columns:
                    if column != "key":
                        entry[column] = row[column]
                serialized[row["key"]] = entry
            return serialized

        # No key column: emit a list of row objects in column order.
        serialized = []
        for row in self:
            serialized.append(OrderedDict((self._columns[i], cell) for i, cell in enumerate(row)))
        return serialized
Example #4
1
def parse(tokens):
    """Recursive-descent parser over a flat token list.

    Consumes one JSON-like value ("{...}", "[...]" or a bare token) from
    the front of *tokens* and returns ``(value, remaining_tokens)``.
    Objects become OrderedDicts so key order is preserved.  Input is
    assumed well-formed (no error recovery).
    """
    head = tokens[0]

    if head == "{":
        obj = OrderedDict()
        rest = tokens[1:]
        while rest[0] != "}":
            key, rest = rest[0], rest[1:]
            rest = rest[1:]  # drop the ':' separator
            value, rest = parse(rest)
            if rest[0] == ",":
                rest = rest[1:]
            obj[key] = value
        return obj, rest[1:]  # drop the closing '}'

    if head == "[":
        items = []
        rest = tokens[1:]
        while rest[0] != "]":
            value, rest = parse(rest)
            if rest[0] == ",":
                rest = rest[1:]
            items.append(value)
        return items, rest[1:]  # drop the closing ']'

    # Bare scalar token.
    return head, tokens[1:]
Example #5
1
    def check_for_add_to_toolbars(self, plugin):
        """Offer to place a freshly-installed InterfaceAction plugin into
        one of the GUI containers it is allowed to live in.

        Does nothing for non-GUI plugins or plugins already present in a
        container; otherwise shows the chooser dialog and persists the
        selected layouts to gprefs.
        """
        from calibre.gui2.preferences.toolbar import ConfigWidget
        from calibre.customize import InterfaceActionBase

        if not isinstance(plugin, InterfaceActionBase):
            return

        locations = OrderedDict(ConfigWidget.LOCATIONS)
        plugin_action = plugin.load_actual_plugin(self.gui)
        current_layouts = OrderedDict(
            (loc, list(gprefs.get("action-layout-" + loc, []))) for loc in locations
        )

        # Already placed somewhere in the GUI: nothing to do.
        for names in current_layouts.itervalues():
            if plugin_action.name in names:
                return

        permitted = [
            (loc, label) for loc, label in locations.iteritems() if loc not in plugin_action.dont_add_to
        ]
        if not permitted:
            return  # This plugin doesn't want to live in the GUI

        from calibre.gui2.dialogs.choose_plugin_toolbars import ChoosePluginToolbarsDialog

        dialog = ChoosePluginToolbarsDialog(self, plugin_action, permitted)
        if dialog.exec_() == dialog.Accepted:
            for loc, label in dialog.selected_locations():
                layout = list(gprefs.get("action-layout-" + loc, []))
                layout.append(plugin_action.name)
                gprefs["action-layout-" + loc] = tuple(layout)
Example #6
1
def get_data(task, key, separate_cols=True):
    """Collect the *key* metric for *task* from every simulator run.

    Scans ``results_dir`` for ``*.sim`` directories, reads each matching
    ``test_<task>*.txt`` JSON file, and returns the values as a pandas
    DataFrame -- one column per simulator when *separate_cols* is true,
    otherwise two columns (Backend, <key>).
    """
    data_dir = results_dir
    collected = OrderedDict() if separate_cols else []

    sim_dirs = [d for d in sorted(os.listdir(data_dir)) if ".sim" in d]
    sim_names = [os.path.basename(d).split(".")[0] for d in sim_dirs]

    for sim_dir, sim in zip(sim_dirs, sim_names):
        if separate_cols:
            collected[sim] = []

        for result_path in glob(os.path.join(data_dir, sim_dir, "test_%s*.txt" % task)):
            with open(result_path, "r") as fp:
                try:
                    value = json.load(fp)[key]
                except:
                    # Re-raise, but name the offending file first.
                    print(result_path)
                    raise
                if separate_cols:
                    collected[sim].append(value)
                else:
                    collected.append((sim, value))

    if separate_cols:
        return pd.DataFrame(collected)
    return pd.DataFrame(collected, columns=["Backend", key])
Example #7
0
    def _parse(self, data, base_url):
        """
        Recursively convert parsed JSON into DocJSON wrapper objects.

        Dicts tagged ``"_type": "link"`` become ``Link`` instances, other
        dicts become ``Object``, lists become ``List`` (with any contained
        links dropped), and everything else passes through untouched.
        """
        if isinstance(data, OrderedDict) and data.get("_type") == "link":
            # Link objects
            try:
                return Link(
                    data.get("url"),
                    data.get("method"),
                    data.get("fields"),
                    _base_url=base_url,
                    _transport=self,
                )
            except (TypeError, ValueError) as exc:
                raise ParseError(str(exc))

        if isinstance(data, OrderedDict):
            # Unknown types are ignored and treated as a regular object.
            data.pop("_type", None)
            wrapped = OrderedDict((key, self._parse(value, base_url)) for key, value in data.items())
            return Object(wrapped)

        if isinstance(data, list):
            items = []
            for element in data:
                child = self._parse(element, base_url)
                # 'Link' objects nested directly in a list are ignored.
                if not isinstance(child, Link):
                    items.append(child)
            return List(items)

        return data
Example #8
0
def to_json_stat(input_df, value="value", output="list"):
    """Encode pandas.DataFrame object into JSON-stat format. The DataFrames
       must have exactly one value column.

    Args:
      input_df(pandas.DataFrame): pandas data frame (or list of data frames)
        to encode.
      value (string, optional): name of the value column. Defaults to 'value'.
      output(string): accepts two values: 'list' or 'dict'. Produce list of
        dicts or dict of dicts as output.

    Returns:
      output(string): String with JSON-stat object.

    Raises:
      ValueError: if the non-value columns do not form a unique ID.
    """
    if output == "list":
        result = []
    elif output == "dict":
        result = OrderedDict()
    else:
        # Unknown output mode: serialize to JSON null.  (Previously
        # ``result`` was only assigned inside the loop, so an empty input
        # combined with an invalid *output* raised NameError.)
        result = None

    if isinstance(input_df, pd.DataFrame):
        data = [input_df]
    else:
        data = input_df

    for row, dataframe in enumerate(data):
        # Dimensions are all columns except the value column.  Compare with
        # != rather than substring containment ("item not in value"), which
        # wrongly dropped any column whose name is a substring of *value*
        # (e.g. a column named "val" with the default value column "value").
        dims = dataframe.filter([item for item in dataframe.columns.values if item != value])
        if len(dims.columns.values) != len(set(dims.columns.values)):
            raise ValueError("Non-value columns must constitute a unique ID")
        dim_names = list(dims)
        categories = [
            {
                to_int(i): {
                    "label": to_str(i),
                    "category": {
                        "index": OrderedDict([(to_str(j), to_int(k)) for k, j in enumerate(uniquify(dims[i]))]),
                        "label": OrderedDict([(to_str(j), to_str(j)) for k, j in enumerate(uniquify(dims[i]))]),
                    },
                }
            }
            for i in dims.columns.values
        ]
        dataset_key = "dataset" + str(row + 1)
        dataset = {dataset_key: {"dimension": OrderedDict(), value: [x for x in dataframe[value].values]}}
        # Fold each single-entry category dict into "dimension" exactly once
        # (the original repeated this loop after adding id/size -- a no-op
        # second pass that has been removed).
        for category in categories:
            dataset[dataset_key]["dimension"].update(category)
        dataset[dataset_key]["dimension"].update({"id": dim_names})
        dataset[dataset_key]["dimension"].update(
            {"size": [len(dims[i].unique()) for i in dims.columns.values]}
        )
        if output == "list":
            result.append(dataset)
        elif output == "dict":
            result.update(dataset)
    return json.dumps(result)
Example #9
0
def elementToValue(field, element, default=_marker):
    """Read the contents of an element that is assumed to represent a value
    allowable by the given field.

    If converter is given, it should be an IToUnicode instance.

    If not, the field will be adapted to this interface to obtain a converter.

    Dispatches on the field type: IDict fields recurse over <element>
    children keyed by their "key" attribute, ICollection fields recurse
    into an ordered list, and any other field converts the element text
    via an IFromUnicode adapter (optionally wrapped as an i18n Message).
    """
    value = default

    if IDict.providedBy(field):
        # Mapping field: one <element key="..."> child per entry.
        key_converter = IFromUnicode(field.key_type)
        value = OrderedDict()
        for child in element.iterchildren(tag=etree.Element):
            if noNS(child.tag.lower()) != "element":
                continue
            # Track the current node for error reporting; popped again
            # after the recursive call below.
            parseinfo.stack.append(child)

            key_text = child.attrib.get("key", None)
            if key_text is None:
                k = None
            else:
                k = key_converter.fromUnicode(unicode(key_text))

            value[k] = elementToValue(field.value_type, child)
            parseinfo.stack.pop()
        # Let the field coerce the plain dict into its concrete type.
        value = fieldTypecast(field, value)

    elif ICollection.providedBy(field):
        # Sequence field: one <element> child per item, order preserved.
        value = []
        for child in element.iterchildren(tag=etree.Element):
            if noNS(child.tag.lower()) != "element":
                continue
            parseinfo.stack.append(child)
            v = elementToValue(field.value_type, child)
            value.append(v)
            parseinfo.stack.pop()
        value = fieldTypecast(field, value)

    # Unicode
    else:
        text = element.text
        if text is None:
            value = field.missing_value
        else:
            converter = IFromUnicode(field)
            value = converter.fromUnicode(unicode(text))

        # handle i18n
        if isinstance(value, unicode) and parseinfo.i18n_domain is not None:
            translate_attr = ns("translate", I18N_NAMESPACE)
            msgid = element.attrib.get(translate_attr)
            if msgid:
                # Explicit msgid: the element text becomes the default
                # translation.
                value = Message(msgid, domain=parseinfo.i18n_domain, default=value)
            elif translate_attr in element.attrib:
                # Bare translate attribute: the text itself is the msgid.
                value = Message(value, domain=parseinfo.i18n_domain)

    return value
Example #10
0
def get_categories(url):
    """Scrape the category <option> entries from the page at *url*.

    Returns an OrderedDict mapping each category slug (lowercased, spaces
    replaced by dashes) to its full category URL.

    The original body called ``cats.append(c)`` on an OrderedDict, which
    raises AttributeError on the first option, and computed ``cat_url``
    without ever using it; entries are now stored as slug -> URL pairs.
    """
    soup = bs(urlopen(url))
    cats = OrderedDict()
    # The first option is skipped (presumably a placeholder entry --
    # matches the original [1:] slice; confirm against the live page).
    for opt in soup.findAll("option")[1:]:
        slug = opt.text.split("&")[0].lower().replace(" ", "-")
        cats[slug] = url + "category/" + slug + "/"
    return cats
Example #11
0
def _literal(expr, state):
    if isinstance(expr, ast.Dict):
        out = OrderedDict()
        for key, value in zip(expr.keys, expr.values):
            if isinstance(key, ast.Str) or (
                isinstance(key, ast.Name) and isinstance(state["subs"].get(key.id, None), basestring)
            ):
                kkey = _literal(key, state)
                vvalue = _literal(value, state)
                out[kkey] = vvalue
            else:
                raise PythonToPfaException(
                    "literal JSON keys must be strings or subs identifiers, not {0} (source line {1})".format(
                        ast.dump(expr), expr.lineno
                    )
                )
        return out

    elif isinstance(expr, ast.Num):
        return expr.n

    elif isinstance(expr, ast.Str):
        return expr.s

    elif isinstance(expr, ast.Name):
        if expr.id in state["subs"]:
            return state["subs"][expr.id]
        elif expr.id == "None":
            return None
        elif expr.id == "True":
            return True
        elif expr.id == "False":
            return False
        else:
            raise PythonToPfaException(
                "identifiers ({0}) are not allowed in a literal expression, unless they are subs identifiers (source line {1})".format(
                    ast.dump(expr), expr.lineno
                )
            )

    elif isinstance(expr, ast.List):
        out = []
        for value in expr.elts:
            out.append(_literal(value, state))
        return out

    raise PythonToPfaException(
        "Python AST node {0} (source line {1}) is not literal JSON".format(ast.dump(expr), expr.lineno)
    )
Example #12
0
 def getProducerInterval(self, dataType, aggregationType=None):
     """Return the producer interval setting(s) for *dataType*.

     When *aggregationType* is given, returns that aggregation's value
     (or False if it is not configured).  When it is None, returns an
     OrderedDict mapping every aggregation name under the data type to
     its value.

     NOTE(review): the original tested ``aggregationType == None`` for
     the single-lookup branch (then dereferenced the tree with a None
     key) and called ``.append`` on an OrderedDict in the other branch
     (AttributeError); the commented-out line showed the intended
     ``result[name] = ...value``.  The branches appear swapped and have
     been corrected here -- confirm against callers.  Debug prints
     removed.
     """
     subtrees = self._root.subtrees["data"].subtrees[dataType].subtrees
     if aggregationType is not None:
         if subtrees.get(aggregationType) is not None:
             return subtrees[aggregationType].value
         return False
     result = OrderedDict()
     for name in subtrees:
         result[name] = subtrees[name].value
     return result
Example #13
0
 def ununicodify(obj):
     """Recursively convert every ``unicode`` string inside *obj* to a
     byte string (Python 2), preserving OrderedDict/list structure; any
     other leaf value is returned unchanged.
     """
     if isinstance(obj, OrderedDict):
         converted = OrderedDict()
         for key, val in obj.iteritems():
             new_key = str(key) if isinstance(key, unicode) else key
             converted[new_key] = ununicodify(val)
         return converted
     if isinstance(obj, list):
         return [ununicodify(item) for item in obj]
     if isinstance(obj, unicode):
         return str(obj)
     return obj
    def getAllRecords(self, asDict=True):
        """Fetch every auth token from the database.

        Returns an OrderedDict keyed by token string (or a plain list of
        RMAuthToken when *asDict* is false); None when the database is
        not open.
        """
        if not self.database.isOpen():
            return None

        records = OrderedDict() if asDict else []
        rows = self.database.execute("SELECT token, expiration FROM authTokens")
        for row in rows:
            auth_token = RMAuthToken(str(row[0]), row[1])
            if asDict:
                records[str(row[0])] = auth_token
            else:
                records.append(auth_token)
        return records
Example #15
0
def _to_ordered_dict(sorted_json):
    if isinstance(sorted_json, list):
        ordered = OrderedDict()
        if sorted_json and isinstance(sorted_json[0], list):
            ordered = []
            for i in sorted_json:
                ordered.append(_to_ordered_dict(i))
            return ordered
        else:
            for i in sorted_json:
                if isinstance(i, list):
                    ordered.update(_to_ordered_dict(i))
                elif isinstance(i, tuple):
                    ordered[i[0]] = _to_ordered_dict(i[1])
            return ordered
    else:
        return sorted_json
Example #16
0
class MessageCache(object):
    """FIFO cache of records indexed by their "_key" field.

    With ``unique_keys=True`` (the default) later records with the same
    key replace earlier ones, backed by an OrderedDict; otherwise
    duplicates are kept, with keys tracked in a parallel list.
    ``values()`` drains the cache in insertion order.
    """

    def __init__(self, unique_keys=True):
        self._unique_keys = unique_keys
        if unique_keys:
            self._cache = OrderedDict()
        else:
            self._cache = []
            self._keys = []

    def append(self, record):
        """Store *record*, indexed by ``record["_key"]``."""
        key = record["_key"]
        if self._unique_keys:
            self._cache[key] = record
        else:
            self._cache.append(record)
            self._keys.append(key)

    def values(self):
        """Yield and remove records, oldest first, until the cache is empty."""
        while self._cache:
            yield self._pop()

    def __contains__(self, key):
        if self._unique_keys:
            return key in self._cache
        return key in self._keys

    def __getitem__(self, key):
        if not self._unique_keys:
            # First record stored under *key* (list.index finds the
            # earliest occurrence).
            return self._cache[self._keys.index(key)]
        return self._cache[key]

    def get(self, key, default=None):
        """Like ``dict.get``: ``self[key]`` if present, else *default*."""
        return self[key] if key in self else default

    def __len__(self):
        return len(self._cache)

    def _pop(self):
        # Oldest entry: popitem(last=False) for the dict, pop(0) for the list.
        if self._unique_keys:
            return self._cache.popitem(False)[1]
        return self._cache.pop(0)
Example #17
0
def _parse_sig(sig, arg_names, validate=False):
    """
  Parses signatures into a ``OrderedDict`` of paramName => type.
  Numerically-indexed arguments that do not correspond to an argument
  name in python (i.e. it takes a variable number of arguments) will be
  keyed as the stringified version of its index.

    sig         the signature to be parsed
    arg_names   a list of argument names extracted from python source
    validate    passed through to _type_checking_available for each
                typed argument

  Returns a tuple of (method name, types dict, return type)
  """
    d = SIG_RE.match(sig)
    if not d:
        raise ValueError("Invalid method signature %s" % sig)
    d = d.groupdict()
    # Start with every python-level argument mapped to Any.  ``ret`` stays
    # a list of pairs while only positional args are seen; the first
    # keyword argument converts it to an OrderedDict (see below).
    ret = [(n, Any) for n in arg_names]
    if "args_sig" in d and type(d["args_sig"]) is str and d["args_sig"].strip():
        for i, arg in enumerate(d["args_sig"].strip().split(",")):
            _type_checking_available(sig, validate)
            if "=" in arg:
                # Keyword argument: switch the accumulator to an
                # OrderedDict, seeded with the positional pairs so far.
                if not type(ret) is OrderedDict:
                    ret = OrderedDict(ret)
                dk = KWARG_RE.match(arg)
                if not dk:
                    raise ValueError("Could not parse arg type %s in %s" % (arg, sig))
                dk = dk.groupdict()
                # Both the name and the type part must be non-empty strings.
                if not sum(
                    [(k in dk and type(dk[k]) is str and bool(dk[k].strip())) for k in ("arg_name", "arg_type")]
                ):
                    raise ValueError("Invalid kwarg value %s in %s" % (arg, sig))
                ret[dk["arg_name"]] = _eval_arg_type(dk["arg_type"], None, arg, sig)
            else:
                # Positional argument: not allowed once a keyword argument
                # has converted ``ret`` to a dict.
                if type(ret) is OrderedDict:
                    raise ValueError("Positional arguments must occur " "before keyword arguments in %s" % sig)
                if len(ret) < i + 1:
                    # More positionals than python names: key by index.
                    ret.append((str(i), _eval_arg_type(arg, None, arg, sig)))
                else:
                    # Replace the Any placeholder with the declared type.
                    ret[i] = (ret[i][0], _eval_arg_type(arg, None, arg, sig))
    if not type(ret) is OrderedDict:
        ret = OrderedDict(ret)
    return (d["method_name"], ret, (_eval_arg_type(d["return_sig"], Any, "return", sig) if d["return_sig"] else Any))
Example #18
0
def _dump_node(node, name=None, indent=0):
    if node is None:
        return None
    if isinstance(node, (int, bool, basestring)):
        return node

    struct = OrderedDict({"type": None})
    if isinstance(node, pyast.Node):
        struct["type"] = node.__class__.__name__
        for field in node._fields:
            struct[field] = _dump_node(getattr(node, field))
    elif isinstance(node, list):
        struct = []
        for elem in node:
            struct.append(_dump_node(elem))
    elif isinstance(node, dict):
        struct = {}
        for key, elem in node.items():
            struct[key] = _dump_node(elem)
    return struct
Example #19
0
    def get_body(self, lang=None):
        """Return the contents of the HTML body.

        With ``lang="*"`` returns an OrderedDict mapping each body's
        xml:lang attribute to its serialized content; otherwise returns
        the first body matching *lang* as a string ("" when none match).
        """
        if lang is None:
            lang = self.get_lang()

        bodies = self.xml.findall("{%s}body" % XHTML_NS)

        def render(body):
            # Serialize leading text, children and trailing tail into one
            # string.
            pieces = [body.text if body.text else ""]
            for child in body:
                pieces.append(tostring(child, xmlns=XHTML_NS))
            pieces.append(body.tail if body.tail else "")
            return "".join(pieces)

        if lang == "*":
            result = OrderedDict()
            for body in bodies:
                body_lang = body.attrib.get("{%s}lang" % self.xml_ns, "")
                result[body_lang] = render(body)
            return result

        for body in bodies:
            if body.attrib.get("{%s}lang" % self.xml_ns, self.get_lang()) == lang:
                return render(body)
        return ""
def extract_text(d):
    """
    Recursively extract the value from '#text' fields to simplify the
    parsed XML.

    Mappings with a "style" entry collapse to ``style["#text"]``; other
    mappings recurse per key (single-entry mappings are unwrapped);
    lists recurse elementwise; any other leaf is returned unchanged.
    """
    if isinstance(d, dict):  # includes OrderedDict, which the parser emits
        if "style" in d:
            return d["style"]["#text"]
        if len(d) > 1:  # more than one key:value pair
            text = OrderedDict()
            for key in d.keys():
                text[key] = extract_text(d[key])
            return text
        if len(d) == 1:  # unpack dictionary if only one key:value pair
            # list(...) keeps this working on both Python 2 and 3
            # (``d.values()[0]`` breaks on Python 3 dict views).
            return extract_text(list(d.values())[0])
        # Empty mapping: nothing to extract (previously UnboundLocalError).
        return d
    if isinstance(d, list):
        return [extract_text(item) for item in d]
    # Scalar leaf: return as-is (previously any non-dict input was iterated
    # as if it were a list of dicts, crashing on scalars and strings).
    return d
Example #21
0
    def check_for_add_to_toolbars(self, plugin, previously_installed=True):
        """Offer to add a newly-installed plugin to the GUI toolbars/menus.

        Editor-tool plugins are delegated to
        check_for_add_to_editor_toolbar(); non-GUI plugins and plugins
        already present in some container are left alone.  Otherwise the
        user is asked where the action should live and the chosen layouts
        are persisted to gprefs.
        """
        from calibre.gui2.preferences.toolbar import ConfigWidget
        from calibre.customize import InterfaceActionBase, EditBookToolPlugin

        if isinstance(plugin, EditBookToolPlugin):
            return self.check_for_add_to_editor_toolbar(plugin, previously_installed)

        if not isinstance(plugin, InterfaceActionBase):
            return

        all_locations = OrderedDict(ConfigWidget.LOCATIONS)
        try:
            plugin_action = plugin.load_actual_plugin(self.gui)
        except Exception:
            # Broken plugin, fails to initialize. Given that, it's probably
            # already configured, so we can just quit.  (Narrowed from a
            # bare except so KeyboardInterrupt/SystemExit still propagate.)
            return
        installed_actions = OrderedDict([(key, list(gprefs.get("action-layout-" + key, []))) for key in all_locations])

        # If already installed in a GUI container, do nothing
        for action_names in installed_actions.itervalues():
            if plugin_action.name in action_names:
                return

        allowed_locations = [
            (key, text) for key, text in all_locations.iteritems() if key not in plugin_action.dont_add_to
        ]
        if not allowed_locations:
            return  # This plugin doesn't want to live in the GUI

        from calibre.gui2.dialogs.choose_plugin_toolbars import ChoosePluginToolbarsDialog

        d = ChoosePluginToolbarsDialog(self, plugin_action, allowed_locations)
        if d.exec_() == d.Accepted:
            for key, text in d.selected_locations():
                installed_actions = list(gprefs.get("action-layout-" + key, []))
                installed_actions.append(plugin_action.name)
                gprefs["action-layout-" + key] = tuple(installed_actions)
Example #22
0
    def to_json(self, path, key=None, newline=False, indent=None, **kwargs):
        """
        Write this table to a JSON file or file-like object.

        :code:`kwargs` will be passed through to the JSON encoder.

        :param path:
            File path or file-like object to write to.
        :param key:
            If specified, JSON will be output as an hash instead of a list. May
            be either the name of a column from the this table containing
            unique values or a :class:`function` that takes a row and returns
            a unique value.
        :param newline:
            If `True`, output will be in the form of "newline-delimited JSON".
        :param indent:
            If specified, the number of spaces to indent the JSON for
            formatting.
        :raises ValueError:
            If mutually-exclusive options are combined, or a *key* value is
            not unique across rows.
        """
        # The output modes are mutually exclusive in pairs.
        if key is not None and newline:
            raise ValueError("key and newline may not be specified together.")

        if newline and indent is not None:
            raise ValueError("newline and indent may not be specified together.")

        key_is_row_function = hasattr(key, "__call__")

        json_kwargs = {"ensure_ascii": False, "indent": indent}

        if six.PY2:
            json_kwargs["encoding"] = "utf-8"

        # Pass remaining kwargs through to JSON encoder
        json_kwargs.update(kwargs)

        # One serializer per column, applied positionally to each row below.
        json_funcs = [c.jsonify for c in self._column_types]

        # Only close the file handle if we opened it ourselves.
        close = True
        f = None

        try:
            if hasattr(path, "write"):
                f = path
                close = False
            else:
                if os.path.dirname(path) and not os.path.exists(os.path.dirname(path)):
                    os.makedirs(os.path.dirname(path))
                f = open(path, "w")

            if six.PY2:
                # Wrap the handle so unicode is encoded correctly on Python 2.
                f = codecs.getwriter("utf-8")(f)

            def dump_json(data):
                # Shared writer; appends a newline in newline-delimited mode.
                json.dump(data, f, **json_kwargs)

                if newline:
                    f.write("\n")

            # Keyed
            if key is not None:
                output = OrderedDict()

                for row in self._rows:
                    if key_is_row_function:
                        k = key(row)
                    else:
                        k = row[key]

                    if k in output:
                        raise ValueError("Value %s is not unique in the key column." % six.text_type(k))

                    values = tuple(json_funcs[i](d) for i, d in enumerate(row))
                    output[k] = OrderedDict(zip(row.keys(), values))
                dump_json(output)
            # Newline-delimited
            elif newline:
                for row in self._rows:
                    values = tuple(json_funcs[i](d) for i, d in enumerate(row))
                    dump_json(OrderedDict(zip(row.keys(), values)))
            # Normal
            else:
                output = []

                for row in self._rows:
                    values = tuple(json_funcs[i](d) for i, d in enumerate(row))
                    output.append(OrderedDict(zip(row.keys(), values)))

                dump_json(output)
        finally:
            if close and f is not None:
                f.close()
Example #23
0
                    requests += 1
                    json_string = urlopen(branch_url).read()
                    data = json.loads(str(json_string, encoding="utf-8"))
                    sha = data["commit"]["sha"]

                    tree_url = "https://api.github.com/repos/%s/%s/git/trees/%s?%s" % (user, repo, sha, client_auth)
                    requests += 1
                    json_string = urlopen(tree_url).read()
                    data = json.loads(str(json_string, encoding="utf-8"))

                    success = True
                except (HTTPError):
                    five_hundreds += 1
                    print("Requests: %s, 500s: %s" % (requests, five_hundreds))
                    pass

            has_python = False
            for entry in data["tree"]:
                if re.search("\.py$", entry["path"]) is not None:
                    has_python = True
                    break

            if not has_python:
                print("No python: %s" % name)
            else:
                print("Yes python: %s" % name)

            master_list.append(name)


########NEW FILE########
Example #24
0
            question = OrderedDict()
            # The first entry in the question array is the question
            answer = queslist[0].rstrip()
        elif len(anslist) > 0:
            if question is not False and answer is not False:
                if len(question) == 0:
                    question["question"] = answer
                    question["answer"] = 0
                    question["answers"] = list()
                else:
                    question["answers"].append(answer)
            answer = anslist[0].rstrip()
        elif answer is not False:
            answer += " " + line.rstrip()
        elif question is not False:
            question.append(line.rstrip())

    if question is not False and answer is not False:
        if len(question) > 0:
            question["answers"].append(answer)

    if question is not False and len(question) > 0 and len(question["answers"]) > 0:
        questions.append(question)

    if title is False:
        continue
    if questions is False:
        continue
    if len(questions) == 0:
        continue
Example #25
0
def cast(facet, **context):
    """Project the objects in sources to the given facet

    Args:
        facet (mapping): The definition of facet
        **context: The context that the cast is running in.

    Returns:
        The sources' cast on given facet

    Raises:
        FacetDefinitionError: if ``facet`` is neither a string, a Mapping
            nor an Iterable.
        FacetRequiredError: if a required facet resolves to the undefined
            object.
        FacetValidationError: if a transform/validate hook raises.
    """
    attr_getter = context.get("attr_getter", get_attr)
    undefined_values = context.get("undefined_values", [])
    undefined_object = context.get("undefined_object", undefined)
    prefix = context.get("path", "")

    if isinstance(facet, six.string_types):
        # fetch the named attribute from the source object
        value = attr_getter(context["source"], facet)

    elif isinstance(facet, Mapping):
        if meta("src") in facet:
            # get the value from source object
            value = cast(facet[meta("src")], **context)
        elif meta("val") in facet:
            # this is a constant value
            return facet[meta("val")]
        elif meta("get") in facet:
            # value is provided by external getter
            return facet[meta("get")](facet, **context)
        else:
            # this should be a nested dict
            value = OrderedDict()
            for k, v in facet.items():
                if not is_meta(k):
                    # BUG FIX: the original called
                    # ``cast(v, path=..., **context)``; once ``context``
                    # itself carries a "path" entry (any nesting depth >= 2)
                    # that raised ``TypeError: got multiple values for
                    # keyword argument 'path'``. Override the path inside a
                    # copied context instead.
                    child_context = dict(context, path="{}.{}".format(prefix, k).lstrip("."))
                    value[k] = cast(v, **child_context)

    elif isinstance(facet, Iterable):
        # this should be a nested list
        value = []
        for ind, item in enumerate(facet):
            # Same duplicate-keyword fix as the nested-dict branch above.
            child_context = dict(context, path="{}[{}]".format(prefix, ind))
            value.append(cast(item, **child_context))

    else:
        # this is an invalid facet definition
        raise FacetDefinitionError(facet)

    if value is undefined or value in undefined_values:
        # normalize any configured "undefined" marker to the undefined object
        value = undefined_object

    if isinstance(facet, Mapping):
        if value is undefined_object:
            if meta("default") in facet:
                return facet[meta("default")]
            if facet.get(meta("required")):
                raise FacetRequiredError(prefix)
        else:
            try:
                if meta("transform") in facet:
                    value = facet[meta("transform")](value)
                if meta("validate") in facet:
                    facet[meta("validate")](value)
            except Exception as err:
                raise FacetValidationError(prefix, err)

    return value
Example #26
0
    def to_json(self, path, key=None, newline=False, indent=None, **kwargs):
        """
        Write this table to a JSON file or file-like object.

        ``kwargs`` will be passed through to the JSON encoder.

        :param path:
            File path or file-like object to write to.
        :param key:
            If specified, JSON will be output as a hash instead of a list. May
            be either the name of a column from this table containing
            unique values or a :class:`function` that takes a row and returns
            a unique value.
        :param newline:
            If ``True``, output will be in the form of "newline-delimited JSON".
        :param indent:
            If specified, the number of spaces to indent the JSON for
            formatting.
        :raises ValueError:
            If ``key`` and ``newline`` (or ``newline`` and ``indent``) are
            combined, or a key value is not unique.
        """
        if key is not None and newline:
            raise ValueError("key and newline may not be specified together.")

        if newline and indent is not None:
            raise ValueError("newline and indent may not be specified together.")

        key_is_row_function = hasattr(key, "__call__")

        json_kwargs = {"ensure_ascii": False, "indent": indent, "default": utils.json_encode}

        if six.PY2:
            json_kwargs["encoding"] = "utf-8"

        # BUG FIX: the original set ``close = True`` before opening the file;
        # if ``open()`` raised, the ``finally`` clause referenced ``f`` before
        # assignment, masking the real error with an UnboundLocalError.
        f = None
        close = False

        try:
            if hasattr(path, "write"):
                # Caller supplied a file-like object; they own its lifetime.
                f = path
            else:
                f = open(path, "w")
                close = True

            if six.PY2:
                # Closing the utf-8 writer wrapper also closes the underlying
                # file, so ``close`` semantics are unchanged.
                f = codecs.getwriter("utf-8")(f)

            def dump_json(data):
                # Shared serializer; appends a newline in NDJSON mode.
                json.dump(data, f, **json_kwargs)

                if newline:
                    f.write("\n")

            # Keyed
            if key is not None:
                output = OrderedDict()

                for row in self._rows:
                    k = key(row) if key_is_row_function else row[key]

                    if k in output:
                        raise ValueError("Value %s is not unique in the key column." % six.text_type(k))

                    output[k] = row.dict()
                dump_json(output)
            # Newline-delimited
            elif newline:
                for row in self._rows:
                    dump_json(row.dict())
            # Normal
            else:
                dump_json([row.dict() for row in self._rows])
        finally:
            if close and f is not None:
                f.close()
Example #27
0
class MultiApp(Configurator):
    """A :class:`MultiApp` is a tool for creating several :class:`Application`
    and starting them at once.

    It makes sure all :ref:`settings <settings>` for the
    applications created are available in the command line.

    :class:`MultiApp` derives from :class:`Configurator` and therefore
    supports all its configuration utilities,
    :meth:`build` is the only method which must be implemented by
    subclasses.

    A minimal example usage::

        import pulsar

        class Server(pulsar.MultiApp):

            def build(self):
                yield self.new_app(TaskQueue)
                yield self.new_app(WSGIserver, prefix="rpc", callable=..., ...)
                yield self.new_app(WSGIserver, prefix="web", callable=..., ...)
    """

    # Lazily-built container of applications; populated by :meth:`apps`.
    _apps = None

    def build(self):
        """Virtual method, must be implemented by subclasses and return an
        iterable over results obtained from calls to the
        :meth:`new_app` method.
        """
        raise NotImplementedError

    def apps(self):
        """List of :class:`Application` for this :class:`MultiApp`.

        The list is lazily loaded from the :meth:`build` method.
        """
        if self._apps is None:
            # Add modified settings values to the list of cfg params
            self.cfg.params.update(((s.name, s.value) for s in self.cfg.settings.values() if s.modified))
            # Reset settings; they are re-populated by new_app() calls
            # made from build() (via _build below).
            self.cfg.settings = {}
            self._apps = OrderedDict()
            self._apps.update(self._build())
            if not self._apps:
                return []
            # Load the configuration (command line and config file)
            self.load_config()
            kwargs = self._get_app_params()
            apps = self._apps
            # Replace the prefix->tuple mapping with the final list of
            # Application instances.
            self._apps = []
            for App, name, callable, cfg in self._iter_app(apps):
                settings = self.cfg.settings
                new_settings = {}
                for key in cfg:
                    setting = settings[key].copy()
                    # Restore the setting's original (un-prefixed) name for
                    # the child application's own config.
                    if setting.orig_name and setting.orig_name != setting.name:
                        setting.name = setting.orig_name
                    new_settings[setting.name] = setting
                cfg.settings = new_settings
                kwargs.update({"name": name, "cfg": cfg, "callable": callable})
                # Only the main application receives the version parameter.
                if name == self.name:
                    params = kwargs.copy()
                    params["version"] = self.version
                else:
                    params = kwargs
                self._apps.append(App(**params))
        return self._apps

    def new_app(self, App, prefix=None, callable=None, **params):
        """Invoke this method in the :meth:`build` method as many times
        as the number of :class:`Application` required by this
        :class:`MultiApp`.

        :param App: an :class:`Application` class.
        :param prefix: The prefix to use for the application,
            the prefix is appended to
            the application :ref:`config parameters <settings>` and to the
            application name. Each call to this method must use a different
            value for this parameter. It can be ``None``.
        :param callable: optional callable (function or object) used during
            initialisation of *App* (the :class:`Application.callable`).
        :param params: additional key-valued parameters used when creating
            an instance of *App*.
        :return: a tuple used by the :meth:`apps` method.
        """
        params.update(self.cfg.params.copy())
        params.pop("name", None)  # remove the name
        prefix = prefix or ""
        # The empty prefix is reserved for the main app; if it is taken,
        # derive a prefix from the App class itself.
        if not prefix and "" in self._apps:
            prefix = App.name or App.__name__.lower()
        if not prefix:
            name = self.name
            cfg = App.create_config(params, name=name)
        else:
            name = "%s_%s" % (prefix, self.name)
            cfg = App.create_config(params, prefix=prefix, name=name)
        # Add the config entry to the multi app config if not available
        for k in cfg.settings:
            if k not in self.cfg.settings:
                self.cfg.settings[k] = cfg.settings[k]
        return new_app(prefix, (App, name, callable, cfg))

    def __call__(self, actor=None):
        """Start every application in this :class:`MultiApp` on ``actor``
        and return a future gathering their results."""
        apps = [app(actor) for app in self.apps()]
        return asyncio.gather(*apps, loop=get_actor()._loop)

    #    INTERNALS
    def _build(self):
        """Yield the tuples produced by :meth:`build`, validating that each
        one was created via :meth:`new_app`."""
        for app in self.build():
            if not isinstance(app, new_app):
                raise ImproperlyConfigured("You must use new_app when building a MultiApp")
            yield app

    def _iter_app(self, app_name_callables):
        """Yield the main application (empty prefix) first, then the rest."""
        main = app_name_callables.pop("", None)
        if not main:
            raise ImproperlyConfigured("No main application in MultiApp")
        yield main
        for app in app_name_callables.values():
            yield app

    def _get_app_params(self):
        """Collect keyword parameters common to all child applications from
        ``cfg.params`` and the public instance attributes."""
        params = self.cfg.params.copy()
        for key, value in self.__dict__.items():
            if key.startswith("_"):
                continue
            elif key == "console_parsed":
                # Inverted: a pre-parsed console means do not parse again.
                params["parse_console"] = not value
            else:
                params[key] = value
        params["load_config"] = False
        return params
Example #28
0
    def __call__(self, obj):
        """Serialize *obj* into JSON-compatible structures (encoder hook).

        Handles multiconf items, Env objects, classes, iterables and (on
        Python 2) old-style class instances; anything unrecognised is
        reported as a ``__json_error__`` string.  A class-level
        ``recursion_check`` flag detects nested json calls (e.g. a
        @property that itself calls json/repr) and ``self.seen`` prevents
        dumping the same object twice.
        """
        if ConfigItemEncoder.recursion_check.in_default:
            # A previous invocation is still in flight: json was called
            # from within json. Clear the flag and complain loudly.
            in_default = ConfigItemEncoder.recursion_check.in_default
            ConfigItemEncoder.recursion_check.in_default = None
            if self.recursion_check.warn_nesting:
                print("Warning: Nested json calls:", file=sys.stderr)
                print("outer object type:", type(in_default), file=sys.stderr)
                print("inner object type:", repr(type(obj)) + ", inner obj:", obj, file=sys.stderr)
            raise NestedJsonCallError(
                "Nested json calls detected. Maybe a @property method calls json or repr (implicitly)?"
            )

        try:
            ConfigItemEncoder.recursion_check.in_default = obj
            if not self.start_obj:
                self.start_obj = obj

            # Objects already dumped are replaced by a marker string.
            if self.seen.get(id(obj)):
                return self._already_dumped_str(obj)
            self.seen[id(obj)] = obj

            if isinstance(obj, self.multiconf_base_type):
                # print("# Handle ConfigItems", type(obj))
                dd = self._mc_class_dict(obj)

                entries = ()
                try:
                    entries = dir(obj)
                except Exception as ex:
                    self.num_errors += 1
                    print("Error in json generation:", file=sys.stderr)
                    traceback.print_exception(*sys.exc_info())
                    dd[
                        "__json_error__ # trying to list property methods, failed call to dir(), @properties will not be included"
                    ] = repr(ex)

                # Order 'env' first on root object
                if isinstance(obj, self.multiconf_root_type):
                    dd["env"] = obj.env

                # Handle attributes
                attributes_overriding_property = set()
                attr_dict = {}
                item_dict = OrderedDict()
                for key, item in obj._iterattributes():
                    val = orig_val = item._mc_value()

                    # The user filter may rename or drop (key is False) entries.
                    if self.user_filter_callable:
                        key, val = self.user_filter_callable(obj, key, val)
                        if key is False:
                            continue

                    if not self.builders and isinstance(val, self.multiconf_builder_type):
                        continue

                    val = self._check_nesting(obj, val)
                    if isinstance(val, Excluded):
                        if self.compact:
                            item_dict[key] = "false #" + repr(val)
                        else:
                            item_dict[key] = False
                            item_dict[key + " #" + repr(val)] = True
                    elif val != McInvalidValue.MC_NO_VALUE:
                        # TODO: Include type of dict in json meta info
                        if isinstance(orig_val, (self.multiconf_base_type, Repeatable)):
                            item_dict[key] = val
                        elif isinstance(val, dict):
                            # Replace nested multiconf items inside dicts
                            # with "#ref, id: ..." strings.
                            new_val = OrderedDict()
                            for inner_key, maybeitem in val.items():
                                if not isinstance(maybeitem, self.multiconf_base_type):
                                    new_val[inner_key] = maybeitem
                                    continue
                                new_val[inner_key] = "#ref, id: " + repr(id(maybeitem))
                            attr_dict[key] = new_val
                        else:
                            try:
                                iterable = iter(val)
                            except TypeError:
                                attr_dict[key] = val
                            else:
                                # TODO: Include type of iterable in json meta info
                                if isinstance(orig_val, str):
                                    attr_dict[key] = val
                                else:
                                    # Same "#ref" substitution for iterables,
                                    # but only materialize a new list if a
                                    # multiconf item was actually found.
                                    new_val = []
                                    found_mc_ref = False
                                    for maybeitem in val:
                                        if not isinstance(maybeitem, self.multiconf_base_type):
                                            new_val.append(maybeitem)
                                            continue
                                        found_mc_ref = True
                                        new_val.append("#ref, id: " + repr(id(maybeitem)))
                                    if found_mc_ref:
                                        attr_dict[key] = new_val
                                    else:
                                        # We leave this to be handled later
                                        attr_dict[key] = val

                    # The attribute name also exists on the class (dir()),
                    # i.e. a @property with the same name.
                    if key in entries:
                        if val != McInvalidValue.MC_NO_VALUE:
                            attributes_overriding_property.add(key)
                            attr_dict[key + " #!overrides @property"] = True
                        else:
                            attr_dict[key + " #value for current env provided by @property"] = True
                    elif val == McInvalidValue.MC_NO_VALUE:
                        attr_dict[key + " #no value for current env"] = True

                # Plain attributes sorted first, then nested items in order.
                for key in sorted(attr_dict):
                    dd[key] = attr_dict[key]
                dd.update(item_dict)
                if not self.property_methods:
                    return dd

                # Handle @property methods (defined in subclasses)
                for key in entries:
                    if key.startswith("_") or key in self.filter_out_keys:
                        continue

                    real_key = key
                    if key in attributes_overriding_property:
                        key += " #!overridden @property"

                    try:
                        val = object.__getattribute__(obj, real_key)
                    except InvalidUsageException as ex:
                        self.num_invalid_usages += 1
                        dd[key + " #invalid usage context"] = repr(ex)
                        continue
                    except Exception as ex:
                        self.num_errors += 1
                        print("Error in json generation:", file=sys.stderr)
                        traceback.print_exception(*sys.exc_info())
                        dd[key + " # json_error trying to handle property method"] = repr(ex)
                        continue

                    if type(val) == types.MethodType:
                        continue

                    if self.user_filter_callable:
                        real_key, val = self.user_filter_callable(obj, real_key, val)
                        if real_key is False:
                            continue

                    if type(val) == type:
                        dd[key] = repr(val)
                        continue

                    val = self._check_nesting(obj, val)

                    # Figure out if the attribute is a @property or a static value
                    for cls in get_bases(object.__getattribute__(obj, "__class__")):
                        try:
                            real_attr = object.__getattribute__(cls, real_key)
                            if isinstance(real_attr, property):
                                calc_or_static = _calculated_value
                            else:
                                calc_or_static = _static_value
                            break
                        except AttributeError:
                            pass

                    if (self.compact or real_key in attributes_overriding_property) and isinstance(
                        val, (str, int, long, float)
                    ):
                        dd[key] = str(val) + calc_or_static
                        continue

                    if isinstance(val, (list, tuple)):
                        new_list = []
                        for item in val:
                            new_list.append(self._check_nesting(obj, item))
                        dd[key] = new_list
                        dd[key + calc_or_static] = True
                        continue

                    if isinstance(val, dict):
                        new_dict = OrderedDict()
                        for item_key, item in val.items():
                            new_dict[item_key] = self._check_nesting(obj, item)
                        dd[key] = new_dict
                        dd[key + calc_or_static] = True
                        continue

                    dd[key] = val
                    dd[key + calc_or_static] = True
                return dd

            if isinstance(obj, envs.BaseEnv):
                # print "# Handle Env objects", type(obj)
                dd = OrderedDict((_class_tuple(obj),))
                for eg in obj.all:
                    dd["name"] = eg.name
                return dd

            if type(obj) == type:
                return repr(obj)

            # If obj defines json_equivalent, then return the result of that
            if hasattr(obj, "json_equivalent"):
                return obj.json_equivalent()

            try:
                iterable = iter(obj)
            except TypeError:
                pass
            else:
                # print "# Handle iterable objects", type(obj)
                return list(iterable)

            if self.user_fallback_callable:
                obj, handled = self.user_fallback_callable(obj)
                if handled:
                    return obj

            if major_version < 3 and isinstance(obj, types.InstanceType):
                # print "# Handle instances of old style classes", type(obj)
                # Note that new style class instances are practically indistinguishable from other types of objects
                dd = self._class_dict(obj)
                for key, val in obj.__dict__.items():
                    if key[0] != "_":
                        dd[key] = self._already_dumped_str(val) if self.seen.get(id(val)) else val
                return dd

            self.num_errors += 1
            return "__json_error__ # don't know how to handle obj of type: " + repr(type(obj))

        finally:
            # Always clear the nesting flag, even when an error occurred.
            ConfigItemEncoder.recursion_check.in_default = None
Example #29
0
    def __init__(self, data=None, items=None, post=None, **args):

        super().__init__(data, items, post, **args)

        config = self.config
        config_data = config["data"]
        field_config = config_data.get("field_config", {})

        set = self["configset"]

        options = OrderedDict()

        options["autocomplete"] = s("Auto complete")
        options["select"] = s("Select list")
        options["checkboxes"] = s("Check boxes")
        options["radioboxes"] = s("Radio boxes")

        # max limit
        set.add(
            "HTMLSelect",
            {
                "id": "field_selection_type",
                "value": post.get("field_selection_type", None) or field_config.get("field_selection_type", None),
                "title": s("Field selection type"),
                "options": options,
                "required": True,
                "css": ["i-form-large"],
                "multiple": False,
                "weight": 1,
            },
        )

        field_value_options = post.get("field_value_options", None)
        if field_value_options is None:
            keys = field_config.get("field_value_option_keys", [])
            values = field_config.get("field_value_option_values", {})
            options = []
            for key in keys:
                if key in values:
                    options.append(":".join((str(key), values[key])))
            field_value_options = "\n".join(options)

        set.add(
            "TextArea",
            {
                "id": "field_value_options",
                "value": field_value_options,
                "title": s("Field allowed values"),
                "required": True,
                "css": ["i-form-large i-width-1-1"],
                "multiple": False,
                "weight": 2,
                "info": s(
                    """Colon separated key:value pair of allowed values, one per line. Key should be a number.
example:<br>
0:Female<br>
1:Male<br>
2:Shemale<br>
"""
                ),
                "validation_rule": ["Not", [["Empty"]], "Values field is required"],
            },
        )
Example #30
0
    def main(self):
        """
        Convert CSV to JSON (plain list, keyed hash, or GeoJSON).
        """
        if self.args.lat and not self.args.lon:
            self.argparser.error("--lon is required whenever --lat is specified.")

        if self.args.lon and not self.args.lat:
            self.argparser.error("--lat is required whenever --lon is specified.")

        if self.args.crs and not self.args.lat:
            self.argparser.error("--crs is only allowed when --lat and --lon are also specified.")

        rows = CSVKitReader(self.input_file, **self.reader_kwargs)
        # First row is the header.
        column_names = next(rows)

        if six.PY2:
            stream = codecs.getwriter("utf-8")(self.output_file)
        else:
            stream = self.output_file

        # GeoJSON
        if self.args.lat and self.args.lon:
            features = []
            min_lon = None
            min_lat = None
            max_lon = None
            max_lat = None

            lat_column = match_column_identifier(column_names, self.args.lat, self.args.zero_based)
            lon_column = match_column_identifier(column_names, self.args.lon, self.args.zero_based)

            if self.args.key:
                id_column = match_column_identifier(column_names, self.args.key, self.args.zero_based)
            else:
                id_column = None

            for row in rows:
                feature = OrderedDict()
                feature["type"] = "Feature"
                properties = OrderedDict()
                geoid = None
                lat = None
                lon = None

                for i, c in enumerate(row):
                    if i == lat_column:
                        try:
                            lat = float(c)
                        except ValueError:
                            lat = None

                        # BUG FIX: only fold parseable values into the bbox.
                        # The original compared None against floats, which
                        # raises TypeError on Python 3 and silently corrupted
                        # the bbox on Python 2.
                        if lat is not None:
                            if min_lat is None or lat < min_lat:
                                min_lat = lat

                            if max_lat is None or lat > max_lat:
                                max_lat = lat
                    elif i == lon_column:
                        try:
                            lon = float(c)
                        except ValueError:
                            lon = None

                        if lon is not None:
                            if min_lon is None or lon < min_lon:
                                min_lon = lon

                            if max_lon is None or lon > max_lon:
                                max_lon = lon
                    elif id_column is not None and i == id_column:
                        geoid = c
                    else:
                        properties[column_names[i]] = c

                if id_column is not None:
                    feature["id"] = geoid

                # GeoJSON coordinate order is [longitude, latitude].
                feature["geometry"] = OrderedDict([("type", "Point"), ("coordinates", [lon, lat])])

                feature["properties"] = properties

                features.append(feature)

            output = OrderedDict(
                [("type", "FeatureCollection"), ("bbox", [min_lon, min_lat, max_lon, max_lat]), ("features", features)]
            )

            if self.args.crs:
                output["crs"] = OrderedDict([("type", "name"), ("properties", {"name": self.args.crs})])
        # Keyed JSON
        elif self.args.key:
            output = OrderedDict()

            for row in rows:
                data = OrderedDict()

                for i, column in enumerate(column_names):
                    data[column] = row[i]

                k = data[self.args.key]

                if k in output:
                    raise NonUniqueKeyColumnException("Value %s is not unique in the key column." % six.text_type(k))

                output[k] = data
        # Boring JSON
        else:
            output = []

            for row in rows:
                data = OrderedDict()

                for i, column in enumerate(column_names):
                    try:
                        data[column] = row[i]
                    except IndexError:
                        # Short rows: pad missing trailing columns with null.
                        data[column] = None

                output.append(data)

        kwargs = {"ensure_ascii": False, "indent": self.args.indent}

        if six.PY2:
            kwargs["encoding"] = "utf-8"

        json.dump(output, stream, **kwargs)