Example #1
0
def matchall(phasekey, entirerepo=False):
    """
    Execute the 101meta matching phase identified by *phasekey*.

    Files are derived incrementally by default.  Prerequisites: rules101meta
    must already have produced the rules dump, and an environment variable
    named "<phasekey>101dump" must point at the file this phase's dump is
    written to.

    Parameters
    ----------
    phasekey : string
        Name of the matching phase to run.  At the time of writing this may
        be "matches", "predicates" or "fragments".

    entirerepo : optional, bool
        When True, ignore incrementality and walk the entire 101repo,
        re-matching all files -- useful e.g. after a module has changed.
        Defaults to False.

    See Also
    --------
    getphase : resolves phasekey to a phase class
    havechanged : decides whether the rules dump forces a full re-match
    Phase : base class of all matching phases
    """
    dump_path     = os.environ[phasekey + "101dump"]
    rules_path    = os.environ["rules101dump"]
    rules_changed = havechanged(rules_path)
    run_only      = os.environ.get("RUNONLY")

    # Previous matches/failures (if any) feed the incremental run.
    phase_args = []
    if os.path.exists(dump_path):
        with open(dump_path) as handle:
            previous = json.load(handle)
        phase_args.extend((previous["matches"], previous["failures"]))

    # The rules list always goes first in the phase constructor's arguments.
    with open(rules_path) as handle:
        phase_args.insert(0, json.load(handle)["results"]["rules"])

    result = getphase(phasekey)(*phase_args).run(rules_changed or entirerepo or run_only)
    incremental101.writejson(dump_path, result)
Example #2
0
    def onfile(self, **kwargs):
        """
        Derive metadata for a single file from 101repo.

        Called when a file was added or changed, or when the entire repo is
        walked to re-derive all files.  Matches every rule against the file,
        expands multi-metadata rules into one unit per metadata entry,
        applies dominator filtering, and either writes the surviving units
        to the target dump or deletes the dump when nothing matched.

        Parameters
        ----------
        **kwargs
            Per-file walk context.  This method reads "relative" (path
            relative to the repo root) and "target" (dump file path) and
            forwards everything to self.match.
        """
        self.cleandump(kwargs["relative"])

        # Expand each matching rule into one result unit per metadata entry.
        units = []
        for value in self.rules:
            rule   = value["rule"]
            result = self.match(rule, **kwargs)
            if result is not None and "metadata" in rule:
                for metadata in tolist(rule["metadata"]):
                    result["metadata"] = metadata
                    units.append(result.copy())

        # Dominator filtering (rewrite of the TODO-flagged code): a unit
        # declaring metadata["dominator"] == k suppresses every unit whose
        # metadata contains the key k unless that unit itself declares k as
        # its dominator.  Collect the declared dominator keys once (the set
        # also removes duplicates; keys were already required to be hashable
        # by the original "key in metadata" lookups), then keep each unit
        # only if no dominator key dominates it.  Same survivors as the old
        # keys/removals/survivals passes, without the quadratic list scans.
        dominators = set()
        for unit in units:
            metadata = unit["metadata"]
            if "dominator" in metadata:
                dominators.add(metadata["dominator"])

        survivors = []
        for unit in units:
            metadata = unit["metadata"]
            dominated = any(
                key in metadata
                and ("dominator" not in metadata or metadata["dominator"] != key)
                for key in dominators
            )
            if not dominated:
                survivors.append(unit)
        units = survivors

        if units:
            incremental101.writejson(kwargs["target"], units)
            self.matches[kwargs["relative"]] = units
        else:
            # Nothing matched: remove any stale dump for this file.
            incremental101.deletefile(kwargs["target"])
Example #3
0
        # Shield against JSON encoding errors
        # NOTE(review): the enclosing loop head lives outside this view; the
        # ``break`` below exits that loop after a successful parse -- confirm
        # against the full file.
        try:
            with open(filename) as jsonfile:
                data = json.load(jsonfile)

            # Handle lists of rules
            if isinstance(data, list):
                for rule in data:
                    handleRule(rule, relative)
            else:
                handleRule(data, relative)
            break

        except ValueError as e:
            # json.load raises ValueError on malformed JSON; record the file
            # as unreadable and keep walking.  (Python 2 print statement --
            # this snippet predates Python 3.)
            print "Unreadable file {}: {}".format(filename, e)
            unreadableFiles.append(relative)

# Completion of dump: record the summary counts for this run.
numbers.update({
    "numberOfRules":      len(rules),
    "numberOfProblems":   len(unreadableFiles) + len(invalidFiles),
    "numberOfSuffixes":   len(suffixes),
    "numberOfPredicates": len(predicates),
})

# Sort everything so the dump is deterministic and can actually be tested.
rules.sort(key=lambda entry: entry["filename"])
for listing in (suffixes, unreadableFiles, invalidFiles):
    listing.sort()

incremental101.writejson(os.environ["rules101dump"], dump)
        # Run the callable under test; the opening ``try:`` and the enclosing
        # def lie outside this view -- TODO confirm against the full file.
        code()
    except Exception as e:
        # Capture whatever was raised so the assertion helper can inspect it.
        exception = e
    else:
        # Success path: record None so ok() asserts "no exception".
        exception = None
    ok(exception, message)


# writefile is patched out so that only writejson is exercised by this test.
def monkeypatch(*args):
    """Capture the positional arguments of the most recent call in ``called``."""
    global called
    called = [*args]


# Route writejson's file output through the recorder instead of the disk.
inc.writefile = monkeypatch


# Serialize a payload covering lists, strings, floats, nested objects and
# the three JSON-representable singletons (True/False/None).
inc.writejson(
    "path",
    {"list": [1, 2, 3], "string": "string!", "float": 0.23, "other": {"True": True, "False": False, "None": None}},
)

# Expected output (two adjacent literals concatenated to keep the line short).
# NOTE(review): assumes writejson dumps with sorted keys and compact
# separators -- confirm against incremental101.writejson.
want = '{"float":0.23,"list":[1,2,3],"other":{"False":false,' '"None":null,"True":true},"string":"string!"}'

eq_ok(called, ["path", want], "data fit for json serializes correctly")


# A Python set is not JSON-serializable, so writejson must die on it.
# BUG FIX: the old argument set("not", "json") raised TypeError while
# *constructing* the payload (set() accepts at most one iterable), so the
# JSON encoder was never reached and the test passed for the wrong reason.
# A set literal actually hands a set to the serializer.
dies_ok(
    lambda: inc.writejson("whatever", [1, 2, {"not", "json"}]), "serialize a set (which doesn't exist in json) dies"
)