Example #1
    def test_match_root_files(self):
        basedirPath = 'root'

        matchrules = []
        matchrules.append(MatchRule(r'root\\[^\\]*match[^\\]*$'))
        matchrules.append(
            MatchRule(r'root\\[^\\]*doesnt[^\\]*$', polarity=False))

        crawler = FilesystemCrawler(matchrules)
        matchedPaths = crawler.search(basedirPath, True)
        self.assertEqual(len(matchedPaths), 1)
        self.assertEqual(matchedPaths[0][0], 'root\\match.txt')
Example #2
    def test_match_all_levels(self):
        basedirPath = 'root'

        matchrules = []
        matchrules.append(MatchRule(r'root\\.*match[^\\]*$'))
        matchrules.append(MatchRule(r'root\\.*doesnt[^\\]*$', polarity=False))

        crawler = FilesystemCrawler(matchrules)
        matchedPaths = crawler.search(basedirPath, True)
        self.assertEqual(len(matchedPaths), 6)
        for matchedPath in matchedPaths:
            self.assertTrue('\\match.txt' in matchedPath[0])
Example #3
import re


def parse_match_rules(basedir, rawRules):
    basedir = re.escape(basedir)
    rules = []
    errors = []

    rawLines = rawRules.splitlines()

    for lineCount, rawLine in enumerate(rawLines):
        rawLine = rawLine.strip()

        # Skip comments and empty lines.
        if not rawLine or rawLine[0] == '#':
            continue

        # Unpack per-line flags: regex ('r'), negated ('!i'),
        # files only ('f'), directories only ('d').
        patternsLine, (isRegex, isNegated, filesOnly, dirsOnly) = (
            _extract_flags_from_rawline(rawLine, 'r', '!i', 'f', 'd'))

        pathPattern, contentPattern = (
            _extract_patterns_form_patters_line(patternsLine))

        pathPattern = _path_pattern_to_regex(pathPattern, isRegex)

        try:
            rules.append(
                MatchRule(basedir + pathPattern + '$', not isNegated, dirsOnly,
                          filesOnly, contentPattern))
        except Exception as e:
            errors.append((lineCount + 1, e))
    return rules, errors
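
For orientation, here is a minimal sketch of calling parse_match_rules directly. The import path (a module named crawler) and the glob-style rule lines are assumptions inferred from the tests on this page; the exact per-line flag syntax is handled by _extract_flags_from_rawline and is not shown here.

# Minimal usage sketch (assumed module name 'crawler'; the rule-line syntax is
# inferred from test_parse_match_rules_from_file below, so treat it as illustrative).
from crawler import parse_match_rules

rawRules = '\n'.join([
    '# lines starting with # are comments and are skipped',
    '*match*',  # assumed glob-style pattern, converted to a regex internally
])

rules, errors = parse_match_rules('x:\\root', rawRules)
for lineNumber, error in errors:
    print('could not parse rule on line %d: %s' % (lineNumber, error))
print('%d rule(s) parsed' % len(rules))
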
Example #4
    def test_match_callback(self):
        basedirPath = 'root'

        matchrules = []
        matchrules.append(MatchRule(r'root\\[^\\]*match[^\\]*$'))
        matchrules.append(
            MatchRule(r'root\\[^\\]*doesnt[^\\]*$', polarity=False))

        matchedCallbacks = []

        def matchCallback(match):
            matchedCallbacks.append(match[0])

        crawler = FilesystemCrawler(matchrules, matchCallback=matchCallback)
        matchedPaths = crawler.search(basedirPath, True)

        self.assertEqual(len(matchedPaths), len(matchedCallbacks))
        self.assertEqual(matchedCallbacks[0], 'root\\match.txt')
Example #5
    def test_parse_match_rules_from_file(self):
        basedirPath = 'x:\\root'

        expectedRules = []
        expectedRules.append(MatchRule(r'x\:\\root\\[^\\]*match[^\\]*$'))
        expectedRules.append(
            MatchRule(r'x\:\\root\\[^\\]*doesnt[^\\]*$', polarity=False))

        actualRules, actualErrors = parse_match_rules_from_file(
            basedirPath, self.fileRulesName)

        self.assertEqual(len(actualErrors), 0)

        self.assertEqual(len(expectedRules), len(actualRules))

        actualRulesPatterns = [str(p.pattern) for p in actualRules]

        for expectedRule in expectedRules:
            self.assertTrue(str(expectedRule.pattern) in actualRulesPatterns)
Example #6
    def test_match_files_with_content(self):
        basedirPath = 'root'

        matchrules = []
        matchrules.append(
            MatchRule(r'root\\[^\\]*$', contentPattern='matching text'))

        crawler = FilesystemCrawler(matchrules)
        matchedPaths = crawler.search(basedirPath, True)

        self.assertEqual(len(matchedPaths), 1)
        self.assertEqual(matchedPaths[0][0], 'root\\valid content.txt')
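
Putting the pieces together, the following sketch loads rules from a file and crawls a directory tree with a match callback. It only rearranges the calls shown in the examples above; the module name crawler, the rules file name, and the meaning of the second argument to search are assumptions.

# End-to-end sketch assembled from the calls used in the tests above.
# The module name 'crawler' and the file 'rules.txt' are assumptions.
from crawler import FilesystemCrawler, parse_match_rules_from_file

basedirPath = 'x:\\root'
rules, errors = parse_match_rules_from_file(basedirPath, 'rules.txt')
for lineNumber, error in errors:
    print('bad rule on line %d: %s' % (lineNumber, error))


def report(match):
    # match[0] is the matched path, as seen in test_match_callback above
    print('matched:', match[0])


crawler = FilesystemCrawler(rules, matchCallback=report)
matchedPaths = crawler.search(basedirPath, True)  # True as used in the tests above
print('%d path(s) matched' % len(matchedPaths))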