def _calc_static_tokens(self, definition):
    """
    Finds the tokens from a definition which are not involved in defining keys.
    """
    # expand the definition to include the prefix unless the definition is empty in which
    # case we just want to parse the prefix. For example, in the case of a path template,
    # having an empty definition would result in expanding to the project/storage root
    expanded_definition = (
        os.path.join(self._prefix, definition) if definition else self._prefix
    )

    regex = r"{%s}" % constants.TEMPLATE_KEY_NAME_REGEX
    tokens = re.split(regex, expanded_definition.lower())

    # Remove empty strings
    return [x for x in tokens if x]
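# Illustrative sketch (not part of the class above): a standalone recreation of
# the splitting step, using an assumed value for TEMPLATE_KEY_NAME_REGEX and a
# hypothetical prefix/definition, to show what the static tokens look like.
import os
import re

_KEY_NAME_REGEX = r"[a-zA-Z_ 0-9\.]+"  # assumed; the real pattern lives in constants

def _example_static_tokens(prefix, definition):
    # Expand the definition with the prefix, then split on "{key}" occurrences.
    expanded = os.path.join(prefix, definition) if definition else prefix
    tokens = re.split(r"{%s}" % _KEY_NAME_REGEX, expanded.lower())
    # Drop the empty strings produced when the definition starts or ends with a
    # key, or when two keys are adjacent.
    return [t for t in tokens if t]

# _example_static_tokens("/projects/demo", "{Shot}/{name}.v{version}")
# ==> ['/projects/demo/', '/', '.v']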
def test_unicode_override(self):
    """
    Ensure that the unicode flag overrides the flag insertion behavior.
    """
    char = u"a漢字"
    expr = r"a\w+"

    # test all wrapped methods
    self.assertTrue(bool(sgre.compile(expr, flags=re.U).match(char)))
    self.assertEqual(len(sgre.findall(expr, char, flags=re.U)), 1)
    self.assertTrue(bool(sgre.match(expr, char, flags=re.U)))
    self.assertTrue(bool(sgre.search(expr, char, flags=re.U)))
    self.assertEqual(len(sgre.split(expr, "$ %s @" % char, flags=re.U)), 2)
    self.assertEqual(sgre.sub(expr, "@", char, flags=re.U), "@")
def test_wrap(self):
    r"""
    Ensure that sgre injects the re.ASCII flag appropriately, and that unicode
    characters do not match `\w` in Python 2 or 3.
    """
    char = u"漢字"
    expr = r"\w+"

    # test all wrapped methods
    self.assertFalse(bool(sgre.compile(expr).match(char)))
    self.assertEqual(len(sgre.findall(expr, char)), 0)
    self.assertFalse(bool(sgre.match(expr, char)))
    self.assertFalse(bool(sgre.search(expr, char)))
    self.assertEqual(len(sgre.split(expr, "$ %s @" % char)), 1)
    self.assertEqual(sgre.sub(expr, "@", char), char)
def test_wrap_kwarg(self):
    r"""
    Ensure that sgre injects the re.ASCII flag appropriately when flags are also
    passed as keyword arguments, and that unicode characters do not match `\w`
    in Python 2 or 3.
    """
    char = u"a漢字"
    expr = r"a\w+"

    # test all wrapped methods
    self.assertFalse(bool(sgre.compile(expr, flags=re.I).match(char)))
    self.assertEqual(len(sgre.findall(expr, char, flags=re.I)), 0)
    self.assertFalse(bool(sgre.match(expr, char, flags=re.I)))
    self.assertFalse(bool(sgre.search(expr, char, flags=re.I)))
    self.assertEqual(len(sgre.split(expr, "$ %s @" % char, flags=re.I)), 1)
    self.assertEqual(sgre.sub(expr, "@", char, flags=re.I), char)
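# For context, a minimal sketch of the kind of wrapper these tests exercise:
# a thin layer over the standard re module that injects re.ASCII on Python 3
# unless the caller explicitly requests unicode matching. This is an
# illustrative approximation (showing two of the wrapped functions), not the
# actual sgre implementation.
import re
import sys

def _ascii_flags(flags):
    # Python 2 patterns are byte-oriented by default, so only Python 3 needs
    # the explicit re.ASCII; skip the injection when re.UNICODE was passed.
    if sys.version_info[0] > 2 and not flags & re.UNICODE:
        flags |= re.ASCII
    return flags

def match(pattern, string, flags=0):
    return re.match(pattern, string, _ascii_flags(flags))

def search(pattern, string, flags=0):
    return re.search(pattern, string, _ascii_flags(flags))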
def _definition_variations(self, definition):
    """
    Determines all possible definitions based on combinations of optional sections.

    "{foo}"               ==> ['{foo}']
    "{foo}_{bar}"         ==> ['{foo}_{bar}']
    "{foo}[_{bar}]"       ==> ['{foo}', '{foo}_{bar}']
    "{foo}_[{bar}_{baz}]" ==> ['{foo}_', '{foo}_{bar}_{baz}']
    """
    # split definition by optional sections
    tokens = re.split(r"(\[[^]]*\])", definition)

    # seed with empty string
    definitions = [""]

    for token in tokens:
        temp_definitions = []

        # the regex split produces some blank strings, skip them
        if token == "":
            continue

        if token.startswith("["):
            # check that the optional section contains a key
            if not re.search("{*%s}" % constants.TEMPLATE_KEY_NAME_REGEX, token):
                raise TankError(
                    'Optional sections must include a key definition. Token: "%s" Template: %s'
                    % (token, self)
                )

            # add definitions skipping this optional value
            temp_definitions = definitions[:]
            # strip brackets from token
            token = re.sub(r"[\[\]]", "", token)

        # check that non-optional parts contain no dangling brackets
        if re.search(r"[\[\]]", token):
            raise TankError(
                "Square brackets are not allowed outside of optional section definitions."
            )

        # make definitions with the token appended
        for definition in definitions:
            temp_definitions.append(definition + token)

        definitions = temp_definitions

    return definitions
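# Illustrative sketch (not part of the class above): a standalone version of the
# expansion logic with the key-presence and bracket validation omitted, to show
# how the variations accumulate token by token.
import re

def _example_variations(definition):
    variations = [""]
    for token in re.split(r"(\[[^]]*\])", definition):
        if token == "":
            continue
        if token.startswith("["):
            # optional section: keep every existing variation as-is and also
            # append the section (brackets stripped) to each of them
            stripped = re.sub(r"[\[\]]", "", token)
            variations = variations + [v + stripped for v in variations]
        else:
            # mandatory section: append to every variation
            variations = [v + token for v in variations]
    return variations

# _example_variations("{foo}_[{bar}_{baz}]")
# ==> ['{foo}_', '{foo}_{bar}_{baz}']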