def setup(self, app):
    """Initialize the plugin from its section of the application config.

    Reads the optional ``include``, ``exclude``, ``no_assume_nochange`` and
    ``assume_nochange`` settings and pre-compiles the include/exclude globs.
    """
    super(FilePlugin, self).setup(app)
    config = app.config
    keys = config.keys(self._meta.label)

    def read_patterns(option):
        # newline-separated glob list, normalized via _process_pattern_list
        raw = config.get(self._meta.label, option)
        return self._process_pattern_list(raw.split('\n'))

    if 'include' in keys:
        self.include = read_patterns('include')
    if 'exclude' in keys:
        self.exclude = read_patterns('exclude')
    if 'no_assume_nochange' in keys:
        self.assume_change = read_patterns('no_assume_nochange')

    # pre-compile the globs once; EXACT anchors the match at both ends
    self.include_pats = [
        globre.compile(pat, flags=globre.EXACT, split_prefix=False)
        for pat in self.include]
    self.exclude_pats = [
        globre.compile(pat, flags=globre.EXACT, split_prefix=False)
        for pat in self.exclude]

    if 'assume_nochange' in keys:
        # comma-separated names, taken verbatim (no glob compilation)
        self.assume_nochange = [
            name.strip()
            for name in config.get(self._meta.label, 'assume_nochange').split(',')]
def factory(handler, registry):
    """Tween factory: parse the CONFIG_PREFIX-scoped registry settings once
    into a `conf` object and return a tween that feeds each request through
    `process` with that configuration."""
    setting = morph.pick(registry.settings, prefix=CONFIG_PREFIX).get

    def compiled_globs(option):
        # each configured glob is compiled with anchored (EXACT) matching
        return [globre.compile(pat, globre.EXACT)
                for pat in aslist(setting(option, []))]

    conf = aadict()
    conf.enabled = asbool(setting('enabled', True))
    conf.include = compiled_globs('include')
    conf.exclude = compiled_globs('exclude')
    conf.reparse = aslist(setting('reparse-methods', DEFAULT_REPARSE_METHODS))
    conf.name = setting('attribute-name', DEFAULT_ATTRIBUTE_NAME)
    conf.deep = asbool(setting('combine.deep', True))
    conf.reqdict = asbool(setting('require-dict', True))
    conf.failunk = asbool(setting('fail-unknown', True))
    conf.ndict = asbool(setting('native-dict', False))
    # optional error handler, given as a dotted symbol name
    conf.error = setting('error-handler', None)
    if conf.error:
        conf.error = asset.symbol(conf.error)
    # per-format enable flags; yaml defaults on only when a parser is available
    conf.xfmt = asbool(setting('xml.enable', True))
    conf.jfmt = asbool(setting('json.enable', True))
    conf.yfmt = asbool(setting('yaml.enable', bool(yaml or setting('yaml.parser'))))
    if conf.jfmt:
        conf.jparser = setting('json.parser', None)
        if conf.jparser:
            conf.jparser = asset.symbol(conf.jparser)
    if conf.yfmt:
        conf.yparser = asset.symbol(setting('yaml.parser', 'yaml.load'))
    if conf.xfmt:
        conf.xparser = asset.symbol(
            setting('xml.parser', 'xml.etree.ElementTree.fromstring'))

    def input_tween(request):
        return process(handler, request, conf)

    return input_tween
def test_prefix(self):
    # (pattern, expected literal prefix) pairs for split_prefix mode
    cases = [
        ('/foo/bar', '/foo/bar'),    # no wildcards: whole pattern is the prefix
        ('/foo/b**', '/foo/b'),      # prefix stops at the first wildcard
        ('??/foo/b**', ''),          # leading wildcard: no literal prefix
    ]
    for pattern, expected in cases:
        self.assertEqual(globre.compile(pattern, split_prefix=True)[0], expected)
def set_up_pickup(self):
    """Normalize every stanza from pickup.conf (booleans, paths, glob
    compilation), drop empty stanzas, then add the internal service
    hot-deployment stanza and start the pickup manager.
    """
    empty = []

    # Fix up booleans and paths
    for stanza, stanza_config in self.pickup_config.items():

        # user_config_items is empty by default
        if not stanza_config:
            # Remember empty stanzas so they can be removed after the loop
            # (entries cannot be deleted from the dict while iterating it).
            empty.append(stanza)
            continue

        # Boolean flags default to True unless explicitly disabled in config
        stanza_config.read_on_pickup = asbool(stanza_config.get('read_on_pickup', True))
        stanza_config.parse_on_pickup = asbool(stanza_config.get('parse_on_pickup', True))
        stanza_config.delete_after_pickup = asbool(stanza_config.get('delete_after_pickup', True))
        stanza_config.case_insensitive = asbool(stanza_config.get('case_insensitive', True))

        # Resolve relative paths against the server's base directory
        stanza_config.pickup_from = absolutize(stanza_config.pickup_from, self.base_dir)
        stanza_config.is_service_hot_deploy = False

        # 'move_processed_to' is optional; keep None when not configured
        mpt = stanza_config.get('move_processed_to')
        stanza_config.move_processed_to = absolutize(mpt, self.base_dir) if mpt else None

        # 'services' and 'topics' may be given as a scalar or a list -
        # normalize both to lists.
        services = stanza_config.get('services') or []
        stanza_config.services = [services] if not isinstance(services, list) else services

        topics = stanza_config.get('topics') or []
        stanza_config.topics = [topics] if not isinstance(topics, list) else topics

        # Compile the file-name globs; EXACT anchors the match and
        # IGNORECASE is added unless the stanza is case-sensitive.
        flags = globre.EXACT
        if stanza_config.case_insensitive:
            flags |= IGNORECASE

        patterns = stanza_config.patterns
        stanza_config.patterns = [patterns] if not isinstance(patterns, list) else patterns
        stanza_config.patterns = [globre.compile(elem, flags) for elem in stanza_config.patterns]

        # A missing pickup dir is not fatal - just warn about it
        if not os.path.exists(stanza_config.pickup_from):
            logger.warn('Pickup dir `%s` does not exist (%s)', stanza_config.pickup_from, stanza)

    for item in empty:
        del self.pickup_config[item]

    # Ok, now that we have configured everything that pickup.conf had
    # we still need to make it aware of services and how to pick them up from FS.
    stanza = 'zato_internal_service_hot_deploy'
    stanza_config = Bunch({
        'pickup_from': self.hot_deploy_config.pickup_dir,
        # Only Python source files are considered for hot-deployment
        'patterns': [globre.compile('*.py', globre.EXACT | IGNORECASE)],
        'read_on_pickup': False,
        'parse_on_pickup': False,
        'delete_after_pickup': self.hot_deploy_config.delete_after_pickup,
        'is_service_hot_deploy': True,
    })

    self.pickup_config[stanza] = stanza_config

    # Start observing all the configured directories in a greenlet
    self.pickup = PickupManager(self, self.pickup_config)
    spawn_greenlet(self.pickup.run)
def setup(self, app):
    """Initialize the plugin from its section of the application config.

    Reads the optional ``include``, ``exclude``, ``no_assume_nochange`` and
    ``assume_nochange`` settings, pre-compiles the include/exclude globs and
    enables float ``st_mtime`` resolution where that switch still exists.
    """
    super(FilePlugin, self).setup(app)
    c = app.config
    conf_keys = c.keys(self._meta.label)
    # get include and exclude list from config file
    if 'include' in conf_keys:
        self.include = self._process_pattern_list(
            c.get(self._meta.label, 'include').split('\n'))
    if 'exclude' in conf_keys:
        self.exclude = self._process_pattern_list(
            c.get(self._meta.label, 'exclude').split('\n'))
    if 'no_assume_nochange' in conf_keys:
        self.assume_change = self._process_pattern_list(
            c.get(self._meta.label, 'no_assume_nochange').split('\n'))
    # compile globre patterns for include and exclude
    self.include_pats = [
        globre.compile(pat, flags=globre.EXACT, split_prefix=False)
        for pat in self.include]
    self.exclude_pats = [
        globre.compile(pat, flags=globre.EXACT, split_prefix=False)
        for pat in self.exclude]
    # BUG FIX: os.stat_float_times() was deprecated in Python 3.3 and removed
    # in Python 3.12 (float timestamps are the default there anyway), so only
    # call it on interpreters that still provide it.
    if hasattr(os, 'stat_float_times'):
        os.stat_float_times(True)
    if 'assume_nochange' in conf_keys:
        # comma-separated names, taken verbatim (no glob compilation)
        self.assume_nochange = [
            x.strip()
            for x in c.get(self._meta.label, 'assume_nochange').split(',')]
def load(pattern, *args, **kw):
    '''
    Resolve a package asset-spec glob-pattern `pattern` ("PACKAGE:GLOB")
    into an :class:`AssetGroup`, which acts as a generator of matching
    :class:`Asset` objects. A non-wildcard pattern resolves directly to a
    single :class:`Asset`. Example:

    .. code-block:: python

      import asset

      # concatenate all 'css' files into one string:
      css = asset.load('mypackage:static/style/**.css').read()
    '''
    if ':' not in pattern:
        raise ValueError('`pattern` must be in the format "PACKAGE:GLOB"')
    spec = pattern
    package, subpattern = spec.split(':', 1)
    # split_prefix yields the literal leading part plus the compiled glob
    prefix, compiled = globre.compile(subpattern, split_prefix=True, flags=globre.EXACT)
    directory = ''
    if prefix:
        # keep only the directory portion of the literal prefix
        cut = prefix.rfind('/')
        if cut >= 0:
            directory = prefix[:cut]
    group = AssetGroup(package, directory, compiled, spec)
    if not globre.iswild(subpattern):
        # no wildcards: the spec names exactly one asset
        return Asset(group, package, subpattern)
    return group
def test_compile_exact(self):
    expr = globre.compile('/foo/bar/*.dir/**.ini', flags=globre.EXACT)
    # EXACT anchors the generated regex at both ends
    self.assertEqual(expr.pattern, r'^\/foo\/bar\/[^/]*?\.dir\/.*?\.ini$')
    accepted = [
        '/foo/bar/a.dir/blue/conf.ini',
        '/foo/bar/a.dir/conf.ini',
    ]
    rejected = [
        '/foo/bar/blue/a.dir/conf.ini',  # '*' must not cross '/'
        '/foo/bar/a.dir/conf.ini.x',     # trailing text rejected
        '/x/foo/bar/a.dir/conf.ini',     # leading text rejected
    ]
    for path in accepted:
        self.assertIsNotNone(expr.match(path))
    for path in rejected:
        self.assertIsNone(expr.match(path))
def token_match(pattern, tokens, match_type='exact', ignore_case=False, glob_method='match'):
    """
    Return a boolean NumPy array signaling matches between `pattern` and `tokens`.

    `pattern` is a string that will be compared with each element in sequence `tokens` either as exact
    string equality (`match_type` is ``'exact'``) or regular expression (`match_type` is ``'regex'``) or
    glob pattern (`match_type` is ``'glob'``).

    :param pattern: either a string or a compiled RE pattern used for matching against `tokens`
    :param tokens: list or NumPy array of string tokens
    :param match_type: one of: 'exact', 'regex', 'glob'; if 'regex', `search_token` must be RE pattern;
                       if `glob`, `search_token` must be a "glob" pattern like "hello w*"
                       (see https://github.com/metagriffin/globre)
    :param ignore_case: if True, ignore case for matching
    :param glob_method: if `match_type` is 'glob', use this glob method. Must be 'match' or 'search'
                        (similar behavior as Python's `re.match` or `re.search`)
    :return: 1D boolean NumPy array of length ``len(tokens)`` where elements signal matches between
             `pattern` and the respective token from `tokens`
    """
    if match_type not in {'exact', 'regex', 'glob'}:
        raise ValueError("`match_type` must be one of `'exact', 'regex', 'glob'`")

    # empty input: nothing to match against
    if len(tokens) == 0:
        return np.array([], dtype=bool)

    if not isinstance(tokens, np.ndarray):
        tokens = np.array(tokens)

    if match_type == 'exact':
        if ignore_case:
            return np.char.lower(tokens) == pattern.lower()
        return tokens == pattern

    # regex and glob branches share the same vectorized-predicate machinery
    ignore_case_flag = dict(flags=re.IGNORECASE) if ignore_case else {}

    if match_type == 'regex':
        if isinstance(pattern, str):
            pattern = re.compile(pattern, **ignore_case_flag)
        method = pattern.search
    else:  # glob
        if glob_method not in {'search', 'match'}:
            raise ValueError("`glob_method` must be one of `'search', 'match'`")
        if isinstance(pattern, str):
            pattern = globre.compile(pattern, **ignore_case_flag)
        method = pattern.search if glob_method == 'search' else pattern.match

    # NOTE: removed the dead trailing `if len(tokens) > 0 else ...` - the
    # empty case already returned early above; also de-duplicated the
    # np.vectorize construction across the regex/glob branches.
    vecmatch = np.vectorize(lambda x: bool(method(x)))
    return vecmatch(tokens)
def _create(self, config):
    """ Low-level implementation of self.create.

    Compiles the channel's file-matching globs, normalizes its pickup
    directories, validates the parser configuration and finally creates
    and wires up a filesystem observer for the channel.
    """
    # type: (Bunch) -> None

    # Case-insensitive matching unless the channel is explicitly case-sensitive
    flags = globre.EXACT
    if not config.is_case_sensitive:
        flags |= IGNORECASE

    file_patterns = config.file_patterns
    pattern_matcher_list = [file_patterns] if not isinstance(file_patterns, list) else file_patterns

    # BUG FIX: compile from the normalized list - the previous code iterated
    # `file_patterns` directly, so a single (non-list) pattern string was
    # iterated character by character, compiling one glob per character.
    pattern_matcher_list = [globre.compile(elem, flags) for elem in pattern_matcher_list]
    self.pattern_matcher_dict[config.name] = pattern_matcher_list

    # This will be a list in the case of pickup.conf and not a list if read from ODB-based file transfer channels
    if isinstance(config.pickup_from_list, list):
        pickup_from_list = config.pickup_from_list
    else:
        pickup_from_list = str(config.pickup_from_list) # type: str
        pickup_from_list = [elem.strip() for elem in pickup_from_list.splitlines()]

    # Make sure that a parser is given if we are to parse any input ..
    if config.should_parse_on_pickup:

        # .. log a warning and disable parsing if no parser was configured when it was expected.
        if not config.parse_with:
            logger.warn('Parsing is enabled but no parser is declared for file transfer channel `%s` (%s)',
                config.name, config.source_type)
            config.should_parse_on_pickup = False

    # Create an observer object ..
    observer_class = source_type_to_observer_class[config.source_type]
    observer = observer_class(self, config) # type: BaseObserver

    # .. and add it to data containers ..
    self.observer_list.append(observer)

    # .. but do not add it to the mapping dict because locally-defined observers (from pickup.conf)
    # may not have any ID, or to be more precise, the may have the same ID.
    if not observer.is_notify:
        self.observer_dict[observer.channel_id] = observer

    # .. finally, set up directories and callbacks for the observer.
    event_handler = FileTransferEventHandler(self, config.name, config)
    observer.set_up(event_handler, pickup_from_list, recursive=False)
def setup(self, app):
    """Configure the plugin: read the exclude globs and the size limit
    from this plugin's section of the application config."""
    super(ConffilePlugin, self).setup(app)
    config = app.config
    # newline-separated exclude globs; drop surrounding whitespace and blanks
    raw_lines = config.get(self._meta.label, 'exclude').split('\n')
    self.exclude = [entry for entry in (line.strip() for line in raw_lines) if entry]
    # pre-compile each glob with anchored (EXACT) matching
    self.excludepats = [
        globre.compile(entry, flags=globre.EXACT, split_prefix=False)
        for entry in self.exclude]
    self.size_limit = int(config.get(self._meta.label, 'size_limit'))
def find(self, expr=None):
    """Return every entry matching `expr`, with passwords cleared.

    `expr` may be empty/None (return everything), a 'regex:'-prefixed
    regular expression, or a plain / 'query:'-prefixed glob expression;
    matching is always case-insensitive.
    """
    # todo: use pylucene?...
    matcher = None
    if expr:
        if expr.startswith('regex:'):
            matcher = re.compile(expr[6:], flags=re.IGNORECASE)
        else:
            text = expr[6:] if expr.startswith('query:') else expr
            # todo: is this really the best natural language evaluation?...
            matcher = globre.compile(text, flags=re.IGNORECASE)
    entries = self.getEntries()
    if matcher:
        # todo: order the results by best match
        entries = [entry for entry in entries if self._matches(entry, matcher)]
    return [entry.clearPassword() for entry in entries]
def setup(self, app):
    """Plugin setup: load the exclusion patterns and size limit from config."""
    super(ConffilePlugin, self).setup(app)
    c = app.config
    # get exclude list from config file, skipping blank entries
    cleaned = []
    for line in c.get(self._meta.label, 'exclude').split('\n'):
        line = line.strip()
        if line:
            cleaned.append(line)
    self.exclude = cleaned
    self.excludepats = []
    for pat in self.exclude:
        # anchored glob match over the whole path
        self.excludepats.append(globre.compile(pat, flags=globre.EXACT, split_prefix=False))
    self.size_limit = int(c.get(self._meta.label, 'size_limit'))
def __init__(self, spec, regex=False, unmatched=False, link=False, clone=False, *args, **kw):
    """Create a documentation endpoint matching paths against `spec`.

    `spec` may be None (match everything), a regular expression when
    `regex` is true, or a glob pattern otherwise; a glob ending in '/**'
    also matches the bare path itself.
    """
    super(DocEndpoint, self).__init__(*args, **kw)
    self.unmatched = unmatched
    self.link = link
    self.clone = clone
    if spec is None:
        # no spec at all: match any path
        self.cre = re.compile('.*')
    elif regex:
        self.cre = re.compile(spec)
    else:
        # rewrite a trailing '/**' so the bare path also matches
        pattern = spec[:-3] + '{(/.*)?}' if spec.endswith('/**') else spec
        self.cre = globre.compile(pattern, flags=globre.EXACT)
def token_match(pattern, tokens, match_type='exact', ignore_case=False, glob_method='match'):
    """
    Return a NumPy array signaling matches between `pattern` and `tokens`.

    `pattern` is a string that will be compared with each element in sequence `tokens` either as exact string equality
    (`match_type` is 'exact') or regular expression (`match_type` is 'regex') or glob pattern (`match_type` is 'glob').

    :param pattern: string (or pre-compiled pattern) matched against each token
    :param tokens: list or NumPy array of string tokens
    :param match_type: one of 'exact', 'regex' or 'glob'
    :param ignore_case: if True, match case-insensitively
    :param glob_method: for 'glob' matching, use the pattern's 'match' or 'search' method
    :return: boolean NumPy array of length ``len(tokens)``
    """
    if match_type not in {'exact', 'regex', 'glob'}:
        raise ValueError(
            "`match_type` must be one of `'exact', 'regex', 'glob'`")
    if not isinstance(tokens, np.ndarray):
        tokens = np.array(tokens)
    # BUG FIX: the regex/glob branches previously always compiled with
    # re.IGNORECASE, silently ignoring `ignore_case=False`; gate the flag
    # on the parameter instead.
    flags = re.IGNORECASE if ignore_case else 0
    if match_type == 'exact':
        return np.char.lower(
            tokens) == pattern.lower() if ignore_case else tokens == pattern
    elif match_type == 'regex':
        if isinstance(pattern, six.string_types):
            pattern = re.compile(pattern, flags=flags)
        vecmatch = np.vectorize(lambda x: bool(pattern.search(x)))
        return vecmatch(tokens)
    else:
        if glob_method not in {'search', 'match'}:
            raise ValueError(
                "`glob_method` must be one of `'search', 'match'`")
        if isinstance(pattern, six.string_types):
            pattern = globre.compile(pattern, flags=flags)
        if glob_method == 'search':
            vecmatch = np.vectorize(lambda x: bool(pattern.search(x)))
        else:
            vecmatch = np.vectorize(lambda x: bool(pattern.match(x)))
        return vecmatch(tokens)
def test_prefix(self):
    # whole pattern is literal => prefix is the full pattern
    prefix, _ = globre.compile('/foo/bar', split_prefix=True)
    self.assertEqual(prefix, '/foo/bar')
    # prefix ends where the first wildcard begins
    prefix, _ = globre.compile('/foo/b**', split_prefix=True)
    self.assertEqual(prefix, '/foo/b')
    # wildcard at the very start => empty prefix
    prefix, _ = globre.compile('??/foo/b**', split_prefix=True)
    self.assertEqual(prefix, '')
def test_compile(self):
    # without EXACT the generated regex is unanchored
    cases = [
        ('/foo/bar/*.dir/**.ini', r'\/foo\/bar\/[^/]*?\.dir\/.*?\.ini'),
        ('/foo/bar-??-[a-z0-9].ini', r'\/foo\/bar\-[^/][^/]\-[a-z0-9]\.ini'),
    ]
    for glob_pattern, regex_pattern in cases:
        self.assertEqual(globre.compile(glob_pattern, flags=0).pattern, regex_pattern)
def __init__(self, path, *args, **kw):
    """Create a matcher for the glob pattern `path`.

    The pattern is pre-compiled with ``globre.EXACT`` so that the entire
    candidate path must match, not just a prefix or substring.
    """
    super(PathMatcher, self).__init__(*args, **kw)
    # compiled pattern object, not the raw string
    self.path = globre.compile(path, flags=globre.EXACT)
def test_complete(self):
    # exercises character classes, escapes and a scoped raw-regex section
    source = r'/foo[0-9a-f]/*/bar\[??\]/{\\D{2,4\}}/**.txt'
    expected = r'\/foo[0-9a-f]\/[^/]*?\/bar\[[^/][^/]\]\/\D{2,4}\/.*?\.txt'
    expr = globre.compile(source)
    self.assertEqual(expr.pattern, expected)
    self.assertIsNotNone(expr.match('/foo6/zog/bar[16]/abra/cadabra.txt'))
def test_sep_compile(self):
    # with a custom separator, '*' must not cross '!' rather than '/'
    compiled = globre.compile('!foo!bar!*.dir!**.ini', sep='!')
    self.assertEqual(compiled.pattern, r'\!foo\!bar\![^\!]*?\.dir\!.*?\.ini')