Example #1
0
    def on_task_start(self, name, is_conditional):
        """Playbook callback fired just before a task runs.

        Implements the start-at and step features: tasks are skipped until
        ``self.start_at`` matches, and in step mode the operator is prompted
        before each task (Python 2 only: uses ``raw_input``).

        :param name: the task name shown in the banner
        :param is_conditional: True when the task runs as a handler
            notification ("NOTIFIED" banner instead of "TASK")
        """
        msg = "TASK: [%s]" % name
        if is_conditional:
            msg = "NOTIFIED: [%s]" % name

        # start-at handling: self.start_at only exists until the first
        # matching task is seen, then the attribute is deleted.
        if hasattr(self, "start_at"):
            if name == self.start_at or fnmatch.fnmatch(name, self.start_at):
                # we found our match, we can get rid of this now
                del self.start_at
            elif self.task.role_name:
                # handle tasks prefixed with rolenames ("role | task")
                actual_name = name.split("|", 1)[1].lstrip()
                if actual_name == self.start_at or fnmatch.fnmatch(actual_name, self.start_at):
                    del self.start_at

        if hasattr(self, "start_at"):  # we still have start_at so skip the task
            self.skip_task = True
        elif hasattr(self, "step") and self.step:
            # step mode: ask before every task; "c"/"continue" disables
            # stepping for the rest of the run.
            msg = ("Perform task: %s (y/n/c): " % name).encode(sys.stdout.encoding)
            resp = raw_input(msg)
            if resp.lower() in ["y", "yes"]:
                self.skip_task = False
                display(banner(msg))
            elif resp.lower() in ["c", "continue"]:
                self.skip_task = False
                self.step = False
                display(banner(msg))
            else:
                self.skip_task = True
        else:
            self.skip_task = False
            display(banner(msg))

        call_callback_module("playbook_on_task_start", name, is_conditional)
Example #2
0
    def __check_extension(files, ignore_globs=None, include_globs=None):
        """
        Internal method to filter a dict of file changes by glob patterns.

        An entry is kept when at least one include glob matches its key and
        no ignore glob matches it.

        :param files: dict keyed by file path
        :param ignore_globs: a list of globs to ignore (if none falls back to extensions and ignore_dir)
        :param include_globs: a list of globs to include (if none, includes all).
        :return: dict with the surviving entries of ``files``
        """

        # An empty or missing include list means "include everything".
        if not include_globs:
            include_globs = ["*"]
        ignore_globs = ignore_globs or []

        out = {}
        for path, change in files.items():
            # any() replaces the original count-up-matches-with-sum() idiom.
            included = any(fnmatch.fnmatch(path, g) for g in include_globs)
            excluded = any(fnmatch.fnmatch(path, g) for g in ignore_globs)
            if included and not excluded:
                out[path] = change
        return out
Example #3
0
def GetListOfFiles(Prefix, InputPattern, Suffix):
    """Expand InputPattern into a sorted list of matching files, then wrap
    each entry as Prefix + path + Suffix.

    Three storage back-ends are handled (Python 2 only: shells out via the
    ``commands`` module):
      * /store/cmst3/...  -- directory listed with the ``cmsLs`` command
      * /castor/...       -- directory listed with the ``rfdir`` command
      * anything else     -- plain local ``glob``
    For the remote back-ends, glob-style matching is applied to the part of
    InputPattern after the last "/".
    """
    List = []

    if InputPattern.find("/store/cmst3") == 0:
        # Remote listing: parent dir contents come from cmsLs column 5.
        index = InputPattern.rfind("/")
        Listtmp = commands.getstatusoutput("cmsLs " + InputPattern[0:index] + " | awk '{print $5}'")[1].split("\n")
        pattern = InputPattern[index + 1 : len(InputPattern)]
        for file in Listtmp:
            if fnmatch.fnmatch(file, pattern):
                List.append(InputPattern[0:index] + "/" + file)
    elif InputPattern.find("/castor/") == 0:
        # Same approach for CASTOR paths, using rfdir column 9.
        index = InputPattern.rfind("/")
        Listtmp = commands.getstatusoutput("rfdir " + InputPattern[0:index] + " | awk '{print $9}'")[1].split("\n")
        pattern = InputPattern[index + 1 : len(InputPattern)]
        for file in Listtmp:
            if fnmatch.fnmatch(file, pattern):
                List.append(InputPattern[0:index] + "/" + file)
    else:
        # Local filesystem: glob does the matching directly.
        List = glob.glob(InputPattern)

    List = sorted(List)

    for i in range(len(List)):
        List[i] = Prefix + List[i] + Suffix
    return List
Example #4
0
    def check_exceptions():
        """Copy files whose pattern is marked translatable ("Y") in the
        exception sheet into the matching per-locale target folder, then
        hand the matched files to the cleanup pipeline."""

        # Column 0 of each exception row holds the pattern cell; the
        # remaining columns are per-locale Y/N flags (header_list names them).
        for pattern in pattern_list:
            for row in range(1, exception_sheet.nrows):
                for col in range(1, exception_sheet.ncols):
                    if not fnmatch(pattern, exception_sheet.cell_value(row, 0)):
                        continue
                    if exception_sheet.cell_value(row, col) != "Y":
                        continue
                    target_locale = header_list[col]
                    for candidate in os.listdir(source_folder):
                        if not fnmatch(candidate, pattern + ".*"):
                            continue
                        shutil.copyfile(
                            source_folder + "/" + candidate, target_folder + "/" + target_locale + "/" + candidate
                        )
                        matched_list.append(candidate)

        # Deduplicate the matched files for the cleanup process.
        for unique_match in set(matched_list):
            exception_clean.append(unique_match)

        # Add the list of files to the file_clean_up list.
        counter = 2
        files_to_clean(exception_clean, counter)

        # Process the remaining files.
        process_remaining_files()
Example #5
0
    def expect_content(self, name, content, exact=False):
        """Assert that the file ``name`` matches ``content`` (fnmatch style).

        "$toolset" inside ``content`` is replaced by the current toolset
        name plus a "*" wildcard.  With exact=True the whole text is matched
        as a single pattern; otherwise each line is split into whitespace
        tokens, the tokens of every line are sorted, and lines are compared
        pairwise token-by-token (so token order within a line is ignored).
        On mismatch, prints expected/actual and fails the test.
        Python 2 only: print statements, list-returning map(), builtin
        reduce().
        """
        actual = self.__read_file(name, exact)
        content = string.replace(content, "$toolset", self.toolset + "*")

        matched = False
        if exact:
            matched = fnmatch.fnmatch(actual, content)
        else:

            def sorted_(x):
                # sort in place, then hand the same list back so the call
                # can be embedded in a map() expression
                x.sort()
                return x

            actual_ = map(lambda x: sorted_(x.split()), actual.splitlines())
            content_ = map(lambda x: sorted_(x.split()), content.splitlines())
            if len(actual_) == len(content_):
                # per-line, per-token fnmatch produces a nested list of
                # booleans, which the reduce() folds down to one result
                matched = map(lambda x, y: map(lambda n, p: fnmatch.fnmatch(n, p), x, y), actual_, content_)
                matched = reduce(lambda x, y: x and reduce(lambda a, b: a and b, y), matched)

        if not matched:
            print "Expected:\n"
            print content
            print "Got:\n"
            print actual
            self.fail_test(1)
Example #6
0
def _should_include_path(path, includes, excludes):
    """Return True iff the given path should be included."""
    from os.path import basename
    from fnmatch import fnmatch

    base = basename(path)
    if includes:
        for include in includes:
            if fnmatch(base, include):
                try:
                    log.debug("include `%s' (matches `%s')", path, include)
                except (NameError, AttributeError):
                    pass
                break
        else:
            try:
                log.debug("exclude `%s' (matches no includes)", path)
            except (NameError, AttributeError):
                pass
            return False
    for exclude in excludes:
        if fnmatch(base, exclude):
            try:
                log.debug("exclude `%s' (matches `%s')", path, exclude)
            except (NameError, AttributeError):
                pass
            return False
    return True
def compare(list, values, attrName, subattrName=None, otherList=None, linkProperties=None):
    """Collect every item of ``list`` whose attribute (or sub-attribute)
    fnmatch-es one of ``values``.

    When ``otherList``/``linkProperties`` are given, each item is first
    joined to the ``otherList`` entries whose ``linkProperties[1]``
    attribute equals the item's ``linkProperties[0]`` attribute, and the
    compared attribute is read from the joined entry instead.

    NOTE(review): with an empty ``values`` this returns the Exception
    class itself rather than raising -- kept as-is for compatibility.
    """
    if len(values) == 0:
        return Exception

    returnList = []

    def _matches(owner):
        # Read attrName (and optionally subattrName) from owner and
        # fnmatch it against the pattern currently bound to `value`.
        target = getattr(owner, attrName)
        if subattrName is not None:
            target = getattr(target, subattrName)
        return fnmatch.fnmatch(target, value)

    for value in values:
        for item in list:
            if otherList is None:
                if _matches(item):
                    returnList.append(item)
            else:
                for linked in otherList:
                    if getattr(item, linkProperties[0]) == getattr(linked, linkProperties[1]):
                        if _matches(linked):
                            returnList.append(item)
    return returnList
Example #8
0
def build_archive_list(pattern_list, target_dir):
    """Collect (iso_timestamp, file_name, subdir) tuples for every file in
    ``target_dir`` matching the given patterns, sorted by timestamp.

    Each entry of ``pattern_list`` is (subdir, file_glob, date_pattern);
    when date_pattern is non-empty the timestamp is parsed from the file
    name with strptime, otherwise it is obtained through ``tardate``
    (defined elsewhere in this module).
    """
    collected = []

    for subdir, file_pattern, date_pattern in pattern_list:

        def _stamp(file_name):
            # Timestamp for one file, normalised to an ISO-8601-like string.
            if len(date_pattern) > 0:
                parsed = datetime.strptime(file_name, date_pattern)
            else:
                parsed = date.fromtimestamp(tardate(file_name))
            return parsed.strftime("%Y-%m-%dT%H:%M:%S.%fZ")

        matches = [
            (_stamp(file_name), file_name, subdir)
            for file_name in os.listdir(target_dir)
            if fnmatch.fnmatch(file_name, file_pattern)
        ]

        # Every pattern is expected to match at least one file.
        assert len(matches) > 0

        collected.extend(matches)

    # Sort the global list by date and return it.
    collected.sort(key=lambda entry: entry[0])
    return collected
Example #9
0
            def test_filter(test):
                """Doctest collection filter for the enclosing object.

                Returns False when ``test.name`` is listed in
                ``obj.__doctest_skip__`` ("*" skips everything, "." skips
                the doctest whose name equals the enclosing ``name``), or
                when a module required via ``obj.__doctest_requires__``
                cannot be imported; True otherwise.  Import outcomes are
                memoised in ``self._import_cache``.

                NOTE(review): a cached successful import returns True
                immediately, skipping any remaining modules in ``mods``.
                """
                for pat in getattr(obj, "__doctest_skip__", []):
                    if pat == "*":
                        return False
                    elif pat == "." and test.name == name:
                        return False
                    elif fnmatch.fnmatch(test.name, ".".join((name, pat))):
                        return False

                reqs = getattr(obj, "__doctest_requires__", {})
                for pats, mods in reqs.items():
                    # A key may be a single pattern or a tuple of patterns.
                    if not isinstance(pats, tuple):
                        pats = (pats,)
                    for pat in pats:
                        if not fnmatch.fnmatch(test.name, ".".join((name, pat))):
                            continue
                        # The pattern applies: the listed modules must be
                        # importable, otherwise this doctest is filtered out.
                        for mod in mods:
                            if mod in self._import_cache:
                                return self._import_cache[mod]
                            try:
                                imp.find_module(mod)
                            except ImportError:
                                self._import_cache[mod] = False
                                return False
                            else:
                                self._import_cache[mod] = True
                return True
Example #10
0
    def recfind(p, pats=None):
        """Recursively yield files under ``p`` whose basename matches one of
        the ``pats`` globs, honouring the ``denied_set`` /
        ``cur_rejected_dirs`` directory filters from the enclosing scope.

        :param p: root directory to walk
        :param pats: list of fnmatch patterns; None (the default) means
            ["*"] -- avoids the mutable-default-argument pitfall of the
            original signature.
        """
        if pats is None:
            pats = ["*"]
        # Entries of denied_set ending in "/" denote whole directories.
        denied_dirs = [os.path.dirname(d) for d in denied_set if d.endswith("/")]
        for (dp, dnames, fnames) in os.walk(p):
            # do not traverse under already rejected dirs
            if any(dp.startswith(rejected) for rejected in cur_rejected_dirs):
                continue

            # see if we should ignore the whole directory
            bname = os.path.basename(dp)
            deny = False
            for deny_pat in denied_dirs:
                if fnmatch.fnmatch(bname, deny_pat):
                    deny = True
                    cur_rejected_dirs.add(dp)
                    break
            if deny:
                continue

            for fname in fnames:
                # loop variable renamed from `p`, which shadowed the
                # root-path parameter in the original
                if any(fnmatch.fnmatch(fname, pat) for pat in pats):
                    yield os.path.join(dp, fname)
Example #11
0
def getLocalFontFiles():
    """ Initialises localfontfiles to the list of fonts in the static directories,
        if there are any fonts to be found. This saves us reinitialising the
        same array.

        Returns (localfontfiles, fontnametodirectory) and sets the module
        flag localfontempty accordingly.  Python 2 only: parenthesised
        lambda parameter and dict.has_key().
    """
    global localfontfiles
    global fontnametodirectory
    global localfontempty
    # Already populated by a previous call -- reuse the cached lists.
    if localfontfiles != []:
        localfontempty = False
        return (localfontfiles, fontnametodirectory)
    for i in [settings.STATIC_ROOT]:
        font_dir = os.path.join(i, FONTS_DIR_ADD)
        listdir = os.listdir(font_dir)
        # Keep only font files (TrueType / OpenType / WOFF).
        filtereddir = filter(
            lambda (x): fnmatch.fnmatch(x, "*.ttf") or fnmatch.fnmatch(x, "*.otf") or fnmatch.fnmatch(x, "*.woff"),
            listdir,
        )
        for j in filtereddir:
            # First directory wins if the same font name appears twice.
            if not fontnametodirectory.has_key(j):
                localfontfiles.append(j)
                fontnametodirectory[j] = font_dir
    localfontfiles.sort()
    if localfontfiles != []:
        localfontempty = False
    else:
        localfontempty = True
    return (localfontfiles, fontnametodirectory)
def clean_package_tree(directory, remove_dirs=DIRECTORIES_TO_REMOVE, remove_files=FILES_TO_REMOVE):
    """
    Clean up files that should not be included in a Debian package from the
    given directory. Uses the :py:mod:`fnmatch` module for directory and
    filename matching. Matching is done on the base name of each directory and
    file. This function assumes it is safe to unlink files from the given
    directory (which it should be when :py:func:`copy_package_files()` was
    previously called, e.g. by :py:func:`build_package()`).

    :param directory: The pathname of the directory to clean (a string).
    :param remove_dirs: An iterable with filename patterns of directories that
                        should not be included in the package (e.g. version
                        control directories like ``.git`` and ``.hg``).
    :param remove_files: An iterable with filename patterns of files that
                         should not be included in the package (e.g. version
                         control files like ``.gitignore`` and
                         ``.hgignore``).
    """
    for root, dirs, files in os.walk(directory):
        # Delete matching directories and prune them from ``dirs`` in place
        # so os.walk() does not try to descend into trees we just removed.
        removed = [name for name in dirs if any(fnmatch.fnmatch(name, p) for p in remove_dirs)]
        for name in removed:
            pathname = os.path.join(root, name)
            logger.debug("Cleaning up directory: %s", format_path(pathname))
            shutil.rmtree(pathname)
        dirs[:] = [name for name in dirs if name not in removed]
        for name in files:
            if any(fnmatch.fnmatch(name, p) for p in remove_files):
                pathname = os.path.join(root, name)
                logger.debug("Cleaning up file: %s", format_path(pathname))
                os.unlink(pathname)
Example #13
0
File: ui.py Project: jcparrad/vunit
    def get_source_files(self, pattern="*", library_name=None, allow_empty=False):
        """
        Get a list of source files

        :param pattern: A wildcard pattern matching either an absolute or relative path
        :param library_name: The name of a specific library to search if not all libraries
        :param allow_empty: To disable an error if no files matched the pattern
        :returns: A :class:`.SourceFileList` object
        """

        def _accept(source_file):
            # Optional library filter first, then the wildcard against both
            # the absolute and the relative form of the file name.
            if library_name is not None and source_file.library.name != library_name:
                return False
            return fnmatch(abspath(source_file.name), pattern) or fnmatch(relpath(source_file.name), pattern)

        results = [
            SourceFile(source_file, self._project)
            for source_file in self._project.get_source_files_in_order()
            if _accept(source_file)
        ]

        if not (allow_empty or results):
            raise ValueError(
                ("Pattern %r did not match any file. " "Use allow_empty=True to avoid exception,") % pattern
            )

        return SourceFileList(results)
 def ExpectationAppliesToPage(self, expectation, browser, page):
     """True when the expectation's URL pattern or name pattern matches
     ``page`` and its modifiers apply to ``browser``."""
     url_hit = fnmatch.fnmatch(page.url, expectation.url_pattern)
     name_hit = page.name and fnmatch.fnmatch(page.name, expectation.name_pattern)
     if not (url_hit or name_hit):
         return False
     return bool(self.ModifiersApply(browser, expectation))
Example #15
0
    def resolve_filenames(expr):
        """Expand an HDFS wildcard expression into concrete file URLs.

        The expression is tokenised into scheme://domain/fixed_path plus a
        wildcard-bearing tail (file_expr); every entry of the fixed
        directory matching file_expr (or file_expr + "/part*") is returned
        as a fully-qualified URL.
        """
        client, _ = Hdfs.client_and_path(expr)

        tokens = Tokenizer(expr)
        scheme = tokens.next("://")
        domain = tokens.next("/")
        fixed_path = tokens.next(["*", "?"])
        file_expr = tokens.next()

        def _split_tail(path):
            # Split off the component after the last "/".
            cut = path.rfind("/")
            return path[:cut], path[cut + 1 :]

        if file_expr and "/" in fixed_path:
            fixed_path, tail = _split_tail(fixed_path)
            file_expr = tail + file_expr
        # file_expr is only the actual file expression if there was a * or ?.
        # Handle this case.
        if not file_expr:
            if "/" in fixed_path:
                fixed_path, file_expr = _split_tail(fixed_path)
            else:
                file_expr, fixed_path = fixed_path, ""

        return [
            "{0}://{1}/{2}/{3}".format(scheme, domain, fixed_path, fn)
            for fn in client.list("/" + fixed_path, status=False)
            if fnmatch(fn, file_expr) or fnmatch(fn, file_expr + "/part*")
        ]
Example #16
0
    def allow(self, include=None, exclude=None):
        """
        Given a set of wilcard patterns in the include and exclude arguments,
        tests if the patterns allow this item for processing.

        The exclude parameter is processed first as a broader filter and then
        include is used as a narrower filter to override the results for more
        specific files.

        Example:
        exclude = (".*", "*~")
        include = (".htaccess")

        :param include: iterable of fnmatch patterns that force acceptance
        :param exclude: iterable of fnmatch patterns that reject the item
        :returns: True when self.name survives the filters, else False
        """
        if not include:
            include = ()
        if not exclude:
            exclude = ()

        # any()/all() replace the original reduce() folds (reduce is a
        # Python 2 builtin); the truth tables are identical.
        if any(fnmatch.fnmatch(self.name, pattern) for pattern in include):
            return True

        if all(not fnmatch.fnmatch(self.name, pattern) for pattern in exclude):
            return True

        return False
Example #17
0
def copy(source, destination, ignores=None):
    """Recursively copy source into destination.

    If source is a file, destination has to be a file as well.
    The function is able to copy either files or directories.

    :param source: the source file or directory
    :param destination: the destination file or directory
    :param ignores: either None, or a list of glob patterns;
        files matching those patterns will _not_ be copied.
    """

    def walk_error(err):
        logger.warning("While copying %s: %s: %s", source_, err.filename, err.strerror)

    source_ = os.path.abspath(os.path.expanduser(source))
    destination_ = os.path.abspath(os.path.expanduser(destination))

    if ignores is None:
        ignores = []

    # Match ignores against the normalised path: the raw `source` argument
    # (e.g. "." or a trailing slash) can yield a misleading basename.
    if any(fnmatch.fnmatch(os.path.basename(source_), ignore) for ignore in ignores):
        logger.info("Not copying %s due to ignores", source_)
        return

    if os.path.isfile(source_):
        # Single-file copy: make sure the target directory exists first.
        dst_dir = os.path.dirname(destination_)
        if not os.path.exists(dst_dir):
            logger.info("Creating directory %s", dst_dir)
            os.makedirs(dst_dir)
        logger.info("Copying %s to %s", source_, destination_)
        shutil.copy2(source_, destination_)

    elif os.path.isdir(source_):
        if not os.path.exists(destination_):
            logger.info("Creating directory %s", destination_)
            os.makedirs(destination_)
        if not os.path.isdir(destination_):
            logger.warning("Cannot copy %s (a directory) to %s (a file)", source_, destination_)
            return

        # Report unreadable directories instead of skipping them silently
        # (walk_error was previously defined but never wired up).
        for src_dir, subdirs, others in os.walk(source_, onerror=walk_error):
            dst_dir = os.path.join(destination_, os.path.relpath(src_dir, source_))

            # Filter ignored names in place so os.walk prunes subtrees.
            subdirs[:] = (s for s in subdirs if not any(fnmatch.fnmatch(s, i) for i in ignores))
            others[:] = (o for o in others if not any(fnmatch.fnmatch(o, i) for i in ignores))

            if not os.path.isdir(dst_dir):
                logger.info("Creating directory %s", dst_dir)
                # Parent directories are known to exist, so 'mkdir' suffices.
                os.mkdir(dst_dir)

            for o in others:
                src_path = os.path.join(src_dir, o)
                dst_path = os.path.join(dst_dir, o)
                if os.path.isfile(src_path):
                    logger.info("Copying %s to %s", src_path, dst_path)
                    shutil.copy2(src_path, dst_path)
                else:
                    logger.warning("Skipped copy %s (not a file or directory) to %s", src_path, dst_path)
def najdu_testovací_soubory(cesta):
    """Yield the paths of test files found under ``cesta``.

    A directory is walked recursively and every file matching
    MASKA_TESTOVACÍCH_SOUBORŮ is yielded; a single file is yielded only if
    it matches the mask.  Raises IOError when the path does not exist,
    when a single file does not match the mask, or when no test file was
    found at all.  (Czech identifiers and messages of the public interface
    are preserved from the original module.)
    """
    found = 0

    if os.path.isdir(cesta):
        for dir_path, _dirs, file_names in os.walk(cesta):
            for file_name in file_names:
                if fnmatch.fnmatch(file_name, MASKA_TESTOVACÍCH_SOUBORŮ):
                    found += 1
                    yield os.path.join(dir_path, file_name)
    elif os.path.isfile(cesta):
        if fnmatch.fnmatch(os.path.basename(cesta), MASKA_TESTOVACÍCH_SOUBORŮ):
            found += 1
            yield cesta
        else:
            raise IOError('Soubor testu "{}" neodpovídá masce {}'.format(cesta, MASKA_TESTOVACÍCH_SOUBORŮ))
    else:
        raise IOError('Soubor testu "{}" nejestvuje'.format(cesta))

    if found == 0:
        raise IOError(
            'Nenašel jsem žádný testovací soubor v cestě "{}" za pomocí masky "{}"'.format(
                cesta, MASKA_TESTOVACÍCH_SOUBORŮ
            )
        )
Example #19
0
 def __init__(self, sqla_conn, args, schema=None):
     """Reflect the target database and prepare per-table helpers.

     Connects via SQLAlchemy, reflects all tables in ``schema``, skips
     tables matching ``args.exclude_tables`` globs, monkeypatches
     row-sampling helpers onto each table object, and registers every
     foreign key (plus extra constraints from ``args.config``) on its
     *referred* table so children can be found from the parent side.

     :param sqla_conn: SQLAlchemy connection string
     :param args: parsed options (exclude_tables, full_tables, config, ...)
     :param schema: optional schema name to reflect
     """
     self.args = args
     self.sqla_conn = sqla_conn
     self.schema = schema
     self.engine = sa.create_engine(sqla_conn)
     self.meta = sa.MetaData(bind=self.engine)  # excised schema=schema to prevent errors
     self.meta.reflect(schema=self.schema)
     self.inspector = Inspector(bind=self.engine)
     self.conn = self.engine.connect()
     self.tables = OrderedDict()
     for tbl in self.meta.sorted_tables:
         # Honour the args.exclude_tables glob patterns.
         if any(fnmatch.fnmatch(tbl.name, each) for each in args.exclude_tables):
             continue
         tbl.db = self
         # TODO: Replace all these monkeypatches with an instance assigment
         tbl.find_n_rows = types.MethodType(_find_n_rows, tbl)
         tbl.random_row_func = types.MethodType(_random_row_func, tbl)
         tbl.fks = self.inspector.get_foreign_keys(tbl.name, schema=tbl.schema)
         tbl.pk = self.inspector.get_primary_keys(tbl.name, schema=tbl.schema)
         if not tbl.pk:
             # No declared primary key: fall back to all columns as the key.
             tbl.pk = [d["name"] for d in self.inspector.get_columns(tbl.name)]
         tbl.filtered_by = types.MethodType(_filtered_by, tbl)
         tbl.by_pk = types.MethodType(_by_pk, tbl)
         tbl.pk_val = types.MethodType(_pk_val, tbl)
         tbl.child_fks = []
         # Tables matching args.full_tables get an exact row count; the
         # rest may use a cheaper estimate.
         estimate_rows = not (any(fnmatch.fnmatch(tbl.name, each) for each in self.args.full_tables))
         tbl.find_n_rows(estimate=estimate_rows)
         self.tables[(tbl.schema, tbl.name)] = tbl
     for ((tbl_schema, tbl_name), tbl) in self.tables.items():
         constraints = args.config.get("constraints", {}).get(tbl_name, [])
         for fk in tbl.fks + constraints:
             fk["constrained_schema"] = tbl_schema
             fk["constrained_table"] = tbl_name  # TODO: check against constrained_table
             self.tables[(fk["referred_schema"], fk["referred_table"])].child_fks.append(fk)
Example #20
0
File: mime.py Project: boube/minino
def get_type_by_name(path):
    """Returns type of file by its name, or None if not known.

    Lookup order: exact literal leaf name, lowercased leaf name, the
    extension chain of the leaf (then of the lowercased leaf), and finally
    the glob patterns.
    """
    if not _cache_uptodate:
        _cache_database()

    leaf = os.path.basename(path)
    if leaf in literals:
        return literals[leaf]

    lleaf = leaf.lower()
    if lleaf in literals:
        return literals[lleaf]

    # Sentinel distinguishes "no extension matched" from a falsy stored
    # value, preserving the original early-return semantics exactly.
    _NOT_FOUND = object()

    def _match_extension(name):
        # Strip leading components one dot at a time, so that e.g.
        # "archive.tar.gz" tries "tar.gz" before "gz".
        ext = name
        while 1:
            p = ext.find(".")
            if p < 0:
                return _NOT_FOUND
            ext = ext[p + 1 :]
            if ext in exts:
                return exts[ext]

    # De-duplicated extension walk (originally written out twice).
    hit = _match_extension(leaf)
    if hit is _NOT_FOUND:
        hit = _match_extension(lleaf)
    if hit is not _NOT_FOUND:
        return hit

    for (glob, mime_type) in globs:
        if fnmatch.fnmatch(leaf, glob):
            return mime_type
        if fnmatch.fnmatch(lleaf, glob):
            return mime_type
    return None
Example #21
0
def find_fit_speed_log(out_file, path):
    """Write a gnuplot "plot" command referencing every
    fitlog_time_speed.dat file found under ``path``.

    The previous version walked the tree twice and built a second,
    identical list that was never used (its only consumer was commented
    out); this walks once.  Output format -- including the "plot" string
    with no clauses when nothing matches -- is unchanged, and the output
    file is now closed via a context manager.

    :param out_file: file the plot command is written to
    :param path: directory tree to search
    """
    pattern = "fitlog_time_speed.dat"
    fitlog = []
    for root, dirs, files in os.walk(path):
        for name in files:
            if fnmatch.fnmatch(name, pattern):
                fitlog.append(os.path.join(root, name))

    # One clause per data file; the first column is scaled by 1/3600.
    clauses = ["'" + my_file + "' using ($1/60/60):($2) with lp" for my_file in fitlog]
    if clauses:
        string = "plot " + ",".join(clauses)
    else:
        # Matches the original string[:-1] result on the bare "plot ".
        string = "plot"

    with open(out_file, "w") as text_file:
        text_file.write(string)
Example #22
0
    def find_matches(pkg, provides, matchfor=None):
        """Append ``pkg`` to elrepo_matches when one of its ``provides``
        modalias filters matches a detected device; installed, already
        processed and excluded packages are skipped."""
        ### Skip installed packages
        if pkg.repo.id == "installed":
            return

        ### TODO: Only select the latest package from the deck
        ### Skip packages we already processed
        if conduit._base.conf.debuglevel <= 2:
            seen = [(p.name, p.repo.id) for p in elrepo_matches]
            if (pkg.name, pkg.repo.id) in seen:
                return

        ### Skip packages that are excluded
        if any(fnmatch.fnmatch(pkg.name, excl) for excl in elrepo_exclude):
            return

        ### Check if the package matches current hardware
        for prov in provides:
            # renamed from `filter`, which shadowed the builtin
            alias_filter = prov.split()[0]
            for modalias in elrepo_devices:
                if fnmatch.fnmatch(modalias, alias_filter):
                    elrepo_matches.append(pkg)
                    ### If we get a match, skip all other provides from this package
                    return
Example #23
0
def get_repos(config, dirmatch=None, repomatch=None, namematch=None):
    """Return a :py:obj:`list` list of repos from (expanded) config file.

    :param config: the expanded repo config in :py:class:`dict` format.
    :type config: dict
    :param dirmatch: fnmatch pattern the checkout directory must match
    :type dirmatch: str or None
    :param repomatch: fnmatch pattern the vcs url must match
    :type repomatch: str or None
    :param namematch: fnmatch pattern the project name must match
    :type namematch: str or None
    :rtype: list
    :todo: optimize performance, tests.

    """

    def _passes(repo_name, directory, repo_data):
        # A repo survives only if every supplied filter matches it.
        if dirmatch and not fnmatch.fnmatch(directory, dirmatch):
            return False
        if repomatch and not fnmatch.fnmatch(repo_data["repo"], repomatch):
            return False
        return not (namematch and not fnmatch.fnmatch(repo_name, namematch))

    repo_list = []
    for directory, repos in config.items():
        for repo, repo_data in repos.items():
            if not _passes(repo, directory, repo_data):
                continue
            repo_dict = {"name": repo, "cwd": directory, "url": repo_data["repo"]}
            if "remotes" in repo_data:
                repo_dict["remotes"] = [
                    {"remote_name": remote_name, "url": url}
                    for remote_name, url in repo_data["remotes"].items()
                ]
            repo_list.append(repo_dict)
    return repo_list
Example #24
0
 def doSearch(self):
     """Find the next object whose name matches self.pattern and centre
     the map on it.

     Scans the client database for systems/wormholes whose lowercased
     name -- or, failing that, any object whose string id -- matches the
     pattern; already-visited hits are tracked in self.founded so repeated
     searches cycle through the matches.  Shows "No system found" in the
     status bar when nothing new matches.  Python 2 only (string.lower).
     """
     found = False
     for objID in client.db.keys():
         obj = client.get(objID, noUpdate=1)
         if hasattr(obj, "type") and obj.type in (T_SYSTEM, T_WORMHOLE):
             name = string.lower(getattr(obj, "name", ""))
             # skip hits already visited by a previous search pass
             if len(name) > 0 and not name in self.founded and fnmatch.fnmatch(name, self.pattern):
                 if hasattr(obj, "x") and hasattr(obj, "y") and self.mapWidget:
                     self.mapWidget.highlightPos = (obj.x, obj.y)
                     self.mapWidget.setPos(obj.x, obj.y)
                     self.update()
                 found = True
                 self.founded.append(name)
                 break
         # fall back to matching the raw object id as a string
         name = str(objID)
         if not name in self.founded and fnmatch.fnmatch(name, self.pattern):
             if hasattr(obj, "x") and hasattr(obj, "y") and self.mapWidget:
                 self.mapWidget.highlightPos = (obj.x, obj.y)
                 self.mapWidget.setPos(obj.x, obj.y)
                 self.update()
             found = True
             self.founded.append(name)
             break
     if not found and self.mapWidget:
         self.mapWidget.highlightPos = None
         self.win.setStatus(_("No system found"))
Example #25
0
 def get_host_info(context, init=None):
     """Build the effective host-config dict for a connection context
     string (parsed by HOST_REGEX into user/host/port groups), layering in
     increasing precedence: module defaults, ``init``, glob-pattern
     entries, exact hostinfo entries, and finally the user/port parsed
     from the context itself."""
     resp = default.copy()
     if init:
         resp.update(init)

     info = HOST_REGEX.match(context).groupdict()
     host = info["host"]

     # Pattern-based settings: a pattern may hit the bare host, the full
     # context string, or both (both updates apply, as before).
     for pattern in hostpatterns:
         for candidate in (host, context):
             if fnmatch(candidate, pattern):
                 resp.update(hostpatterninfo[pattern])

     # Exact entries override pattern matches.
     for key in (host, context):
         if key in hostinfo:
             resp.update(hostinfo[key])

     resp["host"] = host
     resp["host_string"] = context

     # Values parsed out of the context win; otherwise keep whatever the
     # config supplied, falling back to port "22" / the default env user.
     if info["port"]:
         resp["port"] = info["port"]
     elif "port" not in resp:
         resp["port"] = "22"
     if info["user"]:
         resp["user"] = info["user"]
     elif "user" not in resp:
         resp["user"] = env.user
     return resp
Example #26
0
File: db_list.py Project: Byron/bit
    def _iter_jobs(self):
        """@return iterator over job information based on our current options"""
        if self.args.input_mode == self.input_mode_all:
            # SETUP ARGS
            find_args = dict()
            if self.args.user:
                find_args["user"] = self.args.user
            # end handle settings

            for jid, user, jobdir in self.context.FindJob(**find_args):
                if self.args.jid_glob and not fnmatch(str(jid), self.args.jid_glob):
                    continue
                if self.args.output_mode == self.output_mode_info:
                    yield jid, user, jobdir
                else:
                    yield self.context.job(user, jid, jobdir=jobdir, mode=TrContext.TrModeObject)
                # end handle job conversion
        else:
            for jid, user in CacheParser(self.args.root).iter_entries():
                # Apply the user filter and the jid glob separately.
                if self.args.user and self.args.user != user:
                    continue
                if self.args.jid_glob and not fnmatch(str(jid), self.args.jid_glob):
                    continue
                job = self.context.job(user, jid)

                if self.args.output_mode == self.output_mode_info:
                    # This is expensive !
                    yield jid, user, job.locate_jobdir()
                else:
                    yield job
Example #27
0
 def f(candidate, ignore_cfg=ignore_cfg):
     """Return True when ``candidate`` (as given, or expressed relative to
     the current directory) matches any pattern in ``ignore_cfg``."""
     return any(
         fnmatch.fnmatch(candidate, ignore) or fnmatch.fnmatch(os.path.relpath(candidate), ignore)
         for ignore in ignore_cfg
     )
Example #28
0
File: acls.py Project: nabeken/func
    def check(self, cm_cert, cert, ip, method, params):
        """Return True when the client certificate is allowed to call
        ``method`` according to the configured ACLs."""

        # certmaster always gets to run things
        # unless we are testing, and need to turn it off.. -al;
        if self.config.certmaster_overrides_acls:
            ca_subject = cm_cert.get_subject().CN
            ca_key = "%s-%s" % (ca_subject, cm_cert.subject_name_hash())
            self.acls[ca_key] = ["*"]

        client_cn = cert.get_subject().CN
        client_hash = cert.subject_name_hash()
        self.logger.debug("cn: %s sub_hash: %s" % (client_cn, client_hash))
        self.logger.debug("acls %s" % self.acls)

        if not self.acls:
            return False

        hostkey = "%s-%s" % (client_cn, client_hash)
        self.logger.debug("hostkey %s" % hostkey)

        # Collect every ACL entry whose host pattern matches this client,
        # then see whether any allowed-method pattern covers the call.
        allowed_methods = []
        for host_pattern in self.acls.keys():
            if fnmatch.fnmatch(hostkey, host_pattern):
                allowed_methods.extend(self.acls[host_pattern])
        for method_pattern in allowed_methods:
            if fnmatch.fnmatch(method, method_pattern):
                return True

        return False
Example #29
0
def filter_models(context, models, exclude):
    """
    Returns (model, perm,) for all models that match models/exclude patterns
    and are visible by current user.

    :param context: template context passed through to get_avail_models()
    :param models: iterable of "module.ClassName" fnmatch patterns to keep
        (empty means keep everything)
    :param exclude: iterable of fnmatch patterns to drop from the result
    """
    items = get_avail_models(context)

    def full_name(model):
        # Dotted path used for pattern matching (PEP 8 E731: def instead
        # of assigning a lambda to a name).
        return "%s.%s" % (model.__module__, model.__name__)

    # I believe that that implemented
    # O(len(patterns)*len(matched_patterns)*len(all_models))
    # algorithm is fine for model lists because they are small and admin
    # performance is not a bottleneck. If it is not the case then the code
    # should be optimized.

    if len(models) == 0:
        included = items
    else:
        included = []
        for pattern in models:
            for item in items:
                model, perms = item
                if fnmatch(full_name(model), pattern) and item not in included:
                    included.append(item)

    result = included[:]
    for pattern in exclude:
        for item in included:
            model, perms = item
            if fnmatch(full_name(model), pattern):
                try:
                    result.remove(item)
                except ValueError:  # if the item was already removed skip
                    pass
    return result
Example #30
0
    def _get(genre):
        """List media entries for ``genre``, skipping the generated
        thumbnail and metadata companion files."""
        genre_dir = "%s/%s" % (options.store, genre)
        entries = []
        for entry in os.listdir(genre_dir):
            is_companion = fnmatch.fnmatch(entry, "*_thumbnail.jpg") or fnmatch.fnmatch(entry, "*_meta.json")
            if not is_companion:
                entries.append(MediaManager._build_response_for(genre, entry))

        return entries
        return response