def get_files(self, file_name_filters=None):
    input_zip = ZipFile(self.xlsm_doc_path)
    result = {}
    if not file_name_filters:
        file_name_filters = ['*']
    for i in input_zip.namelist():
        for filter in file_name_filters:
            if i == filter or fnmatch.fnmatch(i, filter):
                result[i] = input_zip.read(i)
    # Excel quirk: Excel converts \x5c ('\') to \x2f ('/') in zip member names
    # and will happily accept the result.
    # Sample: 51762ea84ac51f9e40b1902ebe22c306a732d77a5aa8f03650279d8b21271516
    if not result:
        for i in input_zip.namelist():
            for filter in file_name_filters:
                if i == filter.replace('\x2f', '\x5c') or fnmatch.fnmatch(i, filter.replace('\x2f', '\x5c')):
                    result[i.replace('\x5c', '\x2f')] = input_zip.read(i)
    return result
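The fallback branch above exists because some Excel-produced archives store member names with backslashes. A minimal self-contained sketch of that normalization (the in-memory archive, member name, and pattern are made up for illustration):

import fnmatch
import io
from zipfile import ZipFile

# Build a throwaway archive in memory whose member name uses a backslash,
# the way some Excel-produced files do.
buf = io.BytesIO()
with ZipFile(buf, 'w') as zf:
    zf.writestr('xl\\vbaProject.bin', b'dummy payload')

with ZipFile(buf) as zf:
    pattern = 'xl/*.bin'
    for name in zf.namelist():
        # Match against a backslash version of the pattern, then report the
        # name with forward slashes, mirroring the fallback above.
        if fnmatch.fnmatch(name, pattern.replace('/', '\\')):
            print(name.replace('\\', '/'), len(zf.read(name)))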
Example #2
    def _filter(dir, ls):
        incs = [opt.split('=').pop() for opt in opts if 'inc=' in opt]
        filtered = []
        for f in ls:
            _f = os.path.join(dir, f)

            if not os.path.isdir(_f) and not _f.endswith('.py') and incs:
                if True not in [fnmatch.fnmatch(_f, inc) for inc in incs]:
                    logging.debug('Not syncing %s, does not match include '
                                  'filters (%s)' % (_f, incs))
                    filtered.append(f)
                else:
                    logging.debug('Including file, which matches include '
                                  'filters (%s): %s' % (incs, _f))
            elif (os.path.isfile(_f) and not _f.endswith('.py')):
                logging.debug('Not syncing file: %s' % f)
                filtered.append(f)
            elif (os.path.isdir(_f) and ('test' in f)):
                logging.debug('Not syncing directory: %s' % f)
                filtered.append(f)
            elif (os.path.isdir(_f)
                  and not os.path.isfile(os.path.join(_f, '__init__.py'))):
                logging.debug('Not syncing directory: %s' % f)
                filtered.append(f)
        return filtered
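The `(dir, names) -> names-to-skip` shape of `_filter` is the same shape `shutil.copytree` expects for its `ignore` callback; a small self-contained sketch of that pattern (the directory layout, file names, and globs below are illustrative, not taken from the original project):

import fnmatch
import os
import shutil
import tempfile

def make_ignore(include_globs):
    """Return an ignore callback that skips non-.py files not matching any glob."""
    def ignore(directory, names):
        skipped = []
        for name in names:
            path = os.path.join(directory, name)
            if os.path.isdir(path):
                continue  # never skip directories in this sketch
            if name.endswith('.py'):
                continue
            if not any(fnmatch.fnmatch(path, glob) for glob in include_globs):
                skipped.append(name)
        return skipped
    return ignore

src = tempfile.mkdtemp()
for fname in ('keep.py', 'keep.yaml', 'drop.log'):
    open(os.path.join(src, fname), 'w').close()

dst = os.path.join(tempfile.mkdtemp(), 'copy')
shutil.copytree(src, dst, ignore=make_ignore(['*.yaml']))
print(sorted(os.listdir(dst)))  # ['keep.py', 'keep.yaml']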
Example #3
 def do_search(self, arg):
     matches = []
     for cmdname in self._custom_complete:
         if fnmatch.fnmatch(cmdname, arg):
             matches.append(cmdname)
     cprint("Found %s matches:" % len(matches))
     print "\n".join(matches)
Example #4
def _match(all_packages, key_glob_dict):
    """

    Parameters
    ----------
    all_packages : iterable
        Iterable of package metadata dicts from repodata.json
    key_glob_dict : iterable of kv pairs
        Iterable of (key, glob_value) dicts

    Returns
    -------
    matched : dict
        Iterable of package metadata dicts which match the `target_packages`
        (key, glob_value) tuples
    """
    matched = dict()
    key_glob_dict = {
        key.lower(): glob.lower()
        for key, glob in key_glob_dict.items()
    }
    for pkg_name, pkg_info in all_packages.items():
        matched_all = []
        # normalize the strings so that comparisons are easier
        for key, pattern in key_glob_dict.items():
            name = str(pkg_info.get(key, '')).lower()
            if fnmatch.fnmatch(name, pattern):
                matched_all.append(True)
            else:
                matched_all.append(False)
        if all(matched_all):
            matched.update({pkg_name: pkg_info})

    return matched
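A quick self-contained illustration of what `_match` does with repodata-style metadata; the helper below condenses the same lowercase-both-sides, every-glob-must-match logic (the package records and globs are made up):

import fnmatch

all_packages = {
    'numpy-1.11.3-py36_0.tar.bz2': {'name': 'numpy', 'version': '1.11.3'},
    'scipy-0.19.0-py36_0.tar.bz2': {'name': 'scipy', 'version': '0.19.0'},
}
key_glob_dict = {'name': 'numpy*', 'version': '1.11.*'}

def match(all_packages, key_glob_dict):
    # Same idea as _match: lowercase both sides, require every (key, glob) to match.
    key_glob_dict = {k.lower(): v.lower() for k, v in key_glob_dict.items()}
    return {
        pkg: info for pkg, info in all_packages.items()
        if all(fnmatch.fnmatch(str(info.get(k, '')).lower(), pattern)
               for k, pattern in key_glob_dict.items())
    }

print(list(match(all_packages, key_glob_dict)))  # ['numpy-1.11.3-py36_0.tar.bz2']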
Example #5
def _archive_get_files_from_glob(arc, glob):
    arr = []
    for cffolder in arc.get_folders():
        for cffile in cffolder.get_files():
            filename = cffile.get_name().replace('\\', '/')
            if fnmatch.fnmatch(filename, glob):
                arr.append(cffile)
    return arr
Example #6
 def is_filename_allowed(self, name):
     if not self.file_masks:
         #print "masks not specified for component"
         return True
     for mask in self.file_masks.split():
         if fnmatch.fnmatch(name, mask):
             #print "{} matches to {}".format(name, mask)
             return True
     return False
Example #7
    def get_files(self, file_name_filters=None):
        result = {}
        if not file_name_filters:
            file_name_filters = ['*']

        for i in self._zf.namelist():
            for filter in file_name_filters:
                if fnmatch.fnmatch(i, filter):
                    result[i] = self._zf.read(i)

        return result
Example #8
    def types_by_filename(self, filename):
        """Returns a list of DocTypes matching for the given filename"""
        if not filename:
            #FIXME: return default type
            return []
        rv = []
        for test in self._globs.keys():
            if fnmatch.fnmatch(filename, test):
                rv += self._globs[test]

        return rv
Example #9
    def types_by_filename(self, filename):
        """Returns a list of DocTypes matching for the given filename"""
        if not filename:
            # FIXME: return default type
            return []
        rv = []
        for test in self._globs.keys():
            if fnmatch.fnmatch(filename, test):
                rv += self._globs[test]

        return rv
Example #10
    def get_files(self, file_name_filters=None):
        input_zip = ZipFile(self.xlsm_doc_path)
        result = {}
        if not file_name_filters:
            file_name_filters = ['*']

        for i in input_zip.namelist():
            for filter in file_name_filters:
                if i == filter or fnmatch.fnmatch(i, filter):
                    result[i] = input_zip.read(i)

        return result
Example #11
    def matches(self, filename, filename_matches):
        for filename_match in filename_matches:
            if fnmatch.fnmatch(filename, filename_match):
                return True

        # perhaps 'filename' is part of a query string, so
        # try a regex match
        for filename_match in filename_matches:
            regex = re.compile(r'\\%s\?' % filename_match)
            if regex.search(filename):
                return True

        return False
Example #12
    def matches(self, filename, filename_matches):
        for filename_match in filename_matches:
            if fnmatch.fnmatch(filename, filename_match):
                return True

        # perhaps 'filename' is part of a query string, so
        # try a regex match
        for filename_match in filename_matches:
            regex = re.compile(r"\\%s\?" % filename_match)
            if regex.search(filename):
                return True

        return False
Example #13
 def _build_extra_yum_config(self, branch):
     """Build extra yum configuration for a branch's mock environment
     according to values in the releasers configuration file
     """
     debug("looking for extra yum configuration")
     yum_config = []
     if self.releaser_config.has_option(self.target, EXTRA_YUM_REPOS):
         debug("Appending extra yum configuration")
         yum_config.append(self.releaser_config.get(self.target, EXTRA_YUM_REPOS))
     if self.releaser_config.has_option(self.target, EXTRA_YUM_REPOS_FOR):
         debug("Adding branch-specific extra yum configuraition")
         for pattern, yum_conf in json.loads(self.releaser_config.get(self.target, EXTRA_YUM_REPOS_FOR)):
             debug("  matching extra repos pattern '{0}' against '{1}'".format(pattern, branch))
             if fnmatch.fnmatch(branch, pattern):
                 debug("  found extra yum configuration for '{0}'".format(branch))
                 yum_config.append(yum_conf)
     return "\n".join(yum_config)
Example #14
    def list(self, filter_by=None, incremental=False, fetch_since_days=7):
        server = xmlrpclib.Server(self._pypi_xmlrpc_url)
        packages = server.list_packages()
        if not filter_by:
            return packages

        filtered_packages = []
        for package in packages:
            if not any(fnmatch.fnmatch(package, f) for f in filter_by):
                continue
            filtered_packages.append(package)

        if incremental:
            changelog = server.changelog(int(time.time() - fetch_since_days * 24 * 3600))
            changed_packages = [tp[0] for tp in changelog if "file" in tp[3]]
            changed_packages = [package for package in changed_packages if package in filtered_packages]
            return changed_packages
        else:
            return filtered_packages
Example #15
    def type_by_filename(self, filename):
        """Tries to find only one, the best guess for the type."""
        if not filename:
            return None
        best = None
        best_glob = ""
        best_list = []
        for test in self._globs.keys():
            if fnmatch.fnmatch(filename, test):
                if len(test) > len(best_glob):
                    best_glob = test
                    best_list += self._globs[test]

        if best_glob == '':
            return None
        return self._globs[best_glob][0]
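        # NOTE: the early return above makes the fallback logic below unreachable.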

        if len(best_list) > 1:
            # erks. got more than one result. try different approach
            # guess the mimetype through the python mimetype lib
            #import mimetypes
            gtest = None
            import subprocess
            try:
                gtest = subprocess.Popen(
                    ['file', '-bzki', filename],
                    stdout=subprocess.PIPE).communicate()[0].strip()
                if gtest.find(';') != -1:
                    gtest = gtest[:gtest.find(';')]
            except OSError:
                pass
            if gtest:
                for dt in best_list:
                    if gtest in dt.mimes:
                        best = dt
            else:
                # use the first one as total fallback :(
                best = best_list[0]
        elif len(best_list):
            best = best_list[0]

        return best
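The core of `type_by_filename` is a "longest matching glob wins" rule; a minimal self-contained sketch of that selection (the glob table is invented for illustration):

import fnmatch

globs = {
    '*.txt': 'plain text',
    '*.tar.gz': 'gzipped tarball',
    '*.gz': 'gzip data',
}

def best_type(filename):
    # Prefer the longest glob that matches, as the method above does.
    best_glob = ''
    for pattern in globs:
        if fnmatch.fnmatch(filename, pattern) and len(pattern) > len(best_glob):
            best_glob = pattern
    return globs.get(best_glob)

print(best_type('backup.tar.gz'))  # 'gzipped tarball', not 'gzip data'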
Example #16
    def list(self, filter_by=None, incremental=False, fetch_since_days=7):
        server = xmlrpclib.Server(self._pypi_xmlrpc_url)
        packages = server.list_packages()
        if not filter_by:
            return packages

        filtered_packages = []
        for package in packages:
            if not any(fnmatch.fnmatch(package, f) for f in filter_by):
                continue
            filtered_packages.append(package)

        if incremental:
            changelog = server.changelog(int(time.time() - fetch_since_days*24*3600))
            changed_packages = [tp[0] for tp in changelog 
                                if 'file' in tp[3]]
            changed_packages = [package for package in changed_packages if package in filtered_packages]
            return changed_packages
        else:
            return filtered_packages
Example #17
    def type_by_filename(self, filename):
        """Tries to find only one, the best guess for the type."""
        if not filename:
            return None
        best = None
        best_glob = ""
        best_list = []
        for test in self._globs.keys():
            if fnmatch.fnmatch(filename, test):
                if len(test) > len(best_glob):
                    best_glob = test
                    best_list += self._globs[test]

        if best_glob == '':
            return None
        return self._globs[best_glob][0]
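        # NOTE: the early return above makes the fallback logic below unreachable.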

        if len(best_list) > 1:
            # erks. got more than one result. try different approach
            # guess the mimetype through the python mimetype lib
            #import mimetypes
            gtest = None
            import subprocess
            try:
                gtest = subprocess.Popen(['file', '-bzki', filename], stdout=subprocess.PIPE).communicate()[0].strip()
                if gtest.find(';') != -1:
                    gtest = gtest[:gtest.find(';')]
            except OSError:
                pass
            if gtest:
                for dt in best_list:
                    if gtest in dt.mimes:
                        best = dt
            else:
                # use the first one as total fallback :(
                best = best_list[0]
        elif len(best_list):
            best = best_list[0]

        return best
Example #18
 def _build_extra_yum_config(self, branch):
     """Build extra yum configuration for a branch's mock environment
     according to values in the releasers configuration file
     """
     debug('looking for extra yum configuration')
     yum_config = []
     if self.releaser_config.has_option(self.target, EXTRA_YUM_REPOS):
         debug('Appending extra yum configuration')
         yum_config.append(
             self.releaser_config.get(self.target, EXTRA_YUM_REPOS))
     if self.releaser_config.has_option(self.target, EXTRA_YUM_REPOS_FOR):
         debug('Adding branch-specific extra yum configuration')
         for pattern, yum_conf in json.loads(
                 self.releaser_config.get(self.target,
                                          EXTRA_YUM_REPOS_FOR)):
             debug("  matching extra repos pattern '{0}' against '{1}'".
                   format(pattern, branch))
             if fnmatch.fnmatch(branch, pattern):
                 debug("  found extra yum configuration for '{0}'".format(
                     branch))
                 yum_config.append(yum_conf)
     return '\n'.join(yum_config)
Example #19
    def _validator(k, v):
        # Check for exact match
        if k in type_table:
            try:
                return cast_value(v, type_table[k])
            except ValueError:
                pass
            raise TypeError(k + ": invalid type")

        # Check wildcards (regexps in the future?)
        for (candidate_key, candidate_type) in type_table.items():
            if fnmatch.fnmatch(k, candidate_key):
                try:
                    return cast_value(v, candidate_type)
                except ValueError:
                    pass
                raise TypeError(k + ": invalid type")

        # No matches
        if relaxed:
            return v

        # Finally raise a typeerror
        raise TypeError(k + ": can't validate")
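A self-contained sketch of the wildcard-key validation idea used by `_validator` (the type table, `cast_value` helper, and the keys are hypothetical stand-ins, not the original project's):

import fnmatch

type_table = {'timeout': int, 'debug': bool, 'limits.*': int}

def cast_value(value, to_type):
    # Hypothetical stand-in for the cast_value used above.
    if to_type is bool:
        return value.lower() in ('1', 'true', 'yes')
    return to_type(value)

def validate(key, value, relaxed=False):
    # Exact key match first, then fnmatch-style wildcard keys.
    if key in type_table:
        return cast_value(value, type_table[key])
    for candidate_key, candidate_type in type_table.items():
        if fnmatch.fnmatch(key, candidate_key):
            return cast_value(value, candidate_type)
    if relaxed:
        return value
    raise TypeError(key + ": can't validate")

print(validate('timeout', '30'))         # 30
print(validate('limits.memory', '512'))  # 512, via the 'limits.*' wildcard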
Example #20
def _verify_username_vendor_glob(username, username_glob):
    for tmp in username_glob.split(','):
        if fnmatch.fnmatch(username, tmp):
            return True
    return False
Example #21
    def parse_deps(self, filename, ext_module, verify=True):
        """
        Open a Cython file and extract all of its dependencies. 
        
        INPUT: 
            filename -- the file to parse
            verify   -- only return existing files (default True)
        
        OUTPUT:
            list of dependency files
        """
        is_cython_file = lambda f:\
            fnmatch.fnmatch(f,'*.pyx') or \
            fnmatch.fnmatch(f,'*.pxd') or \
            fnmatch.fnmatch(f,'*.pxi')

        # only parse cython files
        if not is_cython_file(filename):
            return []
        
        dirname = os.path.split(filename)[0]
        deps = set()
        if filename.endswith('.pyx'):
            pxd_file = filename[:-4] + '.pxd'
            if os.path.exists(pxd_file):
                deps.add(pxd_file)
        
        raw_deps = []
        f = open(filename)
        for m in dep_regex.finditer(f.read()):
            groups = m.groups()
            modules = groups[0] or groups[1] # cimport or from ... cimport
            if modules is not None:
                for module in modules.split(','):
                    module = module.strip().split(' ')[0] # get rid of 'as' clause
                    if '.' in module:
                        path = module.replace('.', '/') + '.pxd'
                        base_dependency_name = path
                    else:
                        path = "%s/%s.pxd" % (dirname, module)
                        base_dependency_name = "%s.pxd"%module
                    raw_deps.append((path, base_dependency_name))
            else: # include or extern from
                extern_file = groups[2] or groups[3]
                path = '%s/%s'%(dirname, extern_file)
                if not os.path.exists(path):
                    path = extern_file
                raw_deps.append((path, extern_file))

        for path, base_dependency_name in raw_deps:
            # if we can find the file, add it to the dependencies.
            path = os.path.normpath(path)
            if os.path.exists(path):
                deps.add(path)
            # we didn't find the file locally, so check the
            # Cython include path. 
            else:
                found_include = False
                for idir in ext_module.include_dirs + CYTHON_INCLUDE_DIRS + include_dirs + extra_include_dirs:
                    new_path = os.path.normpath(idir + '/' + base_dependency_name)
                    if os.path.exists(new_path):
                        deps.add(new_path)
                        found_include = True
                        break
                    new_path = os.path.normpath(idir + base_dependency_name[:-4] + "/__init__.pxd")
                    if os.path.exists(new_path):
                        deps.add(new_path)
                        found_include = True
                        break
                # so we really couldn't find the dependency -- raise
                # an exception. 
                if not found_include:
                    msg = 'could not find dependency %s included in %s.'%(path, filename)
                    if is_cython_file(path):
                        raise IOError(msg)
                    else:
                        warnings.warn(msg+' I will assume it is a system C/C++ header.')
        f.close()
        return list(deps)
Example #22
 def _find(where, what):
     for dirpath, dirnames, filenames in os.walk(where):
         for item in dirnames + filenames:
             if fnmatch.fnmatch(item, what):
                 return os.path.join(dirpath, item)
     return ''
Example #23
    def parse_deps(self, filename, ext_module, verify=True):
        """
        Open a Cython file and extract all of its dependencies. 
        
        INPUT: 
            filename -- the file to parse
            verify   -- only return existing files (default True)
        
        OUTPUT:
            list of dependency files
        """
        is_cython_file = lambda f:\
            fnmatch.fnmatch(f,'*.pyx') or \
            fnmatch.fnmatch(f,'*.pxd') or \
            fnmatch.fnmatch(f,'*.pxi')

        # only parse cython files
        if not is_cython_file(filename):
            return []

        dirname = os.path.split(filename)[0]
        deps = set()
        if filename.endswith('.pyx'):
            pxd_file = filename[:-4] + '.pxd'
            if os.path.exists(pxd_file):
                deps.add(pxd_file)

        raw_deps = []
        f = open(filename)
        for m in dep_regex.finditer(f.read()):
            groups = m.groups()
            modules = groups[0] or groups[1]  # cimport or from ... cimport
            if modules is not None:
                for module in modules.split(','):
                    module = module.strip().split(' ')[
                        0]  # get rid of 'as' clause
                    if '.' in module:
                        path = module.replace('.', '/') + '.pxd'
                        base_dependency_name = path
                    else:
                        path = "%s/%s.pxd" % (dirname, module)
                        base_dependency_name = "%s.pxd" % module
                    raw_deps.append((path, base_dependency_name))
            else:  # include or extern from
                extern_file = groups[2] or groups[3]
                path = '%s/%s' % (dirname, extern_file)
                if not os.path.exists(path):
                    path = extern_file
                raw_deps.append((path, extern_file))

        for path, base_dependency_name in raw_deps:
            # if we can find the file, add it to the dependencies.
            path = os.path.normpath(path)
            if os.path.exists(path):
                deps.add(path)
            # we didn't find the file locally, so check the
            # Cython include path.
            else:
                found_include = False
                for idir in ext_module.include_dirs + CYTHON_INCLUDE_DIRS + include_dirs + extra_include_dirs:
                    new_path = os.path.normpath(idir + '/' +
                                                base_dependency_name)
                    if os.path.exists(new_path):
                        deps.add(new_path)
                        found_include = True
                        break
                    new_path = os.path.normpath(idir +
                                                base_dependency_name[:-4] +
                                                "/__init__.pxd")
                    if os.path.exists(new_path):
                        deps.add(new_path)
                        found_include = True
                        break
                # so we really couldn't find the dependency -- raise
                # an exception.
                if not found_include:
                    msg = 'could not find dependency %s included in %s.' % (
                        path, filename)
                    if is_cython_file(path):
                        raise IOError(msg)
                    else:
                        warnings.warn(
                            msg +
                            ' I will assume it is a system C/C++ header.')
        f.close()
        return list(deps)
Example #24
        return dst
    else:
        try:
            urlretrieve(url, dst)
            info("  Downloaded: %s => %s", url, dst)
            return dst
        except Exception as e:
            error("Can't download %s: %s", url, e)
            return url


def process_rst(path):
    info("Processing: %s", path)
    output = ""
    with open(path, 'r') as rst:
        for line in rst:
            urls = URL_RE.findall(line)
            for url in urls:
                dst = download_image(url)
                line = line.replace(url, relpath(dst, dirname(path)))
            output = output + line

    with open(path, 'w') as rst:
        rst.write(output)


for directory, subdirs, fnames in walk("."):
    for fname in fnames:
        if fnmatch.fnmatch(fname, "*.rst"):
            process_rst(join(directory, fname))
Example #25
def list_datasets(pattern="*"):
    # import fnmatch directly instead of relying on glob's internal re-export
    import fnmatch
    return [PwaDataset.from_file(x)
            for x in resource_listdir("pwa", "datasets")
            if x.endswith(".yaml") and fnmatch.fnmatch(x, pattern)]