Example #1
1
    def run(self):
        clean_orig.run(self)

        # remove *.pyc, *.pyo
        for _d, _sd, _fs in os.walk("."):
            _t = [x for x in _fs if not x.startswith(".") and (x.endswith(".pyc") or x.endswith(".pyo"))]

            if not _t:
                continue

            for _f in _t:
                os.remove(os.path.join(_d, _f))

        # remove useless directories
        _t = self.extras + ["build", self._get_egg_info_name()]
        if not self.not_dist:
            _t.append("dist")
        else:
            _t.append("*.egg")

        for i in _t:
            for j in glob.glob(i):
                if os.path.isdir(j):
                    shutil.rmtree(j)
                else:
                    os.remove(j)

        # remove all the temporary *.egg-info directories
        for _r, _ds, _fs in os.walk(".."):
            for _d in _ds:
                if os.path.basename(_r).startswith("."):
                    continue

                if _d.endswith(".egg-info"):
                    shutil.rmtree(os.path.join(_r, _d))
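The run() body above belongs to a custom distutils/setuptools clean command (clean_orig is presumably the stock clean command it extends). A minimal sketch of how such a command is usually registered in setup.py follows; the class name and project metadata here are illustrative assumptions, not taken from the example.
# Hedged sketch: only the cmdclass hook is the point, the rest is placeholder metadata.
from distutils.command.clean import clean as clean_orig
from setuptools import setup

class ExtraClean(clean_orig):
    def run(self):
        clean_orig.run(self)
        # ... extra cleanup as in the example above ...

setup(
    name="example-project",
    version="0.0.0",
    cmdclass={"clean": ExtraClean},
)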
Example #2
1
def calculate_current_status(env_root):
    docs_dir = get_sdk_docs_path(env_root)
    current_status = hashlib.md5()
    module_src_dir = os.path.join(env_root, "doc", "module-source")
    for (dirpath, dirnames, filenames) in os.walk(module_src_dir):
        for filename in filenames:
            if filename.endswith(".md"):
                current_status.update(filename)
                current_status.update(str(os.path.getmtime(os.path.join(dirpath, filename))))
    guide_src_dir = os.path.join(docs_dir, "dev-guide-source")
    for (dirpath, dirnames, filenames) in os.walk(guide_src_dir):
        for filename in filenames:
            if filename.endswith(".md"):
                current_status.update(filename)
                current_status.update(str(os.path.getmtime(os.path.join(dirpath, filename))))
    package_dir = os.path.join(env_root, "packages")
    for (dirpath, dirnames, filenames) in os.walk(package_dir):
        for filename in filenames:
            if filename.endswith(".md"):
                current_status.update(filename)
                current_status.update(str(os.path.getmtime(os.path.join(dirpath, filename))))
    base_html_file = os.path.join(docs_dir, "static-files", "base.html")
    current_status.update(base_html_file)
    current_status.update(str(os.path.getmtime(base_html_file)))
    return current_status.digest()
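The digest above is only useful when compared against a previously stored value; a minimal sketch of that comparison, assuming a hypothetical status-file name and that calculate_current_status is importable as shown.
import os

STATUS_FILE = "doc-status.md5"  # hypothetical cache path, not from the project

def docs_need_rebuild(env_root):
    """Return True when the stored digest is missing or differs (sketch)."""
    current = calculate_current_status(env_root)
    if not os.path.exists(STATUS_FILE):
        return True
    with open(STATUS_FILE, "rb") as f:
        return f.read() != current

def remember_status(env_root):
    with open(STATUS_FILE, "wb") as f:
        f.write(calculate_current_status(env_root))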
Example #3
1
def do(renderer, keymap_counter):
    default_packages = ["Default"]
    user_packages = ["User"]
    global_settings = sublime.load_settings("Preferences.sublime-settings")
    ignored_packages = global_settings.get("ignored_packages", [])
    package_control_settings = sublime.load_settings("Package Control.sublime-settings")
    installed_packages = package_control_settings.get("installed_packages", [])
    if len(installed_packages) == 0:
        includes = ".sublime-package"
        os_packages = []
        for (root, dirs, files) in os.walk(sublime.installed_packages_path()):
            for file in files:
                if file.endswith(includes):
                    os_packages.append(file.replace(includes, ""))
        for (root, dirs, files) in os.walk(sublime.packages_path()):
            for dir in dirs:
                os_packages.append(dir)
            break  # just the top level
        installed_packages = []
        for package in os_packages:
            if package not in installed_packages:
                installed_packages.append(package)

    diff = lambda l1, l2: [x for x in l1 if x not in l2]
    active_packages = diff(default_packages + installed_packages + user_packages, ignored_packages)

    keymapsExtractor = KeymapsExtractor(active_packages, keymap_counter)
    worker_thread = WorkerThread(keymapsExtractor, renderer)
    worker_thread.start()
    ThreadProgress(worker_thread, "Searching " + MY_NAME, "Done.", keymap_counter)
Example #4
    def testRemoveDumpFile(self):
        storage = dumpStorage.ProcessedDumpStorage(self.testDir, **self.initKwargs[0])
        self.createDumpSet(storage)
        expectedCount = len(createJDS.jsonFileData)
        dumpFiles = set()

        # should fail quietly
        storage.removeDumpFile(createJDS.jsonBadUuid)

        ooids = createJDS.jsonFileData.keys()
        for dir, dirs, files in os.walk(storage.root):
            dumpFiles.update(files)
        assert expectedCount == len(dumpFiles)

        # should happily remove them each and all
        for ooid in ooids:
            dumpFiles = set()
            storage.removeDumpFile(ooid)
            expectedCount -= 1
            for dir, dirs, files in os.walk(storage.root):
                dumpFiles.update(files)
            assert expectedCount == len(dumpFiles), "\n   %s: expected %d, but %d\n - %s" % (
                ooid,
                expectedCount,
                len(dumpFiles),
                "\n - ".join(dumpFiles),
            )
Example #5
def write_permacache_from_dir(dirname):
    # we want the whole list so that we can display accurate progress
    # information. If we're operating on more than tens of millions of
    # files, we should either bail out or tweak this to not need the
    # whole list at once
    allfiles = []
    for root, dirs, files in os.walk(dirname):
        for f in files:
            allfiles.append(os.path.join(root, f))

    for fname in progress(allfiles, persec=True):
        try:
            write_permacache_from_file(fname)
            os.unlink(fname)
        except:
            mr_tools.status("failed on %r" % fname)
            raise

    mr_tools.status("Removing empty directories")
    for root, dirs, files in os.walk(dirname, topdown=False):
        for d in dirs:
            dname = os.path.join(root, d)
            try:
                os.rmdir(dname)
            except OSError as e:
                if e.errno == errno.ENOTEMPTY:
                    mr_tools.status("%s not empty" % (dname,))
                else:
                    raise
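The comment at the top of the function notes that the full file list exists only to drive progress reporting; a hedged alternative streams paths from a generator and counts them in a cheap first pass (the helper names here are illustrative).
import os

# Illustrative helpers; not part of the original module.
def iter_files(dirname):
    """Yield every file path under dirname without building a list (sketch)."""
    for root, _dirs, files in os.walk(dirname):
        for f in files:
            yield os.path.join(root, f)

def count_files(dirname):
    """Cheap first pass used only to size the progress display."""
    return sum(1 for _ in iter_files(dirname))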
Example #6
1
def walk_directory(dir_name, exts=None):
    walk_dir = "%s/%s" % (WORKING_DIR, dir_name)
    file_list = []
    if exts:
        for root, sub_folders, files in os.walk(walk_dir):
            # Get dir relative to the script
            cur_dir = root.replace("%s/" % WORKING_DIR, "")

            # Iterate through the files in the current dir
            for file in files:
                # Get the extension without the dot
                file_ext = os.path.splitext(file)[1][1:]

                # Only append if it's in the set
                if file_ext in exts:
                    file_list.append("resources/%s/%s" % (cur_dir, file))
                    print "-> %s" % file
    else:
        # No extensions are given
        for root, sub_folders, files in os.walk(walk_dir):
            cur_dir = root.replace("%s/" % WORKING_DIR, "")
            for file in files:
                file_list.append("%s/%s" % (cur_dir, file))

    return file_list
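A hedged usage sketch of walk_directory; the directory names and extension set below are assumptions for illustration only.
# Illustrative calls (arguments are assumptions, not from the project):
images = walk_directory("images", exts=["png", "jpg", "gif"])
everything = walk_directory("docs")  # no extension filter: collect every file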
Example #7
0
File: tests.py Project: durden/stag
    def generateAndCheck(self):
        """Generate site and verify all files exist"""

        self.assertTrue(os.path.isdir(self.src_dir), "Missing %s" % (self.src_dir))

        stag.generate(self.src_dir, self.dest_dir, self.templates)

        self.assertTrue(os.path.isdir(self.dest_dir), "Missing %s" % (self.dest_dir))

        for root, dirs, files in os.walk(self.src_dir):
            for file_name in files:
                md_ext = "".join([os.extsep, "md"])
                markdown_ext = "".join([os.extsep, "markdown"])

                if not file_name.endswith(md_ext) and not file_name.endswith(markdown_ext):
                    continue

                gen_file = re.sub(self.src_dir, self.dest_dir, os.path.join(root, file_name))

                gen_file = os.path.normpath(gen_file.replace(".md", ".html"))
                gen_file = os.path.normpath(gen_file.replace(".markdown", ".html"))

                self.assertTrue(os.path.isfile(gen_file), "Missing %s" % gen_file)

        for root, dirs, files in os.walk(self.dest_dir):
            for gen_file in files:
                self.assertTrue(gen_file.endswith(".html"), "Non-html file in output")
Example #8
0
def main():
    # set the PATH to find Thirdparty on Windows systems
    if os.name == "nt":
        os.environ["PATH"] = THIRDPARTY_PATH + "/boost/bin/"
        os.environ["PATH"] += ";" + THIRDPARTY_PATH + "/pluma/bin/"
        os.environ["PATH"] += ";" + THIRDPARTY_PATH + "/cmake/"

        # find our own components, set the PATH for them
        matches = []
        for root, dirnames, filenames in os.walk(INSTALL_PATH):
            for filename in fnmatch.filter(dirnames, "bin"):
                matches.append(os.path.join(root, filename))
        for path in matches:
            os.environ["PATH"] = os.environ["PATH"] + ";" + path
    else:
        os.environ["LD_LIBRARY_PATH"] = THIRDPARTY_PATH + "/boost/lib/"
        os.environ["LD_LIBRARY_PATH"] += ":" + THIRDPARTY_PATH + "/pluma/lib/"

        # search for projects which need to be tested
    for root, dirnames, filenames in os.walk(BUILD_PATH):
        for filename in fnmatch.filter(filenames, "CTestTestfile.cmake"):
            os.chdir(root)

            # run the tests
            testCmd = []
            testCmd.append(CTEST_EXE)
            testCmd.append("--no-compress-output")
            testCmd.append("-T")
            testCmd.append("Test")
            testCmd.append(".")
            process = subprocess.Popen(testCmd)
            process.wait()
            print "Tests executed with errorcode: " + str(process.returncode)
Example #9
0
    def load_themes(self):
        all_themes = []

        try:  # use find_resources() first for ST3
            for theme_resource in sublime.find_resources("*.sublime-theme"):
                filename = os.path.basename(theme_resource)
                all_themes.append(filename)

        except:  # fallback to walk() for ST2
            for root, dirs, files in os.walk(sublime.packages_path()):
                for filename in (filename for filename in files if filename.endswith(".sublime-theme")):
                    all_themes.append(filename)

            for root, dirs, files in os.walk(sublime.installed_packages_path()):
                for package in (package for package in files if package.endswith(".sublime-package")):
                    zf = zipfile.ZipFile(os.path.join(sublime.installed_packages_path(), package))
                    for filename in (filename for filename in zf.namelist() if filename.endswith(".sublime-theme")):
                        all_themes.append(filename)

        favorite_themes = self.get("themr_favorites", [])
        themes = []

        for theme in all_themes:
            favorited = theme in favorite_themes
            pretty_name = "Theme: " + theme.replace(".sublime-theme", "")
            if favorited:
                pretty_name += u" \N{BLACK STAR}"  # Put a pretty star icon next to favorited themes. :)
            themes.append([pretty_name, theme, favorited])

        themes.sort()
        return themes
Example #10
0
 def upload_new_world(self, world):
     """
     Uploads a new world to the current FTP server connection
     
     :param world: The InfiniteWorld object for the world to upload
     :type world: InfiniteWorld
     """
     path = world.worldFolder.getFilePath("level.dat")[:-10]
     world_name = path.split(os.path.sep)[-1]
     if not self._host.path.exists(world_name):
         self._host.mkdir(world_name)
     self._host.chdir(world_name)
     for root, directory, files in os.walk(world.worldFolder.getFilePath("level.dat")[:-10]):
         for folder in directory:
             target = self._host.path.join(root.replace(path, ""), folder).replace("\\", "", 1).replace("\\", "/")
             print "Target: " + target
             if not "##MCEDIT.TEMP##" in target:
                 if not self._host.path.exists(target):
                     self._host.makedirs(target)
     for root, directory, files in os.walk(world.worldFolder.getFilePath("level.dat")[:-10]):
         for f in files:
             target = self._host.path.join(root.replace(path, ""), f).replace("\\", "", 1).replace("\\", "/")
             print target
             try:
                 self._host.upload(os.path.join(root, f), target)
             except Exception as e:
                 if "226" in e.message:
                     pass
                 else:
                     print "Error: {0}".format(e.message)
Example #11
0
    def upload(self):
        """
        Uploads an edited world to the current FTP server connection
        """
        for root, directory, files in os.walk(os.path.join("ftp", self._worldname)):
            for folder in directory:
                target = (
                    self._host.path.join(root, folder)
                    .replace("ftp" + os.path.sep + self._worldname + "/", "")
                    .replace("\\", "", 1)
                    .replace("\\", "/")
                )
                target = target.replace("ftp" + self._worldname, "")
                if not "##MCEDIT.TEMP##" in target:
                    if not self._host.path.exists(target):
                        self._host.makedirs(target)
        for root, directory, files in os.walk(os.path.join("ftp", self._worldname)):
            for f in files:
                if self._host.path.join(root, f).replace("ftp" + os.path.sep + self._worldname, "").startswith("\\"):
                    target = (
                        self._host.path.join(root, f)
                        .replace("ftp" + os.path.sep + self._worldname, "")
                        .replace("\\", "", 1)
                    )
                else:
                    target = self._host.path.join(root, f).replace("ftp" + os.path.sep + self._worldname, "")

                if "\\" in target:
                    target = target.replace("\\", "/")
                try:
                    self._host.upload(os.path.join(root, f), target)
                except Exception as e:
                    if "226" in e.message:
                        pass
Example #12
0
    def copy(self, destination):
        """
        Copy all registered files to the given destination path. The given
        destination can be an existing directory, or not exist at all. It
        can't be e.g. a file.
        The copy process acts a bit like rsync: files are not copied when they
        don't need to (see mozpack.files for details on file.copy), and files
        existing in the destination directory that aren't registered are
        removed.
        """
        assert isinstance(destination, basestring)
        assert not os.path.exists(destination) or os.path.isdir(destination)
        destination = os.path.normpath(destination)
        dest_files = set()
        for path, file in self:
            destfile = os.path.normpath(os.path.join(destination, path))
            dest_files.add(destfile)
            ensure_parent_dir(destfile)
            file.copy(destfile)

        actual_dest_files = set()
        for root, dirs, files in os.walk(destination):
            for f in files:
                actual_dest_files.add(os.path.normpath(os.path.join(root, f)))
        for f in actual_dest_files - dest_files:
            os.remove(f)
        for root, dirs, files in os.walk(destination):
            if not files and not dirs:
                os.removedirs(root)
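copy() depends on an ensure_parent_dir helper that is not shown in this excerpt; a hedged sketch of what such a helper typically does (the real mozpack implementation may differ).
import errno
import os

# Hedged stand-in; the real mozpack helper may differ.
def ensure_parent_dir(path):
    """Create the parent directory of path if it does not exist yet (sketch)."""
    parent = os.path.dirname(path)
    if not parent:
        return
    try:
        os.makedirs(parent)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise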
Example #13
0
def setup_posix():
    # Specific definitions for Posix installations
    _extra = {}
    _scripts = ["thg"]
    _packages = ["tortoisehg", "tortoisehg.hgqt", "tortoisehg.util"]
    _data_files = [
        (os.path.join("share/pixmaps/tortoisehg", root), [os.path.join(root, file_) for file_ in files])
        for root, dirs, files in os.walk("icons")
    ]
    _data_files += [
        (os.path.join("share", root), [os.path.join(root, file_) for file_ in files])
        for root, dirs, files in os.walk("locale")
    ]
    _data_files += [("lib/nautilus/extensions-2.0/python", ["contrib/nautilus-thg.py"])]

    # Create a config.py.  Distributions will need to supply their own
    cfgfile = os.path.join("tortoisehg", "util", "config.py")
    if not os.path.exists(cfgfile) and not os.path.exists(".hg/requires"):
        f = open(cfgfile, "w")
        f.write('bin_path     = "/usr/bin"\n')
        f.write('license_path = "/usr/share/doc/tortoisehg/Copying.txt.gz"\n')
        f.write('locale_path  = "/usr/share/locale"\n')
        f.write('icon_path    = "/usr/share/pixmaps/tortoisehg/icons"\n')
        f.write("nofork       = True\n")
        f.close()

    return _scripts, _packages, _data_files, _extra
Example #14
0
    def indexPlugins(self):
        self.pluginList = []
        path = "./plugins/"
        # Get plugin filenames only from the plugins directory, This runs the
        # plugins no matter what.
        rootPlugins = os.listdir("./plugins/")
        # Run the root plugins in a new thread
        self.pluginList.append({"nameList": rootPlugins, "path": path})
        # Check if the recv is a privmsg from a channel
        if re.match("^:[^!]*!~?[^@]*@[^\s]*\s(PRIVMSG|privmsg)\s#[^:]*\s:.*", self.recvData) is not None:
            # Run plugins from ./plugins/privmsg/*
            for root, subFolders, files in os.walk("./plugins/privmsg/", followlinks=True):
                # Fetch plugins recursively. This means you can organize
                # plugins in sub folders however you'd like. eg. Have a folder
                # full of entertainment plugins that you can easily disable by
                # prepending '.' to the folder name.
                self.pluginList.append({"nameList": files, "path": root})
        # If recv is a private message to the bot
        elif (
            re.match(
                r"^:[^!]*!~?[^@]*@[^\s]*\s(PRIVMSG|privmsg)\s%s:.*" % (self.config["settings"]["botNick"]), self.recvData
            )
            is not None
        ):
            # Run plugins from ./plugins/privmsgbot/*
            for root, subFolders, files in os.walk("./plugins/privmsgbot/", followlinks=True):
                self.pluginList.append({"nameList": files, "path": root})

        # run plugins from the directory named 'root'
        for root, subFolders, files in os.walk("./plugins/root/", followlinks=True):
            self.pluginList.append({"nameList": files, "path": root})
Example #15
0
def gen_all(wd):
    gen = os.walk(wd)
    root_path, target_dir, _ = next(gen)
    for td in target_dir:
        inner_path, case_dir, __ = next(os.walk(os.path.join(root_path, td)))
        for cd in case_dir:
            yield os.path.abspath(os.path.join(inner_path, cd))
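gen_all only consumes the first yield of each walk; a hedged equivalent built on os.listdir avoids spinning up a walker per subdirectory (purely illustrative, not from the project).
import os

# Illustrative alternative, not from the project.
def gen_all_listdir(wd):
    """Yield absolute paths of the case directories two levels below wd (sketch)."""
    for td in os.listdir(wd):
        target = os.path.join(wd, td)
        if not os.path.isdir(target):
            continue
        for cd in os.listdir(target):
            case = os.path.join(target, cd)
            if os.path.isdir(case):
                yield os.path.abspath(case)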
Example #16
0
def install():
    folders = ["cogre", "common", "contrib", "ede", "eieio", "semantic", "speedbar"]
    for folder in folders:
        pisitools.insinto("/usr/share/emacs/site-lisp/cedet/", folder)

    for root, dirs, files in os.walk(get.workDIR()):
        pisitools.doinfo("%s/*info" % root)
        pisitools.doinfo("%s/*info-*" % root)

    remove = ["AUTHORS", "ONEWS", "README", "NEWS", "INSTALL", "Makefile", "Project.ede", "ChangeLog"]

    for root, dirs, files in os.walk("%s/usr/share/emacs/site-lisp/cedet/" % get.installDIR()):
        for name in files:
            if name in remove:
                shelltools.unlink(os.path.join(root, name))
            elif name.split(".")[-1] in (
                "info",
                "info-1",
                "info-2",
                "info-3",
                "info-4",
                "~",
                "~1~",
                "el~",
                "elc",
                "texi",
            ):
                shelltools.unlink(os.path.join(root, name))
Example #17
0
 def install_gecko(self, gecko_path, marionette):
     """
     Install gecko into the emulator using adb push.  Restart b2g after the
     installation.
     """
     print "installing gecko binaries..."
     # need to remount so we can write to /system/b2g
     self._run_adb(["remount"])
     # See bug 800102.  We use this particular method of installing
     # gecko in order to avoid an adb bug in which adb will sometimes
     # hang indefinitely while copying large files to the system
     # partition.
     for root, dirs, files in os.walk(gecko_path):
         for filename in files:
             data_local_file = os.path.join("/data/local", filename)
             print "pushing", data_local_file
             self.dm.pushFile(os.path.join(root, filename), data_local_file)
     self.dm.shellCheckOutput(["stop", "b2g"])
     for root, dirs, files in os.walk(gecko_path):
         for filename in files:
             data_local_file = os.path.join("/data/local", filename)
             rel_file = os.path.relpath(os.path.join(root, filename), gecko_path)
             system_file = os.path.join("/system/b2g", rel_file)
             print "copying", data_local_file, "to", system_file
             self.dm.shellCheckOutput(["dd", "if=%s" % data_local_file, "of=%s" % system_file])
     print "restarting B2G"
     self.dm.shellCheckOutput(["start", "b2g"])
     self.wait_for_port()
     self.wait_for_system_message(marionette)
Example #18
0
    def _detect_treestyle(self):
        try:
            dirlisting = os.walk(self.get_real_path())
            dirpath, dirnames, filenames = next(dirlisting)

            if not dirnames:
                # No subdirectories
                if filter(self.file_belongs_to_project, filenames):
                    # Translation files found, assume gnu
                    return "gnu"
            else:
                # There are subdirectories
                if filter(lambda dirname: dirname == "templates" or langcode_re.match(dirname), dirnames):
                    # Found language dirs assume nongnu
                    return "nongnu"
                else:
                    # No language subdirs found, look for any translation file
                    for dirpath, dirnames, filenames in os.walk(self.get_real_path()):
                        if filter(self.file_belongs_to_project, filenames):
                            return "gnu"
        except:
            pass

        # Unsure
        return None
Example #19
def get_package_data():
    ret = []
    for t in reduce(
        list.__add__,
        [
            ".git" not in d and [os.path.join(d[len("src/goliat") + 1 :], e) for e in static_types] or []
            for (d, s, f) in os.walk(os.path.join("src/goliat", "evoque"))
        ],
    ):
        ret.append(t)
    for t in reduce(
        list.__add__,
        [
            ".git" not in d and [os.path.join(d[len("src/goliat") + 1 :], e) for e in static_types] or []
            for (d, s, f) in os.walk(os.path.join("src/goliat", "static"))
        ],
    ):
        ret.append(t)
    for t in reduce(
        list.__add__,
        [
            ".git" not in d and [os.path.join(d[len("src/goliat") + 1 :], e) for e in static_types] or []
            for (d, s, f) in os.walk(os.path.join("src/goliat", "web"))
        ],
    ):
        ret.append(t)
    return ret
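The three reduce blocks above differ only in the subdirectory walked; a hedged consolidation that keeps the same behaviour is sketched below (static_types is assumed to be defined at module level, as in the original).
import os

# Hedged consolidation; static_types is assumed to exist as in the original.
def get_package_data_consolidated():
    # Same logic as above, folded into one loop over the three subtrees.
    prefix = "src/goliat"
    ret = []
    for sub in ("evoque", "static", "web"):
        for d, _s, _f in os.walk(os.path.join(prefix, sub)):
            if ".git" in d:
                continue
            ret.extend(os.path.join(d[len(prefix) + 1:], e) for e in static_types)
    return ret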
Example #20
0
    def _read_descriptor_files(self):
        new_processed_files = {}
        remaining_files = list(self._targets)

        while remaining_files and not self._is_stopped.isSet():
            target = remaining_files.pop(0)

            if not os.path.exists(target):
                self._notify_skip_listeners(target, FileMissing())
                continue

            if os.path.isdir(target):
                if stem.prereq.is_python_26():
                    walker = os.walk(target, followlinks=self._follow_links)
                else:
                    walker = os.walk(target)

                self._handle_walker(walker, new_processed_files)
            else:
                self._handle_file(target, new_processed_files)

        self._processed_files = new_processed_files

        if not self._is_stopped.isSet():
            self._unreturned_descriptors.put(FINISHED)

        self._iter_notice.set()
Example #21
def find_snippets():
    global snippets

    new_snippets = []
    # Packages folder
    for root, dirs, files in os.walk(sublime.packages_path()):
        for name in files:
            try:
                ext = os.path.splitext(name)[-1]
                if ext in (".sublime-snippet", ".tmSnippet"):
                    path = os.path.join(root, name)
                    f = open(path, "rb")
                    new_snippets.append(parse_snippet(f, ext))
                    f.close()
                elif ext in (".sublime-package", ".tmBundle"):
                    new_snippets += read_zip(path)

            except:
                pass

    # Installed Packages
    for root, dirs, files in os.walk(sublime.installed_packages_path()):
        for name in files:
            try:
                ext = os.path.splitext(name)[-1]
                if ext == ".sublime-package":
                    path = os.path.join(root, name)
                    new_snippets += read_zip(path)
            except:
                pass

    snippets = new_snippets
Example #22
0
def find_fit_speed_log(out_file, path):
    pattern = "fitlog_time_speed.dat"
    fitlog = []
    for root, dirs, files in os.walk(path):
        for name in files:
            if fnmatch.fnmatch(name, pattern):
                fitlog.append(os.path.join(root, name))

    pattern = "fitlog_time_speed.dat"
    fitlog_time_speed = []
    for root, dirs, files in os.walk(path):
        for name in files:
            if fnmatch.fnmatch(name, pattern):
                fitlog_time_speed.append(os.path.join(root, name))

    string = "plot "
    for my_file in fitlog:
        string = string + "'" + my_file + "' using ($1/60/60):($2) with lp,"

        # for my_file in fitlog_time_speed:
        # string=string+"'"+my_file+"' using ($2) axis x1y2 with lp,"

    string = string[:-1]
    text_file = open(out_file, "w")
    text_file.write(string)
    text_file.close()
Example #23
0
    def findthis(self):
        """Walks through and entire directory to create the checksums exporting
        the result at the end."""
        db = DbWork()

        searched_files = 0
        print self.path
        number_of_files = sum(len(filenames) for path, dirnames, filenames in os.walk(self.path))

        print "Number of files to scan : %s" % number_of_files
        pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=number_of_files).start()
        for root, dir, files in os.walk(self.path):
            # for i in range(number_of_files):

            for f in files:
                searched_files += 1

                try:
                    absolute = os.path.join(root, f)
                    if os.path.isfile(absolute):
                        size = os.path.getsize(absolute)

                        pbar.update(searched_files)
                        if size > self.size:

                            readfile = open(absolute).read(16384)
                            sh = hashlib.sha224(readfile).hexdigest()
                            db.insert(absolute, size, sh)

                except IOError:
                    pass
        pbar.finish()
        db.insert_opts(searched_files, self.size)
        db.export()
Example #24
    def runMac(self):
        import plistlib

        self.logger.debug("AppInstall runMac")
        self.installerName = ["Setup", "Install"]
        ##        self._uninstallMACAPP('photoshopcs4')
        if self.parameter["installerType"] == "RAW":
            for root, dirs, files in os.walk(self.downloadFolder):
                for f in files:
                    if zipfile.is_zipfile(os.path.join(root, os.path.basename(f))):
                        self.unzipbuildPath = root
                        self._unzipfile(os.path.join(root, os.path.basename(f)), root)
                    if ".tar.gz" in os.path.basename(f):
                        self.unzipbuildPath = root
                        tar = tarfile.open(os.path.join(root, os.path.basename(f)), "r:gz")
                        for tarinfo in tar:
                            tar.extract(tarinfo.name, root)
                        tar.close()
                    if ".dmg" in os.path.basename(f):
                        self.unzipbuildPath = self._attachDMG(os.path.join(root, os.path.basename(f)))
        else:
            for root, dirs, files in os.walk(self.downloadFolder):
                for f in files:
                    if "dmg" in os.path.basename(f):
                        self._attachDMG(os.path.join(root, os.path.basename(f)))

        self._installMACAPP(self.app)
        self._cleanupMAC()
Example #25
def read_files(dir, dataset):
    """
    yield data_type(train? val? test?), numpy.ndarray('uint8')
    """
    dir_path = os.path.join(dir, dataset)
    if dataset == "train":
        for (root, dirs, files) in os.walk(dir_path):
            for file in files:
                if not ".txt" in file:
                    label = file.split("_")[0]
                    img_filepath = os.path.join(root, file)
                    yield label, data.imread(img_filepath)
    elif dataset == "val":
        for (root, dirs, files) in os.walk(dir_path):
            for file in files:
                if ".txt" in file:
                    # this is val_annotations.txt
                    f = open(os.path.join(root, file), "r")
                    while 1:
                        line = f.readline()
                        if not line:
                            break
                        line_seg = line.split()
                        img_filepath = os.path.join(root, "images", line_seg[0])
                        label = line_seg[1]
                        yield label, data.imread(img_filepath)
                    f.close()
Example #26
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None, mode="w"):
    """Create a zip file from all the files under 'base_dir'.  The output
    zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
    Python module (if available) or the InfoZIP "zip" utility (if installed
    and found on the default search path).  If neither tool is available,
    raises DistutilsExecError.  Returns the name of the output zip file.
    """
    import zipfile

    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

    def visit(z, dirname, names):
        for name in names:
            path = os.path.normpath(os.path.join(dirname, name))
            if os.path.isfile(path):
                p = path[len(base_dir) + 1 :]
                if not dry_run:
                    z.write(path, p)
                log.debug("adding '%s'" % p)

    if compress is None:
        compress = sys.version >= "2.4"  # avoid 2.3 zipimport bug when 64 bits

    compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
    if not dry_run:
        z = zipfile.ZipFile(zip_filename, mode, compression=compression)
        for dirname, dirs, files in os.walk(base_dir):
            visit(z, dirname, files)
        z.close()
    else:
        for dirname, dirs, files in os.walk(base_dir):
            visit(None, dirname, files)
    return zip_filename
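A hedged usage sketch of make_zipfile; the paths are illustrative.
# Hypothetical call: archive everything under build/lib into dist/example.zip
# (dist/ is created first via mkpath inside the function); the archive name is returned.
archive = make_zipfile("dist/example.zip", "build/lib")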
Example #27
0
def hashlib_manifest(manifest_dir, manifest_textfile, path_to_remove):
    file_count = 0
    for root, directories, filenames in os.walk(manifest_dir):
        filenames = [f for f in filenames if not f[0] == "."]
        directories[:] = [d for d in directories if not d[0] == "."]
        for files in filenames:
            print "Calculating number of files to process in current directory -  %s files        \r" % file_count,
            file_count += 1
    manifest_generator = ""
    md5_counter = 1
    for root, directories, filenames in os.walk(manifest_dir):
        filenames = [f for f in filenames if not f[0] == "."]
        directories[:] = [d for d in directories if not d[0] == "."]
        for files in filenames:
            print "Generating MD5 for %s - file %d of %d" % (os.path.join(root, files), md5_counter, file_count)
            md5 = hashlib_md5(os.path.join(root, files))
            md5_counter += 1
            root2 = os.path.abspath(root).replace(path_to_remove, "")
            try:
                if root2[0] == "/":
                    root2 = root2[1:]
                if root2[0] == "\\":
                    root2 = root2[1:]
            except IndexError:
                pass
            manifest_generator += md5[:32] + "  " + os.path.join(root2, files).replace("\\", "/") + "\n"
    manifest_list = manifest_generator.splitlines()
    files_in_manifest = len(manifest_list)
    # http://stackoverflow.com/a/31306961/2188572
    manifest_list = sorted(manifest_list, key=lambda x: (x[34:]))
    with open(manifest_textfile, "wb") as fo:
        for i in manifest_list:
            fo.write(i + "\n")
Example #28
def main():
    sha1s = {".oct": {}, ".octx": {}}
    # First remove top-level duplicate files
    for path, dirs, filenames in os.walk("."):
        for filename in filenames:
            ext = os.path.splitext(filename)[1]
            if ext in sha1s:
                filepath = os.path.join(path, filename)
                with open(filepath) as fp:
                    sha1sum = sha1(fp.read()).hexdigest()
                if sha1sum in sha1s[ext]:
                    updated = remove_longer(sha1s[ext][sha1sum], filepath)
                    if updated:
                        sha1s[ext][sha1sum] = updated
                else:
                    sha1s[ext][sha1sum] = filepath

    # Second remove files in the same directory if they're contained in a octx
    for path, dirs, filenames in os.walk("."):
        for filename in filenames:
            if filename.endswith(".octx"):  # Compute sha1 for each file in zip
                filepath = os.path.join(path, filename)
                with ZipFile(filepath) as zfp:
                    for info in zfp.infolist():
                        with zfp.open(info) as fp:
                            sha1sum = sha1(fp.read()).hexdigest()
                        if sha1sum in sha1s[".oct"]:
                            updated = remove_if_zip_duplicate(sha1s[".oct"][sha1sum], info, filepath)
                            if updated:
                                sha1s[".oct"][sha1sum] = updated
                        else:
                            sha1s[".oct"][sha1sum] = info, filepath
Example #29
0
    def list_episodes(self, showname, snum=None):
        """
        Get the list of all of the episodes available for a given show for a given season. If no season is specified,
        it will return all of the episodes for the show.
        :param showname: The name of the show
        :param snum: The season whose episodes you want. If not specified, this function will return all of the episodes
                        for the show.
        :return: A list of the episode file names for the requested show.
        :rtype: list[str]
        """
        seasons = self.list_seasons(showname)
        showdirectory = self.show_path(showname)
        ret = []

        if snum is None:
            # Get all the episodes if no season is specified
            for season in seasons:
                walk = os.walk(showdirectory + "/" + season)
                directory, _, episodes = next(walk)
                ret = ret + episodes
        else:
            seasoncandidates = filter(lambda x: mediautlils.seasonnumber(x) == snum, seasons)
            for season in seasoncandidates:
                walk = os.walk(showdirectory + "/" + season)
                directory, _, episodes = next(walk)
                ret = episodes

        # Filter out non-video files
        filtered = []
        for result in ret:
            if any(ext in result for ext in extensionsToCheck):
                filtered.append(result)

        return filtered
Example #30
0
def getFilesForName(name):
    """Get a list of module files for a filename, a module or package name,
    or a directory.
    """
    if not os.path.exists(name):
        # check for glob chars
        if containsAny(name, "*?[]"):
            files = glob.glob(name)
            file_list = []
            for file in files:
                file_list.extend(getFilesForName(file))
            return file_list

        # try to find module or package
        try:
            spec = importlib.util.find_spec(name)
            name = spec.origin if spec else None
        except ImportError:
            name = None
        if not name:
            return []

    if os.path.isdir(name):
        # find all python files in directory
        file_list = []
        for root, dirs, files in os.walk(name):
            file_list.extend(os.path.join(root, f) for f in files if f.endswith(".py"))
        return file_list
    elif os.path.exists(name):
        # a single file
        return [name]

    return []