def wipe_swap_linux(devices, proc_swaps): """Shred the Linux swap file and then reinitilize it""" if None == devices: return if 0 < count_swap_linux(): raise RuntimeError('Cannot wipe swap while it is in use') for device in devices: print "info: wiping swap device '%s'" % device safety_limit_bytes = 16 * 1024**3 # 16 gibibytes actual_size_bytes = get_swap_size_linux(device, proc_swaps) if actual_size_bytes > safety_limit_bytes: raise RuntimeError( 'swap device %s is larger (%d) than expected (%d)' % (device, actual_size_bytes, safety_limit_bytes)) uuid = get_swap_uuid(device) # wipe FileUtilities.wipe_contents(device, truncate=False) # reinitialize print "debug: reinitializing swap device %s" % device args = ['mkswap', device] if uuid: args.append("-U") args.append(uuid) (rc, _, stderr) = General.run_external(args) if 0 != rc: raise RuntimeError(stderr.replace("\n", ""))
def yum_clean():
    """Run 'yum clean all' and return size in bytes recovered.

    Raises RuntimeError if yum is running, is not installed, or reports
    a known error in its output.
    """
    # Refuse to run while yum itself is running (its pid file exists).
    if os.path.exists('/var/run/yum.pid'):
        msg = _(
            "%s cannot be cleaned because it is currently running. Close it, and try again."
        ) % "Yum"
        raise RuntimeError(msg)
    if not FileUtilities.exe_exists('yum'):
        raise RuntimeError(_('Executable not found: %s') % 'yum')
    # Bytes freed are measured as the change in the cache directory size.
    old_size = FileUtilities.getsizedir('/var/cache/yum')
    args = ['yum', "--enablerepo=*", 'clean', 'all']
    p = subprocess.Popen(args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
    non_blank_line = ""
    while True:
        line = p.stdout.readline().replace("\n", "")
        if len(line) > 2:
            # remember the last substantive line for error reporting below
            non_blank_line = line
        if -1 != line.find('You need to be root'):
            # Seen before Fedora 13
            raise RuntimeError(line)
        if -1 != line.find('Cannot remove rpmdb file'):
            # Since first in Fedora 13
            raise RuntimeError(line)
        if -1 != line.find('Another app is currently holding'):
            print "debug: yum: '%s'" % line
            # another process holds the lock: re-baseline the size so the
            # final delta does not count its activity
            old_size = FileUtilities.getsizedir('/var/cache/yum')
        if "" == line and p.poll() != None:
            # empty read plus an exit code means the process is done
            break
    print 'debug: yum process return code = %d' % p.returncode
    if p.returncode > 0:
        raise RuntimeError(non_blank_line)
    new_size = FileUtilities.getsizedir('/var/cache/yum')
    return old_size - new_size
def run_cleaner_cmd(cmd, args, freed_space_regex=r'([\d.]+[kMGTE]?B?)',
                    error_line_regexes=None):
    """Run an external cleaning command and return how much space was
    (reportedly) freed, in bytes.

    The subprocess shouldn't need any user input and the user should have
    the necessary rights.

    freed_space_regex is applied to every output line; when it matches,
    the value captured by its single group is converted via
    FileUtilities.human_to_bytes and added to the running total.

    Raises RuntimeError when cmd is not installed or an output line matches
    one of error_line_regexes; subprocess.CalledProcessError when cmd exits
    non-zero.
    """
    if not FileUtilities.exe_exists(cmd):
        raise RuntimeError(_('Executable not found: %s') % cmd)
    # BUG FIX: the previous default r'[\d.]+[kMGTE]?B?' had no capture
    # group, so m.group(1) raised IndexError whenever the default matched.
    freed_space_regex = re.compile(freed_space_regex)
    error_line_regexes = [re.compile(regex)
                          for regex in error_line_regexes or []]
    # LC_ALL=C keeps the output in English so the regexes match reliably.
    output = subprocess.check_output([cmd] + args, stderr=subprocess.STDOUT,
                                     universal_newlines=True,
                                     env={'LC_ALL': 'C'})
    freed_space = 0
    for line in output.split('\n'):
        m = freed_space_regex.match(line)
        if m is not None:
            freed_space += FileUtilities.human_to_bytes(m.group(1))
        for error_re in error_line_regexes:
            if error_re.search(line):
                raise RuntimeError('Invalid output from %s: %s' % (cmd, line))
    return freed_space
def delete_chrome_history(path):
    """Clean history from History and Favicon files without affecting bookmarks"""
    # Build the WHERE clause so bookmarked URL rows are never shredded.
    bookmark_ids = get_chrome_bookmark_ids(path)
    if bookmark_ids:
        where = "where id not in (%s) " % ",".join(
            [str(i) for i in bookmark_ids])
    else:
        where = ""
    statements = [
        __shred_sqlite_char_columns('urls', ('url', 'title'), where),
        __shred_sqlite_char_columns('visits'),
        __shred_sqlite_char_columns('keyword_search_terms',
                                    ('lower_term', 'term')),
    ]
    ver = __get_chrome_history(path)
    if ver >= 20:
        # downloads, segments, segment_usage first seen in Chrome 14,
        # Google Chrome 15 (database version = 20).
        # Google Chrome 30 (database version 28) doesn't have full_path,
        # but it does have current_path and target_path.
        if ver >= 28:
            statements.append(__shred_sqlite_char_columns(
                'downloads', ('current_path', 'target_path')))
            statements.append(__shred_sqlite_char_columns(
                'downloads_url_chains', ('url', )))
        else:
            statements.append(__shred_sqlite_char_columns(
                'downloads', ('full_path', 'url')))
        statements.append(__shred_sqlite_char_columns('segments', ('name', )))
        statements.append(__shred_sqlite_char_columns('segment_usage'))
    FileUtilities.execute_sqlite3(path, "".join(statements))
def delete_chrome_history(path):
    """Clean history from History and Favicon files without affecting bookmarks"""
    # Shred URL and title of every history row that is not a bookmark.
    cols = ('url', 'title')
    where = ""
    ids_int = get_chrome_bookmark_ids(path)
    if ids_int:
        ids_str = ",".join([str(id0) for id0 in ids_int])
        where = "where id not in (%s) " % ids_str
    cmds = __shred_sqlite_char_columns('urls', cols, where)
    cmds += __shred_sqlite_char_columns('visits')
    cols = ('lower_term', 'term')
    cmds += __shred_sqlite_char_columns('keyword_search_terms', cols)
    # Table set varies with the History database schema version.
    ver = __get_chrome_history(path)
    if ver >= 20:
        # downloads, segments, segment_usage first seen in Chrome 14, Google Chrome 15 (database version = 20)
        # Google Chrome 30 (database version 28) doesn't have full_path, but it
        # does have current_path and target_path
        if ver >= 28:
            cmds += __shred_sqlite_char_columns(
                'downloads', ('current_path', 'target_path'))
            cmds += __shred_sqlite_char_columns(
                'downloads_url_chains', ('url', ))
        else:
            cmds += __shred_sqlite_char_columns(
                'downloads', ('full_path', 'url'))
        cmds += __shred_sqlite_char_columns('segments', ('name',))
        cmds += __shred_sqlite_char_columns('segment_usage')
    FileUtilities.execute_sqlite3(path, cmds)
def apt_autoremove():
    """Run 'apt-get autoremove' and return the size (un-rounded, in bytes)
    of freed space.

    Raises RuntimeError if apt-get is missing or prints an error line.
    """
    if not FileUtilities.exe_exists('apt-get'):
        raise RuntimeError(_('Executable not found: %s') % 'apt-get')
    args = ['apt-get', '--yes', 'autoremove']
    process = subprocess.Popen(args,
                               stderr=subprocess.STDOUT,
                               stdout=subprocess.PIPE)
    total_bytes = 0
    while True:
        line = process.stdout.readline().replace("\n", "")
        if line.startswith('E: '):
            # apt-get prefixes error messages with 'E: '
            raise RuntimeError(line)
        # example: After this operation, 74.7MB disk space will be freed.
        match = re.search(r", ([0-9.]+[a-zA-Z]{2}) disk space will be freed",
                          line)
        if match:
            # IDIOM FIX: match.group(1) replaces the obscure
            # match.groups(0)[0] (same value when the group matched).
            pkg_bytes_str = match.group(1)
            pkg_bytes = FileUtilities.human_to_bytes(pkg_bytes_str.upper())
            total_bytes += pkg_bytes
        if "" == line and process.poll() is not None:
            # empty read plus exit code set: process finished
            break
    return total_bytes
def apt_autoclean():
    """Run 'apt-get autoclean' and return the size (un-rounded, in bytes)
    of freed space.

    Raises RuntimeError if apt-get is missing or prints an error line.
    """
    if not FileUtilities.exe_exists('apt-get'):
        raise RuntimeError(_('Executable not found: %s') % 'apt-get')
    args = ['apt-get', 'autoclean']
    process = subprocess.Popen(args,
                               stderr=subprocess.STDOUT,
                               stdout=subprocess.PIPE)
    total_bytes = 0
    while True:
        line = process.stdout.readline().replace("\n", "")
        if line.startswith('E: '):
            # apt-get prefixes error messages with 'E: '
            raise RuntimeError(line)
        # example: Del cups-common 1.3.9-17ubuntu3 [1165kB]
        # BUG FIX: the pattern is now a raw string; the original contained
        # the invalid escape sequence '\[', deprecated in Python 3.
        match = re.search(r"^Del .*\[([0-9.]+[a-zA-Z]{2})\]", line)
        if match:
            # IDIOM FIX: match.group(1) replaces match.groups(0)[0]
            pkg_bytes_str = match.group(1)
            pkg_bytes = FileUtilities.human_to_bytes(pkg_bytes_str.upper())
            total_bytes += pkg_bytes
        if "" == line and process.poll() is not None:
            break
    return total_bytes
def delete_chrome_history(path):
    """Clean history from History and Favicon files without affecting bookmarks"""
    where_clause = ""
    bookmarked = get_chrome_bookmark_ids(path)
    if bookmarked:
        # Exclude bookmarked rows from shredding.
        id_list = ",".join([str(row_id) for row_id in bookmarked])
        where_clause = "where id not in (%s) " % id_list
    cmds = __shred_sqlite_char_columns("urls", ("url", "title"), where_clause)
    cmds += __shred_sqlite_char_columns("visits")
    cmds += __shred_sqlite_char_columns("keyword_search_terms",
                                        ("lower_term", "term"))
    schema_ver = __get_chrome_history(path)
    if schema_ver >= 20:
        # downloads, segments, segment_usage first seen in Chrome 14,
        # Google Chrome 15 (database version = 20)
        if schema_ver >= 28:
            # Google Chrome 30 (database version 28) doesn't have full_path,
            # but it does have current_path and target_path
            cmds += __shred_sqlite_char_columns(
                "downloads", ("current_path", "target_path"))
            cmds += __shred_sqlite_char_columns(
                "downloads_url_chains", ("url",))
        else:
            cmds += __shred_sqlite_char_columns(
                "downloads", ("full_path", "url"))
        cmds += __shred_sqlite_char_columns("segments", ("name",))
        cmds += __shred_sqlite_char_columns("segment_usage")
    FileUtilities.execute_sqlite3(path, cmds)
def execute(self, really_delete):
    """Make changes and return results.

    NOTE(review): as visible in this excerpt the generator ends after
    setting ret['label']; a final `yield ret` presumably follows outside
    this view — confirm against the full file.
    """
    # Whitelisted paths are reported but never deleted.
    if FileUtilities.whitelisted(self.path):
        yield whitelist(self.path)
        return
    ret = {
        # TRANSLATORS: This is the label in the log indicating will be
        # deleted (for previews) or was actually deleted
        'label': _('Delete'),
        'n_deleted': 1,
        'n_special': 0,
        'path': self.path,
        'size': FileUtilities.getsize(self.path)}
    if really_delete:
        try:
            FileUtilities.delete(self.path, self.shred)
        except WindowsError, e:
            # WindowsError: [Error 32] The process cannot access the file because it is being
            # used by another process: u'C:\\Documents and
            # Settings\\username\\Cookies\\index.dat'
            # winerror 32 = sharing violation, 5 = access denied;
            # anything else is unexpected, so re-raise it.
            if 32 != e.winerror and 5 != e.winerror:
                raise
            try:
                Windows.delete_locked_file(self.path)
            except:
                raise
            else:
                # TRANSLATORS: The file will be deleted when the
                # system reboots
                ret['label'] = _('Mark for deletion')
def execute(self, really_delete):
    """Make changes and return results.

    NOTE(review): the excerpt ends after setting ret['label']; the final
    `yield ret` is presumably outside this view — confirm in the full file.
    """
    # Never delete whitelisted paths; just report them.
    if FileUtilities.whitelisted(self.path):
        yield whitelist(self.path)
        return
    ret = {
        # TRANSLATORS: This is the label in the log indicating will be
        # deleted (for previews) or was actually deleted
        'label': _('Delete'),
        'n_deleted': 1,
        'n_special': 0,
        'path': self.path,
        'size': FileUtilities.getsize(self.path)
    }
    if really_delete:
        try:
            FileUtilities.delete(self.path, self.shred)
        except WindowsError, e:
            # WindowsError: [Error 32] The process cannot access the file because it is being
            # used by another process: u'C:\\Documents and
            # Settings\\username\\Cookies\\index.dat'
            # Only sharing violations (32) and access denied (5) are
            # retried via the locked-file mechanism; re-raise the rest.
            if 32 != e.winerror and 5 != e.winerror:
                raise
            try:
                Windows.delete_locked_file(self.path)
            except:
                raise
            else:
                # TRANSLATORS: The file will be deleted when the
                # system reboots
                ret['label'] = _('Mark for deletion')
def apt_autoremove():
    """Run 'apt-get autoremove' and return the size (un-rounded, in bytes) of freed space"""
    if not FileUtilities.exe_exists('apt-get'):
        raise RuntimeError(_('Executable not found: %s') % 'apt-get')
    proc = subprocess.Popen(['apt-get', '--yes', 'autoremove'],
                            stderr=subprocess.STDOUT,
                            stdout=subprocess.PIPE)
    # example line: After this operation, 74.7MB disk space will be freed.
    size_pattern = re.compile(
        r", ([0-9.]+[a-zA-Z]{2}) disk space will be freed")
    freed = 0
    while True:
        line = proc.stdout.readline().replace("\n", "")
        if line.startswith('E: '):
            raise RuntimeError(line)
        found = size_pattern.search(line)
        if found:
            freed += FileUtilities.human_to_bytes(found.group(1).upper())
        if line == "" and proc.poll() is not None:
            # empty read and exit code available: the process is done
            break
    return freed
def yum_clean():
    """Run 'yum clean all' and return size in bytes recovered.

    Raises RuntimeError when yum is already running, is not installed,
    or emits a known error line.
    """
    # yum's pid file indicates it is currently running.
    if os.path.exists('/var/run/yum.pid'):
        msg = _(
            "%s cannot be cleaned because it is currently running. Close it, and try again.") % "Yum"
        raise RuntimeError(msg)
    if not FileUtilities.exe_exists('yum'):
        raise RuntimeError(_('Executable not found: %s') % 'yum')
    # Space recovered = cache directory size before minus after.
    old_size = FileUtilities.getsizedir('/var/cache/yum')
    args = ['yum', "--enablerepo=*", 'clean', 'all']
    p = subprocess.Popen(args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
    non_blank_line = ""
    while True:
        line = p.stdout.readline().replace("\n", "")
        if len(line) > 2:
            # keep the last substantive line to use as an error message
            non_blank_line = line
        if -1 != line.find('You need to be root'):
            # Seen before Fedora 13
            raise RuntimeError(line)
        if -1 != line.find('Cannot remove rpmdb file'):
            # Since first in Fedora 13
            raise RuntimeError(line)
        if -1 != line.find('Another app is currently holding'):
            print "debug: yum: '%s'" % line
            # lock held elsewhere: re-baseline so the delta stays honest
            old_size = FileUtilities.getsizedir('/var/cache/yum')
        if "" == line and p.poll() != None:
            # EOF plus exit status: the subprocess has finished
            break
    print 'debug: yum process return code = %d' % p.returncode
    if p.returncode > 0:
        raise RuntimeError(non_blank_line)
    new_size = FileUtilities.getsizedir('/var/cache/yum')
    return old_size - new_size
def addPodcast(url):
    """Register a podcast feed URL in the .feeds file.

    Looks up the podcast's name from the URL, sanitizes it for the file
    system, and appends 'url,name' to .feeds. Prints an error and returns
    early if no name can be determined.
    """
    name = fileUtil.getPodcastName(url)  # Gets the name of the podcast
    if name is None:  # Checks if it has found no name
        print("Error - There has been an issue getting the podcasts name...\n"
              )  # If so this is an error
        return
    name = fileUtil.cleanForWindows(name)  # Removes invalid characters
    # BUG FIX: the original did `file = open(...)` then `file.close` —
    # missing the call parentheses, so the file was never explicitly
    # closed (and the builtin `file` was shadowed). Use a context manager.
    with open(".feeds", 'a') as feeds_file:
        feeds_file.writelines(url + "," + name + "\n")
def saveAs(canvas, name, outputFormats=('png',), outputFolder=''):
    """Save a canvas under each requested output format.

    canvas -- object providing RedrawAxis() and SaveAs(); presumably a
        ROOT TCanvas — confirm with callers
    name -- base file name, without extension
    outputFormats -- iterable of extensions, e.g. ('png', 'pdf')
    outputFolder -- optional destination directory ('' = current dir)
    """
    canvas.RedrawAxis()
    # Normalize the folder so simple string concatenation below is valid.
    if outputFolder and not outputFolder.endswith('/'):
        outputFolder += '/'
    # IDIOM FIX: the default was a mutable list literal; a tuple avoids the
    # shared-mutable-default pitfall while remaining backward compatible.
    for outputFormat in outputFormats:
        fullFileName = outputFolder + name + '.' + outputFormat
        if '/' in fullFileName:
            # Ensure the target directory exists before saving into it.
            path = fullFileName[:fullFileName.rfind('/')]
            FileUtilities.createFolderIfDoesNotExist(path)
        canvas.SaveAs(fullFileName)
def execute(self, really_delete):
    """Run this function-command and report the result.

    NOTE(review): the excerpt ends after setting ret['size']; a final
    `yield ret` presumably follows outside this view — confirm.
    """
    # A path-less command cannot be whitelisted; only check real paths.
    if None != self.path and FileUtilities.whitelisted(self.path):
        yield whitelist(self.path)
        return
    ret = {
        'label': self.label,
        'n_deleted': 0,
        'n_special': 1,
        'path': self.path,
        'size': None
    }
    if really_delete:
        if None == self.path:
            # Function takes no path. It returns the size.
            func_ret = self.func()
            if isinstance(func_ret, types.GeneratorType):
                # function returned generator
                for func_ret in self.func():
                    if True == func_ret or isinstance(func_ret, tuple):
                        # Return control to GTK idle loop.
                        # If tuple, then display progress.
                        yield func_ret
            # either way, func_ret should be an integer
            assert isinstance(func_ret, (int, long))
            ret['size'] = func_ret
        else:
            if os.path.isdir(self.path):
                raise RuntimeError(
                    'Attempting to run file function %s on directory %s' %
                    (self.func.func_name, self.path))
            # Function takes a path. We check the size.
            oldsize = FileUtilities.getsize(self.path)
            try:
                self.func(self.path)
            except DatabaseError, e:
                # Tolerate encrypted or missing SQLite databases;
                # re-raise any other database error.
                if -1 == e.message.find('file is encrypted or is not a database') and \
                   -1 == e.message.find('or missing database'):
                    raise
                print 'Warning:', e.message
                return
            try:
                newsize = FileUtilities.getsize(self.path)
            except OSError, e:
                from errno import ENOENT
                if e.errno == ENOENT:
                    # file does not exist
                    newsize = 0
                else:
                    raise
            ret['size'] = oldsize - newsize
def delete_chrome_favicons(path):
    """Delete Google Chrome and Chromium favicons not in use in history or bookmarks"""
    # The sibling History database (when present) defines which URLs to keep.
    path_history = os.path.join(os.path.dirname(path), 'History')
    ver = __get_chrome_history(path)
    cmds = ""
    if ver in [4, 20, 22, 23, 25, 26, 28, 29, 32]:
        # Version 4 includes Chromium 12
        # Version 20 includes Chromium 14, Google Chrome 15, Google Chrome 19
        # Version 22 includes Google Chrome 20
        # Version 25 is Google Chrome 26
        # Version 26 is Google Chrome 29
        # Version 28 is Google Chrome 30
        # Version 29 is Google Chrome 37
        # Version 32 is Google Chrome 51
        # icon_mapping: shred page URLs no longer present in history
        cols = ('page_url',)
        where = None
        if os.path.exists(path_history):
            cmds += "attach database \"%s\" as History;" % path_history
            where = "where page_url not in (select distinct url from History.urls)"
        cmds += __shred_sqlite_char_columns('icon_mapping', cols, where)
        # favicon images: shred bitmaps whose mapping row is gone
        cols = ('image_data', )
        where = "where id not in (select distinct id from icon_mapping)"
        cmds += __shred_sqlite_char_columns('favicon_bitmaps', cols, where)
        # favicons
        # Google Chrome 30 (database version 28): image_data moved to table
        # favicon_bitmaps
        if ver < 28:
            cols = ('url', 'image_data')
        else:
            cols = ('url', )
        where = "where id not in (select distinct icon_id from icon_mapping)"
        cmds += __shred_sqlite_char_columns('favicons', cols, where)
    elif 3 == ver:
        # Version 3 includes Google Chrome 11
        cols = ('url', 'image_data')
        where = None
        if os.path.exists(path_history):
            cmds += "attach database \"%s\" as History;" % path_history
            where = "where id not in(select distinct favicon_id from History.urls)"
        cmds += __shred_sqlite_char_columns('favicons', cols, where)
    else:
        # Unknown schema version: abort rather than risk corrupting data.
        raise RuntimeError('%s is version %d' % (path, ver))
    FileUtilities.execute_sqlite3(path, cmds)
def delete_chrome_favicons(path):
    """Delete Google Chrome and Chromium favicons not in use in history or bookmarks"""
    # Attach the neighboring History database (if any) to decide what stays.
    path_history = os.path.join(os.path.dirname(path), 'History')
    ver = __get_chrome_history(path)
    cmds = ""
    if ver in [4, 20, 22, 23, 25, 26, 28, 29, 32]:
        # Version 4 includes Chromium 12
        # Version 20 includes Chromium 14, Google Chrome 15, Google Chrome 19
        # Version 22 includes Google Chrome 20
        # Version 25 is Google Chrome 26
        # Version 26 is Google Chrome 29
        # Version 28 is Google Chrome 30
        # Version 29 is Google Chrome 37
        # Version 32 is Google Chrome 51
        # icon_mapping: drop page URLs that history no longer references
        cols = ('page_url', )
        where = None
        if os.path.exists(path_history):
            cmds += "attach database \"%s\" as History;" % path_history
            where = "where page_url not in (select distinct url from History.urls)"
        cmds += __shred_sqlite_char_columns('icon_mapping', cols, where)
        # favicon images: remove bitmaps orphaned by the mapping cleanup
        cols = ('image_data', )
        where = "where id not in (select distinct id from icon_mapping)"
        cmds += __shred_sqlite_char_columns('favicon_bitmaps', cols, where)
        # favicons
        # Google Chrome 30 (database version 28): image_data moved to table
        # favicon_bitmaps
        if ver < 28:
            cols = ('url', 'image_data')
        else:
            cols = ('url', )
        where = "where id not in (select distinct icon_id from icon_mapping)"
        cmds += __shred_sqlite_char_columns('favicons', cols, where)
    elif 3 == ver:
        # Version 3 includes Google Chrome 11
        cols = ('url', 'image_data')
        where = None
        if os.path.exists(path_history):
            cmds += "attach database \"%s\" as History;" % path_history
            where = "where id not in(select distinct favicon_id from History.urls)"
        cmds += __shred_sqlite_char_columns('favicons', cols, where)
    else:
        # Unknown schema version: fail loudly instead of guessing.
        raise RuntimeError('%s is version %d' % (path, ver))
    FileUtilities.execute_sqlite3(path, cmds)
def execute(self, really_delete):
    """Run this function-command and report the result.

    NOTE(review): the visible text ends after computing ret['size'];
    the final `yield ret` is presumably outside this excerpt — confirm.
    """
    # Only path-based commands can be whitelisted.
    if None != self.path and FileUtilities.whitelisted(self.path):
        yield whitelist(self.path)
        return
    ret = {
        'label': self.label,
        'n_deleted': 0,
        'n_special': 1,
        'path': self.path,
        'size': None}
    if really_delete:
        if None == self.path:
            # Function takes no path. It returns the size.
            func_ret = self.func()
            if isinstance(func_ret, types.GeneratorType):
                # function returned generator
                for func_ret in self.func():
                    if True == func_ret or isinstance(func_ret, tuple):
                        # Return control to GTK idle loop.
                        # If tuple, then display progress.
                        yield func_ret
            # either way, func_ret should be an integer
            assert isinstance(func_ret, (int, long))
            ret['size'] = func_ret
        else:
            if os.path.isdir(self.path):
                raise RuntimeError('Attempting to run file function %s on directory %s' % \
                    (self.func.func_name, self.path))
            # Function takes a path. We check the size.
            oldsize = FileUtilities.getsize(self.path)
            try:
                self.func(self.path)
            except DatabaseError, e:
                # Encrypted or missing SQLite databases are tolerated;
                # everything else is re-raised.
                if -1 == e.message.find('file is encrypted or is not a database') and \
                   -1 == e.message.find('or missing database'):
                    raise
                print 'Warning:', e.message
                return
            try:
                newsize = FileUtilities.getsize(self.path)
            except OSError, e:
                from errno import ENOENT
                if e.errno == ENOENT:
                    # file does not exist
                    newsize = 0
                else:
                    raise
            ret['size'] = oldsize - newsize
def delete_chrome_keywords(path):
    """Delete keywords table in Chromium/Google Chrome 'Web Data' database"""
    columns = ('short_name', 'keyword', 'favicon_url',
               'originating_url', 'suggest_url')
    # Rows with date_created = 0 are spared — presumably the preset search
    # engines; confirm against the Web Data schema.
    condition = "where not date_created = 0"
    cmds = __shred_sqlite_char_columns('keywords', columns, condition)
    cmds += "update keywords set usage_count = 0;"
    ver = __get_chrome_history(path, 'Web Data')
    if 43 <= ver < 49:
        # keywords_backup first appeared in Google Chrome 17 / Chromium 17
        # (Web Data version 43) and is gone again in Google Chrome 25.
        cmds += __shred_sqlite_char_columns('keywords_backup', columns,
                                            condition)
        cmds += "update keywords_backup set usage_count = 0;"
    FileUtilities.execute_sqlite3(path, cmds)
def execute(self, really_delete):
    """Make changes and return results"""
    if FileUtilities.whitelisted(self.path):
        yield whitelist(self.path)
        return
    result = {
        "label": _("Clean file"),
        "n_deleted": 0,
        "n_special": 1,
        "path": self.path,
        "size": None,
    }
    if really_delete:
        # Report bytes freed as the change in file size around the clean.
        size_before = FileUtilities.getsize(self.path)
        FileUtilities.clean_json(self.path, self.address)
        size_after = FileUtilities.getsize(self.path)
        result["size"] = size_before - size_after
    yield result
def yum_clean():
    """Run 'yum clean all' and return size in bytes recovered"""
    # Refuse to run while yum itself is running (pid file present).
    if os.path.exists('/var/run/yum.pid'):
        msg = _("%s cannot be cleaned because it is currently running. Close it, and try again.") % "Yum"
        raise RuntimeError(msg)
    # Freed space is measured by cache directory size, not parsed from the
    # command output, so a never-matching regex is passed deliberately.
    bytes_before = FileUtilities.getsizedir('/var/cache/yum')
    run_cleaner_cmd('yum', ['--enablerepo=*', 'clean', 'all'],
                    '^unused regex$',
                    ['You need to be root', 'Cannot remove rpmdb file'])
    bytes_after = FileUtilities.getsizedir('/var/cache/yum')
    return bytes_before - bytes_after
def delete_mozilla_url_history(path):
    """Delete URL history in Mozilla places.sqlite (Firefox 3 and family)"""
    cmds = ""
    # delete the URLs in moz_places (only rows no bookmark points at)
    places_suffix = "where id in (select " \
        "moz_places.id from moz_places " \
        "left join moz_bookmarks on moz_bookmarks.fk = moz_places.id " \
        "where moz_bookmarks.id is null); "
    cols = ('url', 'rev_host', 'title')
    cmds += __shred_sqlite_char_columns('moz_places', cols, places_suffix)
    # delete any orphaned annotations in moz_annos
    annos_suffix = "where id in (select moz_annos.id " \
        "from moz_annos " \
        "left join moz_places " \
        "on moz_annos.place_id = moz_places.id " \
        "where moz_places.id is null); "
    cmds += __shred_sqlite_char_columns('moz_annos', ('content', ),
                                        annos_suffix)
    # delete any orphaned favicons
    fav_suffix = "where id not in (select favicon_id " \
        "from moz_places where favicon_id is not null ); "
    cols = ('url', 'data')
    cmds += __shred_sqlite_char_columns('moz_favicons', cols, fav_suffix)
    # delete any orphaned history visits
    cmds += "delete from moz_historyvisits where place_id not " \
        "in (select id from moz_places where id is not null); "
    # delete any orphaned input history
    input_suffix = "where place_id not in (select distinct id from moz_places)"
    cols = ('input', )
    cmds += __shred_sqlite_char_columns('moz_inputhistory', cols, input_suffix)
    # delete the whole moz_hosts table
    # Reference: https://bugzilla.mozilla.org/show_bug.cgi?id=932036
    # Reference:
    # https://support.mozilla.org/en-US/questions/937290#answer-400987
    if __sqlite_table_exists(path, 'moz_hosts'):
        cmds += __shred_sqlite_char_columns('moz_hosts', ('host', ))
        cmds += "delete from moz_hosts;"
    # execute the commands
    FileUtilities.execute_sqlite3(path, cmds)
def get_commands(self, option_id):
    """Yield cleaning commands for the given option.

    NOTE(review): indentation of the final 'recent_documents' section was
    reconstructed from a collapsed source; confirm both inner loops sit
    inside the `for prefix in self.prefixes` loop.
    """
    # paths for which to run expand_glob_join
    egj = []
    if 'recent_documents' == option_id:
        egj.append(
            "user/registry/data/org/openoffice/Office/Histories.xcu")
        egj.append(
            "user/registry/cache/org.openoffice.Office.Histories.dat")
    # NOTE(review): the `and not 'cache' == option_id` clause is redundant —
    # option_id cannot equal both strings at once.
    if 'recent_documents' == option_id and not 'cache' == option_id:
        egj.append("user/registry/cache/org.openoffice.Office.Common.dat")
    for egj_ in egj:
        for prefix in self.prefixes:
            for path in FileUtilities.expand_glob_join(prefix, egj_):
                if 'nt' == os.name:
                    path = os.path.normpath(path)
                if os.path.lexists(path):
                    yield Command.Delete(path)
    if 'cache' == option_id:
        dirs = []
        for prefix in self.prefixes:
            dirs += FileUtilities.expand_glob_join(prefix,
                                                   "user/registry/cache/")
        for dirname in dirs:
            if 'nt' == os.name:
                dirname = os.path.normpath(dirname)
            for filename in children_in_directory(dirname, False):
                yield Command.Delete(filename)
    if 'recent_documents' == option_id:
        for prefix in self.prefixes:
            for path in FileUtilities.expand_glob_join(
                    prefix,
                    "user/registry/data/org/openoffice/Office/Common.xcu"):
                if os.path.lexists(path):
                    yield Command.Function(path,
                                           Special.delete_ooo_history,
                                           _('Delete the usage history'))
            # ~/.openoffice.org/3/user/registrymodifications.xcu
            # Apache OpenOffice.org 3.4.1 from openoffice.org on Ubuntu 13.04
            # %AppData%\OpenOffice.org\3\user\registrymodifications.xcu
            # Apache OpenOffice.org 3.4.1 from openoffice.org on Windows XP
            for path in FileUtilities.expand_glob_join(
                    prefix, "user/registrymodifications.xcu"):
                if os.path.lexists(path):
                    yield Command.Function(
                        path,
                        Special.delete_office_registrymodifications,
                        _('Delete the usage history'))
def delete_mozilla_url_history(path):
    """Delete URL history in Mozilla places.sqlite (Firefox 3 and family)"""
    cmds = ""
    # delete the URLs in moz_places; bookmarked rows are excluded by the
    # left join against moz_bookmarks
    places_suffix = "where id in (select " \
        "moz_places.id from moz_places " \
        "left join moz_bookmarks on moz_bookmarks.fk = moz_places.id " \
        "where moz_bookmarks.id is null); "
    cols = ('url', 'rev_host', 'title')
    cmds += __shred_sqlite_char_columns('moz_places', cols, places_suffix)
    # delete any orphaned annotations in moz_annos
    annos_suffix = "where id in (select moz_annos.id " \
        "from moz_annos " \
        "left join moz_places " \
        "on moz_annos.place_id = moz_places.id " \
        "where moz_places.id is null); "
    cmds += __shred_sqlite_char_columns(
        'moz_annos', ('content', ), annos_suffix)
    # delete any orphaned favicons
    fav_suffix = "where id not in (select favicon_id " \
        "from moz_places where favicon_id is not null ); "
    cols = ('url', 'data')
    cmds += __shred_sqlite_char_columns('moz_favicons', cols, fav_suffix)
    # delete any orphaned history visits
    cmds += "delete from moz_historyvisits where place_id not " \
        "in (select id from moz_places where id is not null); "
    # delete any orphaned input history
    input_suffix = "where place_id not in (select distinct id from moz_places)"
    cols = ('input', )
    cmds += __shred_sqlite_char_columns('moz_inputhistory', cols, input_suffix)
    # delete the whole moz_hosts table
    # Reference: https://bugzilla.mozilla.org/show_bug.cgi?id=932036
    # Reference:
    # https://support.mozilla.org/en-US/questions/937290#answer-400987
    if __sqlite_table_exists(path, 'moz_hosts'):
        cmds += __shred_sqlite_char_columns('moz_hosts', ('host',))
        cmds += "delete from moz_hosts;"
    # execute the commands
    FileUtilities.execute_sqlite3(path, cmds)
def start_with_computer(enabled): """If enabled, create shortcut to start application with computer. If disabled, then delete the shortcut.""" if not enabled: if os.path.lexists(autostart_path): FileUtilities.delete(autostart_path) return if os.path.lexists(autostart_path): return import shutil General.makedirs(os.path.dirname(autostart_path)) shutil.copy(launcher_path, autostart_path) os.chmod(autostart_path, 0755) if General.sudo_mode(): General.chownself(autostart_path)
def start_with_computer(enabled):
    """If enabled, create shortcut to start application with computer.
    If disabled, then delete the shortcut."""
    autostart_path = get_autostart_path()
    shortcut_present = os.path.lexists(autostart_path)
    if not enabled:
        # Disabling: remove any existing autostart shortcut.
        if shortcut_present:
            FileUtilities.delete(autostart_path)
        return
    if shortcut_present:
        # Shortcut already exists; nothing to create.
        return
    import win32com.client
    shell = win32com.client.Dispatch('WScript.Shell')
    shortcut = shell.CreateShortCut(autostart_path)
    shortcut.TargetPath = os.path.join(Common.bleachbit_exe_path,
                                       'bleachbit.exe')
    shortcut.save()
def delete_chrome_keywords(path):
    """Delete keywords table in Chromium/Google Chrome 'Web Data' database"""
    cols = ('short_name', 'keyword', 'favicon_url',
            'originating_url', 'suggest_url')
    # Rows with date_created = 0 are left alone — presumably the preset
    # search engines; confirm against the Web Data schema.
    where = "where not date_created = 0"
    cmds = __shred_sqlite_char_columns('keywords', cols, where)
    cmds += "update keywords set usage_count = 0;"
    ver = __get_chrome_history(path, 'Web Data')
    if ver >= 43 and ver < 49:
        # keywords_backup table first seen in Google Chrome 17 / Chromium 17 which is Web Data version 43
        # In Google Chrome 25, the table is gone.
        cmds += __shred_sqlite_char_columns('keywords_backup', cols, where)
        cmds += "update keywords_backup set usage_count = 0;"
    FileUtilities.execute_sqlite3(path, cmds)
def delete_updates():
    """Yield commands for deleting Windows Updates files.

    Stops the Windows Update service (wuauserv) first if it is running,
    deletes the update caches, then restarts the service.
    """
    windir = os.path.expandvars('$windir')
    dirs = glob.glob(os.path.join(windir, '$NtUninstallKB*'))
    dirs += [os.path.expandvars('$windir\\SoftwareDistribution\\Download')]
    dirs += [os.path.expandvars('$windir\\ie7updates')]
    dirs += [os.path.expandvars('$windir\\ie8updates')]
    if not dirs:
        # if nothing to delete, then also do not restart service
        return
    import win32serviceutil
    # QueryServiceStatus()[1] is the current state; 4 = SERVICE_RUNNING
    wu_running = win32serviceutil.QueryServiceStatus('wuauserv')[1] == 4

    def wu_service(cmd_args):
        """Run the given 'net' command; report 0 bytes freed."""
        General.run_external(cmd_args)
        return 0

    stop_args = ['net', 'stop', 'wuauserv']
    if wu_running:
        # BUG FIX: the original closure read a shared `args` variable that
        # was rebound to the 'net start' command before the deferred
        # command object could run; bind the argument list explicitly.
        yield Command.Function(None, lambda: wu_service(stop_args),
                               " ".join(stop_args))
    for path1 in dirs:
        for path2 in FileUtilities.children_in_directory(path1, True):
            yield Command.Delete(path2)
        if os.path.exists(path1):
            yield Command.Delete(path1)
    start_args = ['net', 'start', 'wuauserv']
    if wu_running:
        yield Command.Function(None, lambda: wu_service(start_args),
                               " ".join(start_args))
def __init__(self, action_element):
    """Initialize file search from a CleanerML action element."""
    # regex/nregex: include/exclude patterns — presumably applied to file
    # names during the search; verify where they are consumed.
    self.regex = action_element.getAttribute('regex')
    assert(isinstance(self.regex, (str, unicode, types.NoneType)))
    self.nregex = action_element.getAttribute('nregex')
    assert(isinstance(self.nregex, (str, unicode, types.NoneType)))
    # wholeregex/nwholeregex: include/exclude patterns — presumably applied
    # to the whole path rather than the base name; verify at the consumer.
    self.wholeregex = action_element.getAttribute('wholeregex')
    assert(isinstance(self.wholeregex, (str, unicode, types.NoneType)))
    self.nwholeregex = action_element.getAttribute('nwholeregex')
    assert(isinstance(self.nwholeregex, (str, unicode, types.NoneType)))
    self.search = action_element.getAttribute('search')
    self.object_type = action_element.getAttribute('type')
    # Expand environment variables and '~' in the configured path.
    self.path = os.path.expanduser(FileUtilities.expandvars(
        action_element.getAttribute('path')))
    if 'nt' == os.name and self.path:
        # convert forward slash to backslash for compatibility with getsize()
        # and for display. Do not convert an empty path, or it will become
        # the current directory (.).
        self.path = os.path.normpath(self.path)
    # ds: parameters passed along for a 'deep' scan
    self.ds = {}
    if 'deep' == self.search:
        self.ds['regex'] = self.regex
        self.ds['nregex'] = self.nregex
        self.ds['cache'] = General.boolstr_to_bool(
            action_element.getAttribute('cache'))
        self.ds['command'] = action_element.getAttribute('command')
        self.ds['path'] = self.path
    if not any([self.object_type, self.regex, self.nregex,
                self.wholeregex, self.nwholeregex]):
        # If the filter is not needed, bypass it for speed.
        self.get_paths = self._get_paths
def __localization_path(self, basedir, language_filter, dir_filter):
    """Return localization paths in a single directory tree"""
    # Glob patterns are expanded by recursing once per concrete directory.
    if -1 != basedir.find('*'):
        for basedir2 in glob.iglob(basedir):
            for path in self.__localization_path(basedir2, language_filter,
                                                 dir_filter):
                yield path
        return
    if not os.path.exists(basedir):
        return
    for path in os.listdir(basedir):
        # dir_filter returning True means: skip this directory entry.
        if None != dir_filter and dir_filter(path):
            continue
        locale_code = path
        try:
            language_code = locale_to_language(path)
        except:
            # not a recognizable locale name; skip it
            continue
        # language_filter returning True means: keep (do not delete) it.
        if None != language_filter and language_filter(
                locale_code, language_code):
            continue
        locale_dirname = os.path.join(basedir, locale_code)
        # Yield the files first, then the directory itself, so the consumer
        # can delete children before the (then empty) directory.
        # NOTE: the loop variable `path` is intentionally reused here.
        for path in FileUtilities.children_in_directory(
                locale_dirname, True):
            yield path
        yield locale_dirname
def update_total_size(self, bytes_removed):
    """Callback to update the total size cleaned"""
    context_id = self.status_bar.get_context_id('size')
    # Show an empty status when nothing has been removed yet.
    if 0 == bytes_removed:
        text = ""
    else:
        text = FileUtilities.bytes_to_human(bytes_removed)
    self.status_bar.push(context_id, text)
def delete_chrome_autofill(path):
    """Delete autofill table in Chromium/Google Chrome 'Web Data' database"""
    # Shred every free-text column across the autofill-related tables.
    statements = [
        __shred_sqlite_char_columns(
            'autofill', ('name', 'value', 'value_lower')),
        __shred_sqlite_char_columns(
            'autofill_profile_names',
            ('first_name', 'middle_name', 'last_name', 'full_name')),
        __shred_sqlite_char_columns('autofill_profile_emails', ('email',)),
        __shred_sqlite_char_columns('autofill_profile_phones', ('number',)),
        __shred_sqlite_char_columns(
            'autofill_profiles',
            ('company_name', 'street_address', 'dependent_locality',
             'city', 'state', 'zipcode', 'country_code')),
        __shred_sqlite_char_columns(
            'server_addresses',
            ('company_name', 'street_address', 'address_1', 'address_2',
             'address_3', 'address_4', 'postal_code', 'country_code',
             'language_code', 'recipient_name', 'phone_number')),
    ]
    FileUtilities.execute_sqlite3(path, "".join(statements))
def get_commands(self, option_id):
    """Yield cleaning commands for the given option.

    NOTE(review): indentation of the final 'recent_documents' section was
    reconstructed from a collapsed source; confirm both inner loops are
    nested inside `for prefix in self.prefixes`.
    """
    # paths for which to run expand_glob_join
    egj = []
    if 'recent_documents' == option_id:
        egj.append(
            "user/registry/data/org/openoffice/Office/Histories.xcu")
        egj.append(
            "user/registry/cache/org.openoffice.Office.Histories.dat")
    # NOTE(review): `and not 'cache' == option_id` is redundant — option_id
    # already equals 'recent_documents' in this branch.
    if 'recent_documents' == option_id and not 'cache' == option_id:
        egj.append("user/registry/cache/org.openoffice.Office.Common.dat")
    for egj_ in egj:
        for prefix in self.prefixes:
            for path in FileUtilities.expand_glob_join(prefix, egj_):
                if 'nt' == os.name:
                    path = os.path.normpath(path)
                if os.path.lexists(path):
                    yield Command.Delete(path)
    if 'cache' == option_id:
        dirs = []
        for prefix in self.prefixes:
            dirs += FileUtilities.expand_glob_join(
                prefix, "user/registry/cache/")
        for dirname in dirs:
            if 'nt' == os.name:
                dirname = os.path.normpath(dirname)
            for filename in children_in_directory(dirname, False):
                yield Command.Delete(filename)
    if 'recent_documents' == option_id:
        for prefix in self.prefixes:
            for path in FileUtilities.expand_glob_join(prefix, "user/registry/data/org/openoffice/Office/Common.xcu"):
                if os.path.lexists(path):
                    yield Command.Function(path,
                                           Special.delete_ooo_history,
                                           _('Delete the usage history'))
            # ~/.openoffice.org/3/user/registrymodifications.xcu
            # Apache OpenOffice.org 3.4.1 from openoffice.org on Ubuntu 13.04
            # %AppData%\OpenOffice.org\3\user\registrymodifications.xcu
            # Apache OpenOffice.org 3.4.1 from openoffice.org on Windows XP
            for path in FileUtilities.expand_glob_join(prefix, "user/registrymodifications.xcu"):
                if os.path.lexists(path):
                    yield Command.Function(path,
                                           Special.delete_office_registrymodifications,
                                           _('Delete the usage history'))
def updatePodcastXML(url):
    """Download a podcast's RSS feed and fetch any episodes not yet on disk.

    url -- the feed URL of the podcast.  Returns None; progress and errors
    are reported on stdout.
    """
    # First get the podcast's name from the feed URL
    name = fileUtil.getPodcastName(url)
    if name is None:  # 'is None' instead of '== None'
        print("Error - Podcast has no name...")
        return
    # Remove any characters from the name that are invalid in file systems
    name = fileUtil.cleanForWindows(name)

    # Download the RSS file into the .rss cache directory
    rssFilepath = Path(".rss/" + name)
    print("\nDownloading rss file for: " + name)
    success = webUtil.downloadFileStream(url, rssFilepath, 0)
    # Preserve original semantics: only an explicit False is a failure
    # (None fell through to the success branch in the original, too).
    if success is False:
        print("Error in updating podcast")
        return

    print("Checking for new episodes")
    # Enclosed (title, link) pairs parsed from the feed file
    episodes = fileUtil.getEnclosedLinks(str(rssFilepath))

    # Ensure the per-podcast download directory exists
    if not fileUtil.checkDir(name):
        os.mkdir(name)

    # BUGFIX: the original reused 'url' as the loop variable, shadowing the
    # feed-URL parameter; renamed to 'episode' for clarity.
    for episode in episodes:
        title, link = episode[0], episode[1]
        # Path without extension, used to test for an existing download
        filePathSansType = Path(name + "/" + title)
        if fileUtil.checkFileSansType(filePathSansType):
            continue  # already downloaded
        # Determine the extension from the remote file
        filetype = webUtil.getFileType(link)
        if filetype is None:
            print("Error Downloading file")
            continue  # try the next episode
        filePath = Path(name + "/" + title + filetype)
        print("Fetching new file: " + title)
        if webUtil.downloadFileStream(link, filePath, 0) is None:
            print("Error - File not downloaded")
def __init__(self, action_element):
    """Read the 'cmd' and 'wait' attributes from the action XML element."""
    self.cmd = FileUtilities.expandvars(action_element.getAttribute('cmd'))
    wait_attr = action_element.getAttribute('wait')
    # Wait by default; an attribute beginning with 'f' (false) or
    # 'n' (no) disables waiting.
    self.wait = not (wait_attr and wait_attr.lower()[0] in ('f', 'n'))
def delete_chrome_autofill(path):
    """Delete autofill table in Chromium/Google Chrome 'Web Data' database"""
    # Table -> character columns to shred, in execution order.
    specs = [
        ('autofill', ('name', 'value', 'value_lower')),
        ('autofill_profile_names',
         ('first_name', 'middle_name', 'last_name', 'full_name')),
        ('autofill_profile_emails', ('email', )),
        ('autofill_profile_phones', ('number', )),
        ('autofill_profiles',
         ('company_name', 'street_address', 'dependent_locality', 'city',
          'state', 'zipcode', 'country_code')),
        ('server_addresses',
         ('company_name', 'street_address', 'address_1', 'address_2',
          'address_3', 'address_4', 'postal_code', 'country_code',
          'language_code', 'recipient_name', 'phone_number')),
    ]
    # Concatenate the per-table shred statements into one batch.
    cmds = ''.join(
        __shred_sqlite_char_columns(table, cols) for table, cols in specs)
    FileUtilities.execute_sqlite3(path, cmds)
def execute(self, really_delete):
    """Run the wrapped cleaning function and report bytes recovered.

    Yields a whitelist notice when self.path is protected, yields
    progress values (True or tuples) produced by generator functions,
    and records the recovered size in ret["size"].

    NOTE(review): unlike the sibling execute() methods, no final
    'yield ret' is visible in this chunk -- confirm against the full file.
    """
    # Never touch a whitelisted path.
    if None != self.path and FileUtilities.whitelisted(self.path):
        yield whitelist(self.path)
        return
    ret = {"label": self.label,
           "n_deleted": 0,
           "n_special": 1,
           "path": self.path,
           "size": None}
    if really_delete:
        if None == self.path:
            # Function takes no path. It returns the size.
            func_ret = self.func()
            if isinstance(func_ret, types.GeneratorType):
                # function returned generator
                # NOTE(review): self.func() is invoked a second time here;
                # the generator from the first call is discarded -- confirm
                # this is intentional.
                for func_ret in self.func():
                    if True == func_ret or isinstance(func_ret, tuple):
                        # Return control to GTK idle loop.
                        # If tuple, then display progress.
                        yield func_ret
            # either way, func_ret should be an integer
            assert isinstance(func_ret, (int, long))
            ret["size"] = func_ret
        else:
            # Function takes a path. We check the size.
            oldsize = FileUtilities.getsize(self.path)
            try:
                self.func(self.path)
            except DatabaseError, e:
                # Tolerate sqlite files that are encrypted, corrupt, or
                # missing; re-raise any other database error.
                if -1 == e.message.find("file is encrypted or is not a database") and -1 == e.message.find(
                        "or missing database"):
                    raise
                print "Warning:", e.message
                return
            try:
                newsize = FileUtilities.getsize(self.path)
            except OSError, e:
                from errno import ENOENT
                if e.errno == ENOENT:
                    # file does not exist
                    newsize = 0
                else:
                    raise
            ret["size"] = oldsize - newsize
def execute(self, really_delete):
    """Make changes and return results.

    Yields a whitelist notice if self.path is protected; otherwise yields
    a result dict.  When really_delete is true, the file is truncated to
    zero bytes.
    """
    if FileUtilities.whitelisted(self.path):
        yield whitelist(self.path)
        return
    ret = {
        # TRANSLATORS: The file will be truncated to 0 bytes in length
        'label': _('Truncate'),
        'n_deleted': 1,
        'n_special': 0,
        'path': self.path,
        'size': FileUtilities.getsize(self.path)}
    if really_delete:
        # BUGFIX: use 'with' so the handle is closed even on error
        # (the original opened the file and never closed it).
        with open(self.path, 'wb') as f:
            f.truncate(0)
    yield ret
def locale_globex(globpath, regex):
    """List a path by glob, filter by regex, and return tuple in format (locale, pathname)"""
    for found in FileUtilities.globex(globpath, regex):
        match = re.search(regex, found)
        if match is None:
            # path did not match the locale pattern; skip it
            continue
        # first captured group is the locale code
        yield (match.groups(0)[0], found)
def get_commands(self):
    """Yield the command that runs 'yum clean all'."""
    # Checking allows auto-hide to work for non-APT systems.
    # BUGFIX: use 'return' instead of 'raise StopIteration' -- under
    # PEP 479 (Python 3.7+) raising StopIteration inside a generator
    # is converted to RuntimeError.
    if not FileUtilities.exe_exists('yum'):
        return
    yield Command.Function(
        None,
        Unix.yum_clean,
        'yum clean all')
def get_count_alt_qpileup(pileup):
    """Tally allele counts from a gzipped pileup into <pileup>.alt_count.gz.

    pileup -- path of the pileup file (without the '.gz' suffix).
    For every input line, the first four tab-separated fields are copied
    through and count_alt() summarizes the base string in field 5.
    """
    outfile = pileup + '.alt_count'
    # Remove any stale output before writing a fresh one.
    fu.delete(outfile + '.gz')
    header = '\t'.join(('CHROM', 'POS', 'REF', 'DEPTH', 'REFCOUNT',
                        'A', 'C', 'T', 'G', 'DEL', 'N', 'VC'))
    # BUGFIX: 'with' closes both files even if count_alt() or a write
    # raises (the original leaked both handles on error).
    with gzip.open(pileup + '.gz', 'r') as fh, \
            gzip.open(outfile + '.gz', 'w') as fh_out:
        fh_out.write(header + '\n')
        for line in fh:
            fields = line.split('\t')
            # e.g.: 1  909768  A  41  GGgGgggGGGGGGggggGGgggg...
            tofile = '\t'.join(fields[0:4]) + '\t' + count_alt(fields[4])
            fh_out.write(tofile + '\n')
def execute(self, really_delete):
    """Make changes and return results"""
    if FileUtilities.whitelisted(self.path):
        yield whitelist(self.path)
        return
    ret = {'label': _('Clean file'),
           'n_deleted': 0,
           'n_special': 1,
           'path': self.path,
           'size': None}
    if really_delete:
        # Record the size before and after cleaning the JSON file.
        size_before = FileUtilities.getsize(self.path)
        FileUtilities.clean_json(self.path, self.address)
        ret['size'] = size_before - FileUtilities.getsize(self.path)
    yield ret
def delete_mozilla_url_history(path):
    """Delete URL history in Mozilla places.sqlite (Firefox 3 and family)"""
    pieces = []
    # shred URLs of pages that no bookmark references
    orphan_places = (
        "where id in (select moz_places.id from moz_places "
        "left join moz_bookmarks on moz_bookmarks.fk = moz_places.id "
        "where moz_bookmarks.id is null); ")
    pieces.append(__shred_sqlite_char_columns(
        "moz_places", ("url", "rev_host", "title"), orphan_places))
    # annotations whose place row is gone
    orphan_annos = (
        "where id in (select moz_annos.id from moz_annos "
        "left join moz_places on moz_annos.place_id = moz_places.id "
        "where moz_places.id is null); ")
    pieces.append(__shred_sqlite_char_columns(
        "moz_annos", ("content",), orphan_annos))
    # favicons no longer referenced by any place
    orphan_favicons = (
        "where id not in (select favicon_id "
        "from moz_places where favicon_id is not null ); ")
    pieces.append(__shred_sqlite_char_columns(
        "moz_favicons", ("url", "data"), orphan_favicons))
    # visits pointing at deleted places: plain delete, no columns to shred
    pieces.append(
        "delete from moz_historyvisits where place_id not "
        "in (select id from moz_places where id is not null); ")
    # typed-input history for deleted places
    pieces.append(__shred_sqlite_char_columns(
        "moz_inputhistory", ("input",),
        "where place_id not in (select distinct id from moz_places)"))
    # execute the accumulated commands in one batch
    FileUtilities.execute_sqlite3(path, "".join(pieces))
def execute(self, really_delete):
    """Make changes and return results.

    Yields a whitelist notice if self.path is protected; otherwise yields
    a result dict.  When really_delete is true, the file is truncated to
    zero bytes.
    """
    if FileUtilities.whitelisted(self.path):
        yield whitelist(self.path)
        return
    ret = {
        # TRANSLATORS: The file will be truncated to 0 bytes in length
        "label": _("Truncate"),
        "n_deleted": 1,
        "n_special": 0,
        "path": self.path,
        "size": FileUtilities.getsize(self.path),
    }
    if really_delete:
        # BUGFIX: use 'with' so the handle is closed even on error
        # (the original opened the file and never closed it).
        with open(self.path, "wb") as f:
            f.truncate(0)
    yield ret
def delete_mozilla_url_history(path):
    """Delete URL history in Mozilla places.sqlite (Firefox 3 and family)"""
    # (table, char columns, row-selection suffix) in required order
    shred_specs = (
        ('moz_places', ('url', 'rev_host', 'title'),
         "where id in (select moz_places.id from moz_places "
         "left join moz_bookmarks on moz_bookmarks.fk = moz_places.id "
         "where moz_bookmarks.id is null); "),
        ('moz_annos', ('content', ),
         "where id in (select moz_annos.id from moz_annos "
         "left join moz_places on moz_annos.place_id = moz_places.id "
         "where moz_places.id is null); "),
        ('moz_favicons', ('url', 'data'),
         "where id not in (select favicon_id "
         "from moz_places where favicon_id is not null ); "),
    )
    cmds = ""
    # shred orphaned places, annotations, and favicons
    for table, cols, suffix in shred_specs:
        cmds += __shred_sqlite_char_columns(table, cols, suffix)
    # orphaned history visits carry no character data: plain delete
    cmds += ("delete from moz_historyvisits where place_id not "
             "in (select id from moz_places where id is not null); ")
    # orphaned typed-input history
    cmds += __shred_sqlite_char_columns(
        'moz_inputhistory', ('input', ),
        "where place_id not in (select distinct id from moz_places)")
    # execute the accumulated commands
    FileUtilities.execute_sqlite3(path, cmds)
def execute(self, really_delete):
    """Make changes and return results"""
    if FileUtilities.whitelisted(self.path):
        yield whitelist(self.path)
        return
    ret = {
        # TRANSLATORS: Parts of this file will be deleted
        'label': _('Clean file'),
        'n_deleted': 0,
        'n_special': 1,
        'path': self.path,
        'size': None}
    if really_delete:
        # Bytes recovered = size before minus size after cleaning.
        before = FileUtilities.getsize(self.path)
        FileUtilities.clean_ini(self.path, self.section, self.parameter)
        after = FileUtilities.getsize(self.path)
        ret['size'] = before - after
    yield ret
def execute(self, really_delete):
    """Make changes and return results"""
    if FileUtilities.whitelisted(self.path):
        yield whitelist(self.path)
        return
    # TRANSLATORS: Parts of this file will be deleted
    result = dict(label=_("Clean file"),
                  n_deleted=0,
                  n_special=1,
                  path=self.path,
                  size=None)
    if really_delete:
        initial_size = FileUtilities.getsize(self.path)
        FileUtilities.clean_ini(self.path, self.section, self.parameter)
        result["size"] = initial_size - FileUtilities.getsize(self.path)
    yield result
def start_with_computer(enabled): """If enabled, create shortcut to start application with computer. If disabled, then delete the shortcut.""" if not enabled: # User requests to not automatically start BleachBit if os.path.lexists(autostart_path): # Delete the shortcut FileUtilities.delete(autostart_path) return # User requests to automatically start BleachBit if os.path.lexists(autostart_path): # Already automatic, so exit return if not os.path.exists(Common.launcher_path): print 'ERROR: does not exist: ', Common.launcher_path return import shutil General.makedirs(os.path.dirname(autostart_path)) shutil.copy(Common.launcher_path, autostart_path) os.chmod(autostart_path, 0755) if General.sudo_mode(): General.chownself(autostart_path)
def get_recycle_bin():
    """Yield a list of files in the recycle bin"""
    bin_pidl = shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_BITBUCKET)
    folder = shell.SHGetDesktopFolder().BindToObject(
        bin_pidl, None, shell.IID_IShellFolder)
    for entry in folder:
        path = folder.GetDisplayNameOf(entry, shellcon.SHGDN_FORPARSING)
        if not os.path.isdir(path):
            yield path
            continue
        # Yield the directory's contents first, then the directory itself.
        for child in FileUtilities.children_in_directory(path, True):
            yield child
        yield path