def __save_rules(self, rules):
    """
        Save rules to db
        @param rules as bytes
        @raise Exception if download is cancelled
    """
    SqlCursor.add(self)
    try:
        result = rules.decode('utf-8')
        count = 0
        for line in result.split('\n'):
            SqlCursor.allow_thread_execution(self)
            if self.__cancellable.is_cancelled():
                raise Exception("Cancelled")
            # Skip hosts-file comments
            if line.startswith('#'):
                continue
            # Normalize "<ip><space-or-tab><netloc>" into two fields
            array = line.replace(
                ' ', '\t', 1).replace('\t', '@', 1).split('@')
            if len(array) <= 1:
                continue
            # Strip spaces, CR and trailing inline comment from the netloc
            netloc = array[1].replace(
                ' ', '').replace('\r', '').split('#')[0]
            # Update entry if exists, create else
            if netloc != "localhost":
                Logger.debug("Add filter: %s", netloc)
                self.__add_netloc(netloc)
                count += 1
                if count == 1000:
                    SqlCursor.commit(self)
                    # Do not flood sqlite, this allow webkit extension to run
                    sleep(0.1)
                    count = 0
    finally:
        # Always drop our cursor reference: the original leaked it when
        # an unexpected exception escaped the loop
        SqlCursor.remove(self)
def __save_css_rules(self, rules, uris):
    """
        Save rules to db
        @param rules as bytes
        @param uris as [str] remaining URIs still to be processed;
               empty list means we are the last call
        @raise IOError if download is cancelled
    """
    SqlCursor.add(self)
    try:
        result = rules.decode("utf-8")
        count = 0
        for line in result.split('\n'):
            if self.__cancellable.is_cancelled():
                raise IOError("Cancelled")
            # Extended ABP selectors are not supported
            if line.find("-abp-") != -1:
                continue
            elif line.startswith("##"):
                # Rule applies to every domain
                self.__save_css_default_rule(line)
            elif line.find("##") != -1:
                # Rule restricted to the domain(s) before "##"
                self.__save_css_domain_rule(line)
            count += 1
            if count == 1000:
                # Periodic commit to keep the transaction small
                with SqlCursor(self) as sql:
                    sql.commit()
                count = 0
        # We are the last rule
        # Delete old entries
        if not uris:
            with SqlCursor(self) as sql:
                sql.execute(
                    "DELETE FROM adblock_css\
                     WHERE mtime!=?", (self.__adblock_mtime,))
                sql.commit()
    finally:
        # Always drop our cursor reference: the original leaked it
        # when cancellation raised IOError
        SqlCursor.remove(self)
def __save_abp_rules(self, rules):
    """
        Save rules to db
        @param rules as bytes
        @raise Exception if download is cancelled
    """
    SqlCursor.add(self)
    try:
        result = rules.decode("utf-8")
        count = 0
        for line in result.split('\n'):
            SqlCursor.allow_thread_execution(self)
            if self.__cancellable.is_cancelled():
                raise Exception("Cancelled")
            # Unsupported syntax: extended selectors, options ($),
            # comments (!) and section headers ([)
            if "-abp-" in line or "$" in line or "!" in line or "[" in line:
                continue
            elif line.startswith("##"):
                # CSS rule for every domain
                self.__save_css_default_rule(line)
            elif "#@#" in line:
                # CSS exception rule
                self.__save_css_exception(line)
            elif "##" in line:
                # CSS rule restricted to given domain(s)
                self.__save_css_domain_rule(line)
            elif line.startswith("@@"):
                # Whitelisting rule
                self.__save_abp_rule(line[2:], True)
            else:
                # Blocking rule
                self.__save_abp_rule(line, False)
            Logger.debug("Add abp filter: %s", line)
            count += 1
            if count == 1000:
                SqlCursor.commit(self)
                # Do not flood sqlite, this allow webkit extension to run
                sleep(0.1)
                count = 0
    finally:
        # Always drop our cursor reference: the original leaked it when
        # an unexpected exception escaped the loop
        SqlCursor.remove(self)
def __save_rules(self, rules, uris):
    """
        Save rules to db
        @param rules as bytes
        @param uris as [str] remaining URIs still to be processed;
               empty list means we are the last call
        @raise IOError if download is cancelled
    """
    SqlCursor.add(self)
    try:
        result = rules.decode('utf-8')
        j = json.loads(result)
        with SqlCursor(self) as sql:
            count = 0
            for item in j:
                if self.__cancellable.is_cancelled():
                    raise IOError("Cancelled")
                sql.execute(
                    "INSERT INTO phishing\
                     (uri, mtime) VALUES (?, ?)",
                    (item["url"].rstrip("/"), self.__mtime))
                count += 1
                if count == 1000:
                    # Periodic commit to keep the transaction small
                    sql.commit()
                    count = 0
            sql.commit()
        # We are the last call to save_rules()?
        # Delete removed entries and commit
        if not uris:
            with SqlCursor(self) as sql:
                sql.execute(
                    "DELETE FROM phishing\
                     WHERE mtime!=?", (self.__mtime,))
                sql.commit()
    finally:
        # Always drop our cursor reference: the original leaked it
        # when cancellation raised IOError
        SqlCursor.remove(self)
def import_chromium(self, chrome):
    """
        Chromium/Chrome importer
        As Eolie doesn't sync with Chromium, we do not
        handle parent guid and just import parents as tags
        @param chrome as bool  True for Chrome, False for Chromium
    """
    try:
        self.thread_lock.acquire()
        SqlCursor.add(self)
        import json
        homedir = GLib.get_home_dir()
        if chrome:
            path = homedir + "/.config/chrome/Default/Bookmarks"
        else:
            path = homedir + "/.config/chromium/Default/Bookmarks"
        f = Gio.File.new_for_path(path)
        if not f.query_exists():
            # finally below still releases cursor reference and lock
            # (the original leaked the cursor reference here)
            return
        (status, content, tag) = f.load_contents(None)
        if status:
            data = content.decode("utf-8")
            j = json.loads(data)
            parents = []
            # Setup initial parents
            for root in j["roots"]:
                parents.append(("", j["roots"][root]["children"]))
            # Walk parents and children (breadth-first)
            while parents:
                (parent_name, children) = parents.pop(0)
                bookmarks = []
                for child in children:
                    if child["type"] == "folder":
                        parents.append((child["name"], child["children"]))
                    elif child["type"] == "url":
                        bookmarks.append((child["name"], child["url"]))
                position = 0
                for bookmark in bookmarks:
                    tags = [parent_name]
                    title = bookmark[0]
                    uri = bookmark[1]
                    # Only http(s) bookmarks with a title
                    if not uri.startswith('http') or not title:
                        continue
                    uri = uri.rstrip('/')
                    rowid = self.get_id(uri)
                    if rowid is None:
                        # Add bookmark
                        bookmark_id = self.add(title, uri, None,
                                               tags, 0, False)
                        # Set position
                        self.set_position(bookmark_id, position, False)
                    position += 1
            with SqlCursor(self) as sql:
                sql.commit()
    except Exception as e:
        print("DatabaseBookmarks::import_chromium:", e)
    finally:
        # Release cursor reference on every path (the original skipped
        # it on early return and on exceptions)
        SqlCursor.remove(self)
        self.thread_lock.release()
def import_html(self, path):
    """
        Import html bookmarks (Netscape bookmark file format)
        @param path as str
    """
    try:
        self.thread_lock.acquire()
        from bs4 import BeautifulSoup
        SqlCursor.add(self)
        f = Gio.File.new_for_path(path)
        if not f.query_exists():
            # finally below still releases cursor reference and lock
            # (the original leaked the cursor reference here)
            return
        (status, content, tag) = f.load_contents(None)
        if status:
            data = content.decode("utf-8")
            soup = BeautifulSoup(data, "html.parser")
            parent_name = ""
            position = 0
            for dt in soup.findAll("dt"):
                h3 = dt.find("h3")
                if h3 is not None:
                    # <h3> starts a new folder: remember it as tag
                    parent_name = h3.contents[0]
                    continue
                else:
                    a = dt.find("a")
                    uri = a.get("href")
                    if a.get("tags") is None:
                        tags = [parent_name]
                    else:
                        tags = [a.get("tags")]
                    title = a.contents[0]
                    if uri is None:
                        # <a> without href acts as a folder name
                        parent_name = title
                        continue
                    elif not uri.startswith('http') or not title:
                        continue
                    uri = uri.rstrip('/')
                    rowid = self.get_id(uri)
                    if rowid is None:
                        if not tags:
                            tags = [parent_name]
                        # Add bookmark
                        bookmark_id = self.add(title, uri, None,
                                               tags, 0, False)
                        # Set position
                        self.set_position(bookmark_id, position, False)
                        position += 1
            with SqlCursor(self) as sql:
                sql.commit()
    except Exception as e:
        print("DatabaseBookmarks::import_html:", e)
    finally:
        # Release cursor reference on every path (the original skipped
        # it on early return and on exceptions)
        SqlCursor.remove(self)
        self.thread_lock.release()
def __update(self):
    """
        Update database

        Downloads every hosts file listed in self.__URIS, parses each
        line into a dns entry and refreshes the adblock table; entries
        whose mtime was not touched are deleted afterwards.
    """
    self.__cancellable.reset()
    SqlCursor.add(self)
    result = ""
    try:
        for uri in self.__URIS:
            session = Soup.Session.new()
            request = session.request(uri)
            stream = request.send(self.__cancellable)
            # NOTE(review): local name shadows the builtin `bytes`
            bytes = bytearray(0)
            # Read response in 1 KiB chunks until EOF
            buf = stream.read_bytes(1024, self.__cancellable).get_data()
            while buf:
                bytes += buf
                buf = stream.read_bytes(1024,
                                        self.__cancellable).get_data()
            stream.close()
            result = bytes.decode('utf-8')
            count = 0
            for line in result.split('\n'):
                if self.__cancellable.is_cancelled():
                    raise IOError("Cancelled")
                # Skip hosts-file comments
                if line.startswith('#'):
                    continue
                # Normalize "<ip><space-or-tab><dns>" into two fields
                array = line.replace(' ',
                                     '\t', 1).replace('\t',
                                                      '@', 1).split('@')
                if len(array) <= 1:
                    continue
                # Strip spaces, CR and trailing inline comment
                dns = array[1].replace(' ',
                                       '').replace('\r',
                                                   '').split('#')[0]
                # Update entry if exists, create else
                with SqlCursor(self) as sql:
                    sql.execute(
                        "INSERT INTO adblock\
                        (dns, mtime)\
                        VALUES (?, ?)",
                        (dns, self.__mtime))
                    count += 1
                    if count == 1000:
                        # Periodic commit to keep transaction small
                        sql.commit()
                        count = 0
        # Delete removed entries
        with SqlCursor(self) as sql:
            sql.execute(
                "DELETE FROM adblock\
                WHERE mtime!=?", (self.__mtime, ))
    except Exception as e:
        print("DatabaseAdlbock:__update():", e)
    # Final commit runs even after errors; cursor reference dropped last
    with SqlCursor(self) as sql:
        sql.commit()
    SqlCursor.remove(self)
def __save_rules(self, rules, uris):
    """
        Save rules to db
        @param rules as bytes
        @param uris as [str] remaining URIs still to be processed;
               empty list means we are the last call
    """
    SqlCursor.add(self)
    try:
        result = rules.decode('utf-8')
        j = json.loads(result)
        with SqlCursor(self) as sql:
            count = 0
            for item in j:
                if self.__cancellable.is_cancelled():
                    raise IOError("Cancelled")
                uri = item["url"].rstrip("/")
                # INSERT fails on existing uri (unique constraint),
                # fall back to refreshing its mtime.
                # Narrowed from a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit.
                try:
                    sql.execute(
                        "INSERT INTO phishing\
                         (uri, mtime) VALUES (?, ?)",
                        (uri, self.__phishing_mtime))
                except Exception:
                    sql.execute(
                        "UPDATE phishing set mtime=?\
                         WHERE uri=?", (self.__phishing_mtime, uri))
                count += 1
                if count == 1000:
                    SqlCursor.commit(self)
                    # Do not flood sqlite
                    # this allow webkit extension to run
                    sleep(0.1)
                    count = 0
        # We are the last call to save_rules()?
        # Delete removed entries and commit
        if not uris:
            with SqlCursor(self) as sql:
                sql.execute(
                    "DELETE FROM phishing\
                     WHERE mtime!=?", (self.__phishing_mtime,))
            try:
                # `with` closes the file handle, which the original
                # dump(..., open(...)) call leaked
                with open(EOLIE_DATA_PATH + "/phishing.bin", "wb") as f:
                    dump(self.__phishing_mtime, f)
            except Exception as e:
                Logger.error("DatabasePhishing::__save_rules(): %s", e)
    except Exception as e:
        Logger.error("DatabasePhishing::__save_rules():%s -> %s", e, rules)
    SqlCursor.remove(self)
def __update(self):
    """
        Update database

        Downloads the phishing list at self.__URI, inserts every entry
        and deletes entries whose mtime was not refreshed.
    """
    self.__cancellable.reset()
    try:
        SqlCursor.add(self)
        session = Soup.Session.new()
        request = session.request(self.__URI)
        stream = request.send(self.__cancellable)
        # renamed from `bytes` to avoid shadowing the builtin
        payload = bytearray(0)
        # Read response in 1 KiB chunks until EOF
        buf = stream.read_bytes(1024, self.__cancellable).get_data()
        while buf:
            payload += buf
            buf = stream.read_bytes(1024, self.__cancellable).get_data()
        stream.close()
        data = payload.decode('utf-8')
        j = json.loads(data)
        with SqlCursor(self) as sql:
            count = 0
            for item in j:
                if self.__cancellable.is_cancelled():
                    raise IOError("Cancelled")
                sql.execute(
                    "INSERT INTO phishing\
                     (uri, mtime) VALUES (?, ?)",
                    (item["url"].rstrip("/"), self.__mtime))
                count += 1
                if count == 1000:
                    # Periodic commit to keep the transaction small
                    sql.commit()
                    count = 0
            sql.commit()
        # Delete removed entries
        with SqlCursor(self) as sql:
            sql.execute(
                "DELETE FROM phishing\
                 WHERE mtime!=?", (self.__mtime,))
            sql.commit()
    except Exception as e:
        print("DatabasePhishing::__update()", e)
    finally:
        # Always drop our cursor reference: the original skipped this
        # on the exception path
        SqlCursor.remove(self)
def __save_rules(self, rules, uris):
    """
        Save rules to db
        @param rules as bytes
        @param uris as [str] remaining URIs still to be processed;
               empty list means we are the last call
        @raise IOError if download is cancelled
    """
    SqlCursor.add(self)
    try:
        result = rules.decode('utf-8')
        count = 0
        for line in result.split('\n'):
            if self.__cancellable.is_cancelled():
                raise IOError("Cancelled")
            # Skip hosts-file comments
            if line.startswith('#'):
                continue
            # Normalize "<ip><space-or-tab><dns>" into two fields
            array = line.replace(' ', '\t', 1).replace('\t',
                                                       '@', 1).split('@')
            if len(array) <= 1:
                continue
            # Strip spaces, CR and trailing inline comment
            dns = array[1].replace(' ', '').replace('\r', '').split('#')[0]
            # Update entry if exists, create else
            with SqlCursor(self) as sql:
                debug("Add filter: %s" % dns)
                sql.execute(
                    "INSERT INTO adblock\
                     (dns, mtime) VALUES (?, ?)",
                    (dns, self.__adblock_mtime))
                count += 1
                if count == 1000:
                    # Periodic commit to keep the transaction small
                    sql.commit()
                    count = 0
        # We are the last call to save_rules()?
        # Delete removed entries and commit
        if not uris:
            with SqlCursor(self) as sql:
                sql.execute(
                    "DELETE FROM adblock\
                     WHERE mtime!=?", (self.__adblock_mtime,))
                sql.commit()
    finally:
        # Always drop our cursor reference: the original leaked it
        # when cancellation raised IOError
        SqlCursor.remove(self)
def import_firefox(self):
    """
        Mozilla Firefox importer

        Looks for the first profile directory containing places.sqlite
        and imports its bookmarks (with tags/parents) and folders.
    """
    try:
        SqlCursor.add(self)
        firefox_path = GLib.get_home_dir() + "/.mozilla/firefox/"
        d = Gio.File.new_for_path(firefox_path)
        infos = d.enumerate_children(
            'standard::name,standard::type',
            Gio.FileQueryInfoFlags.NOFOLLOW_SYMLINKS,
            None)
        sqlite_path = None
        # First profile dir with a places.sqlite wins
        for info in infos:
            if info.get_file_type() == Gio.FileType.DIRECTORY:
                f = Gio.File.new_for_path(firefox_path +
                                          info.get_name() +
                                          "/places.sqlite")
                if f.query_exists():
                    sqlite_path = f.get_path()
                    break
        if sqlite_path is not None:
            c = sqlite3.connect(sqlite_path, 600.0)
            try:
                # Add bookmarks
                bookmarks = self.__get_firefox_bookmarks(c)
                for (title, uri, parent_name,
                     bookmark_guid, parent_guid, position) in bookmarks:
                    tags = self.__get_tags_for_firefox_bookmark(
                        c, bookmark_guid)
                    bookmark_guid = self.__clean_guid(bookmark_guid)
                    parent_guid = self.__clean_guid(parent_guid)
                    if not uri.startswith('http') or not title:
                        continue
                    uri = uri.rstrip('/')
                    rowid = self.get_id(uri)
                    if rowid is None:
                        # If bookmark is not tagged, we use parent name
                        if not tags:
                            tags = [parent_name]
                        # Bookmarks and folder
                        bookmark_id = self.add(title, uri,
                                               bookmark_guid, tags, 0)
                        self.set_parent(bookmark_id, parent_guid,
                                        parent_name)
                        self.set_position(bookmark_id, position)
                # Add folders, we need to get them
                # as Firefox needs children order
                parents = self.__get_firefox_parents(c)
                for (title, parent_name,
                     bookmark_guid, parent_guid, position) in parents:
                    bookmark_guid = self.__clean_guid(bookmark_guid)
                    parent_guid = self.__clean_guid(parent_guid)
                    if not title or bookmark_guid == "root":
                        continue
                    # Folders are stored with their guid as uri
                    uri = bookmark_guid
                    rowid = self.get_id(uri)
                    if rowid is None:
                        # Bookmarks and folder
                        bookmark_id = self.add(title, uri,
                                               bookmark_guid, [], 0)
                        self.set_parent(bookmark_id, parent_guid,
                                        parent_name)
                        self.set_position(bookmark_id, position)
            finally:
                # The original never closed the Firefox db connection
                c.close()
    except Exception as e:
        Logger.error("DatabaseBookmarks::import_firefox(): %s", e)
    finally:
        # Always drop our cursor reference: the original skipped this
        # on the exception path
        SqlCursor.remove(self)
def __pull_bookmarks(self, bulk_keys, first_sync):
    """
        Pull from bookmarks
        @param bulk_keys as KeyBundle
        @param first_sync as bool
        @raise StopIteration
    """
    debug("pull bookmarks")
    SqlCursor.add(El().bookmarks)
    records = self.__mozilla_sync.get_records("bookmarks", bulk_keys)
    # Folder children lists collected during the walk; positions are
    # applied once all bookmarks exist locally
    children_array = []
    for record in records:
        self.__check_worker()
        # Remote record older than our last sync time: skip
        if record["modified"] < self.__mtimes["bookmarks"]:
            continue
        # Throttle to keep UI responsive
        sleep(0.01)
        bookmark = record["payload"]
        bookmark_id = El().bookmarks.get_id_by_guid(bookmark["id"])
        # Nothing to apply, continue
        if El().bookmarks.get_mtime(bookmark_id) >= record["modified"]:
            continue
        debug("pulling %s" % record)
        # Deleted bookmark
        if "deleted" in bookmark.keys():
            El().bookmarks.remove(bookmark_id)
        # Keep folder only for firefox compatibility
        elif "type" in bookmark.keys() and bookmark["type"] == "folder"\
                and bookmark["id"] is not None\
                and bookmark["title"]:
            if bookmark_id is None:
                # Folders are stored with their guid as uri
                bookmark_id = El().bookmarks.add(bookmark["title"],
                                                 bookmark["id"],
                                                 bookmark["id"],
                                                 [], 0, False)
            # Will calculate position later
            if "children" in bookmark.keys():
                children_array.append(bookmark["children"])
        # We have a bookmark, add it
        elif "type" in bookmark.keys() and bookmark["type"] == "bookmark"\
                and bookmark["id"] is not None\
                and bookmark["title"]:
            # Add a new bookmark
            if bookmark_id is None:
                # Use parent name if no bookmarks tags
                if "tags" not in bookmark.keys() or\
                        not bookmark["tags"]:
                    if "parentName" in bookmark.keys() and\
                            bookmark["parentName"]:
                        bookmark["tags"] = [bookmark["parentName"]]
                    else:
                        bookmark["tags"] = []
                bookmark_id = El().bookmarks.add(bookmark["title"],
                                                 bookmark["bmkUri"],
                                                 bookmark["id"],
                                                 bookmark["tags"],
                                                 0, False)
            # Update bookmark
            else:
                El().bookmarks.set_title(bookmark_id,
                                         bookmark["title"],
                                         False)
                El().bookmarks.set_uri(bookmark_id,
                                       bookmark["bmkUri"],
                                       False)
                # Update tags: drop local tags absent from the remote
                # record, then add missing remote tags
                current_tags = El().bookmarks.get_tags(bookmark_id)
                for tag in El().bookmarks.get_tags(bookmark_id):
                    if "tags" in bookmark.keys() and\
                            tag not in bookmark["tags"]:
                        tag_id = El().bookmarks.get_tag_id(tag)
                        current_tags.remove(tag)
                        El().bookmarks.del_tag_from(tag_id,
                                                    bookmark_id,
                                                    False)
                if "tags" in bookmark.keys():
                    for tag in bookmark["tags"]:
                        # Tag already associated
                        if tag in current_tags:
                            continue
                        tag_id = El().bookmarks.get_tag_id(tag)
                        if tag_id is None:
                            tag_id = El().bookmarks.add_tag(tag, False)
                        El().bookmarks.add_tag_to(tag_id,
                                                  bookmark_id,
                                                  False)
        # Update parent name if available
        if bookmark_id is not None and "parentName" in bookmark.keys():
            El().bookmarks.set_parent(bookmark_id,
                                      bookmark["parentid"],
                                      bookmark["parentName"],
                                      False)
        El().bookmarks.set_mtime(bookmark_id,
                                 record["modified"],
                                 False)
    # Update bookmark position
    for children in children_array:
        position = 0
        for child in children:
            bid = El().bookmarks.get_id_by_guid(child)
            El().bookmarks.set_position(bid, position, False)
            position += 1
    El().bookmarks.clean_tags()
    # Will commit
    SqlCursor.remove(El().bookmarks)
def __pull_bookmarks(self, bulk_keys, first_sync):
    """
        Pull from bookmarks
        @param bulk_keys as KeyBundle
        @param first_sync as bool
        @raise StopIteration
    """
    debug("pull bookmarks")
    SqlCursor.add(El().bookmarks)
    records = self.__mozilla_sync.get_records("bookmarks", bulk_keys)
    # We get all guids here and remove them while sync
    # At the end, we have deleted records
    # On fist sync, keep all
    if first_sync:
        to_delete = []
    else:
        to_delete = El().bookmarks.get_guids()
    for record in records:
        if self.__stop:
            raise StopIteration("Cancelled")
        # Throttle to keep UI responsive
        sleep(0.01)
        bookmark = record["payload"]
        # Only folders and bookmarks are handled
        if "type" not in bookmark.keys() or\
                bookmark["type"] not in ["folder", "bookmark"]:
            continue
        bookmark_id = El().bookmarks.get_id_by_guid(bookmark["id"])
        # This bookmark exists, remove from to delete
        if bookmark["id"] in to_delete:
            to_delete.remove(bookmark["id"])
        # Nothing to apply, continue
        if El().bookmarks.get_mtime(bookmark_id) >= record["modified"]:
            continue
        debug("pulling %s" % record)
        if bookmark_id is None:
            if "bmkUri" in bookmark.keys():
                # Use parent name if no bookmarks tags
                if "tags" not in bookmark.keys() or\
                        not bookmark["tags"]:
                    if "parentName" in bookmark.keys() and\
                            bookmark["parentName"]:
                        bookmark["tags"] = [bookmark["parentName"]]
                    else:
                        bookmark["tags"] = []
                bookmark_id = El().bookmarks.add(bookmark["title"],
                                                 bookmark["bmkUri"],
                                                 bookmark["id"],
                                                 bookmark["tags"],
                                                 False)
            else:
                # Folder record: stored with its guid as uri
                bookmark["tags"] = []
                bookmark_id = El().bookmarks.add(bookmark["title"],
                                                 bookmark["id"],
                                                 bookmark["id"],
                                                 bookmark["tags"],
                                                 False)
        else:
            El().bookmarks.set_title(bookmark_id,
                                     bookmark["title"],
                                     False)
            if "bmkUri" in bookmark.keys():
                El().bookmarks.set_uri(bookmark_id,
                                       bookmark["bmkUri"],
                                       False)
            elif "children" in bookmark.keys():
                # Folder: apply remote children order as positions
                position = 0
                for child in bookmark["children"]:
                    bid = El().bookmarks.get_id_by_guid(child)
                    El().bookmarks.set_position(bid, position, False)
                    position += 1
        # Remove previous tags
        current_tags = El().bookmarks.get_tags(bookmark_id)
        for tag in El().bookmarks.get_tags(bookmark_id):
            if "tags" in bookmark.keys() and\
                    tag not in bookmark["tags"]:
                tag_id = El().bookmarks.get_tag_id(tag)
                current_tags.remove(tag)
                El().bookmarks.del_tag_from(tag_id,
                                            bookmark_id,
                                            False)
        if "tags" in bookmark.keys():
            for tag in bookmark["tags"]:
                # Tag already associated
                if tag in current_tags:
                    continue
                tag_id = El().bookmarks.get_tag_id(tag)
                if tag_id is None:
                    tag_id = El().bookmarks.add_tag(tag, False)
                El().bookmarks.add_tag_to(tag_id,
                                          bookmark_id,
                                          False)
        El().bookmarks.set_mtime(bookmark_id,
                                 record["modified"],
                                 False)
        if "parentName" in bookmark.keys():
            El().bookmarks.set_parent(bookmark_id,
                                      bookmark["parentid"],
                                      bookmark["parentName"],
                                      False)
    # Remaining guids were not seen in remote records: delete locally
    for guid in to_delete:
        if self.__stop:
            raise StopIteration("Cancelled")
        debug("deleting: %s" % guid)
        bookmark_id = El().bookmarks.get_id_by_guid(guid)
        if bookmark_id is not None:
            El().bookmarks.remove(bookmark_id, False)
    El().bookmarks.clean_tags()
    # Will commit
    SqlCursor.remove(El().bookmarks)