def __on_entry_changed_timeout(self, entry, value):
    """
        Update popover search if needed
        @param entry as Gtk.Entry
        @param value as str
    """
    task_helper = TaskHelper()
    self.__entry_changed_timeout = None
    self.__window.container.current.webview.add_text_entry(value)
    # Populate completion model
    task_helper.run(self.__populate_completion, (value,))
    self.__cancellable.cancel()
    self.__cancellable.reset()
    parsed = urlparse(value)
    self.__show_related_view(value)
    network = Gio.NetworkMonitor.get_default().get_network_available()
    is_uri = parsed.scheme in ["about", "http", "file", "https", "populars"]
    if is_uri:
        self.__popover.set_search_text(parsed.netloc + parsed.path)
    else:
        self.__popover.set_search_text(value)
    parsed = urlparse(self.__uri)
    if value and not is_uri and network:
        El().search.search_suggestions(value,
                                       self.__cancellable,
                                       self.__search_suggestion)
    self.__entry.set_icon_from_icon_name(Gtk.EntryIconPosition.PRIMARY,
                                         "system-search-symbolic")
    self.__entry.set_icon_tooltip_text(Gtk.EntryIconPosition.PRIMARY, "")
def quit(self, vacuum=False):
    """
        Quit application
        @param vacuum as bool
    """
    self.__save_state()
    for window in self.windows:
        window.hide()
    # Stop pending tasks
    self.download_manager.cancel()
    self.adblock.stop()
    # Clear history
    active_id = str(self.settings.get_enum("history-storage"))
    if active_id != TimeSpan.FOREVER:
        atime = time()
        if active_id != TimeSpan.NEVER:
            atime -= TimeSpanValues[active_id] / 1000000
        self.history.clear_to(int(atime))
    if self.sync_worker is not None:
        if self.sync_worker.syncing:
            self.sync_worker.stop()
        self.sync_worker.save_pendings()
    if vacuum:
        task_helper = TaskHelper()
        task_helper.run(self.__vacuum,
                        callback=(lambda x: Gio.Application.quit(self), ))
    else:
        Gio.Application.quit(self)
def quit(self, vacuum=False):
    """
        Quit application
        @param vacuum as bool
    """
    # Save webpage state
    self.__save_state()
    # Stop pending tasks
    self.download_manager.cancel()
    self.adblock.stop()
    # Clear history
    active_id = str(self.settings.get_enum("history-storage"))
    if active_id != TimeSpan.FOREVER:
        atime = time()
        if active_id != TimeSpan.NEVER:
            atime -= TimeSpanValues[active_id] / 1000000
        self.history.clear_to(int(atime))
    # If sync is running, to avoid db lock, we do not vacuum
    if self.sync_worker is not None and self.sync_worker.syncing:
        self.sync_worker.stop()
        Gio.Application.quit(self)
    elif vacuum:
        task_helper = TaskHelper()
        task_helper.run(self.__vacuum, (),
                        lambda x: Gio.Application.quit(self))
    else:
        Gio.Application.quit(self)
def remove_from_passwords(self, uuid):
    """
        Remove password from passwords collection
        @param uuid as str
    """
    if self.__username and self.__password:
        task_helper = TaskHelper()
        task_helper.run(self.__remove_from_passwords, uuid)
def remove_from_bookmarks(self, guid):
    """
        Remove bookmark guid from remote bookmarks
        @param guid as str
    """
    if self.__username and self.__password:
        task_helper = TaskHelper()
        task_helper.run(self.__remove_from_bookmarks, guid)
def remove_from_history(self, guid):
    """
        Remove history guid from remote history
        @param guid as str
    """
    if self.__username and self.__password:
        task_helper = TaskHelper()
        task_helper.run(self.__remove_from_history, guid)
def push_bookmark(self, bookmark_id):
    """
        Push bookmark id
        @param bookmark_id as int
    """
    if self.__username and self.__password:
        task_helper = TaskHelper()
        task_helper.run(self.__push_bookmark, bookmark_id)
def sync(self):
    """
        Start syncing
    """
    if Gio.NetworkMonitor.get_default().get_network_available() and\
            self.__username and self.__password and not self.syncing:
        task_helper = TaskHelper()
        task_helper.run(self.__sync)
def push_history(self, history_id):
    """
        Push history id
        @param history_id as int
    """
    if self.__username and self.__password:
        task_helper = TaskHelper()
        task_helper.run(self.__push_history, history_id)
def remove_from_history(self, guid):
    """
        Remove history guid from remote history
        @param guid as str
    """
    if Gio.NetworkMonitor.get_default().get_network_available():
        task_helper = TaskHelper()
        task_helper.run(self.__remove_from_history, (guid, ))
def push_history(self, history_ids):
    """
        Push history ids
        @param history_ids as [int]
    """
    if Gio.NetworkMonitor.get_default().get_network_available():
        task_helper = TaskHelper()
        task_helper.run(self.__push_history, (history_ids, ))
def remove_from_passwords(self, uuid):
    """
        Remove password from passwords collection
        @param uuid as str
    """
    if Gio.NetworkMonitor.get_default().get_network_available():
        task_helper = TaskHelper()
        task_helper.run(self.__remove_from_passwords, (uuid, ))
def _on_button_clicked(self, button):
    """
        Save visible images
        @param button as Gtk.Button
    """
    task_helper = TaskHelper()
    task_helper.run(self.__move_images)
    self.__spinner.start()
def remove_from_bookmarks(self, guid):
    """
        Remove bookmark guid from remote bookmarks
        @param guid as str
    """
    if Gio.NetworkMonitor.get_default().get_network_available():
        task_helper = TaskHelper()
        task_helper.run(self.__remove_from_bookmarks, (guid, ))
def push_history(self, history_ids):
    """
        Push history ids
        @param history_ids as [int]
    """
    if Gio.NetworkMonitor.get_default().get_network_available() and\
            self.__username and self.__password:
        task_helper = TaskHelper()
        task_helper.run(self.__push_history, history_ids)
def sync(self, loop=False, first_sync=False):
    """
        Start syncing, you need to check sync_status property
        @param loop as bool -> for GLib.timeout_add()
        @param first_sync as bool
    """
    if Gio.NetworkMonitor.get_default().get_network_available() and\
            self.__username and self.__password and not self.syncing:
        task_helper = TaskHelper()
        task_helper.run(self.__sync, first_sync)
    return loop
def sync(self, loop=False, first_sync=False):
    """
        Start syncing, you need to check sync_status property
        @param loop as bool -> for GLib.timeout_add()
        @param first_sync as bool
    """
    if self.syncing or\
            not Gio.NetworkMonitor.get_default().get_network_available():
        return
    task_helper = TaskHelper()
    task_helper.run(self.__sync, (first_sync, ))
    return loop
def push_password(self, user_form_name, user_form_value, pass_form_name,
                  pass_form_value, uri, form_uri, uuid):
    """
        Push password
        @param user_form_name as str
        @param user_form_value as str
        @param pass_form_name as str
        @param pass_form_value as str
        @param uri as str
        @param form_uri as str
        @param uuid as str
    """
    if Gio.NetworkMonitor.get_default().get_network_available():
        task_helper = TaskHelper()
        task_helper.run(self.__push_password,
                        (user_form_name, user_form_value, pass_form_name,
                         pass_form_value, uri, form_uri, uuid))
def push_password(self, user_form_name, user_form_value, pass_form_name,
                  pass_form_value, uri, form_uri, uuid):
    """
        Push password
        @param user_form_name as str
        @param user_form_value as str
        @param pass_form_name as str
        @param pass_form_value as str
        @param uri as str
        @param form_uri as str
        @param uuid as str
    """
    if self.__username and self.__password:
        task_helper = TaskHelper()
        task_helper.run(self.__push_password,
                        user_form_name, user_form_value, pass_form_name,
                        pass_form_value, uri, form_uri, uuid)
def _on_infobar_response(self, infobar, response_id):
    """
        Handle user response and remove wanted history ids
        @param infobar as Gtk.InfoBar
        @param response_id as int
    """
    if response_id == 1:
        active_id = self.__infobar_select.get_active_id()
        if active_id == TimeSpan.CUSTOM:
            (year, month, day) = self.__calendar.get_date()
            date = "%02d/%02d/%s" % (day, month + 1, year)
            atime = mktime(
                datetime.strptime(date, "%d/%m/%Y").timetuple())
        else:
            atime = int(time() - TimeSpanValues[active_id] / 1000000)
        task_helper = TaskHelper()
        task_helper.run(self.__clear_history, (atime, ))
    infobar.hide()
def _on_sync_button_clicked(self, button):
    """
        Connect to Mozilla Sync to get tokens
        @param button as Gtk.Button
    """
    icon_name = self.__result_image.get_icon_name()[0]
    if icon_name == "network-transmit-receive-symbolic":
        El().sync_worker.stop(True)
        El().sync_worker.delete_secret()
        self.__setup_sync_button(False)
    else:
        El().sync_worker.delete_secret()
        self.__result_label.set_text(_("Connecting…"))
        button.set_sensitive(False)
        self.__result_image.set_from_icon_name("content-loading-symbolic",
                                               Gtk.IconSize.MENU)
        task_helper = TaskHelper()
        task_helper.run(self.__connect_mozilla_sync,
                        (self.__login_entry.get_text(),
                         self.__password_entry.get_text()))
def __on_entry_changed_timeout(self, entry, value):
    """
        Update popover search if needed
        @param entry as Gtk.Entry
        @param value as str
    """
    task_helper = TaskHelper()
    self.__entry_changed_id = None
    self.__window.container.current.webview.add_text_entry(value)
    # Populate completion model
    task_helper.run(self.__populate_completion, value)
    self.__cancellable.cancel()
    self.__cancellable.reset()
    parsed = urlparse(value)
    network = Gio.NetworkMonitor.get_default().get_network_available()
    is_uri = parsed.scheme in ["about", "http", "file", "https", "populars"]
    if is_uri:
        self.__popover.set_search_text(parsed.netloc + parsed.path)
    else:
        self.__popover.set_search_text(value)
    # Remove any pending suggestion search
    if self.__suggestion_id is not None:
        GLib.source_remove(self.__suggestion_id)
        self.__suggestion_id = None
    # Search for suggestions if needed
    if App().settings.get_value("enable-suggestions") and\
            value and not is_uri and network:
        self.__suggestion_id = GLib.timeout_add(
            50, self.__on_suggestion_timeout, value)
    task_helper.run(self.__search_in_current_views, value)
    self.__entry.set_icon_from_icon_name(Gtk.EntryIconPosition.PRIMARY,
                                         "system-search-symbolic")
    self.__entry.set_icon_tooltip_text(Gtk.EntryIconPosition.PRIMARY, "")
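# All of the snippets above hand work off through TaskHelper().run(). The
# helper itself is not part of this section; the following is only a minimal
# sketch of what such a helper could look like, assuming run(command, *args,
# callback=None) executes the command in a background thread and dispatches an
# optional (function, *extra_args) callback tuple back on the GLib main loop.
# Names and behaviour here are assumptions for illustration, not Eolie's code.
from threading import Thread

from gi.repository import GLib


class TaskHelperSketch:
    """
        Minimal sketch of a threaded task helper (assumption, not Eolie's)
    """

    def run(self, command, *args, callback=None):
        """
            Run command with args in a background thread
            @param command as function
            @param callback as (function, *extra_args) or None
        """
        thread = Thread(target=self.__run, args=(command, args, callback))
        thread.daemon = True
        thread.start()

    def __run(self, command, args, callback):
        try:
            result = command(*args)
        except Exception as e:
            print("TaskHelperSketch::__run():", e)
            result = None
        if callback is not None:
            # First tuple item is the function, the rest are extra arguments
            GLib.idle_add(callback[0], result, *callback[1:])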
class DatabasePhishing: """ Phishing database """ DB_PATH = "%s/phishing.db" % EOLIE_DATA_PATH __URI = "http://data.phishtank.com/data/online-valid.json" # SQLite documentation: # In SQLite, a column with type INTEGER PRIMARY KEY # is an alias for the ROWID. # Here, we define an id INT PRIMARY KEY but never feed it, # this make VACUUM not destroy rowids... __create_phishing = '''CREATE TABLE phishing ( id INTEGER PRIMARY KEY, uri TEXT NOT NULL, mtime INT NOT NULL )''' def __init__(self): """ Create database tables or manage update if needed """ self.__cancellable = Gio.Cancellable.new() self.__task_helper = TaskHelper() # Lazy loading if not empty if not GLib.file_test(self.DB_PATH, GLib.FileTest.IS_REGULAR): try: if not GLib.file_test(EOLIE_DATA_PATH, GLib.FileTest.IS_DIR): GLib.mkdir_with_parents(EOLIE_DATA_PATH, 0o0750) # Create db schema with SqlCursor(self) as sql: sql.execute(self.__create_phishing) sql.commit() except Exception as e: print("DatabasePhishing::__init__(): %s" % e) def update(self): """ Update database """ if not Gio.NetworkMonitor.get_default().get_network_available(): return # Get in db mtime # Only update if filters older than one day mtime = 0 with SqlCursor(self) as sql: result = sql.execute("SELECT mtime FROM phishing LIMIT 1") v = result.fetchone() if v is not None: mtime = v[0] self.__mtime = int(time()) if self.__mtime - mtime < 86400: return # Update phishing db self.__cancellable.reset() self.__on_load_uri_content(None, False, b"", [self.__URI]) def is_phishing(self, uri): """ True if uri is phishing @param uri as str @return bool """ uri = uri.rstrip("/") try: with SqlCursor(self) as sql: result = sql.execute( "SELECT uri FROM phishing\ WHERE uri=?", (uri, )) v = result.fetchone() return v is not None except Exception as e: print("DatabasePhishing::is_phishing():", e) return False def stop(self): """ Stop update """ self.__cancellable.cancel() self.__stop = True def get_cursor(self): """ Return a new sqlite cursor """ try: c = sqlite3.connect(self.DB_PATH, 600.0) return c except Exception as e: print(e) exit(-1) ####################### # PRIVATE # ####################### def __save_rules(self, rules, uris): """ Save rules to db @param rules as bytes @param uris as [str] """ SqlCursor.add(self) result = rules.decode('utf-8') j = json.loads(result) with SqlCursor(self) as sql: count = 0 for item in j: if self.__cancellable.is_cancelled(): raise IOError("Cancelled") sql.execute( "INSERT INTO phishing\ (uri, mtime) VALUES (?, ?)", (item["url"].rstrip("/"), self.__mtime)) count += 1 if count == 1000: sql.commit() count = 0 sql.commit() # We are the last call to save_rules()? # Delete removed entries and commit if not uris: with SqlCursor(self) as sql: sql.execute( "DELETE FROM phishing\ WHERE mtime!=?", (self.__mtime, )) sql.commit() SqlCursor.remove(self) def __on_load_uri_content(self, uri, status, content, uris): """ Load pending uris @param uri as str @param status as bool @param content as bytes @param uris as [str] """ if status: self.__task_helper.run(self.__save_rules, content, uris) if uris: uri = uris.pop(0) self.__task_helper.load_uri_content(uri, self.__cancellable, self.__on_load_uri_content, uris)
class DatabaseAdblock: """ Eolie adblock db """ __DB_PATH = "%s/adblock.db" % EOLIE_DATA_PATH __URIS = ["https://adaway.org/hosts.txt", "https://pgl.yoyo.org/adservers/serverlist.php?" + "hostformat=hosts&showintro=0&startdate%5Bday%5D=" + "&startdate%5Bmonth%5D=&startdate%5Byear%5D=", "http://winhelp2002.mvps.org/hosts.txt", "http://hosts-file.net/ad_servers.txt", "https://pgl.yoyo.org/adservers/serverlist.php?" "hostformat=hosts&showintro=0&mimetype=plaintext"] __CSS_URIS = ["https://easylist-downloads.adblockplus.org/easylist.txt"] __CSS_LOCALIZED_URIS = { "bg": "http://stanev.org/abp/adblock_bg.txt", "zh": "https://easylist-downloads.adblockplus.org/easylistchina.txt", "sk": "https://raw.github.com/tomasko126/" + "easylistczechandslovak/master/filters.txt", "cs": "https://raw.github.com/tomasko126/" + "easylistczechandslovak/master/filters.txt", "nl": "https://easylist-downloads.adblockplus.org/easylistdutch.txt", "de": "https://easylist-downloads.adblockplus.org/easylistgermany.txt", "he": "https://raw.githubusercontent.com/easylist/" + "EasyListHebrew/master/EasyListHebrew.txt", "it": "https://easylist-downloads.adblockplus.org/easylistitaly.txt", "lt": "http://margevicius.lt/easylistlithuania.txt", "es": "https://easylist-downloads.adblockplus.org/easylistspanish.txt", "lv": "https://notabug.org/latvian-list/" + "adblock-latvian/raw/master/lists/latvian-list.txt", "ar": "https://easylist-downloads.adblockplus.org/Liste_AR.txt", "fr": "https://easylist-downloads.adblockplus.org/liste_fr.txt", "ro": "http://www.zoso.ro/pages/rolist.txt", "ru": "https://easylist-downloads.adblockplus.org/advblock.txt", "ja": "http://bit.ly/11QrCfx", "fi": "https://adb.juvander.net/Finland_adb.txt", "cz": "http://adblock.dajbych.net/adblock.txt", "et": "http://gurud.ee/ab.txt", "hu": "https://raw.githubusercontent.com/szpeter80/" + "hufilter/master/hufilter.txt"} __SCHEMA_VERSION = 0 __UPDATE = 172800 __SPECIAL_CHARS = r"([.$+?{}()\[\]\\])" __REPLACE_CHARS = {"^": "(?:[^\w\d_\-.%]|$)", "*": ".*"} # SQLite documentation: # In SQLite, a column with type INTEGER PRIMARY KEY # is an alias for the ROWID. # Here, we define an id INT PRIMARY KEY but never feed it, # this make VACUUM not destroy rowids... 
__create_adblock = """CREATE TABLE adblock ( id INTEGER PRIMARY KEY, netloc TEXT TEXT NOT NULL, mtime INT NOT NULL )""" __create_adblock_re = """CREATE TABLE adblock_re ( id INTEGER PRIMARY KEY, regex TEXT NOT NULL, mtime INT NOT NULL )""" __create_adblock_re_domain = """CREATE TABLE adblock_re_domain ( id INTEGER PRIMARY KEY, domain TEXT NOT NULL, regex TEXT NOT NULL, mtime INT NOT NULL )""" __create_adblock_re_domain_ex = """CREATE TABLE adblock_re_domain_ex ( id INTEGER PRIMARY KEY, domain TEXT NOT NULL, regex TEXT NOT NULL, mtime INT NOT NULL )""" __create_adblock_css = """CREATE TABLE adblock_css ( id INTEGER PRIMARY KEY, rule TEXT NOT NULL, whitelist TEXT DEFAULT '', blacklist TEXT DEFAULT '', mtime INT NOT NULL )""" __create_adblock_cache = """CREATE TABLE adblock_cache ( id INTEGER PRIMARY KEY, allowed_uri TEXT NOT NULL )""" __create_adblock_idx = """CREATE UNIQUE INDEX idx_adblock ON adblock( netloc)""" __create_adblock_re_idx = """CREATE UNIQUE INDEX idx_adblock_re ON adblock_re(regex)""" __create_adblock_re_domain_idx = """CREATE INDEX idx_adblock_re_domain ON adblock_re_domain(domain)""" __create_adblock_re_domain_regex_idx = """CREATE INDEX idx_adblock_re_domain_regex ON adblock_re_domain(regex)""" __create_adblock_re_domain_ex_idx = """CREATE INDEX idx_adblock_re_domain_ex ON adblock_re_domain(domain)""" __create_adblock_re_domain_regex_ex_idx = """CREATE INDEX idx_adblock_re_domain_regex_ex ON adblock_re_domain(regex)""" __create_adblock_css_black_idx = """CREATE INDEX idx_adblock_css_black ON adblock_css( blacklist)""" __create_adblock_css_white_idx = """CREATE INDEX idx_adblock_css_white ON adblock_css( whitelist)""" __create_adblock_cache_idx = """CREATE UNIQUE INDEX idx_adblock_cache ON adblock_cache( allowed_uri)""" def __init__(self): """ Create database tables or manage update if needed """ self.thread_lock = Lock() self.__cancellable = Gio.Cancellable.new() self.__task_helper = TaskHelper() self.__adblock_mtime = int(time()) self.__regex = None def create_db(self): """ Create databse """ if not GLib.file_test(EOLIE_DATA_PATH, GLib.FileTest.IS_DIR): GLib.mkdir_with_parents(EOLIE_DATA_PATH, 0o0750) # If DB schema changed, remove it if GLib.file_test(self.__DB_PATH, GLib.FileTest.IS_REGULAR): with SqlCursor(self) as sql: result = sql.execute("PRAGMA user_version") v = result.fetchone() if v is None or v[0] != self.__SCHEMA_VERSION: f = Gio.File.new_for_path(self.__DB_PATH) f.delete() if not GLib.file_test(self.__DB_PATH, GLib.FileTest.IS_REGULAR): try: # Create db schema with SqlCursor(self) as sql: sql.execute(self.__create_adblock) sql.execute(self.__create_adblock_re) sql.execute(self.__create_adblock_re_domain) sql.execute(self.__create_adblock_re_domain_ex) sql.execute(self.__create_adblock_css) sql.execute(self.__create_adblock_cache) sql.execute(self.__create_adblock_idx) sql.execute(self.__create_adblock_re_idx) sql.execute(self.__create_adblock_re_domain_idx) sql.execute(self.__create_adblock_re_domain_regex_idx) sql.execute(self.__create_adblock_re_domain_ex_idx) sql.execute(self.__create_adblock_re_domain_regex_ex_idx) sql.execute(self.__create_adblock_css_black_idx) sql.execute(self.__create_adblock_css_white_idx) sql.execute(self.__create_adblock_cache_idx) sql.execute("PRAGMA user_version=%s" % self.__SCHEMA_VERSION) except Exception as e: Logger.error("DatabaseAdblock::__init__(): %s", e) def update(self): """ Update database """ if not Gio.NetworkMonitor.get_default().get_network_available(): return # Update adblock_js repo git = 
GLib.find_program_in_path("git") if git is None: Logger.info(_("For stronger ad blocking, install git command")) else: if GLib.file_test(ADBLOCK_JS, GLib.FileTest.IS_DIR): argv = [git, "-C", ADBLOCK_JS, "pull", "https://gitlab.gnome.org/gnumdk/eolie-adblock.git"] else: argv = [git, "clone", "https://gitlab.gnome.org/gnumdk/eolie-adblock.git", ADBLOCK_JS] (pid, a1, a2, a3) = GLib.spawn_async( argv, flags=GLib.SpawnFlags.STDOUT_TO_DEV_NULL) GLib.spawn_close_pid(pid) # DB version is last successful sync mtime try: version = load(open(EOLIE_DATA_PATH + "/adblock.bin", "rb")) except: version = 0 self.__cancellable.reset() if self.__adblock_mtime - version > self.__UPDATE: # Update host rules uris = list(self.__URIS) locales = GLib.get_language_names() user_locale = locales[0].split("_")[0] try: uris += self.__CSS_URIS +\ [self.__CSS_LOCALIZED_URIS[user_locale]] except: uris += self.__CSS_URIS uri = uris.pop(0) self.__task_helper.load_uri_content(uri, self.__cancellable, self.__on_load_uri_content, uris) def stop(self): """ Stop update """ self.__cancellable.cancel() def get_default_css_rules(self): """ Return default css rules """ rules = "" with SqlCursor(self) as sql: request = "SELECT rule FROM adblock_css WHERE\ blacklist='' AND whitelist=''" result = sql.execute(request) for rule in list(itertools.chain(*result)): rules += "%s,\n" % rule return rules[:-2] + "{display: none !important;}" def get_css_rules(self, uri): """ Return css rules for uri @return str """ rules = "" parsed = urlparse(uri) if parsed.scheme not in ["http", "https"]: return "" netloc = remove_www(parsed.netloc) with SqlCursor(self) as sql: request = "SELECT rule FROM adblock_css WHERE\ (blacklist!='' AND blacklist NOT LIKE ?) OR\ whitelist LIKE ?" result = sql.execute(request, ("%" + netloc + "%", "%" + netloc + "%")) for rule in list(itertools.chain(*result)): rules += "%s,\n" % rule return rules[:-2] + "{display: none !important;}" def is_netloc_blocked(self, netloc): """ Return True if netloc is blocked @param netloc as str @return bool """ try: with SqlCursor(self) as sql: result = sql.execute("SELECT mtime FROM adblock\ WHERE netloc=?", (netloc,)) v = result.fetchone() return v is not None except Exception as e: Logger.error("DatabaseAdblock::is_netloc_blocked(): %s", e) return False def is_uri_blocked(self, uri, netloc): """ Return True if uri is blocked @param uri as str @param netloc as str @return bool """ # We cache result for allowed uris # because regex are quite slow in python with SqlCursor(self) as sql: result = sql.execute("SELECT allowed_uri\ FROM adblock_cache\ WHERE allowed_uri=?", (uri,)) v = result.fetchone() if v is None: # Search in main regexes if self.__regex is None: request = "SELECT regex FROM adblock_re" result = sql.execute(request) rules = list(itertools.chain(*result)) if rules: regexes = "|".join(regex for regex in rules) self.__regex = re.compile(regexes) if self.__regex is not None: blocked_re = bool(self.__regex.search(uri)) else: blocked_re = False # Find in domain regexes request = "SELECT regex FROM adblock_re_domain\ WHERE domain=?" result = sql.execute(request, (netloc,)) rules = list(itertools.chain(*result)) if rules: regexes = "|".join(regex for regex in rules) blocked_re_domain = bool(re.search(regexes, uri)) else: blocked_re_domain = False # If previous regexes blocked uri, check for an exception if blocked_re_domain: request = "SELECT regex FROM adblock_re_domain_ex\ WHERE domain=?" 
result = sql.execute(request, (netloc,)) rules = list(itertools.chain(*result)) if rules: regexes = "|".join(regex for regex in rules) if bool(re.search(regexes, uri)): blocked_re_domain = False if not blocked_re and not blocked_re_domain: sql.execute("INSERT INTO adblock_cache\ (allowed_uri) VALUES (?)", (uri,)) return False else: return True else: return False def get_cursor(self): """ Return a new sqlite cursor """ try: c = sqlite3.connect(self.__DB_PATH, 600.0) return c except Exception as e: Logger.error("DatabaseAdblock::get_cursor(): %s", e) exit(-1) ####################### # PRIVATE # ####################### def __add_netloc(self, netloc): """ Add a new netloc @param netloc as str """ with SqlCursor(self) as sql: try: sql.execute("INSERT INTO adblock\ (netloc, mtime) VALUES (?, ?)", (netloc, self.__adblock_mtime)) except: sql.execute("UPDATE adblock SET mtime=?\ WHERE netloc=?", (self.__adblock_mtime, netloc)) def __add_regex(self, regex): """ Add a new regex @param regex as str """ with SqlCursor(self) as sql: try: sql.execute("INSERT INTO adblock_re\ (regex, mtime) VALUES (?, ?)", (regex, self.__adblock_mtime)) except: sql.execute("UPDATE adblock_re SET mtime=?\ WHERE regex=?", (self.__adblock_mtime, regex)) def __add_regex_domain(self, regex, domain): """ Add a new regex @param regex as str @param domain """ with SqlCursor(self) as sql: result = sql.execute("SELECT mtime FROM adblock_re_domain\ WHERE regex=? AND domain=?", (regex, domain)) v = result.fetchone() if v is None: sql.execute("INSERT INTO adblock_re_domain\ (regex, domain, mtime) VALUES (?, ?, ?)", (regex, domain, self.__adblock_mtime)) else: sql.execute("UPDATE adblock_re_domain SET mtime=?\ WHERE regex=? AND domain=?", (self.__adblock_mtime, regex, domain)) def __add_regex_domain_ex(self, regex, domain): """ Add a new exception regex @param regex as str @param domain """ with SqlCursor(self) as sql: result = sql.execute("SELECT mtime FROM adblock_re_domain_ex\ WHERE regex=? AND domain=?", (regex, domain)) v = result.fetchone() if v is None: sql.execute("INSERT INTO adblock_re_domain_ex\ (regex, domain, mtime) VALUES (?, ?, ?)", (regex, domain, self.__adblock_mtime)) else: sql.execute("UPDATE adblock_re_domain_ex SET mtime=?\ WHERE regex=? AND domain=?", (self.__adblock_mtime, regex, domain)) def __rule_to_regex(self, rule): """ Convert rule to regex @param rule as str @return regex as str """ try: # Do nothing if rule is already a regex if rule[0] == rule[-1] == "/": return rule[1:-1] rule = re.sub(self.__SPECIAL_CHARS, r"\\\1", rule) # Handle ^ separator character, *, etc... 
for key in self.__REPLACE_CHARS.keys(): rule = rule.replace(key, self.__REPLACE_CHARS[key]) # End of the address if rule[-1] == "|": rule = rule[:-1] + "$" # Start of the address if rule[0] == "|": rule = "^" + rule[1:] # Escape remaining | but not |$ => see self.__REPLACE_CHARS rule = re.sub("(\|)[^$]", r"\|", rule) return rule except Exception as e: Logger.error("DatabaseAdblock::__rule_to_regex(): %s", e) return None def __save_rules(self, rules): """ Save rules to db @param rules bytes """ SqlCursor.add(self) result = rules.decode('utf-8') count = 0 for line in result.split('\n'): SqlCursor.allow_thread_execution(self) if self.__cancellable.is_cancelled(): SqlCursor.remove(self) raise Exception("Cancelled") if line.startswith('#'): continue array = line.replace( ' ', '\t', 1).replace('\t', '@', 1).split('@') if len(array) <= 1: continue netloc = array[1].replace( ' ', '').replace('\r', '').split('#')[0] # Update entry if exists, create else if netloc != "localhost": Logger.debug("Add filter: %s", netloc) self.__add_netloc(netloc) count += 1 if count == 1000: SqlCursor.commit(self) # Do not flood sqlite, this allow webkit extension to run sleep(0.1) count = 0 SqlCursor.remove(self) def __save_css_default_rule(self, line): """ Save default (without blacklist, whitelist) rule to db @param line as str """ rule = line[2:] # Update entry if exists, create else with SqlCursor(self) as sql: try: sql.execute("INSERT INTO adblock_css\ (rule, mtime) VALUES (?, ?)", (rule, self.__adblock_mtime)) except: sql.execute("UPDATE adblock_css SET mtime=?\ WHERE rule=?", (self.__adblock_mtime, rule)) def __save_css_domain_rule(self, line): """ Save domain rule to db @param line as str """ whitelist = [] blacklist = [] (domains, rule) = line.split("##") for domain in domains.split(","): if domain.startswith("~"): blacklist.append(domain[1:]) else: whitelist.append(domain) str_whitelist = ",".join(whitelist) str_blacklist = ",".join(blacklist) with SqlCursor(self) as sql: result = sql.execute("SELECT mtime FROM adblock_css\ WHERE blacklist=? AND whitelist=?\ AND rule=?", (str_blacklist, str_whitelist, rule)) v = result.fetchone() if v is None: sql.execute("INSERT INTO adblock_css\ (rule, whitelist, blacklist, mtime)\ VALUES (?, ?, ?, ?)", (rule, str_whitelist, str_blacklist, self.__adblock_mtime)) else: sql.execute("UPDATE adblock_css SET mtime=?\ WHERE rule=? and blacklist=? 
and whitelist=?", (self.__adblock_mtime, rule, str_blacklist, str_whitelist)) def __save_css_exception(self, line): """ Add a new exception @param line as str """ (domain, rule) = line.split("#@#") with SqlCursor(self) as sql: result = sql.execute("SELECT rowid, blacklist FROM adblock_css\ WHERE rule=?", (rule,)) v = result.fetchone() if v is None: sql.execute("INSERT INTO adblock_css\ (rule, whitelist, blacklist, mtime)\ VALUES (?, ?, ?, ?)", (rule, "", domain, self.__adblock_mtime)) else: (rowid, blacklist) = v blacklist += ",%s" % domain sql.execute("UPDATE adblock_css SET blacklist=?\ WHERE rowid=?", (blacklist, rowid)) def __save_abp_rule(self, rule, exception): """ Save abp rule @param rule as str @param exception as bool """ # Simple host rule if rule[:2] == "||": if rule[-1] == "^" and not exception: self.__add_netloc(rule[2:-1]) else: regex = self.__rule_to_regex(rule[2:]) split = re.split("/|\^", rule[2:]) uri = "http://%s" % split[0] parsed = urlparse(uri) if parsed.netloc: if exception: self.__add_regex_domain_ex(regex, parsed.netloc) else: self.__add_regex_domain(regex, parsed.netloc) elif not exception: self.__add_regex(regex) elif not exception: regex = self.__rule_to_regex(rule) if regex is not None: self.__add_regex(regex) def __save_abp_rules(self, rules): """ Save rules to db @param rules as bytes """ SqlCursor.add(self) result = rules.decode("utf-8") count = 0 for line in result.split('\n'): SqlCursor.allow_thread_execution(self) if self.__cancellable.is_cancelled(): SqlCursor.remove(self) raise Exception("Cancelled") if "-abp-" in line or "$" in line or "!" in line or "[" in line: continue elif line.startswith("##"): self.__save_css_default_rule(line) elif "#@#" in line: self.__save_css_exception(line) elif "##" in line: self.__save_css_domain_rule(line) elif line.startswith("@@"): self.__save_abp_rule(line[2:], True) else: self.__save_abp_rule(line, False) Logger.debug("Add abp filter: %s", line) count += 1 if count == 1000: SqlCursor.commit(self) # Do not flood sqlite, this allow webkit extension to run sleep(0.1) count = 0 SqlCursor.remove(self) def __on_save_rules(self, result, uris=[]): """ Load next uri, if finished, load CSS rules @param result (unused) @param uris as [str] """ if self.__cancellable.is_cancelled(): return if uris: uri = uris.pop(0) self.__task_helper.load_uri_content(uri, self.__cancellable, self.__on_load_uri_content, uris) else: with SqlCursor(self) as sql: sql.execute("DELETE FROM adblock\ WHERE mtime!=?", (self.__adblock_mtime,)) sql.execute("DELETE FROM adblock_re\ WHERE mtime!=?", (self.__adblock_mtime,)) sql.execute("DELETE FROM adblock_re_domain\ WHERE mtime!=?", (self.__adblock_mtime,)) sql.execute("DELETE FROM adblock_re_domain_ex\ WHERE mtime!=?", (self.__adblock_mtime,)) sql.execute("DELETE FROM adblock_css\ WHERE mtime!=?", (self.__adblock_mtime,)) sql.execute("DELETE FROM adblock_cache") try: dump(self.__adblock_mtime, open(EOLIE_DATA_PATH + "/adblock.bin", "wb")) except Exception as e: Logger.error("DatabaseAdblock::__on_save_rules(): %s", e) def __on_load_uri_content(self, uri, status, content, uris): """ Save loaded values @param uri as str @param status as bool @param content as bytes @param uris as [str] """ Logger.debug("DatabaseAdblock::__on_load_uri_content(): %s", uri) if status: if uri in self.__URIS: self.__task_helper.run(self.__save_rules, content, callback=(self.__on_save_rules, uris)) else: self.__task_helper.run(self.__save_abp_rules, content, callback=(self.__on_save_rules, uris)) else: self.__on_save_rules(None, 
uris)
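# The __rule_to_regex() conversion above is easier to follow on a concrete
# input. This standalone sketch repeats the same steps on a sample Adblock
# Plus fragment; the constants mirror __SPECIAL_CHARS and __REPLACE_CHARS from
# the class, the sample rule is made up, and the final escaping of interior
# "|" characters is left out for brevity.
import re

SPECIAL_CHARS = r"([.$+?{}()\[\]\\])"
REPLACE_CHARS = {"^": r"(?:[^\w\d_\-.%]|$)", "*": ".*"}


def rule_to_regex(rule):
    # A rule wrapped in slashes is already a regex
    if rule[0] == rule[-1] == "/":
        return rule[1:-1]
    # Escape regex metacharacters appearing literally in the filter
    rule = re.sub(SPECIAL_CHARS, r"\\\1", rule)
    # Expand the ABP "^" separator placeholder and the "*" wildcard
    for key, value in REPLACE_CHARS.items():
        rule = rule.replace(key, value)
    # "|" at the end anchors the end of the address, at the start the beginning
    if rule[-1] == "|":
        rule = rule[:-1] + "$"
    if rule[0] == "|":
        rule = "^" + rule[1:]
    return rule


# "||ads.example.com^*/banner|" with the leading "||" already stripped,
# as __save_abp_rule() does before converting the remainder
print(rule_to_regex("ads.example.com^*/banner|"))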
class WebViewArtwork: """ Handle webview artwork: snapshot and favicon """ def __init__(self): """ Init class """ self.__helper = TaskHelper() self.__snapshot_id = None self.__save_favicon_timeout_id = None self.__cancellable = Gio.Cancellable() self.__initial_uri = None self.__favicon_width = {} self.__current_netloc = None self.connect("notify::uri", self.__on_uri_changed) def set_favicon(self): """ Set favicon based on current webview favicon """ if self.ephemeral or\ self.error or\ self.current_event != WebKit2.LoadEvent.FINISHED: return self.__set_favicon_from_surface(self.get_favicon(), self.uri, self.__initial_uri) def set_current_favicon(self): """ Set favicon based on current webview favicon Use this for JS update (do not update initial uri) """ self.__set_favicon_from_surface(self.get_favicon(), self.uri, None) ####################### # PROTECTED # ####################### def _on_load_changed(self, webview, event): """ Update sidebar/urlbar @param webview as WebView @param event as WebKit2.LoadEvent """ parsed = urlparse(self.uri) if event == WebKit2.LoadEvent.STARTED: self.__cancellable.cancel() self.__cancellable.reset() if parsed.scheme in ["http", "https"]: self.__initial_uri = self.uri.rstrip('/') else: self.__initial_uri = None elif event == WebKit2.LoadEvent.FINISHED: is_http = parsed.scheme in ["http", "https"] if self.__snapshot_id is not None: GLib.source_remove(self.__snapshot_id) self.__snapshot_id = GLib.timeout_add(2500, self.__set_snapshot, is_http) self.set_favicon() self.__current_netloc = parsed.netloc or None ####################### # PRIVATE # ####################### def __set_snapshot(self, save): """ Set webpage preview @param save as bool """ self.__snapshot_id = None self.get_snapshot(WebKit2.SnapshotRegion.FULL_DOCUMENT, WebKit2.SnapshotOptions.NONE, self.__cancellable, get_snapshot, self.__on_snapshot, save, True) def __set_favicon_from_surface(self, surface, uri, initial_uri): """ Set favicon for surface @param favicon_db as WebKit2.FaviconDatabase @param result as Gio.AsyncResult @param uri as str @param initial_uri as str """ resized = None # Save webview favicon if surface is not None: favicon_width = surface.get_width() if uri not in self.__favicon_width.keys() or\ favicon_width >= self.__favicon_width[uri]: if self.__save_favicon_timeout_id is not None: GLib.source_remove(self.__save_favicon_timeout_id) self.__save_favicon_timeout_id = None self.__favicon_width[uri] = favicon_width resized = resize_favicon(surface) favicon_type = "favicon" else: netloc = remove_www(urlparse(uri).netloc) if netloc: resized = App().art.get_favicon(uri, self.get_scale_factor()) if resized is None: resized = get_char_surface(netloc[0]) favicon_type = "favicon_alt" # We wait for a better favicon if resized is not None and uri == self.uri: self.__save_favicon_timeout_id = GLib.timeout_add( 500, self.__save_favicon_to_cache, resized, uri, initial_uri, favicon_type) def __save_favicon_to_cache(self, surface, uri, initial_uri, favicon_type): """ Save favicon to cache @param surface as cairo.Surface @param uri as str @param initial_uri as str @param favicon_type as str """ self.__save_favicon_timeout_id = None self.emit("favicon-changed", surface) # Save favicon for URI if not App().art.exists(uri, favicon_type): self.__helper.run(App().art.save_artwork, uri, surface, favicon_type) # Save favicon for initial URI if initial_uri is not None and\ not App().art.exists(initial_uri, favicon_type): striped_uri = uri.rstrip("/") if initial_uri != striped_uri: 
self.__helper.run(App().art.save_artwork, initial_uri, surface, favicon_type) def __on_uri_changed(self, webview, param): """ Handle JS updates @param webview as WebKit2.WebView @param param as GObject.ParamSpec """ if not webview.is_loading() and not webview.ephemeral: self.__initial_uri = None if self.__snapshot_id is not None: GLib.source_remove(self.__snapshot_id) self.__snapshot_id = GLib.timeout_add(2500, self.__set_snapshot, True) def __on_snapshot(self, surface, save, first_pass): """ Cache snapshot @param surface as cairo.Surface @param uri as str @param save as bool @param first_pass as bool """ # The 32767 limit on the width/height dimensions # of an image surface is new in cairo 1.10, # try with WebKit2.SnapshotRegion.VISIBLE if surface is None: if first_pass: self.get_snapshot(WebKit2.SnapshotRegion.VISIBLE, WebKit2.SnapshotOptions.NONE, self.__cancellable, get_snapshot, self.__on_snapshot, save, False) return self.emit("snapshot-changed", surface) if not save or self.error: return uri = self.uri # We also cache initial URI uris = [uri.rstrip("/")] if self.__initial_uri is not None and\ self.__initial_uri not in uris: uris.append(self.__initial_uri) for uri in uris: exists = App().art.exists(uri, "start") if not exists: App().art.save_artwork(uri, surface, "start")
class ImagesPopover(Gtk.Popover): """ Show images for page id """ def __init__(self, uri, page_id, window): """ Init popover @param uri as str @param page_id as int @param window as Window """ Gtk.Popover.__init__(self) self.set_modal(False) window.register(self) self.__cache_uris = [] self.__uri = uri self.__page_id = page_id self.__cancellable = Gio.Cancellable() self.__filter = "" builder = Gtk.Builder() builder.add_from_resource("/org/gnome/Eolie/PopoverImages.ui") builder.connect_signals(self) widget = builder.get_object("widget") self.__spinner = builder.get_object("spinner") self.__flowbox = builder.get_object("flowbox") self.__flowbox.set_filter_func(self.__filter_func) self.__entry = builder.get_object("entry") self.__button = builder.get_object("button") self.add(widget) if Gio.NetworkMonitor.get_default().get_network_available(): El().helper.call("GetImages", page_id, None, self.__on_get_images) (width, height) = El().active_window.get_size() self.set_size_request(width / 2, height / 1.5) self.connect("closed", self.__on_closed) self.__task_helper = TaskHelper() ####################### # PROTECTED # ####################### def _on_search_changed(self, entry): """ Filter flowbox @param entry as Gtk.Entry """ self.__filter = entry.get_text() self.__flowbox.invalidate_filter() def _on_button_clicked(self, button): """ Save visible images @param button as Gtk.Button """ task_helper = TaskHelper() task_helper.run(self.__move_images) self.__spinner.start() def _on_button_toggled(self, button): """ Cancel previous download """ self.__cancellable.cancel() self.__spinner.start() self.__button.set_sensitive(False) for child in self.__flowbox.get_children(): child.destroy() self.__links = button.get_active() if Gio.NetworkMonitor.get_default().get_network_available(): if button.get_active(): El().helper.call("GetImageLinks", self.__page_id, None, self.__on_get_images) else: El().helper.call("GetImages", self.__page_id, None, self.__on_get_images) ####################### # PRIVATE # ####################### def __filter_func(self, child): """ Filter child @param child as image """ if child.uri.find(self.__filter) != -1: return True def __add_image(self, uri): """ Add a child to flowbox @param uri as str """ image = Image(uri) image.show() self.__flowbox.add(image) def __move_images(self): """ Move image to download directory """ parsed = urlparse(self.__uri) directory_uri = El().settings.get_value('download-uri').get_string() if not directory_uri: directory = GLib.get_user_special_dir( GLib.UserDirectory.DIRECTORY_DOWNLOAD) directory_uri = GLib.filename_to_uri(directory, None) destination_uri = "%s/%s" % (directory_uri, parsed.netloc) directory = Gio.File.new_for_uri(destination_uri) if not directory.query_exists(): directory.make_directory_with_parents() for child in self.__flowbox.get_children(): if child.uri.find(self.__filter) != -1: encoded = sha256(child.uri.encode("utf-8")).hexdigest() child_basename = child.uri.split("/")[-1] filepath = "%s/%s" % (EOLIE_CACHE_PATH, encoded) s = Gio.File.new_for_path(filepath) if not s.query_exists(): continue d = Gio.File.new_for_uri("%s/%s" % (destination_uri, child_basename)) try: s.move(d, Gio.FileCopyFlags.OVERWRITE, None, None, None) except Exception as e: print("ImagesPopover::__move_images()", e) GLib.idle_add(self.hide) def __clean_cache(self): """ Clean the cache """ for uri in self.__cache_uris: encoded = sha256(uri.encode("utf-8")).hexdigest() filepath = "%s/%s" % (EOLIE_CACHE_PATH, encoded) f = Gio.File.new_for_path(filepath) try: if 
f.query_exists(): f.delete() except Exception as e: print("ImagesPopover::__clean_cache():", e) def __on_write_all_async(self, stream, result, uri): """ Add image @param stream as Gio.OutputStream @param result as Gio.AsyncResult @param uri as str """ try: stream.write_all_finish(result) self.__add_image(uri) self.__cache_uris.append(uri) except Exception as e: print("ImagesPopover::__on_write_all_async()", e) def __on_load_uri_content(self, uri, status, content, uris): """ Load pending uris @param uri as str @param status as bool @param content as bytes @param uris as [str] """ if status: encoded = sha256(uri.encode("utf-8")).hexdigest() filepath = "%s/%s" % (EOLIE_CACHE_PATH, encoded) f = Gio.File.new_for_path(filepath) stream = f.append_to(Gio.FileCreateFlags.REPLACE_DESTINATION, self.__cancellable) stream.write_all_async(content, GLib.PRIORITY_DEFAULT, self.__cancellable, self.__on_write_all_async, uri) if uris and not self.__cancellable.is_cancelled(): uri = uris.pop(0) self.__task_helper.load_uri_content(uri, self.__cancellable, self.__on_load_uri_content, uris) else: self.__spinner.stop() self.__button.set_sensitive(True) def __on_get_images(self, source, result): """ Get result and load pending uris @param source as GObject.Object @param result as Gio.AsyncResult """ uris = [] try: uris = source.call_finish(result)[0] except Exception as e: print("ImagesPopover::__on_get_images()", e) self.__cancellable.reset() self.__on_load_uri_content(None, False, b"", uris) def __on_closed(self, popover): """ Clean cache """ self.__spinner.stop() self.__cancellable.cancel() self.__task_helper.run(self.__clean_cache)
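# Both __move_images() and __clean_cache() above recover the cache file of an
# image from its URI. A minimal sketch of that mapping, assuming the same
# sha256-of-the-URI scheme; EOLIE_CACHE_PATH is a placeholder value here, the
# real one comes from Eolie's defines.
from hashlib import sha256

EOLIE_CACHE_PATH = "/tmp/eolie-cache"  # placeholder value, assumption


def cache_path_for_uri(uri):
    """
        Map an image URI to its on-disk cache path
        @param uri as str
        @return str
    """
    encoded = sha256(uri.encode("utf-8")).hexdigest()
    return "%s/%s" % (EOLIE_CACHE_PATH, encoded)


print(cache_path_for_uri("https://example.org/logo.png"))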
class DatabaseAdblock: """ Eolie adblock db """ DB_PATH = "%s/adblock.db" % EOLIE_LOCAL_PATH __URIS = [ "https://adaway.org/hosts.txt", "http://winhelp2002.mvps.org/hosts.txt", "http://hosts-file.net/ad_servers.txt", "https://pgl.yoyo.org/adservers/serverlist.php?" "hostformat=hosts&showintro=0&mimetype=plaintext" ] # SQLite documentation: # In SQLite, a column with type INTEGER PRIMARY KEY # is an alias for the ROWID. # Here, we define an id INT PRIMARY KEY but never feed it, # this make VACUUM not destroy rowids... __create_adblock = '''CREATE TABLE adblock ( id INTEGER PRIMARY KEY, dns TEXT NOT NULL, mtime INT NOT NULL )''' def __init__(self): """ Create database tables or manage update if needed """ self.__cancellable = Gio.Cancellable.new() self.__task_helper = TaskHelper() f = Gio.File.new_for_path(self.DB_PATH) # Lazy loading if not empty if not f.query_exists(): try: d = Gio.File.new_for_path(EOLIE_LOCAL_PATH) if not d.query_exists(): d.make_directory_with_parents() # Create db schema with SqlCursor(self) as sql: sql.execute(self.__create_adblock) sql.commit() except Exception as e: print("DatabaseAdblock::__init__(): %s" % e) def update(self): """ Update database """ if not Gio.NetworkMonitor.get_default().get_network_available(): return # Update adblock_js repo git = GLib.find_program_in_path("git") if git is None: print(_("For stronger ad blocking, install git command")) else: d = Gio.File.new_for_path(ADBLOCK_JS) if d.query_exists(): argv = [git, "-C", ADBLOCK_JS, "pull"] else: argv = [ git, "clone", "https://github.com/gnumdk/eolie-adblock.git", ADBLOCK_JS ] (pid, a1, a2, a3) = GLib.spawn_async(argv, flags=GLib.SpawnFlags.STDOUT_TO_DEV_NULL) GLib.spawn_close_pid(pid) # Get in db mtime # Only update if filters older than one week mtime = 0 with SqlCursor(self) as sql: result = sql.execute("SELECT mtime FROM adblock LIMIT 1") v = result.fetchone() if v is not None: mtime = v[0] self.__mtime = int(time()) if self.__mtime - mtime < 604800: return # Update adblock db self.__cancellable.reset() self.__on_load_uri_content(None, False, b"", self.__URIS) def stop(self): """ Stop update """ self.__cancellable.cancel() def is_blocked(self, uri): """ Return True if uri is blocked @param uri as str @return bool """ try: parse = urlparse(uri) with SqlCursor(self) as sql: result = sql.execute( "SELECT mtime FROM adblock\ WHERE dns=?", (parse.netloc, )) v = result.fetchone() return v is not None except Exception as e: print("DatabaseAdblock::is_blocked():", e) return False def get_cursor(self): """ Return a new sqlite cursor """ try: c = sqlite3.connect(self.DB_PATH, 600.0) return c except Exception as e: print(e) exit(-1) ####################### # PRIVATE # ####################### def __save_rules(self, rules, uris): """ Save rules to db @param rules as bytes @param uris as [str] """ SqlCursor.add(self) result = rules.decode('utf-8') count = 0 for line in result.split('\n'): if self.__cancellable.is_cancelled(): raise IOError("Cancelled") if line.startswith('#'): continue array = line.replace(' ', '\t', 1).replace('\t', '@', 1).split('@') if len(array) <= 1: continue dns = array[1].replace(' ', '').replace('\r', '').split('#')[0] # Update entry if exists, create else with SqlCursor(self) as sql: sql.execute( "INSERT INTO adblock\ (dns, mtime)\ VALUES (?, ?)", (dns, self.__mtime)) count += 1 if count == 1000: sql.commit() count = 0 # We are the last call to save_rules()? 
# Delete removed entries and commit if not uris: with SqlCursor(self) as sql: sql.execute( "DELETE FROM adblock\ WHERE mtime!=?", (self.__mtime, )) sql.commit() SqlCursor.remove(self) def __on_load_uri_content(self, uri, status, content, uris): """ Load pending uris @param uri as str @param status as bool @param content as bytes @param uris as [str] """ if status: self.__task_helper.run(self.__save_rules, (content, uris)) if uris: uri = uris.pop(0) self.__task_helper.load_uri_content(uri, self.__cancellable, self.__on_load_uri_content, uris)
class DatabasePhishing: """ Phishing database """ __DB_PATH = "%s/phishing.db" % EOLIE_DATA_PATH __URI = "http://data.phishtank.com/data/online-valid.json" __SCHEMA_VERSION = 0 __UPDATE = 172800 # SQLite documentation: # In SQLite, a column with type INTEGER PRIMARY KEY # is an alias for the ROWID. # Here, we define an id INT PRIMARY KEY but never feed it, # this make VACUUM not destroy rowids... __create_phishing = '''CREATE TABLE phishing ( id INTEGER PRIMARY KEY, uri TEXT NOT NULL, mtime INT NOT NULL )''' __create_phishing_idx = """CREATE UNIQUE INDEX idx_phishing ON phishing( uri)""" def __init__(self): """ Create database tables or manage update if needed """ self.thread_lock = Lock() self.__cancellable = Gio.Cancellable.new() self.__task_helper = TaskHelper() self.__phishing_mtime = int(time()) self.__regex = None def create_db(self): """ Create databse """ if not GLib.file_test(EOLIE_DATA_PATH, GLib.FileTest.IS_DIR): GLib.mkdir_with_parents(EOLIE_DATA_PATH, 0o0750) # If DB schema changed, remove it if GLib.file_test(self.__DB_PATH, GLib.FileTest.IS_REGULAR): with SqlCursor(self) as sql: result = sql.execute("PRAGMA user_version") v = result.fetchone() if v is None or v[0] != self.__SCHEMA_VERSION: f = Gio.File.new_for_path(self.__DB_PATH) f.delete() if not GLib.file_test(self.__DB_PATH, GLib.FileTest.IS_REGULAR): try: # Create db schema with SqlCursor(self) as sql: sql.execute(self.__create_phishing) sql.execute(self.__create_phishing_idx) sql.execute("PRAGMA user_version=%s" % self.__SCHEMA_VERSION) except Exception as e: Logger.error("DatabasePhishing::__init__(): %s", e) def update(self): """ Update database """ if not Gio.NetworkMonitor.get_default().get_network_available(): return # DB version is last successful sync mtime try: version = load(open(EOLIE_DATA_PATH + "/phishing.bin", "rb")) except: version = 0 self.__cancellable.reset() if self.__phishing_mtime - version > self.__UPDATE: self.__on_load_uri_content(None, False, b"", [self.__URI]) def is_phishing(self, uri): """ True if uri is phishing @param uri as str @return bool """ uri = uri.rstrip("/") try: with SqlCursor(self) as sql: result = sql.execute( "SELECT uri FROM phishing\ WHERE uri=?", (uri, )) v = result.fetchone() return v is not None except Exception as e: Logger.error("DatabasePhishing::is_phishing(): %s", e) return False def stop(self): """ Stop update """ self.__cancellable.cancel() self.__stop = True def get_cursor(self): """ Return a new sqlite cursor """ try: c = sqlite3.connect(self.__DB_PATH, 600.0) return c except Exception as e: Logger.error("DatabasePhishing::get_cursor(): %s", e) exit(-1) ####################### # PRIVATE # ####################### def __save_rules(self, rules, uris): """ Save rules to db @param rules as bytes @param uris as [str] """ SqlCursor.add(self) try: result = rules.decode('utf-8') j = json.loads(result) with SqlCursor(self) as sql: count = 0 for item in j: if self.__cancellable.is_cancelled(): raise IOError("Cancelled") uri = item["url"].rstrip("/") try: sql.execute( "INSERT INTO phishing\ (uri, mtime) VALUES (?, ?)", (uri, self.__phishing_mtime)) except: sql.execute( "UPDATE phishing set mtime=?\ WHERE uri=?", (self.__phishing_mtime, uri)) count += 1 if count == 1000: SqlCursor.commit(self) # Do not flood sqlite # this allow webkit extension to run sleep(0.1) count = 0 # We are the last call to save_rules()? 
# Delete removed entries and commit if not uris: with SqlCursor(self) as sql: sql.execute( "DELETE FROM phishing\ WHERE mtime!=?", (self.__phishing_mtime, )) try: dump(self.__phishing_mtime, open(EOLIE_DATA_PATH + "/phishing.bin", "wb")) except Exception as e: Logger.error("DatabasePhishing::__save_rules(): %s", e) except Exception as e: Logger.error("DatabasePhishing::__save_rules():%s -> %s", e, rules) SqlCursor.remove(self) def __on_load_uri_content(self, uri, status, content, uris): """ Load pending uris @param uri as str @param status as bool @param content as bytes @param uris as [str] """ if status: self.__task_helper.run(self.__save_rules, content, uris) if uris: uri = uris.pop(0) self.__task_helper.load_uri_content(uri, self.__cancellable, self.__on_load_uri_content, uris)
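# The phishing and adblock updates above share one refresh strategy: every row
# written during an update is stamped with the update's mtime, rows that
# already exist get their mtime bumped instead of being duplicated, and once
# the last list has been saved anything still carrying an older mtime is
# purged. A standalone sketch of that idea, using an in-memory database and
# made-up URIs:
import sqlite3
from time import time

db = sqlite3.connect(":memory:")
db.execute(
    "CREATE TABLE phishing (uri TEXT NOT NULL UNIQUE, mtime INT NOT NULL)")
mtime = int(time())

for uri in ["http://bad.example/a", "http://bad.example/b"]:
    try:
        db.execute("INSERT INTO phishing (uri, mtime) VALUES (?, ?)",
                   (uri, mtime))
    except sqlite3.IntegrityError:
        db.execute("UPDATE phishing SET mtime=? WHERE uri=?", (mtime, uri))

# Last call to save rules: drop entries that were not part of this update
db.execute("DELETE FROM phishing WHERE mtime!=?", (mtime,))
db.commit()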
def __init__(self, window): """ Init dialog @param window as Window """ self.__window = window self.__helper = PasswordsHelper() builder = Gtk.Builder() builder.add_from_resource("/org/gnome/Eolie/SettingsDialog.ui") self.__settings_dialog = builder.get_object("settings_dialog") self.__settings_dialog.set_transient_for(window) # self.__settings_dialog.connect("destroy", self.__on_destroy) if False: self.__settings_dialog.set_title(_("Preferences")) else: headerbar = builder.get_object("header_bar") headerbar.set_title(_("Preferences")) self.__settings_dialog.set_titlebar(headerbar) download_chooser = builder.get_object("download_chooser") dir_uri = App().settings.get_value("download-uri").get_string() if not dir_uri: directory = GLib.get_user_special_dir( GLib.UserDirectory.DIRECTORY_DOWNLOAD) if directory is not None: dir_uri = GLib.filename_to_uri(directory, None) if dir_uri: download_chooser.set_uri(dir_uri) else: download_chooser.set_uri("file://" + GLib.getenv("HOME")) open_downloads = builder.get_object("open_downloads_check") open_downloads.set_active(App().settings.get_value("open-downloads")) self.__start_page_uri = builder.get_object("start_page_uri") combo_start = builder.get_object("combo_start") start_page = App().settings.get_value("start-page").get_string() if start_page.startswith("http"): combo_start.set_active_id("address") self.__start_page_uri.set_text(start_page) self.__start_page_uri.show() else: combo_start.set_active_id(start_page) remember_session = builder.get_object("remember_sessions_check") remember_session.set_active( App().settings.get_value("remember-session")) suggestions = builder.get_object("suggestions_check") suggestions.set_active(App().settings.get_value("enable-suggestions")) enable_dev_tools = builder.get_object("dev_tools_check") enable_dev_tools.set_active( App().settings.get_value("developer-extras")) enable_plugins = builder.get_object("plugins_check") enable_plugins.set_active(App().settings.get_value("enable-plugins")) self.__fonts_grid = builder.get_object("fonts_grid") use_system_fonts = builder.get_object("system_fonts_check") use_system_fonts.set_active( App().settings.get_value("use-system-fonts")) self.__fonts_grid.set_sensitive( not App().settings.get_value("use-system-fonts")) sans_serif_button = builder.get_object("sans_serif_button") sans_serif_button.set_font_name( App().settings.get_value("font-sans-serif").get_string()) serif_button = builder.get_object("serif_button") serif_button.set_font_name( App().settings.get_value("font-serif").get_string()) monospace_button = builder.get_object("monospace_button") monospace_button.set_font_name( App().settings.get_value("font-monospace").get_string()) min_font_size_spin = builder.get_object("min_font_size_spin") min_font_size_spin.set_value( App().settings.get_value("min-font-size").get_int32()) monitor_model = get_current_monitor_model(window) zoom_levels = App().settings.get_value("default-zoom-level") wanted_zoom_level = 1.0 try: for zoom_level in zoom_levels: zoom_splited = zoom_level.split('@') if zoom_splited[0] == monitor_model: wanted_zoom_level = float(zoom_splited[1]) except: pass default_zoom_level = builder.get_object("default_zoom_level") percent_zoom = int(wanted_zoom_level * 100) default_zoom_level.set_value(percent_zoom) default_zoom_level.set_text("{} %".format(percent_zoom)) cookies_combo = builder.get_object("cookies_combo") storage = App().settings.get_enum("cookie-storage") cookies_combo.set_active_id(str(storage)) history_combo = builder.get_object("history_combo") 
storage = App().settings.get_enum("history-storage") history_combo.set_active_id(str(storage)) self.__populars_count = builder.get_object("populars_count") if start_page in ["popular_book", "popular_hist"]: self.__populars_count.show() max_popular_items = App().settings.get_value( "max-popular-items").get_int32() builder.get_object("popular_spin_button").set_value(max_popular_items) remember_passwords = builder.get_object("remember_passwords_check") remember_passwords.set_active( App().settings.get_value("remember-passwords")) dns_prediction_check = builder.get_object("dns_prediction_check") dns_prediction_check.set_active( App().settings.get_value("dns-prediction")) tracking_check = builder.get_object("tracking_check") tracking_check.set_active(App().settings.get_value("do-not-track")) self.__result_label = builder.get_object("result_label") self.__sync_button = builder.get_object("sync_button") self.__login_entry = builder.get_object("login_entry") self.__password_entry = builder.get_object("password_entry") self.__result_image = builder.get_object("result_image") builder.connect_signals(self) self.__helper.get_sync(self.__on_get_sync) task_helper = TaskHelper() task_helper.run(self.__get_sync_status)
class DatabaseAdblock: """ Eolie adblock db """ DB_PATH = "%s/adblock2.db" % EOLIE_DATA_PATH __URIS = [ "https://adaway.org/hosts.txt", "https://pgl.yoyo.org/adservers/serverlist.php?" + "hostformat=hosts&showintro=0&startdate%5Bday%5D=" + "&startdate%5Bmonth%5D=&startdate%5Byear%5D=", "http://winhelp2002.mvps.org/hosts.txt", "http://hosts-file.net/ad_servers.txt", "https://pgl.yoyo.org/adservers/serverlist.php?" "hostformat=hosts&showintro=0&mimetype=plaintext" ] __CSS_URIS = ["https://easylist-downloads.adblockplus.org/easylist.txt"] __CSS_LOCALIZED_URIS = { "bg": "http://stanev.org/abp/adblock_bg.txt", "zh": "https://easylist-downloads.adblockplus.org/easylistchina.txt", "sk": "https://raw.github.com/tomasko126/" + "easylistczechandslovak/master/filters.txt", "cs": "https://raw.github.com/tomasko126/" + "easylistczechandslovak/master/filters.txt", "nl": "https://easylist-downloads.adblockplus.org/easylistdutch.txt", "de": "https://easylist-downloads.adblockplus.org/easylistgermany.txt", "he": "https://raw.githubusercontent.com/easylist/" + "EasyListHebrew/master/EasyListHebrew.txt", "it": "https://easylist-downloads.adblockplus.org/easylistitaly.txt", "lt": "http://margevicius.lt/easylistlithuania.txt", "es": "https://easylist-downloads.adblockplus.org/easylistspanish.txt", "lv": "https://notabug.org/latvian-list/" + "adblock-latvian/raw/master/lists/latvian-list.txt", "ar": "https://easylist-downloads.adblockplus.org/Liste_AR.txt", "fr": "https://easylist-downloads.adblockplus.org/liste_fr.txt", "ro": "http://www.zoso.ro/pages/rolist.txt", "ru": "https://easylist-downloads.adblockplus.org/advblock.txt", "ja": "http://bit.ly/11QrCfx", "fi": "https://adb.juvander.net/Finland_adb.txt", "cz": "http://adblock.dajbych.net/adblock.txt", "et": "http://gurud.ee/ab.txt", "hu": "https://raw.githubusercontent.com/szpeter80/" + "hufilter/master/hufilter.txt" } __UPDATE = 172800 # SQLite documentation: # In SQLite, a column with type INTEGER PRIMARY KEY # is an alias for the ROWID. # Here, we define an id INT PRIMARY KEY but never feed it, # this make VACUUM not destroy rowids... 
__create_adblock = '''CREATE TABLE adblock ( id INTEGER PRIMARY KEY, dns TEXT NOT NULL, mtime INT NOT NULL )''' __create_adblock_css = '''CREATE TABLE adblock_css ( id INTEGER PRIMARY KEY, name TEXT NOT NULL, whitelist TEXT DEFAULT "", blacklist TEXT DEFAULT "", mtime INT NOT NULL )''' def __init__(self): """ Create database tables or manage update if needed """ self.__cancellable = Gio.Cancellable.new() self.__task_helper = TaskHelper() self.__adblock_mtime = int(time()) # Lazy loading if not empty if not GLib.file_test(self.DB_PATH, GLib.FileTest.IS_REGULAR): try: if not GLib.file_test(EOLIE_DATA_PATH, GLib.FileTest.IS_DIR): GLib.mkdir_with_parents(EOLIE_DATA_PATH, 0o0750) # Create db schema with SqlCursor(self) as sql: sql.execute(self.__create_adblock) sql.execute(self.__create_adblock_css) sql.commit() except Exception as e: print("DatabaseAdblock::__init__(): %s" % e) def update(self): """ Update database """ if not Gio.NetworkMonitor.get_default().get_network_available(): return # Update adblock_js repo git = GLib.find_program_in_path("git") if git is None: print(_("For stronger ad blocking, install git command")) else: if GLib.file_test(ADBLOCK_JS, GLib.FileTest.IS_DIR): argv = [git, "-C", ADBLOCK_JS, "pull"] else: argv = [ git, "clone", "https://gitlab.gnome.org/gnumdk/eolie-adblock.git", ADBLOCK_JS ] (pid, a1, a2, a3) = GLib.spawn_async(argv, flags=GLib.SpawnFlags.STDOUT_TO_DEV_NULL) GLib.spawn_close_pid(pid) # Check entries in DB, do we need to update? mtime = 0 with SqlCursor(self) as sql: result = sql.execute("SELECT mtime FROM adblock\ ORDER BY mtime LIMIT 1") v = result.fetchone() if v is not None: mtime = v[0] self.__cancellable.reset() if self.__adblock_mtime - mtime > self.__UPDATE: # Update host rules uris = list(self.__URIS) uri = uris.pop(0) self.__task_helper.load_uri_content(uri, self.__cancellable, self.__on_load_uri_content, uris) else: self.__on_save_rules() def stop(self): """ Stop update """ self.__cancellable.cancel() def get_default_css_rules(self): """ Return default css rules """ rules = "" with SqlCursor(self) as sql: request = "SELECT name FROM adblock_css WHERE\ blacklist='' AND whitelist=''" result = sql.execute(request) for name in list(itertools.chain(*result)): rules += "%s,\n" % name return rules[:-2] + "{display: none !important;}" def get_css_rules(self, uri): """ Return css rules for uri @return str """ rules = "" parsed = urlparse(uri) if parsed.scheme not in ["http", "https"]: return "" netloc = remove_www(parsed.netloc) with SqlCursor(self) as sql: request = "SELECT name FROM adblock_css WHERE\ (blacklist!='' AND blacklist!=?) OR whitelist=?" 
result = sql.execute(request, (netloc, netloc)) for name in list(itertools.chain(*result)): rules += "%s,\n" % name return rules[:-2] + "{display: none !important;}" def is_blocked(self, uri): """ Return True if uri is blocked @param uri as str @return bool """ try: parsed = urlparse(uri) if parsed.scheme not in ["http", "https"] or\ El().adblock_exceptions.find_parsed(parsed): return False with SqlCursor(self) as sql: result = sql.execute( "SELECT mtime FROM adblock\ WHERE dns=?", (parsed.netloc, )) v = result.fetchone() return v is not None except Exception as e: print("DatabaseAdblock::is_blocked():", e) return False def get_cursor(self): """ Return a new sqlite cursor """ try: c = sqlite3.connect(self.DB_PATH, 600.0) return c except Exception as e: print(e) exit(-1) ####################### # PRIVATE # ####################### def __save_rules(self, rules, uris): """ Save rules to db @param rules bytes @param uris as [str] """ SqlCursor.add(self) result = rules.decode('utf-8') count = 0 for line in result.split('\n'): if self.__cancellable.is_cancelled(): raise IOError("Cancelled") if line.startswith('#'): continue array = line.replace(' ', '\t', 1).replace('\t', '@', 1).split('@') if len(array) <= 1: continue dns = array[1].replace(' ', '').replace('\r', '').split('#')[0] # Update entry if exists, create else with SqlCursor(self) as sql: debug("Add filter: %s" % dns) sql.execute( "INSERT INTO adblock\ (dns, mtime) VALUES (?, ?)", (dns, self.__adblock_mtime)) count += 1 if count == 1000: sql.commit() count = 0 # We are the last call to save_rules()? # Delete removed entries and commit if not uris: with SqlCursor(self) as sql: sql.execute( "DELETE FROM adblock\ WHERE mtime!=?", (self.__adblock_mtime, )) sql.commit() SqlCursor.remove(self) def __save_css_default_rule(self, line): """ Save default (without blacklist, whitelist) rule to db @param line as str """ name = line[2:] # Update entry if exists, create else with SqlCursor(self) as sql: debug("Add filter: %s" % name) sql.execute( "INSERT INTO adblock_css\ (name, mtime) VALUES (?, ?)", (name, self.__adblock_mtime)) def __save_css_domain_rule(self, line): """ Save domain rule to db @param line as str """ whitelist = "" blacklist = "" (domains, name) = line.split("##") for domain in domains.split(","): if domain.startswith("~"): blacklist += "@%s@" % domain[1:] else: whitelist += domain with SqlCursor(self) as sql: debug("Add filter: %s" % name) sql.execute( "INSERT INTO adblock_css\ (name, whitelist, blacklist, mtime)\ VALUES (?, ?, ?, ?)", (name, whitelist, blacklist, self.__adblock_mtime)) def __save_css_rules(self, rules, uris): """ Save rules to db @param rules as bytes @param uris as [str] """ SqlCursor.add(self) result = rules.decode("utf-8") count = 0 for line in result.split('\n'): if self.__cancellable.is_cancelled(): raise IOError("Cancelled") if line.find("-abp-") != -1: continue elif line.startswith("##"): self.__save_css_default_rule(line) elif line.find("##") != -1: self.__save_css_domain_rule(line) count += 1 if count == 1000: with SqlCursor(self) as sql: sql.commit() count = 0 # We are the last rule # Delete old entries if not uris: with SqlCursor(self) as sql: sql.execute( "DELETE FROM adblock_css\ WHERE mtime!=?", (self.__adblock_mtime, )) sql.commit() SqlCursor.remove(self) def __on_save_css_rules(self, result, uris): """ Load next uri @param result as ?? 
@param uris as [str] """ if uris: uri = uris.pop(0) self.__task_helper.load_uri_content(uri, self.__cancellable, self.__on_load_uri_css_content, uris) def __on_load_uri_css_content(self, uri, status, content, uris): """ Load pending uris @param uri as str @param status as bool @param content as bytes @param uris as [str] """ if status: self.__task_helper.run(self.__save_css_rules, content, uris, callback=(self.__on_save_css_rules, uris)) def __on_save_rules(self, result=None, uris=[]): """ Load next uri, if finished, load CSS rules @param result as None @param uris as [str] """ if uris: uri = uris.pop(0) self.__task_helper.load_uri_content(uri, self.__cancellable, self.__on_load_uri_content, uris) else: # Check entries in DB, do we need to update? mtime = 0 with SqlCursor(self) as sql: result = sql.execute("SELECT mtime FROM adblock_css\ ORDER BY mtime LIMIT 1") v = result.fetchone() if v is not None: mtime = v[0] # We ignore update value from rules file if self.__adblock_mtime - mtime < self.__UPDATE: return locales = GLib.get_language_names() user_locale = locales[0].split("_")[0] try: uris = [self.__CSS_LOCALIZED_URIS[user_locale]] except: uris = [] uris += list(self.__CSS_URIS) uri = uris.pop(0) self.__task_helper.load_uri_content(uri, self.__cancellable, self.__on_load_uri_css_content, uris) def __on_load_uri_content(self, uri, status, content, uris): """ Save loaded values @param uri as str @param status as bool @param content as bytes @param uris as [str] """ if status: self.__task_helper.run(self.__save_rules, content, uris, callback=(self.__on_save_rules, uris))
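# Every database in this section downloads its filter lists with the same
# chaining pattern: update() seeds __on_load_uri_content(None, False, b"",
# uris) and each callback pops the next URI until the list is empty. The
# sketch below isolates that control flow; fetch() is an illustrative
# synchronous stand-in for TaskHelper.load_uri_content() and the URIs are
# made up.
def chain_downloads(uris, fetch, on_content_saved):
    """
        Download uris one after the other
        @param uris as [str]
        @param fetch as function(uri) -> (status as bool, content as bytes)
        @param on_content_saved as function(uri, status, content)
    """
    def on_load_uri_content(uri, status, content, pending):
        if uri is not None:
            on_content_saved(uri, status, content)
        if pending:
            next_uri = pending.pop(0)
            next_status, next_content = fetch(next_uri)  # async in Eolie
            on_load_uri_content(next_uri, next_status, next_content, pending)

    # Seed the chain exactly like the update() methods do
    on_load_uri_content(None, False, b"", list(uris))


chain_downloads(["https://example.org/a.txt", "https://example.org/b.txt"],
                lambda uri: (True, b"# sample rules"),
                lambda uri, status, content: print(uri, status, len(content)))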