def load_search_index():
    '''Load (building and caching if necessary) the map from lowercased
    name words to sets of unicode codepoints used by character search.'''
    ver = 1  # Increment this when you make any changes to the index
    name_map = {}
    path = os.path.join(cache_dir(), "unicode-name-index.pickle")
    if os.path.exists(path):
        with open(path, "rb") as f:
            name_map = cPickle.load(f)
    # A missing or stale version marker invalidates the cached index
    if name_map.pop("calibre-nm-version:", -1) != ver:
        name_map = {}
    if not name_map:
        name_map = defaultdict(set)
        from calibre.constants import ispy3
        if not ispy3:
            chr = unichr
        for x in xrange(1, 0x10FFFF + 1):
            for word in unicodedata.name(chr(x), "").split():
                name_map[word.lower()].add(x)
        from calibre.ebooks.html_entities import html5_entities
        for name, char in html5_entities.iteritems():
            try:
                name_map[name.lower()].add(ord(char))
            except TypeError:  # multi-character entity, has no single ord()
                continue
        name_map["nnbsp"].add(0x202F)
        name_map["calibre-nm-version:"] = ver
        # Fix: use a context manager so the cache file handle is closed
        # deterministically (the original leaked the handle returned by
        # open(path, "wb") into the cPickle.dump() call).
        with open(path, "wb") as f:
            cPickle.dump(dict(name_map), f, -1)
        del name_map["calibre-nm-version:"]
    return name_map
def create_profile(cache_name='', allow_js=False):
    '''Build a QWebEngineProfile for scraping: random Chrome user agent,
    plugins disabled, JS optional, and JS denied access to local files.'''
    from calibre.utils.random_ua import random_common_chrome_user_agent
    if cache_name:
        profile = QWebEngineProfile(cache_name, QApplication.instance())
        profile.setCachePath(os.path.join(cache_dir(), 'scraper', cache_name))
    else:
        profile = QWebEngineProfile(QApplication.instance())
    profile.setHttpUserAgent(random_common_chrome_user_agent())
    profile.setHttpCacheMaximumSize(0)  # managed by webengine
    settings = profile.settings()
    WA = QWebEngineSettings.WebAttribute
    settings.setAttribute(WA.PluginsEnabled, False)
    settings.setAttribute(WA.JavascriptEnabled, allow_js)
    settings.setUnknownUrlSchemePolicy(
        QWebEngineSettings.UnknownUrlSchemePolicy.DisallowUnknownUrlSchemes)
    settings.setAttribute(WA.JavascriptCanOpenWindows, False)
    settings.setAttribute(WA.JavascriptCanAccessClipboard, False)
    # ensure javascript cannot read from local files
    settings.setAttribute(WA.LocalContentCanAccessFileUrls, False)
    settings.setAttribute(WA.AllowWindowActivationFromJavaScript, False)
    js = P('scraper.js', allow_user_override=False, data=True).decode('utf-8')
    profile.token = secrets.token_hex()
    insert_scripts(profile, create_script('scraper.js', js.replace('TOKEN', profile.token)))
    return profile
def tdir_in_cache(base):
    '''
    Create a temp dir inside cache_dir/base. The created dir is robust
    against application crashes. i.e. it will be cleaned up the next time
    the application starts, even if it was left behind by a previous crash.
    '''
    container = os.path.join(os.path.realpath(cache_dir()), base)
    try:
        os.makedirs(container)
    except EnvironmentError as e:
        if e.errno != errno.EEXIST:
            raise
    global_lock = retry_lock_tdir(container)
    try:
        # Scan each container only once per process, removing leftovers
        # from previously crashed runs
        if container not in tdir_in_cache.scanned:
            tdir_in_cache.scanned.add(container)
            try:
                clean_tdirs_in(container)
            except Exception:
                import traceback
                traceback.print_exc()
        tdir = tempfile.mkdtemp(dir=container)
        lock_data = lock_tdir(tdir)
        atexit.register(remove_tdir, tdir, lock_data)
        result = os.path.join(tdir, 'a')
        os.mkdir(result)
        return result
    finally:
        unlock_file(global_lock)
def __init__(self, log, confirm_callback=None, prompt_callback=None,
             user_agent=USER_AGENT, enable_developer_tools=False, parent=None):
    # Set up a QWebPage with persistent storage in the cache dir and a
    # uniquely named JS<->Python bridge object.
    QWebPage.__init__(self, parent)
    self.log = log
    self.user_agent = user_agent or USER_AGENT
    self.confirm_callback = confirm_callback
    self.prompt_callback = prompt_callback
    self.setForwardUnsupportedContent(True)
    self.unsupportedContent.connect(self.on_unsupported_content)
    settings = self.settings()
    if enable_developer_tools:
        settings.setAttribute(QWebSettings.DeveloperExtrasEnabled, True)
    QWebSettings.enablePersistentStorage(os.path.join(cache_dir(), 'webkit-persistence'))
    QWebSettings.setMaximumPagesInCache(0)
    # Random name so page JS cannot guess at the bridge object
    self.bridge_name = 'b' + uuid.uuid4().get_hex()
    self.mainFrame().javaScriptWindowObjectCleared.connect(self.add_window_objects)
    self.dom_loaded = False
def tdir_in_cache(base):
    '''
    Create a temp dir inside cache_dir/base. The created dir is robust
    against application crashes. i.e. it will be cleaned up the next time
    the application starts, even if it was left behind by a previous crash.
    '''
    container = os.path.join(os.path.realpath(cache_dir()), base)
    try:
        os.makedirs(container)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    global_lock = retry_lock_tdir(container)
    try:
        # Scan each container only once per process, removing leftovers
        # from previously crashed runs
        if container not in tdir_in_cache.scanned:
            tdir_in_cache.scanned.add(container)
            try:
                clean_tdirs_in(container)
            except Exception:
                import traceback
                traceback.print_exc()
        tdir = tempfile.mkdtemp(dir=container)
        lock_data = lock_tdir(tdir)
        atexit.register(remove_tdir, tdir, lock_data)
        result = os.path.join(tdir, 'a')
        os.mkdir(result)
        return result
    finally:
        unlock_file(global_lock)
def get_cover(metadata):
    '''Fetch (or serve from cache) the cover for an icon theme, keyed by ETag.'''
    cdir = os.path.join(cache_dir(), 'icon-theme-covers')
    try:
        os.makedirs(cdir)
    except EnvironmentError as e:
        if e.errno != errno.EEXIST:
            raise

    def path_for(ext):
        return os.path.join(cdir, metadata['name'] + '.' + ext)

    etag_file = path_for('etag')
    cover_file = path_for('jpg')

    def safe_read(p):
        # Missing file is normal (cold cache); other errors propagate
        try:
            with open(p, 'rb') as f:
                return f.read()
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
                raise
            return b''

    etag = safe_read(etag_file)
    cached = safe_read(cover_file)
    cached, etag = download_cover(metadata['cover-url'], etag, cached)
    if cached:
        with open(cover_file, 'wb') as f:
            f.write(cached)
    if etag:
        with open(etag_file, 'wb') as f:
            f.write(etag)
    return cached or b''
def load_search_index():
    '''Load (building and caching if necessary) the map from lowercased
    name words to sets of unicode codepoints used by character search.'''
    topchar = 0x10ffff
    ver = (1, topchar, icu_unicode_version or unicodedata.unidata_version)  # Increment this when you make any changes to the index
    name_map = {}
    path = os.path.join(cache_dir(), 'unicode-name-index.pickle')
    if os.path.exists(path):
        with open(path, 'rb') as f:
            name_map = cPickle.load(f)
        # A missing or stale version marker invalidates the cached index
        if name_map.pop('calibre-nm-version:', None) != ver:
            name_map = {}
    if not name_map:
        name_map = defaultdict(set)
        for x in xrange(1, topchar + 1):
            for word in character_name_from_code(x).split():
                name_map[word.lower()].add(x)
        from calibre.ebooks.html_entities import html5_entities
        for name, char in html5_entities.iteritems():
            try:
                name_map[name.lower()].add(ord(char))
            except TypeError:  # multi-character entity, has no single ord()
                continue
        name_map['nnbsp'].add(0x202F)
        name_map['calibre-nm-version:'] = ver
        # Fix: use a context manager so the cache file handle is closed
        # deterministically (the original leaked the handle returned by
        # open(path, 'wb') into the cPickle.dump() call).
        with open(path, 'wb') as f:
            cPickle.dump(dict(name_map), f, -1)
        del name_map['calibre-nm-version:']
    return name_map
def get_cover(metadata):
    '''Fetch (or refresh) an icon theme cover, written atomically, keyed by ETag.'''
    cdir = os.path.join(cache_dir(), 'icon-theme-covers')
    try:
        os.makedirs(cdir)
    except EnvironmentError as e:
        if e.errno != errno.EEXIST:
            raise

    def path_for(ext):
        return os.path.join(cdir, metadata['name'] + '.' + ext)

    etag_file = path_for('etag')
    cover_file = path_for('jpg')

    def safe_read(p):
        # Missing file is normal (cold cache); other errors propagate
        try:
            with open(p, 'rb') as f:
                return f.read()
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
                raise
            return b''

    etag = safe_read(etag_file)
    cached = safe_read(cover_file)
    etag = etag.decode('utf-8')
    cached, etag = download_cover(metadata['cover-url'], etag, cached)
    if cached:
        # Write to a sidecar then rename so a partially written cover
        # is never visible at cover_file
        aname = cover_file + '.atomic'
        with open(aname, 'wb') as f:
            f.write(cached)
        atomic_rename(aname, cover_file)
    if etag:
        with open(etag_file, 'wb') as f:
            f.write(as_bytes(etag))
    return cached or b''
def load_search_index():
    '''Load (building and caching if necessary) the map from lowercased
    name words to sets of unicode codepoints used by character search.'''
    topchar = 0x10ffff
    ver = (1, topchar, icu_unicode_version or unicodedata.unidata_version
           )  # Increment this when you make any changes to the index
    name_map = {}
    path = os.path.join(cache_dir(), 'unicode-name-index.pickle')
    if os.path.exists(path):
        with open(path, 'rb') as f:
            name_map = cPickle.load(f)
        # A missing or stale version marker invalidates the cached index
        if name_map.pop('calibre-nm-version:', None) != ver:
            name_map = {}
    if not name_map:
        name_map = defaultdict(set)
        for x in xrange(1, topchar + 1):
            for word in character_name_from_code(x).split():
                name_map[word.lower()].add(x)
        from calibre.ebooks.html_entities import html5_entities
        for name, char in html5_entities.iteritems():
            try:
                name_map[name.lower()].add(ord(char))
            except TypeError:  # multi-character entity, has no single ord()
                continue
        name_map['nnbsp'].add(0x202F)
        name_map['calibre-nm-version:'] = ver
        # Fix: use a context manager so the cache file handle is closed
        # deterministically (the original leaked the handle returned by
        # open(path, 'wb') into the cPickle.dump() call).
        with open(path, 'wb') as f:
            cPickle.dump(dict(name_map), f, -1)
        del name_map['calibre-nm-version:']
    return name_map
def get_cache():
    """Return the polish-test cache directory, creating it if necessary."""
    from calibre.constants import cache_dir
    cache_location = os.path.join(cache_dir(), "polish-test")
    if not os.path.exists(cache_location):
        os.mkdir(cache_location)
    return cache_location
def __init__(
    self,
    log,
    confirm_callback=None,
    prompt_callback=None,
    user_agent=USER_AGENT,
    enable_developer_tools=False,
    parent=None,
):
    # Set up a QWebPage with persistent storage in the cache dir and a
    # uniquely named JS<->Python bridge object.
    QWebPage.__init__(self, parent)
    self.log = log
    self.user_agent = user_agent or USER_AGENT
    self.confirm_callback = confirm_callback
    self.prompt_callback = prompt_callback
    self.setForwardUnsupportedContent(True)
    self.unsupportedContent.connect(self.on_unsupported_content)
    settings = self.settings()
    if enable_developer_tools:
        settings.setAttribute(QWebSettings.DeveloperExtrasEnabled, True)
    QWebSettings.enablePersistentStorage(os.path.join(cache_dir(), "webkit-persistence"))
    QWebSettings.setMaximumPagesInCache(0)
    # Random name so page JS cannot guess at the bridge object
    self.bridge_name = "b" + uuid.uuid4().get_hex()
    self.mainFrame().javaScriptWindowObjectCleared.connect(self.add_window_objects)
    self.dom_loaded = False
def create_listener(authkey, backlog=4):
    '''Create an IPC Listener on a unique socket path, retrying on collisions.'''
    # We use the cache dir rather than the temp dir because
    # on macOS, there is software that deletes the temp dir after
    # periods of inactivity
    prefix = os.path.join(cache_dir(), 'ipc-socket-%d-%%d' % os.getpid())
    attempts_left = 20
    while attempts_left > 0:
        attempts_left -= 1
        address = prefix % next(_name_counter)
        try:
            return address, Listener(address=address, authkey=authkey, backlog=backlog)
        except EnvironmentError as err:
            if attempts_left < 1:
                raise
            if err.errno == errno.ENOENT:
                # Some OS X machines have software that deletes temp
                # files/dirs after prolonged inactivity. See for
                # example, https://bugs.launchpad.net/bugs/1541356
                try:
                    os.makedirs(os.path.dirname(prefix))
                except EnvironmentError as e:
                    if e.errno != errno.EEXIST:
                        raise
                continue
            if err.errno != errno.EADDRINUSE:
                raise
def cli(port=4444):
    # Interactive client for a remote pdb session: connects to the remote
    # debugger socket, forwards user input and displays remote output.
    prints('Connecting to remote debugger on port %d...' % port)
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Retry connecting for ~2 seconds; the final attempt reports the error
    for i in range(20):
        try:
            sock.connect(('127.0.0.1', port))
            break
        except socket.error:
            pass
        time.sleep(0.1)
    else:
        try:
            sock.connect(('127.0.0.1', port))
        except socket.error as err:
            prints('Failed to connect to remote debugger:', err, file=sys.stderr)
            raise SystemExit(1)
    prints('Connected to remote process')
    import readline
    histfile = os.path.join(cache_dir(), 'rpdb.history')
    try:
        readline.read_history_file(histfile)
    except IOError:
        pass
    atexit.register(readline.write_history_file, histfile)
    p = pdb.Pdb()
    readline.set_completer(p.complete)
    readline.parse_and_bind("tab: complete")
    # Use binary streams where available so PROMPT/QUESTION markers match bytes
    stdin = getattr(sys.stdin, 'buffer', sys.stdin)
    stdout = getattr(sys.stdout, 'buffer', sys.stdout)
    try:
        while True:
            recvd = b''
            # Keep reading while data is pending or the prompt marker
            # has not yet arrived; empty read means the remote closed
            while not recvd.endswith(PROMPT) or select.select([sock], [], [], 0) == ([sock], [], []):
                buf = eintr_retry_call(sock.recv, 16 * 1024)
                if not buf:
                    return
                recvd += buf
            recvd = recvd[:-len(PROMPT)]
            if recvd.startswith(QUESTION):
                # Remote asked a yes/no style question: echo it and send a raw line
                recvd = recvd[len(QUESTION):]
                stdout.write(recvd)
                raw = stdin.readline() or b'n'
            else:
                stdout.write(recvd)
                raw = b''
                try:
                    raw = raw_input(PROMPT.decode('utf-8'))
                except (EOFError, KeyboardInterrupt):
                    pass
                else:
                    if not isinstance(raw, bytes):
                        raw = raw.encode('utf-8')
                    raw += b'\n'
            if not raw:
                # EOF/interrupt at the prompt quits the remote debugger
                raw = b'quit\n'
            eintr_retry_call(sock.send, raw)
    except KeyboardInterrupt:
        pass
def cli(port=4444):
    # Interactive client for a remote pdb session: connects to the remote
    # debugger socket, forwards user input and displays remote output.
    print('Connecting to remote debugger on port %d...' % port)
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Retry connecting for ~2 seconds; the final attempt reports the error
    for i in range(20):
        try:
            sock.connect(('127.0.0.1', port))
            break
        except socket.error:
            pass
        time.sleep(0.1)
    else:
        try:
            sock.connect(('127.0.0.1', port))
        except socket.error as err:
            print('Failed to connect to remote debugger:', err, file=sys.stderr)
            raise SystemExit(1)
    print('Connected to remote process', flush=True)
    # readline is optional (not available everywhere); history and tab
    # completion are simply skipped without it
    try:
        import readline
        histfile = os.path.join(cache_dir(), 'rpdb.history')
        try:
            readline.read_history_file(histfile)
        except IOError:
            pass
        atexit.register(readline.write_history_file, histfile)
        p = pdb.Pdb()
        readline.set_completer(p.complete)
        readline.parse_and_bind("tab: complete")
    except ImportError:
        pass
    sock.setblocking(True)
    with suppress(KeyboardInterrupt):
        end_of_input = PROMPT.encode('utf-8')
        while True:
            recvd = b''
            # Keep reading while data is pending or the prompt marker has
            # not yet arrived; empty read means the remote closed
            while select.select([sock], [], [], 0)[0] or not recvd.endswith(end_of_input):
                buf = sock.recv(4096)
                if not buf:
                    return
                recvd += buf
            recvd = recvd.decode('utf-8', 'replace')
            recvd = recvd[:-len(PROMPT)]
            raw = ''
            if recvd.startswith(QUESTION):
                # Remote asked a yes/no style question: echo it and send a raw line
                recvd = recvd[len(QUESTION):]
                print(recvd, end='', flush=True)
                raw = sys.stdin.readline() or 'n'
            else:
                print(recvd, end='', flush=True)
                try:
                    raw = input(PROMPT)
                except (EOFError, KeyboardInterrupt):
                    pass
                else:
                    raw += '\n'
            if not raw:
                # EOF/interrupt at the prompt quits the remote debugger
                raw = 'quit\n'
            if raw:
                sock.sendall(raw.encode('utf-8'))
def cli(port=4444):
    # Interactive client for a remote pdb session: connects to the remote
    # debugger socket, forwards user input and displays remote output.
    prints('Connecting to remote debugger on port %d...' % port)
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Retry connecting for ~2 seconds; the final attempt reports the error
    for i in range(20):
        try:
            sock.connect(('127.0.0.1', port))
            break
        except socket.error:
            pass
        time.sleep(0.1)
    else:
        try:
            sock.connect(('127.0.0.1', port))
        except socket.error as err:
            prints('Failed to connect to remote debugger:', err, file=sys.stderr)
            raise SystemExit(1)
    prints('Connected to remote process')
    import readline
    histfile = os.path.join(cache_dir(), 'rpdb.history')
    try:
        readline.read_history_file(histfile)
    except IOError:
        pass
    atexit.register(readline.write_history_file, histfile)
    p = pdb.Pdb()
    readline.set_completer(p.complete)
    readline.parse_and_bind("tab: complete")
    # Use binary streams where available so PROMPT/QUESTION markers match bytes
    stdin = getattr(sys.stdin, 'buffer', sys.stdin)
    stdout = getattr(sys.stdout, 'buffer', sys.stdout)
    try:
        while True:
            recvd = b''
            # Keep reading while data is pending or the prompt marker
            # has not yet arrived; empty read means the remote closed
            while not recvd.endswith(PROMPT) or select.select([sock], [], [], 0) == ([sock], [], []):
                buf = eintr_retry_call(sock.recv, 16 * 1024)
                if not buf:
                    return
                recvd += buf
            recvd = recvd[:-len(PROMPT)]
            if recvd.startswith(QUESTION):
                # Remote asked a yes/no style question: echo it and send a raw line
                recvd = recvd[len(QUESTION):]
                stdout.write(recvd)
                raw = stdin.readline() or b'n'
            else:
                stdout.write(recvd)
                raw = b''
                try:
                    # NOTE(review): rinput is presumably a raw_input/input
                    # compatibility alias defined elsewhere in this module
                    raw = rinput(PROMPT.decode('utf-8'))
                except (EOFError, KeyboardInterrupt):
                    pass
                else:
                    if not isinstance(raw, bytes):
                        raw = raw.encode('utf-8')
                    raw += b'\n'
            if not raw:
                # EOF/interrupt at the prompt quits the remote debugger
                raw = b'quit\n'
            eintr_retry_call(sock.send, raw)
    except KeyboardInterrupt:
        pass
def path_to_dictionary(dictionary_name, cache_callback=None):
    '''Return the on-disk path of the named dictionary, extracting the
    bundled dictionaries into the cache first when the cache is stale.
    cache_callback, if given, is invoked after a (re-)extraction.'''
    base = getattr(path_to_dictionary, 'cache_dir', None) or cache_dir()
    cache_path = get_cache_path(base)
    if not is_cache_up_to_date(cache_path):
        extract_dicts(cache_path)
        if cache_callback is not None:
            cache_callback()
    return os.path.join(cache_path, 'f', dictionary_name)
def cache(self):
    '''Lazily build and memoize the JSONConfig used by the MobileRead store.'''
    if not hasattr(self, '_mr_cache'):
        from calibre.utils.config import JSONConfig
        cfg = self._mr_cache = JSONConfig('mobileread_get_books')
        cfg.file_path = os.path.join(cache_dir(), 'mobileread_get_books.json')
        cfg.refresh()
    return self._mr_cache
def cache(self):
    '''Lazily build and memoize the JSONConfig used by the MobileRead store.'''
    if not hasattr(self, '_mr_cache'):
        from calibre.utils.config import JSONConfig
        cfg = self._mr_cache = JSONConfig('mobileread_get_books')
        cfg.file_path = os.path.join(cache_dir(), 'mobileread_get_books.json')
        cfg.refresh()
    return self._mr_cache
def __init__(self, data):
    # Main window for the in-app web store browser
    MainWindow.__init__(self, None)
    self.setWindowIcon(QIcon(I('store.png')))
    self.setWindowTitle(data['window_title'])
    self.download_data = {}
    # Configure the shared default profile: HTTP cache and persistent
    # storage live under the calibre cache directory
    profile = QWebEngineProfile.defaultProfile()
    web_store_dir = os.path.join(cache_dir(), 'web_store')
    profile.setCachePath(os.path.join(web_store_dir, 'hc'))
    profile.setPersistentStoragePath(os.path.join(web_store_dir, 'ps'))
    profile.setHttpUserAgent(random_user_agent(allow_ie=False))
    profile.downloadRequested.connect(self.download_requested)
    self.data = data
    self.central = central = Central(self)
    central.home.connect(self.go_home)
    self.setCentralWidget(central)
    geometry = gprefs.get('store_dialog_main_window_geometry')
    if geometry is not None:
        self.restoreGeometry(geometry)
    self.go_to(data['detail_url'] or None)
def create_profile():
    '''Return the singleton QWebEngineProfile used by the viewer lookup panel.'''
    profile = getattr(create_profile, 'ans', None)
    if profile is None:
        profile = QWebEngineProfile('viewer-lookup', QApplication.instance())
        profile.setHttpUserAgent(random_user_agent(allow_ie=False))
        profile.setCachePath(os.path.join(cache_dir(), 'ev2vl'))
        js = P('lookup.js', data=True, allow_user_override=False)
        insert_scripts(profile, create_script('lookup.js', js))
        profile.settings().setDefaultTextEncoding('utf-8')
        create_profile.ans = profile
    return profile
def books_cache_dir():
    '''Return (creating if needed) the base directory of the server book cache.'''
    global _books_cache_dir
    if _books_cache_dir:
        return _books_cache_dir
    base = abspath(os.path.join(cache_dir(), 'srvb'))
    for sub in ('s', 'f'):
        try:
            os.makedirs(os.path.join(base, sub))
        except EnvironmentError as e:
            if e.errno != errno.EEXIST:
                raise
    _books_cache_dir = base
    return base
def books_cache_dir():
    '''Return (creating if needed) the base directory of the server book cache.'''
    global _books_cache_dir
    if _books_cache_dir:
        return _books_cache_dir
    base = abspath(os.path.join(cache_dir(), 'srvb'))
    for sub in ('s', 'f'):
        try:
            os.makedirs(os.path.join(base, sub))
        except EnvironmentError as e:
            if e.errno != errno.EEXIST:
                raise
    _books_cache_dir = base
    return base
def __init__(self, log, use_disk_cache=True, parent=None):
    # Network access manager with optional disk cache, proxy support,
    # a cookie jar and reply tracking
    QNetworkAccessManager.__init__(self, parent)
    self.reply_count = 0
    self.log = log
    if use_disk_cache:
        disk_cache = QNetworkDiskCache(self)
        disk_cache.setCacheDirectory(os.path.join(cache_dir(), 'jsbrowser'))
        self.cache = disk_cache
        self.setCache(disk_cache)
    self.sslErrors.connect(self.on_ssl_errors)
    self.pf = ProxyFactory(log)
    self.setProxyFactory(self.pf)
    self.finished.connect(self.on_finished)
    self.cookie_jar = QNetworkCookieJar()
    self.setCookieJar(self.cookie_jar)
    self.main_thread = current_thread()
    self.report_reply_signal.connect(self.report_reply, type=Qt.QueuedConnection)
def __init__(self,
             max_size=1024,              # The maximum disk space in MB
             name='thumbnail-cache',     # The name of this cache (should be unique in location)
             thumbnail_size=(100, 100),  # The size of the thumbnails, can be changed
             location=None,              # The location for this cache, if None cache_dir() is used
             test_mode=False,            # Used for testing
             min_disk_cache=0):          # If the size is set less than or equal to this value, the cache is disabled.
    self.location = os.path.join(location or cache_dir(), name)
    # A maximum size at or below min_disk_cache disables the cache entirely
    if max_size <= min_disk_cache:
        max_size = 0
    self.max_size = int(max_size * (1024**2))  # MB -> bytes
    self.group_id = 'group'
    self.thumbnail_size = thumbnail_size
    self.size_changed = False
    self.lock = Lock()
    self.min_disk_cache = min_disk_cache
    if test_mode:
        self.log = self.fail_on_error
def __init__(self,
             max_size=1024,              # The maximum disk space in MB
             name='thumbnail-cache',     # The name of this cache (should be unique in location)
             thumbnail_size=(100, 100),  # The size of the thumbnails, can be changed
             location=None,              # The location for this cache, if None cache_dir() is used
             test_mode=False,            # Used for testing
             min_disk_cache=0):          # If the size is set less than or equal to this value, the cache is disabled.
    self.location = os.path.join(location or cache_dir(), name)
    # A maximum size at or below min_disk_cache disables the cache entirely
    if max_size <= min_disk_cache:
        max_size = 0
    self.max_size = int(max_size * (1024**2))  # MB -> bytes
    self.group_id = 'group'
    self.thumbnail_size = thumbnail_size
    self.size_changed = False
    self.lock = Lock()
    self.min_disk_cache = min_disk_cache
    if test_mode:
        self.log = self.fail_on_error
def __init__(self, log, use_disk_cache=True, parent=None):
    # Network access manager with optional disk cache, proxy support,
    # a cookie jar and reply tracking
    QNetworkAccessManager.__init__(self, parent)
    self.reply_count = 0
    self.log = log
    if use_disk_cache:
        disk_cache = QNetworkDiskCache(self)
        disk_cache.setCacheDirectory(os.path.join(cache_dir(), 'jsbrowser'))
        self.cache = disk_cache
        self.setCache(disk_cache)
    self.sslErrors.connect(self.on_ssl_errors)
    self.pf = ProxyFactory(log)
    self.setProxyFactory(self.pf)
    self.finished.connect(self.on_finished)
    self.cookie_jar = QNetworkCookieJar()
    self.setCookieJar(self.cookie_jar)
    self.main_thread = current_thread()
    self.report_reply_signal.connect(self.report_reply, type=Qt.QueuedConnection)
def __init__(self, library_broker, notify_changes):
    # Configure logging, the request handler and optional plugins for
    # the content server
    opts = server_config()
    main_log_path, access_log_path = log_paths()
    try:
        os.makedirs(cache_dir())
    except EnvironmentError as err:
        if err.errno != errno.EEXIST:
            raise
    log_size = opts.max_log_size * 1024 * 1024
    log = RotatingLog(main_log_path, max_size=log_size)
    access_log = RotatingLog(access_log_path, max_size=log_size)
    self.handler = Handler(library_broker, opts, notify_changes=notify_changes)
    plugins = self.plugins = []
    if opts.use_bonjour:
        plugins.append(BonJour())
    self.opts = opts
    self.log, self.access_log = log, access_log
    self.handler.set_log(self.log)
def __init__(self, library_broker, notify_changes):
    # Configure logging, the request handler and optional plugins for
    # the content server
    opts = server_config()
    main_log_path, access_log_path = log_paths()
    try:
        os.makedirs(cache_dir())
    except EnvironmentError as err:
        if err.errno != errno.EEXIST:
            raise
    log_size = opts.max_log_size * 1024 * 1024
    log = RotatingLog(main_log_path, max_size=log_size)
    access_log = RotatingLog(access_log_path, max_size=log_size)
    self.handler = Handler(library_broker, opts, notify_changes=notify_changes)
    plugins = self.plugins = []
    if opts.use_bonjour:
        plugins.append(BonJour())
    self.opts = opts
    self.log, self.access_log = log, access_log
    self.handler.set_log(self.log)
    self.handler.router.ctx.custom_list_template = custom_list_template()
def __call__(self):
    # Run the interactive REPL loop in this (input) process: read lines
    # from the user and forward them to the worker over self.to_repl.
    if hasattr(self, 'readline'):
        history = os.path.join(cache_dir(), 'pyj-repl-history.txt')
        self.readline.parse_and_bind("tab: complete")
        try:
            self.readline.read_history_file(history)
        except EnvironmentError as e:
            # A missing history file is expected on first run
            if e.errno != errno.ENOENT:
                raise
        atexit.register(partial(self.readline.write_history_file, history))

    def completer(text, num):
        # readline completer protocol: called with increasing num until
        # None is returned; completions are fetched from the worker once
        # and cached on self.completions
        if self.completions is None:
            self.to_repl.put(('complete', text))
            self.completions = list(filter(None, self.get_from_repl()))
            if not self.completions:
                return None
        try:
            return self.completions[num]
        except (IndexError, TypeError, AttributeError, KeyError):
            self.completions = None
    if hasattr(self, 'readline'):
        self.readline.set_completer(completer)
    while True:
        # lw is the leading whitespace for continuation lines; None
        # signals that the worker has died
        lw = self.get_from_repl()
        if lw is None:
            raise SystemExit(1)
        q = self.prompt
        if hasattr(self, 'readline'):
            # Pre-fill the input line with the continuation indent
            self.readline.set_pre_input_hook(lambda: (
                self.readline.insert_text(lw), self.readline.redisplay()))
        else:
            q += lw
        try:
            line = raw_input(q)
            self.to_repl.put(('line', line))
        except EOFError:
            return
        except KeyboardInterrupt:
            self.to_repl.put(('SIGINT', None))
def __init__(self, library_broker, notify_changes):
    # Configure logging, the request handler and optional plugins for
    # the content server
    opts = server_config()
    main_log_path, access_log_path = log_paths()
    try:
        os.makedirs(cache_dir())
    except OSError as err:
        if err.errno != errno.EEXIST:
            raise
    log_size = opts.max_log_size * 1024 * 1024
    log = RotatingLog(main_log_path, max_size=log_size)
    access_log = RotatingLog(access_log_path, max_size=log_size)
    self.handler = Handler(library_broker, opts, notify_changes=notify_changes)
    plugins = self.plugins = []
    if opts.use_bonjour:
        # Leave a little of the shutdown budget for the rest of the teardown
        plugins.append(BonJour(wait_for_stop=max(0, opts.shutdown_timeout - 0.2)))
    self.opts = opts
    self.log, self.access_log = log, access_log
    self.handler.set_log(self.log)
    self.handler.router.ctx.custom_list_template = custom_list_template()
    self.handler.router.ctx.search_the_net_urls = search_the_net_urls()
def __init__(self, log, confirm_callback=None, prompt_callback=None,
             user_agent=USER_AGENT, enable_developer_tools=False, parent=None):
    # Set up a QWebPage with persistent storage in the cache dir
    QWebPage.__init__(self, parent)
    self.log = log
    self.user_agent = user_agent or USER_AGENT
    self.confirm_callback = confirm_callback
    self.prompt_callback = prompt_callback
    self.setForwardUnsupportedContent(True)
    self.unsupportedContent.connect(self.on_unsupported_content)
    settings = self.settings()
    if enable_developer_tools:
        settings.setAttribute(QWebSettings.DeveloperExtrasEnabled, True)
    QWebSettings.enablePersistentStorage(os.path.join(cache_dir(), 'webkit-persistence'))
    QWebSettings.setMaximumPagesInCache(0)
def __call__(self):
    # Run the interactive REPL loop in this (input) process: read lines
    # from the user and forward them to the worker over self.to_repl.
    if hasattr(self, 'readline'):
        history = os.path.join(cache_dir(), 'pyj-repl-history.txt')
        self.readline.parse_and_bind("tab: complete")
        try:
            self.readline.read_history_file(history)
        except EnvironmentError as e:
            # A missing history file is expected on first run
            if e.errno != errno.ENOENT:
                raise
        atexit.register(partial(self.readline.write_history_file, history))

    def completer(text, num):
        # readline completer protocol: called with increasing num until
        # None is returned; completions are fetched from the worker once
        # and cached on self.completions
        if self.completions is None:
            self.to_repl.put(('complete', text))
            # Fix: materialize the filter() result. Under Python 3 filter()
            # returns a lazy iterator, which is never None (making the old
            # ``if self.completions is None`` check dead code) and does not
            # support the indexing below. An empty result means there is
            # nothing to complete.
            self.completions = list(filter(None, self.get_from_repl()))
            if not self.completions:
                return None
        try:
            return self.completions[num]
        except (IndexError, TypeError, AttributeError, KeyError):
            self.completions = None
    if hasattr(self, 'readline'):
        self.readline.set_completer(completer)
    while True:
        # lw is the leading whitespace for continuation lines; None
        # signals that the worker has died
        lw = self.get_from_repl()
        if lw is None:
            raise SystemExit(1)
        q = self.prompt
        if hasattr(self, 'readline'):
            # Pre-fill the input line with the continuation indent
            self.readline.set_pre_input_hook(lambda: (
                self.readline.insert_text(lw), self.readline.redisplay()))
        else:
            q += lw
        try:
            line = raw_input(q)
            self.to_repl.put(('line', line))
        except EOFError:
            return
        except KeyboardInterrupt:
            self.to_repl.put(('SIGINT', None))
def __init__(self, log, confirm_callback=None, prompt_callback=None,
             user_agent=USER_AGENT, enable_developer_tools=False, parent=None):
    # Set up a QWebPage with persistent storage in the cache dir
    QWebPage.__init__(self, parent)
    self.log = log
    self.user_agent = user_agent or USER_AGENT
    self.confirm_callback = confirm_callback
    self.prompt_callback = prompt_callback
    self.setForwardUnsupportedContent(True)
    self.unsupportedContent.connect(self.on_unsupported_content)
    settings = self.settings()
    if enable_developer_tools:
        settings.setAttribute(QWebSettings.DeveloperExtrasEnabled, True)
    QWebSettings.enablePersistentStorage(os.path.join(cache_dir(), 'webkit-persistence'))
    QWebSettings.setMaximumPagesInCache(0)
def __init__(self, library_broker, notify_changes):
    # Configure logging, the request handler and optional plugins for
    # the content server
    opts = server_config()
    main_log_path, access_log_path = log_paths()
    try:
        os.makedirs(cache_dir())
    except EnvironmentError as err:
        if err.errno != errno.EEXIST:
            raise
    log_size = opts.max_log_size * 1024 * 1024
    log = RotatingLog(main_log_path, max_size=log_size)
    access_log = RotatingLog(access_log_path, max_size=log_size)
    self.handler = Handler(library_broker, opts, notify_changes=notify_changes)
    plugins = self.plugins = []
    if opts.use_bonjour:
        plugins.append(BonJour())
    self.opts = opts
    self.log, self.access_log = log, access_log
    self.handler.set_log(self.log)
    # In development mode, recompile the server-side javascript on startup
    _df = os.environ.get('CALIBRE_DEVELOP_FROM', None)
    if _df and os.path.exists(_df):
        from calibre.utils.rapydscript import compile_srv
        compile_srv()
def find_icons():
    # Build (and memoize in the module-global icon_data) a map from icon
    # name to (size, path), scanning the standard icon theme directories
    # and caching per-directory scan results in a pickle keyed by mtime.
    global icon_data
    if icon_data is not None:
        return icon_data
    base_dirs = [os.path.expanduser('~/.icons')] + [
        os.path.join(b, 'icons') for b in os.environ.get(
            'XDG_DATA_DIRS', '/usr/local/share:/usr/share').split(os.pathsep)
    ] + ['/usr/share/pixmaps']
    ans = defaultdict(list)
    # Icon theme layout encodes the size as a path component, e.g. /48x48/
    sz_pat = re.compile(r'/((?:\d+x\d+)|scalable)/')
    cache_file = os.path.join(cache_dir(), 'icon-theme-cache.pickle')
    exts = {'.svg', '.png', '.xpm'}

    def read_icon_theme_dir(dirpath):
        # Scan one theme dir, keeping for each icon name the largest
        # available size ('scalable' sorts above any fixed size)
        ans = defaultdict(list)
        for path in walk(dirpath):
            bn = os.path.basename(path)
            name, ext = os.path.splitext(bn)
            if ext in exts:
                sz = sz_pat.findall(path)
                if sz:
                    sz = sz[-1]
                    if sz == 'scalable':
                        sz = 100000
                    else:
                        sz = int(sz.partition('x')[0])
                    idx = len(ans[name])
                    ans[name].append((-sz, idx, sz, path))
        for icons in ans.itervalues():
            icons.sort()
        return {k: (-v[0][2], v[0][3]) for k, v in ans.iteritems()}
    try:
        with open(cache_file, 'rb') as f:
            cache = cPickle.load(f)
            mtimes, cache = cache['mtimes'], cache['data']
    except Exception:
        # Any problem with the cache (missing, corrupt, old format)
        # simply triggers a full rescan
        mtimes, cache = defaultdict(int), defaultdict(dict)
    seen_dirs = set()
    changed = False
    for loc in base_dirs:
        try:
            subdirs = os.listdir(loc)
        except EnvironmentError:
            continue
        for dname in subdirs:
            d = os.path.join(loc, dname)
            if os.path.isdir(d):
                try:
                    mtime = os.stat(d).st_mtime
                except EnvironmentError:
                    continue
                seen_dirs.add(d)
                # Rescan only directories whose mtime has changed
                if mtime != mtimes[d]:
                    changed = True
                    try:
                        cache[d] = read_icon_theme_dir(d)
                    except Exception:
                        prints(
                            'Failed to read icon theme dir: %r with error:' % d)
                        import traceback
                        traceback.print_exc()
                    mtimes[d] = mtime
                for name, data in cache[d].iteritems():
                    ans[name].append(data)
    # Drop cache entries for theme dirs that no longer exist
    for removed in set(mtimes) - seen_dirs:
        mtimes.pop(removed), cache.pop(removed)
        changed = True
    if changed:
        try:
            with open(cache_file, 'wb') as f:
                cPickle.dump({'data': cache, 'mtimes': mtimes}, f, -1)
        except Exception:
            import traceback
            traceback.print_exc()
    for icons in ans.itervalues():
        icons.sort()
    icon_data = {k: v[0][1] for k, v in ans.iteritems()}
    return icon_data
def find_icons():
    # Build (and memoize in the module-global icon_data) a map from icon
    # name to (size, path), scanning the standard icon theme directories
    # and caching per-directory scan results in a pickle keyed by mtime.
    global icon_data
    if icon_data is not None:
        return icon_data
    base_dirs = [(os.environ.get('XDG_DATA_HOME') or os.path.expanduser('~/.local/share')) + '/icons']
    base_dirs += [os.path.expanduser('~/.icons')]
    base_dirs += [
        os.path.join(b, 'icons') for b in os.environ.get(
            'XDG_DATA_DIRS', '/usr/local/share:/usr/share').split(os.pathsep)] + [
        '/usr/share/pixmaps']
    ans = defaultdict(list)
    # Icon theme layout encodes the size as a path component, e.g. /48x48/
    sz_pat = re.compile(r'/((?:\d+x\d+)|scalable)/')
    cache_file = os.path.join(cache_dir(), 'icon-theme-cache.pickle')
    exts = {'.svg', '.png', '.xpm'}

    def read_icon_theme_dir(dirpath):
        # Scan one theme dir, keeping for each icon name the largest
        # available size ('scalable' sorts above any fixed size)
        ans = defaultdict(list)
        for path in walk(dirpath):
            bn = os.path.basename(path)
            name, ext = os.path.splitext(bn)
            if ext in exts:
                sz = sz_pat.findall(path)
                if sz:
                    sz = sz[-1]
                    if sz == 'scalable':
                        sz = 100000
                    else:
                        sz = int(sz.partition('x')[0])
                    idx = len(ans[name])
                    ans[name].append((-sz, idx, sz, path))
        for icons in ans.itervalues():
            icons.sort()
        return {k:(-v[0][2], v[0][3]) for k, v in ans.iteritems()}
    try:
        with open(cache_file, 'rb') as f:
            cache = cPickle.load(f)
            mtimes, cache = cache['mtimes'], cache['data']
    except Exception:
        # Any problem with the cache (missing, corrupt, old format)
        # simply triggers a full rescan
        mtimes, cache = defaultdict(int), defaultdict(dict)
    seen_dirs = set()
    changed = False
    for loc in base_dirs:
        try:
            subdirs = os.listdir(loc)
        except EnvironmentError:
            continue
        for dname in subdirs:
            d = os.path.join(loc, dname)
            if os.path.isdir(d):
                try:
                    mtime = os.stat(d).st_mtime
                except EnvironmentError:
                    continue
                seen_dirs.add(d)
                # Rescan only directories whose mtime has changed
                if mtime != mtimes[d]:
                    changed = True
                    try:
                        cache[d] = read_icon_theme_dir(d)
                    except Exception:
                        prints('Failed to read icon theme dir: %r with error:' % d)
                        import traceback
                        traceback.print_exc()
                    mtimes[d] = mtime
                for name, data in cache[d].iteritems():
                    ans[name].append(data)
    # Drop cache entries for theme dirs that no longer exist
    for removed in set(mtimes) - seen_dirs:
        mtimes.pop(removed), cache.pop(removed)
        changed = True
    if changed:
        try:
            with open(cache_file, 'wb') as f:
                cPickle.dump({'data':cache, 'mtimes':mtimes}, f, -1)
        except Exception:
            import traceback
            traceback.print_exc()
    for icons in ans.itervalues():
        icons.sort()
    icon_data = {k:v[0][1] for k, v in ans.iteritems()}
    return icon_data
def book_cache_dir():
    '''The viewer book cache directory; a ``book_cache_dir.override``
    attribute (set by tests) takes precedence over the default.'''
    default = os.path.join(cache_dir(), 'ev2')
    return getattr(book_cache_dir, 'override', default)
def reading_rates_path():
    '''Path of the JSON file that stores viewer reading-rate data.'''
    fname = 'viewer-reading-rates.json'
    return os.path.join(cache_dir(), fname)
def setUp(self):
    # Run the test in a fresh temporary directory and redirect the
    # calibre cache there, remembering the originals for tearDown
    self.cwd = os.getcwd()
    self.original_cache_dir = cache_dir()
    self.tdir = tempfile.mkdtemp()
    os.chdir(self.tdir)
    cache_dir.ans = self.tdir
def log_paths():
    '''Return the (server log, access log) file paths inside the cache dir.'''
    main_log = os.path.join(cache_dir(), 'server-log.txt')
    access_log = os.path.join(cache_dir(), 'server-access-log.txt')
    return main_log, access_log
def log_paths():
    '''Return the (server log, access log) file paths inside the cache dir.'''
    main_log = os.path.join(cache_dir(), 'server-log.txt')
    access_log = os.path.join(cache_dir(), 'server-access-log.txt')
    return main_log, access_log
def setUp(self):
    # Run the test in a fresh temporary directory and redirect the
    # calibre cache there, remembering the originals for tearDown
    self.cwd = os.getcwd()
    self.original_cache_dir = cache_dir()
    self.tdir = tempfile.mkdtemp()
    os.chdir(self.tdir)
    cache_dir.ans = self.tdir
def cache_path():
    # An explicit db_path (set for tests) wins over the default location
    if db_path:
        return db_path
    return os.path.join(cache_dir(), 'live.sqlite')
def get_cache():
    '''Return the polish-test cache directory, creating it if necessary.'''
    from calibre.constants import cache_dir
    cache_location = os.path.join(cache_dir(), 'polish-test')
    if not os.path.exists(cache_location):
        os.mkdir(cache_location)
    return cache_location
#!/usr/bin/env python2 # vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai from __future__ import (unicode_literals, division, absolute_import, print_function) __license__ = 'GPL v3' __copyright__ = '2012, Kovid Goyal <*****@*****.**>' __docformat__ = 'restructuredtext en' import os, re, sys from calibre.constants import iswindows, cache_dir, get_version ipydir = os.path.join(cache_dir(), 'ipython') BANNER = ('Welcome to the interactive calibre shell!\n') def setup_pyreadline(): config = ''' #Bind keys for exit (keys only work on empty lines #disable_readline(True) #Disable pyreadline completely. from __future__ import print_function, unicode_literals, absolute_import debug_output("off") #"on" saves log info to./pyreadline_debug_log.txt #"on_nologfile" only enables print warning messages bind_exit_key("Control-d") bind_exit_key("Control-z") #Commands for moving bind_key("Home", "beginning_of_line") bind_key("End", "end_of_line") bind_key("Left", "backward_char")
#!/usr/bin/env python # vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai from __future__ import (unicode_literals, division, absolute_import, print_function) __license__ = 'GPL v3' __copyright__ = '2012, Kovid Goyal <*****@*****.**>' __docformat__ = 'restructuredtext en' import os, re, sys from calibre.constants import iswindows, cache_dir, get_version ipydir = os.path.join(cache_dir(), 'ipython') BANNER = ('Welcome to the interactive calibre shell!\n') def setup_pyreadline(): config = ''' #Bind keys for exit (keys only work on empty lines #disable_readline(True) #Disable pyreadline completely. from __future__ import print_function, unicode_literals, absolute_import debug_output("off") #"on" saves log info to./pyreadline_debug_log.txt #"on_nologfile" only enables print warning messages bind_exit_key("Control-d") bind_exit_key("Control-z") #Commands for moving bind_key("Home", "beginning_of_line") bind_key("End", "end_of_line") bind_key("Left", "backward_char") bind_key("Control-b", "backward_char")