def announceSites(self):
    """Periodically re-announce every served site to its trackers.

    Runs forever. Sleeps 5 minutes first because all sites were already
    announced on startup, then repeats a full update pass roughly every
    20 minutes (spread evenly over the tracker list).
    """
    time.sleep(5 * 60)  # Sites already announced on startup
    while 1:
        config.loadTrackersFile()
        s = time.time()
        # Snapshot the dict: the loop sleeps, so sites may be added or
        # removed concurrently (matches add()'s list(self.sites.items()))
        for address, site in list(self.sites.items()):
            if not site.settings["serving"]:
                continue
            site.announce(mode="update", pex=False)

            # A site counts as active if modified within the last 24 hours
            active_site = time.time() - site.settings.get("modified", 0) < 24 * 60 * 60
            if site.settings["own"] or active_site:
                # Check connections more frequently on own and active sites
                # to speed-up first connections
                site.needConnections(check_site_on_reconnect=True)

            site.sendMyHashfield(3)
            site.updateHashfield(3)
            time.sleep(1)
        taken = time.time() - s

        # Query all trackers one-by-one in 20 minutes evenly distributed.
        # max(len, 1) guards against ZeroDivisionError when the freshly
        # loaded tracker list is empty.
        sleep = max(0, 60 * 20 / max(len(config.trackers), 1) - taken)
        self.log.debug("Site announce tracker done in %.3fs, sleeping for %ss..." % (taken, sleep))
        time.sleep(sleep)
def announceSites(self):
    """Site health-care loop: announce, retry bad files, clean up peers.

    Never returns. The first pass announces every serving site to all
    trackers; later passes rely on PEX, plus a slow round-robin tracker
    query spread evenly over ~20 minutes.
    """
    import gc
    first_announce = True  # Still in the startup pass?
    while 1:
        # Sites health care every 20 min
        if config.trackers_file:
            config.loadTrackersFile()
        for address, site in self.sites.items():
            if not site.settings["serving"]:
                continue
            if first_announce:
                site.announce()  # Announce to all trackers on startup
            else:
                site.announcePex()  # After the first run only use PEX

            # Reset bad file retry counter
            for inner_path in site.bad_files:
                site.bad_files[inner_path] = 0

            if site.bad_files:  # Retry failed files
                site.retryBadFiles()

            site.cleanupPeers()

            if self.port_opened is False:
                # In passive mode keep 5 active peer connections for updates
                site.needConnections()

            if first_announce:
                site.sendMyHashfield()  # Send my optional files to peers

            time.sleep(2)  # Prevent too quick request
        site = None
        gc.collect()  # Implicit garbage collection

        # Find new peers: query trackers one-by-one, evenly over 20 minutes
        for tracker_i in range(len(config.trackers)):
            time.sleep(60 * 20 / len(config.trackers))
            if config.trackers_file:
                config.loadTrackersFile()
            for address, site in self.sites.items():
                site.announce(num=1, pex=False)
                site.sendMyHashfield(num_send=1)
                time.sleep(2)
        first_announce = False
def announceSites(self):
    """Announce every served site in its own greenlet, forever.

    Sleeps 5 minutes first (sites were already announced on startup),
    then repeats a full pass roughly every 20 minutes.
    """
    time.sleep(5 * 60)  # Sites already announced on startup
    while 1:
        config.loadTrackersFile()
        s = time.time()
        for address, site in self.sites.items():
            if not site.settings["serving"]:
                continue
            # Cap each site's announce at 10s so one slow site cannot
            # stall the whole pass
            gevent.spawn(self.announceSite, site).join(timeout=10)
            time.sleep(1)
        taken = time.time() - s

        # Query all trackers one-by-one in 20 minutes evenly distributed.
        # max(len, 1) guards against ZeroDivisionError when the freshly
        # loaded tracker list is empty.
        sleep = max(0, 60 * 20 / max(len(config.trackers), 1) - taken)
        self.log.debug("Site announce tracker done in %.3fs, sleeping for %.3fs..." % (taken, sleep))
        time.sleep(sleep)
def actionConfigSet(self, to, key, value):
    """Set an API-changeable config key, persist it, and apply side effects.

    Keys in keys_restart_need are only queued (need_restart); others are
    applied to the live config immediately. Some keys trigger extra
    actions (language switch, Tor bridges, tracker reload, log level,
    external-IP re-check). Responds "ok" on success.
    """
    if key not in config.keys_api_change_allowed:
        self.response(to, {"error": "Forbidden you cannot set this config key"})
        return

    # Remove empty lines from lists (isinstance instead of type comparison:
    # idiomatic, and also accepts list subclasses)
    if isinstance(value, list):
        value = [line for line in value if line]

    config.saveValue(key, value)

    if key not in config.keys_restart_need:
        if value is None:  # Default value
            setattr(config, key, config.parser.get_default(key))
            setattr(config.arguments, key, config.parser.get_default(key))
        else:
            setattr(config, key, value)
            setattr(config.arguments, key, value)
    else:
        # Applied only after restart; remember it so the UI can show it
        config.need_restart = True
        config.pending_changes[key] = value

    if key == "language":
        import Translate
        for translate in Translate.translates:
            translate.setLanguage(value)
        message = _["You have successfully changed the web interface's language!"] + "<br>"
        message += _["Due to the browser's caching, the full transformation could take some minute."]
        self.cmd("notification", ["done", message, 10000])

    if key == "tor_use_bridges":
        # Any non-None value enables bridges; None disables them
        use_bridges = value is not None
        tor_manager = sys.modules["main"].file_server.tor_manager
        tor_manager.request("SETCONF UseBridges=%i" % use_bridges)

    if key == "trackers_file":
        config.loadTrackersFile()

    if key == "log_level":
        logging.getLogger('').setLevel(logging.getLevelName(config.log_level))

    if key == "ip_external":
        gevent.spawn(sys.modules["main"].file_server.portCheck)

    self.response(to, "ok")
def announceSites(self):
    """Periodic site maintenance: tracker/PEX announces, bad-file retries
    and peer cleanup, running as an endless loop (~20 min per cycle)."""
    import gc
    first_announce = True  # First start
    while 1:
        # Sites health care every 20 min
        if config.trackers_file:
            config.loadTrackersFile()
        for address, site in self.sites.items():
            if site.settings["serving"]:
                # Full tracker announce on startup, PEX-only afterwards
                announce_fn = site.announce if first_announce else site.announcePex
                announce_fn()

                # Clear every bad file's retry counter, then retry them
                for inner_path in site.bad_files:
                    site.bad_files[inner_path] = 0
                if site.bad_files:
                    site.retryBadFiles()

                site.cleanupPeers()

                # Passive mode: keep 5 active peer connections to get updates
                if self.port_opened is False:
                    site.needConnections()

                if first_announce:
                    site.sendMyHashfield()  # Push my optional-files hashfield

                time.sleep(2)  # Prevent too quick request
        site = None
        gc.collect()  # Implicit garbage collection

        # Find new peers: one tracker per iteration, spread evenly across
        # 20 minutes
        for tracker_i in range(len(config.trackers)):
            time.sleep(60 * 20 / len(config.trackers))
            if config.trackers_file:
                config.loadTrackersFile()
            for address, site in self.sites.items():
                site.announce(num=1, pex=False)
                site.sendMyHashfield(num_send=1)
                time.sleep(2)
        first_announce = False
def announceSites(self):
    """Endless maintenance loop: PEX health care every pass plus a
    round-robin tracker announce spread evenly across ~20 minutes."""
    import gc
    while True:
        # Sites health care every 20 min
        if config.trackers_file:
            config.loadTrackersFile()
        for address, site in self.sites.items():
            if site.settings["serving"]:
                if site.peers:
                    site.announcePex()
                if site.bad_files:  # Retry failed files
                    site.retryBadFiles()
                site.cleanupPeers()
                if self.port_opened is False:
                    # Passive mode: keep 5 active peer connections for updates
                    site.needConnections()
                time.sleep(2)  # Prevent too quick request
        site = None
        gc.collect()  # Implicit garbage collection

        # Find new peers: one tracker per iteration, evenly over 20 minutes
        for tracker_i in range(len(config.trackers)):
            time.sleep(60 * 20 / len(config.trackers))
            if config.trackers_file:
                config.loadTrackersFile()
            for address, site in self.sites.items():
                if site.settings["serving"]:
                    site.announce(mode="update", pex=False)
                    if site.settings["own"]:
                        # Own sites connect eagerly to speed-up first connections
                        site.needConnections()
                    site.sendMyHashfield(3)
                    site.updateHashfield(1)
                    time.sleep(2)
def add(self, address, all_file=True, settings=None, **kwargs):
    """Register a new site by address and optionally start downloading it.

    Returns the new Site object, an already-known site whose address only
    differs in letter case, or False when the address is invalid.
    """
    from .Site import Site
    self.sites_changed = int(time.time())
    # Try to find an existing site with different letter case
    for known_address, known_site in list(self.sites.items()):
        if known_address.lower() == address.lower():
            return known_site
    if not self.isAddress(address):
        return False  # Not address: %s % address
    self.log.debug("Added new site: %s" % address)
    config.loadTrackersFile()
    site = Site(address, settings=settings)
    self.sites[address] = site
    if not site.settings["serving"]:  # Maybe it was deleted before
        site.settings["serving"] = True
        site.saveSettings()
    if all_file:
        # Also download user files on first sync
        site.download(check_size=True, blind_includes=True)
    return site
def announceSites(self):
    """Keep every serving site healthy: PEX announces, bad-file retries and
    peer cleanup each cycle, plus tracker announces spread over 20 minutes."""
    import gc
    while 1:
        # Sites health care every 20 min
        if config.trackers_file:
            config.loadTrackersFile()
        for address, site in self.sites.items():
            if not site.settings["serving"]:
                continue
            if site.peers:
                site.announcePex()
            # Retry failed files
            if site.bad_files:
                site.retryBadFiles()
            site.cleanupPeers()
            # In passive mode keep 5 active peer connections to get the updates
            if self.port_opened is False:
                site.needConnections()
            time.sleep(2)  # Prevent too quick request
        site = None
        gc.collect()  # Implicit garbage collection

        # Find new peers: query all trackers one-by-one, evenly distributed
        # across 20 minutes
        for tracker_i in range(len(config.trackers)):
            time.sleep(60 * 20 / len(config.trackers))
            if config.trackers_file:
                config.loadTrackersFile()
            for address, site in self.sites.items():
                if not site.settings["serving"]:
                    continue
                site.announce(mode="update", pex=False)
                if site.settings["own"]:
                    # Own sites get connections checked more frequently to
                    # speed-up first connections
                    site.needConnections()
                site.sendMyHashfield(3)
                site.updateHashfield(1)
                time.sleep(2)
def actionConfigSet(self, to, key, value):
    """Persist an API-changeable config key and apply it (or queue a restart).

    Responds with a forbidden error for disallowed keys, "ok" otherwise.
    """
    if key not in config.keys_api_change_allowed:
        self.response(to, {"error": "Forbidden you cannot set this config key"})
        return

    config.saveValue(key, value)

    if key in config.keys_restart_need:
        # Takes effect only after a restart; remember the pending change
        config.need_restart = True
        config.pending_changes[key] = value
    else:
        # Apply immediately; None restores the parser's default value
        applied = config.parser.get_default(key) if value is None else value
        setattr(config, key, applied)
        setattr(config.arguments, key, applied)

    if key == "language":
        import Translate
        for translate in Translate.translates:
            translate.setLanguage(value)
        message = _["You have successfully changed the web interface's language!"] + "<br>"
        message += _["Due to the browser's caching, the full transformation could take some minute."]
        self.cmd("notification", ["done", message, 10000])

    if key == "tor_use_bridges":
        # Any non-None value enables bridges; None disables them
        use_bridges = value is not None
        tor_manager = sys.modules["main"].file_server.tor_manager
        tor_manager.request("SETCONF UseBridges=%i" % use_bridges)

    if key == "trackers_file":
        config.loadTrackersFile()

    self.response(to, "ok")
def need(self, address, all_file=True, settings=None):
    """Return the site for address, registering (and downloading) it first
    when it is not known yet. Returns False for an invalid address."""
    from Site import Site
    site = self.get(address)
    if not site:
        # Site does not exist yet
        self.sites_changed = int(time.time())
        # Try to find an existing site with different letter case
        for known_address, known_site in self.sites.items():
            if known_address.lower() == address.lower():
                return known_site
        if not self.isAddress(address):
            return False  # Not address: %s % address
        self.log.debug("Added new site: %s" % address)
        config.loadTrackersFile()
        site = Site(address, settings=settings)
        self.sites[address] = site
        if not site.settings["serving"]:  # Maybe it was deleted before
            site.settings["serving"] = True
            site.saveSettings()
        if all_file:
            # Also download user files on first sync
            site.download(check_size=True, blind_includes=True)
    return site
def trackersFileReloader(self):
    """Reload the trackers file once a minute, forever."""
    while True:
        config.loadTrackersFile()
        time.sleep(60)