def route(self, cmd, req_id, params):
    """Dispatch an incoming peer request to the matching action* handler.

    cmd: command name (e.g. "update", "ping"); req_id: request id echoed
    back in the response; params: dict of command parameters.
    Returns False on a site-lock violation.
    """
    self.req_id = req_id
    # Don't allow other sites than locked
    if "site" in params and self.connection.site_lock and self.connection.site_lock not in (params["site"], "global"):
        self.response({"error": "Invalid site"})
        # Bug fix: the original formatted the *boolean result* of a != comparison
        # into a two-placeholder string, raising TypeError; log both values instead.
        self.log.error("Site lock violation: %s != %s" % (self.connection.site_lock, params["site"]))
        return False
    if cmd == "update":
        event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"])
        if not RateLimit.isAllowed(event):  # There was already an update for this file in the last 10 second
            self.response({"ok": "File update queued"})
        # If called more than once within 10 sec only keep the last update
        RateLimit.callAsync(event, 10, self.actionUpdate, params)
    else:
        # Derive handler name: "ping" -> actionPing, etc.
        func_name = "action" + cmd[0].upper() + cmd[1:]
        func = getattr(self, func_name, None)
        if func:
            func(params)
        else:
            self.actionUnknown(cmd, params)
def route(self, cmd, req_id, params):
    """Dispatch a peer request, enforcing the site lock and penalising
    connections that have consumed too much CPU time.
    """
    self.req_id = req_id
    # Don't allow other sites than locked
    if "site" in params and self.connection.site_lock and self.connection.site_lock not in (params["site"], "global"):
        self.response({"error": "Invalid site"})
        # Bug fix: format string has two placeholders but the original passed a
        # single boolean (the result of !=); pass both values as a tuple.
        self.log.error("Site lock violation: %s != %s" % (self.connection.site_lock, params["site"]))
        self.connection.badAction(5)
        return False
    if cmd == "update":
        event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"])
        if not RateLimit.isAllowed(event):  # There was already an update for this file in the last 10 second
            time.sleep(5)  # Slow down repeat updaters
            self.response({"ok": "File update queued"})
        # If called more than once within 15 sec only keep the last update
        RateLimit.callAsync(event, max(self.connection.bad_actions, 15), self.actionUpdate, params)
    else:
        func_name = "action" + cmd[0].upper() + cmd[1:]
        func = getattr(self, func_name, None)
        if cmd not in ["getFile", "streamFile"]:  # Skip IO bound functions
            s = time.time()
            if self.connection.cpu_time > 0.5:
                self.log.debug("Delay %s %s, cpu_time used by connection: %.3fs" % (self.connection.ip, cmd, self.connection.cpu_time))
                time.sleep(self.connection.cpu_time)
                if self.connection.cpu_time > 5:
                    self.connection.close()
        if func:
            func(params)
        else:
            self.actionUnknown(cmd, params)
        if cmd not in ["getFile", "streamFile"]:
            # Charge the handler's wall-clock time to this connection
            taken = time.time() - s
            self.connection.cpu_time += taken
def route(self, cmd, req_id, params):
    """Route an incoming peer command to the handler that implements it."""
    self.req_id = req_id
    if cmd == "update":
        # Collapse repeated updates: only the newest call within 10s runs
        event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"])
        if not RateLimit.isAllowed(event):  # There was already an update for this file in the last 10 second
            self.response({"ok": "File update queued"})
        # If called more than once within 10 sec only keep the last update
        RateLimit.callAsync(event, 10, self.actionUpdate, params)
        return
    handlers = {
        "getFile": lambda: self.actionGetFile(params),
        "streamFile": lambda: self.actionStreamFile(params),
        "pex": lambda: self.actionPex(params),
        "listModified": lambda: self.actionListModified(params),
        "getHashfield": lambda: self.actionGetHashfield(params),
        "findHashIds": lambda: self.actionFindHashIds(params),
        "ping": lambda: self.actionPing(),
    }
    handler = handlers.get(cmd)
    if handler:
        handler()
    else:
        self.actionUnknown(cmd, params)
def route(self, cmd, req_id, params):
    """Dispatch one incoming command to its action handler."""
    self.req_id = req_id
    if cmd == "update":
        event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"])
        if not RateLimit.isAllowed(event):  # There was already an update for this file in the last 10 second
            self.response({"ok": "File update queued"})
        # If called more than once within 10 sec only keep the last update
        RateLimit.callAsync(event, 10, self.actionUpdate, params)
        return
    # Handlers that take the params dict directly
    simple_actions = {
        "getFile": self.actionGetFile,
        "streamFile": self.actionStreamFile,
        "pex": self.actionPex,
        "listModified": self.actionListModified,
    }
    if cmd in simple_actions:
        simple_actions[cmd](params)
    elif cmd == "ping":
        self.actionPing()
    else:
        self.actionUnknown(cmd, params)
def route(self, cmd, req_id, params):
    """Dispatch a peer request; reject commands for sites other than the locked one."""
    self.req_id = req_id
    # Don't allow other sites than locked
    if (
        "site" in params and
        self.connection.site_lock and
        self.connection.site_lock not in (params["site"], "global")
    ):
        self.response({"error": "Invalid site"})
        # Bug fix: log both sides of the mismatch; the original formatted a
        # single boolean (result of !=) into a two-placeholder string,
        # which raises TypeError at runtime.
        self.log.error("Site lock violation: %s != %s" % (self.connection.site_lock, params["site"]))
        return False
    if cmd == "update":
        event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"])
        if not RateLimit.isAllowed(event):  # There was already an update for this file in the last 10 second
            self.response({"ok": "File update queued"})
        # If called more than once within 10 sec only keep the last update
        RateLimit.callAsync(event, 10, self.actionUpdate, params)
    else:
        func_name = "action" + cmd[0].upper() + cmd[1:]
        func = getattr(self, func_name, None)
        if func:
            func(params)
        else:
            self.actionUnknown(cmd, params)
def actionFindHashIds(self, params):
    """Look up peers for the requested hash ids and report which of them
    this node itself has in its hashfield."""
    site = self.sites.get(params["site"])
    started = time.time()
    if not site or not site.isServing():  # Site unknown or not serving
        self.response({"error": "Unknown site"})
        self.connection.badAction(5)
        return False

    event_key = "%s_findHashIds_%s_%s" % (self.connection.ip, params["site"], len(params["hash_ids"]))
    throttled = self.connection.cpu_time > 0.5 or not RateLimit.isAllowed(event_key, 60 * 5)
    if throttled:
        # Busy or repeat caller: slow it down and cap the result size
        time.sleep(0.1)
        back = self.findHashIds(site, params["hash_ids"], limit=10)
    else:
        back = self.findHashIds(site, params["hash_ids"])
    RateLimit.called(event_key)

    # Which of the requested hashes do we hold ourselves?
    my_hashfield_set = set(site.content_manager.hashfield)
    my_hashes = [hash_id for hash_id in params["hash_ids"] if hash_id in my_hashfield_set]

    if config.verbose:
        self.log.debug(
            "Found: %s for %s hashids in %.3fs" %
            ({key: len(val) for key, val in back.items()}, len(params["hash_ids"]), time.time() - started)
        )
    self.response({"peers": back["ipv4"], "peers_onion": back["onion"], "peers_ipv6": back["ipv6"], "my": my_hashes})
def testCallAsync(self):
    """RateLimit.callAsync: the first call runs instantly; further calls
    within the allowed_again window are coalesced so only the *last* one
    executes when the window expires.
    """
    obj1 = ExampleClass()
    obj2 = ExampleClass()

    s = time.time()
    RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #1").join()
    assert obj1.counted == 1  # First instant
    assert around(time.time() - s, 0.0)

    # After that the calls delayed
    s = time.time()
    t1 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #2")  # Dumped by the next call
    time.sleep(0.03)
    t2 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #3")  # Dumped by the next call
    time.sleep(0.03)
    t3 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #4")  # Will be called
    assert obj1.counted == 1  # Delay still in progress: Not called yet
    t3.join()
    assert t3.value == "call #4"
    assert around(time.time() - s, 0.1)  # Only the last one called
    assert obj1.counted == 2
    assert obj1.last_called == "call #4"

    # Allowed again instantly
    assert RateLimit.isAllowed("counting async", 0.1)
    s = time.time()
    RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #5").join()
    assert obj1.counted == 3
    assert around(time.time() - s, 0.0)
    assert not RateLimit.isAllowed("counting async", 0.1)
    time.sleep(0.11)
    assert RateLimit.isAllowed("counting async", 0.1)
def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True):
    """Sign (optionally) then publish inner_path, rate-limited to once per 30s."""
    if sign:
        inner_path = self.actionSiteSign(to, privatekey, inner_path, response_ok=False)
        if not inner_path:
            return

    # Make sure the site is being served before announcing new content
    if not self.site.settings["serving"]:
        self.site.settings["serving"] = True
        self.site.saveSettings()
        self.site.announce()

    event_name = "publish %s %s" % (self.site.address, inner_path)
    instant = RateLimit.isAllowed(event_name, 30)
    publish_thread = RateLimit.callAsync(event_name, 30, self.doSitePublish, self.site, inner_path)  # Max one publish per 30s
    first_callback = "linked" not in dir(publish_thread)  # Show notification only for the first caller
    publish_thread.linked = True

    if instant:
        # Publishing started right away: stream progress to the UI
        self.cmd("progress", ["publish", _["Content published to {0}/{1} peers."].format(0, 5), 0])
        publish_thread.link(
            lambda thread: self.cbSitePublish(to, self.site, thread, first_callback, callback=first_callback)
        )
    else:
        # Queued behind the rate limit: tell the user how long they wait
        delay = RateLimit.delayLeft(event_name, 30)
        self.cmd("notification", ["info", _["Content publish queued for {0:.0f} seconds."].format(delay), 5000])
        self.response(to, "ok")
        publish_thread.link(
            lambda thread: self.cbSitePublish(to, self.site, thread, first_callback, callback=False)
        )
def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True):
    """Sign (optionally) and publish inner_path; errors out if the content
    file is not known to the site's content manager.
    """
    if sign:
        inner_path = self.actionSiteSign(to, privatekey, inner_path, response_ok=False)
        if not inner_path:
            return

    # Publishing: enable site if paused
    if not self.site.settings["serving"]:
        self.site.settings["serving"] = True
        self.site.saveSettings()
        self.site.announce()

    # Idiom fix: "x not in y" instead of "not x in y"
    if inner_path not in self.site.content_manager.contents:
        return self.response(to, {"error": "File %s not found" % inner_path})

    event_name = "publish %s %s" % (self.site.address, inner_path)
    called_instantly = RateLimit.isAllowed(event_name, 30)
    thread = RateLimit.callAsync(event_name, 30, self.doSitePublish, self.site, inner_path)  # Only publish once in 30 seconds
    notification = "linked" not in dir(thread)  # Only display notification on first callback
    thread.linked = True
    if called_instantly:  # Allowed to call instantly
        # At the end callback with request id and thread
        self.cmd("progress", ["publish", _["Content published to {0}/{1} peers."].format(0, 5), 0])
        thread.link(lambda thread: self.cbSitePublish(to, self.site, thread, notification, callback=notification))
    else:
        self.cmd(
            "notification",
            ["info", _["Content publish queued for {0:.0f} seconds."].format(RateLimit.delayLeft(event_name, 30)), 5000]
        )
        self.response(to, "ok")
        # At the end display notification
        thread.link(lambda thread: self.cbSitePublish(to, self.site, thread, notification, callback=False))
def cbMergerSiteAdd(self, to, addresses):
    """Download the given merged-site addresses and notify the user."""
    added = 0
    for address in addresses:
        site_manager.need(address)
        # Bug fix: count only after need() returns, so an address that raises
        # does not inflate the reported number of added sites.
        added += 1
    if added:
        self.cmd("notification", ["done", "Added <b>%s</b> new site" % added, 5000])
    RateLimit.called(self.site.address + "-MergerSiteAdd")
    site_manager.updateMergerSites()
def testCall(self):
    """RateLimit.call: the first call is instant; repeat calls within the
    allowed_again window block until the interval expires, and concurrent
    callers are serialized one interval apart.
    """
    obj1 = ExampleClass()
    obj2 = ExampleClass()

    s = time.time()
    assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted"
    assert around(time.time() - s, 0.0)  # First allow to call instantly
    assert obj1.counted == 1

    # Call again
    assert not RateLimit.isAllowed("counting", 0.1)
    assert RateLimit.isAllowed("something else", 0.1)
    assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted"
    assert around(time.time() - s, 0.1)  # Delays second call within interval
    assert obj1.counted == 2

    # Call 3 times async
    s = time.time()
    assert obj2.counted == 0
    threads = [
        gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)),  # Instant
        gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)),  # 0.1s delay
        gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count))  # 0.2s delay
    ]
    gevent.joinall(threads)
    assert [thread.value for thread in threads] == ["counted", "counted", "counted"]
    assert around(time.time() - s, 0.2)

    # No queue = instant again
    s = time.time()
    assert RateLimit.isAllowed("counting", 0.1)
    assert RateLimit.call("counting", allowed_again=0.1, func=obj2.count) == "counted"
    assert around(time.time() - s, 0.0)
    assert obj2.counted == 4
def actionFindHashIds(self, params):
    """Return peers (ipv4/ipv6/onion) known to have the requested hash ids,
    adding this node's own address for the hashes it holds itself.
    """
    site = self.sites.get(params["site"])
    s = time.time()
    if not site or not site.settings["serving"]:  # Site unknown or not serving
        self.response({"error": "Unknown site"})
        self.connection.badAction(5)
        return False

    event_key = "%s_findHashIds_%s_%s" % (self.connection.ip, params["site"], len(params["hash_ids"]))
    if self.connection.cpu_time > 0.5 or not RateLimit.isAllowed(event_key, 60 * 5):
        # Busy or repeat caller: throttle and cap the result size
        time.sleep(0.1)
        back = self.findHashIds(site, params["hash_ids"], limit=10)
    else:
        back = self.findHashIds(site, params["hash_ids"])
    RateLimit.called(event_key)

    # Check my hashfield: work out which address to advertise for ourselves
    if self.server.tor_manager and self.server.tor_manager.getOnion(site.address):  # Running onion
        my_ip = helper.packOnionAddress(self.server.tor_manager.getOnion(site.address), self.server.port)
        my_ip_type = "onion"
    elif config.ip_external:  # External ip defined
        my_ip = helper.packAddress(config.ip_external, self.server.port)
        my_ip_type = helper.getIpType(config.ip_external)
    elif self.server.ip and self.server.ip != "*":  # No external ip defined
        my_ip = helper.packAddress(self.server.ip, self.server.port)
        my_ip_type = helper.getIpType(self.server.ip)
    else:
        my_ip = None
        my_ip_type = "ipv4"

    my_hashfield_set = set(site.content_manager.hashfield)
    for hash_id in params["hash_ids"]:
        if hash_id in my_hashfield_set:
            if hash_id not in back[my_ip_type]:
                back[my_ip_type][hash_id] = []
            if my_ip:
                back[my_ip_type][hash_id].append(my_ip)  # Add myself

    if config.verbose:
        # Bug fix: dict.iteritems() is Python 2-only; use items() as the other
        # actionFindHashIds variant in this file already does.
        self.log.debug(
            "Found: %s for %s hashids in %.3fs" %
            ({key: len(val) for key, val in back.items()}, len(params["hash_ids"]), time.time() - s)
        )
    self.response({"peers": back["ipv4"], "peers_onion": back["onion"], "peers_ipv6": back["ipv6"]})
def cbMergerSiteAdd(self, to, addresses):
    """Try to add each merged site; report successes and failures to the UI."""
    added = 0
    for address in addresses:
        try:
            site_manager.need(address)
        except Exception as err:
            # One bad address must not stop the rest from being added
            self.cmd("notification", ["error", _["Adding <b>%s</b> failed: %s"] % (address, err)])
            continue
        added += 1
    if added:
        self.cmd("notification", ["done", _["Added <b>%s</b> new site"] % added, 5000])
    RateLimit.called(self.site.address + "-MergerSiteAdd")
    site_manager.updateMergerSites()
def route(self, cmd, req_id, params): self.req_id = req_id # Don't allow other sites than locked if "site" in params and self.connection.target_onion: valid_sites = self.connection.getValidSites() if params["site"] not in valid_sites: self.response({"error": "Invalid site"}) self.connection.log( "Site lock violation: %s not in %s, target onion: %s" % (params["site"], valid_sites, self.connection.target_onion)) self.connection.badAction(5) return False elif "site" in params and self.connection.target_i2p: valid_sites = self.connection.getValidI2PSites() if params["site"] not in valid_sites: self.response({"error": "Invalid site"}) self.connection.log( "Site lock violation: %s not in %s, target i2p: %s" % (params["site"], valid_sites, self.connection.target_i2p)) self.connection.badAction(5) return False if cmd == "update": event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"]) # If called more than once within 15 sec only keep the last update RateLimit.callAsync(event, max(self.connection.bad_actions, 15), self.actionUpdate, params) else: func_name = "action" + cmd[0].upper() + cmd[1:] func = getattr(self, func_name, None) if cmd not in ["getFile", "streamFile"]: # Skip IO bound functions if self.connection.cpu_time > 0.5: self.log.debug( "Delay %s %s, cpu_time used by connection: %.3fs" % (self.connection.ip, cmd, self.connection.cpu_time)) time.sleep(self.connection.cpu_time) if self.connection.cpu_time > 5: self.connection.close("Cpu time: %.3fs" % self.connection.cpu_time) s = time.time() if func: func(params) else: self.actionUnknown(cmd, params) if cmd not in ["getFile", "streamFile"]: taken = time.time() - s taken_sent = self.connection.last_sent_time - self.connection.last_send_time self.connection.cpu_time += taken - taken_sent
def actionFindHashIds(self, params):
    """Return IPv4 and onion peers known for the requested hash ids,
    adding our own address for the hashes we hold ourselves.
    """
    site = self.sites.get(params["site"])
    s = time.time()
    if not site or not site.settings["serving"]:  # Site unknown or not serving
        self.response({"error": "Unknown site"})
        self.connection.badAction(5)
        return False

    event_key = "%s_findHashIds_%s_%s" % (self.connection.ip, params["site"], len(params["hash_ids"]))
    if self.connection.cpu_time > 0.5 or not RateLimit.isAllowed(event_key, 60 * 5):
        # Busy or repeat caller: throttle and cap the result size
        time.sleep(0.1)
        back_ip4, back_onion = self.findHashIds(site, params["hash_ids"], limit=10)
    else:
        back_ip4, back_onion = self.findHashIds(site, params["hash_ids"])
    RateLimit.called(event_key)

    # Check my hashfield: pick the address to advertise for ourselves
    if self.server.tor_manager and self.server.tor_manager.site_onions.get(site.address):  # Running onion
        my_ip = helper.packOnionAddress(self.server.tor_manager.site_onions[site.address], self.server.port)
        my_back = back_onion
    elif config.ip_external:  # External ip defined
        my_ip = helper.packAddress(config.ip_external, self.server.port)
        my_back = back_ip4
    else:  # No external ip defined
        # Bug fix: removed the duplicated "my_ip = my_ip =" assignment
        my_ip = helper.packAddress(self.server.ip, self.server.port)
        my_back = back_ip4

    my_hashfield_set = set(site.content_manager.hashfield)
    for hash_id in params["hash_ids"]:
        if hash_id in my_hashfield_set:
            if hash_id not in my_back:
                my_back[hash_id] = []
            my_back[hash_id].append(my_ip)  # Add myself

    if config.verbose:
        self.log.debug(
            "Found: IP4: %s, Onion: %s for %s hashids in %.3fs" %
            (len(back_ip4), len(back_onion), len(params["hash_ids"]), time.time() - s)
        )
    self.response({"peers": back_ip4, "peers_onion": back_onion})
def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True):
    """Optionally sign, then publish inner_path to peers (max once per 7s)."""
    if sign:
        inner_path = self.actionSiteSign(to, privatekey, inner_path, response_ok=False)
        if not inner_path:
            return

    # Re-enable serving if the site was paused, and announce ourselves
    if not self.site.settings["serving"]:
        self.site.settings["serving"] = True
        self.site.saveSettings()
        self.site.announce()

    event_name = "publish %s %s" % (self.site.address, inner_path)
    publish_thread = RateLimit.callAsync(event_name, 7, self.site.publish, 5, inner_path)  # Only publish once in 7 second to 5 peers
    show_notification = "linked" not in dir(publish_thread)  # Only display notification on first callback
    publish_thread.linked = True
    # At the end callback with request id and thread
    publish_thread.link(lambda thread: self.cbSitePublish(to, thread, show_notification))
def actionSitePublish(self, to, privatekey=None, inner_path="content.json"):
    """Sign inner_path with the given (or stored) private key and publish it to peers."""
    site = self.site
    if not inner_path.endswith("content.json"):  # Find the content.json first
        inner_path = site.content_manager.getFileInfo(inner_path)["content_inner_path"]
    if not site.settings["own"] and self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path):
        return self.response(to, "Forbidden, you can only modify your own sites")
    if not privatekey:  # Get privatekey from users.json
        privatekey = self.user.getAuthPrivatekey(self.site.address)

    # Signing
    site.content_manager.loadContent(add_bad_files=False)  # Reload content.json, ignore errors to make it up-to-date
    signed = site.content_manager.sign(inner_path, privatekey)  # Sign using private key sent by user
    if signed:
        # Bug fix: compare against "content.json" — the typo "content_json"
        # meant the success notification never appeared
        if inner_path == "content.json":
            self.cmd("notification", ["done", "Private key correct, content signed!", 5000])  # Display message for 5 sec
    else:
        self.cmd("notification", ["error", "Content sign failed: invalid private key."])
        self.response(to, "Site sign failed")
        return
    site.content_manager.loadContent(add_bad_files=False)  # Load new content.json, ignore errors

    # Publishing
    if not site.settings["serving"]:  # Enable site if paused
        site.settings["serving"] = True
        site.saveSettings()
        site.announce()

    event_name = "publish %s %s" % (site.address, inner_path)
    thread = RateLimit.callAsync(event_name, 7, site.publish, 5, inner_path)  # Only publish once in 7 second to 5 peers
    notification = "linked" not in dir(thread)  # Only display notification on first callback
    thread.linked = True
    thread.link(lambda thread: self.cbSitePublish(to, thread, notification))  # At the end callback with request id and thread
def actionFindHashIds(self, params):
    """Find IPv4/onion peers for the requested hash ids and include our own address."""
    site = self.sites.get(params["site"])
    started = time.time()
    if not site or not site.settings["serving"]:  # Site unknown or not serving
        self.response({"error": "Unknown site"})
        self.connection.badAction(5)
        return False

    event_key = "%s_findHashIds_%s_%s" % (self.connection.ip, params["site"], len(params["hash_ids"]))
    limited = self.connection.cpu_time > 0.5 or not RateLimit.isAllowed(event_key, 60 * 5)
    if limited:
        # Busy or repeat caller: throttle and cap the result size
        time.sleep(0.1)
        back_ip4, back_onion = self.findHashIds(site, params["hash_ids"], limit=10)
    else:
        back_ip4, back_onion = self.findHashIds(site, params["hash_ids"])
    RateLimit.called(event_key)

    # Decide what address (if any) to advertise for ourselves
    if self.server.tor_manager and self.server.tor_manager.site_onions.get(site.address):  # Running onion
        my_ip = helper.packOnionAddress(self.server.tor_manager.site_onions[site.address], self.server.port)
        my_back = back_onion
    else:
        my_back = back_ip4
        if config.ip_external:  # External ip defined
            my_ip = helper.packAddress(config.ip_external, self.server.port)
        elif self.server.ip and self.server.ip != "*":  # Local listening ip known
            my_ip = helper.packAddress(self.server.ip, self.server.port)
        else:
            my_ip = None

    my_hashfield_set = set(site.content_manager.hashfield)
    for hash_id in params["hash_ids"]:
        if hash_id in my_hashfield_set:
            entry = my_back.setdefault(hash_id, [])
            if my_ip:
                entry.append(my_ip)  # Add myself

    if config.verbose:
        self.log.debug(
            "Found: IP4: %s, Onion: %s for %s hashids in %.3fs" %
            (len(back_ip4), len(back_onion), len(params["hash_ids"]), time.time() - started)
        )
    self.response({"peers": back_ip4, "peers_onion": back_onion})
def actionSitePublish(self, to, privatekey=None, inner_path="content.json"):
    """Sign inner_path (using the supplied or stored private key) and publish it."""
    site = self.site
    if not inner_path.endswith("content.json"):  # Find the content.json first
        inner_path = site.content_manager.getFileInfo(inner_path)["content_inner_path"]
    if not site.settings["own"] and self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path):
        return self.response(to, "Forbidden, you can only modify your own sites")
    if not privatekey:  # Get privatekey from users.json
        privatekey = self.user.getAuthPrivatekey(self.site.address)

    # Signing
    site.content_manager.loadContent(add_bad_files=False)  # Reload content.json, ignore errors to make it up-to-date
    signed = site.content_manager.sign(inner_path, privatekey)  # Sign using private key sent by user
    if signed:
        # Bug fix: the original compared against the typo "content_json", so
        # the success notification could never be shown
        if inner_path == "content.json":
            self.cmd("notification", ["done", "Private key correct, content signed!", 5000])  # Display message for 5 sec
    else:
        self.cmd("notification", ["error", "Content sign failed: invalid private key."])
        self.response(to, "Site sign failed")
        return
    site.content_manager.loadContent(add_bad_files=False)  # Load new content.json, ignore errors

    # Publishing
    if not site.settings["serving"]:  # Enable site if paused
        site.settings["serving"] = True
        site.saveSettings()
        site.announce()

    event_name = "publish %s %s" % (site.address, inner_path)
    thread = RateLimit.callAsync(event_name, 7, site.publish, 5, inner_path)  # Only publish once in 7 second to 5 peers
    notification = "linked" not in dir(thread)  # Only display notification on first callback
    thread.linked = True
    thread.link(lambda thread: self.cbSitePublish(to, thread, notification))  # At the end callback with request id and thread
def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True):
    """Optionally sign, then publish; rate-limited to one publish per 30 seconds."""
    if sign:
        inner_path = self.actionSiteSign(to, privatekey, inner_path, response_ok=False)
        if not inner_path:
            return

    # Enable serving if the site was paused
    if not self.site.settings["serving"]:
        self.site.settings["serving"] = True
        self.site.saveSettings()
        self.site.announce()

    event_name = "publish %s %s" % (self.site.address, inner_path)
    instant = RateLimit.isAllowed(event_name, 30)
    publish_thread = RateLimit.callAsync(event_name, 30, self.site.publish, 5, inner_path)  # Only publish once in 30 seconds to 5 peer
    first_callback = "linked" not in dir(publish_thread)  # Only display notification on first callback
    publish_thread.linked = True

    if instant:  # Allowed to call instantly
        publish_thread.link(lambda thread: self.cbSitePublish(to, thread, first_callback, callback=first_callback))
    else:
        queued_for = RateLimit.delayLeft(event_name, 30)
        self.cmd("notification", ["info", "Content publish queued for %.0f seconds." % queued_for, 5000])
        self.response(to, "ok")
        # At the end display notification
        publish_thread.link(lambda thread: self.cbSitePublish(to, thread, first_callback, callback=False))
def route(self, cmd, req_id, params): self.req_id = req_id # Don't allow other sites than locked if "site" in params and self.connection.target_onion: valid_sites = self.connection.getValidSites() if params["site"] not in valid_sites and valid_sites != ["global"]: self.response({"error": "Invalid site"}) self.connection.log( "Site lock violation: %s not in %s, target onion: %s" % (params["site"], valid_sites, self.connection.target_onion) ) self.connection.badAction(5) return False if cmd == "update": event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"]) # If called more than once within 15 sec only keep the last update RateLimit.callAsync(event, max(self.connection.bad_actions, 15), self.actionUpdate, params) else: func_name = "action" + cmd[0].upper() + cmd[1:] func = getattr(self, func_name, None) if cmd not in ["getFile", "streamFile"]: # Skip IO bound functions if self.connection.cpu_time > 0.5: self.log.debug( "Delay %s %s, cpu_time used by connection: %.3fs" % (self.connection.ip, cmd, self.connection.cpu_time) ) time.sleep(self.connection.cpu_time) if self.connection.cpu_time > 5: self.connection.close("Cpu time: %.3fs" % self.connection.cpu_time) s = time.time() if func: func(params) else: self.actionUnknown(cmd, params) if cmd not in ["getFile", "streamFile"]: taken = time.time() - s taken_sent = self.connection.last_sent_time - self.connection.last_send_time self.connection.cpu_time += taken - taken_sent
def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True):
    """Sign if requested, then push inner_path out to peers (once per 7s)."""
    if sign:
        inner_path = self.actionSiteSign(to, privatekey, inner_path, response_ok=False)
        if not inner_path:
            return

    if not self.site.settings["serving"]:  # Enable site if paused
        self.site.settings["serving"] = True
        self.site.saveSettings()
        self.site.announce()

    event_name = "publish %s %s" % (self.site.address, inner_path)
    thread = RateLimit.callAsync(event_name, 7, self.site.publish, 5, inner_path)  # Only publish once in 7 second to 5 peers
    is_first = "linked" not in dir(thread)  # Notify only on the first callback
    thread.linked = True
    # Callback with the request id and the thread when publishing finishes
    thread.link(lambda thread: self.cbSitePublish(to, thread, is_first))
def actionMergerSiteAdd(self, to, addresses):
    """Request adding one or more merged sites under this merger site."""
    if not isinstance(addresses, list):  # Single site add
        addresses = [addresses]
    # Check if the site has merger permission
    merger_types = merger_db.get(self.site.address)
    if not merger_types:
        return self.response(to, {"error": "Not a merger site"})

    if RateLimit.isAllowed(self.site.address + "-MergerSiteAdd", 10) and len(addresses) == 1:
        # Without confirmation if only one site address and not called in last 10 sec
        self.cbMergerSiteAdd(to, addresses)
    else:
        # Bug fix: "lambda (res):" is Python 2-only tuple-unpacking syntax
        # (removed by PEP 3113) and a SyntaxError on Python 3
        self.cmd("confirm", ["Add <b>%s</b> new site?" % len(addresses), "Add"], lambda res: self.cbMergerSiteAdd(to, addresses))
    self.response(to, "ok")
def actionMergerSiteAdd(self, to, addresses):
    """Request adding one or more merged sites under this merger site."""
    if not isinstance(addresses, list):  # Single site add
        addresses = [addresses]
    # Check if the site has merger permission
    merger_types = merger_db.get(self.site.address)
    if not merger_types:
        return self.response(to, {"error": "Not a merger site"})

    if RateLimit.isAllowed(self.site.address + "-MergerSiteAdd", 10) and len(addresses) == 1:
        # Without confirmation if only one site address and not called in last 10 sec
        self.cbMergerSiteAdd(to, addresses)
    else:
        # Bug fix: "lambda (res):" is Python 2-only tuple-unpacking syntax
        # (removed by PEP 3113) and a SyntaxError on Python 3
        self.cmd(
            "confirm",
            ["Add <b>%s</b> new site?" % len(addresses), "Add"],
            lambda res: self.cbMergerSiteAdd(to, addresses)
        )
    self.response(to, "ok")
def saveDelayed(self):
    """Queue an async save of sites.json; repeated calls within 5 seconds
    are coalesced by RateLimit so only one save runs."""
    RateLimit.callAsync("Save sites.json", allowed_again=5, func=self.save)