def site(request): threads_before = [obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet)] # Reset ratelimit RateLimit.queue_db = {} RateLimit.called_db = {} site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") # Always use original data assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in site.storage.getPath("") # Make sure we dont delete everything shutil.rmtree(site.storage.getPath(""), True) shutil.copytree(site.storage.getPath("") + "-original", site.storage.getPath("")) # Add to site manager SiteManager.site_manager.get("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") site.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net def cleanup(): site.delete() site.content_manager.contents.db.close("Test cleanup") site.content_manager.contents.db.timer_check_optional.kill() SiteManager.site_manager.sites.clear() db_path = "%s/content.db" % config.data_dir os.unlink(db_path) del ContentDb.content_dbs[db_path] gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before]) request.addfinalizer(cleanup) site.greenlet_manager.stopGreenlets() site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") # Create new Site object to load content.json files if not SiteManager.site_manager.sites: SiteManager.site_manager.sites = {} SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] = site site.settings["serving"] = True return site
def siteVerify(self, address):
    """Verify every content.json signature and all file checksums of a site.

    Logs [OK]/[ERROR] per content.json, pauses for confirmation on invalid
    files, then runs a full storage verification.
    """
    import time
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()

    s = time.time()
    logging.info("Verifing site: %s..." % address)
    site = Site(address)
    bad_files = []

    for content_inner_path in site.content_manager.contents:
        s = time.time()
        logging.info("Verifing %s signature..." % content_inner_path)
        err = None
        try:
            file_correct = site.content_manager.verifyFile(
                content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False
            )
        except Exception as verify_err:
            file_correct = False
            # Keep a reference: the `as` name is unbound once the except block
            # exits (Python 3), so logging `err` later would raise NameError.
            err = verify_err

        if file_correct is True:
            logging.info("[OK] %s (Done in %.3fs)" % (content_inner_path, time.time() - s))
        else:
            logging.error("[ERROR] %s: invalid file: %s!" % (content_inner_path, err))
            input("Continue?")
            # Fix: `bad_files += content_inner_path` extended the list with the
            # path's individual characters; append the whole path instead.
            bad_files.append(content_inner_path)

    logging.info("Verifying site files...")
    bad_files += site.storage.verifyFiles()["bad_files"]
    if not bad_files:
        logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time() - s))
    else:
        logging.error("[ERROR] Error during verifying site files!")
def siteDownload(self, address):
    """Download a site's content.json (with modifications) over a local connection server."""
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()

    logging.info("Opening a simple connection server")
    global file_server
    from File import FileServer
    file_server = FileServer("127.0.0.1", 1234)
    gevent.spawn(file_server.start, check_sites=False)

    site = Site(address)

    on_completed = gevent.event.AsyncResult()

    def markDone(result):
        result.set(True)

    # Flag the AsyncResult once the site reports completion
    site.onComplete.once(lambda: markDone(on_completed))

    print("Announcing...")
    site.announce()

    started = time.time()
    print("Downloading...")
    site.downloadContent("content.json", check_modifications=True)
    print("Downloaded in %.3fs" % (time.time()-started))
def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False, remove_missing_optional=False):
    """Sign a site's content file, resolving the private key from users.json or the console.

    Optionally publishes the signed content afterwards.
    """
    from Site.Site import Site
    from Site import SiteManager
    from Debug import Debug
    SiteManager.site_manager.load()
    logging.info("Signing site: %s..." % address)
    site = Site(address, allow_create=False)

    if not privatekey:  # If no privatekey defined
        # First look in the local user's stored site data
        from User import UserManager
        user = UserManager.user_manager.get()
        privatekey = user.getSiteData(address).get("privatekey") if user else None
        if not privatekey:
            # Not found in users.json, ask from console
            import getpass
            privatekey = getpass.getpass("Private key (input hidden):")

    try:
        succ = site.content_manager.sign(
            inner_path=inner_path, privatekey=privatekey,
            update_changed_files=True, remove_missing_optional=remove_missing_optional
        )
    except Exception as err:
        logging.error("Sign error: %s" % Debug.formatException(err))
        succ = False

    if succ and publish:
        self.sitePublish(address, inner_path=inner_path)
def site_temp(request):
    """Pytest fixture: a serving Site rooted in a temporary ``-temp`` data dir."""
    # Remember which greenlets already exist so only test-spawned ones get killed
    greenlets_before = [g for g in gc.get_objects() if isinstance(g, gevent.Greenlet)]

    with mock.patch("Config.config.data_dir", config.data_dir + "-temp"):
        site_temp = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
        site_temp.settings["serving"] = True
        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net

        def cleanup():
            site_temp.delete()
            site_temp.content_manager.contents.db.close("Test cleanup")
            site_temp.content_manager.contents.db.timer_check_optional.kill()
            db_path = "%s-temp/content.db" % config.data_dir
            os.unlink(db_path)
            del ContentDb.content_dbs[db_path]
            leftover = [
                g for g in gc.get_objects()
                if isinstance(g, gevent.Greenlet) and g not in greenlets_before
            ]
            gevent.killall(leftover)
        request.addfinalizer(cleanup)

    site_temp.log = logging.getLogger("Temp:%s" % site_temp.address_short)
    return site_temp
def siteCreate(self, use_master_seed=True):
    """Create a new site: generate a keypair, the directory skeleton and a signed content.json.

    use_master_seed: derive the key from the local user's master seed
    (users.json) instead of generating an independent private key.
    """
    # Fix: the message previously interpolated config.use_master_seed, which can
    # disagree with the parameter this function actually branches on.
    logging.info("Generating new privatekey (use_master_seed: %s)..." % use_master_seed)
    from Crypt import CryptBitcoin
    if use_master_seed:
        from User import UserManager
        user = UserManager.user_manager.get()
        if not user:
            user = UserManager.user_manager.create()
        address, address_index, site_data = user.getNewSiteData()
        privatekey = site_data["privatekey"]
        logging.info("Generated using master seed from users.json, site index: %s" % address_index)
    else:
        privatekey = CryptBitcoin.newPrivatekey()
        address = CryptBitcoin.privatekeyToAddress(privatekey)
    logging.info("----------------------------------------------------------------------")
    logging.info("Site private key: %s" % privatekey)
    logging.info(" !!! ^ Save it now, required to modify the site ^ !!!")
    logging.info("Site address: %s" % address)
    logging.info("----------------------------------------------------------------------")

    # Seed-derived keys are recoverable, so only prompt for manual keys
    # (redundant `True and` removed from the condition)
    while not config.batch and not use_master_seed:
        if input("? Have you secured your private key? (yes, no) > ").lower() == "yes":
            break
        else:
            logging.info("Please, secure it now, you going to need it to modify your site!")

    logging.info("Creating directory structure...")
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()
    os.mkdir("%s/%s" % (config.data_dir, address))
    # Fix: close the file handle (was a bare open(...).write(...))
    with open("%s/%s/index.html" % (config.data_dir, address), "w") as index_file:
        index_file.write("Hello %s!" % address)

    logging.info("Creating content.json...")
    site = Site(address)
    extend = {"postmessage_nonce_security": True}
    if use_master_seed:
        extend["address_index"] = address_index
    site.content_manager.sign(privatekey=privatekey, extend=extend)
    site.settings["own"] = True
    site.saveSettings()
    logging.info("Site created!")
def dbQuery(self, address, query):
    """Run an SQL query against a site's content database and print the rows as JSON."""
    import json
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()
    site = Site(address)
    rows = [dict(row) for row in site.storage.query(query)]
    print(json.dumps(rows, indent=4))
def siteAnnounce(self, address):
    """Announce a site to its trackers and print the response time and peers."""
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()

    logging.info("Opening a simple connection server")
    global file_server
    from File import FileServer
    file_server = FileServer("127.0.0.1", 1234)
    file_server.start()

    logging.info("Announcing site %s to tracker..." % address)
    site = Site(address)

    started = time.time()
    site.announce()
    print("Response time: %.3fs" % (time.time() - started))
    print(site.peers)
def siteNeedFile(self, address, inner_path):
    """Fetch a single file of a site from the network and print the result."""
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()

    def checker():
        # Background probe: report how long each 1s sleep really takes
        # (reveals greenlet scheduling delays)
        while True:
            started = time.time()
            time.sleep(1)
            print("Switch time:", time.time() - started)
    gevent.spawn(checker)

    logging.info("Opening a simple connection server")
    global file_server
    from File import FileServer
    file_server = FileServer("127.0.0.1", 1234)
    gevent.spawn(file_server.start, check_sites=False)

    site = Site(address)
    site.announce()
    print(site.needFile(inner_path, update=True))
def site_temp(request):
    """Pytest fixture: a Site in a ``-temp`` data dir, wiped completely on teardown."""
    # Snapshot of currently-alive greenlets; cleanup kills only newer ones
    greenlets_before = [g for g in gc.get_objects() if isinstance(g, gevent.Greenlet)]

    with mock.patch("Config.config.data_dir", config.data_dir + "-temp"):
        site_temp = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net

        def cleanup():
            site_temp.storage.deleteFiles()
            site_temp.content_manager.contents.db.deleteSite(site_temp)
            site_temp.content_manager.contents.db.close()
            time.sleep(0.01)  # Wait for db close
            db_path = "%s-temp/content.db" % config.data_dir
            os.unlink(db_path)
            del ContentDb.content_dbs[db_path]
            gevent.killall([
                g for g in gc.get_objects()
                if isinstance(g, gevent.Greenlet) and g not in greenlets_before
            ])
        request.addfinalizer(cleanup)

    return site_temp
def testFindOptional(self, file_server, site, site_temp):
    """Optional files are located via findHashIds on a second, "full" server.

    Setup: `site` serves on file_server but its hashfield is emptied, so it
    cannot serve optional files itself; `site_full` (port 1546) has them.
    The client (`site_temp`) must discover the full peer through the source
    server and download both optional files, issuing findHashIds only once.
    """
    # Init source server
    site.connection_server = file_server
    file_server.sites[site.address] = site

    # Init full source server (has optional files)
    site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
    file_server_full = FileServer(file_server.ip, 1546)
    site_full.connection_server = file_server_full

    def listen():
        ConnectionServer.start(file_server_full)
        ConnectionServer.listen(file_server_full)

    gevent.spawn(listen)
    time.sleep(0.001)  # Port opening
    file_server_full.sites[site_full.address] = site_full  # Add site
    site_full.storage.verifyFiles(quick_check=True)  # Check optional files
    site_full_peer = site.addPeer(file_server.ip, 1546)  # Add it to source server
    hashfield = site_full_peer.updateHashfield()  # Update hashfield
    # The test fixture site carries exactly 8 optional-file hashes
    assert len(site_full.content_manager.hashfield) == 8
    assert hashfield
    assert site_full.storage.isFile("data/optional.txt")
    assert site_full.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
    assert len(site_full_peer.hashfield) == 8

    # Remove hashes from source server, so it can't serve the optional files itself
    for hash in list(site.content_manager.hashfield):
        site.content_manager.hashfield.remove(hash)

    # Init client server
    site_temp.connection_server = ConnectionServer(file_server.ip, 1545)
    site_temp.addPeer(file_server.ip, 1544)  # Add source server

    # Download normal files
    site_temp.log.info("Start Downloading site")
    site_temp.download(blind_includes=True).join(timeout=5)

    # Download optional data/optional.txt
    optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
    optional_file_info2 = site_temp.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
    assert not site_temp.storage.isFile("data/optional.txt")
    assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
    assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # Source server doesn't know it has the file
    assert not site.content_manager.hashfield.hasHash(optional_file_info2["sha512"])  # Source server doesn't know it has the file
    assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"])  # Source full peer on source server has the file
    assert site_full_peer.hashfield.hasHash(optional_file_info2["sha512"])  # Source full peer on source server has the file
    assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # Source full server has the file
    assert site_full.content_manager.hashfield.hasHash(optional_file_info2["sha512"])  # Source full server has the file

    site_temp.log.info("Request optional files")
    with Spy.Spy(FileRequest, "route") as requests:
        # Request 2 file same time
        threads = []
        threads.append(site_temp.needFile("data/optional.txt", blocking=False))
        threads.append(site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False))
        gevent.joinall(threads)

        # findHashids should call only once: the two concurrent requests share one lookup
        assert len([request for request in requests if request[1] == "findHashIds"]) == 1

    assert site_temp.storage.isFile("data/optional.txt")
    assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
    assert site_temp.storage.deleteFiles()
    file_server_full.stop()
    [connection.close() for connection in file_server.connections]
    site_full.content_manager.contents.db.close()