from datetime import datetime

# Cache and rebroadcast are imported from the surrounding package.


def find_old_collections(keepalive_constant, testing_mode=False):
    """
    The main keep-alive function. Searches the cache for older collections
    that should be rebroadcast to the Main Channel, keeping the network
    up to date.

    :param keepalive_constant: the age limit, in days, of a collection
        before it is rebroadcast
    :param testing_mode: if True, rebroadcast is called in testing mode
    :return: the number of collections rebroadcast
    """
    cache = Cache()
    collections = cache.get_all_collections()
    today = datetime.today()
    counter = 0
    for collection in collections:
        age = today - collection.latest_broadcast_date
        if age.days >= keepalive_constant:
            collection.latest_broadcast_date = datetime.today()
            if testing_mode:
                success = rebroadcast(collection, testing_mode=True)
            else:
                success = rebroadcast(collection)
            if success:
                print("Updating collection in cache")
                cache.insert_new_collection(collection)
                counter += 1
            else:
                print("Sending rebroadcast failed")
    return counter
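# Example (illustrative sketch, not part of the module): a periodic
# keep-alive sweep. The 3-day threshold is a hypothetical value, not a
# project constant.
if __name__ == '__main__':
    count = find_old_collections(3)
    print("Rebroadcast " + str(count) + " stale collection(s)")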
import datetime

# Bitmessage, Cache, Keyword, and Collection are imported from the
# surrounding package.


def put_collection(address_password, title, description, keywords, btc):
    """
    Create a collection in the local cache.

    :param address_password: the password with which to protect the
        collection; should be at least 20 characters and unique for optimal
        security. The unique collection ID is generated deterministically
        from it.
    :param title: the title of the created collection
    :param description: the description of the created collection
    :param keywords: comma-separated keywords for the resulting collection
    :param btc: the Bitcoin address of the resulting collection
    """
    bitmessage_connection = Bitmessage()
    cache = Cache()
    address = bitmessage_connection.create_address(address_password)
    input_keywords = [Keyword(name=x) for x in keywords.split(",")]
    keywords = []
    for key in input_keywords:
        # Reuse a cached keyword when one with the same name already exists
        db_key = cache.get_keyword_by_name(key.name)
        if db_key is not None:
            keywords.append(db_key)
        else:
            keywords.append(key)
    collection = Collection(
        title=title,
        description=description,
        address=address,
        accesses=0,
        votes=0,
        btc=btc,
        keywords=keywords,
        documents=[],
        creation_date=datetime.datetime.now(),
        oldest_date=datetime.datetime.now(),
        votes_last_checked=datetime.datetime.now(),
        latest_broadcast_date=datetime.datetime.now())
    cache.insert_new_collection(collection)
    print("Collection inserted with address/ID " + address)
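# Example (illustrative sketch): creating a collection from a script.
# All argument values below are hypothetical placeholders.
put_collection(
    address_password="a long unique passphrase of 20+ characters",
    title="Example Collection",
    description="A short description of the collection",
    keywords="example,demo,test",
    btc="btc-address-placeholder")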
import datetime
import unittest

# Cache, Collection, Document, and the collections module are imported
# from the surrounding package.


class TestCollectionHistory(unittest.TestCase):
    def setUp(self):
        self.cache = Cache()
        self.cache.reset_database()
        self.collection1 = Collection(
            title="First Collection",
            btc="btc",
            address="bm-first",
            description="description",
            keywords=[],
            documents=[],
            latest_broadcast_date=datetime.datetime.today(),
            creation_date=datetime.datetime.today(),
            oldest_date=datetime.datetime.today(),
            latest_btc_tx="",
            oldest_btc_tx="",
            accesses=0,
            votes=0,
            votes_last_checked=datetime.datetime.today())
        self.cache.insert_new_collection(self.collection1)

    def test_two_doc_insert(self):
        d = Document(
            description="Test document A",
            hash="asdfasdfa;sldkfja;sldkfja;dljkfa;ldf",
            collection_address="bm-first",
            title="Test A",
        )
        d2 = Document(
            description="Test document B",
            hash="fdasdfsdfsdfsdfsdfsdfsdfdfsdfsddfdfdf",
            collection_address="bm-first",
            title="Test B",
        )
        self.cache.insert_new_document(d)
        collections.update_hash(self.collection1)
        self.cache.insert_new_document(d2)
        collections.update_hash(self.collection1)
        versions = self.cache.get_versions_for_collection(
            self.collection1.address)
        self.assertEqual(len(versions), 2, "No new version was created")

    def test_empty_version(self):
        versions = self.cache.get_versions_for_collection(
            self.collection1.address)
        if len(versions) != 0:
            self.fail("Version list should be empty to start")

    def test_increment_collectionversion(self):
        versions = self.cache.get_versions_for_collection(
            self.collection1.address)
        if len(versions) != 0:
            self.fail("Version list should be empty to start")
        collections.update_hash(self.collection1)
        versions = self.cache.get_versions_for_collection(
            self.collection1.address)
        if versions[0].collection_version != 1:
            self.fail("Incorrect collection version")

    def test_version_update(self):
        collections.update_hash(self.collection1)
        versions = self.cache.get_versions_for_collection(
            self.collection1.address)
        if len(versions) != 1:
            self.fail("Exactly one version should have been created")
        if versions[0].collection_address != self.collection1.address:
            self.fail("Wrong collection address")

    def test_different_root_hash(self):
        d = Document(
            description="Test document A",
            hash="asdfasdfa;sldkfja;sldkfja;dljkfa;ldf",
            collection_address="bm-first",
            title="Test A",
        )
        d2 = Document(
            description="Test document B",
            hash="fdasdfsdfsdfsdfsdfsdfsdfdfsdfsddfdfdf",
            collection_address="bm-first",
            title="Test B",
        )
        self.cache.insert_new_document(d)
        collections.update_hash(self.collection1)
        self.cache.insert_new_document(d2)
        collections.update_hash(self.collection1)
        versions = self.cache.get_versions_for_collection(
            self.collection1.address)
        self.assertNotEqual(versions[0].root_hash, versions[1].root_hash)
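# To run these tests (the module path is an assumption about the repo
# layout, not confirmed by the source):
#   python -m unittest tests.test_collection_history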
    def setUp(self):
        cache = Cache()
        cache.reset_database()
        # Four collections whose latest_broadcast_date is 0, 3, 1, and 6
        # days old, respectively.
        fixtures = [
            ("First Collection", "bm-first", 0),
            ("Second Collection", "bm-second", 3),
            ("Third Collection", "bm-third", 1),
            ("Fourth Collection", "bm-fourth", 6),
        ]
        for title, address, days_old in fixtures:
            collection = Collection(
                title=title,
                btc="btc",
                address=address,
                description="description",
                keywords=[],
                documents=[],
                latest_broadcast_date=(datetime.datetime.today() -
                                       datetime.timedelta(days=days_old)),
                creation_date=datetime.datetime.today(),
                oldest_date=datetime.datetime.today(),
                latest_btc_tx="",
                oldest_btc_tx="",
                accesses=0,
                votes=0,
                votes_last_checked=datetime.datetime.today())
            cache.insert_new_collection(collection)
import base64
import datetime
import hashlib
import json
import os
from random import randint

# Bitmessage, Cache, FreenetConnection, Collection, Document, Keyword,
# Signature, FJMessage, run_as_thread, the JSON schemas (fj_schema,
# coll_schema), and the config constants (DOCUMENT_DIRECTORY_PATH,
# MAIN_CHANNEL_ADDRESS) are imported from the surrounding package.


class Controller:
    def __init__(self):
        self.connection = Bitmessage()
        self.cache = Cache()
        self.download_threads = set()

    def _check_signature(self, fj_message):
        """
        Checks that the signature is the correct sha256 hash of the
        address's public keys and payload.

        :param fj_message: the message containing the collection and signature
        :return: True if the signatures match, False otherwise
        """
        h = hashlib.sha256(fj_message["pubkey"] +
                           fj_message["payload"]).hexdigest()
        if h == fj_message["signature"]:
            print("Signature Verified")
            return True
        else:
            print("Signature Not Verified")
            return False

    def _save_document(self, data, file_name, testing_mode=False):
        """
        Private helper function for writing file data to disk. Creates the
        file in the directory specified in config.py.

        :param data: the file data
        :param file_name: the name of the file
        :param testing_mode: if True, file_name is used as the full path
        :return: a boolean indicating success
        """
        try:
            if testing_mode:
                file_path = file_name
            else:
                file_path = os.path.expanduser(
                    DOCUMENT_DIRECTORY_PATH) + file_name
            with open(file_path, 'w') as f:
                f.write(data)
            return True
        except Exception:
            return False

    def _get_document(self, hash):
        """
        Private helper function for getting document data from freenet.

        :param hash: the Content Hash Key for a document
        :return: the file data if successful, None otherwise
        """
        data = None
        # Try obtaining a freenet connection
        try:
            freenet_connection = FreenetConnection()
        except Exception:
            print("Couldn't connect to freenet")
            return data
        try:
            data = freenet_connection.get(hash)
        except Exception:
            pass
        return data

    def _hash_document_filenames(self, documents, collection):
        """
        Private helper function for hashing a collection of documents'
        file names so that file name conflicts will be rare.

        :param documents: a list of document objects
        :param collection: the collection the documents belong to
        """
        for document in documents:
            # Create a new file name out of a hash to deal with possible
            # naming conflicts
            file_name = document.filename
            if not document.filename:
                file_name = document.title + str(randint(0, 100))
            _, extension = os.path.splitext(file_name)
            new_file_name = document.hash + extension
            # Save the new file name to the cache so it can be viewed later
            document.filename = new_file_name
            self.cache.insert_new_document_in_collection(document, collection)

    @run_as_thread
    def _download_documents(self, collection_title, documents):
        """
        Downloads documents from a collection in a new thread.

        :param collection_title: the title of the collection
        :param documents: the list of document objects to download
        """
        print("Downloading documents for " + collection_title)
        print("Number of Documents to download: " + str(len(documents)))
        doc_counter = 0
        for document in documents:
            # Store and validate that the document has a file name
            file_name = document.filename
            if not file_name:
                file_name = collection_title + str(
                    doc_counter) + document.title
                doc_counter += 1
            # Try obtaining the file data from freenet
            data = self._get_document(document.hash)
            if not data:
                print("Couldn't download " + file_name + " from freenet")
                continue
            # If the file data was successfully downloaded, save it to disk
            success = self._save_document(data, file_name)
            if success:
                print("Successfully downloaded " + file_name +
                      " from freenet")
            else:
                print("Couldn't save document data to disk (check that the"
                      " document directory path exists and appropriate"
                      " permissions are set)")

    def _build_docs_keywords(self, payload, collection):
        """
        Builds the Keyword objects and Document objects from the received
        json and appends them to the given collection in place.
        :param payload: the payload of the FJ Message, including the
            documents and keywords
        :param collection: the collection whose keyword and document lists
            are populated
        """
        for key in payload["keywords"]:
            db_key = self.cache.get_keyword_by_id(key["id"])
            if db_key is not None:
                collection.keywords.append(db_key)
            else:
                collection.keywords.append(Keyword(name=key["name"]))
        for doc in payload["documents"]:
            db_doc = self.cache.get_document_by_hash(doc["hash"])
            if db_doc is not None:
                collection.documents.append(db_doc)
            else:
                collection.documents.append(
                    Document(collection_address=doc["address"],
                             description=doc["description"],
                             hash=doc["hash"],
                             title=doc["title"],
                             filename=doc["filename"],
                             accesses=doc["accesses"]))

    def _cache_collection(self, payload, message):
        """
        Checks whether this collection is already in the cache. If it is,
        the cached collection is updated with the new data; otherwise a new
        collection is created and cached.

        :param message: the Bitmessage message containing an FJ_message
        :param payload: the contents of the FJ_message
        :return: True if the collection was cached, False otherwise
        """
        cached_collection = self.cache.get_collection_with_address(
            payload["address"])
        if cached_collection is None:
            collection_model = Collection(
                title=payload["title"],
                description=payload["description"],
                address=payload["address"],
                btc=payload["btc"],
                creation_date=datetime.datetime.strptime(
                    payload["creation_date"], "%A, %d. %B %Y %I:%M%p"),
                oldest_date=datetime.datetime.strptime(
                    payload["oldest_date"], "%A, %d. %B %Y %I:%M%p"),
                latest_broadcast_date=datetime.datetime.strptime(
                    payload["latest_broadcast_date"],
                    "%A, %d. %B %Y %I:%M%p"),
                votes=payload["votes"],
                votes_last_checked=datetime.datetime.strptime(
                    payload["votes_last_checked"], "%A, %d. %B %Y %I:%M%p"),
            )
            self._build_docs_keywords(payload, collection_model)
            signature = Signature(pubkey=message["pubkey"],
                                  signature=message["signature"],
                                  address=payload["address"])
            try:
                self.cache.insert_new_collection(collection_model)
                self.cache.insert_new_collection(signature)
                self._hash_document_filenames(collection_model.documents,
                                              collection_model)
                self.download_threads.add(
                    self._download_documents(collection_model.title,
                                             collection_model.documents))
                print("Cached New Collection")
                return True
            except IntegrityError as m:
                print(m)
                return False
        else:
            cached_collection.keywords = []
            cached_sig = self.cache.get_signature_by_address(
                payload["address"])
            cached_sig.pubkey = message["pubkey"]
            cached_sig.signature = message["signature"]
            cached_collection.title = payload["title"]
            cached_collection.description = payload["description"]
            cached_collection.address = payload["address"]
            cached_collection.btc = payload["btc"]
            cached_collection.documents = []
            cached_collection.creation_date = datetime.datetime.strptime(
                payload["creation_date"], "%A, %d. %B %Y %I:%M%p")
            cached_collection.oldest_date = datetime.datetime.strptime(
                payload["oldest_date"], "%A, %d. %B %Y %I:%M%p")
            cached_collection.latest_broadcast_date = datetime.datetime.strptime(
                payload["latest_broadcast_date"], "%A, %d. %B %Y %I:%M%p")
            cached_collection.votes = payload["votes"]
            cached_collection.votes_last_checked = datetime.datetime.strptime(
                payload["votes_last_checked"], "%A, %d. %B %Y %I:%M%p")
%B %Y %I:%M%p") self._build_docs_keywords(payload, cached_collection) try: self.cache.insert_new_collection(cached_collection) self.cache.insert_new_collection(cached_sig) self._hash_document_filenames(cached_collection.documents, cached_collection) self.download_threads.add( self._download_documents(cached_collection.title, cached_collection.documents)) print "Cached Updated Collection" return True except IntegrityError as m: print m.message return False def _find_address_in_keysdat(self, address): """ Checks if this bitmessage address is in our keys.dat :param address: The address to look for :return: True if the address is in keys.dat, false otherwise """ f = open(os.path.expanduser('~/.config/PyBitmessage/keys.dat'), 'r') keys = f.read() keys_list = keys.split('\n\n') for key_info in keys_list[1:]: if address in key_info: return True return False def import_collection(self, address): """ Imports a Collection from the given Bit Message address and checks if its signature is valid. If it is valid then it is cached locally. :param address: the address to import the collection from :return: True if the collection was imported and cached successfully, False otherwise """ # buffer time to make sure to get messages messages = self.connection.check_inbox() for message in messages["inboxMessages"]: if message["toAddress"] == address: # decoded_message is a FJMessage base64_decode = base64.b64decode(message["message"]) try: json_decode = json.loads(base64_decode) validate(json_decode, fj_schema) except (ValueError, TypeError, ValidationError) as m: #print m.message print "Not a FJ Message or Invalid FJ Message" self.connection.delete_message(message['msgid']) continue # Trying to filter out non collection messages # TODO Change this? if "payload" in json_decode: payload = json_decode["payload"] try: payload = json.loads(payload) validate(payload, coll_schema) except (ValueError, TypeError, ValidationError) as m: print "Contents of FJ Message invalid or corrupted" self.connection.delete_message(message['msgid']) continue if self._check_signature(json_decode): if self._cache_collection(payload, json_decode): self.connection.delete_message(message['msgid']) return True #print "Could not import collection" return False def publish_collection(self, collection, to_address, from_address=None): """ Publishes the given to collection to the bitmessage network :param collection: the collection to be published :param to_address: the address to send the collection to, always MAIN_CHANNEL_ADDRESS except in unittests :param from_address: the address to send the collection from :return: True if the collection is published successfully, False otherwise """ if from_address is None: from_address = self.connection.create_address("new address", True) print "created address: ", from_address if not self._find_address_in_keysdat(from_address): print "This address is not in keys.dat, can not send message" return False collection_payload = collection.to_json() if collection_payload is None: return False new_fj_message = FJMessage(1, collection.address, collection_payload) sendable_fj_message = new_fj_message.to_json() if sendable_fj_message is None: return False self.connection.send_message(to_address, from_address, "subject", sendable_fj_message) return True def rebroadcast(self, collection, to_address=MAIN_CHANNEL_ADDRESS, from_address=MAIN_CHANNEL_ADDRESS): """ Rebroadcasts a collection that is stored locally to the bitmessage network :param collection: The collection to rebroadcast :param to_address: the address to 
            send the collection to; only used for testing
        :param from_address: the address to send the collection from;
            only used for testing
        :return: True if the collection is sent successfully, False otherwise
        """
        collection_payload = collection.to_json()
        if collection_payload is None:
            return False
        cached_signature = self.cache.get_signature_by_address(
            collection.address)
        h = hashlib.sha256(cached_signature.pubkey +
                           collection_payload).hexdigest()
        if h == cached_signature.signature:
            new_fj_message = FJMessage(3, collection.address,
                                       collection_payload)
            sendable_fj_message = new_fj_message.to_json(
                cached_signature.signature)
            if sendable_fj_message is None:
                return False
            self.connection.send_message(to_address, from_address, "subject",
                                         sendable_fj_message)
            return True
        else:
            print("Signature Not Verified")
            return False

    def alive_downloads(self):
        """
        Checks if there are any downloads in progress.

        :return: True if there is a running download
        """
        # Prune threads that have already finished
        self.download_threads = {
            t for t in self.download_threads if t.is_alive()
        }
        return len(self.download_threads) > 0

    def join_downloads(self):
        """
        Joins all of the in-progress download threads.
        """
        for dl_thread in self.download_threads:
            dl_thread.join()
        self.download_threads = set()
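# Example (illustrative sketch): importing a collection and waiting for
# its document downloads to finish. "BM-2cTExampleAddress" is a
# hypothetical Bitmessage address, not a project constant.
controller = Controller()
if controller.import_collection("BM-2cTExampleAddress"):
    controller.join_downloads()
    print("Import complete, all documents downloaded")
else:
    print("No valid collection message found at that address")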