def find_old_collections(keepalive_constant, testing_mode=False):
    """
    The main keep-alive function. Searches the cache for older collections
    that should be rebroadcast to the Main Channel, keeping the network
    up to date.

    :param keepalive_constant: the age limit (in days) of a collection
        before it is rebroadcast
    :param testing_mode: if True, rebroadcast is invoked in testing mode
    :return: the number of collections rebroadcast
    """
    cache = Cache()
    collections = cache.get_all_collections()
    today = datetime.today()
    counter = 0
    for collection in collections:
        age = today - collection.latest_broadcast_date
        if age.days >= keepalive_constant:
            collection.latest_broadcast_date = datetime.today()
            success = rebroadcast(collection, testing_mode=testing_mode)
            if success:
                print("Updating collection in cache")
                cache.insert_new_collection(collection)
                counter += 1
            else:
                print("Sending rebroadcast failed")
    return counter
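# Hedged usage sketch: the threshold value and the __main__ guard are
# illustrative, not from the source. find_old_collections() rebroadcasts
# every cached collection whose last broadcast is at least that many days old.
if __name__ == "__main__":
    rebroadcast_count = find_old_collections(5)  # 5-day keep-alive window (example value)
    print("%d collections rebroadcast" % rebroadcast_count)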
def update_hash(collection):
    """Compute the root hash of a collection's documents and record a new
    CollectionVersion if that hash has not been seen before."""
    if collection is None:
        return None
    # TODO: check whether this version has been hashed already (collection.version)
    string = ""
    for document in collection.documents:
        string += document.hash + "|"
    if len(string) == 0:
        return None
    string = string[:-1]  # drop the trailing "|"
    h = hashlib.sha256()
    h.update(string.encode('utf-8'))  # hashlib requires bytes in Python 3
    root_hash = h.hexdigest()
    session = DBSession.object_session(collection)
    collection_hash = session.query(CollectionVersion).filter_by(
        root_hash=root_hash).first()
    if collection_hash is not None:
        # This exact set of documents has already been versioned.
        return
    collection_hash = CollectionVersion(
        root_hash=root_hash,
        document_ids=string,
        collection_version=collection.get_latest_version() + 1,
        collection_address=collection.address)
    session.add(collection_hash)
    collection.version_list.append(collection_hash)
    session.commit()
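# For reference, the root-hash scheme above in isolation: document hashes are
# joined with "|" and digested with SHA-256. The sample hashes are made up.
import hashlib

doc_hashes = ["a1b2c3", "d4e5f6"]  # illustrative document hashes
joined = "|".join(doc_hashes)      # equivalent to the loop plus trailing-"|" strip above
root_hash = hashlib.sha256(joined.encode('utf-8')).hexdigest()
print(root_hash)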
def __init__(self, space: AddressSpace, policy: ReplacementPolicy,
             level_sizes: list, level_associativites: list, blocksize,
             level_latencies: list = None):
    self._space = space
    self._replacement_policy = policy
    self._blocksize = blocksize

    if not isinstance(level_sizes, list) or len(level_sizes) != 3:
        raise AttributeError("Field 'level_sizes' must be a list of length 3 "
                             "indicating I/DL1, UL2, and UL3 cache sizes")
    if not isinstance(level_associativites, list) or len(level_associativites) != 3:
        raise AttributeError("Field 'level_associativites' must be a list of length 3 "
                             "indicating I/DL1, UL2, and UL3 associativity")
    if level_latencies:
        if not isinstance(level_latencies, list) or len(level_latencies) != 4:
            raise AttributeError("Field 'level_latencies' must be a list of length 4 "
                                 "indicating I/DL1, UL2, UL3, and MEM latencies")
        for level in level_latencies:
            if not isinstance(level, tuple) or len(level) != 2:
                raise AttributeError("Field 'level_latencies' must be a list of tuples "
                                     "indicating (read_latency, write_latency)")
        self.DL1 = Cache(space, level_sizes[0], level_associativites[0], blocksize,
                         policy, name='DL1',
                         rlatency=level_latencies[0][0], wlatency=level_latencies[0][1])
        self.IL1 = Cache(space, level_sizes[0], level_associativites[0], blocksize,
                         policy, name='IL1',
                         rlatency=level_latencies[0][0], wlatency=level_latencies[0][1])
        self.UL2 = Cache(space, level_sizes[1], level_associativites[1], blocksize,
                         policy, name='UL2',
                         rlatency=level_latencies[1][0], wlatency=level_latencies[1][1])
        self.UL3 = Cache(space, level_sizes[2], level_associativites[2], blocksize,
                         policy, name='UL3',
                         rlatency=level_latencies[2][0], wlatency=level_latencies[2][1])
        self.MEM = Cache(space, blocksize, 1, blocksize, policy, name='MEM',
                         rlatency=level_latencies[3][0], wlatency=level_latencies[3][1])
    else:
        self.DL1 = Cache(space, level_sizes[0], level_associativites[0], blocksize, policy, name='DL1')
        self.IL1 = Cache(space, level_sizes[0], level_associativites[0], blocksize, policy, name='IL1')
        self.UL2 = Cache(space, level_sizes[1], level_associativites[1], blocksize, policy, name='UL2')
        self.UL3 = Cache(space, level_sizes[2], level_associativites[2], blocksize, policy, name='UL3')
        self.MEM = Cache(space, blocksize, 1, blocksize, policy, name='MEM')

    self.stats = CacheMetrics(
        [self.IL1.name, self.DL1.name, self.UL2.name, self.UL3.name, self.MEM.name],
        [
            (self.DL1.name, self.DL1.name), (self.DL1.name, self.UL2.name),
            (self.DL1.name, self.UL3.name), (self.DL1.name, self.MEM.name),
            (self.UL2.name, self.UL2.name), (self.UL2.name, self.UL3.name),
            (self.UL2.name, self.MEM.name), (self.UL3.name, self.UL3.name),
            (self.UL3.name, self.MEM.name), (self.MEM.name, self.MEM.name),
            (self.MEM.name, self.UL3.name), (self.MEM.name, self.UL2.name),
            (self.MEM.name, self.DL1.name), (self.UL3.name, self.UL2.name),
            (self.UL3.name, self.DL1.name), (self.UL2.name, self.DL1.name),
            (self.IL1.name, self.IL1.name), (self.IL1.name, self.UL2.name),
            (self.IL1.name, self.UL3.name), (self.IL1.name, self.MEM.name),
            (self.MEM.name, self.IL1.name), (self.UL3.name, self.IL1.name),
            (self.UL2.name, self.IL1.name),
        ]
    )
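# Hedged construction sketch. The hierarchy class name, the AddressSpace
# constructor, and the policy value are assumptions; sizes, associativities,
# and latencies are illustrative only.
space = AddressSpace(32)              # assumed: a 32-bit address space
policy = ReplacementPolicy.LRU        # assumed: an LRU policy value

hierarchy = CacheHierarchy(           # class name assumed from context
    space, policy,
    level_sizes=[32 * 1024, 256 * 1024, 8 * 1024 * 1024],       # L1/L2/L3 bytes
    level_associativites=[8, 8, 16],
    blocksize=64,
    level_latencies=[(4, 4), (12, 12), (40, 40), (100, 100)],   # (read, write) cycles
)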
def __init__(self, load_from_cache: bool, config: dict):
    self.config = config
    self.cache = Cache()
    self.filename = self.config['cache_file'] + ".json"
    if load_from_cache:
        print('Reading Cache from file')
        self.cache.cache = self.load_cache()
        print('Cache loaded')
    print('Loading Cache Daemon')
    self.daemon = threading.Thread(target=cache_daemon, args=[self.cache])
    self.daemon.daemon = True          # attribute form; setDaemon() is deprecated
    self.daemon.name = "Cache Daemon"  # attribute form; setName() is deprecated
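# Hedged usage sketch: the owning class name is hypothetical; the 'cache_file'
# key matches the config access above. start() launches cache_daemon in the
# background; as a daemon thread it exits with the main thread.
manager = CacheManager(load_from_cache=True, config={'cache_file': 'freejournal_cache'})
manager.daemon.start()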
def setUp(self):
    self.cache = Cache()
    self.cache.reset_database()
    self.collection1 = Collection(
        title="First Collection",
        btc="btc",
        address="bm-first",
        description="description",
        keywords=[],
        documents=[],
        latest_broadcast_date=datetime.datetime.today(),
        creation_date=datetime.datetime.today(),
        oldest_date=datetime.datetime.today(),
        latest_btc_tx="",
        oldest_btc_tx="",
        accesses=0,
        votes=0,
        votes_last_checked=datetime.datetime.today())
    self.cache.insert_new_collection(self.collection1)
def put_collection(address_password, title, description, keywords, btc):
    """
    Create a collection in the local cache.

    :param address_password: the password with which to protect the
        collection. Should be at least 20 characters and unique for optimal
        security; the unique collection ID is generated from it
        deterministically
    :param title: the title of the created collection
    :param description: the description of the created collection
    :param keywords: comma-separated keywords for the resulting collection
    :param btc: the Bitcoin address of the resulting collection
    """
    bitmessage_connection = Bitmessage()
    cache = Cache()
    address = bitmessage_connection.create_address(address_password)
    input_keywords = [Keyword(name=x) for x in keywords.split(",")]
    keywords = []
    # Reuse keywords already in the cache; fall back to the new Keyword rows.
    for key in input_keywords:
        db_key = cache.get_keyword_by_name(key.name)
        if db_key is not None:
            keywords.append(db_key)
        else:
            keywords.append(key)
    collection = Collection(
        title=title,
        description=description,
        address=address,
        accesses=0,
        votes=0,
        btc=btc,
        keywords=keywords,
        documents=[],
        creation_date=datetime.datetime.now(),
        oldest_date=datetime.datetime.now(),
        votes_last_checked=datetime.datetime.now(),
        latest_broadcast_date=datetime.datetime.now()
    )
    cache.insert_new_collection(collection)
    print("Collection inserted with address/ID " + address)
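# Hedged call sketch: all argument values below are illustrative.
put_collection(
    address_password="correct horse battery staple 42",  # >= 20 characters recommended
    title="My Research Papers",
    description="A small collection of documents",
    keywords="research,papers,documents",
    btc="1ExampleBitcoinAddressXXXXXXXXXXXX",             # placeholder address
)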
def setUp(self):
    self.our_cache = Cache()
import platform

from bitmessage.install import apt_install, windows_install
# FreeNet installer imports
from freenet.install import linux_install

try:
    from controllers import collections
except ImportError:
    print('SQLAlchemy import error')

# FreeJournal library imports
import config

try:
    from cache.cache import Cache
    cache = Cache()
except ImportError:
    print("Warning: SQLite is not installed. No local cache "
          "functionality available.")

try:
    from models.collection import Collection
    from models.keyword import Keyword
    from models.document import Document
except ImportError:
    print("Error: could not import models.")

try:
    from bitmessage.bitmessage import Bitmessage
    from controllers.controller import Controller
except ImportError:
    print("Error: could not import Bitmessage or Controller.")
def __init__(self):
    self.cache = Cache()
def tearDown(self):
    cache = Cache()
    cache.reset_database()
def setUp(self):
    cache = Cache()
    cache.reset_database()
    collection1 = Collection(
        title="First Collection",
        btc="btc",
        address="bm-first",
        description="description",
        keywords=[],
        documents=[],
        latest_broadcast_date=datetime.datetime.today(),
        creation_date=datetime.datetime.today(),
        oldest_date=datetime.datetime.today(),
        latest_btc_tx="",
        oldest_btc_tx="",
        accesses=0,
        votes=0,
        votes_last_checked=datetime.datetime.today())
    cache.insert_new_collection(collection1)
    collection2 = Collection(
        title="Second Collection",
        btc="btc",
        address="bm-second",
        description="description",
        keywords=[],
        documents=[],
        latest_broadcast_date=datetime.datetime.today() - datetime.timedelta(days=3),
        creation_date=datetime.datetime.today(),
        oldest_date=datetime.datetime.today(),
        latest_btc_tx="",
        oldest_btc_tx="",
        accesses=0,
        votes=0,
        votes_last_checked=datetime.datetime.today())
    cache.insert_new_collection(collection2)
    collection3 = Collection(
        title="Third Collection",
        btc="btc",
        address="bm-third",
        description="description",
        keywords=[],
        documents=[],
        latest_broadcast_date=datetime.datetime.today() - datetime.timedelta(days=1),
        creation_date=datetime.datetime.today(),
        oldest_date=datetime.datetime.today(),
        latest_btc_tx="",
        oldest_btc_tx="",
        accesses=0,
        votes=0,
        votes_last_checked=datetime.datetime.today())
    cache.insert_new_collection(collection3)
    collection4 = Collection(
        title="Fourth Collection",
        description="description",
        btc="btc",
        address="bm-fourth",
        keywords=[],
        documents=[],
        latest_broadcast_date=datetime.datetime.today() - datetime.timedelta(days=6),
        creation_date=datetime.datetime.today(),
        oldest_date=datetime.datetime.today(),
        latest_btc_tx="",
        oldest_btc_tx="",
        accesses=0,
        votes=0,
        votes_last_checked=datetime.datetime.today())
    cache.insert_new_collection(collection4)
import unittest, datetime
import uuid

from sqlalchemy.orm.exc import ObjectDeletedError
from sqlalchemy.exc import StatementError

from cache.cache import Cache
from models.collection import Collection
from models.document import Document
from models.keyword import Keyword
from controllers.collections import update_hash
from controllers import collections

our_cache = Cache()


def add_collection():
    global our_cache
    coll_address = str(uuid.uuid1())
    doc_hash_1 = str(uuid.uuid1())
    doc_hash_2 = str(uuid.uuid1())
    coll = Collection(
        title="Test",
        description="This is a collection!",
        address=str(uuid.uuid1()),
        btc=str(uuid.uuid1()),
        keywords=[],
        documents=[
            Document(
                collection_address=doc_hash_1,
                description="Test document A",
                hash=str(uuid.uuid1()),
                title="Test A",
            ),
            # Assumed completion: a second document mirroring the first,
            # using doc_hash_2 as prepared above.
            Document(
                collection_address=doc_hash_2,
                description="Test document B",
                hash=str(uuid.uuid1()),
                title="Test B",
            ),
        ],
        creation_date=datetime.datetime.now(),
        oldest_date=datetime.datetime.now(),
        latest_broadcast_date=datetime.datetime.now(),
        votes_last_checked=datetime.datetime.now(),
        accesses=0,
        votes=0)
    our_cache.insert_new_collection(coll)
    return coll
def get_cache():
    cache = getattr(g, '_cache', None)
    if cache is None:
        cache = g._cache = Cache()
    return cache
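# get_cache() follows the standard Flask pattern of memoizing one instance per
# application context on flask.g. A hedged usage sketch: the app and route are
# illustrative; get_all_collections() is the accessor used elsewhere in this
# codebase.
from flask import Flask, jsonify

app = Flask(__name__)

@app.route('/collections')
def list_collections():
    cache = get_cache()  # same Cache instance for the rest of this context
    return jsonify([c.title for c in cache.get_all_collections()])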
def __init__(self):
    self.connection = Bitmessage()
    self.cache = Cache()
    self.download_threads = set()