def find_old_collections(keepalive_constant, testing_mode=False):
    """
    The main keep alive function that searches the cache
    for older collections that should be rebroadcasted to
    the Main Channel. This is to keep the network up-to-date.
    :param keepalive_constant: the age limit of a collection before it is rebroadcasted
    :return: the number of collections rebroadcasted
    """
    cache = Cache()
    collections = cache.get_all_collections()
    today = datetime.today()

    counter = 0
    for collection in collections:
        age = today - collection.latest_broadcast_date
        if age.days >= keepalive_constant:
            collection.latest_broadcast_date = datetime.today()

            if testing_mode:
                success = rebroadcast(collection, testing_mode=True)
            else:
                success = rebroadcast(collection)

            if success:
                print("Updating collection in cache")
                cache.insert_new_collection(collection)
                counter += 1
            else:
                print("Sending rebroadcast failed")

    return counter
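
A minimal usage sketch, assuming the Cache and rebroadcast wiring used above is importable; the 7-day threshold is illustrative:

if __name__ == "__main__":
    rebroadcast_count = find_old_collections(7)
    print("Rebroadcast {} collections".format(rebroadcast_count))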
Example #2
from typing import List


def simulate_requests(sequence: List[int], cache: Cache):
    # print("Pages {}".format(sequence))

    for page in sequence:
        cache.request(page)

    # print("Costs {}".format(cache.costs))
    return sum(cache.costs) / len(sequence)
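
For example, a hedged driver for the simulator above (this assumes the Cache variant used here takes no constructor arguments and exposes request() and a costs list, as the function relies on):

pages = [1, 2, 3, 1, 2, 4, 1, 5]
average_cost = simulate_requests(pages, Cache())
print("Average cost per request: {:.2f}".format(average_cost))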
Example #3
class UrlFetch:
    def __init__(self):
        self.cache = Cache()

    def fetchUrl(self, url):
        scheme, host, port, path = parseUrl(url)

        headers, body = self.cache.lookupInCache(url)
        if headers is not None and body is not None:
            # Cache hit: keep the same return arity as the network path below
            return headers, body, scheme

        assert scheme in ["http://", "https://", "view-source:"
                          ], "Invalid url scheme: {}".format(scheme)
        assert len(host) > 0, "No host provided"

        originalScheme = scheme

        if scheme == "view-source":
            scheme = "https://"

        headers, body, status = buildAndSendRequest(port, scheme, path, host)
        headers, body = handleRedirectIfNeeded(status, body, headers, port,
                                               scheme, path, host)

        # TODO: Fix issue with date parsing in this function
        self.cacheRequestIfNeeded(status, headers, url, body)

        return headers, body, originalScheme

    def cacheRequestIfNeeded(self, status, headers, url, body):
        if status != "200":
            return

        max_cache = self.determineCacheTime(headers)

        if (max_cache != 0):
            date = datetime.strptime(headers['date'],
                                     "%a, %d %b %Y %H:%M:%S %Z")
            self.cache.addItemToCache(
                CacheItem(url, date, max_cache, headers, body))

    def determineCacheTime(self, headers):
        try:
            max_cache = headers['cache-control']
            if (max_cache == "no-store"):
                max_cache = 0
            elif (max_cache.startswith("max-age")):
                if (',' in max_cache):
                    max_cache = int(max_cache[max_cache.index("=") +
                                              1:max_cache.index(",")])
                else:
                    max_cache = int(max_cache[max_cache.index("=") + 1:])
        except KeyError:
            max_cache = 0

        return max_cache
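
The Cache-Control parsing above can be illustrated with a few hedged calls (header values are illustrative, and constructing UrlFetch assumes the Cache class it wraps is importable):

fetcher = UrlFetch()
fetcher.determineCacheTime({'cache-control': 'max-age=3600, public'})  # -> 3600
fetcher.determineCacheTime({'cache-control': 'no-store'})              # -> 0
fetcher.determineCacheTime({})                                         # -> 0 via the KeyError fallback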
Example #4
    def populate(self, address, cache: Cache, dirty=False):
        base_address = address & cache.get_base_address_mask()
        block = Block(base_address, dirty, self._replacement_policy)
        error = cache.put(block)
        if error is not None:
            raise EnvironmentError(
                """Cold placement of the following address caused an eviction in the cache. You probably didn't want this.

                Address: {} was placed into the {} cache. 
                Address: {} was evicted as a result!
                """.format(hex(base_address), cache.name, error.base_address))
Example #5
def update_hash(collection):
    string = ""
    if collection is None:
        return None
    # Check whether this version has already been hashed (collection.version)
    cache = Cache()
    for document in collection.documents:
        string += document.hash + "|"
    if len(string) == 0:
        return None
    string = string[:-1]
    h = hashlib.sha256()
    h.update(string)
    root_hash = h.hexdigest()
    session = DBSession.object_session(collection)
    collection_hash = session.query(CollectionVersion).filter_by(
        root_hash=root_hash).first()
    if collection_hash is not None:
        return
    collection_hash = CollectionVersion(
        root_hash=root_hash,
        document_ids=string,
        collection_version=collection.get_latest_version() + 1,
        collection_address=collection.address)
    session.add(collection_hash)
    collection.version_list.append(collection_hash)
    session.commit()
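
A minimal sketch of the root-hash computation used above (document hashes are illustrative; note that under Python 3 the joined string must be encoded to bytes before hashing):

import hashlib

doc_hashes = ["hash-of-document-a", "hash-of-document-b"]
joined = "|".join(doc_hashes)
root_hash = hashlib.sha256(joined.encode("utf-8")).hexdigest()
print(root_hash)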
Example #6
 def setUp(self):
     self.cache = Cache()
     self.cache.reset_database()
     self.collection1 = Collection(
         title="First Cdollection",
         btc="btc",
         address="bm-first",
         description="description",
         keywords=[],
         documents=[],
         latest_broadcast_date=datetime.datetime.today(),
         creation_date=datetime.datetime.today(),
         oldest_date=datetime.datetime.today(),
         latest_btc_tx="",
         oldest_btc_tx="",
         accesses=0,
         votes=0,
         votes_last_checked=datetime.datetime.today())
     self.cache.insert_new_collection(self.collection1)
Example #7
    def __init__(self, load_from_cache: bool, config: dict):
        self.config = config
        self.cache = Cache()
        self.filename = self.config['cache_file'] + ".json"
        if load_from_cache:
            print('Reading Cache from file')
            self.cache.cache = self.load_cache()
            print('Cache loaded')

        print('Loading Cache Daemon')
        self.daemon = threading.Thread(target=cache_daemon, args=[self.cache])
        self.daemon.setDaemon(True)
        self.daemon.setName("Cache Daemon")
Example #8
def put_collection(address_password, title, description, keywords, btc):
    """ Create a collection in local cache
        :param address_password: The password with which to protect the collection.
        Should be unique and at least 20 characters for optimal security; it
        deterministically generates the unique collection ID.
        :param title: The title of the created collection
        :param description: The description of the created collection
        :param keywords: Comma-separated keywords for the resulting collection
        :param btc: the Bitcoin address of the resulting collection
    """
    bitmessage_connection = Bitmessage()
    cache = Cache()
    address = bitmessage_connection.create_address(address_password)

    input_keywords = [Keyword(name=x) for x in keywords.split(",")]
    keywords = []
    for key in input_keywords:
        db_key = cache.get_keyword_by_name(key.name)
        if db_key is not None:
            keywords.append(db_key)
        else:
            keywords.append(key)
    collection = Collection(
        title=title,
        description=description,
        address=address,
        accesses=0,
        votes=0,
        btc=btc,
        keywords=keywords,
        documents=[],
        creation_date=datetime.datetime.now(),
        oldest_date=datetime.datetime.now(),
        votes_last_checked=datetime.datetime.now(),
        latest_broadcast_date=datetime.datetime.now()
    )
    cache.insert_new_collection(collection)
    print ("Collection inserted with address/ID " + address)
Example #9
class Controller:
    def __init__(self):
        self.connection = Bitmessage()
        self.cache = Cache()
        self.download_threads = set()

    def _check_signature(self, fj_message):
        """
        Checks that the signature is the correct sha256 hash of the address's public keys and payload
        :param fj_message: the message containing the collection and signature
        :return: True if the signatures match, False otherwise
        """
        h = hashlib.sha256(fj_message["pubkey"] +
                           fj_message['payload']).hexdigest()

        if h == fj_message["signature"]:
            print "Signature Verified"
            return True
        else:
            print "Signature Not Verified"
            return False

    def _save_document(self, data, file_name, testing_mode=False):
        """
        Private helper function for writing file data to disk.
        Creates the file in the directory specified in config.py.

        :param data: the file data
        :param file_name: the name of the file
        :param testing_mode: if True, file_name is used directly as the file path
        :return: a boolean indicating success
        """

        try:
            if testing_mode:
                file_path = file_name
            else:
                file_path = os.path.expanduser(
                    DOCUMENT_DIRECTORY_PATH) + file_name

            with open(file_path, 'w') as f:
                f.write(data)
            return True
        except Exception as e:
            return False

    def _get_document(self, hash):
        """
        Private helper function for getting document data
        from freenet.

        :param hash: the Content Hash Key for a document
        :return: the file data if successful, None otherwise
        """

        data = None

        #Try obtaining a freenet connection
        try:
            freenet_connection = FreenetConnection()
        except Exception as e:
            print("Couldn't connect to freenet")
            return data

        try:
            data = freenet_connection.get(hash)
        except Exception as e:
            pass

        return data

    def _hash_document_filenames(self, documents, collection):
        """
        Private helper function for hashing the file names of a
        collection's documents so that file name conflicts will be
        rare.

        :param documents: a list of document objects
        """

        for document in documents:
            #Create a new file name out of a hash to deal with possible naming conflicts
            file_name = document.filename
            if not document.filename:
                file_name = document.title + str(randint(0, 100))
            name, extension = os.path.splitext(file_name)
            hash_name = document.hash
            new_file_name = hash_name + extension
            #Save the new file name to the cache so it can be viewed later
            document.filename = new_file_name
            self.cache.insert_new_document_in_collection(document, collection)

    @run_as_thread
    def _download_documents(self, collection_title, documents):
        """
        A function that downloads documents from a collection in a new thread.

        :param collection_title: the title of the collection
        :param documents: the list of document objects to download
        """

        print("Downloading documents for " + collection_title)
        print("Number of Documents to download: " + str(len(documents)))

        doc_counter = 0
        for document in documents:
            # Store and validate that the document has a file name
            file_name = document.filename
            if not file_name:
                file_name = collection_title + str(
                    doc_counter) + document.title
                doc_counter += 1
            # Try obtaining the file data from freenet
            data = self._get_document(document.hash)
            if not data:
                print("Couldn't download " + file_name + " from freenet")
                continue

            # If the file data was successfully downloaded, save the data to disk
            success = self._save_document(data, file_name)
            if success:
                print("Successfully downloaded " + file_name + " from freenet")
            else:
                print(
                    "Couldn't save document data to disk (check that the document"
                    " directory path exists and appropriate permissions are set)")

    def _build_docs_keywords(self, payload, collection):
        """
        Builds Keyword and Document objects from the received JSON and appends
        them to the given collection.

        :param payload: The payload of the FJ Message including the documents and keywords
        :param collection: the collection to which the documents and keywords are appended
        """
        for key in payload["keywords"]:
            db_key = self.cache.get_keyword_by_id(key["id"])
            if db_key is not None:
                collection.keywords.append(db_key)
            else:
                collection.keywords.append(Keyword(name=key["name"]))

        for doc in payload["documents"]:
            db_doc = self.cache.get_document_by_hash(doc["hash"])
            if db_doc is not None:
                collection.documents.append(db_doc)
            else:
                collection.documents.append(
                    Document(collection_address=doc["address"],
                             description=doc["description"],
                             hash=doc["hash"],
                             title=doc["title"],
                             filename=doc["filename"],
                             accesses=doc["accesses"]))

    def _cache_collection(self, payload, message):
        """
        Checks whether this collection is already in the cache. If it is, the collection is updated with the new data.
        Otherwise a new collection is created and cached.
        :param message: the Bitmessage message containing an FJ_message
        :param payload: the contents of the FJ_message
        """
        # Grabbing the text representations of the documents and keywords and rebuilding them
        #docs, keywords = self._build_docs_keywords(payload)
        cached_collection = self.cache.get_collection_with_address(
            payload["address"])

        if cached_collection is None:
            collection_model = Collection(
                title=payload["title"],
                description=payload["description"],
                address=payload["address"],
                btc=payload["btc"],
                creation_date=datetime.datetime.strptime(
                    payload["creation_date"], "%A, %d. %B %Y %I:%M%p"),
                oldest_date=datetime.datetime.strptime(
                    payload["oldest_date"], "%A, %d. %B %Y %I:%M%p"),
                latest_broadcast_date=datetime.datetime.strptime(
                    payload["latest_broadcast_date"], "%A, %d. %B %Y %I:%M%p"),
                votes=payload['votes'],
                votes_last_checked=datetime.datetime.strptime(
                    payload["votes_last_checked"], "%A, %d. %B %Y %I:%M%p"),
            )

            self._build_docs_keywords(payload, collection_model)
            signature = Signature(pubkey=message["pubkey"],
                                  signature=message["signature"],
                                  address=payload["address"])
            try:
                self.cache.insert_new_collection(collection_model)
                self.cache.insert_new_collection(signature)
                self._hash_document_filenames(collection_model.documents,
                                              collection_model)
                self.download_threads.add(
                    self._download_documents(collection_model.title,
                                             collection_model.documents))
                print "Cached New Collection"
                return True
            except IntegrityError as m:
                print m.message
                return False
        else:
            cached_collection.keywords = []
            cached_sig = self.cache.get_signature_by_address(
                payload["address"])
            cached_sig.pubkey = message["pubkey"]
            cached_sig.signature = message["signature"]
            cached_collection.title = payload["title"]
            cached_collection.description = payload["description"]
            cached_collection.address = payload["address"]
            cached_collection.btc = payload["btc"]
            cached_collection.documents = []
            cached_collection.creation_date = datetime.datetime.strptime(
                payload["creation_date"], "%A, %d. %B %Y %I:%M%p")
            cached_collection.oldest_date = datetime.datetime.strptime(
                payload["oldest_date"], "%A, %d. %B %Y %I:%M%p")
            cached_collection.latest_broadcast_date = datetime.datetime.strptime(
                payload["latest_broadcast_date"], "%A, %d. %B %Y %I:%M%p")
            cached_collection.votes = payload['votes']
            cached_collection.votes_last_checked = datetime.datetime.strptime(
                payload["votes_last_checked"], "%A, %d. %B %Y %I:%M%p")
            self._build_docs_keywords(payload, cached_collection)
            try:
                self.cache.insert_new_collection(cached_collection)
                self.cache.insert_new_collection(cached_sig)
                self._hash_document_filenames(cached_collection.documents,
                                              cached_collection)
                self.download_threads.add(
                    self._download_documents(cached_collection.title,
                                             cached_collection.documents))
                print "Cached Updated Collection"
                return True
            except IntegrityError as m:
                print m.message
                return False

    def _find_address_in_keysdat(self, address):
        """
        Checks if this bitmessage address is in our keys.dat
        :param address: The address to look for
        :return: True if the address is in keys.dat, false otherwise
        """
        with open(os.path.expanduser('~/.config/PyBitmessage/keys.dat'), 'r') as f:
            keys = f.read()
        keys_list = keys.split('\n\n')

        for key_info in keys_list[1:]:
            if address in key_info:
                return True
        return False

    def import_collection(self, address):
        """
        Imports a Collection from the given Bitmessage address and checks if its signature is valid.
        If it is valid then it is cached locally.
        :param address: the address to import the collection from
        :return: True if the collection was imported and cached successfully, False otherwise
        """

        # buffer time to make sure to get messages
        messages = self.connection.check_inbox()
        for message in messages["inboxMessages"]:
            if message["toAddress"] == address:
                # decoded_message is a FJMessage
                base64_decode = base64.b64decode(message["message"])
                try:
                    json_decode = json.loads(base64_decode)
                    validate(json_decode, fj_schema)
                except (ValueError, TypeError, ValidationError) as m:
                    #print m.message
                    print "Not a FJ Message or Invalid FJ Message"
                    self.connection.delete_message(message['msgid'])
                    continue

                # Trying to filter out non collection messages
                # TODO Change this?
                if "payload" in json_decode:
                    payload = json_decode["payload"]
                    try:
                        payload = json.loads(payload)
                        validate(payload, coll_schema)
                    except (ValueError, TypeError, ValidationError) as m:
                        print "Contents of FJ Message invalid or corrupted"
                        self.connection.delete_message(message['msgid'])
                        continue

                    if self._check_signature(json_decode):
                        if self._cache_collection(payload, json_decode):
                            self.connection.delete_message(message['msgid'])
                            return True

        #print "Could not import collection"
        return False

    def publish_collection(self, collection, to_address, from_address=None):
        """
        Publishes the given collection to the Bitmessage network
        :param collection: the collection to be published
        :param to_address: the address to send the collection to, always MAIN_CHANNEL_ADDRESS except in unit tests
        :param from_address: the address to send the collection from
        :return: True if the collection is published successfully, False otherwise
        """

        if from_address is None:
            from_address = self.connection.create_address("new address", True)
            print "created address: ", from_address
        if not self._find_address_in_keysdat(from_address):
            print "This address is not in keys.dat, can not send message"
            return False

        collection_payload = collection.to_json()
        if collection_payload is None:
            return False
        new_fj_message = FJMessage(1, collection.address, collection_payload)
        sendable_fj_message = new_fj_message.to_json()
        if sendable_fj_message is None:
            return False
        self.connection.send_message(to_address, from_address, "subject",
                                     sendable_fj_message)
        return True

    def rebroadcast(self,
                    collection,
                    to_address=MAIN_CHANNEL_ADDRESS,
                    from_address=MAIN_CHANNEL_ADDRESS):
        """
        Rebroadcasts a collection that is stored locally to the bitmessage network
        :param collection: The collection to rebroadcast
        :param to_address: the address to send the collection to, only used for testing
        :param from_address: the address to send the collection from, only used for testing
        :return: True if the collection is sent successfully, false otherwise
        """
        collection_payload = collection.to_json()
        if collection_payload is None:
            return False
        cached_signature = self.cache.get_signature_by_address(
            collection.address)
        h = hashlib.sha256(cached_signature.pubkey +
                           collection_payload).hexdigest()

        if h == cached_signature.signature:
            new_fj_message = FJMessage(3, collection.address,
                                       collection_payload)
            sendable_fj_message = new_fj_message.to_json(
                cached_signature.signature)
            if sendable_fj_message is None:
                return False
            self.connection.send_message(to_address, from_address, "subject",
                                         sendable_fj_message)
            return True
        else:
            print "Signature Not Verified"
            return False

    def alive_downloads(self):
        """
        Checks if there are any downloads in progress
        :return: True if there is a running download
        """
        self.download_threads = {
            t
            for t in self.download_threads if t.is_alive()
        }
        return len(self.download_threads) > 0

    def join_downloads(self):
        """
        Joins all of the in-progress download threads
        """
        for dl_thread in self.download_threads:
            dl_thread.join()
        self.download_threads = set()
Example #10
 def setUp(self):
     self.our_cache = Cache()
Example #11
def get_cache():
    cache = getattr(g, '_cache', None)
    if cache is None:
        cache = g._cache = Cache()
    return cache
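
A hedged usage sketch, assuming this helper runs inside a Flask request context (where g is available) and that the Cache exposes get_all_collections() as in the examples above:

from flask import Flask, g

app = Flask(__name__)

@app.route("/collections/count")
def collection_count():
    cache = get_cache()  # lazily creates and memoises the Cache on g
    return str(len(cache.get_all_collections()))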
Example #12
import platform
from bitmessage.install import apt_install, windows_install

# FreeNet installer imports
from freenet.install import linux_install

try:
    from controllers import collections
except:
    print('SQLAlchemy import error')

# FreeJournal library imports
import config
try:
    from cache.cache import Cache
    cache = Cache()
except:
    print ("Warning: SQLite is not installed.  No local cache " \
        + "functionality available.")

try:
    from models.collection import Collection
    from models.keyword import Keyword
    from models.document import Document
except:
    print ("Error: could not import models.")

try:
    from bitmessage.bitmessage import Bitmessage
    from controllers.controller import Controller
except:
Example #13
 def __init__(self):
     self.cache = Cache()
Example #14
    def get_cached_json(
            url,  # type: str
            movie_id=None,  # type: Union[str, int, None]
            error_msg=None,  # type: Union[str, int, None]
            source=None,  # type: Union[str, None]
            dump_results=False,  # type: bool
            dump_msg='',  # type: str
            headers=None,  # type: Union[dict, None]
            params=None,  # type: Union[dict, None]
            timeout=3.0  # type: float
    ):
        # type: (...) -> (int, str)
        """
            Attempt to get cached JSON movie information before using the JSON calls
            to get it remotely.

            Any information not in the cache will be placed into it after successfully
            reading it.
        :param url:
        :param movie_id:
        :param error_msg:
        :param source:
        :param dump_results:
        :param dump_msg:
        :param headers:
        :param params:
        :param timeout:
        :return:
        """

        if headers is None:
            headers = {}

        if params is None:
            params = {}

        trailer_data = None
        status = 0
        if Settings.is_use_tmdb_cache():
            start = datetime.datetime.now()
            trailer_data = Cache.read_tmdb_cache_json(movie_id,
                                                      source,
                                                      error_msg=error_msg)
            status = 0
            stop = datetime.datetime.now()
            read_time = stop - start
            Statistics.add_json_read_time(int(read_time.microseconds / 10000))
            # if JsonUtils._logger.isEnabledFor(LazyLogger.DEBUG_EXTRA_VERBOSE):
            #    JsonUtils._logger.debug_extra_verbose('json cache read time:',
            #                                          read_time.microseconds / 10000,
            #                                          'ms')
            if trailer_data is not None:
                trailer_data[Movie.CACHED] = True

        if trailer_data is None:
            status, trailer_data = JsonUtils.get_json(
                url,
                dump_results=dump_results,
                dump_msg=dump_msg,
                headers=headers,
                error_msg=error_msg,
                params=params,
                timeout=timeout)
            if ((status == 0 or status == 200) and trailer_data is not None
                    and Settings.is_use_tmdb_cache()):
                Cache.write_tmdb_cache_json(movie_id, source, trailer_data)

        if trailer_data is None and status == 0:
            status = -1
        return status, trailer_data
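
A hedged call sketch (this assumes the method is exposed as a static method on JsonUtils, as its internal JsonUtils.get_json call suggests; the URL, movie id, and source value are illustrative):

status, movie_json = JsonUtils.get_cached_json(
    'https://api.themoviedb.org/3/movie/603',
    movie_id=603,
    source='tmdb',
    params={'api_key': 'YOUR_API_KEY'},
    timeout=5.0)
if status in (0, 200) and movie_json is not None:
    print(movie_json.get('title'))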
Example #15
 def tearDown(self):
     cache = Cache()
     cache.reset_database()
Example #16
    def setUp(self):
        cache = Cache()
        cache.reset_database()

        collection1 = Collection(
            title="First Collection",
            btc="btc",
            address="bm-first",
            description="description",
            keywords=[],
            documents=[],
            latest_broadcast_date=datetime.datetime.today(),
            creation_date=datetime.datetime.today(),
            oldest_date=datetime.datetime.today(),
            latest_btc_tx="",
            oldest_btc_tx="",
            accesses=0,
            votes=0,
            votes_last_checked=datetime.datetime.today())

        cache.insert_new_collection(collection1)

        collection2 = Collection(
            title="Second Collection",
            btc="btc",
            address="bm-second",
            description="description",
            keywords=[],
            documents=[],
            latest_broadcast_date=datetime.datetime.today() -
            datetime.timedelta(days=3),
            creation_date=datetime.datetime.today(),
            oldest_date=datetime.datetime.today(),
            latest_btc_tx="",
            oldest_btc_tx="",
            accesses=0,
            votes=0,
            votes_last_checked=datetime.datetime.today())

        cache.insert_new_collection(collection2)

        collection3 = Collection(
            title="Third Collection",
            btc="btc",
            address="bm-third",
            description="description",
            keywords=[],
            documents=[],
            latest_broadcast_date=datetime.datetime.today() -
            datetime.timedelta(days=1),
            creation_date=datetime.datetime.today(),
            oldest_date=datetime.datetime.today(),
            latest_btc_tx="",
            oldest_btc_tx="",
            accesses=0,
            votes=0,
            votes_last_checked=datetime.datetime.today())

        cache.insert_new_collection(collection3)

        collection4 = Collection(
            title="Fourth Collection",
            description="description",
            btc="btc",
            address="bm-fourth",
            keywords=[],
            documents=[],
            latest_broadcast_date=datetime.datetime.today() -
            datetime.timedelta(days=6),
            creation_date=datetime.datetime.today(),
            oldest_date=datetime.datetime.today(),
            latest_btc_tx="",
            oldest_btc_tx="",
            accesses=0,
            votes=0,
            votes_last_checked=datetime.datetime.today())

        cache.insert_new_collection(collection4)
Example #17
 def __init__(self):
     self.connection = Bitmessage()
     self.cache = Cache()
     self.download_threads = set()
Example #18
class TestCollectionHistory(unittest.TestCase):
    def setUp(self):
        self.cache = Cache()
        self.cache.reset_database()
        self.collection1 = Collection(
            title="First Cdollection",
            btc="btc",
            address="bm-first",
            description="description",
            keywords=[],
            documents=[],
            latest_broadcast_date=datetime.datetime.today(),
            creation_date=datetime.datetime.today(),
            oldest_date=datetime.datetime.today(),
            latest_btc_tx="",
            oldest_btc_tx="",
            accesses=0,
            votes=0,
            votes_last_checked=datetime.datetime.today())
        self.cache.insert_new_collection(self.collection1)

    def test_two_doc_insert(self):
        d = Document(
            description="Test document A",
            hash="asdfasdfa;sldkfja;sldkfja;dljkfa;ldf",
            collection_address="bm-first",
            title="Test A",
        )
        d2 = Document(
            description="Test document B",
            hash="fdasdfsdfsdfsdfsdfsdfsdfdfsdfsddfdfdf",
            collection_address="bm-first",
            title="Test B",
        )
        self.cache.insert_new_document(d)
        collections.update_hash(self.collection1)
        self.cache.insert_new_document(d2)
        collections.update_hash(self.collection1)
        versions = self.cache.get_versions_for_collection(
            self.collection1.address)
        if (len(versions) < 2):
            print(len(versions))
            self.fail("No new version was created")
        self.assertTrue(len(versions) == 2)

    def test_empty_version(self):
        print("test")
        versions = self.cache.get_versions_for_collection(
            self.collection1.address)
        if (len(versions) != 0):
            self.fail("Version should be empty to start")

    def test_increment_collectionversion(self):
        versions = self.cache.get_versions_for_collection(
            self.collection1.address)
        if (len(versions) != 0):
            self.fail("Version list should be empty to start")
        collections.update_hash(self.collection1)
        versions = self.cache.get_versions_for_collection(
            self.collection1.address)
        if (versions[0].collection_version != 1):
            self.fail("Incorrect collection version")

    def test_version_update(self):
        collections.update_hash(self.collection1)
        versions = self.cache.get_versions_for_collection(
            self.collection1.address)
        if (len(versions) != 1):
            self.fail("Version should be updated")
        if (versions[0].collection_address != self.collection1.address):
            print(versions[0].collection_address)
            print(self.collection1.address)
            self.fail("Wrong collection address")

    def test_different_root_hash(self):
        d = Document(
            description="Test document A",
            hash="asdfasdfa;sldkfja;sldkfja;dljkfa;ldf",
            collection_address="bm-first",
            title="Test A",
        )
        d2 = Document(
            description="Test document B",
            hash="fdasdfsdfsdfsdfsdfsdfsdfdfsdfsddfdfdf",
            collection_address="bm-first",
            title="Test B",
        )
        self.cache.insert_new_document(d)
        collections.update_hash(self.collection1)
        self.cache.insert_new_document(d2)
        collections.update_hash(self.collection1)
        versions = self.cache.get_versions_for_collection(
            self.collection1.address)
        self.assertTrue(versions[0].root_hash != versions[1].root_hash)
Example #19
import unittest, datetime
from cache.cache import Cache
from models.collection import Collection
from models.document import Document
from models.keyword import Keyword
from controllers.collections import update_hash
from controllers import collections
import uuid
from sqlalchemy.orm.exc import ObjectDeletedError
from sqlalchemy.exc import StatementError

our_cache = Cache()

def add_collection():
    global our_cache
    coll_address = str(uuid.uuid1())
    doc_hash_1 = str(uuid.uuid1())
    doc_hash_2 = str(uuid.uuid1())
    coll = Collection(
            title="Test",
            description="This is a collection!",
            address=str(uuid.uuid1()),
            btc=str(uuid.uuid1()),
            keywords=[
            ],
            documents=[
                Document(
                    collection_address=doc_hash_1,
                    description="Test document A",
                    hash=str(uuid.uuid1()),
                    title="Test A",
Example #20
    def __init__(self,
                 space: AddressSpace,
                 policy: ReplacementPolicy,
                 level_sizes: list,
                 level_associativites: list,
                 blocksize,
                 level_latencies: list = None):
        self._space = space
        self._replacement_policy = policy
        self._blocksize = blocksize

        if not isinstance(level_sizes, list) or len(level_sizes) != 3:
            raise AttributeError(
                "Field 'level_sizes' must be a list of length 3 indicating I/DL1, UL2, and UL3 cache sizes"
            )

        if not isinstance(level_associativites,
                          list) or len(level_associativites) != 3:
            raise AttributeError(
                "Field 'level_associativites' must be a list of length 3 indicating I/DL1, UL2, and UL3 associativity"
            )

        if level_latencies:
            if not isinstance(level_latencies,
                              list) or len(level_latencies) != 4:
                raise AttributeError(
                    "Field 'level_latencies' must be a list of length 4 indicating I/DL1, UL2, UL3, and MEM latencies"
                )
            for level in level_latencies:
                if not isinstance(level, tuple) or len(level) != 2:
                    raise AttributeError(
                        "Field 'level_latencies' must be a list of tuples indicating (read_latency, write_latency)"
                    )
            self.DL1 = Cache(space,
                             level_sizes[0],
                             level_associativites[0],
                             blocksize,
                             policy,
                             name='DL1',
                             rlatency=level_latencies[0][0],
                             wlatency=level_latencies[0][1])
            self.IL1 = Cache(space,
                             level_sizes[0],
                             level_associativites[0],
                             blocksize,
                             policy,
                             name='IL1',
                             rlatency=level_latencies[0][0],
                             wlatency=level_latencies[0][1])
            self.UL2 = Cache(space,
                             level_sizes[1],
                             level_associativites[1],
                             blocksize,
                             policy,
                             name='UL2',
                             rlatency=level_latencies[1][0],
                             wlatency=level_latencies[1][1])
            self.UL3 = Cache(space,
                             level_sizes[2],
                             level_associativites[2],
                             blocksize,
                             policy,
                             name='UL3',
                             rlatency=level_latencies[2][0],
                             wlatency=level_latencies[2][1])
            self.MEM = Cache(space,
                             blocksize,
                             1,
                             blocksize,
                             policy,
                             name='MEM',
                             rlatency=level_latencies[3][0],
                             wlatency=level_latencies[3][1])
        else:
            self.DL1 = Cache(space,
                             level_sizes[0],
                             level_associativites[0],
                             blocksize,
                             policy,
                             name='DL1')
            self.IL1 = Cache(space,
                             level_sizes[0],
                             level_associativites[0],
                             blocksize,
                             policy,
                             name='IL1')
            self.UL2 = Cache(space,
                             level_sizes[1],
                             level_associativites[1],
                             blocksize,
                             policy,
                             name='UL2')
            self.UL3 = Cache(space,
                             level_sizes[2],
                             level_associativites[2],
                             blocksize,
                             policy,
                             name='UL3')
            self.MEM = Cache(space,
                             blocksize,
                             1,
                             blocksize,
                             policy,
                             name='MEM')

        self.stats = CacheMetrics([
            self.IL1.name, self.DL1.name, self.UL2.name, self.UL3.name,
            self.MEM.name
        ], [
            (self.DL1.name, self.DL1.name),
            (self.DL1.name, self.UL2.name),
            (self.DL1.name, self.UL3.name),
            (self.DL1.name, self.MEM.name),
            (self.UL2.name, self.UL2.name),
            (self.UL2.name, self.UL3.name),
            (self.UL2.name, self.MEM.name),
            (self.UL3.name, self.UL3.name),
            (self.UL3.name, self.MEM.name),
            (self.MEM.name, self.MEM.name),
            (self.MEM.name, self.UL3.name),
            (self.MEM.name, self.UL2.name),
            (self.MEM.name, self.DL1.name),
            (self.UL3.name, self.UL2.name),
            (self.UL3.name, self.DL1.name),
            (self.UL2.name, self.DL1.name),
            (self.IL1.name, self.IL1.name),
            (self.IL1.name, self.UL2.name),
            (self.IL1.name, self.UL3.name),
            (self.IL1.name, self.MEM.name),
            (self.MEM.name, self.IL1.name),
            (self.UL3.name, self.IL1.name),
            (self.UL2.name, self.IL1.name),
        ])
Example #21
class ThreeLevelSUUInclusiveCacheSystem:
    def __init__(self,
                 space: AddressSpace,
                 policy: ReplacementPolicy,
                 level_sizes: list,
                 level_associativites: list,
                 blocksize,
                 level_latencies: list = None):
        self._space = space
        self._replacement_policy = policy
        self._blocksize = blocksize

        if not isinstance(level_sizes, list) or len(level_sizes) != 3:
            raise AttributeError(
                "Field 'level_sizes' must be a list of length 3 indicating I/DL1, UL2, and UL3 cache sizes"
            )

        if not isinstance(level_associativites,
                          list) or len(level_associativites) != 3:
            raise AttributeError(
                "Field 'level_associativites' must be a list of length 3 indicating I/DL1, UL2, and UL3 associativity"
            )

        if level_latencies:
            if not isinstance(level_latencies,
                              list) or len(level_latencies) != 4:
                raise AttributeError(
                    "Field 'level_latencies' must be a list of length 4 indicating I/DL1, UL2, UL3, and MEM latencies"
                )
            for level in level_latencies:
                if not isinstance(level, tuple) or len(level) != 2:
                    raise AttributeError(
                        "Field 'level_latencies' must be a list of tuples indicating (read_latency, write_latency)"
                    )
            self.DL1 = Cache(space,
                             level_sizes[0],
                             level_associativites[0],
                             blocksize,
                             policy,
                             name='DL1',
                             rlatency=level_latencies[0][0],
                             wlatency=level_latencies[0][1])
            self.IL1 = Cache(space,
                             level_sizes[0],
                             level_associativites[0],
                             blocksize,
                             policy,
                             name='IL1',
                             rlatency=level_latencies[0][0],
                             wlatency=level_latencies[0][1])
            self.UL2 = Cache(space,
                             level_sizes[1],
                             level_associativites[1],
                             blocksize,
                             policy,
                             name='UL2',
                             rlatency=level_latencies[1][0],
                             wlatency=level_latencies[1][1])
            self.UL3 = Cache(space,
                             level_sizes[2],
                             level_associativites[2],
                             blocksize,
                             policy,
                             name='UL3',
                             rlatency=level_latencies[2][0],
                             wlatency=level_latencies[2][1])
            self.MEM = Cache(space,
                             blocksize,
                             1,
                             blocksize,
                             policy,
                             name='MEM',
                             rlatency=level_latencies[3][0],
                             wlatency=level_latencies[3][1])
        else:
            self.DL1 = Cache(space,
                             level_sizes[0],
                             level_associativites[0],
                             blocksize,
                             policy,
                             name='DL1')
            self.IL1 = Cache(space,
                             level_sizes[0],
                             level_associativites[0],
                             blocksize,
                             policy,
                             name='IL1')
            self.UL2 = Cache(space,
                             level_sizes[1],
                             level_associativites[1],
                             blocksize,
                             policy,
                             name='UL2')
            self.UL3 = Cache(space,
                             level_sizes[2],
                             level_associativites[2],
                             blocksize,
                             policy,
                             name='UL3')
            self.MEM = Cache(space,
                             blocksize,
                             1,
                             blocksize,
                             policy,
                             name='MEM')

        self.stats = CacheMetrics([
            self.IL1.name, self.DL1.name, self.UL2.name, self.UL3.name,
            self.MEM.name
        ], [
            (self.DL1.name, self.DL1.name),
            (self.DL1.name, self.UL2.name),
            (self.DL1.name, self.UL3.name),
            (self.DL1.name, self.MEM.name),
            (self.UL2.name, self.UL2.name),
            (self.UL2.name, self.UL3.name),
            (self.UL2.name, self.MEM.name),
            (self.UL3.name, self.UL3.name),
            (self.UL3.name, self.MEM.name),
            (self.MEM.name, self.MEM.name),
            (self.MEM.name, self.UL3.name),
            (self.MEM.name, self.UL2.name),
            (self.MEM.name, self.DL1.name),
            (self.UL3.name, self.UL2.name),
            (self.UL3.name, self.DL1.name),
            (self.UL2.name, self.DL1.name),
            (self.IL1.name, self.IL1.name),
            (self.IL1.name, self.UL2.name),
            (self.IL1.name, self.UL3.name),
            (self.IL1.name, self.MEM.name),
            (self.MEM.name, self.IL1.name),
            (self.UL3.name, self.IL1.name),
            (self.UL2.name, self.IL1.name),
        ])

    def perform(self, address, for_data, is_fetch):
        cache = self.DL1 if for_data else self.IL1
        block = cache.get(address)
        self.stats.add_latency(
            cache.read_latency if is_fetch else cache.write_latency, is_fetch)
        hit_in = cache
        if block is None:
            self.stats.add_miss(cache.name)
            cache = self.UL2
            block = cache.get(address)
            self.stats.add_latency(
                cache.read_latency if is_fetch else cache.write_latency,
                is_fetch)
            hit_in = self.UL2

            if block is None:
                self.stats.add_miss(cache.name)
                cache = self.UL3
                block = cache.get(address)
                self.stats.add_latency(
                    cache.read_latency if is_fetch else cache.write_latency,
                    is_fetch)
                hit_in = self.UL3
                if block is None:
                    self.stats.add_miss(cache.name)
                    # Not in the cache, fetch from memory
                    self.stats.add_latency(
                        self.MEM.read_latency
                        if is_fetch else self.MEM.write_latency, is_fetch)
                    # Allocate new block from MEM to L3
                    block = Block(address & self.UL3.get_base_address_mask(),
                                  False, self.UL3.get_policy())
                    if is_fetch:
                        block.read()
                    else:
                        block.write()

                    hit_in = self.MEM
                    cache = self.UL3
                    evicted = cache.put(block)
                    if evicted:
                        # If the evicted block is in L1, transition from L1 to MEM
                        if self.DL1.get(evicted.base_address()):
                            self.stats.add_transition(
                                self.DL1.name,
                                self.MEM.name,
                                evicted.base_address(),
                                total_size=self.DL1.get_block_size())
                            # If the evicted block is in L1, evict it too, and from L2
                            self.DL1.remove_base(evicted.base_address())
                            self.UL2.remove_base(evicted.base_address())
                        elif self.IL1.get(evicted.base_address()):
                            self.stats.add_transition(
                                self.IL1.name,
                                self.MEM.name,
                                evicted.base_address(),
                                total_size=self.IL1.get_block_size())
                            # If the evicted block is in L1, evict it too, and from L2
                            self.IL1.remove_base(evicted.base_address())
                            self.UL2.remove_base(evicted.base_address())
                        elif self.UL2.get(evicted.base_address()):
                            self.stats.add_transition(
                                self.UL2.name,
                                self.MEM.name,
                                evicted.base_address(),
                                total_size=self.UL2.get_block_size())
                            # If the evicted block is in L2, evict it
                            self.UL2.remove_base(evicted.base_address())
                        else:
                            self.stats.add_transition(
                                self.UL3.name,
                                self.MEM.name,
                                evicted.base_address(),
                                total_size=self.UL3.get_block_size())
                else:
                    if is_fetch:
                        block.read()
                    else:
                        block.write()

                # Allocate new block from L3 to L2
                block = Block(address & self.UL2.get_base_address_mask(),
                              block.is_dirty(), self.UL2.get_policy())
                if is_fetch:
                    block.read()
                else:
                    block.write()

                cache = self.UL2
                evicted = cache.put(block)
                if evicted:
                    # If the evicted block is in L1, transition from L1 to L3, else L2 to L3
                    if self.DL1.get(evicted.base_address()):
                        self.stats.add_transition(
                            self.DL1.name,
                            self.UL3.name,
                            evicted.base_address(),
                            total_size=self.DL1.get_block_size())
                        # If the evicted block is in L1, evict it too
                        self.DL1.remove_base(evicted.base_address())
                    elif self.IL1.get(evicted.base_address()):
                        self.stats.add_transition(
                            self.IL1.name,
                            self.UL3.name,
                            evicted.base_address(),
                            total_size=self.IL1.get_block_size())
                        # If the evicted block is in L1, evict it too
                        self.IL1.remove_base(evicted.base_address())
                    else:
                        self.stats.add_transition(
                            self.UL2.name,
                            self.UL3.name,
                            evicted.base_address(),
                            total_size=self.UL2.get_block_size())

            else:
                if is_fetch:
                    block.read()
                else:
                    block.write()
                # Guaranteed by inclusivity
                self.UL3.get(address).touch()

            # Allocate new block from L2 to L1
            block = Block(
                address &
                (self.DL1 if for_data else self.IL1).get_base_address_mask(),
                block.is_dirty(),
                (self.DL1 if for_data else self.IL1).get_policy())
            if is_fetch:
                block.read()
            else:
                block.write()

            cache = self.DL1 if for_data else self.IL1
            evicted = cache.put(block)
            if evicted:
                self.stats.add_transition(
                    (self.DL1 if for_data else self.IL1).name,
                    self.UL2.name,
                    evicted.base_address(),
                    total_size=(self.DL1
                                if for_data else self.IL1).get_block_size())
        else:
            if is_fetch:
                block.read()
            else:
                block.write()
            # Guaranteed by inclusivity
            self.UL2.get(address).touch()
            self.UL3.get(address).touch()

        self._replacement_policy.step()
        self.stats.add_hit(address, hit_in.name, is_fetch, not for_data)
        self.stats.add_transition(hit_in.name, cache.name, address)
        return cache.name, hit_in.name, block

    def perform_fetch(self, address, for_data=True):
        self.perform(address, for_data, True)

    def perform_set(self, address, for_data=True):
        self.perform(address, for_data, False)

    def populate(self, address, cache: Cache, dirty=False):
        base_address = address & cache.get_base_address_mask()
        block = Block(base_address, dirty, self._replacement_policy)
        error = cache.put(block)
        if error is not None:
            raise EnvironmentError(
                """Cold placement of the following address caused an eviction in the cache. You probably didn't want this.

                Address: {} was placed into the {} cache. 
                Address: {} was evicted as a result!
                """.format(hex(base_address), cache.name, error.base_address))