Example #1
def create_list_of_feeds():
    dc = DatabaseConnector()
    list_of_feeds = dc.get_all_feed_ids()
    new_list = []
    for feed_id in list_of_feeds:
        new_list.append([feed_id, dc.get_current_seq_no(feed_id)])
    return new_list
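# Hypothetical usage sketch, not part of the original example: CBOR-encode the
# [feed_id, seq_no] pairs returned by create_list_of_feeds() so they can be sent
# to a peer. The cbor2 dependency and the surrounding transport are assumptions.
import cbor2

status_payload = cbor2.dumps(create_list_of_feeds())  # bytes for the transport layer
for feed_id, seq_no in cbor2.loads(status_payload):
    print('feed', feed_id, 'is currently at sequence number', seq_no)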
def test_get_current_event():
    try:
        with LogCapture():
            private_key = secrets.token_bytes(32)
            signing_key = SigningKey(private_key)
            public_key_feed_id = signing_key.verify_key.encode()

            content = Content('whateverapp/whateveraction', {
                'oneKey': 'somevalue',
                'someotherkey': 1
            })
            hash_of_content = hashlib.sha256(content.get_as_cbor()).hexdigest()
            hash_of_prev = None
            meta = Meta(public_key_feed_id, 0, hash_of_prev, 'ed25519',
                        ('sha256', hash_of_content))
            signature = signing_key.sign(meta.get_as_cbor())._signature
            event = Event(meta, signature, content).get_as_cbor()

            connector = DatabaseConnector()
            connector.add_event(event)
            result = connector.get_current_event(public_key_feed_id)
            result = Event.from_cbor(result)
        assert result.meta.hash_of_content[1] == meta.hash_of_content[1]
    finally:
        try:
            if os.path.exists('cborDatabase.sqlite'):
                os.remove('cborDatabase.sqlite')
                if os.path.exists('eventDatabase.sqlite'):
                    os.remove('eventDatabase.sqlite')
            else:
                assert False
        except PermissionError:
            print('Database is still in use')
def test_get_event():
    try:
        with LogCapture() as log_cap:
            private_key = secrets.token_bytes(32)
            signing_key = SigningKey(private_key)
            public_key_feed_id = signing_key.verify_key.encode()

            content0 = Content('whateverapp/whateveraction', {
                'firstkey': 'somevalue',
                'someotherkey': 3
            })
            hash_of_content = hashlib.sha256(
                content0.get_as_cbor()).hexdigest()
            hash_of_prev = None
            meta = Meta(public_key_feed_id, 0, hash_of_prev, 'ed25519',
                        ('sha256', hash_of_content))
            signature = signing_key.sign(meta.get_as_cbor())._signature
            event = Event(meta, signature, content0).get_as_cbor()

            connector = DatabaseConnector()
            connector.add_event(event)
            meta = Meta(public_key_feed_id, 1, hash_of_prev, 'ed25519',
                        ('sha256', hash_of_content))
            content1 = Content('whateverapp/whateveraction', {
                'secondkey': 'somevalue',
                'someotherkey': 4
            })
            signature = signing_key.sign(meta.get_as_cbor())._signature
            event = Event(meta, signature, content1).get_as_cbor()
            connector.add_event(event)
            content2 = Content('whateverapp/whateveraction', {
                'thirdkey': 'somevalue',
                'someotherkey': 5
            })
            meta = Meta(public_key_feed_id, 2, hash_of_prev, 'ed25519',
                        ('sha256', hash_of_content))
            signature = signing_key.sign(meta.get_as_cbor())._signature
            event = Event(meta, signature, content2).get_as_cbor()
            connector.add_event(event)
            res0 = connector.get_event(public_key_feed_id, 0)
            res1 = connector.get_event(public_key_feed_id, 1)
            res2 = connector.get_event(public_key_feed_id, 2)
            result0 = Event.from_cbor(res0)
            result1 = Event.from_cbor(res1)
            result2 = Event.from_cbor(res2)
        assert result0.content.content == content0.content
        assert result1.content.content == content1.content
        assert result2.content.content == content2.content
        print(log_cap)
    finally:
        try:
            if os.path.exists('cborDatabase.sqlite'):
                os.remove('cborDatabase.sqlite')
                if os.path.exists('eventDatabase.sqlite'):
                    os.remove('eventDatabase.sqlite')
            else:
                assert False
        except PermissionError:
            print('Database is still in use')
def test_event_factory():
    ecf = EventFactory()
    new_event = ecf.next_event('whateverapp/whateveraction', {
        'oneKey': 'somevalue',
        'someotherkey': 1
    })
    connector = DatabaseConnector()
    connector.add_event(new_event)
    result = connector.get_current_event(ecf.get_feed_id())
    result = Event.from_cbor(result)
    assert result.content.content[0] == 'whateverapp/whateveraction'
Example #5
def filter_events(list_with_needed_extensions):
    event_list = []
    dc = DatabaseConnector()
    for info in list_with_needed_extensions:
        appended_events = []
        feed_id = info[0]
        seq_num = info[1]
        num = dc.get_current_seq_no(feed_id)
        for i in range(seq_num, num):
            extension = dc.get_event(feed_id, i + 1)
            appended_events.append(extension)
        print("Extension with", len(appended_events), "events")
        event_list.append(appended_events)
    return event_list
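# Hypothetical responder-side sketch, not part of the original example: answer a
# peer's "I want" list by collecting the missing events with filter_events() and
# CBOR-encoding them for transport. The cbor2 dependency is an assumption.
import cbor2

def build_extension_reply(i_want_extensions_list):
    # i_want_extensions_list: [[feed_id, last_known_seq_no], ...] as sent by the peer
    extensions = filter_events(i_want_extensions_list)
    return cbor2.dumps(extensions)  # bytes that the peer feeds into sync_database()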
def test_event_factory():
    ecf = EventFactory()
    new_event = ecf.next_event('whateverapp/whateveraction', {
        'oneKey': 'somevalue',
        'someotherkey': 1
    })
    connector = DatabaseConnector(
        path_to_db='C:\\Users\\user\\Google Drive\\Studies\\4. Semester\\30526-01 - Introduction to Internet and Security\\Project\\BACnet\\groups\\07-logStore'
    )
    connector.add_event(new_event)
    result = connector.get_current_event(ecf.get_feed_id())
    result = Event.from_cbor(result)
    assert result.content.content[0] == 'whateverapp/whateveraction'
def test_get_event():
    with session_scope():
        with LogCapture() as log_cap:
            private_key = secrets.token_bytes(32)
            signing_key = SigningKey(private_key)
            public_key_feed_id = signing_key.verify_key.encode()

            content0 = Content('whateverapp/whateveraction', {
                'firstkey': 'somevalue',
                'someotherkey': 3
            })
            hash_of_content = hashlib.sha256(
                content0.get_as_cbor()).hexdigest()
            hash_of_prev = None
            meta = Meta(public_key_feed_id, 0, hash_of_prev, 'ed25519',
                        ('sha256', hash_of_content))
            signature = signing_key.sign(meta.get_as_cbor())._signature
            event = Event(meta, signature, content0).get_as_cbor()

            connector = DatabaseConnector()
            connector.add_event(event)
            meta = Meta(public_key_feed_id, 1, hash_of_prev, 'ed25519',
                        ('sha256', hash_of_content))
            content1 = Content('whateverapp/whateveraction', {
                'secondkey': 'somevalue',
                'someotherkey': 4
            })
            signature = signing_key.sign(meta.get_as_cbor())._signature
            event = Event(meta, signature, content1).get_as_cbor()
            connector.add_event(event)
            content2 = Content('whateverapp/whateveraction', {
                'thirdkey': 'somevalue',
                'someotherkey': 5
            })
            meta = Meta(public_key_feed_id, 2, hash_of_prev, 'ed25519',
                        ('sha256', hash_of_content))
            signature = signing_key.sign(meta.get_as_cbor())._signature
            event = Event(meta, signature, content2).get_as_cbor()
            connector.add_event(event)
            res0 = connector.get_event(public_key_feed_id, 0)
            res1 = connector.get_event(public_key_feed_id, 1)
            res2 = connector.get_event(public_key_feed_id, 2)
            result0 = Event.from_cbor(res0)
            result1 = Event.from_cbor(res1)
            result2 = Event.from_cbor(res2)
        assert result0.content.content == content0.content
        assert result1.content.content == content1.content
        assert result2.content.content == content2.content
        print(log_cap)
def verify_validation(feed, received_event):
    feed_id = feed[0]
    seq_num = feed[1]

    dc = DatabaseConnector()
    last_event = dc.get_event(feed_id, seq_num)

    # Check whether the last local event exists
    if last_event is None:
        # A sequence number of -1 marks a new feed that still has to be created
        if seq_num == -1:
            print("Awaiting creation of new feed:", feed_id)
            return True
        else:
            return False

    last_event = cbor2.loads(last_event)
    last_meta = cbor2.loads(last_event[0])  # meta

    received_event = cbor2.loads(received_event)
    received_meta = cbor2.loads(received_event[0])  # meta

    # Check the feed IDs
    if last_meta[0] != received_meta[0]:
        print("Feed ID validation... FAILED")
        return False
    print("Feed ID validation... PASSED")

    # Check the sequence numbers
    if last_meta[1] + 1 != received_meta[1]:
        print("Seq-Num validation... FAILED")
        return False
    print("Seq-Num validation... PASSED")

    # Check the hash of the previous meta block
    if received_meta[2][0] != 0 or get_hash(
            last_event[0]) != received_meta[2][1]:
        print("Meta Hash validation... FAILED")
        return False
    print("Meta Hash validation... PASSED")

    # Check that the signature scheme matches
    if last_meta[3] != received_meta[3]:
        print("Signature validation... FAILED")
        return False
    print("Signature validation... PASSED")
    print("Extension... VALID")
    return True
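# Hypothetical sketch, not part of the original snippet: accepting a feed that is
# not present locally. Such feeds are requested with the sequence number -1, which
# verify_validation() above treats as "awaiting creation of a new feed".
def accept_new_feed(feed_id, first_event_cbor):
    dc = DatabaseConnector()
    if verify_validation([feed_id, -1], first_event_cbor):
        dc.add_event(first_event_cbor)
        return True
    return False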
 @classmethod
 def setUpClass(cls):
     cls.lm = LogMerge.LogMerge()
     cls.dc = DatabaseConnector()
     cls.vf = Verification()
     cls.master_feed_id = cls.dc.get_master_feed_id()
     for folder_path in TEST_FOLDERS_RELATIVE_PATHS:
         if not os.path.exists(folder_path):
             os.mkdir(folder_path)
Example #10
def compare_feeds(list_of_feeds):
    need_list = []
    dc = DatabaseConnector()
    for elem in list_of_feeds:
        feed_id = elem[0]
        seq_num = elem[1]
        this_seq_num = dc.get_current_seq_no(feed_id)

        # this_seq_num is None means the feed does not exist locally; request it from the start with the marker -1
        if this_seq_num is None:
            print("Entry does not exist...")
            need_list.append([feed_id, -1])
        elif this_seq_num < seq_num:
            elem[1] = this_seq_num
            need_list.append(elem)

    return need_list
Example #11
def sync_database(i_want_extensions_list, received_extensions):
    received_extensions = cbor.loads(received_extensions)
    dc = DatabaseConnector()
    if len(i_want_extensions_list) != len(received_extensions):
        print(
            "Number of received extensions is not as expected. Sync aborted.")
        return
    print("Number of received extensions:", len(received_extensions))
    for i, val in enumerate(i_want_extensions_list):
        appended_events_list = received_extensions[i]
        # Check if valid
        if verify_validation(val, appended_events_list[0]):
            for ev in appended_events_list:
                dc.add_event(ev)
        else:
            print(
                "The extension is not valid! Sync of one received feed is not possible."
            )
    print("Finished synchronising!")
Example #12
 def setup_db(self, user_name=''):
     mas_id = self._fcc.get_host_master_id()
     if mas_id is not None:
         self.master_feed_id = self._fcc.get_host_master_id()
         self.db_connector = DatabaseConnector()
         self.user_name = self.get_user_name()
     else:
         make_dirs()
         self._ecf = EventFactory(None, DIR_MAIN + '/' + 'Keys', False)
         self._eventCreationWrapper = EventCreationWrapper(self._ecf)
         _firstEvent = self._eventCreationWrapper.create_MASTER()
         _secondEvent = self._eventCreationWrapper.create_radius(1)
         _thirdEvent = self._eventCreationWrapper.create_name(user_name)
         self._fcc.add_event(_firstEvent)
         self._fcc.add_event(_secondEvent)
         self._fcc.add_event(_thirdEvent)
         self.master_feed_id = self._fcc.get_host_master_id()
         self.db_connector = DatabaseConnector()
         self.user_name = user_name
def test_get_current_event():
    with session_scope():
        with LogCapture():
            private_key = secrets.token_bytes(32)
            signing_key = SigningKey(private_key)
            public_key_feed_id = signing_key.verify_key.encode()

            content = Content('whateverapp/whateveraction', {
                'oneKey': 'somevalue',
                'someotherkey': 1
            })
            hash_of_content = hashlib.sha256(content.get_as_cbor()).hexdigest()
            hash_of_prev = None
            meta = Meta(public_key_feed_id, 0, hash_of_prev, 'ed25519',
                        ('sha256', hash_of_content))
            signature = signing_key.sign(meta.get_as_cbor())._signature
            event = Event(meta, signature, content).get_as_cbor()

            connector = DatabaseConnector()
            connector.add_event(event)
            result = connector.get_current_event(public_key_feed_id)
            result = Event.from_cbor(result)
        assert result.meta.hash_of_content[1] == meta.hash_of_content[1]
Example #14
from logStore.transconn.database_connector import DatabaseConnector
import cbor2
import event



dc = DatabaseConnector()

feed_ids = dc.get_all_feed_ids()
n = dc.get_current_seq_no(feed_ids[1])
print("seq num:", n, "(feed 1)")
for i in range(0, n + 1):
    raw_event = cbor2.loads(dc.get_event(feed_ids[1], i))  # event is [meta, signature, content]
    print(cbor2.loads(raw_event[0]))  # decoded meta block
    # print(raw_event[1])  # signature bytes
    content = cbor2.loads(raw_event[2])  # decoded content
    parts = str(content[0]).split("/")
    print(parts[0])  # application part of the 'app/action' identifier
    print(event.get_hash(raw_event[0]))  # hash of the meta block
    print("-------------------")

Example #15
class BACCore:
    def __init__(self):
        self.pickle_file_names = ['personList.pkl', 'username.pkl']  # used to reset the user or create a new one
        self.switch = ["", "", ""]

    # checks whether a database already exists by looking for a master feed ID
    def exists_db(self):
        self._fcc = FeedCtrlConnection()
        master_feed_id = self._fcc.get_host_master_id()
        if master_feed_id is not None:
            return 1
        return 0

    # creates a new database and its first three master feed events if none exists yet;
    # otherwise it reuses the existing one
    def setup_db(self, user_name=''):
        mas_id = self._fcc.get_host_master_id()
        if mas_id is not None:
            self.master_feed_id = self._fcc.get_host_master_id()
            self.db_connector = DatabaseConnector()
            self.user_name = self.get_user_name()
        else:
            make_dirs()
            self._ecf = EventFactory(None, DIR_MAIN + '/' + 'Keys', False)
            self._eventCreationWrapper = EventCreationWrapper(self._ecf)
            _firstEvent = self._eventCreationWrapper.create_MASTER()
            _secondEvent = self._eventCreationWrapper.create_radius(1)
            _thirdEvent = self._eventCreationWrapper.create_name(user_name)
            self._fcc.add_event(_firstEvent)
            self._fcc.add_event(_secondEvent)
            self._fcc.add_event(_thirdEvent)
            self.master_feed_id = self._fcc.get_host_master_id()
            self.db_connector = DatabaseConnector()
            self.user_name = user_name

    # creates a new feed (adds 2 events to the master feed and creates the first and second event of the new feed)
    def create_feed(self, article_feed_name):
        fcc = FeedCtrlConnection()
        ect = EventCreationTool()
        ect.set_path_to_keys(DIR_MAIN + '/' + 'Keys', False)

        event = self.db_connector.get_current_event(self.master_feed_id)
        ecf_master = EventFactory(event, DIR_MAIN + '/' + 'Keys', False)
        eventCreationWrapper = EventCreationWrapper(ecf_master)

        public_key = ect.generate_feed()
        new_feed_event = eventCreationWrapper.create_newFeed(
            public_key, 'bac_news')
        trust_feed_event = eventCreationWrapper.create_trust(public_key)
        first_event = ect.create_first_event(
            public_key, 'bac_news/new_article',
            {'master_feed': self.master_feed_id})

        fcc.add_event(new_feed_event)
        fcc.add_event(trust_feed_event)
        fcc.add_event(first_event)

        # creates event containing list name, host name and creation date (second event of the newly created feed)
        ect = EventCreationTool()
        ect.set_path_to_keys(DIR_MAIN + '/' + 'Keys', False)
        dictionary = {
            'host': self.get_event_content(self.master_feed_id, 2)[1]['name'],
            'list_name': article_feed_name,
            'date': datetime.now().isoformat()
        }
        second_event = ect.create_event_from_previous(first_event,
                                                      'bac_news/new_article',
                                                      dictionary)
        fcc.add_event(second_event)

    # creates an event with the given JSON content and appends it to the feed identified by feed_name
    def create_event(self, feed_name, json_file):
        feed_id = self.get_id_from_feed_name(feed_name)
        event = self.db_connector.get_current_event(feed_id)

        ect = EventCreationTool()
        ect.set_path_to_keys(DIR_MAIN + '/' + 'Keys', False)
        new_event = ect.create_event_from_previous(event,
                                                   'bac_news/new_article',
                                                   {'json': json_file})
        fcc = FeedCtrlConnection()
        fcc.add_event(new_event)

    # exports the content of the database to the given path as one or more pcap files
    def export_db_to_pcap(self, path):
        dictionary = {}
        feed_ids = self.get_all_feed_ids()
        for f_id in feed_ids:
            dictionary[f_id] = -1
        lm = LogMerge()
        lm.export_logs(path, dictionary)

    # imports pcap files from the given path to the database
    def import_from_pcap_to_db(self, path):
        lm = LogMerge()
        lm.import_logs(path)

    def get_all_feed_ids(self):
        return self.db_connector.get_all_feed_ids()

    def get_all_feed_name_host_tuples(self):
        feed_names = list()
        feed_ids = self.get_all_feed_ids()
        for feed_id in feed_ids:
            if self.get_event_content(feed_id, 0)[0] == "MASTER/MASTER":
                continue
            host = self.get_host_from_feed(feed_id)
            feed_names.append((self.get_feedname_from_id(feed_id), host))
        return feed_names

    def get_event_content(self, feed_id, seq_no):
        cbor_event = self.db_connector.get_event(feed_id, seq_no)
        event = Event.from_cbor(cbor_event)
        return event.content.content

    def get_feednames_from_host(self):
        feed_names = list()
        feed_ids = self.get_all_feed_ids()
        for feed_id in feed_ids:
            if self.get_event_content(feed_id, 0)[0] == "MASTER/MASTER":
                continue
            host = self.get_host_from_feed(feed_id)
            if host == self.user_name:  # the host of this feed is also the host of this app
                feed_names.append(self.get_feedname_from_id(feed_id))
        return feed_names

    def get_feedname_from_id(self, feed_id):
        return self.get_event_content(feed_id, 1)[1]["list_name"]

    def get_host_from_feed(self, feed_id):
        return self.get_event_content(feed_id, 1)[1]["host"]

    def get_id_from_feed_name(self, feed_name):  # only for this host's own feed ids
        feed_ids = self.get_all_feed_ids()
        for feed_id in feed_ids:
            # skip master feeds: after an import, another user's master feed may
            # come before this host's own feeds in the database
            if self.get_event_content(feed_id, 0)[0] == "MASTER/MASTER":
                continue
            host = self.get_host_from_feed(feed_id)
            if host == self.user_name:
                if feed_name == self.get_feedname_from_id(feed_id):
                    return feed_id
        return None

    def get_id_from_feed_name_and_host(self, feedname_host):
        feed_name = feedname_host[0]
        host = feedname_host[1]
        feed_ids = self.get_all_feed_ids()
        for feed_id in feed_ids:
            if self.get_event_content(feed_id, 0)[0] == "MASTER/MASTER":
                continue
            feed_host = self.get_host_from_feed(feed_id)
            if host == feed_host:
                if feed_name == self.get_feedname_from_id(feed_id):
                    return feed_id
        return None

    def get_json_files_from_feed(self, feedname_host):  # feedname_host = (feed_name, host) tuple
        json_files = list()
        feed_name = feedname_host[0]
        host = feedname_host[1]
        feed_id = self.get_id_from_feed_name_and_host((feed_name, host))
        max_seq_no = self.db_connector.get_current_seq_no(feed_id)
        if max_seq_no is None:
            max_seq_no = -1
        for i in range(2, max_seq_no + 1):
            json_files.append(self.get_json_from_event(feed_id, i))
        return json_files

    def get_json_from_event(self, feed_id, seq_no):
        return self.get_event_content(feed_id, seq_no)[1]['json']

    def get_user_name(self):
        return (self.get_event_content(self.master_feed_id, 2)[1]["name"])
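# Hypothetical usage sketch of the BACCore class above, for illustration only:
# the user name, feed name and JSON payload are made-up placeholder values, and
# all logStore/EventCreationTool dependencies are assumed to be importable.
core = BACCore()
if core.exists_db():
    core.setup_db()                 # reuse the existing database
else:
    core.setup_db('alice')          # create the master feed plus name/radius events
core.create_feed('example_list')    # announce a new article feed on the master feed
core.create_event('example_list', '{"title": "hello"}')
print(core.get_all_feed_name_host_tuples())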
def test_get_current_seq_no():
    try:
        private_key = secrets.token_bytes(32)
        signing_key = SigningKey(private_key)
        public_key_feed_id1 = signing_key.verify_key.encode()

        content = Content('whateverapp/whateveraction', {
            'somekey': 'somevalue',
            'someotherkey': 2
        })
        hash_of_content = hashlib.sha256(content.get_as_cbor()).hexdigest()
        hash_of_prev = None
        meta = Meta(public_key_feed_id1, 0, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()

        connector = DatabaseConnector()
        connector.add_event(event)
        meta = Meta(public_key_feed_id1, 1, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)
        meta = Meta(public_key_feed_id1, 2, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)
        content = Content('whateverapp/whateveraction', {
            'test1': 'somevalue',
            'someotherkey': 2
        })
        meta = Meta(public_key_feed_id1, 3, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)

        private_key = secrets.token_bytes(32)
        signing_key = SigningKey(private_key)
        public_key_feed_id2 = signing_key.verify_key.encode()
        meta = Meta(public_key_feed_id2, 0, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)
        meta = Meta(public_key_feed_id2, 1, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)
        meta = Meta(public_key_feed_id2, 2, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)
        meta = Meta(public_key_feed_id2, 3, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)
        content = Content('whateverapp/whateveraction', {
            'test2': 'somevalue',
            'someotherkey': 2
        })
        meta = Meta(public_key_feed_id2, 4, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)
        res1 = connector.get_current_seq_no(public_key_feed_id1)
        res2 = connector.get_current_seq_no(public_key_feed_id2)
        result1 = connector.get_current_event(public_key_feed_id1)
        result2 = connector.get_current_event(public_key_feed_id2)
        print(result1)
        print(result2)
        assert res1 == 3
        assert res2 == 4
    finally:
        try:
            if os.path.exists('cborDatabase.sqlite'):
                os.remove('cborDatabase.sqlite')
                if os.path.exists('eventDatabase.sqlite'):
                    os.remove('eventDatabase.sqlite')
            else:
                assert False
        except PermissionError:
            print('Database is still in use')
def test_get_current_seq_no():
    with session_scope():
        private_key = secrets.token_bytes(32)
        signing_key = SigningKey(private_key)
        public_key_feed_id1 = signing_key.verify_key.encode()

        content = Content('whateverapp/whateveraction', {
            'somekey': 'somevalue',
            'someotherkey': 2
        })
        hash_of_content = hashlib.sha256(content.get_as_cbor()).hexdigest()
        hash_of_prev = None
        meta = Meta(public_key_feed_id1, 0, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()

        connector = DatabaseConnector()
        connector.add_event(event)
        meta = Meta(public_key_feed_id1, 1, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)
        meta = Meta(public_key_feed_id1, 2, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)
        content = Content('whateverapp/whateveraction', {
            'test1': 'somevalue',
            'someotherkey': 2
        })
        meta = Meta(public_key_feed_id1, 3, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)

        private_key = secrets.token_bytes(32)
        signing_key = SigningKey(private_key)
        public_key_feed_id2 = signing_key.verify_key.encode()
        meta = Meta(public_key_feed_id2, 0, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)
        meta = Meta(public_key_feed_id2, 1, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)
        meta = Meta(public_key_feed_id2, 2, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)
        meta = Meta(public_key_feed_id2, 3, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)
        content = Content('whateverapp/whateveraction', {
            'test2': 'somevalue',
            'someotherkey': 2
        })
        meta = Meta(public_key_feed_id2, 4, hash_of_prev, 'ed25519',
                    ('sha256', hash_of_content))
        signature = signing_key.sign(meta.get_as_cbor())._signature
        event = Event(meta, signature, content).get_as_cbor()
        connector.add_event(event)
        res1 = connector.get_current_seq_no(public_key_feed_id1)
        res2 = connector.get_current_seq_no(public_key_feed_id2)
        result1 = connector.get_current_event(public_key_feed_id1)
        result2 = connector.get_current_event(public_key_feed_id2)
        print(result1)
        print(result2)
        assert res1 == 3
        assert res2 == 4
Example #18
class LogMerge:

    def __init__(self):
        self.DB = DatabaseConnector()
        self.EV = Verification()

    def get_database_status(self):
        list_of_feed_ids = self.DB.get_all_feed_ids()
        dict_of_feed_ids_and_corresponding_sequence_numbers = {}
        for feed_id in list_of_feed_ids:
            if self.EV.check_outgoing(feed_id):
                dict_of_feed_ids_and_corresponding_sequence_numbers[feed_id] = self.DB.get_current_seq_no(feed_id)
        return dict_of_feed_ids_and_corresponding_sequence_numbers

    def export_logs(self, path_to_pcap_folder, dict_feed_id_current_seq_no, maximum_events_per_feed_id):
        for feed_id, current_seq_no in dict_feed_id_current_seq_no.items():
            if not self.EV.check_outgoing(feed_id):
                continue
            event_list = []
            current_seq_no += 1
            next_event = self.DB.get_event(feed_id, current_seq_no)
            while next_event is not None and len(event_list) < maximum_events_per_feed_id:
                event_list.append(next_event)
                current_seq_no += 1
                next_event = self.DB.get_event(feed_id, current_seq_no)
            PCAP.write_pcap(path_to_pcap_folder + "/" + str(feed_id).split("'")[1] + "_v", event_list)

    def import_logs(self, path_of_pcap_files_folder):
        list_of_cbor_events = []
        list_of_events = []
        list_of_feed_ids = []
        paths_of_pcap_files = []
        for d, r, f in next(walk(path_of_pcap_files_folder)):
            for file in f:
                if file.lower().endswith('.pcap'):
                    paths_of_pcap_files.append(os.path.join(r, file))
        for path in paths_of_pcap_files:
            list_of_cbor_events.extend(PCAP.read_pcap(path))
        for event in list_of_cbor_events:
            list_of_events.append(Event.from_cbor(event))
        for event in list_of_events:
            if event.meta.feed_id not in list_of_feed_ids:
                list_of_feed_ids.append(event.meta.feed_id)
        for feed_id in list_of_feed_ids:
            most_recent_seq_no = self.__get_most_recent_seq_no(feed_id, list_of_events)
            db_seq_no = self.DB.get_current_seq_no(feed_id)
            if db_seq_no == -1:
                self.__verify_and_add_logs(0, feed_id, list_of_events)
            elif most_recent_seq_no <= db_seq_no:
                return
            else:
                self.__verify_and_add_logs(db_seq_no + 1, feed_id, list_of_events)

    def __get_most_recent_seq_no(self, feed_id, list_of_events):
        most_rec_seq_no = -1
        for event in list_of_events:
            if event.meta.feed_id == feed_id and most_rec_seq_no < event.meta.seq_no:
                most_rec_seq_no = event.meta.seq_no
        return most_rec_seq_no

    def __verify_and_add_logs(self, start_seq_no, feed_id, list_of_events):
        list_of_new_events = []
        for event in list_of_events:
            if event.meta.seq_no >= start_seq_no:
                list_of_new_events.append(event)
        if start_seq_no == 0:
            prev_event = None
        else:
            prev_event = Event.from_cbor(self.DB.get_current_event(feed_id))
        while list_of_new_events:
            event_with_lowest_seq_no = self.__get_event_with_lowest_seq_no_from_list(list_of_new_events)
            if self.__verify_event(event_with_lowest_seq_no, prev_event):
                self.DB.add_event(event_with_lowest_seq_no.get_as_cbor())
                # self.DB.add_event(feed_id, event_with_lowest_seq_no.meta.seq_no, event_with_lowest_seq_no.get_as_cbor())
            else:
                return
            prev_event = event_with_lowest_seq_no
            list_of_new_events.remove(prev_event)

    def __get_event_with_lowest_seq_no_from_list(self, list_of_events):
        if not list_of_events:
            return None
        lowest_seq_no = list_of_events[0].meta.seq_no
        for event in list_of_events:
            if event.meta.seq_no < lowest_seq_no:
                lowest_seq_no = event.meta.seq_no
        for event in list_of_events:
            if event.meta.seq_no == lowest_seq_no:
                return event
        return None

    def __verify_event(self, event, previous_event=None):
        if previous_event is not None:
            previous_hash_type, hash_of_previous = event.meta.hash_of_prev
            prev_meta_as_cbor = previous_event.meta.get_as_cbor()
            if previous_event.meta.feed_id != event.meta.feed_id:
                return False
            if event.meta.seq_no - 1 != previous_event.meta.seq_no:
                return False
            if not(previous_hash_type == 0 and hashlib.sha256(prev_meta_as_cbor).digest() == hash_of_previous):
                return False

        content_hash_type, hash_of_content = event.meta.hash_of_content
        signature_identifier = event.meta.signature_info
        signature = event.signature

        content = event.content.get_as_cbor()
        meta_as_cbor = event.meta.get_as_cbor()

        if not(content_hash_type == 0 and hashlib.sha256(content).digest() == hash_of_content):
            return False

        if signature_identifier == 0:
            verification_key = nacl.signing.VerifyKey(event.meta.feed_id)
            try:
                verification_key.verify(meta_as_cbor, signature)
            except nacl.exceptions.BadSignatureError:
                return False
        # This code is ready to be used, but nobody is using Hmac right now.
        # elif signature_identifier == 1:
        #     secret_key = self.DB.get_secret_hmac_key(event.meta.feed_id)
        #     if secret_key is None:
        #         return False
        #     generated_signature = hmac.new(secret_key, meta_as_cbor, hashlib.sha256).digest()
        #     if signature != generated_signature:
        #         return False
        else:
            return False

        return True
Example #19
 def __init__(self):
     if not os.path.exists('cborDatabase.sqlite'):
         import feedCtrl.uiFunctionsHandler
         feedCtrl.uiFunctionsHandler.UiFunctionHandler()
     self.DB = DatabaseConnector()
     self.EV = Verification()
Example #20
 def __init__(self):
     self.DB = DatabaseConnector()
     self.EV = Verification()
Example #21


if __name__ == '__main__':
    logMerge = LogMerge()
    from EventCreationTool import EventFactory
    dc = DatabaseConnector()
    ef = EventFactory()
    first_event = ef.first_event('chat', dc.get_master_feed_id())
    second_event = ef.next_event('chat/okletsgo', {
        'messagekey': 759432,
        'timestampkey': 2345,
        'chat_id': 745
    })
    PCAP.write_pcap('nameofpcapfile', [first_event, second_event])
    logMerge.import_logs(os.getcwd())
    logMerge.export_logs(os.getcwd(), {ef.get_feed_id(): -1}, 10)
    events = PCAP.read_pcap('nameofpcapfile.pcap')
    for event in events:
        event = Event.from_cbor(event)
        print(event.content.content[1]['master_feed'].hex())
        break