Code example #1
File: sneakernet.py  Project: witjon/BACnet
def my_log_append(log_fn, body):
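    # append one new signed event carrying the JSON-encoded 'body' to the log file
    # 'log_fn', continuing that log's sequence numbering with the local keypair as author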
    lg = log.PCAP()
    lg.open(log_fn, 'r')
    prev = None
    feed = None
    seq = 0
    t = gg.TRANSFER()
    # find last event
    for block in lg:
        t.from_cbor(block)
        prev = t.event.get_sha256()   # hash of the last event, so the appended event chains to it
        feed = t.event.feed
        seq = t.event.seq
    lg.close()

    lg.open(log_fn, 'a')
    e = gg.EVENT(prev=prev,
                 feed=keypair.public,
                 seq=seq + 1,
                 time=int(time.time()),
                 content=bytes(json.dumps(body), 'utf-8'),
                 content_enc=gg.GG_CONTENT_ENCODING_JSON)
    e.signature = keypair.sign(e.event_to_cbor())
    t = gg.TRANSFER(e)
    lg.write(t.to_cbor())
    lg.close()
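A minimal usage sketch, assuming the surrounding module (gg, log, json, time and the global keypair) is set up as in the rest of sneakernet.py; the log path and the message body below are invented for illustration:

    my_log_append('logs/1.pcap',
                  {"app": "feed/message", "text": "hello"})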
Code example #2
 def ingest(self, data):
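     # add one received event block to the per-feed database and to that feed's
     # pcap log file; blocks that do not carry the next expected sequence number are ignored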
     t = gg.TRANSFER()
     t.from_cbor(data)
     feed = t.event.feed
     seq = t.event.seq
     lg = pcap.PCAP()
     if not feed in self.db:
         self.max_fn_number += 1
         self.fn[feed] = os.path.join(self.dirname,
                                      str(self.max_fn_number) + '.pcap')
         lg.open(self.fn[feed], 'w')
         lg.close()
         self.db[feed] = {}
         self.max[feed] = 0
     # print(f"-- ingesting {seq} into {self.fn[feed]}")
     if seq != self.max[feed] + 1:  # TODO: should also check prev field
         print("-- mismatch:", seq, self.max[feed] + 1, ", ignored")
         return
     self.db[feed][seq] = data
     self.max[feed] += 1
     lg.open(self.fn[feed], 'a')
     lg.write(data)
     lg.close()
     print(
         f"-- ingested event {base64.b64encode(feed).decode('utf8')}:{seq}")
Code example #3
    def load(self, dirname):
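        # read every pcap file in 'dirname' into the in-memory per-feed database and
        # remember each feed's highest sequence number; returns the number of events read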
        if not os.path.isdir(dirname):
            return 0
        self.dirname = dirname

        lg = pcap.PCAP()
        t = gg.TRANSFER()
        for fn in os.listdir(dirname):
            # remember highest file number, if we have to create a new file
            i = int(fn.split('.')[0])
            if self.max_fn_number < i:
                self.max_fn_number = i

            fn = os.path.join(dirname, fn)
            lg.open(fn, 'r')
            for block in lg:
                self.cnt += 1
                t.from_cbor(block)
                feed = t.event.feed
                if not feed in self.fn:
                    self.fn[feed] = fn
                    self.max[feed] = -1
                    self.db[feed] = {}
                seq = t.event.seq
                if seq > self.max[feed]:
                    self.max[feed] = seq
                self.db[feed][seq] = block
            lg.close()
        return self.cnt
Code example #4
File: sneakernet.py  Project: witjon/BACnet
def feed_get_display_name(log_fn):
    # returns a <feedID,display_name> tuple
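    # (the name is taken from the latest 'feed/about' event that carries a 'display_name')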
    feed = None
    name = None
    lg = log.PCAP()
    lg.open(log_fn, 'r')
    t = gg.TRANSFER()
    for block in lg:
        t.from_cbor(block)
        if not feed:
            feed = t.event.feed
        c = t.event.content
        if not c:
            continue
        m = json.loads(c)
        if 'app' in m and m['app'] == 'feed/about' and 'display_name' in m:
            name = m['display_name']
    lg.close()
    return (feed, name)
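For reference, a possible call (the path below is invented; either element of the returned tuple may be None if the log is empty or carries no 'feed/about' event with a 'display_name'):

    feed_id, display_name = feed_get_display_name('logs/1.pcap')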
Code example #5
File: sneakernet.py  Project: witjon/BACnet
def output_chat(stdscr):
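    # collect every 'feed/message' event and every 'feed/about' display name from all
    # logs in LOGS_DIR, then hand them to pp() for rendering in the curses window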
    t = gg.TRANSFER()
    lg = log.PCAP()
    pp_list = []
    name_list = {}
    for file in os.listdir(LOGS_DIR):
        lg.open(os.path.join(LOGS_DIR, file), "r")
        nick_name = []
        for block in lg:
            t.from_cbor(block)
            c = t.event.content
            if c is not None:
                # print(f"** {base64.b64encode(t.event.feed).decode('utf8')}/{t.event.seq}")
                # print(str(c, 'utf8'))
                m = json.loads(c)
                if m['app'] == "feed/message":
                    pp_list.append([t.event.time, m])
                if m['app'] == "feed/about":
                    name_list[m['feed']] = m['display_name']
                # print(m)
            else:
                scr_print(stdscr, f"** {n}: no content")
        lg.close()
    pp(pp_list, name_list, stdscr)
Code example #6
File: import-from.py  Project: ckschim/SJFProject
    # one optional parameter: -new_name
    import_dir = sys.argv[1]
    
    print("Welcome to SneakerNet\n")
    print(f"** importing new events from '{import_dir}'")
    print()

    if not os.path.isdir(import_dir):
        print(f"** directory not found, aborting")
        sys.exit()

    new_db = {}
    new_cnt = 0
    lg = log.PCAP()
    t = gg.TRANSFER()
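    # scan every pcap file in the import directory, grouping its blocks by feed and sequence number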
    for fn in os.listdir(import_dir):
        fn = os.path.join(import_dir, fn)
        lg.open(fn, 'r')
        for block in lg:
            t.from_cbor(block)
            feed = t.event.feed
            seq = t.event.seq
            if not feed in new_db:
                new_db[feed] = {}
            if not seq in new_db[feed]:
                new_db[feed][seq] = []
            new_db[feed][seq].append(block)
            new_cnt += 1
        lg.close()
    print(f"** found {new_cnt} event(s) in '{import_dir}'")
Code example #7
File: sn.py  Project: ckschim/SJFProject
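    # build a second event that chains to e1 via its SHA-256 hash, then sign it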
    e2 = event.EVENT(prev=e1.get_sha256(),
                     feed=keypair.public,
                     seq=2,
                     time=1564444801,
                     content=b'{\n  "name": "foobar.box"\n}\n',
                     content_enc=event.GG_CONTENT_ENCODING_JSON)
    e2.signature = keypair.sign(e2.event_to_cbor())

    # --------------------------------------------------
    # write two events into a log

    lg = log.PCAP()
    lg.open('test.pcap', 'w')

    for e in [e1, e2]:
        t = event.TRANSFER(e)
        lg.write(t.to_cbor())
    lg.close()

    # --------------------------------------------------
    # read all events from the log, pretty-print them

    lg.open('test.pcap', 'r')
    n = 0
    while True:
        # print(f"offs={lg.offset}")
        block = lg.read()
        if block is None:
            break
        # print(f"pcap block {n}:\n", block, '\n')
        t = event.TRANSFER()
Code example #8
File: export-to.py  Project: ckschim/SJFProject
def export():
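    # copy every locally known event that the export directory does not have yet
    # into a freshly named pcap file inside that directory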
    LOGS_DIR = 'logs'
    MY_LOG_FILE = '1.pcap'  # inside logs dir

    # ----------------------------------------------------------------------
    if __name__ == '__main__':

        # one optional parameter: -new_name
        export_dir = sys.argv[1]

        print("Welcome to SneakerNet\n")
        print(f"** exporting new events to '{export_dir}'")
        print()

        if not os.path.isdir(export_dir):
            print(f"** directory not found, aborting")
            sys.exit()

        lg = log.PCAP()
        t = gg.TRANSFER()

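        # index everything we have locally, per feed and sequence number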
        have_db = {}
        have_max = {}
        have_cnt = 0
        for fn in os.listdir(LOGS_DIR):
            lg.open(os.path.join(LOGS_DIR, fn), 'r')
            for block in lg:
                t.from_cbor(block)
                feed = t.event.feed
                seq = t.event.seq
                if not feed in have_db:
                    have_db[feed] = {}
                    have_max[feed] = 0
                have_db[feed][seq] = block
                if seq > have_max[feed]:
                    have_max[feed] = seq
                have_cnt += 1
            lg.close()
        print(f"** found {have_cnt} event(s) in directory '{LOGS_DIR}'")

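        # index what the export directory already contains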
        target_db = {}
        target_cnt = 0
        for fn in os.listdir(export_dir):
            fn = os.path.join(export_dir, fn)
            lg.open(fn, 'r')
            for block in lg:
                t.from_cbor(block)
                feed = t.event.feed
                seq = t.event.seq
                if not feed in target_db:
                    target_db[feed] = {}
                if not seq in target_db[feed]:
                    target_db[feed][seq] = []
                # target_db[feed][seq].append(block)
                target_cnt += 1
            lg.close()
        print(
            f"** found {target_cnt} event(s) in target directory '{export_dir}'"
        )

        # create file with unique file name
        log_fn = None
        while True:
            log_fn = 'x' + str(random.randint(10000000,
                                              19999999))[1:] + '.pcap'
            log_fn = os.path.join(export_dir, log_fn)
            if not os.path.isfile(log_fn):
                break

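        # write every event the target directory is missing into the new file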
        lg.open(log_fn, 'w')
        update_cnt = 0
        for feed in have_db:
            for i in range(0, have_max[feed]):
                if not feed in target_db or not i + 1 in target_db[feed]:
                    if update_cnt == 0:
                        print()
                    print(
                        f"** exporting {base64.b64encode(feed).decode('utf8')}/{i+1}"
                    )
                    lg.write(have_db[feed][i + 1])
                    update_cnt += 1
        lg.close()

        print()
        if update_cnt == 0:
            os.unlink(log_fn)
            print("** no events exported")
        else:
            print(f"** exported {update_cnt} event(s) to '{log_fn}'")
Code example #9
    keypair = crypto.ED25519()
    keypair.create()
    seq = 1
    prev = None
    lg = log.PCAP()

    lg.open(LOG_FILE_NAME, 'w')

    print(f"creating new log '{LOG_FILE_NAME}'")
    while True:
        content = input("\n** type in your message (or RETURN to leave): ")
        if content == "":
            end = input(">> do you really want to leave? (y/N) ")
            if end == "y":
                break
            continue
        event = create_event(content, seq, prev, keypair.public)
        event.signature = keypair.sign(event.event_to_cbor())
        t = gg.TRANSFER(event)
        lg.write(t.to_cbor())
        print('>> wrote', event.pretty_print())
        seq += 1
        prev = event.get_sha256()

    print('\n' + f"** wrote {seq-1} messages to {LOG_FILE_NAME}")

    lg.close()

# eof
Code example #10
File: sneakernet.py  Project: witjon/BACnet
def export(stdscr):
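    # curses variant of the export step: copy locally known events that are missing
    # from export_dir into a freshly named pcap file inside that directory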
    export_dir = c_input(stdscr, "enter path: ")

    scr_print(stdscr, f"** exporting new events to '{export_dir}'\n")
    print()

    if not os.path.isdir(export_dir):
        scr_print(stdscr, "directory not found, press ENTER to go back\n")
        return

    lg = log.PCAP()
    t = gg.TRANSFER()

    have_db = {}
    have_max = {}
    have_cnt = 0
    for fn in os.listdir(LOGS_DIR):
        lg.open(os.path.join(LOGS_DIR, fn), 'r')
        for block in lg:
            t.from_cbor(block)
            feed = t.event.feed
            seq = t.event.seq
            if not feed in have_db:
                have_db[feed] = {}
                have_max[feed] = 0
            have_db[feed][seq] = block
            if seq > have_max[feed]:
                have_max[feed] = seq
            have_cnt += 1
        lg.close()
    scr_print(stdscr,
              f"** found {have_cnt} event(s) in directory '{LOGS_DIR}'\n")

    target_db = {}
    target_cnt = 0
    for fn in os.listdir(export_dir):
        fn = os.path.join(export_dir, fn)
        lg.open(fn, 'r')
        for block in lg:
            t.from_cbor(block)
            feed = t.event.feed
            seq = t.event.seq
            if not feed in target_db:
                target_db[feed] = {}
            if not seq in target_db[feed]:
                target_db[feed][seq] = []
            # target_db[feed][seq].append(block)
            target_cnt += 1
        lg.close()
    scr_print(
        stdscr,
        f"** found {target_cnt} event(s) in target directory '{export_dir}'\n")

    # create file with unique file name
    log_fn = None
    while True:
        log_fn = 'x' + str(random.randint(10000000, 19999999))[1:] + '.pcap'
        log_fn = os.path.join(export_dir, log_fn)
        if not os.path.isfile(log_fn):
            break

    lg.open(log_fn, 'w')
    update_cnt = 0
    for feed in have_db:
        for i in range(0, have_max[feed]):
            if not feed in target_db or not i + 1 in target_db[feed]:
                if update_cnt == 0:
                    print()
                scr_print(
                    stdscr,
                    f"** exporting {base64.b64encode(feed).decode('utf8')}/{i + 1}\n"
                )
                lg.write(have_db[feed][i + 1])
                update_cnt += 1
    lg.close()

    print()
    if update_cnt == 0:
        os.unlink(log_fn)
        scr_print(stdscr, "** no events exported\n")
    else:
        stdscr.addstr(f"** exported {update_cnt} event(s) to '{log_fn}'\n")
Code example #11
File: sneakernet.py  Project: witjon/BACnet
def import_log(stdscr):
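    # curses variant of the import step: pull events from import_dir into the local
    # 'logs' directory, creating a log file per new feed and appending in sequence order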
    import_dir = c_input(stdscr, "enter path: ")

    if not os.path.isdir(import_dir):
        scr_print(stdscr, "directory not found, press ENTER to go back\n")
        return

    new_db = {}
    new_cnt = 0
    lg = log.PCAP()
    t = gg.TRANSFER()
    for fn in os.listdir(import_dir):
        fn = os.path.join(import_dir, fn)
        lg.open(fn, 'r')
        for block in lg:
            t.from_cbor(block)
            feed = t.event.feed
            seq = t.event.seq
            if not feed in new_db:
                new_db[feed] = {}
            if not seq in new_db[feed]:
                new_db[feed][seq] = []
            new_db[feed][seq].append(block)
            new_cnt += 1
        lg.close()
    scr_print(stdscr, f"** found {new_cnt} event(s) in '{import_dir}'\n")

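    # index what we already have locally: each feed's log file and its highest sequence number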
    have_fn = {}
    have_max = {}
    have_cnt = 0
    max_fn_number = 1
    for fn in os.listdir(LOGS_DIR):
        # remember highest file number, if we have to create a new file
        i = int(fn.split('.')[0])
        if max_fn_number < i:
            max_fn_number = i

        lg.open(os.path.join(LOGS_DIR, fn), 'r')
        for block in lg:
            have_cnt += 1
            t.from_cbor(block)
            feed = t.event.feed
            if not feed in have_fn:
                have_fn[feed] = fn
            seq = t.event.seq
            if not feed in have_max:
                have_max[feed] = -1
            if seq > have_max[feed]:
                have_max[feed] = seq
        lg.close()
    scr_print(stdscr, f"** found {have_cnt} event(s) in '{LOGS_DIR}'\n")

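    # create a fresh log file for every feed we have never seen before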
    update_cnt = 0
    for feed in new_db:
        if not feed in have_fn:
            max_fn_number += 1
            have_fn[feed] = os.path.join(LOGS_DIR,
                                         str(max_fn_number) + '.pcap')
            have_max[feed] = 0
            if update_cnt == 0:
                print()
            scr_print(
                stdscr,
                f"** creating {have_fn[feed]} for {base64.b64encode(feed).decode('utf8')}\n"
            )
            lg.open(have_fn[feed], 'w')
            lg.close()
            update_cnt += 1

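    # append the new events to each feed's log file, strictly in sequence order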
    update_cnt = 0
    for feed in have_fn:
        if not feed in new_db:
            continue
        lg.open(have_fn[feed], 'a')
        # print(f"** testing {have_fn[feed]}, seq={have_max[feed]}")
        while have_max[feed] + 1 in new_db[feed]:
            have_max[feed] += 1
            if update_cnt == 0:
                print()
            scr_print(
                stdscr,
                f"** import {base64.b64encode(feed).decode('utf8')}/{have_max[feed]}\n"
            )
            lg.write(new_db[feed][have_max[feed]][0])
            update_cnt += 1
        lg.close()

    scr_print(
        stdscr,
        f"** imported {update_cnt} event(s) to the '{LOGS_DIR}' directory\n")