Example #1
    def buttonClicked_3(self):
        p = int(unicode(self.lineEdit_8.text()))
        q = int(unicode(self.lineEdit_9.text()))

        n = p * q

        self.textBrowser_3.append("p = {0}, q = {1}, n = {2}".format(p, q, n))
        input = self.plainTextEdit_3.toPlainText()
        self.textBrowser_3.append("Text lenght: {0}".format(len(input)))
        if self.radioButton_5.isChecked():  # encode (mode 1)
            [time, output] = rabin.rabin(unicode(input), 1, p, q)
            self.textBrowser_3.append("Encoded text: {0}".format(
                str(unicode(output))))
            self.textBrowser_3.append("Time elapsed: {0} s<br>".format(
                str(unicode(time))))
        if self.radioButton_6.isChecked():  # decode (mode 0)
            [time, a, b, c, d] = rabin.rabin(unicode(input), 0, p, q)
            self.textBrowser_3.append("Decoded text:")
            self.textBrowser_3.append("{0}".format(str(unicode(a))))
            self.textBrowser_3.append("{0}".format(str(unicode(b))))
            self.textBrowser_3.append("{0}".format(str(unicode(c))))
            self.textBrowser_3.append("{0}".format(str(unicode(d))))
            self.textBrowser_3.append("Time elapsed: {0} s<br>".format(time))
Example #2
#!/usr/bin/python
# -*- coding: utf-8 -*-
from rabin import rabin

print rabin("testfile")
Example #3
    def __init__(self):
        self.des = DES()
        self.rabin = rabin()
Example #4
File: common.py  Project: cschwede/safebox
def backup(backend, src, tag="default"):
    # Try to load old metadata from latest backup
    old_backups = backend.list(prefix="b-*")
    old_meta_data = {}
    if old_backups:
        backup_id = utils.newest_backup_id(old_backups)
        om = backend.get(backup_id)
        try:
            old_meta_data = json.loads(om)
        except ValueError:
            pass

    start_time = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    path = os.path.expanduser(src)
    files = utils.find_modified_files(path)
    chunk_size = chunk_count = changed_bytes = 0
    for filename, meta in files.items():
        # Assume the file is unchanged if neither mtime nor size has changed
        old = old_meta_data.get(unicode(filename, 'utf-8'))
        if old and old['m'] == meta['m'] and old['s'] == meta['s']:
            old_checksum = old.get('c')
            if old_checksum:
                meta['c'] = old_checksum
            logging.info("Skipped unchanged %s" % filename)
            continue

        fullname = os.path.join(path, filename)
        if not S_ISREG(meta['p']):  # not a file
            continue

        my_sha256 = hashlib.sha256()

        chunk_checksums = []
        try:
            chunks = rabin(fullname)
        except IOError:
            logging.warning("%s not found, skipping" % fullname)
            continue
        with open(fullname) as infile:
            for chunksize in chunks:
                data = infile.read(chunksize)
                my_sha256.update(data)
                chunk_checksum = utils.sha256_string(data)
                name = "c-%s" % chunk_checksum
                chunk_checksums.append(chunk_checksum)
                stored = backend.put(name, data)
                changed_bytes += len(data)
                if stored:
                    chunk_size += len(data)
                    chunk_count += 1
        if len(chunk_checksums) > 1:
            checksum = my_sha256.hexdigest()
            name = "o-%s" % checksum
            backend.put(name, ';'.join(chunk_checksums))
        else:
            name = "c-%s" % chunk_checksums[0]
        meta['c'] = name
        logging.info(fullname)

    # write backup summary
    meta_data = json.dumps(files)
    suffix = ''.join(random.choice(ascii_letters + digits) for _ in range(8))
    backup_id = "b-%s-%s-%s" % (tag, start_time, suffix)
    backend.put(backup_id, meta_data)
    logging.info("Finished backup %s. %s bytes changed" % (
                 backup_id, changed_bytes))
    logging.info("Stored %s new objects with a total size of %s bytes" % (
                 chunk_count, chunk_size))
    return backup_id
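A usage sketch for backup(), not taken from the safebox project: the backend interface (list, get, put) and its semantics are assumptions inferred from the calls in the function above, and MemoryBackend is a hypothetical stand-in. It assumes the project's backup() and utils helpers are importable.

# Hypothetical in-memory backend exposing the three methods backup() relies on.
class MemoryBackend(object):
    def __init__(self):
        self.objects = {}

    def list(self, prefix=""):
        # Return stored object names matching the prefix (ignoring the '*').
        pre = prefix.rstrip("*")
        return [name for name in self.objects if name.startswith(pre)]

    def get(self, name):
        return self.objects.get(name)

    def put(self, name, data):
        # Return True only if the object is new, so backup() can count it.
        if name in self.objects:
            return False
        self.objects[name] = data
        return True

backup_id = backup(MemoryBackend(), "~/Documents", tag="docs")

Having put() report whether the object was actually new is what lets backup() count the number of new chunks and their total stored size, while still accounting for all changed bytes.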