def sha256(file_path):
    ''' Given a file path calculate the sha256 of its contents '''
    contents = read_contents(file_path)
    return _sha256(contents).hexdigest()
def sha256(self, filename):
    ''' Return SHA-256 hex digest of local file using digest_from_file(). '''
    if not HAVE_HASHLIB:
        self.fail_json(msg="SHA-256 checksums require hashlib, which is available in Python 2.5 and higher")
    return self.digest_from_file(filename, _sha256())
def sha256_file(filename, chunk_size=1024*1024*16):
    ''' Helper function for hashing a single file in chunks '''
    hasher = _sha256()
    with open(filename, 'rb') as fid:
        data = fid.read(chunk_size)
        while data:
            hasher.update(data)
            data = fid.read(chunk_size)
    return hasher.hexdigest()
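A minimal usage sketch for sha256_file() above, assuming a local file named 'example.bin' exists (the name is illustrative only) and that _sha256 is hashlib.sha256 as in the surrounding snippets; chunked reading keeps memory bounded, and the digest matches a one-shot hash of the same contents.

from hashlib import sha256 as _sha256

# hash the whole file in one read for comparison
with open('example.bin', 'rb') as fid:
    one_shot = _sha256(fid.read()).hexdigest()

# the chunked helper should produce the identical hex digest
assert sha256_file('example.bin') == one_shot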
def _dblsha256(data):
    return _sha256(_sha256(data).digest()).digest()

def sha256(bytestr: bytes) -> bytes:
    return _sha256(bytestr).digest()

def double_sha256(bytestr):
    return _sha256(_sha256(bytestr).digest()).digest()

def sha256(x):
    '''Simple wrapper of hashlib sha256.'''
    return _sha256(x).digest()

def sha256(msg):
    return _sha256(msg).digest()

def sha256(s):
    return _sha256(s.encode('utf-8')).hexdigest()

def dsha256(x):
    '''SHA-256 of SHA-256, as used extensively in bitcoin.'''
    return _sha256(_sha256(x).digest()).digest()
import math
import time
import os
import re
from base64 import b64encode, b64decode
from itertools import chain, cycle, repeat, count, combinations, combinations_with_replacement, product, islice
from aes import AES
from collections import Counter, OrderedDict, namedtuple
from twister import Twister
from hashes import sha1, md5, md4
from hashlib import sha256 as _sha256
from heapq import heappush, heappop

sha256 = lambda m: _sha256(m).digest()

def from_int(a, endianness='big'):
    if not a:
        return b'\x00'
    n_bytes = math.ceil(math.log2(a+1) / 8)
    return a.to_bytes(n_bytes, endianness)

def to_int(a, endianness='big'):
    return int.from_bytes(a, byteorder=endianness)

def increment(b, start=-1):
    # increment a mutable byte sequence in place, carrying into the
    # preceding byte when the current byte overflows past 0xFF
    if b[start] == 0xFF:
        b[start] = 0
        increment(b, start-1)
    else:
        b[start] += 1

bin_chars = '01'
hex_chars = '0123456789abcdef'
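A brief sketch, not part of the original module, of how these helpers behave; increment() needs a mutable sequence such as bytearray so it can modify bytes in place.

# round-trip between integers and big-endian byte strings
assert from_int(0x1234) == b'\x12\x34'
assert to_int(b'\x12\x34') == 0x1234

# increment carries into the preceding byte when the last byte is 0xFF
buf = bytearray(b'\x00\xff')
increment(buf)
assert bytes(buf) == b'\x01\x00'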
def __call__(self, bytestr: bytes) -> bytes:
    return _sha256(bytestr).digest()

def sha256(s):
    return _sha256(s.encode("utf-8")).hexdigest()
def changed_files(backup1, backup2):
    ''' Yield files that are different or only exist in one of the backups.

    Yields file-tuples: (file1, file2)

    If a file exists only in backup1, this will yield (file1, None)
    If a file exists only in backup2, this will yield (None, file2)
    If the contents of the files differ, both files are yielded: (file1, file2)

    Note that we only compare files with the same domain, relativePath
    properties. Moved or otherwise duplicate files are not considered between
    backups. Also, when the size is different, no actual comparing is done,
    based on the fact that files of different size have different contents
    by definition.
    '''
    # we are only interested in regular files
    b1recs = filter(lambda r: r.filetype == FileType.RegularFile, backup1.filerecords())
    b2recs = filter(lambda r: r.filetype == FileType.RegularFile, backup2.filerecords())

    # put filerecords for both sets in a dictionary by (domain, relativePath)
    b1dict = {(r.domain, r.relativePath): r for r in b1recs}
    b2dict = {(r.domain, r.relativePath): r for r in b2recs}

    b1keys = set(b1dict.keys())
    b2keys = set(b2dict.keys())

    # files that only exist in b1
    for k in b1keys - b2keys:
        yield (b1dict[k], None)

    # files that only exist in b2
    for k in b2keys - b1keys:
        yield (None, b2dict[k])

    # files that exist in both sets
    for k in b1keys.intersection(b2keys):
        f1 = b1dict[k]
        f2 = b2dict[k]
        if f1.size != f2.size:
            # if size differs, so does contents
            yield (f1, f2)
        elif f1.size == 0:
            # both are empty, equal!
            pass
        else:
            # compare by hash
            if backup1.backuptype == BackupType.IOS10:
                d1 = _os.path.join(backup1.rootdir, f1.fileID[0:2], f1.fileID)
            elif backup1.backuptype == BackupType.IOS5TO9:
                d1 = _os.path.join(backup1.rootdir, f1.fileID)
            else:
                raise ValueError('unexpected backuptype')
            s1 = _sha256()
            with open(d1, 'rb') as f:
                s1.update(f.read())

            if backup2.backuptype == BackupType.IOS10:
                d2 = _os.path.join(backup2.rootdir, f2.fileID[0:2], f2.fileID)
            elif backup2.backuptype == BackupType.IOS5TO9:
                d2 = _os.path.join(backup2.rootdir, f2.fileID)
            else:
                raise ValueError('unexpected backuptype')
            s2 = _sha256()
            with open(d2, 'rb') as f:
                s2.update(f.read())

            if s1.digest() != s2.digest():
                yield (f1, f2)
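A hypothetical usage sketch for changed_files(); backup1 and backup2 stand in for already-constructed backup objects that provide the filerecords(), rootdir and backuptype attributes the generator above relies on.

# walk the differences between two backups and report them
for f1, f2 in changed_files(backup1, backup2):
    if f1 is None:
        print('only in second backup:', f2.relativePath)
    elif f2 is None:
        print('only in first backup:', f1.relativePath)
    else:
        print('contents differ:', f1.relativePath)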
def sha256(x):
    return _sha256(x).digest()

def sha256_digest(data):
    return _sha256(data).digest()

def sha256(bytestr):
    return _sha256(bytestr).digest()

def sha256_hexdigest(data):
    return _sha256(data).hexdigest()

def double_sha256(data):
    return _sha256(_sha256(data).digest()).digest()

def sha256(x):
    """Simple wrapper of hashlib sha256."""
    return _sha256(x).digest()
import pytest

from noiseprotocol.crypto.hash import SHA256, SHA512, BLAKE2s, BLAKE2b
from hashlib import sha256 as _sha256, sha512 as _sha512
from pyblake2 import blake2s as _blake2s, blake2b as _blake2b


def test_SHA256_name():
    assert SHA256().name == 'SHA256'


@pytest.mark.parametrize(
    'data,expected_digest',
    [
        (b'\x00', _sha256(b'\x00').digest()),
        (b'\x00'*16, _sha256(b'\x00'*16).digest()),
        (b'\x00'*32, _sha256(b'\x00'*32).digest()),
        (b'\x00'*64, _sha256(b'\x00'*64).digest()),
        (b'\x00'*128, _sha256(b'\x00'*128).digest()),
    ]
)
def test_SHA256_kat(data, expected_digest):
    assert SHA256().hash(data) == expected_digest


def test_SHA512_name():
    assert SHA512().name == 'SHA512'


@pytest.mark.parametrize(