def _get(self, file, key_path):
    """Download the S3 object ``key_path`` from the configured bucket into ``file``.

    Streams the object body in ``self._blk_size`` chunks, writing each chunk
    to ``file`` while feeding a SHA2-256 digest.  The digest is wrapped as a
    'sha2-256' multihash inside a 'dag-pb' CIDv1, and that CID string is
    checked against ``key_path`` via ``self.check_integrity``.

    Returns:
        True when the download passes the integrity check, False otherwise.
    """
    bucket = self._bucket
    s3_resource = self._store
    res = s3_resource.Object(bucket, key_path).get()
    c = res['Body']
    log.debug('Get - downloading [%s] from bucket [%s] into file [%s]' % (key_path, bucket, file),
              class_name=S3_MULTI_HASH_STORE_NAME)
    try:
        with open(file, 'wb') as f:
            m = hashlib.sha256()
            while True:
                chunk = c.read(self._blk_size)
                if not chunk:
                    break
                m.update(chunk)
                f.write(chunk)
    finally:
        # Bug fix: the original only closed the streaming body on the success
        # path, leaking the HTTP connection whenever the integrity check
        # failed.  Close it on every path once the transfer loop is done.
        c.close()
    h = m.hexdigest()
    mh = multihash.encode(bytes.fromhex(h), 'sha2-256')
    cid = CIDv1('dag-pb', mh)
    ncid = str(cid)
    if self.check_integrity(key_path, ncid) is False:
        return False
    return True
def digest(self, data):
    """Return the CID (v1, 'dag-pb' codec, sha2-256 multihash) of ``data`` as a string."""
    hex_digest = hashlib.sha256(data).hexdigest()
    wrapped = multihash.encode(bytes.fromhex(hex_digest), 'sha2-256')
    return str(CIDv1('dag-pb', wrapped))
def _verify_chunk_integrity(self, corrupted_files, corrupted_files_fullpaths, file, fullpath, m, root):
    """Verify one stored chunk against its CID-named file.

    ``m`` is a hash object already fed with the chunk's bytes; ``file`` is the
    chunk's expected CIDv1 string and ``fullpath`` its location on disk.
    Chunks whose recomputed CID differs from ``file``, or whose directory
    layout fails ``self._is_valid_hashpath``, are appended to both the
    ``corrupted_files`` and ``corrupted_files_fullpaths`` output lists.
    """
    chunk_hex = m.hexdigest()
    encoded = multihash.encode(bytes.fromhex(chunk_hex), 'sha2-256')
    computed_cid = CIDv1('dag-pb', encoded)
    computed = str(computed_cid)
    if computed == file:
        log.debug(output_messages['DEBUG_CHECKSUM_VERIFIED'] % computed_cid, class_name=HASH_FS_CLASS_NAME)
        # Digest matches, but the on-disk placement may still be wrong.
        if not self._is_valid_hashpath(root, file):
            corrupted_files.append(file)
            corrupted_files_fullpaths.append(fullpath)
    else:
        log.debug(output_messages['DEBUG_CORRUPTION_DETECTED'] % (file, computed), class_name=HASH_FS_CLASS_NAME)
        corrupted_files.append(file)
        corrupted_files_fullpaths.append(fullpath)
def _verify_chunk_integrity(self, corrupted_files, corrupted_files_fullpaths, file, fullpath, m, root):
    """Verify one stored chunk against its CID-named file.

    ``m`` is a hash object already fed with the chunk's bytes; ``file`` is the
    chunk's expected CIDv1 string and ``fullpath`` its location on disk.
    Mismatched digests are logged as errors; chunks failing either the digest
    comparison or ``self._is_valid_hashpath`` are appended to both output lists.
    """
    chunk_hex = m.hexdigest()
    encoded = multihash.encode(bytes.fromhex(chunk_hex), 'sha2-256')
    computed_cid = CIDv1('dag-pb', encoded)
    computed = str(computed_cid)
    if computed == file:
        log.debug('Checksum verified for chunk [%s]' % computed_cid, class_name=HASH_FS_CLASS_NAME)
        # Digest matches, but the on-disk placement may still be wrong.
        if not self._is_valid_hashpath(root, file):
            corrupted_files.append(file)
            corrupted_files_fullpaths.append(fullpath)
    else:
        log.error('Corruption detected for chunk [%s] - got [%s]' % (file, computed), class_name=HASH_FS_CLASS_NAME)
        corrupted_files.append(file)
        corrupted_files_fullpaths.append(fullpath)
def test_cidv1_to_cidv0_no_dag_pb(self):
    """A CIDv1 whose codec is not dag-pb cannot be downgraded to v0."""
    with pytest.raises(ValueError) as err:
        CIDv1('base2', test_hash).to_v0()
    assert 'can only be converted for codec' in str(err.value)
def test_cidv1_eq_cidv0(self):
    """A dag-pb CIDv1 downgraded to v0 equals the directly-built CIDv0."""
    downgraded = CIDv1(CIDv0.CODEC, test_hash).to_v0()
    assert downgraded == CIDv0(test_hash)
def test_cidv0_eq_cidv1(self, test_hash):
    """A CIDv0 upgraded to v1 equals the directly-built dag-pb CIDv1."""
    upgraded = CIDv0(test_hash).to_v1()
    assert upgraded == CIDv1(CIDv0.CODEC, test_hash)
def cid(self, test_hash):
    """Fixture: a CIDv1 built from the class' TEST_CODEC and the sample hash."""
    codec = self.TEST_CODEC
    return CIDv1(codec, test_hash)
def cidv1(self, test_hash):
    """Fixture: a dag-pb CIDv1 wrapping the sample hash."""
    codec = 'dag-pb'
    return CIDv1(codec, test_hash)
def test_multibase_hash(self, test_hash):
    """make_cid: make_cid works with multibase-encoded hash."""
    expected = CIDv1('dag-pb', test_hash)
    encoded = expected.encode()
    assert make_cid(encoded) == expected