def get(self, object_key, dst_file_path):
    """Reassemble the object stored under *object_key* into *dst_file_path*.

    Loads the object's JSON descriptor, verifies its integrity, then
    concatenates every chunk listed in the descriptor's 'Links' into the
    destination file.

    Returns the total size in bytes (sum of the chunks' declared 'Size'
    fields), or 0 if the descriptor fails the integrity check or a chunk
    could not be written (in which case the partial file is deleted).
    Re-raises any exception from the write loop after removing the
    partial destination file.
    """
    size = 0
    descriptor = json_load(self._get_hashpath(object_key))
    json_objects = json.dumps(descriptor).encode()
    # Bail out early (size 0) if the stored descriptor does not pass the integrity check.
    if not self._check_integrity(object_key, json_objects):
        return size
    successfully_wrote = True
    # concat all chunks to dstfile
    try:
        with open(dst_file_path, 'wb') as dst_file:
            for chunk in descriptor['Links']:
                chunk_hash = chunk['Hash']
                blob_size = chunk['Size']
                log.debug(output_messages['DEBUG_GET_CHUNK'] % (chunk_hash, blob_size), class_name=HASH_FS_CLASS_NAME)
                size += int(blob_size)
                successfully_wrote = self._write_chunk_in_file(chunk_hash, dst_file)
                if not successfully_wrote:
                    break
    except Exception:
        # Remove the partial file, then re-raise with the original
        # traceback (bare `raise` instead of `raise e`).
        if os.path.exists(dst_file_path):
            os.remove(dst_file_path)
        raise
    if not successfully_wrote:
        # A chunk failed to write: discard the partial file and report 0 bytes.
        size = 0
        os.remove(dst_file_path)
    return size
def test_json_load(self):
    """An empty dict is falsy; json_load on the fixture yields a truthy
    dict whose DATASETS entry carries the expected fields."""
    empty = {}
    self.assertFalse(bool(empty))
    loaded = json_load('./udata/data.json')
    expected_fields = {'categories': 'imgs', 'name': 'dataex', 'version': 1}
    for field_name, expected_value in expected_fields.items():
        self.assertEqual(loaded[DATASETS][field_name], expected_value)
    self.assertTrue(bool(loaded))
def load(self, key):
    """Return the JSON descriptor stored under *key*'s hash path."""
    return json_load(self._get_hashpath(key))