def test_print_info(self):
    """print_info must reject bad paths, dump valid account/container DBs,
    and raise InfoSystemExit when the requested DB type does not match."""
    # Nonexistent DB file.
    db_file = 'foo'
    self.assertRaises(InfoSystemExit, print_info, 'object', db_file)
    # Path inside the test dir that is not (yet) a valid DB.
    db_file = os.path.join(self.testdir, './acct.db')
    self.assertRaises(InfoSystemExit, print_info, 'account', db_file)
    # Create a real account DB via a PUT to the account controller.
    controller = AccountController(
        {'devices': self.testdir, 'mount_check': 'false'})
    req = Request.blank('/sda1/1/acct', environ={
        'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'})
    resp = req.get_response(controller)
    self.assertEqual(resp.status_int, 201)
    out = StringIO()
    exp_raised = False
    with mock.patch('sys.stdout', out):
        db_file = os.path.join(self.testdir, 'sda1', 'accounts',
                               '1', 'b47',
                               'dc5be2aa4347a22a0fee6bc7de505b47',
                               'dc5be2aa4347a22a0fee6bc7de505b47.db')
        try:
            print_info('account', db_file, swift_dir=self.testdir)
        except Exception:
            exp_raised = True
    if exp_raised:
        self.fail("Unexpected exception raised")
    else:
        # A successful dump produces a substantial report.
        self.assertTrue(len(out.getvalue().strip()) > 800)
    # Same again for a container DB.
    controller = ContainerController(
        {'devices': self.testdir, 'mount_check': 'false'})
    req = Request.blank('/sda1/1/acct/cont', environ={
        'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'})
    resp = req.get_response(controller)
    self.assertEqual(resp.status_int, 201)
    out = StringIO()
    exp_raised = False
    with mock.patch('sys.stdout', out):
        db_file = os.path.join(self.testdir, 'sda1', 'containers',
                               '1', 'cae',
                               'd49d0ecbb53be1fcc49624f2f7c7ccae',
                               'd49d0ecbb53be1fcc49624f2f7c7ccae.db')
        orig_cwd = os.getcwd()
        try:
            # Exercise the relative-path code path.
            os.chdir(os.path.dirname(db_file))
            print_info('container', os.path.basename(db_file),
                       swift_dir='/dev/null')
        except Exception:
            exp_raised = True
        finally:
            os.chdir(orig_cwd)
    if exp_raised:
        self.fail("Unexpected exception raised")
    else:
        self.assertTrue(len(out.getvalue().strip()) > 600)
    # Asking for the wrong DB type must fail cleanly with InfoSystemExit.
    out = StringIO()
    exp_raised = False
    with mock.patch('sys.stdout', out):
        db_file = os.path.join(self.testdir, 'sda1', 'containers',
                               '1', 'cae',
                               'd49d0ecbb53be1fcc49624f2f7c7ccae',
                               'd49d0ecbb53be1fcc49624f2f7c7ccae.db')
        orig_cwd = os.getcwd()
        try:
            os.chdir(os.path.dirname(db_file))
            print_info('account', os.path.basename(db_file),
                       swift_dir='/dev/null')
        except InfoSystemExit:
            exp_raised = True
        finally:
            os.chdir(orig_cwd)
    if exp_raised:
        exp_out = 'Does not appear to be a DB of type "account":' \
            ' ./d49d0ecbb53be1fcc49624f2f7c7ccae.db'
        self.assertEqual(out.getvalue().strip(), exp_out)
    else:
        self.fail("Expected an InfoSystemExit exception to be raised")
def reset(self):
    """Drop any recorded messages and start a fresh output buffer."""
    self.messages = []
    self.out = StringIO()
def str_diagnostic(expected, value, rtol, atol):
    """Return a pretty multiline string representing the cause of the exception"""
    sio = StringIO()
    # Best-effort summary of the expected array; any failure (e.g. the
    # input is not an ndarray) silently skips this section.
    try:
        ssio = StringIO()
        print(" : shape, dtype, strides, min, max, n_inf, n_nan:", file=ssio)
        print(" Expected :", end=' ', file=ssio)
        print(expected.shape, end=' ', file=ssio)
        print(expected.dtype, end=' ', file=ssio)
        print(expected.strides, end=' ', file=ssio)
        print(expected.min(), end=' ', file=ssio)
        print(expected.max(), end=' ', file=ssio)
        print(np.isinf(expected).sum(), end=' ', file=ssio)
        print(np.isnan(expected).sum(), end=' ', file=ssio)
        # only if all succeeds do we add anything to sio
        print(ssio.getvalue(), file=sio)
    except Exception:
        pass
    # Same best-effort summary for the actual value.
    try:
        ssio = StringIO()
        print(" Value :", end=' ', file=ssio)
        print(value.shape, end=' ', file=ssio)
        print(value.dtype, end=' ', file=ssio)
        print(value.strides, end=' ', file=ssio)
        print(value.min(), end=' ', file=ssio)
        print(value.max(), end=' ', file=ssio)
        print(np.isinf(value).sum(), end=' ', file=ssio)
        print(np.isnan(value).sum(), end=' ', file=ssio)
        # only if all succeeds do we add anything to sio
        print(ssio.getvalue(), file=sio)
    except Exception:
        pass
    print(" expected :", expected, file=sio)
    print(" value :", value, file=sio)
    # Absolute and relative difference statistics, again best-effort.
    try:
        ov = np.asarray(expected)
        nv = np.asarray(value)
        ssio = StringIO()
        absdiff = np.absolute(nv - ov)
        print(" Max Abs Diff: ", np.max(absdiff), file=ssio)
        print(" Mean Abs Diff: ", np.mean(absdiff), file=ssio)
        print(" Median Abs Diff: ", np.median(absdiff), file=ssio)
        print(" Std Abs Diff: ", np.std(absdiff), file=ssio)
        reldiff = np.absolute(nv - ov) / np.absolute(ov)
        print(" Max Rel Diff: ", np.max(reldiff), file=ssio)
        print(" Mean Rel Diff: ", np.mean(reldiff), file=ssio)
        print(" Median Rel Diff: ", np.median(reldiff), file=ssio)
        print(" Std Rel Diff: ", np.std(reldiff), file=ssio)
        # only if all succeeds do we add anything to sio
        print(ssio.getvalue(), file=sio)
    except Exception:
        pass
    # Explicit rtol/atol arguments override the dtype-derived defaults.
    atol_, rtol_ = T.basic._get_atol_rtol(expected, value)
    if rtol is not None:
        rtol_ = rtol
    if atol is not None:
        atol_ = atol
    print(" rtol, atol:", rtol_, atol_, file=sio)
    return sio.getvalue()
def normalize_token_spacing(code):
    """Round-trip *code* through the tokenizer to canonicalize spacing."""
    source = StringIO(code)
    token_pairs = [(tok_type, tok_string)
                   for (tok_type, tok_string, _, _, _)
                   in tokenize.generate_tokens(source.readline)]
    return pretty_untokenize(token_pairs)
def __repr__(self):
    """Debug representation: a header line followed by the card dump."""
    buf = StringIO()
    header = '<%s object> n=%s\n' % (self.type, self.n)
    buf.write(header)
    self.write_card(buf)
    return buf.getvalue()
def test_dump_info():
    """debug.dump_info in testing mode must write something to the stream."""
    sink = StringIO()
    debug.dump_info(None, None, file=sink, testing=True)
    assert sink.getvalue()
def mock_keyfile(*args, **kwargs):
    """Stand-in key file: yield a single stream of fifty 'a' characters."""
    fake_key = 'a' * 50
    yield StringIO(fake_key)
def test_01_02_load_v2(self):
    """Load a revision-2 LoadSingleImage pipeline and verify that file
    settings and directory choices are preserved."""
    data = r"""CellProfiler Pipeline: http://www.cellprofiler.org
Version:1
SVNRevision:9524

LoadSingleImage:[module_num:1|svn_version:\'Unknown\'|variable_revision_number:2|show_window:True|notes:\x5B\x5D]
    Folder containing the image file:Default Input Folder\x7CNone
    Filename of the image to load (Include the extension, e.g., .tif):foo.tif
    Name the image that will be loaded:DNA
    Filename of the image to load (Include the extension, e.g., .tif):bar.tif
    Name the image that will be loaded:Cytoplasm

LoadSingleImage:[module_num:2|svn_version:\'Unknown\'|variable_revision_number:2|show_window:True|notes:\x5B\x5D]
    Folder containing the image file:Default Output Folder\x7CNone
    Filename of the image to load (Include the extension, e.g., .tif):baz.tif
    Name the image that will be loaded:GFP

LoadSingleImage:[module_num:3|svn_version:\'Unknown\'|variable_revision_number:2|show_window:True|notes:\x5B\x5D]
    Folder containing the image file:Elsewhere...\x7CNone
    Filename of the image to load (Include the extension, e.g., .tif):baz.tif
    Name the image that will be loaded:GFP

LoadSingleImage:[module_num:4|svn_version:\'Unknown\'|variable_revision_number:2|show_window:True|notes:\x5B\x5D]
    Folder containing the image file:URL\x7Chttps\x3A//svn.broadinstitute.org/CellProfiler/trunk/ExampleImages/ExampleSBSImages
    Filename of the image to load (Include the extension, e.g., .tif):Channel1-01-A-01.tif
    Name the image that will be loaded:DNA1
"""
    pipeline = cellprofiler.pipeline.Pipeline()

    def callback(caller, event):
        # Loading must not raise any load-exception events.
        self.assertFalse(
            isinstance(event, cellprofiler.pipeline.LoadExceptionEvent))

    pipeline.add_listener(callback)
    pipeline.load(StringIO(data))
    self.assertEqual(len(pipeline.modules()), 4)
    module = pipeline.modules()[0]
    self.assertTrue(
        isinstance(module,
                   cellprofiler.modules.loadsingleimage.LoadSingleImage))
    self.assertEqual(len(module.file_settings), 2)
    fs = module.file_settings[0]
    self.assertEqual(fs.file_name, "foo.tif")
    self.assertEqual(fs.image_name, "DNA")
    fs = module.file_settings[1]
    self.assertEqual(fs.file_name, "bar.tif")
    self.assertEqual(fs.image_name, "Cytoplasm")
    # The URL-based module resolves to a full URL file name.
    module = pipeline.modules()[3]
    fs = module.file_settings[0]
    self.assertEqual(
        fs.file_name,
        "https://svn.broadinstitute.org/CellProfiler/trunk/ExampleImages/"
        "ExampleSBSImages/Channel1-01-A-01.tif")
    # Each module maps to the expected directory-choice constant.
    dir_choice = [
        cellprofiler.setting.DEFAULT_INPUT_FOLDER_NAME,
        cellprofiler.setting.DEFAULT_OUTPUT_FOLDER_NAME,
        cellprofiler.setting.ABSOLUTE_FOLDER_NAME,
        cellprofiler.setting.URL_FOLDER_NAME
    ]
    for i, module in enumerate(pipeline.modules()):
        self.assertTrue(
            isinstance(
                module,
                cellprofiler.modules.loadsingleimage.LoadSingleImage))
        self.assertEqual(module.directory.dir_choice, dir_choice[i])
def test_01_01_load_v1(self):
    """Load a revision-1 LoadSingleImage pipeline and verify that legacy
    folder choices and custom paths are upgraded correctly."""
    data = r"""CellProfiler Pipeline: http://www.cellprofiler.org
Version:1
SVNRevision:9524

LoadSingleImage:[module_num:1|svn_version:\'Unknown\'|variable_revision_number:1|show_window:True|notes:\x5B\x5D]
    Folder containing the image file:Default Input Folder
    Name of the folder containing the image file:path1
    Filename of the image to load (Include the extension, e.g., .tif):foo.tif
    Name the image that will be loaded:DNA
    Filename of the image to load (Include the extension, e.g., .tif):bar.tif
    Name the image that will be loaded:Cytoplasm

LoadSingleImage:[module_num:2|svn_version:\'Unknown\'|variable_revision_number:1|show_window:True|notes:\x5B\x5D]
    Folder containing the image file:Default Output Folder
    Name of the folder containing the image file:path2
    Filename of the image to load (Include the extension, e.g., .tif):baz.tif
    Name the image that will be loaded:GFP

LoadSingleImage:[module_num:3|svn_version:\'Unknown\'|variable_revision_number:1|show_window:True|notes:\x5B\x5D]
    Folder containing the image file:Custom folder
    Name of the folder containing the image file:path3
    Filename of the image to load (Include the extension, e.g., .tif):baz.tif
    Name the image that will be loaded:GFP

LoadSingleImage:[module_num:4|svn_version:\'Unknown\'|variable_revision_number:1|show_window:True|notes:\x5B\x5D]
    Folder containing the image file:Custom with metadata
    Name of the folder containing the image file:path4
    Filename of the image to load (Include the extension, e.g., .tif):baz.tif
    Name the image that will be loaded:GFP
"""
    pipeline = cellprofiler.pipeline.Pipeline()

    def callback(caller, event):
        # Loading must not raise any load-exception events.
        self.assertFalse(
            isinstance(event, cellprofiler.pipeline.LoadExceptionEvent))

    pipeline.add_listener(callback)
    pipeline.load(StringIO(data))
    self.assertEqual(len(pipeline.modules()), 4)
    # Legacy "Custom folder" / "Custom with metadata" both upgrade to
    # absolute folders; each keeps its per-module custom path.
    dir_choice = [
        cellprofiler.setting.DEFAULT_INPUT_FOLDER_NAME,
        cellprofiler.setting.DEFAULT_OUTPUT_FOLDER_NAME,
        cellprofiler.setting.ABSOLUTE_FOLDER_NAME,
        cellprofiler.setting.ABSOLUTE_FOLDER_NAME
    ]
    for i, module in enumerate(pipeline.modules()):
        self.assertTrue(
            isinstance(
                module,
                cellprofiler.modules.loadsingleimage.LoadSingleImage))
        self.assertEqual(module.directory.dir_choice, dir_choice[i])
        self.assertEqual(module.directory.custom_path, "path%d" % (i + 1))
    module = pipeline.modules()[0]
    self.assertTrue(
        isinstance(module,
                   cellprofiler.modules.loadsingleimage.LoadSingleImage))
    self.assertEqual(len(module.file_settings), 2)
    fs = module.file_settings[0]
    self.assertEqual(fs.file_name, "foo.tif")
    self.assertEqual(fs.image_name, "DNA")
    fs = module.file_settings[1]
    self.assertEqual(fs.file_name, "bar.tif")
    self.assertEqual(fs.image_name, "Cytoplasm")
def test_unchecked_etag(self):
    """With check_etag=False the ETag is reported but flagged unchecked."""
    captured = StringIO()
    with mock.patch('sys.stdout', captured):
        print_obj(self.datafile, check_etag=False)
    report = captured.getvalue()
    self.assertTrue(
        'ETag: d41d8cd98f00b204e9800998ecf8427e (not checked)' in report)
def test_print_obj_metadata(self):
    """print_obj_metadata must render path components, object hash,
    timestamp and the system/user/other metadata sections, and must
    reject missing or malformed input."""
    self.assertRaisesMessage(ValueError, 'Metadata is None',
                             print_obj_metadata, [])

    def get_metadata(items):
        # Base object metadata shared by every case below.
        md = dict(name='/AUTH_admin/c/dummy')
        md['Content-Type'] = 'application/octet-stream'
        md['X-Timestamp'] = 106.3
        md.update(items)
        return md

    # User metadata only.
    metadata = get_metadata({'X-Object-Meta-Mtime': '107.3'})
    out = StringIO()
    with mock.patch('sys.stdout', out):
        print_obj_metadata(metadata)
    exp_out = '''Path: /AUTH_admin/c/dummy
  Account: AUTH_admin
  Container: c
  Object: dummy
  Object hash: 128fdf98bddd1b1e8695f4340e67a67a
Content-Type: application/octet-stream
Timestamp: 1970-01-01T00:01:46.300000 (%s)
System Metadata:
  No metadata found
User Metadata:
  X-Object-Meta-Mtime: 107.3
Other Metadata:
  No metadata found''' % (utils.Timestamp(106.3).internal)
    self.assertEqual(out.getvalue().strip(), exp_out)
    # System metadata only.
    metadata = get_metadata({
        'X-Object-Sysmeta-Mtime': '107.3',
        'X-Object-Sysmeta-Name': 'Obj name',
    })
    out = StringIO()
    with mock.patch('sys.stdout', out):
        print_obj_metadata(metadata)
    exp_out = '''Path: /AUTH_admin/c/dummy
  Account: AUTH_admin
  Container: c
  Object: dummy
  Object hash: 128fdf98bddd1b1e8695f4340e67a67a
Content-Type: application/octet-stream
Timestamp: 1970-01-01T00:01:46.300000 (%s)
System Metadata:
  X-Object-Sysmeta-Mtime: 107.3
  X-Object-Sysmeta-Name: Obj name
User Metadata:
  No metadata found
Other Metadata:
  No metadata found''' % (utils.Timestamp(106.3).internal)
    self.assertEqual(out.getvalue().strip(), exp_out)
    # One entry of each kind: system, user and "other".
    metadata = get_metadata({
        'X-Object-Meta-Mtime': '107.3',
        'X-Object-Sysmeta-Mtime': '107.3',
        'X-Object-Mtime': '107.3',
    })
    out = StringIO()
    with mock.patch('sys.stdout', out):
        print_obj_metadata(metadata)
    exp_out = '''Path: /AUTH_admin/c/dummy
  Account: AUTH_admin
  Container: c
  Object: dummy
  Object hash: 128fdf98bddd1b1e8695f4340e67a67a
Content-Type: application/octet-stream
Timestamp: 1970-01-01T00:01:46.300000 (%s)
System Metadata:
  X-Object-Sysmeta-Mtime: 107.3
User Metadata:
  X-Object-Meta-Mtime: 107.3
Other Metadata:
  X-Object-Mtime: 107.3''' % (utils.Timestamp(106.3).internal)
    self.assertEqual(out.getvalue().strip(), exp_out)
    # No extra metadata at all.
    metadata = get_metadata({})
    out = StringIO()
    with mock.patch('sys.stdout', out):
        print_obj_metadata(metadata)
    exp_out = '''Path: /AUTH_admin/c/dummy
  Account: AUTH_admin
  Container: c
  Object: dummy
  Object hash: 128fdf98bddd1b1e8695f4340e67a67a
Content-Type: application/octet-stream
Timestamp: 1970-01-01T00:01:46.300000 (%s)
System Metadata:
  No metadata found
User Metadata:
  No metadata found
Other Metadata:
  No metadata found''' % (utils.Timestamp(106.3).internal)
    self.assertEqual(out.getvalue().strip(), exp_out)
    # A name that does not split into account/container/object is invalid.
    metadata = get_metadata({'X-Object-Meta-Mtime': '107.3'})
    metadata['name'] = '/a-s'
    self.assertRaisesMessage(ValueError, 'Path is invalid',
                             print_obj_metadata, metadata)
    # Missing name: path section degrades gracefully.
    metadata = get_metadata({'X-Object-Meta-Mtime': '107.3'})
    del metadata['name']
    out = StringIO()
    with mock.patch('sys.stdout', out):
        print_obj_metadata(metadata)
    exp_out = '''Path: Not found in metadata
Content-Type: application/octet-stream
Timestamp: 1970-01-01T00:01:46.300000 (%s)
System Metadata:
  No metadata found
User Metadata:
  X-Object-Meta-Mtime: 107.3
Other Metadata:
  No metadata found''' % (utils.Timestamp(106.3).internal)
    self.assertEqual(out.getvalue().strip(), exp_out)
    # Missing Content-Type.
    metadata = get_metadata({'X-Object-Meta-Mtime': '107.3'})
    del metadata['Content-Type']
    out = StringIO()
    with mock.patch('sys.stdout', out):
        print_obj_metadata(metadata)
    exp_out = '''Path: /AUTH_admin/c/dummy
  Account: AUTH_admin
  Container: c
  Object: dummy
  Object hash: 128fdf98bddd1b1e8695f4340e67a67a
Content-Type: Not found in metadata
Timestamp: 1970-01-01T00:01:46.300000 (%s)
System Metadata:
  No metadata found
User Metadata:
  X-Object-Meta-Mtime: 107.3
Other Metadata:
  No metadata found''' % (utils.Timestamp(106.3).internal)
    self.assertEqual(out.getvalue().strip(), exp_out)
    # Missing X-Timestamp.
    metadata = get_metadata({'X-Object-Meta-Mtime': '107.3'})
    del metadata['X-Timestamp']
    out = StringIO()
    with mock.patch('sys.stdout', out):
        print_obj_metadata(metadata)
    exp_out = '''Path: /AUTH_admin/c/dummy
  Account: AUTH_admin
  Container: c
  Object: dummy
  Object hash: 128fdf98bddd1b1e8695f4340e67a67a
Content-Type: application/octet-stream
Timestamp: Not found in metadata
System Metadata:
  No metadata found
User Metadata:
  X-Object-Meta-Mtime: 107.3
Other Metadata:
  No metadata found'''
    self.assertEqual(out.getvalue().strip(), exp_out)
def test_print_obj_policy_name_mismatch(self):
    """A policy name disagreeing with the datafile's ring triggers a warning."""
    buf = StringIO()
    with mock.patch('sys.stdout', buf):
        print_obj(self.datafile, policy_name='two', swift_dir=self.testdir)
    expected_warning = 'Warning: Ring does not match policy!'
    self.assertTrue(expected_warning in buf.getvalue())
def test_print_obj(self):
    """print_obj should resolve the datafile against the objects-1 ring."""
    buf = StringIO()
    with mock.patch('sys.stdout', buf):
        print_obj(self.datafile, swift_dir=self.testdir)
    self.assertTrue('/objects-1/' in buf.getvalue())
def test_missing_etag(self):
    """A datafile without an ETag is reported as missing that field."""
    buf = StringIO()
    with mock.patch('sys.stdout', buf):
        print_obj(self.datafile)
    self.assertTrue('ETag: Not found in metadata' in buf.getvalue())
def getEmptyFileStoreID(self, jobStoreID=None):
    """Allocate a new file ID, store zero-byte content under it, return it."""
    newID = self._newID(isFile=True, jobStoreID=jobStoreID)
    empty_stream = StringIO("")
    self._writeFile(newID, empty_stream)
    return newID
def try_den_weight_torsion(self, grid_pair):
    """Run one (gamma, weight) DEN torsion simulated-annealing trial and
    return (gamma, weight, r_free, scatterers, eq_distances) for ranking."""
    #backup_k_rep = self.params.tardy.\
    #  prolsq_repulsion_function_changes.k_rep
    # Seed deterministically per grid point so trials are reproducible.
    local_seed = int(self.random_seed + grid_pair[1])
    flex.set_random_seed(value=local_seed)
    random.seed(local_seed)
    # Restore saved coordinates so every trial starts from the same model.
    self.fmodels.fmodel_xray().xray_structure.replace_scatterers(
        self.save_scatterers_local.deep_copy())
    self.fmodels.update_xray_structure(
        xray_structure=self.fmodels.fmodel_xray().xray_structure,
        update_f_calc=True)
    utils.assert_xray_structures_equal(
        x1=self.fmodels.fmodel_xray().xray_structure,
        x2=self.model.get_xray_structure())
    gamma_local = grid_pair[0]
    weight_local = grid_pair[1]
    # Install the trial DEN parameters on the geometry restraints manager.
    self.model.restraints_manager.geometry.\
        den_manager.gamma = gamma_local
    self.model.restraints_manager.geometry.\
        den_manager.weight = weight_local
    cycle = 0
    self.model.restraints_manager.geometry.\
        den_manager.current_cycle = cycle+1
    num_den_cycles = self.model.restraints_manager.geometry.\
        den_manager.num_cycles
    # Route per-cycle logging: parallel optimization writes to stdout,
    # serial runs use the main log (or a throwaway buffer unless verbose).
    if self.params.den.optimize and \
       self.nproc != Auto and \
       self.nproc > 1:
        local_log = sys.stdout
    elif self.params.den.optimize and \
         self.nproc == 1:
        if self.verbose:
            local_log = self.log
        else:
            local_log = StringIO()
    else:
        local_log = self.log
    print(" ...trying gamma %.1f, weight %.1f" % (
        gamma_local, weight_local), file=self.log)
    while cycle < num_den_cycles:
        #if self.model.restraints_manager.geometry.\
        #     generic_restraints_manager.den_manager.current_cycle == \
        #   self.model.restraints_manager.geometry.\
        #     generic_restraints_manager.den_manager.torsion_mid_point+1:
        #  self.params.tardy.\
        #    prolsq_repulsion_function_changes.k_rep = 1.0
        print("DEN cycle %d" % (cycle + 1), file=local_log)
        #print >> local_log, "Random seed: %d" % flex.get_random_seed()
        r_free = self.fmodels.fmodel_xray().r_free()
        print("rfree at start of SA cycle: %.4f" % r_free, file=local_log)
        print("k_rep = %.2f" % \
            self.params.tardy.\
                prolsq_repulsion_function_changes.k_rep, file=local_log)
        # Torsion-angle dynamics step for this DEN cycle.
        tardy.run(fmodels=self.fmodels,
                  model=self.model,
                  target_weights=self.target_weights,
                  params=self.params.tardy,
                  log=local_log,
                  format_for_phenix_refine=True,
                  call_back_after_step=False)
        if self.params.den.bulk_solvent_and_scale:
            self.bulk_solvent_and_scale(log=local_log)
        self.fmodels.fmodel_xray().xray_structure = \
            self.model.get_xray_structure()
        if self.params.den.refine_adp:
            self.adp_refinement(log=local_log)
        self.model.torsion_ncs_restraints_update(log=local_log)
        cycle += 1
        self.model.restraints_manager.geometry.\
            den_manager.current_cycle += 1
        r_free = self.fmodels.fmodel_xray().r_free()
        print("rfree at end of SA cycle: %f" % r_free, file=local_log)
    r_free = self.fmodels.fmodel_xray().r_free()
    # Capture the trial's final coordinates and DEN equilibrium distances.
    step_xray_structure = self.fmodels.fmodel_xray().\
        xray_structure.deep_copy_scatterers().scatterers()
    step_eq_distances = self.model.restraints_manager.geometry.\
        den_manager.get_current_eq_distances()
    return (gamma_local,
            weight_local,
            r_free,
            step_xray_structure,
            step_eq_distances)
def _writeString(self, jobStoreID, stringToUpload, **kwarg):
    """Wrap the string in a file-like buffer and store it under jobStoreID."""
    readable = StringIO(stringToUpload)
    self._writeFile(jobStoreID, readable, **kwarg)
def try_den_weight_cartesian(self, grid_pair):
    """Run one (gamma, weight) DEN Cartesian simulated-annealing trial and
    return (gamma, weight, r_free, scatterers, eq_distances) for ranking."""
    # Seed deterministically per grid point so trials are reproducible.
    local_seed = int(self.random_seed + grid_pair[1])
    flex.set_random_seed(value=local_seed)
    random.seed(local_seed)
    # Restore saved coordinates so every trial starts from the same model.
    self.fmodels.fmodel_xray().xray_structure.replace_scatterers(
        self.save_scatterers_local.deep_copy())
    self.fmodels.update_xray_structure(
        xray_structure=self.fmodels.fmodel_xray().xray_structure,
        update_f_calc=True)
    utils.assert_xray_structures_equal(
        x1=self.fmodels.fmodel_xray().xray_structure,
        x2=self.model.get_xray_structure())
    gamma_local = grid_pair[0]
    weight_local = grid_pair[1]
    # Install the trial DEN parameters on the geometry restraints manager.
    self.model.restraints_manager.geometry.\
        den_manager.gamma = gamma_local
    self.model.restraints_manager.geometry.\
        den_manager.weight = weight_local
    cycle = 0
    self.model.restraints_manager.geometry.\
        den_manager.current_cycle = cycle+1
    num_den_cycles = self.model.restraints_manager.geometry.\
        den_manager.num_cycles
    # Route per-cycle logging: parallel optimization writes to stdout,
    # serial runs use the main log (or a throwaway buffer unless verbose).
    if self.params.den.optimize and \
       self.nproc != Auto and \
       self.nproc > 1:
        local_log = sys.stdout
    elif self.params.den.optimize and \
         self.nproc == 1:
        if self.verbose:
            local_log = self.log
        else:
            local_log = StringIO()
    else:
        local_log = self.log
    print(" ...trying gamma %f, weight %f" % (
        gamma_local, weight_local), file=self.log)
    while cycle < num_den_cycles:
        print("DEN cycle %s" % (cycle + 1), file=local_log)
        r_free = self.fmodels.fmodel_xray().r_free()
        print("rfree at start of SA cycle: %f" % r_free, file=local_log)
        # Cartesian simulated-annealing step for this DEN cycle.
        simulated_annealing.manager(
            params=self.params.simulated_annealing,
            target_weights=self.target_weights,
            macro_cycle=self.macro_cycle,
            h_params=self.params.hydrogens,
            fmodels=self.fmodels,
            model=self.model,
            all_params=self.params,
            out=local_log)
        if self.params.den.bulk_solvent_and_scale:
            self.bulk_solvent_and_scale(log=local_log)
        if self.params.den.refine_adp:
            self.adp_refinement(log=local_log)
        self.model.torsion_ncs_restraints_update(log=local_log)
        cycle += 1
        self.model.restraints_manager.geometry.\
            den_manager.current_cycle += 1
        r_free = self.fmodels.fmodel_xray().r_free()
        print("rfree at end of SA cycle: %f" % r_free, file=local_log)
    r_free = self.fmodels.fmodel_xray().r_free()
    # Capture the trial's final coordinates and DEN equilibrium distances.
    step_xray_structure = self.fmodels.fmodel_xray().\
        xray_structure.deep_copy_scatterers().scatterers()
    step_eq_distances = self.model.restraints_manager.geometry.\
        den_manager.get_current_eq_distances()
    return (gamma_local,
            weight_local,
            r_free,
            step_xray_structure,
            step_eq_distances)
def exercise_combine_unique_pdb_files():
    """Exercise pdb.combine_unique_pdb_files with repeated file names and
    with files whose content is identical modulo whitespace."""
    # tmp1/tmp3/tmp5 share content "1" modulo whitespace; tmp2/tmp4 share "2".
    for file_name, s in [("tmp1", "1"),
                         ("tmp2", " 2"),
                         ("tmp3", "1\t"),
                         ("tmp4", " \t2"),
                         ("tmp5", "1 ")]:
        open(file_name, "w").write(s)
    # No duplicates: nothing to report.
    for file_names in [[], ["tmp1"], ["tmp1", "tmp2"]]:
        c = pdb.combine_unique_pdb_files(file_names=file_names)
        assert len(c.file_name_registry) == len(file_names)
        assert len(c.md5_registry) == len(file_names)
        assert len(c.unique_file_names) == len(file_names)
        assert len(c.raw_records) == len(file_names)
        s = StringIO()
        c.report_non_unique(out=s)
        assert len(s.getvalue()) == 0
    # Same file given twice: reported as a repeated name.
    c = pdb.combine_unique_pdb_files(file_names=["tmp1", "tmp1"])
    assert len(c.file_name_registry) == 1
    assert len(c.md5_registry) == 1
    assert len(c.unique_file_names) == 1
    assert len(c.raw_records) == 1
    s = StringIO()
    c.report_non_unique(out=s)
    assert not show_diff(s.getvalue(), """\
INFO: PDB file name appears 2 times: "tmp1"
  1 repeated file name ignored.

""")
    c = pdb.combine_unique_pdb_files(
        file_names=["tmp1", "tmp1", "tmp2", "tmp1"])
    assert len(c.file_name_registry) == 2
    assert len(c.md5_registry) == 2
    assert len(c.unique_file_names) == 2
    assert len(c.raw_records) == 2
    s = StringIO()
    # Every report line is prefixed with the given string.
    c.report_non_unique(out=s, prefix="^")
    assert not show_diff(s.getvalue(), """\
^INFO: PDB file name appears 3 times: "tmp1"
^  2 repeated file names ignored.
^
""")
    # Distinct names but identical content (md5 match).
    c = pdb.combine_unique_pdb_files(file_names=["tmp1", "tmp2", "tmp3"])
    assert len(c.file_name_registry) == 3
    assert len(c.md5_registry) == 2
    assert len(c.unique_file_names) == 2
    assert len(c.raw_records) == 2
    s = StringIO()
    c.report_non_unique(out=s)
    assert not show_diff(s.getvalue(), """\
INFO: PDB files with identical content:
  "tmp1"
  "tmp3"
  1 file with repeated content ignored.

""")
    c = pdb.combine_unique_pdb_files(
        file_names=["tmp1", "tmp2", "tmp3", "tmp5"])
    assert len(c.file_name_registry) == 4
    assert len(c.md5_registry) == 2
    assert len(c.unique_file_names) == 2
    assert len(c.raw_records) == 2
    s = StringIO()
    c.report_non_unique(out=s, prefix=": ")
    assert not show_diff(s.getvalue(), """\
: INFO: PDB files with identical content:
:   "tmp1"
:   "tmp3"
:   "tmp5"
:   2 files with repeated content ignored.
: 
""")
    # Repeated names and repeated content combined.
    c = pdb.combine_unique_pdb_files(
        file_names=["tmp1", "tmp2", "tmp3", "tmp4", "tmp5", "tmp4", "tmp5"])
    assert len(c.file_name_registry) == 5
    assert len(c.md5_registry) == 2
    assert len(c.unique_file_names) == 2
    assert len(c.raw_records) == 2
    s = StringIO()
    c.report_non_unique(out=s)
    assert not show_diff(s.getvalue(), """\
INFO: PDB file name appears 2 times: "tmp4"
INFO: PDB file name appears 2 times: "tmp5"
  2 repeated file names ignored.

INFO: PDB files with identical content:
  "tmp2"
  "tmp4"
INFO: PDB files with identical content:
  "tmp1"
  "tmp3"
  "tmp5"
  3 files with repeated content ignored.

""")
def __init__(self, path):
    """Remember the target path and start with an empty output buffer."""
    self._path = path
    self._output = StringIO()
def test_write_values_disconnects():
    """A disconnect marker at offset 0 suppresses all output."""
    sink = StringIO()
    payload = "foo" * 100
    writer.write_values(sink, [payload], [(0, "disconnect")], blocksize=5)
    assert not sink.getvalue()
def test_debugprint():
    """Check theano.printing.debugprint output for the various ids= modes,
    stop_on_name, print_storage and print_clients options."""
    A = tensor.matrix(name='A')
    B = tensor.matrix(name='B')
    C = A + B
    C.name = 'C'
    D = tensor.matrix(name='D')
    E = tensor.matrix(name='E')
    F = D + E
    G = C + F
    mode = theano.compile.get_default_mode().including('fusion')
    g = theano.function([A, B, D, E], G, mode=mode)
    # just test that it work
    s = StringIO()
    debugprint(G, file=s)
    # test ids=int
    s = StringIO()
    debugprint(G, file=s, ids='int')
    s = s.getvalue()
    # The additional white space are needed!
    reference = '\n'.join([
        "Elemwise{add,no_inplace} [id 0] '' ",
        " |Elemwise{add,no_inplace} [id 1] 'C' ",
        " | |A [id 2]",
        " | |B [id 3]",
        " |Elemwise{add,no_inplace} [id 4] '' ",
        " |D [id 5]",
        " |E [id 6]",
    ]) + '\n'
    if s != reference:
        print('--' + s + '--')
        print('--' + reference + '--')
    assert s == reference
    # test ids=CHAR
    s = StringIO()
    debugprint(G, file=s, ids='CHAR')
    s = s.getvalue()
    # The additional white space are needed!
    reference = "\n".join([
        "Elemwise{add,no_inplace} [id A] '' ",
        " |Elemwise{add,no_inplace} [id B] 'C' ",
        " | |A [id C]",
        " | |B [id D]",
        " |Elemwise{add,no_inplace} [id E] '' ",
        " |D [id F]",
        " |E [id G]",
    ]) + '\n'
    if s != reference:
        print('--' + s + '--')
        print('--' + reference + '--')
    assert s == reference
    # test ids=CHAR, stop_on_name=True
    s = StringIO()
    debugprint(G, file=s, ids='CHAR', stop_on_name=True)
    s = s.getvalue()
    # The additional white space are needed!
    reference = '\n'.join([
        "Elemwise{add,no_inplace} [id A] '' ",
        " |Elemwise{add,no_inplace} [id B] 'C' ",
        " |Elemwise{add,no_inplace} [id C] '' ",
        " |D [id D]",
        " |E [id E]",
    ]) + '\n'
    if s != reference:
        print('--' + s + '--')
        print('--' + reference + '--')
    assert s == reference
    # test ids=
    s = StringIO()
    debugprint(G, file=s, ids='')
    s = s.getvalue()
    # The additional white space are needed!
    reference = '\n'.join([
        "Elemwise{add,no_inplace} '' ",
        " |Elemwise{add,no_inplace} 'C' ",
        " | |A ",
        " | |B ",
        " |Elemwise{add,no_inplace} '' ",
        " |D ",
        " |E ",
    ]) + '\n'
    if s != reference:
        print('--' + s + '--')
        print('--' + reference + '--')
    assert s == reference
    # test print_storage=True
    s = StringIO()
    debugprint(g, file=s, ids='', print_storage=True)
    s = s.getvalue()
    # The additional white space are needed!
    reference = '\n'.join([
        "Elemwise{add,no_inplace} '' 0 [None]",
        " |A [None]",
        " |B [None]",
        " |D [None]",
        " |E [None]",
    ]) + '\n'
    if s != reference:
        print('--' + s + '--')
        print('--' + reference + '--')
    assert s == reference
    # test clients
    s = StringIO()
    # We must force the mode as otherwise it can change the clients order
    f = theano.function([A, B, D], [A + B, A + B - D], mode='FAST_COMPILE')
    debugprint(f, file=s, print_clients=True)
    s = s.getvalue()
    # The additional white space are needed!
    reference = '\n'.join([
        "Elemwise{add,no_inplace} [id A] '' 0 clients:[('[id B]', 1), ('output', '')]",
        " |A [id D]",
        " |B [id E]",
        "Elemwise{sub,no_inplace} [id B] '' 1",
        " |Elemwise{add,no_inplace} [id A] '' 0 clients:[('[id B]', 1), ('output', '')]",
        " |D [id F]",
    ]) + '\n'
    if s != reference:
        print('--' + s + '--')
        print('--' + reference + '--')
    assert s == reference
def c_code(self, node, name, inputs, outputs, sub):
    """Generate C code that applies this op's static index list to a GPU
    array via pygpu_index (or pygpu_copy for the empty-index case)."""
    inp_ndim = node.inputs[0].ndim
    inp = inputs[0]
    indices = inputs[1:]
    # pad out the index list to the same dimension as the input
    idx_list = self.idx_list + \
        ((slice(None),) * (inp_ndim - len(self.idx_list)))
    # This case fails when we use pygpu_index(), so here is some
    # special code
    if len(idx_list) == 0:
        return """
        Py_XDECREF(%(out)s);
        %(out)s = pygpu_copy(%(inp)s, GA_ANY_ORDER);
        if (!%(out)s) {
            // Exception already set
            %(fail)s
        }
        """ % dict(out=outputs[0], inp=inp, fail=sub['fail'])
    sio = StringIO()
    # Emit per-dimension start/stop/step arrays plus an ndim sanity check.
    print("""
    ssize_t starts[%(sz)s];
    ssize_t stops[%(sz)s];
    ssize_t steps[%(sz)s];
    ssize_t cur;
    int err;

    if (%(inp)s->ga.nd != %(sz)s) {
        PyErr_SetString(PyExc_IndexError, "invalid index");
        %(fail)s
    }
    """ % dict(sz=len(idx_list), inp=inp, fail=sub['fail']), file=sio)

    def fix_idx(idx):
        # Normalize a slice component to (C expression, is-default flag).
        if idx is None:
            return "0", 1
        elif isinstance(idx, (np.integer, integer_types)):
            return str(idx), 0
        elif isinstance(idx, gof.Type):
            # Runtime scalar input: consume the next C input name.
            return indices.pop(0), 0
        else:
            assert 0, idx

    for i, idx in enumerate(idx_list):
        if isinstance(idx, slice):
            start, start_n = fix_idx(idx.start)
            stop, stop_n = fix_idx(idx.stop)
            step, step_n = fix_idx(idx.step)
            print("""
            starts[%(i)s] = %(start)s;
            stops[%(i)s] = %(stop)s;
            steps[%(i)s] = %(step)s;
            if (fix_indices(&starts[%(i)s], &stops[%(i)s], &steps[%(i)s],
                            %(start_n)s, %(stop_n)s, %(step_n)s,
                            %(inp)s->ga.dimensions[%(i)s]) == -1) {
                %(fail)s
            }
            """ % dict(i=i, start=start, stop=stop, step=step,
                       start_n=start_n, stop_n=stop_n, step_n=step_n,
                       fail=sub['fail'], inp=inp), file=sio)
        else:
            # Scalar index: step 0 marks a dimension-dropping index.
            if isinstance(idx, gof.Type):
                start = indices.pop(0)
            elif isinstance(idx, (np.integer, integer_types)):
                start = idx
            else:
                assert 0, idx
            print("""
            cur = %(start)s;
            if (cur < 0)
                cur += %(inp)s->ga.dimensions[%(i)s];
            starts[%(i)s] = cur;
            steps[%(i)s] = 0;
            """ % dict(i=i, start=start, fail=sub['fail'], inp=inp), file=sio)
    print("""
    Py_XDECREF(%(out)s);
    %(out)s = pygpu_index(%(inp)s, starts, stops, steps);
    if (!%(out)s) { %(fail)s }
    """ % dict(name=name, fail=sub['fail'], inp=inp, out=outputs[0]),
        file=sio)
    return sio.getvalue()
def exercise_simple():
    """End-to-end smoke test of mmtbx.command_line.load_model_and_data:
    build a one-residue Tyr model with synthetic data, then exercise
    wavelength/energy input, unmerged data input and unknown scatterers."""
    # NOTE(review): ATOM records reconstructed to standard PDB columns —
    # original fixed-column spacing was lost in transit; verify if exact
    # bytes matter downstream.
    pdb_str = """
ATOM     47  N   TYR A   7       8.292   1.817   6.147  1.00 14.70           N
ATOM     48  CA  TYR A   7       9.159   2.144   7.299  1.00 15.18           C
ATOM     49  C   TYR A   7      10.603   2.331   6.885  1.00 15.91           C
ATOM     50  O   TYR A   7      11.041   1.811   5.855  1.00 15.76           O
ATOM     51  CB  TYR A   7       9.061   1.065   8.369  1.00 15.35           C
ATOM     52  CG  TYR A   7       7.665   0.929   8.902  1.00 14.45           C
ATOM     53  CD1 TYR A   7       6.771   0.021   8.327  1.00 15.68           C
ATOM     54  CD2 TYR A   7       7.210   1.756   9.920  1.00 14.80           C
ATOM     55  CE1 TYR A   7       5.480  -0.094   8.796  1.00 13.46           C
ATOM     56  CE2 TYR A   7       5.904   1.649  10.416  1.00 14.33           C
ATOM     57  CZ  TYR A   7       5.047   0.729   9.831  1.00 15.09           C
ATOM     58  OH  TYR A   7       3.766   0.589  10.291  1.00 14.39           O
ATOM     59  OXT TYR A   7      11.358   2.999   7.612  1.00 17.49           O
TER
"""
    pdb_in = iotbx.pdb.input(source_info=None, lines=pdb_str)
    hierarchy = pdb_in.construct_hierarchy()
    xrs = pdb_in.xray_structure_simple()
    xrs.scattering_type_registry(d_min=1.5, table="n_gaussian")
    xrs.set_inelastic_form_factors(photon=1.54, table="sasaki")
    file_base = "tmp_mmtbx_cmdline"
    with open(file_base + ".pdb", "w") as f:
        f.write(hierarchy.as_pdb_string(crystal_symmetry=xrs))
    # Synthetic amplitudes plus free flags written to an MTZ file.
    fc = abs(xrs.structure_factors(d_min=1.5).f_calc())
    flags = fc.generate_r_free_flags()
    mtz = fc.as_mtz_dataset(column_root_label="F")
    mtz.add_miller_array(flags, column_root_label="FreeR_flag")
    mtz.mtz_object().write(file_base + ".mtz")
    # Single-residue sequence file.
    with open(file_base + ".fa", "w") as f:
        f.write(">Tyr\nY\n")
    base_args = [file_base + ext for ext in [".pdb", ".mtz", ".fa"]]
    cmdline = mmtbx.command_line.load_model_and_data(
        args=base_args + ["wavelength=1.54"],
        master_phil=mmtbx.command_line.generate_master_phil_with_inputs(""),
        out=StringIO(),
        create_log_buffer=True)
    assert (cmdline.params.input.xray_data.file_name is not None)
    assert (cmdline.sequence is not None)
    # Data were computed from the model, so R-work should be ~zero.
    r_factor = cmdline.fmodel.r_work()
    assert (r_factor < 0.002)
    cmdline.save_data_mtz("tmp_mmtbx_cmdline_data.mtz")
    assert os.path.isfile("tmp_mmtbx_cmdline_data.mtz")
    model = cmdline.create_model_manager()
    # energy input
    cmdline = mmtbx.command_line.load_model_and_data(
        args=base_args + ["energy=8050"],
        master_phil=mmtbx.command_line.generate_master_phil_with_inputs(""),
        out=StringIO(),
        create_log_buffer=True)
    # 8050 eV converts to ~1.54 Angstrom.
    assert approx_equal(cmdline.params.input.wavelength, 1.54018, eps=0.0001)
    # UNMERGED DATA INPUT
    log = cmdline.start_log_file("tst_mmtbx_cmdline.log")
    fc2 = xrs.structure_factors(d_min=1.3).f_calc().generate_bijvoet_mates()
    fc2 = fc2.randomize_amplitude_and_phase(
        amplitude_error=0.01, phase_error_deg=5,
        random_seed=12345).customized_copy(
            sigmas=flex.random_double(fc2.size(), 10))
    i_obs = abs(fc2).f_as_f_sq()
    i_obs = i_obs.expand_to_p1().customized_copy(
        crystal_symmetry=fc2).set_observation_type_xray_intensity()
    with open(file_base + ".sca", "w") as f:
        no_merge_original_index.writer(i_obs, file_object=f)
    master_phil = mmtbx.command_line.generate_master_phil_with_inputs(
        phil_string="", enable_unmerged_data=True)
    cmdline = mmtbx.command_line.load_model_and_data(
        args=[file_base + ext for ext in [".pdb", ".mtz", ".fa", ]] +
        ["unmerged_data.file_name=%s.sca" % file_base],
        master_phil=master_phil,
        out=StringIO(),
        create_log_buffer=True)
    assert (cmdline.unmerged_i_obs is not None)
    # test with unknown scatterers
    pdb_in = iotbx.pdb.input(source_info=None, lines=pdb_str + """\
ATOM     59 UNK  UNL A   7       0.000   0.000   0.000  1.00 20.00           X
""")
    hierarchy = pdb_in.construct_hierarchy()
    file_base = "tmp_mmtbx_cmdline"
    with open(file_base + ".pdb", "w") as f:
        f.write(hierarchy.as_pdb_string(crystal_symmetry=xrs))
    # Unknown element "X" must raise Sorry unless removal is requested.
    try:
        cmdline = mmtbx.command_line.load_model_and_data(
            args=[file_base + ext for ext in [".pdb", ".mtz", ".fa", ]],
            master_phil=master_phil,
            out=StringIO(),
            process_pdb_file=False,
            create_log_buffer=True)
    except Sorry:
        pass
    else:
        raise Exception_expected
    cmdline = mmtbx.command_line.load_model_and_data(
        args=[file_base + ext for ext in [".pdb", ".mtz", ".fa", ]],
        master_phil=master_phil,
        out=StringIO(),
        process_pdb_file=False,
        create_log_buffer=True,
        remove_unknown_scatterers=True)
def tokenize_str(code):
    """Tokenize *code* (a Python source string) and return all tokens
    produced by the standard tokenizer as a list."""
    read_line = StringIO(code).readline
    return [token for token in tokenize.generate_tokens(read_line)]
def exercise_load_unmerged():
    """Exercise loading of unmerged intensity data alongside a model and
    merged data set, including the failure modes for an incompatible space
    group and an incompatible unit cell.

    Fixes over the previous revision: every temporary file is now written
    via ``with`` (the .pdb handle was previously leaked and the two .cif
    writers used explicit open/close), and the dead ``args = [...]`` locals
    (always shadowed by the explicit ``args=`` keyword in the calls) were
    removed.
    """
    # fixed seeds so the randomized structure factors are reproducible
    flex.set_random_seed(123456)
    random.seed(123456)
    base = "tst_load_unmerged"
    pdb_in = iotbx.pdb.hierarchy.input(pdb_string=model_1yjp)
    xrs = pdb_in.xray_structure_simple()
    xrs.set_inelastic_form_factors(photon=1.54, table="sasaki")
    # merged amplitudes + free-R flags -> MTZ
    fc = abs(xrs.structure_factors(d_min=1.5).f_calc()).average_bijvoet_mates()
    fc.set_observation_type_xray_amplitude()
    flags = fc.generate_r_free_flags()
    mtz = fc.as_mtz_dataset(column_root_label="F")
    mtz.add_miller_array(flags, column_root_label="FreeR_flag")
    mtz.mtz_object().write(base + ".mtz")
    # perturbed, duplicated P1 copy serves as the "unmerged" observations
    xrs_p1 = xrs.expand_to_p1()
    xrs_p1.shake_sites_in_place(rms_difference=0.1)
    fc_p1 = xrs_p1.structure_factors(d_min=1.4).f_calc()
    fc_p1_extra = fc_p1.randomize_amplitude_and_phase(
        amplitude_error=1.0, phase_error_deg=0, random_seed=123456)
    fc_p1 = abs(
        fc_p1.concatenate(other=fc_p1_extra)).sort(by_value="packed_indices")
    fc_p1.set_observation_type_xray_amplitude()
    sg_p2 = sgtbx.space_group_info("P2")
    ic = fc_p1.f_as_f_sq().customized_copy(
        space_group_info=sg_p2,
        sigmas=flex.double(fc_p1.size(), 10.0))
    ic.export_as_scalepack_unmerged(file_name=base + ".sca")
    # was: open(base + ".pdb", "w").write(model_1yjp) -- leaked the handle
    with open(base + ".pdb", "w") as f:
        f.write(model_1yjp)
    master_phil = mmtbx.command_line.generate_master_phil_with_inputs(
        phil_string="", enable_unmerged_data=True)
    cmdline = mmtbx.command_line.load_model_and_data(
        args=[base + ext for ext in [
            ".pdb",
            ".mtz",
        ]] + ["unmerged_data.file_name=%s.sca" % base],
        master_phil=master_phil,
        out=StringIO(),
        create_fmodel=False,
        process_pdb_file=False,
        create_log_buffer=True)
    # now with .sca in P1 (raises Sorry)
    ic2 = fc_p1.f_as_f_sq().customized_copy(
        sigmas=flex.double(fc_p1.size(), 10.0))
    ic2.export_as_scalepack_unmerged(file_name=base + "_p1.sca")
    try:
        cmdline = mmtbx.command_line.load_model_and_data(
            args=[base + ext for ext in [
                ".pdb",
                ".mtz",
            ]] + ["unmerged_data.file_name=%s_p1.sca" % base],
            master_phil=master_phil,
            out=StringIO(),
            create_fmodel=False,
            process_pdb_file=False,
            create_log_buffer=True)
    except Sorry as s:
        assert (
            str(s) ==
            "Incompatible space groups in merged and unmerged data:P 1 21 1 versus P 1"
        ), s
    else:
        raise Exception_expected
    # XXX
    with open(base + ".cif", "w") as f:
        ic.as_cif_simple(array_type="meas", out=f)
    cmdline = mmtbx.command_line.load_model_and_data(
        args=[base + ext for ext in [
            ".pdb",
            ".mtz",
        ]] + ["unmerged_data.file_name=%s.cif" % base],
        master_phil=master_phil,
        out=StringIO(),
        create_fmodel=False,
        process_pdb_file=False,
        create_log_buffer=True)
    # bad unit cell
    uc2 = uctbx.unit_cell((23, 6.5, 23.5, 90, 108, 90))
    ic3 = ic.customized_copy(unit_cell=uc2)
    with open(base + "_new_uc.cif", "w") as f:
        ic3.as_cif_simple(array_type="meas", out=f)
    try:
        cmdline = mmtbx.command_line.load_model_and_data(
            args=[base + ext for ext in [
                ".pdb",
                ".mtz",
            ]] + ["unmerged_data.file_name=%s_new_uc.cif" % base],
            master_phil=master_phil,
            out=StringIO(),
            create_fmodel=False,
            process_pdb_file=False,
            create_log_buffer=True)
    except Sorry as s:
        assert ("Incompatible symmetry definitions" in str(s)), s
    else:
        raise Exception_expected
def generate_ssl_cert(target_file=None, overwrite=False, random=False,
                      return_content=False, serial_number=None):
    """Generate a self-signed SSL certificate/key pair.

    If *target_file* already exists (and *overwrite* is falsy), the key and
    certificate are extracted from it into ``<target>.key``/``<target>.crt``
    and reused. Otherwise a fresh 2048-bit RSA key and a self-signed X509
    certificate are created; with *random* set, a short uid is inserted into
    the file name. Returns ``(target_file, cert_file_name, key_file_name)``,
    or the combined PEM content when *return_content* is truthy.

    Changes: ``all_exist`` now short-circuits via a generator expression, and
    the PEM strings are produced directly from ``dump_certificate``/
    ``dump_privatekey`` instead of a redundant StringIO write/getvalue
    round-trip (same resulting content).
    """
    # Note: Do NOT import "OpenSSL" at the root scope
    # (Our test Lambdas are importing this file but don't have the module installed)
    from OpenSSL import crypto

    def all_exist(*files):
        # generator short-circuits on the first missing file
        return all(os.path.exists(f) for f in files)

    if target_file and not overwrite and os.path.exists(target_file):
        key_file_name = '%s.key' % target_file
        cert_file_name = '%s.crt' % target_file
        try:
            # extract key and cert from target_file and store into separate files
            content = load_file(target_file)
            key_start = '-----BEGIN PRIVATE KEY-----'
            key_end = '-----END PRIVATE KEY-----'
            cert_start = '-----BEGIN CERTIFICATE-----'
            cert_end = '-----END CERTIFICATE-----'
            key_content = content[
                content.index(key_start):content.index(key_end) + len(key_end)]
            cert_content = content[
                content.index(cert_start):content.index(cert_end) + len(cert_end)]
            save_file(key_file_name, key_content)
            save_file(cert_file_name, cert_content)
        except Exception as e:
            # best-effort: fall through and regenerate if extraction failed
            LOG.info(
                'Unable to store key/cert files for custom SSL certificate: %s' % e)
        if all_exist(key_file_name, cert_file_name):
            return target_file, cert_file_name, key_file_name
    if random and target_file:
        if '.' in target_file:
            target_file = target_file.replace('.', '.%s.' % short_uid(), 1)
        else:
            target_file = '%s.%s' % (target_file, short_uid())
    # create a key pair
    k = crypto.PKey()
    k.generate_key(crypto.TYPE_RSA, 2048)
    # create a self-signed cert
    cert = crypto.X509()
    subj = cert.get_subject()
    subj.C = 'AU'
    subj.ST = 'Some-State'
    subj.L = 'Some-Locality'
    subj.O = 'LocalStack Org'  # noqa
    subj.OU = 'Testing'
    subj.CN = 'localhost'
    # Note: new requirements for recent OSX versions: https://support.apple.com/en-us/HT210176
    # More details: https://www.iol.unh.edu/blog/2019/10/10/macos-catalina-and-chrome-trust
    serial_number = serial_number or 1001
    cert.set_version(2)
    cert.set_serial_number(serial_number)
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(2 * 365 * 24 * 60 * 60)
    cert.set_issuer(cert.get_subject())
    cert.set_pubkey(k)
    alt_names = b'DNS:localhost,DNS:test.localhost.atlassian.io,IP:127.0.0.1'
    cert.add_extensions([
        crypto.X509Extension(b'subjectAltName', False, alt_names),
        crypto.X509Extension(b'basicConstraints', True, b'CA:false'),
        crypto.X509Extension(
            b'keyUsage', True,
            b'nonRepudiation,digitalSignature,keyEncipherment'),
        crypto.X509Extension(b'extendedKeyUsage', True, b'serverAuth')
    ])
    cert.sign(k, 'SHA256')
    # serialize straight to stripped PEM strings (no StringIO buffer needed)
    cert_file_content = to_str(
        crypto.dump_certificate(crypto.FILETYPE_PEM, cert)).strip()
    key_file_content = to_str(
        crypto.dump_privatekey(crypto.FILETYPE_PEM, k)).strip()
    file_content = '%s\n%s' % (key_file_content, cert_file_content)
    if target_file:
        key_file_name = '%s.key' % target_file
        cert_file_name = '%s.crt' % target_file
        # check existence to avoid permission denied issues:
        # https://github.com/localstack/localstack/issues/1607
        if not all_exist(target_file, key_file_name, cert_file_name):
            for i in range(2):
                try:
                    save_file(target_file, file_content)
                    save_file(key_file_name, key_file_content)
                    save_file(cert_file_name, cert_file_content)
                    break
                except Exception as e:
                    if i > 0:
                        raise
                    LOG.info(
                        'Unable to store certificate file under %s, using tmp file instead: %s'
                        % (target_file, e))
                    # Fix for https://github.com/localstack/localstack/issues/1743
                    target_file = '%s.pem' % new_tmp_file()
                    key_file_name = '%s.key' % target_file
                    cert_file_name = '%s.crt' % target_file
                    TMP_FILES.append(target_file)
                    TMP_FILES.append(key_file_name)
                    TMP_FILES.append(cert_file_name)
        if not return_content:
            return target_file, cert_file_name, key_file_name
    return file_content
def setup(self):
    """Per-request initialization: split the request pair into its packet
    and socket parts, and expose the packet through in-memory file-like
    read/write objects."""
    from six.moves import cStringIO as StringIO
    packet, sock = self.request
    self.packet = packet
    self.socket = sock
    self.rfile = StringIO(packet)
    self.wfile = StringIO(packet)
def __init__(self):
    """Initialize with zero indentation and an empty in-memory output buffer."""
    self.indentation = 0
    self._out = StringIO()
def test_print_db_info_metadata(self):
    """Verify print_db_info_metadata: argument validation errors, then the
    formatted stdout for an account DB and for a container DB.
    """
    # invalid arguments must raise ValueError with these specific messages
    self.assertRaisesMessage(ValueError, 'Wrong DB type',
                             print_db_info_metadata, 't', {}, {})
    self.assertRaisesMessage(ValueError, 'DB info is None',
                             print_db_info_metadata, 'container', None, {})
    self.assertRaisesMessage(ValueError, 'Info is incomplete',
                             print_db_info_metadata, 'container', {}, {})
    # account DB info with one user-metadata item and one plain header
    info = dict(
        account='acct',
        created_at=100.1,
        put_timestamp=106.3,
        delete_timestamp=107.9,
        status_changed_at=108.3,
        container_count='3',
        object_count='20',
        bytes_used='42')
    info['hash'] = 'abaddeadbeefcafe'
    info['id'] = 'abadf100d0ddba11'
    md = {
        'x-account-meta-mydata': ('swift', '0000000000.00000'),
        'x-other-something': ('boo', '0000000000.00000')
    }
    out = StringIO()
    with mock.patch('sys.stdout', out):
        print_db_info_metadata('account', info, md)
    # NOTE(review): the line breaks inside exp_out were reconstructed from a
    # reflowed source; the comparison below is order-insensitive (sorted
    # lines) but whitespace-sensitive -- confirm against the original file.
    exp_out = '''Path: /acct
Account: acct
Account Hash: dc5be2aa4347a22a0fee6bc7de505b47
Metadata:
Created at: 1970-01-01T00:01:40.100000 (100.1)
Put Timestamp: 1970-01-01T00:01:46.300000 (106.3)
Delete Timestamp: 1970-01-01T00:01:47.900000 (107.9)
Status Timestamp: 1970-01-01T00:01:48.300000 (108.3)
Container Count: 3
Object Count: 20
Bytes Used: 42
Chexor: abaddeadbeefcafe
UUID: abadf100d0ddba11
X-Other-Something: boo
No system metadata found in db file
User Metadata: {'mydata': 'swift'}'''
    # lines are sorted before comparison, so header ordering is not pinned
    self.assertEqual(sorted(out.getvalue().strip().split('\n')),
                     sorted(exp_out.split('\n')))
    # container DB info, including storage policy and reported stats
    info = dict(
        account='acct',
        container='cont',
        storage_policy_index=0,
        created_at='0000000100.10000',
        put_timestamp='0000000106.30000',
        delete_timestamp='0000000107.90000',
        status_changed_at='0000000108.30000',
        object_count='20',
        bytes_used='42',
        reported_put_timestamp='0000010106.30000',
        reported_delete_timestamp='0000010107.90000',
        reported_object_count='20',
        reported_bytes_used='42',
        x_container_foo='bar',
        x_container_bar='goo')
    info['hash'] = 'abaddeadbeefcafe'
    info['id'] = 'abadf100d0ddba11'
    md = {'x-container-sysmeta-mydata': ('swift', '0000000000.00000')}
    out = StringIO()
    with mock.patch('sys.stdout', out):
        print_db_info_metadata('container', info, md)
    # NOTE(review): line breaks reconstructed here as well -- verify.
    exp_out = '''Path: /acct/cont
Account: acct
Container: cont
Container Hash: d49d0ecbb53be1fcc49624f2f7c7ccae
Metadata:
Created at: 1970-01-01T00:01:40.100000 (0000000100.10000)
Put Timestamp: 1970-01-01T00:01:46.300000 (0000000106.30000)
Delete Timestamp: 1970-01-01T00:01:47.900000 (0000000107.90000)
Status Timestamp: 1970-01-01T00:01:48.300000 (0000000108.30000)
Object Count: 20
Bytes Used: 42
Storage Policy: %s (0)
Reported Put Timestamp: 1970-01-01T02:48:26.300000 (0000010106.30000)
Reported Delete Timestamp: 1970-01-01T02:48:27.900000 (0000010107.90000)
Reported Object Count: 20
Reported Bytes Used: 42
Chexor: abaddeadbeefcafe
UUID: abadf100d0ddba11
X-Container-Bar: goo
X-Container-Foo: bar
System Metadata: {'mydata': 'swift'}
No user metadata found in db file''' % POLICIES[0].name
    self.assertEqual(sorted(out.getvalue().strip().split('\n')),
                     sorted(exp_out.split('\n')))