def test_metadata_set_success(self):
    """verify successful setting of key,val pair"""
    json_str = """{"backup-today": "2019-01-02", "latest-complete": "2019-01-02-012345"}"""
    metadata = MetaData(self.log, self.comms, self.settings, json_str)
    metadata.set('backup-today', "2019-12-12")
    self.assertEqual(metadata.get("backup-today"), "2019-12-12")
    self.assertEqual(self.log.getVal('info'), '')
def test_validate_multiNestedTagsWithPathDefinition_validMetaTag(self):
    tags = {"test8/multi_inner": self.multi_nest_tag_not_empty_path2}
    validator = MetaDataValidator(self.x, tags)
    validator.set_console_logger(self.c_logger)
    validator.set_file_logger(self.f_logger)
    meta = MetaData()
    validator.validate(meta)
    self.assertEqual(meta.is_valid(), True, "Invalid tags")
def test_validate_uniqueNestedTagsWithoutPathDefinition_validMetaData(self):
    tags = {"unique_inner": self.unique_nest_tag_not_empty}
    validator = MetaDataValidator(self.x, tags)
    validator.set_console_logger(self.c_logger)
    validator.set_file_logger(self.f_logger)
    meta = MetaData()
    validator.validate(meta)
    self.assertEqual(meta.is_valid(), True, "Invalid tags")
def test_validate_multiNestedTagsWithoutPathDefinition_noDistinctionFailsAlwaysIfOneFails(self):
    tags = {"multi_inner": self.multi_nest_tag_not_empty_1}
    validator = MetaDataValidator(self.x, tags)
    validator.set_console_logger(self.c_logger)
    validator.set_file_logger(self.f_logger)
    meta = MetaData()
    validator.validate(meta)
    self.assertNotEqual(meta.is_valid(), True, "Tags must not be valid because one is invalid")
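# The five-line validator setup repeats verbatim across these validation tests.
# A small helper on the test class, sketched here (hypothetical, not in the
# original code), would remove that duplication:
def _validated_meta(self, tags):
    validator = MetaDataValidator(self.x, tags)
    validator.set_console_logger(self.c_logger)
    validator.set_file_logger(self.f_logger)
    meta = MetaData()
    validator.validate(meta)
    return meta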
def disable_temp_data():
    Course.db_setprefix('')
    Exam.db_setprefix('')
    ExamInfo.db_setprefix('')
    Path.db_setprefix('')
    Student.db_setprefix('')
    Applicant.db_setprefix('')
    CourseSemesterInfo.db_setprefix('')
    MetaData.db_setprefix('')
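# For reference, a minimal sketch of the enable_temp_data() counterpart that
# swap_temp_to_op() below relies on. The 'temp_' prefix is an assumption, not
# taken from the actual implementation.
def enable_temp_data():
    Course.db_setprefix('temp_')
    Exam.db_setprefix('temp_')
    ExamInfo.db_setprefix('temp_')
    Path.db_setprefix('temp_')
    Student.db_setprefix('temp_')
    Applicant.db_setprefix('temp_')
    CourseSemesterInfo.db_setprefix('temp_')
    MetaData.db_setprefix('temp_')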
def test_validate_nonOptionalTag_failsForInvalidTag(self):
    tags = {"test9": self.invalid_non_optional_tag}
    validator = MetaDataValidator(self.x, tags)
    validator.set_console_logger(self.c_logger)
    validator.set_file_logger(self.f_logger)
    meta = MetaData()
    validator.validate(meta)
    self.assertEqual(meta.is_valid(), False, "Valid tags")
def test_validate_defaultEmptyXmlTagsWithAttributes_validMetaData(self):
    tags = {"test5": self.tag_empty_attribute, "test6": self.tag_not_empty_attribute}
    validator = MetaDataValidator(self.x, tags)
    validator.set_console_logger(self.c_logger)
    validator.set_file_logger(self.f_logger)
    meta = MetaData()
    validator.validate(meta)
    self.assertEqual(meta.is_valid(), True, "Invalid tags")
def test_validate_defaultXmlTags_validMetaData(self):
    tags = {"test1": self.tag_not_empty_mapped, "test3": self.tag_empty_not_mapped}
    validator = MetaDataValidator(self.x, tags)
    validator.set_console_logger(self.c_logger)
    validator.set_file_logger(self.f_logger)
    meta = MetaData()
    validator.validate(meta)
    self.assertEqual(meta.is_valid(), True, "Invalid tags")
def test_validate_optionalTag_doesntFailForInvalidTag(self):
    tags = {"test9": self.invalid_optional_tag}
    validator = MetaDataValidator(self.x, tags)
    validator.set_console_logger(self.c_logger)
    validator.set_file_logger(self.f_logger)
    meta = MetaData()
    validator.validate(meta)
    self.assertEqual(meta.is_valid(), True, "Invalid tags")
    if "optional_attr9" not in meta.meta_data():
        raise Exception("Wrong attributes")
def test_validate_defaultXmlTags_correctMappedNames(self):
    tags = {"test1": self.tag_not_empty_mapped, "test4": self.tag_empty_mapped}
    validator = MetaDataValidator(self.x, tags)
    validator.set_console_logger(self.c_logger)
    validator.set_file_logger(self.f_logger)
    meta = MetaData()
    validator.validate(meta)
    if "mapped1" not in meta.meta_data_info() or "mapped4" not in meta.meta_data_info():
        raise Exception("Meta data name was not mapped correctly")
def test_replaceExistingName_noDuplicate_originalNameUsed(self):
    meta_data = MetaData()
    meta_data.add_meta_data("Original", "Original")
    meta_data.add_meta_data("Original", "Original1")
    meta_data.add_meta_data("Original_X", "OriginalX")
    self.assertEqual(len((meta_data.meta_data())['Original']), 2, "Wrong number of elements in list")
    self.assertEqual((meta_data.meta_data())['Original_X'], "OriginalX", "Wrong name")
def test_metadata_get_exception(self):
    """verify get checks for invalid keys"""
    json_str = """{"backup-today": "", "latest-complete": ""}"""
    metadata = MetaData(self.log, self.comms, self.settings, json_str)
    with self.assertRaises(CrashPlanError) as cpe:
        metadata.get("test")
    self.assertIsInstance(cpe.exception, CrashPlanError)
    self.assertEqual(repr(cpe.exception.value), "'(get) Invalid meta data key - test'")
    self.assertEqual(self.log.getVal('info'), '')
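# The exception tests access cpe.exception.value, which implies CrashPlanError
# carries its message in a .value attribute. A minimal sketch consistent with
# that usage (hypothetical, not the project's actual definition):
class CrashPlanError(Exception):
    def __init__(self, value):
        self.value = value  # e.g. "(get) Invalid meta data key - test"

    def __str__(self):
        return repr(self.value)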
def swap_temp_to_op():
    enable_temp_data()
    Course.get_collection().rename('courses', dropTarget=True)
    Exam.get_collection().rename('exams', dropTarget=True)
    ExamInfo.get_collection().rename('examInfos', dropTarget=True)
    Path.get_collection().rename('paths', dropTarget=True)
    Student.get_collection().rename('students', dropTarget=True)
    Applicant.get_collection().rename('applicants', dropTarget=True)
    CourseSemesterInfo.get_collection().rename('courseSemesterInfos', dropTarget=True)
    MetaData.get_collection().rename('metadata', dropTarget=True)
    disable_temp_data()
def test_validate_mappedTagsWithAttributes_metaDataHasTheRightContent(self):
    tags = {"test1": self.tag_not_empty_mapped, "test5": self.tag_empty_attribute}
    validator = MetaDataValidator(self.x, tags)
    validator.set_console_logger(self.c_logger)
    validator.set_file_logger(self.f_logger)
    meta = MetaData()
    validator.validate(meta)
    self.assertEqual(meta.is_valid(), True, "Invalid tags")
    if "test5_attr51" not in meta.meta_data() or "test5_attr52" not in meta.meta_data():
        raise Exception("Wrong attributes")
def test_replaceExistingName_twoDuplicatesInMap_AppendNumberThreeToTheName(self):
    meta_data = MetaData()
    meta_data.add_meta_data("Original", "Original")
    meta_data.add_meta_data("Original", "Original1")
    meta_data.add_meta_data("Original", "Original2")
    meta_data.add_meta_data("Original", "Original3")
    self.assertEqual(len((meta_data.meta_data())['Original']), 4, "Wrong number of elements in list")
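# A plausible sketch of the add_meta_data() duplicate handling that the two
# test_replaceExistingName tests above exercise: the first value for a name is
# stored as a scalar, and further values under the same name are collected into
# a list (so two adds yield a list of length 2, four adds a list of length 4).
# This is inferred from the assertions only, not from the real implementation.
def add_meta_data(self, name, value):
    data = self.meta_data()
    if name not in data:
        data[name] = value                # first occurrence: plain value
    elif isinstance(data[name], list):
        data[name].append(value)          # third and later: extend the list
    else:
        data[name] = [data[name], value]  # second occurrence: promote to list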
def test_metadata_writeMetaData_1(self):
    """verify writeMetaData writes a file and copies it remotely"""
    json_str = """{"backup-today": "2019-01-02", "latest-complete": "2019-01-02-012345"}"""
    metadata = MetaData(self.log, self.comms, self.settings, json_str)
    self.assertEqual(self.log.getVal('info'), '')
    with patch("builtins.open", mock_open(read_data="data")) as mock_file:
        metadata.writeMetaData()
        mock_file.assert_called_with('/Users/judge/.metadata', 'w')
        handle = mock_file()
        handle.write.assert_called_once_with(
            '{"backup-today": "2019-01-02", "latest-complete": "2019-01-02-012345"}\n'
        )
    self.assertEqual(self.comms.getFilename(), '/Users/judge/.metadata')
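# A minimal sketch of what writeMetaData() might do to satisfy the test above:
# dump the dict as JSON plus a trailing newline into <HOME>/.metadata, then hand
# the path to the comms object for the remote copy. Everything except the
# writeMetaData name and self.meta is an assumption (in particular putFile(),
# which stands in for whatever call makes comms.getFilename() return the path).
def writeMetaData(self):
    # assumes json and os are imported at module level
    path = os.path.join(os.environ['HOME'], ".metadata")
    with open(path, 'w') as fp:
        fp.write(json.dumps(self.meta) + "\n")
    self.comms.putFile(path)  # hypothetical comms call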
def set_loading(self, path):
    from MetaData import MetaData
    try:
        meta = MetaData(path)
        preview = meta.get(meta.KEY_PREVIEW)
    except Exception:
        preview = ""
    dirname = os.path.dirname(path)
    icon = os.path.join(dirname, preview)
    self.__label.set_markup("<big><b>Loading:</b>\n%s</big>" % path)
    self.__icon.hide()
    if preview:
        try:
            # for him who loves small try blocks: you don't gain anything
            # from splitting up this block except for badly readable code ;)
            from utils import vfs
            data = vfs.read_entire_file(icon)
            loader = gtk.gdk.PixbufLoader()
            loader.write(data, len(data))
            loader.close()
            pbuf = loader.get_pixbuf()

            # scale icon down to 48px height while preserving aspect ratio,
            # capping the width at 96px
            width = pbuf.get_width()
            height = pbuf.get_height()
            scale = 48 / float(height)
            width *= scale
            if width > 96:
                width = 96
            if abs(scale - 1.0) > 0.001:
                # 3 == gtk.gdk.INTERP_HYPER (highest-quality interpolation)
                pbuf = pbuf.scale_simple(int(width), 48, 3)
            self.__icon.set_from_pixbuf(pbuf)
            self.__icon.show()
        except Exception:
            pass
    # end if

    self.resize(10, 10)
    self.set_size_request(-1, -1)
def loop_through(args):
    if args.problem_type == 'classification':
        all_dataset_id = classification_datasets_id
    elif args.problem_type == 'regression':
        all_dataset_id = regression_datasets_id
    for data_id in all_dataset_id:
        print(data_id)
        md = MetaData(data_id)
        metafeature_dict = md.meta_features()
        metafeature_dict['data_id'] = data_id
        filename = 'metafeatures_{}.csv'.format(args.problem_type)
        with open(filename, 'a') as f:
            # write the header only when the file is empty (f.tell() == 0)
            pd.DataFrame(metafeature_dict).to_csv(f, mode='a', header=f.tell() == 0, index=False)
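# A minimal sketch of the argument parsing loop_through() expects. The
# --problem_type flag name is inferred from the attribute access above and is
# an assumption about the real CLI.
import argparse

def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('--problem_type',
                        choices=['classification', 'regression'],
                        required=True)
    return parser.parse_args()

# usage (hypothetical script name): python collect_metafeatures.py --problem_type classification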
def test_metadata_set_exception(self):
    """verify set checks for invalid keys"""
    json_str = ""
    metadata = MetaData(self.log, self.comms, self.settings, json_str)
    self.assertDictEqual(metadata.meta, {
        'backup-today': '',
        'latest-complete': ''
    })
    with self.assertRaises(CrashPlanError) as cpe:
        metadata.set("test", "t")
    self.assertIsInstance(cpe.exception, CrashPlanError)
    self.assertEqual(repr(cpe.exception.value), "'(set) Invalid meta data key - test'")
    #self.assertEqual(self.log.getVal('info').split('|')[0], 'MetaData added missing expected key latest-complete.')
    #self.assertEqual(self.log.getVal('info').split('|')[1], 'MetaData added missing expected key backup-today.')
    self.assertEqual(self.log.getVal('info'), '')
def test_metadata_constructor(self):
    """verify __init__() creates an empty dict if no json is given"""
    metadata = MetaData(self.log, self.comms, self.settings)
    self.assertDictEqual(metadata.meta, {
        'backup-today': '',
        'latest-complete': ''
    })
    #self.assertEqual(self.log.getVal('info').split('|')[0], 'MetaData added missing expected key latest-complete.')
    #self.assertEqual(self.log.getVal('info').split('|')[1], 'MetaData added missing expected key backup-today.')
    self.assertEqual(self.log.getVal('info'), '')
def test_metadata__repr__success(self):
    """verify __repr__ returns as expected"""
    # "latest-complate" is deliberately misspelled: the constructor should drop
    # the unexpected key, so __repr__ will not match the well-formed string below
    json_str = """{"backup-today": "2019-01-02", "latest-complate": "2019-01-02-012345"}"""
    metadata = MetaData(self.log, self.comms, self.settings, json_str)
    #self.assertEqual(self.log.getVal('info').split('|')[0], "MetaData added missing expected key latest-complete.")
    self.assertEqual(
        self.log.getVal('info').split('|')[0],
        "MetaData removed unexpected key latest-complate.")
    self.assertNotEqual(
        repr(metadata),
        '{"backup-today": "2019-01-02", "latest-complete": "2019-01-02-012345"}\n'
    )
def test_metadata_readMetaData_1(self):
    """verify readMetaData"""
    jstr = """{"backup-today": "2019-01-02", "latest-complete": "2019-01-02-012345"}"""
    os.makedirs(os.path.join(os.environ['HOME'], "temp_myocp"), mode=0o755, exist_ok=True)
    with open(os.path.join(os.environ['HOME'], "temp_myocp", ".metadata"), 'w') as fp:
        fp.write(jstr)
    metadata = MetaData(self.log, self.comms, self.settings)
    metadata.settings.set("settings-dir", "temp_myocp")
    self.assertDictEqual(metadata.meta, {
        "backup-today": "",
        "latest-complete": ""
    })
    metadata.readMetaData()
    self.assertDictEqual(metadata.meta, {
        "backup-today": "2019-01-02",
        "latest-complete": "2019-01-02-012345"
    })
    shutil.rmtree(os.path.join(os.environ['HOME'], "temp_myocp"))
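# A plausible sketch of readMetaData(), inferred from the readMetaData tests in
# this section: load JSON from .metadata under the configured settings dir into
# self.meta, falling back to the remote copy (which yields "{}" for a missing
# file) when the local file is absent. The settings.get() accessor and the
# fallback structure are assumptions, not the actual implementation.
def readMetaData(self):
    path = os.path.join(os.environ['HOME'],
                        self.settings.get("settings-dir"), ".metadata")
    try:
        with open(path) as fp:
            self.meta = json.loads(fp.read())
    except IOError:
        self.meta = json.loads(self.readRemoteMetaData())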
def finishUp(self):
    """write .metadata file"""
    if self.backup_successful:
        meta2 = MetaData(self.log, self.comms, self.settings, "")
        meta2.set("latest-complete", TimeDate.datedir())
        meta2.set("backup-today", TimeDate.today())
        try:
            meta2.writeMetaData()
            # move WORKING to Latest Complete Date
            src = os.path.join(self.settings('backup-destination'),
                               self.settings('local-hostname'), "WORKING")
            dest = os.path.join(self.settings('backup-destination'),
                                self.settings('local-hostname'), TimeDate.datedir())
            self.comms.remoteCommand(f"mv {src} {dest}")
        except CrashPlanError as exc:
            print(exc)
            self.log.error(exc)
def main():
    options = parse_args(argv)
    if not isinstance(options.fragment_lengths, list):
        options.fragment_lengths = [options.fragment_lengths]
    tmpdir, resultsdir, hmmdir = create_dirs(options)
    logfile = '%s/model_creation_optimization.log' % (path.abspath(options.output_dir))
    logging.basicConfig(filename=logfile,
                        filemode='w',
                        format='%(asctime)s %(levelname)s %(message)s',
                        level=logging.DEBUG)
    if options.only_full_length:
        full_lengths = [True]
    elif options.only_fragments:
        full_lengths = [False]
    else:
        full_lengths = [True, False]
    for i, full_length in enumerate(full_lengths):
        # TODO handle if the input is already fragments
        est_obj = MetaData(full_length, None, None, None, tmpdir, resultsdir, hmmdir)
        if options.sensitivity:
            estimate_sensitivity(options.reference_sequences, est_obj, options)
        if options.specificity:
            estimate_specificity(options, est_obj)
        if not options.sensitivity:
            summarize_sens_or_spec(est_obj, options, False)
        elif not options.specificity:
            summarize_sens_or_spec(est_obj, options, True)
        else:
            calculate_performance(est_obj, 1, options)
        # if specificity was not requested, create an HMM model
        if not options.specificity:
            create_hmm(options, est_obj)
def json_files_decoder(self):
    # Decodes the files JSON
    meta = MetaData(self.get_json_files_request(), self.get_files_url())
    meta.decode_json()
    self.decoded_json_files = meta.get_decoded()
def json_protocols_decoder(self):
    # Decodes the content of the protocols request
    meta = MetaData(self.get_json_protocols_request(), self.get_protocols_url())
    meta.decode_json()
    self.decoded_json_protocols = meta.get_decoded()
if __name__ == "__main__": x = Link_Generator("json") kw = ["cancer"] x.insert_keywords(kw) dic = {"samplecount" : "[200 TO 500]"} #dic = {} x.insert_criteria(dic) x.url_generator("experiments") # Creates the URL to be requested request = File_Requester(x.get_url()) #print(request.get_url()) request.do_request() request.get_request() request.do_content() # Returns the content of the requested URL #print(request.get_content()) decoder = MetaData(request.get_content(), request.get_url()) decoder.decode_json() # Decodes the content JSON #print(decoder.get_decoded()) exp = Experiment(decoder.get_decoded()["experiments"]["experiment"][13]) # Created an Experiment Instance #print(exp.__dict__) #print(exp.accession) #exp.do_files_url() #print(exp.get_files_url()) #exp.json_files_requester() #print(exp.get_json_files_request()) #exp.json_files_decoder() #print(exp.get_decoded_json_files()) #exp.create_files() #print(exp.files) #exp.download_all_files("C:/Users/utilizador/Google Drive/drive/Bioinformática/1_ano/2_Semestre/Projeto/Scripts/Downloads") exp.do_idf_file("C:/Users/utilizador/Google Drive/drive/Bioinformática/1_ano/2_Semestre/Projeto/Scripts/Downloads") # Downloads and creates an data with IDF data
def test_metadata_readRemoteMetaData_2(self):
    """verify readRemoteMetaData returns empty metadata on error"""
    #jstr = """{"backup-today": "2019-01-02", "latest-complete": "2019-01-02-012345"}"""
    fakecomms = FakeRemoteComms(1, "No such file or directory")
    metadata = MetaData(self.log, fakecomms, self.settings)
    self.assertEqual(metadata.readRemoteMetaData(), "{}")
def test_metadata_readRemoteMetaData_1(self):
    """verify readRemoteMetaData"""
    jstr = """{"backup-today": "2019-01-02", "latest-complete": "2019-01-02-012345"}"""
    fakecomms = FakeRemoteComms(0, jstr)
    metadata = MetaData(self.log, fakecomms, self.settings)
    self.assertEqual(metadata.readRemoteMetaData(), jstr)
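# A sketch of readRemoteMetaData() consistent with the FakeRemoteComms(status,
# output) usage in the two tests above: a failing remote read yields "{}", a
# successful one yields the raw JSON string. The remote command text and the
# (status, output) return convention are assumptions for illustration only.
def readRemoteMetaData(self):
    status, output = self.comms.remoteCommand(
        "cat %s/.metadata" % self.settings('backup-destination'))  # hypothetical call
    if status != 0:
        return "{}"  # e.g. "No such file or directory"
    return output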
def test_metadata_readMetaData_2(self):
    """verify readMetaData returns empty metadata if the remote call reports no such file"""
    json_str = """{}"""
    metadata = MetaData(self.log, self.comms, self.settings, json_str)
    metadata.readMetaData()
    self.assertDictEqual(metadata.meta, {})