def get(self, analysis_id):
    """Renders the results page for a single analysis."""
    # The URL capture may carry a trailing path component; keep only the id
    analysis_id = int(analysis_id.split("/")[0])
    analysis = Analysis(analysis_id)
    check_analysis_access(self.current_user, analysis)

    # Group (command, results) pairs by their datatype
    jobres = defaultdict(list)
    for job_id in analysis.jobs:
        job = Job(job_id)
        jobres[job.datatype].append((job.command[0], job.results))

    # Build a human-readable label for each processed-data set that had
    # samples dropped from the analysis
    dropped = {}
    dropped_samples = analysis.dropped_samples
    if dropped_samples:
        for proc_data_id, samples in viewitems(dropped_samples):
            proc_data = ProcessedData(proc_data_id)
            key = "Data type %s, Study: %s" % (proc_data.data_type(),
                                               proc_data.study)
            dropped[key] = samples

    self.render("analysis_results.html", jobres=jobres,
                aname=analysis.name, dropped=dropped,
                basefolder=get_db_files_base_dir())
def get(self, analysis_id):
    """Renders analysis results and clears cached messages for the analysis.

    Parameters
    ----------
    analysis_id : str
        The analysis id captured from the URL
    """
    user = self.current_user
    analysis_id = int(analysis_id)
    check_analysis_access(User(user), analysis_id)

    analysis = Analysis(analysis_id)
    # group (command, results) pairs by their datatype
    jobres = defaultdict(list)
    for job in analysis.jobs:
        jobject = Job(job)
        jobres[jobject.datatype].append(
            (jobject.command[0], jobject.results))

    # human-readable label for each processed-data set with dropped samples
    dropped = {}
    for proc_data_id, samples in viewitems(analysis.dropped_samples):
        proc_data = ProcessedData(proc_data_id)
        key = "Data type %s, Study: %s" % (proc_data.data_type(),
                                           proc_data.study)
        dropped[key] = samples

    self.render("analysis_results.html", user=self.current_user,
                jobres=jobres, aname=analysis.name, dropped=dropped,
                basefolder=get_db_files_base_dir())

    # wipe out cached messages for this analysis. lrange always returns a
    # list (empty when the key is missing), so no None check is needed.
    r_server = Redis()
    key = '%s:messages' % self.current_user
    for message in r_server.lrange(key, 0, -1):
        if '"analysis": %d' % analysis_id in message:
            # NOTE(review): argument order matches the legacy redis-py
            # Redis.lrem(name, value, num) signature; the modern
            # StrictRedis API is lrem(name, count, value) -- confirm
            # which client class is in use
            r_server.lrem(key, message, 1)
def render(self, study):
    """Renders the processed-data tab for the given study."""
    available = [(pd_id, ProcessedData(pd_id))
                 for pd_id in study.processed_data()]
    return self.render_string(
        "study_description_templates/processed_data_tab.html",
        available_processed_data=available, study_id=study.id)
def test_get_filepath(self):
    """Correctly returns the filepaths to the processed files"""
    # verify against the known test-database fixture for ProcessedData(1)
    proc_data = ProcessedData(1)
    expected_path = join(self.db_test_pd_dir,
                         '1_study_1001_closed_reference_otu_table.biom')
    self.assertEqual(proc_data.get_filepaths(), [(expected_path, "biom")])
def _selected_parser(self, analysis):
    """builds dictionaries of selected samples from analysis object"""
    selected_samples = {}
    selected_proc_data = defaultdict(list)
    # bucket the analysis' processed data and samples by their study
    for proc_data_id, sample_ids in viewitems(analysis.samples):
        study_id = ProcessedData(proc_data_id).study
        selected_proc_data[study_id].append(proc_data_id)
        selected_samples[study_id] = set(sample_ids)
    return selected_proc_data, selected_samples
def test_link_filepaths_status_setter(self):
    """Setter moves link_filepaths_status through each valid state."""
    pd = ProcessedData(1)
    self.assertEqual(pd.link_filepaths_status, 'idle')
    # each transition should be reflected immediately by the property
    for status in ('linking', 'unlinking', 'failed: error'):
        pd._set_link_filepaths_status(status)
        self.assertEqual(pd.link_filepaths_status, status)
def test_data_type_id(self):
    """data_type with ret_id=True returns the numeric id."""
    observed = ProcessedData(1).data_type(ret_id=True)
    self.assertEqual(observed, 2)
def test_data_type(self):
    """data_type returns the textual data type."""
    observed = ProcessedData(1).data_type()
    self.assertEqual(observed, "18S")
def test_preprocessed_data(self):
    """Correctly returns the preprocessed_data"""
    proc_data = ProcessedData(1)
    self.assertEqual(proc_data.preprocessed_data, 1)
def test_get_filepath_ids(self):
    """get_filepath_ids returns the ids of the attached filepaths."""
    observed = ProcessedData(1).get_filepath_ids()
    self.assertEqual(observed, [10])
def test_data_type_id(self):
    """Correctly returns the data_type of preprocessed_data"""
    observed = ProcessedData(1).data_type(ret_id=True)
    self.assertEqual(observed, 2)
def test_data_type(self):
    """Correctly returns the data_type of preprocessed_data"""
    observed = ProcessedData(1).data_type()
    self.assertEqual(observed, "18S")
def test_processed_date(self):
    """processed_date matches the fixture's timestamp."""
    expected = datetime(2012, 10, 1, 9, 30, 27)
    self.assertEqual(ProcessedData(1).processed_date, expected)
def test_link_filepaths_status(self):
    """A fresh ProcessedData starts with an idle link status."""
    observed = ProcessedData(1).link_filepaths_status
    self.assertEqual(observed, 'idle')