def take_action(self, parsed_args):
    """Get JSON with runs for a file and prepare it for tabular output.

    Returns a (header, rows) tuple for the list formatter.
    Raises Exception when the service returned no payload at all.
    """
    ids = utils.key_len(parsed_args.id, "ID")
    runs = self.app.cosmosid.sample_run_list(ids)
    header = ['id', 'status', 'created']
    if not runs:
        # no payload at all from the service
        raise Exception("Exception occurred.")
    if not runs['runs']:
        LOGGER.info('\nThere are no runs for file %s (id: %s)',
                    runs['file_name'], ids)
        # one blank row keeps the table renderer from choking on no data
        return (header, [[' ', ' ', ' ']])

    def _set_date(inp):
        """Render a (key, ISO-8601 UTC string) pair as local 'Y-m-d H:M:S'."""
        try:
            utc_time_tuple = time.strptime(inp[1], "%Y-%m-%dT%H:%M:%S.%f")
        except ValueError:
            # timestamps sometimes arrive without fractional seconds
            utc_time_tuple = time.strptime(inp[1], "%Y-%m-%dT%H:%M:%S")
        local_time = calendar.timegm(utc_time_tuple)
        return dt.fromtimestamp(local_time).strftime('%Y-%m-%d %H:%M:%S')

    def _del_none(inp):
        """Replace a missing value with 0 (int fields) or '-' (str fields)."""
        out = inp[1]
        if not out:
            out = 0 if field_maps[inp[0]][1] == 'int' else '-'
        return out

    def _convert(inp):
        """Apply the per-field formatter from field_maps to one run dict."""
        for item in inp.items():
            for k, v in field_maps.items():
                if item[0] == v[0]:
                    inp[item[0]] = field_maps[k][2](item)
                    break
        return inp

    # output field -> [json key, value type, formatter]
    field_maps = {
        'id': ['id', 'str', _del_none],
        'status': ['status', 'str', _del_none],
        'created': ['created', 'str', _set_date]}

    # we need just runs for output
    runs_data = [_convert(run) for run in runs['runs']]

    # apply requested ordering; descending unless --up is given
    if parsed_args.order and parsed_args.order.lower() in header:
        runs_data = sorted(
            runs_data,
            key=itemgetter(field_maps[parsed_args.order.lower()][0]),
            reverse=(not parsed_args.up))
    for_output = [[item[field_maps[field][0]] for field in header]
                  for item in runs_data]
    LOGGER.info('\nRuns list for file %s (id: %s)', runs['file_name'], ids)
    return (header, for_output)
def __init__(self, api_key=None, base_url=BASE_URL):
    """Set up the client, resolving the API key and base URL."""
    try:
        api_key = api_key or self.__auth()
        api_key = utils.key_len(api_key)
    except ValidationError as err:
        # best effort: record the problem and keep whatever key we have
        utils.log_traceback(err)
    # fall back to the class default when no URL was supplied
    effective_url = base_url or self.BASE_URL
    if effective_url != self.BASE_URL:
        self.logger.info("Using base URL: %s", effective_url)
    self.base_url = effective_url
    self.api_key = api_key
def take_action(self, parsed_args):
    """Save a report for a file (optionally a specific run) to disk.

    Raises Exception when the service did not return a saved report.
    """
    f_id = utils.key_len(parsed_args.id, "ID") if parsed_args.id else None
    r_id = (utils.key_len(parsed_args.run_id, "ID")
            if parsed_args.run_id else None)
    output_file = parsed_args.output if parsed_args.output else None
    output_dir = parsed_args.dir if parsed_args.dir else None
    if not r_id:
        # no run id given: the service picks the latest run of the file
        LOGGER.info('Processing reports for the latest run of file %s ...',
                    f_id)
    else:
        LOGGER.info('Processing reports for the run_id %s of file %s ...',
                    r_id, f_id)
    response = self.app.cosmosid.report(file_id=f_id, run_id=r_id,
                                        output_file=output_file,
                                        output_dir=output_dir)
    if not response:
        raise Exception('Exception occurred during report creation.')
    LOGGER.info('\nReport has been saved to: %s', response['saved_report'])
    LOGGER.info('Task Done')
def take_action(self, parsed_args):
    """Get JSON with folder content and prepare it for tabular output.

    Returns a (header, rows) tuple for the list formatter.
    Raises Exception when the service returned no payload at all.
    """
    parent = utils.key_len(parsed_args.parent)
    folder_content = self.app.cosmosid.directory_list(parent)
    # numeric content_type codes -> human readable labels
    content_type_map = {
        '1': 'Folder',
        '2': 'Metagenomics Sample',
        '3': 'MRSA Sample',
        '4': 'Listeria Sample',
        '5': 'Amplicon 16S Sample',
        '6': 'Amplicon ITS Sample',
        '7': 'Microbiome Standard'
    }
    header = ['type', 'name', 'id', 'status', 'reads', 'created']
    if not folder_content:
        # no payload at all from the service
        raise Exception("Exception occurred.")
    if not folder_content['items']:
        LOGGER.info('\nFolder %s (id: %s) is empty',
                    folder_content['name'], parent)
        # one blank row keeps the table renderer from choking on no data
        return (header, [[' ', ' ', ' ', ' ', ' ', ' ']])

    def _set_date(input_date):
        """Render a (key, ISO-8601 UTC string) pair as local 'Y-m-d H:M:S'."""
        try:
            utc_time_tuple = time.strptime(input_date[1],
                                           "%Y-%m-%dT%H:%M:%S.%f")
        except ValueError:
            # timestamps sometimes arrive without fractional seconds
            utc_time_tuple = time.strptime(input_date[1],
                                           "%Y-%m-%dT%H:%M:%S")
        local_time = calendar.timegm(utc_time_tuple)
        return dt.fromtimestamp(local_time).strftime('%Y-%m-%d %H:%M:%S')

    def _del_none(inp):
        """Replace a missing value with 0 (int fields) or '-' (str fields)."""
        # direct lookup works because every _del_none field's map key equals
        # its json key (same form as the sibling list commands)
        out = inp[1]
        if not out:
            out = 0 if field_maps[inp[0]][1] == 'int' else '-'
        return out

    def _set_dim(inp):
        """Format a byte/read count for display; '-' when empty or zero."""
        out = inp if inp else 0
        out = utils.convert_size(out)
        return out if out != '0B' else '-'

    def _set_type(inp):
        """Map a numeric content_type code to its label; unknown codes pass through."""
        ctype = (content_type_map[str(inp[1])]
                 if content_type_map.get(str(inp[1]))
                 else inp[1])
        return ctype

    def _convert(inp):
        """Apply the per-field formatter from field_maps to one item dict."""
        for item in inp.items():
            for key, val in field_maps.items():
                if item[0] == val[0]:
                    inp[item[0]] = field_maps[key][2](item)
                    break
        return inp

    # output field -> [json key, value type, formatter]
    field_maps = {
        'type': ['content_type', 'str', _set_type],
        'id': ['id', 'str', _del_none],
        'name': ['name', 'str', _del_none],
        'status': ['status', 'str', _del_none],
        'reads': ['reads', 'int', _del_none],
        'created': ['created', 'str', _set_date]
    }

    # we need just items for output
    items_data = [_convert(item) for item in folder_content['items']]

    # apply requested ordering; descending unless --up is given
    if parsed_args.order and parsed_args.order.lower() in header:
        items_data = sorted(
            items_data,
            key=itemgetter(field_maps[parsed_args.order.lower()][0]),
            reverse=(not parsed_args.up))
    # 'reads' is rendered through _set_dim at output time, other fields as-is
    for_output = [[item[field_maps[f][0]] if f != 'reads'
                   else _set_dim(item[field_maps[f][0]])
                   for f in header]
                  for item in items_data]
    LOGGER.info('\nContent of the Folder %s (id: %s)',
                folder_content['name'], parent)
    return (header, for_output)
def take_action(self, parsed_args):
    """Get JSON with analysis for a file and prepare it for tabular output.

    Returns a (header, rows) tuple for the list formatter.
    Raises Exception when the service returned no payload at all.
    """
    f_id = utils.key_len(parsed_args.id, "ID") if parsed_args.id else None
    r_id = (utils.key_len(parsed_args.run_id, "ID")
            if parsed_args.run_id else None)
    analysis_content = self.app.cosmosid.analysis_list(file_id=f_id,
                                                       run_id=r_id)
    header = ['id', 'database', 'strains', 'strains_filtered', 'status']
    if not analysis_content:
        # no payload at all from the service
        raise Exception("Exception occurred.")
    if not analysis_content['analysis']:
        LOGGER.info('\nThere are no analysis for run id %s',
                    analysis_content['run_id'])
        # one blank row keeps the table renderer from choking on no data
        return (header, [[' ', ' ', ' ', ' ', ' ']])

    def _set_date(inp):
        """Render an ISO-8601 UTC timestamp string as local 'Y-m-d H:M:S'."""
        try:
            utc_time_tuple = time.strptime(inp, "%Y-%m-%dT%H:%M:%S.%f")
        except ValueError:
            # timestamps sometimes arrive without fractional seconds
            utc_time_tuple = time.strptime(inp, "%Y-%m-%dT%H:%M:%S")
        local_time = calendar.timegm(utc_time_tuple)
        return dt.fromtimestamp(local_time).strftime('%Y-%m-%d %H:%M:%S')

    def _del_none(inp):
        """Replace a missing value with 0 (int fields) or '-' (str fields)."""
        out = inp[1]
        if not out:
            out = 0 if field_maps[inp[0]][1] == 'int' else '-'
        return out

    def _convert(inp):
        """Apply per-field formatters; 'database' becomes its description."""
        database = inp['database']
        db_description = database['description']
        for item in inp.items():
            for k, v in field_maps.items():
                if item[0] == v[0]:
                    inp[item[0]] = (field_maps[k][2](item)
                                    if item[0] != 'database'
                                    else db_description)
                    break
        return inp

    # output field -> [json key, value type, formatter]
    field_maps = {
        'id': ['id', 'str', _del_none],
        'database': ['database', 'str', _del_none],
        'status': ['status', 'str', _del_none],
        'strains': ['strains', 'int', _del_none],
        'strains_filtered': ['strains_filtered', 'int', _del_none]
    }
    run_metadata = analysis_content['run_meta']

    # we need just items for output
    items_data = [_convert(item) for item in analysis_content['analysis']]

    # apply requested ordering; descending unless --up is given
    if parsed_args.order and parsed_args.order.lower() in header:
        items_data = sorted(
            items_data,
            key=itemgetter(field_maps[parsed_args.order.lower()][0]),
            reverse=(not parsed_args.up))
    for_output = [[item[field_maps[field][0]] for field in header]
                  for item in items_data]
    if not r_id:
        LOGGER.info('Run ID is not entered. Analysis list from latest run '
                    '(%s) for file %s',
                    _set_date(run_metadata['created']),
                    run_metadata['file']['name'])
    LOGGER.info('\nAnalysis list for file %s and run id %s (%s)',
                run_metadata['file']['name'], run_metadata['id'],
                _set_date(run_metadata['created']))
    return (header, for_output)
def take_action(self, parsed_args):
    """Send files to analysis.

    Groups input files into (presumed) paired-end samples by shared base
    name + extension, checks the account's credit balance against the
    estimated cost, then uploads each sample for analysis.
    """
    parent_id = parsed_args.parent if parsed_args.parent else None
    parent_id = utils.key_len(parent_id, "ID")
    directory = parsed_args.dir if parsed_args.dir else None
    files = parsed_args.file if parsed_args.file else None
    # bail out early when the account has no credits at all
    credits = self.app.cosmosid.profile()['credits']
    if credits <= 0:
        LOGGER.info("\nYou don't have enough credits to run analysis")
        return
    # exactly one of --file / --dir must be given
    if (files and directory) or (not files and not directory):
        LOGGER.info("\nInvalid input parameters. Files or directory must be specified."
                    " It is not permitted to specify both file and directory in one command.")
        return
    elif files:
        if not all([os.path.exists(f) for f in files]):
            LOGGER.error('Not all specified files exist: %s', files)
            return
    else:
        # directory mode: take every regular file directly inside it
        if isdir(directory):
            files = [join(directory, f) for f in listdir(directory)
                     if isfile(join(directory, f))]
            LOGGER.info("\nReading files from directory {directory}".format(directory=directory))
        else:
            LOGGER.info("\nSpecified path {directory} is not a directory.".format(directory=directory))
            return
    pairs = []
    # sorting puts files with the same base name next to each other so the
    # single pass below can group them into one sample
    files = sorted(files)
    prev_fname, prev_ext = self.get_base_file_name_and_extension(files[0])
    if prev_ext not in self.allowed_extensions:
        LOGGER.info('not supported file extension for file {}'.format(files[0]))
        return
    # current group being accumulated; flushed to `pairs` on name change
    paired_ended = {'files': [files[0]], 'sample_name': prev_fname, 'ext': prev_ext}
    for fname in files[1:]:
        cur_fname, cur_ext = self.get_base_file_name_and_extension(fname)
        if cur_ext not in self.allowed_extensions:
            LOGGER.info('not supported file extension for file {}'.format(fname))
            return
        if cur_fname == prev_fname and prev_ext == cur_ext:
            # same base name and extension -> same sample (paired-end mate)
            paired_ended['files'].append(fname)
        else:
            # name changed: close the previous group, start a new one
            pairs.append(paired_ended)
            paired_ended = {'files': [fname], 'sample_name': cur_fname, 'ext': cur_ext}
        prev_fname = cur_fname
        prev_ext = cur_ext
    # flush the last group accumulated by the loop
    pairs.append(paired_ended)
    # ask the service to price each sample (sizes summed over its files)
    pricing_req = []
    for pair in pairs:
        pricing_req.append({'sample_key': pair['sample_name'],
                            'extension': pair['ext'],
                            'file_sizes': [sum(
                                [os.path.getsize(f) for f in pair['files'] if os.path.exists(f)])]})
    cost = 0
    for price in self.app.cosmosid.pricing(data=pricing_req):
        # price keyed by analysis type chosen on the command line
        cost += price['pricing'][str(parsed_args.type)]
    if cost > credits:
        LOGGER.info("\nYou don't have enough credits to run analysis")
        return
    for pair in pairs:
        # In case some file don't have pair, we get this file and upload it as single sample
        if len(pair.get('files')) == 1:
            pair.update(sample_name=os.path.basename(pair.get('files')[0]))
        LOGGER.info('File uploading is started: %s', pair)
        file_id = self.app.cosmosid.upload_files(pair, parsed_args.type, parent_id)
        LOGGER.info('\nFile %s has been sent to analysis.', pair)
        LOGGER.info('Use File ID to get Analysis Result: %s', file_id)
    LOGGER.info('Task Done')