def __call__(self):
    """Export the analyses of the current context through the selected
    instrument's data interface.

    Redirects back to the context with a portal message when no instrument
    is selected, when the instrument has no data interface, or when the
    exporter module cannot be found.
    """
    instrument = self.context.getInstrument()
    if not instrument:
        self.context.plone_utils.addPortalMessage(
            _("You must select an instrument"), 'info')
        self.request.RESPONSE.redirect(self.context.absolute_url())
        return
    exim = instrument.getDataInterface()
    if not exim:
        self.context.plone_utils.addPortalMessage(
            _("Instrument has no data interface selected"), 'info')
        self.request.RESPONSE.redirect(self.context.absolute_url())
        return
    # exim refers to filename in instruments/
    if isinstance(exim, list):
        exim = exim[0]
    exim = exim.lower()
    # search instruments module for 'exim' module; resolve once and reuse
    # (the original resolved it twice)
    exim_module = instruments.getExim(exim)
    if not exim_module:
        self.context.plone_utils.addPortalMessage(
            _("Instrument exporter not found"), 'error')
        self.request.RESPONSE.redirect(self.context.absolute_url())
        return
    exporter = exim_module.Export(self.context, self.request)
    # the exporter writes its result to the response; the return value is
    # not used here
    exporter(self.context.getAnalyses())
def getDataInterfaces(context):
    """Return a DisplayList of the available data interfaces, with a
    leading empty 'None' option.
    """
    from bika.lims.exportimport import instruments
    pairs = [('', t(_('None')))]
    pairs.extend(
        (exim_id, instruments.getExim(exim_id).title)
        for exim_id in instruments.__all__)
    return DisplayList(pairs)
def getExim(exim_id):
    """Resolve *exim_id* to its interface object.

    The locally-defined 'biorad.tc20.tc20' interface is looked up among the
    members of this module; any other id is delegated to the stock
    ``instruments.getExim``. Returns None when the local lookup finds
    nothing.
    """
    if exim_id != 'biorad.tc20.tc20':
        return instruments.getExim(exim_id)
    this_module = sys.modules[__name__]
    matches = [
        member for _name, member in inspect.getmembers(this_module)
        if hasattr(member, '__name__') and member.__name__.endswith(exim_id)
    ]
    return matches[0] if matches else None
def __call__(self):
    """Dispatch a submitted import form.

    Setup-data uploads are handed to ``LoadSetupData``; otherwise the
    request is routed to the ``Import`` entry point of the selected
    instrument interface. Renders the template when nothing was submitted.
    """
    if 'submitted' not in self.request:
        return self.template()
    if 'setupfile' in self.request.form or \
       'setupexisting' in self.request.form:
        lsd = LoadSetupData(self.context, self.request)
        return lsd()
    exim = instruments.getExim(self.request['exim'])
    if not exim:
        # Guard added for consistency with the sibling import view:
        # previously a missing importer raised AttributeError on None.
        import json
        er_mes = "Importer not found for: %s" % self.request['exim']
        results = {'errors': [er_mes], 'log': '', 'warns': ''}
        return json.dumps(results)
    return exim.Import(self.context, self.request)
def getDataInterfaces(context):
    """Return the current list of data interfaces as a DisplayList, sorted
    case-insensitively by interface title, with a leading 'None' option.
    """
    from bika.lims.exportimport import instruments
    exims = []
    for exim_id in instruments.__all__:
        exim = instruments.getExim(exim_id)
        exims.append((exim_id, exim.title))
    # key= replaces the Python-2-only cmp-function sort; the resulting
    # order (case-insensitive by title) is identical
    exims.sort(key=lambda item: item[1].lower())
    exims.insert(0, ('', t(_('None'))))
    return DisplayList(exims)
def getImportDataInterfaces(context, import_only=False):
    """Return the current list of import data interfaces as a DisplayList.

    :param import_only: when True, interfaces without an ``Import`` entry
        point are skipped.
    """
    from bika.lims.exportimport import instruments
    exims = []
    for exim_id in instruments.__all__:
        exim = instruments.getExim(exim_id)
        # guard clause instead of the original `pass`/`else` inversion
        if import_only and not hasattr(exim, 'Import'):
            continue
        exims.append((exim_id, exim.title))
    # key= replaces the Python-2-only cmp-function sort; same ordering
    exims.sort(key=lambda item: item[1].lower())
    exims.insert(0, ('', t(_('None'))))
    return DisplayList(exims)
def __call__(self):
    """Handle the import form submit.

    Setup-data uploads go to ``LoadSetupData``; otherwise the selected
    instrument interface's ``Import`` is invoked. A missing importer is
    reported as a JSON error payload. Renders the template when nothing
    was submitted.
    """
    request = self.request
    if 'submitted' not in request:
        return self.template()
    form = request.form
    if 'setupfile' in form or 'setupexisting' in form:
        return LoadSetupData(self.context, request)()
    exim_id = request['exim']
    exim = instruments.getExim(exim_id)
    if exim:
        return exim.Import(self.context, request)
    er_mes = "Importer not found for: %s" % exim_id
    results = {'errors': [er_mes], 'log': '', 'warns': ''}
    return json.dumps(results)
def __call__(self):
    """Return, as JSON, the import data interfaces configured on the
    instrument identified by the 'instrument_uid' request parameter.

    An empty list is returned when the authenticator check fails or when
    the UID does not resolve to exactly one active instrument.
    """
    interfaces = []
    try:
        plone.protect.CheckAuthenticator(self.request)
    except Forbidden:
        return json.dumps(interfaces)
    from bika.lims.exportimport import instruments
    bsc = getToolByName(self, 'bika_setup_catalog')
    brains = bsc(portal_type='Instrument',
                 UID=self.request.get('instrument_uid', ''),
                 inactive_state='active')
    if brains and len(brains) == 1:
        instrument = brains[0].getObject()
        for iface_id in instrument.getImportDataInterface():
            if not iface_id:
                continue
            exim = instruments.getExim(iface_id)
            interfaces.append({'id': iface_id, 'title': exim.title})
    return json.dumps(interfaces)
def __call__(self):
    """Run the auto-import of instrument result files.

    For every active instrument (or just the one given via the ``i_uid``
    request parameter), each configured import interface is run against
    the files in its result-files folder, skipping files already listed
    in that folder's ``imported.csv``.

    Returns a short status string; detailed results go to the
    per-instrument logs and to the log file inside the results folder.
    """
    request = self.request
    if not self.is_import_allowed():
        return 'Auto-import skipped due to interval...'
    bsc = getToolByName(self, 'bika_setup_catalog')
    # Getting instruments to run auto-import
    query = {'portal_type': 'Instrument', 'inactive_state': 'active'}
    if request.get('i_uid', ''):
        query['UID'] = request.get('i_uid')
    brains = bsc(query)
    for brain in brains:
        i = brain.getObject()
        logger.info('Auto import for ' + i.Title())
        # If Import Interface ID is specified in request, then auto-import
        # will run only that interface. Otherwise all available interfaces
        # of this instrument. Assign (not append) so a forced interface is
        # not duplicated once per instrument brain.
        if request.get('interface', ''):
            interfaces = [request.get('interface')]
        else:
            interfaces = [
                pairs.get('InterfaceName', '')
                for pairs in i.getResultFilesFolder()
            ]
        for interface in interfaces:
            # Each interface must have its folder where result files are
            # saved. If not, then we will skip. Reset per interface so a
            # previous interface's folder can never leak into this one.
            folder = ''
            for pairs in i.getResultFilesFolder():
                if pairs['InterfaceName'] == interface:
                    folder = pairs.get('Folder', '')
            if not folder:
                continue
            logger.info('Auto import for ' + interface)
            all_files = [
                f for f in listdir(folder) if isfile(join(folder, f))
            ]
            imported_list = self.getAlreadyImportedFiles(folder)
            if not imported_list:
                logger.warn('imported.csv file not found ' + interface)
                self.add_to_logs(i, interface,
                                 'imported.csv File not found...', '')
                continue
            for file_name in all_files:
                if file_name in imported_list:
                    continue
                temp_file = open(join(folder, file_name))
                try:
                    # Parsers work with UploadFile object from
                    # zope.HTTPRequest which has filename attribute.
                    # To add this attribute we convert the file.
                    # CHECK should we add headers too?
                    result_file = ConvertToUploadFile(temp_file)
                    exim = instruments.getExim(interface)
                    parser_name = instruments.getParserName(interface)
                    parser_function = getattr(exim, parser_name, '')
                    if not parser_function:
                        self.add_to_logs(i, interface,
                                         'Parser not found...', file_name)
                        continue
                    # We will run import with some default parameters.
                    # Expected to be modified in the future.
                    logger.info('Parsing ' + file_name)
                    parser = parser_function(result_file)
                    importer = GeneralImporter(
                        parser=parser,
                        context=self.portal,
                        idsearchcriteria=[
                            'getId', 'getSampleID', 'getClientSampleID'
                        ],
                        allowed_ar_states=['sample_received'],
                        allowed_analysis_states=None,
                        override=[False, False],
                        instrument_uid=i.UID())
                    tbex = ''
                    try:
                        importer.process()
                    except Exception:
                        # best-effort import: record the traceback instead
                        # of aborting, but no longer a bare except
                        tbex = traceback.format_exc()
                    errors = importer.errors
                    logs = importer.logs
                    if tbex:
                        errors.append(tbex)
                    success_log = self.getInfoFromLog(logs,
                                                     'Import finished')
                    final_log = success_log if success_log else errors
                    self.insert_file_name(folder, file_name)
                    self.add_to_logs(i, interface, final_log, file_name)
                    self.add_to_log_file(i.Title(), interface, final_log,
                                         file_name, folder)
                finally:
                    # close the result file handle (previously leaked)
                    temp_file.close()
    logger.info('End of auto import...')
    return 'Auto-Import finished...'
def __call__(self):
    """Run the auto-import of instrument result files.

    For every active instrument (or just the one given via the ``i_uid``
    request parameter), each configured import interface is run against
    the files in its result-files folder, skipping files already listed
    in that folder's ``imported.csv``.

    Returns a short status string; detailed results go to the
    per-instrument logs and to the log file inside the results folder.
    """
    request = self.request
    if not self.is_import_allowed():
        return 'Auto-import skipped due to interval...'
    bsc = getToolByName(self, 'bika_setup_catalog')
    # Getting instruments to run auto-import
    query = {'portal_type': 'Instrument', 'inactive_state': 'active'}
    if request.get('i_uid', ''):
        query['UID'] = request.get('i_uid')
    brains = bsc(query)
    for brain in brains:
        i = brain.getObject()
        logger.info('Auto import for ' + i.Title())
        # If Import Interface ID is specified in request, then auto-import
        # will run only that interface. Otherwise all available interfaces
        # of this instrument. Assign (not append) so a forced interface is
        # not duplicated once per instrument brain.
        if request.get('interface', ''):
            interfaces = [request.get('interface')]
        else:
            interfaces = [pairs.get('InterfaceName', '')
                          for pairs in i.getResultFilesFolder()]
        for interface in interfaces:
            # Each interface must have its folder where result files are
            # saved. If not, then we will skip. Reset per interface so a
            # previous interface's folder can never leak into this one.
            folder = ''
            for pairs in i.getResultFilesFolder():
                if pairs['InterfaceName'] == interface:
                    folder = pairs.get('Folder', '')
            if not folder:
                continue
            logger.info('Auto import for ' + interface)
            all_files = [f for f in listdir(folder)
                         if isfile(join(folder, f))]
            imported_list = self.getAlreadyImportedFiles(folder)
            if not imported_list:
                logger.warn('imported.csv file not found ' + interface)
                self.add_to_logs(i, interface,
                                 'imported.csv File not found...', '')
                continue
            for file_name in all_files:
                if file_name in imported_list:
                    continue
                temp_file = open(join(folder, file_name))
                try:
                    # Parsers work with UploadFile object from
                    # zope.HTTPRequest which has filename attribute.
                    # To add this attribute we convert the file.
                    # CHECK should we add headers too?
                    result_file = ConvertToUploadFile(temp_file)
                    exim = instruments.getExim(interface)
                    parser_name = instruments.getParserName(interface)
                    parser_function = getattr(exim, parser_name, '')
                    if not parser_function:
                        self.add_to_logs(i, interface,
                                         'Parser not found...', file_name)
                        continue
                    # We will run import with some default parameters.
                    # Expected to be modified in the future.
                    logger.info('Parsing ' + file_name)
                    parser = parser_function(result_file)
                    importer = GeneralImporter(
                        parser=parser,
                        context=self.portal,
                        idsearchcriteria=['getId', 'getSampleID',
                                          'getClientSampleID'],
                        allowed_ar_states=['sample_received'],
                        allowed_analysis_states=None,
                        override=[False, False],
                        instrument_uid=i.UID())
                    tbex = ''
                    try:
                        importer.process()
                    except Exception:
                        # best-effort import: record the traceback instead
                        # of aborting, but no longer a bare except
                        tbex = traceback.format_exc()
                    errors = importer.errors
                    logs = importer.logs
                    if tbex:
                        errors.append(tbex)
                    success_log = self.getInfoFromLog(logs,
                                                     'Import finished')
                    final_log = success_log if success_log else errors
                    self.insert_file_name(folder, file_name)
                    self.add_to_logs(i, interface, final_log, file_name)
                    self.add_to_log_file(i.Title(), interface, final_log,
                                         file_name, folder)
                finally:
                    # close the result file handle (previously leaked)
                    temp_file.close()
    logger.info('End of auto import...')
    return 'Auto-Import finished...'