def on_success(self, retval, task_id, args, kwargs):
    user_id, compendium_id, ontology_id, filename, file_type, channel_name, view, operation = args
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    log = MessageLog()
    log.title = "Biological feature annotation"
    log.message = "Status: success, Task: " + task_id + ", User: " + str(user_id)
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    Group("compendium_" + str(compendium_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
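
# The Group(...).send(...) refresh broadcast above recurs in nearly every
# callback below. A minimal sketch of how it could be factored out, assuming
# the Channels 1.x Group API used throughout this module; the helper name
# send_refresh is hypothetical, not part of the original code:
def send_refresh(group_name, stream):
    """Ask every client bound to `group_name` to refresh `stream`."""
    Group(group_name).send({
        'text': json.dumps({
            'stream': stream,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })

# Usage (hypothetical): send_refresh("compendium_" + str(compendium_id), view)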
def on_failure(self, exc, task_id, args, kwargs, einfo):
    user_id, compendium_id, experiment_id, channel_name, view, operation = args
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
    parsing_db = init_parsing(compendium_id, experiment_id)
    platforms = [plt.platform_access_id for plt in ParsingPlatform.objects.using(parsing_db).all()]
    log = MessageLog()
    log.title = "Platforms: " + ", ".join(platforms) + " error"
    log.message = "Status: error, Platforms: " + ", ".join(platforms) + ", Experiment: " + \
        exp.experiment_access_id + ", Task: " + task_id + ", User: " + str(user_id) + \
        ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    # delete the intermediate CSV files produced by the parsing step
    for fl in glob.glob(parsing_db + '*bio_feature*.csv'):
        os.remove(fl)
    message = Message(type='error',
                      title='Error on importing platforms for experiment ' + exp.experiment_access_id,
                      message=str(exc))
    message.send_to(channel)
    # reset the status of every platform involved in the failed import
    for parsing_platform in ParsingPlatform.objects.using(parsing_db).all():
        plt = Platform.objects.using(compendium.compendium_nick_name).get(id=parsing_platform.platform_fk)
        plt.status = None
        plt.save(using=compendium.compendium_nick_name)
    Group("compendium_" + str(compendium_id) + "_" + str(experiment_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
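
# The on_success/on_failure methods in this module have the signatures of
# celery.Task hooks. A minimal sketch of how such handlers are typically
# attached (the class name and example task are hypothetical; the hook
# signatures are Celery's own):
from celery import Task

class CallbackTask(Task):
    def on_success(self, retval, task_id, args, kwargs):
        pass  # e.g. write a MessageLog and broadcast a refresh, as above

    def on_failure(self, exc, task_id, args, kwargs, einfo):
        pass  # e.g. log the traceback and notify the user's channel

# @app.task(base=CallbackTask, bind=True)
# def import_experiment(self, *args): ...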
def on_failure(self, exc, task_id, args, kwargs, einfo):
    user_id, compendium_id, experiment_id, keep_platform, channel_name, view, operation = args
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
    parsing_db = init_parsing(compendium_id, experiment_id)
    log = MessageLog()
    log.title = "Experiment: " + exp.experiment_access_id + " import error"
    log.message = "Status: error, Experiment: " + exp.experiment_access_id + ", Task: " + task_id + \
        ", User: " + str(user_id) + ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    # delete the intermediate CSV files
    for fl in glob.glob(parsing_db + '*.csv'):
        os.remove(fl)
    message = Message(type='error',
                      title='Error on importing experiment ' + exp.experiment_access_id,
                      message=str(exc))
    message.send_to(channel)
    # roll the experiment back to the data-ready state
    data_ready_status = Status.objects.using(compendium.compendium_nick_name).get(name='experiment_data_ready')
    exp.status = data_ready_status
    exp.save(using=compendium.compendium_nick_name)
    Group("compendium_" + str(compendium_id) + "_" + str(experiment_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
def on_failure(self, exc, task_id, args, kwargs, einfo):
    user_id, compendium_id, ontology_id, filename, file_type, channel_name, view, operation = args
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    log = MessageLog()
    log.title = "Biological feature annotation"
    log.message = "Status: error, Task: " + task_id + ", User: " + str(user_id) + ", Error: " + str(exc)
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    message = Message(type='error', title='Error', message=str(exc))
    message.send_to(channel)
    Group("compendium_" + str(compendium_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
def on_failure(self, exc, task_id, args, kwargs, einfo):
    user_id, compendium_id, term, db_id, channel_name, view, operation = args
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    log = MessageLog()
    log.title = "Search experiment " + term
    log.message = "Status: error, Term: " + term + ", Task: " + task_id + ", User: " + str(user_id) + \
        ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    message = Message(type='error', title='Error', message=str(exc))
    message.send_to(channel)
    # drop the bookkeeping entry for this search task
    ViewTask.objects.using(compendium.compendium_nick_name). \
        get(task_id=task_id, operation=operation, view=view).delete()
    Group("compendium_" + str(compendium_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
def on_failure(self, exc, task_id, args, kwargs, einfo):
    user_id, compendium_id, plt_dir, platform_id, filter_id, \
        blast_file_name, channel_name, view, operation = args
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    log = MessageLog()
    plt = Platform.objects.using(compendium.compendium_nick_name).get(id=platform_id)
    log.title = "Platform: " + plt.platform_access_id + " mapping import error"
    log.message = "Status: error, Platform: " + plt.platform_access_id + \
        ", Task: " + task_id + ", User: " + str(user_id) + \
        ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    message = Message(type='error', title='Error importing platform mapping', message=str(exc))
    message.send_to(channel)
    Group("compendium_" + str(compendium_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
def delete_bio_feature_annotation(request, *args, **kwargs):
    req = request.POST
    comp_id = req['compendium_id']
    view = req['view']
    channel_name = request.session['channel_name']
    operation = req['operation']
    compendium = CompendiumDatabase.objects.get(id=comp_id)
    BioFeatureAnnotation.objects.using(compendium.compendium_nick_name).all().delete()
    message = Message(type='info', title='Annotation',
                      message='Biological feature annotation has been deleted')
    message.send_to(Channel(channel_name))
    Group("compendium_" + str(comp_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
    return HttpResponse(json.dumps({'success': True}), content_type="application/json")
def on_failure(self, exc, task_id, args, kwargs, einfo):
    user_id, compendium_id, exp_id, script, parameters, input_files, \
        experiment_entity, platform_entity, sample_entity, channel_name, view, operation = args
    channel = Channel(channel_name)
    message = Message(type='error', title='Error running ' + os.path.basename(script), message=str(exc))
    message.send_to(channel)
def on_failure(self, exc, task_id, args, kwargs, einfo):
    user_id, compendium_id, exp_id, exp_name, exp_descr, exp_structure_file, exp_data_file, \
        channel_name, view, operation = args
    channel = Channel(channel_name)
    message = Message(type='error', title='Error on experiment ' + exp_name, message=str(exc))
    message.send_to(channel)
def on_failure(self, exc, task_id, args, kwargs, einfo):
    user_id, compendium_id, file_path, bio_feature_name, file_type, channel_name, view, operation = args
    channel = Channel(channel_name)
    message = Message(type='error', title='Error', message=str(exc))
    message.send_to(channel)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    log = MessageLog()
    log.title = "Importing " + bio_feature_name + " (biological features) from " + file_type + " file"
    log.message = "Status: error, File: " + os.path.basename(file_path) + ", Type: " + file_type + \
        ", Task: " + task_id + ", User: " + str(user_id) + \
        ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
def func_wrapper(*args, **kwargs):
    channel_name, view, request, user = args
    channel = Channel(channel_name)
    try:
        func(*args, **kwargs)
    except ProgrammingError as e:
        # a ProgrammingError at this level usually means the compendium
        # schema has not been created yet
        error_msg = Message(type='error', title='Error',
                            message=str(e) + "<br><br> Is the compendium initialized?")
        error_msg.send_to(channel)
    except Exception as e:
        error_msg = Message(type='error', title='Error', message=str(e))
        error_msg.send_to(channel)
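
# func_wrapper above is the inner function of a decorator: it closes over
# `func`. A minimal sketch of the enclosing decorator under that assumption
# (the name channel_safe is hypothetical, not part of the original code):
from functools import wraps

def channel_safe(func):
    @wraps(func)
    def func_wrapper(*args, **kwargs):
        # unpack the (channel_name, view, request, user) convention and
        # report any exception on the caller's channel, as above
        channel_name = args[0]
        try:
            func(*args, **kwargs)
        except Exception as e:
            Message(type='error', title='Error', message=str(e)).send_to(Channel(channel_name))
    return func_wrapper

# Usage (hypothetical):
# @channel_safe
# def handle_request(channel_name, view, request, user): ...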
def on_failure(self, exc, task_id, args, kwargs, einfo):
    user_id, compendium_id, plt_dir, platform_id, blast_file_name, alignment_length_1, gap_open_1, \
        mismatches_1, alignment_length_2, gap_open_2, mismatches_2, channel_name, view, operation = args
    channel = Channel(channel_name)
    message = Message(type='error', title='Error', message=str(exc))
    message.send_to(channel)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    platform = Platform.objects.using(compendium.compendium_nick_name).get(id=platform_id)
    log = MessageLog()
    log.title = "Filtering of alignment " + blast_file_name
    log.message = "Status: error, Platform: " + platform.platform_access_id + ", Alignment: " + blast_file_name + \
        ", Task: " + task_id + ", User: " + str(user_id) + \
        ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
def on_success(self, retval, task_id, args, kwargs):
    user_id, compendium_id, experiment_id, channel_name, view, operation = args
    # refresh both the current view and the experiments list
    for stream in (view, 'experiments'):
        Group("compendium_" + str(compendium_id)).send({
            'text': json.dumps({
                'stream': stream,
                'payload': {
                    'request': {'operation': 'refresh'},
                    'data': None
                }
            })
        })
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    exp = ExperimentSearchResult.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
    log = MessageLog()
    log.title = "Download experiment " + exp.experiment_access_id
    log.message = "Status: success, Experiment: " + exp.experiment_access_id + ", Task: " + task_id + \
        ", User: " + str(user_id) + ", Notes: " + retval
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    if retval:
        channel = Channel(channel_name)
        message = Message(type='info', title='Download notes',
                          message="Duplicated samples are not imported! <br><br>" + retval)
        message.send_to(channel)
def on_failure(self, exc, task_id, args, kwargs, einfo):
    user_id, compendium_id, experiment_id, channel_name, view, operation = args
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    # roll the search result back to the 'new' state
    new_status = Status.objects.using(compendium.compendium_nick_name).get(name='experiment_new')
    exp = ExperimentSearchResult.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
    exp.status = new_status
    exp.save(using=compendium.compendium_nick_name)
    log = MessageLog()
    log.title = "Download experiment " + exp.experiment_access_id
    log.message = "Status: error, Experiment: " + exp.experiment_access_id + ", Task: " + task_id + \
        ", User: " + str(user_id) + ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    message = Message(type='error', title='Error on experiment ' + exp.experiment_access_id, message=str(exc))
    message.send_to(channel)
    for stream in (view, 'experiments'):
        Group("compendium_" + str(compendium_id)).send({
            'text': json.dumps({
                'stream': stream,
                'payload': {
                    'request': {'operation': 'refresh'},
                    'data': None
                }
            })
        })
def on_failure(self, exc, task_id, args, kwargs, einfo):
    user_id, compendium_id, plt_dir, platform_id, use_short_blastn, alignment_identity, \
        channel_name, view, operation = args
    channel = Channel(channel_name)
    message = Message(type='error', title='Error', message=str(exc))
    message.send_to(channel)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    platform = Platform.objects.using(compendium.compendium_nick_name).get(id=platform_id)
    log = MessageLog()
    log.title = "Alignment of platform " + platform.platform_access_id
    log.message = "Status: error, Platform: " + platform.platform_access_id + \
        ", Task: " + task_id + ", User: " + str(user_id) + \
        ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    blast_file_name = os.path.join(plt_dir, task_id + '.blast')
    mapper = MicroarrayMapper(blast_file_name)
    mapper.set_alignment_status('error')
    Group("compendium_" + str(compendium_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
    # remove this task's temporary files, keeping the .sqlite and .blast outputs
    for file in [f for f in os.listdir(plt_dir)
                 if os.path.isfile(os.path.join(plt_dir, f)) and f.startswith(task_id)]:
        if file.endswith('.sqlite') or file.endswith('.blast'):
            continue
        os.remove(os.path.join(plt_dir, file))
def on_success(self, retval, task_id, args, kwargs):
    user_id, compendium_id, experiment_id, keep_platform, channel_name, view, operation = args
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
    parsing_db = init_parsing(compendium_id, experiment_id)
    report = retval
    log = MessageLog()
    log.title = "Experiment: " + exp.experiment_access_id + " imported"
    log.message = "Status: success, Experiment: " + exp.experiment_access_id + ", Task: " + task_id + \
        ", User: " + str(user_id) + ", Results: " + report
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    # delete the intermediate CSV files; ignore errors if they are already gone
    try:
        for fl in glob.glob(parsing_db + '*.csv'):
            os.remove(fl)
    except Exception:
        pass
    message = Message(type='info', title='Successfully imported experiment',
                      message='Successfully imported experiment ' + exp.experiment_access_id +
                              '<br>' + report)
    message.send_to(channel)
    imported_status = Status.objects.using(compendium.compendium_nick_name).get(name='experiment_raw_data_imported')
    exp.status = imported_status
    exp.save(using=compendium.compendium_nick_name)
    Group("compendium_" + str(compendium_id) + "_" + str(experiment_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
def on_success(self, retval, task_id, args, kwargs):
    user_id, compendium_id, experiment_id, channel_name, view, operation = args
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
    parsing_db = init_parsing(compendium_id, experiment_id)
    platforms = [plt.platform_access_id for plt in ParsingPlatform.objects.using(parsing_db).all()]
    log = MessageLog()
    log.title = "Platforms: " + ", ".join(platforms) + " imported"
    log.message = "Status: success, Platforms: " + ", ".join(platforms) + ", Experiment: " + \
        exp.experiment_access_id + ", Task: " + task_id + ", User: " + str(user_id) + \
        ", Results: " + retval
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    # delete the intermediate CSV files
    for fl in glob.glob(parsing_db + '*bio_feature*.csv'):
        os.remove(fl)
    message = Message(type='info', title='Successfully imported platforms',
                      message='Successfully imported platforms for experiment ' +
                              exp.experiment_access_id + ' (' + ','.join(platforms) + ') <br>' + retval)
    message.send_to(channel)
    # mark every imported platform as ready
    ready_status = Status.objects.using(compendium.compendium_nick_name).get(name='platform_imported')
    for parsing_platform in ParsingPlatform.objects.using(parsing_db).all():
        plt = Platform.objects.using(compendium.compendium_nick_name).get(id=parsing_platform.platform_fk)
        plt.status = ready_status
        plt.save(using=compendium.compendium_nick_name)
    Group("compendium_" + str(compendium_id) + "_" + str(experiment_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
def on_success(self, retval, task_id, args, kwargs):
    user_id, compendium_id, exp_id, entity_type, entity_id, channel_name, view, operation = args
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=exp_id)
    parsed_status = Status.objects.using(compendium.compendium_nick_name).get(name='entity_script_parsed')
    entity_access_id = ''
    if entity_type == 'experiment':
        entity_access_id = exp.experiment_access_id
        for af in exp.assignedfile_set.all():
            log = MessageLog()
            log.title = exp.experiment_access_id + " " + af.script_name + " completed on file " + af.input_filename
            log.message = "Status: success, Experiment: " + exp.experiment_access_id + ", Order: " + \
                str(af.order) + ", Parameters: " + " ".join(af.parameters) + \
                ", Task: " + task_id + ", User: " + str(user_id)
            log.source = log.SOURCE[1][0]
            log.save(using=compendium.compendium_nick_name)
            af.status = parsed_status
            af.message_log = log
            af.save(using=compendium.compendium_nick_name)
    elif entity_type == 'platform':
        platform = Platform.objects.using(compendium.compendium_nick_name).get(id=entity_id)
        entity_access_id = platform.platform_access_id
        for af in platform.assignedfile_set.all():
            log = MessageLog()
            log.title = exp.experiment_access_id + " " + af.script_name + " completed on file " + af.input_filename
            log.message = "Status: success, Platform: " + platform.platform_access_id + ", Order: " + \
                str(af.order) + ", Parameters: " + " ".join(af.parameters) + \
                ", Task: " + task_id + ", User: " + str(user_id)
            log.source = log.SOURCE[1][0]
            log.save(using=compendium.compendium_nick_name)
            af.status = parsed_status
            af.message_log = log
            af.save(using=compendium.compendium_nick_name)
    elif entity_type == 'sample':
        sample = Sample.objects.using(compendium.compendium_nick_name).get(experiment=exp, id=entity_id)
        entity_access_id = sample.sample_name
        for af in sample.assignedfile_set.all():
            log = MessageLog()
            log.title = exp.experiment_access_id + " " + af.script_name + " completed on file " + af.input_filename
            log.message = "Status: success, Sample: " + sample.sample_name + ", Order: " + \
                str(af.order) + ", Parameters: " + " ".join(af.parameters) + \
                ", Task: " + task_id + ", User: " + str(user_id)
            log.source = log.SOURCE[1][0]
            log.save(using=compendium.compendium_nick_name)
            af.status = parsed_status
            af.message_log = log
            af.save(using=compendium.compendium_nick_name)
    # refresh every view that displays parsing state
    for stream in (view, 'parse_experiment_platform', 'message_log', 'file_assignment_list'):
        Group("compendium_" + str(compendium_id) + "_" + str(exp_id)).send({
            'text': json.dumps({
                'stream': stream,
                'payload': {
                    'request': {'operation': 'refresh'},
                    'data': None
                }
            })
        })
    if retval:
        message = Message(type='parsing_log',
                          title='Parsing STDOUT ' + entity_type + ' ' + entity_access_id +
                                ', experiment: ' + exp.experiment_access_id,
                          message=retval.replace('\n', '<br>'))
        message.send_to(channel)
def on_failure(self, exc, task_id, args, kwargs, einfo):
    user_id, compendium_id, exp_id, entity_type, entity_id, channel_name, view, operation = args
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=exp_id)
    error_status = Status.objects.using(compendium.compendium_nick_name).get(name='entity_script_error')
    access_id = ''
    if entity_type == 'experiment':
        access_id = exp.experiment_access_id
        for af in exp.assignedfile_set.all():
            log = MessageLog()
            log.title = exp.experiment_access_id + " " + af.script_name + " error on file " + af.input_filename
            log.message = "Status: error, Experiment: " + exp.experiment_access_id + ", Order: " + \
                str(af.order) + ", Parameters: " + " ".join(af.parameters) + \
                ", Task: " + task_id + ", User: " + str(user_id) + \
                ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
            log.source = log.SOURCE[1][0]
            log.save(using=compendium.compendium_nick_name)
            af.status = error_status
            af.message_log = log
            af.save(using=compendium.compendium_nick_name)
    elif entity_type == 'platform':
        platform = Platform.objects.using(compendium.compendium_nick_name).get(id=entity_id)
        access_id = platform.platform_access_id
        for af in platform.assignedfile_set.all():
            log = MessageLog()
            log.title = exp.experiment_access_id + " " + af.script_name + " error on file " + af.input_filename
            log.message = "Status: error, Platform: " + platform.platform_access_id + ", Order: " + \
                str(af.order) + ", Parameters: " + " ".join(af.parameters) + \
                ", Task: " + task_id + ", User: " + str(user_id) + \
                ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
            log.source = log.SOURCE[1][0]
            log.save(using=compendium.compendium_nick_name)
            af.status = error_status
            af.message_log = log
            af.save(using=compendium.compendium_nick_name)
    elif entity_type == 'sample':
        sample = Sample.objects.using(compendium.compendium_nick_name).get(experiment=exp, id=entity_id)
        access_id = sample.sample_name
        for af in sample.assignedfile_set.all():
            log = MessageLog()
            log.title = exp.experiment_access_id + " " + af.script_name + " error on file " + af.input_filename
            log.message = "Status: error, Sample: " + sample.sample_name + ", Order: " + \
                str(af.order) + ", Parameters: " + " ".join(af.parameters) + \
                ", Task: " + task_id + ", User: " + str(user_id) + \
                ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
            log.source = log.SOURCE[1][0]
            log.save(using=compendium.compendium_nick_name)
            af.status = error_status
            af.message_log = log
            af.save(using=compendium.compendium_nick_name)
    for stream in (view, 'message_log', 'file_assignment'):
        Group("compendium_" + str(compendium_id) + "_" + str(exp_id)).send({
            'text': json.dumps({
                'stream': stream,
                'payload': {
                    'request': {'operation': 'refresh'},
                    'data': None
                }
            })
        })
    message = Message(type='error', title='Error parsing ' + entity_type + ' ' + access_id,
                      message=str(exc))
    message.send_to(channel)
def experiment_public_download(self, user_id, compendium_id, experiment_id, channel_name, view, operation):
    init_database_connections()
    channel = Channel(channel_name)
    user = User.objects.get(id=user_id)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    task_id = self.request.id
    operation = operation + "_" + str(experiment_id)
    downloading_status = Status.objects.using(compendium.compendium_nick_name).get(name='experiment_downloading')
    exp = ExperimentSearchResult.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
    exp.status = downloading_status
    exp.save(using=compendium.compendium_nick_name)
    # replace any stale task entry for this view/operation
    try:
        ViewTask.objects.using(compendium.compendium_nick_name). \
            get(operation=operation, view=view).delete()
    except Exception:
        pass
    channel_task = ViewTask(task_id=task_id, operation=operation, view=view)
    channel_task.save(using=compendium.compendium_nick_name)
    data_ready_status = Status.objects.using(compendium.compendium_nick_name).get(name='experiment_data_ready')
    base_output_directory = AdminOptions.objects.get(option_name='download_directory')
    out_dir = os.path.join(base_output_directory.option_value,
                           compendium.compendium_nick_name,
                           exp.experiment_access_id)
    os.makedirs(out_dir, exist_ok=True)
    Group("compendium_" + str(compendium_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
    log_message = ''
    # resolve the data source's Python class from its dotted path and instantiate it
    module_name, class_name = '.'.join(exp.data_source.python_class.split('.')[:-1]), \
        exp.data_source.python_class.split('.')[-1]
    python_class = getattr(importlib.import_module(module_name), class_name)()
    python_class.download_experiment_files(exp.experiment_access_id, user.email, out_dir)
    try:
        # if the experiment already exists, only the files are downloaded
        Experiment.objects.using(compendium.compendium_nick_name).get(
            experiment_access_id=exp.experiment_access_id)
        message = Message(type='info', title='Experiment already exists',
                          message='The experiment ' + exp.experiment_access_id +
                                  ' is already present in the database. Data have been downloaded anyway.')
        message.send_to(channel)
    except Exception:
        log_message = python_class.create_experiment_structure(compendium_id, experiment_id, out_dir)
    exp.status = data_ready_status
    exp.save(using=compendium.compendium_nick_name)
    return log_message
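
# A minimal standalone sketch of the dotted-path class loading used above
# (importlib.import_module plus getattr); the helper name load_class is
# hypothetical:
import importlib

def load_class(dotted_path):
    """Split 'pkg.module.ClassName' and return the class object."""
    module_name, _, class_name = dotted_path.rpartition('.')
    return getattr(importlib.import_module(module_name), class_name)

# e.g. load_class('collections.OrderedDict')() builds an OrderedDict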
def on_success(self, retval, task_id, args, kwargs):
    user_id, compendium_id, plt_dir, platform_id, filter_id, \
        blast_file_name, channel_name, view, operation = args
    csv_file, without_mapping_before, with_mapping_before = retval
    without_mapping_before = set(without_mapping_before)
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    # reporters still without a bio feature after the import
    without_mapping_after = set(
        BioFeatureReporter.objects.using(compendium.compendium_nick_name).filter(
            platform_id=platform_id, bio_feature__isnull=True).values_list('id', flat=True).distinct())
    # reporter id -> bio feature id for every mapped reporter
    with_mapping_after = dict(
        BioFeatureReporter.objects.using(compendium.compendium_nick_name).filter(
            platform_id=platform_id, bio_feature__isnull=False).values_list('id', 'bio_feature_id'))
    # compare the before/after mapping state to build the report
    added = len(set.intersection(without_mapping_before, set(with_mapping_after.keys())))
    removed = len(set.intersection(set(with_mapping_before.keys()), without_mapping_after))
    changed = len(set.intersection(set(with_mapping_before.keys()), set(with_mapping_after.keys()))) - \
        len(set.intersection(set(with_mapping_before.items()), set(with_mapping_after.items())))
    unchanged_mapped = len(set.intersection(set(with_mapping_before.items()), set(with_mapping_after.items())))
    unchanged_unmapped = len(set.intersection(without_mapping_before, without_mapping_after))
    report = 'added: {}, removed: {}, changed: {}, unchanged_mapped: {}, unchanged_unmapped: {}'.format(
        added, removed, changed, unchanged_mapped, unchanged_unmapped)
    plt = Platform.objects.using(compendium.compendium_nick_name).get(id=platform_id)
    log = MessageLog()
    log.title = "Platform: " + plt.platform_access_id + " mapping imported"
    log.message = "Status: success, Platform: " + plt.platform_access_id + \
        ", Report: " + report + ", Task: " + task_id + ", User: " + str(user_id)
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    # mark every other alignment as not imported, then flag the current one
    for blast_file in glob.glob(plt_dir + "/*.blast"):
        mapper = MicroarrayMapper(os.path.join(plt_dir, blast_file))
        mapper.set_imported(False)
    mapper = MicroarrayMapper(os.path.join(plt_dir, blast_file_name))
    mapper.set_imported(True, filter_id)
    mapper.set_filter_status(filter_id, 'ready')
    Group("compendium_" + str(compendium_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
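
# A tiny self-contained check of the before/after counting logic above,
# using hypothetical reporter ids (1-4) and bio feature ids (10-20):
without_before = {1, 2}                  # unmapped before the import
with_before = {3: 10, 4: 11}             # reporter id -> bio feature id
without_after = {2}
with_after = {1: 20, 3: 12, 4: 11}

added = len(without_before & set(with_after))                    # 1: reporter 1 gained a mapping
removed = len(set(with_before) & without_after)                  # 0: nothing lost its mapping
changed = len(set(with_before) & set(with_after)) - \
    len(set(with_before.items()) & set(with_after.items()))      # 1: reporter 3 was remapped
unchanged_mapped = len(set(with_before.items()) & set(with_after.items()))  # 1: reporter 4
unchanged_unmapped = len(without_before & without_after)                    # 1: reporter 2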
def on_success(self, retval, task_id, args, kwargs):
    user_id, compendium_id, exp_id, script, parameters, input_files, \
        experiment_entity, platform_entity, sample_entity, channel_name, view, operation = args
    channel = Channel(channel_name)
    message = Message(type='info', title='Assigned script ran successfully.', message=retval[0])
    message.send_to(channel)
    # refresh the view itself plus its experiment, platform and sample variants
    for stream in (view, 'experiment_' + view, 'platform_' + view, 'sample_' + view):
        Group("compendium_" + str(compendium_id) + "_" + str(exp_id)).send({
            'text': json.dumps({
                'stream': stream,
                'payload': {
                    'request': {'operation': 'refresh'},
                    'data': None
                }
            })
        })
    if retval[1]:
        message = Message(type='parsing_log', title='Assignment script',
                          message=retval[1].replace('\n', '<br>'))
        message.send_to(channel)