# Standard-library and framework imports used by the functions below.
# Project models (CompendiumDatabase, Experiment, Platform, Status, Sample,
# RawData, BioFeatureReporter, PlatformType, ExperimentSearchResult,
# AdminOptions, MessageLog and the Parsing* models) and helpers
# (init_parsing, batch_qs, Message, __import_experiment_platform) come from
# project modules that are not shown here.
import csv
import glob
import json
import os
import shutil
from contextlib import closing

from channels import Channel, Group  # django-channels 1.x API
from django.db import connections, transaction
from django.db.models import Q
from django.http import HttpResponse


def read_experiment(channel_name, view, request, user):
    """Send experiment details (status, platforms, sample count) to the requesting channel."""
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])
    # subscribe this channel to updates for the requested experiment
    Group("compendium_" + str(compendium.id) + "_" + str(request['values'])).add(channel)
    parsing_db = init_parsing(request['compendium_id'], request['values'])
    experiment = Experiment.objects.using(compendium.compendium_nick_name).get(id=request['values'])
    parsing_experiment = ParsingExperiment.objects.using(parsing_db).get(experiment_fk=request['values'])
    n_samples = parsing_experiment.parsingsample_set.all().count()
    platforms = ",".join(set(
        sample.platform.platform_access_id for sample in parsing_experiment.parsingsample_set.all()
    ))
    # the experiment counts as 'importing' if either its raw data or one of
    # its platforms is currently being imported
    status = 'importing' if experiment.status.name == 'experiment_raw_data_importing' else None
    if not status:
        for smp in experiment.sample_set.all():
            if smp.platform.status and smp.platform.status.name == 'platform_importing':
                status = 'importing'
    channel.send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': request,
                'data': {
                    'status': status,
                    'experiment': experiment.to_dict(),
                    'parsing_experiment': parsing_experiment.to_dict(),
                    'platforms': platforms,
                    'n_samples': n_samples
                }
            }
        })
    })
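# `init_parsing` is defined elsewhere in the project. Judging from its usage
# here, it returns the path of a per-experiment parsing (SQLite) database and
# registers that path as a Django connection alias, so the same string works
# for both `.objects.using(parsing_db)` and `os.remove(parsing_db)`. A minimal
# sketch under those assumptions -- the 'parsing_directory' option name is
# hypothetical, and schema creation for the Parsing* tables is omitted:
def init_parsing(compendium_id, experiment_id, get_name_only=False):
    base_dir = AdminOptions.objects.get(
        option_name='parsing_directory').option_value  # assumed option name
    db_path = os.path.join(base_dir,
                           'parsing_{}_{}.sqlite3'.format(compendium_id, experiment_id))
    if get_name_only:
        # callers use this form just to locate (and e.g. delete) the file
        return db_path
    if db_path not in connections.databases:
        # register the file as a database alias so .using(db_path) works
        connections.databases[db_path] = {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': db_path,
        }
    return db_path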
def delete_parsing_data(request, *args, **kwargs):
    values = json.loads(request.POST['values'])
    comp_id = request.POST['compendium_id']
    channel_name = request.session['channel_name']
    view = request.POST['view']
    compendium = CompendiumDatabase.objects.get(id=comp_id)
    for exp_id in values:
        parsing_db = init_parsing(compendium.id, exp_id, get_name_only=True)
        try:
            os.remove(parsing_db)
        except Exception:
            pass  # the parsing database file may not exist
    Group("compendium_" + str(comp_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
    return HttpResponse(json.dumps({'success': True}), content_type="application/json")
def on_failure(self, exc, task_id, args, kwargs, einfo):
    user_id, compendium_id, experiment_id, channel_name, view, operation = args
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
    parsing_db = init_parsing(compendium_id, experiment_id)
    platforms = [plt.platform_access_id for plt in ParsingPlatform.objects.using(parsing_db).all()]
    log = MessageLog()
    log.title = "Platforms: " + ", ".join(platforms) + " error"
    log.message = "Status: error, Platforms: " + ", ".join(platforms) + ", Experiment: " + \
        exp.experiment_access_id + ", Task: " + task_id + ", User: " + str(user_id) + \
        ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    # delete the staged bio-feature CSV files
    for fl in glob.glob(parsing_db + '*bio_feature*.csv'):
        os.remove(fl)
    message = Message(type='error',
                      title='Error on importing platforms for experiment ' + exp.experiment_access_id,
                      message=str(exc))
    message.send_to(channel)
    # reset the status of every platform involved in the failed import
    for parsing_platform in ParsingPlatform.objects.using(parsing_db).all():
        plt = Platform.objects.using(compendium.compendium_nick_name).get(id=parsing_platform.platform_fk)
        plt.status = None
        plt.save(using=compendium.compendium_nick_name)
    Group("compendium_" + str(compendium_id) + "_" + str(experiment_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
def on_failure(self, exc, task_id, args, kwargs, einfo):
    user_id, compendium_id, experiment_id, keep_platform, channel_name, view, operation = args
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
    parsing_db = init_parsing(compendium_id, experiment_id)
    log = MessageLog()
    log.title = "Experiment: " + exp.experiment_access_id + " import failed"
    log.message = "Status: error, Experiment: " + exp.experiment_access_id + \
        ", Task: " + task_id + ", User: " + str(user_id) + \
        ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    # delete the staged CSV files
    for fl in glob.glob(parsing_db + '*.csv'):
        os.remove(fl)
    message = Message(type='error',
                      title='Error on importing experiment ' + exp.experiment_access_id,
                      message=str(exc))
    message.send_to(channel)
    # roll the experiment back to the 'data ready' state
    data_ready_status = Status.objects.using(compendium.compendium_nick_name).get(name='experiment_data_ready')
    exp.status = data_ready_status
    exp.save(using=compendium.compendium_nick_name)
    Group("compendium_" + str(compendium_id) + "_" + str(experiment_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
def read_platform_preview(channel_name, view, request, user):
    channel = Channel(channel_name)
    start = 0
    end = None
    if request['page_size']:
        start = (request['page'] - 1) * request['page_size']
        end = start + request['page_size']
    compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])
    parsing_db = init_parsing(request['compendium_id'], request['values'])
    experiment = Experiment.objects.using(compendium.compendium_nick_name).get(id=request['values'])
    parsing_experiment = ParsingExperiment.objects.using(parsing_db).get(experiment_fk=experiment.id)
    order = ''
    if request['ordering'] == 'DESC':
        order = '-'
    platform_ids = list(set(sample.platform_id for sample in parsing_experiment.parsingsample_set.all()))
    query_response = ParsingPlatform.objects.using(parsing_db). \
        filter(id__in=platform_ids). \
        filter(Q(platform_name__contains=request['filter']) |
               Q(description__contains=request['filter']) |
               Q(platform_access_id__contains=request['filter'])
               ).order_by(order + request['ordering_value'])
    total = query_response.count()
    query_response = query_response[start:end]
    platforms = []
    status = 'importing' if experiment.status.name == 'experiment_raw_data_importing' else None
    for platform in query_response:
        imported_platform = Platform.objects.using(compendium.compendium_nick_name).get(id=platform.platform_fk)
        if not status and imported_platform.status:
            status = 'importing' if imported_platform.status.name == 'platform_importing' else None
        plt = platform.to_dict()
        plt['experiment_id'] = experiment.id
        plt['reporter_platform'] = ''
        plt['is_imported'] = imported_platform.biofeaturereporter_set.count() > 0
        try:
            p_type = PlatformType.objects.using(compendium.compendium_nick_name). \
                get(name=plt['platform_type'])
            plt['bio_feature_reporter_name'] = p_type.bio_feature_reporter_name
            plt['bio_features_reporter_fields'] = [
                field.to_dict() for field in p_type.biofeaturereporterfields_set.all()
            ]
        except Exception:
            pass  # unknown platform type: leave the reporter fields unset
        platforms.append(plt)
    channel.send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': request,
                'data': {
                    'platforms': platforms,
                    'status': status,
                    'total': total
                }
            }
        })
    })
def delete_experiment(request, *args, **kwargs):
    values = json.loads(request.POST['values'])
    comp_id = request.POST['compendium_id']
    channel_name = request.session['channel_name']
    view = request.POST['view']
    compendium = CompendiumDatabase.objects.get(id=comp_id)
    base_dir = AdminOptions.objects.get(option_name='download_directory').option_value
    for exp_id in values:
        exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=exp_id)
        new_status = Status.objects.using(compendium.compendium_nick_name).get(name='experiment_new')
        # reset the related search result, if any, back to 'new'
        try:
            exp_search = ExperimentSearchResult.objects.using(compendium.compendium_nick_name).get(
                experiment_access_id=exp.experiment_access_id)
            exp_search.status = new_status
            exp_search.save(using=compendium.compendium_nick_name)
        except Exception:
            pass
        # remove the parsing database file, if present
        parsing_db = init_parsing(compendium.id, exp_id, get_name_only=True)
        try:
            os.remove(parsing_db)
        except Exception:
            pass
        # remove the downloaded files for this experiment, if present
        try:
            full_dir = os.path.join(base_dir, compendium.compendium_nick_name, exp.experiment_access_id)
            shutil.rmtree(full_dir)
        except Exception:
            pass
        exp.delete()
    Group("compendium_" + str(comp_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
    return HttpResponse(json.dumps({'success': True}), content_type="application/json")
def read_experiment_platform_files(channel_name, view, request, user):
    channel = Channel(channel_name)
    start = 0
    end = None
    if request['page_size']:
        start = (request['page'] - 1) * request['page_size']
        end = start + request['page_size']
    compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])
    experiment = Experiment.objects.using(compendium.compendium_nick_name).get(id=request['values'])
    parsing_db = init_parsing(request['compendium_id'], request['values'])
    order = ''
    if request['ordering'] == 'DESC':
        order = '-'
    query_response = Platform.objects.using(compendium.compendium_nick_name). \
        filter(id__in=[s.platform.id for s in experiment.sample_set.all()]). \
        filter(Q(platform_name__contains=request['filter']) |
               Q(description__contains=request['filter'])
               ).order_by(order + request['ordering_value'])
    total = query_response.count()
    query_response = query_response[start:end]
    platforms = []
    for platform in query_response:
        plt = platform.to_dict()
        reporter_platform_id = ParsingPlatform.objects.using(parsing_db). \
            get(platform_fk=platform.id).reporter_platform
        try:
            plt['reporter_platform'] = Platform.objects.using(compendium.compendium_nick_name). \
                get(id=reporter_platform_id).platform_access_id
        except Exception:
            pass  # no reporter platform assigned yet
        # default status; overridden by the first assigned file's status, if any
        plt['status'] = Status.objects.using(compendium.compendium_nick_name). \
            get(name='entity_script_ready').to_dict()
        try:
            plt['status'] = platform.assignedfile_set.all()[0].status.to_dict()
        except Exception:
            pass
        plt['parsing_details'] = [assigned_file.to_dict() for assigned_file in platform.assignedfile_set.all()]
        platforms.append(plt)
    channel.send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': request,
                'data': {
                    'platforms': platforms,
                    'total': total
                }
            }
        })
    })
def read_sample_preview(channel_name, view, request, user):
    channel = Channel(channel_name)
    start = 0
    end = None
    if request['page_size']:
        start = (request['page'] - 1) * request['page_size']
        end = start + request['page_size']
    compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])
    parsing_db = init_parsing(request['compendium_id'], request['values'])
    experiment = Experiment.objects.using(compendium.compendium_nick_name).get(id=request['values'])
    parsing_experiment = ParsingExperiment.objects.using(parsing_db).get(experiment_fk=experiment.id)
    order = ''
    if request['ordering'] == 'DESC':
        order = '-'
    query_response = ParsingSample.objects.using(parsing_db). \
        filter(experiment=parsing_experiment). \
        filter(Q(sample_name__contains=request['filter']) |
               Q(description__contains=request['filter']) |
               Q(platform__platform_access_id__contains=request['filter'])
               ).order_by(order + request['ordering_value'])
    total = query_response.count()
    query_response = query_response[start:end]
    samples = []
    status = 'importing' if experiment.status.name == 'experiment_raw_data_importing' else None
    if not status:
        for smp in experiment.sample_set.all():
            if smp.platform.status and smp.platform.status.name == 'platform_importing':
                status = 'importing'
    for sample in query_response:
        smp = sample.to_dict()
        smp['experiment_id'] = experiment.id
        smp['reporter_platform'] = Platform.objects.using(compendium.compendium_nick_name). \
            get(id=sample.reporter_platform).to_dict()
        samples.append(smp)
    channel.send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': request,
                'data': {
                    'samples': samples,
                    'status': status,
                    'total': total
                }
            }
        })
    })
def import_experiment_platform(self, user_id, compendium_id, experiment_id, channel_name, view, operation):
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    parsing_db = init_parsing(compendium_id, experiment_id)
    reporters_batch_size = 1000
    # mark every platform of this experiment as 'importing' before starting
    importing_status = Status.objects.using(compendium.compendium_nick_name).get(name='platform_importing')
    for parsing_platform in ParsingPlatform.objects.using(parsing_db).all():
        plt = Platform.objects.using(compendium.compendium_nick_name).get(id=parsing_platform.platform_fk)
        plt.status = importing_status
        plt.save(using=compendium.compendium_nick_name)
    Group("compendium_" + str(compendium.id) + "_" + str(experiment_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
    return __import_experiment_platform(compendium, parsing_db, reporters_batch_size)
def change_reporter_platforms(request, *args, **kwargs):
    values = json.loads(request.POST['values'])
    view = request.POST['view']
    compendium = CompendiumDatabase.objects.get(id=request.POST['compendium_id'])
    parsing_db = init_parsing(compendium.id, values['experiment_id'])
    parsing_plt = ParsingPlatform.objects.using(parsing_db). \
        get(platform_fk=values['platform_id'])
    # only change the reporter platform if the target platform actually exists
    try:
        Platform.objects.using(compendium.compendium_nick_name).get(
            id=values['reporter_platform_id']
        )
        parsing_plt.reporter_platform = values['reporter_platform_id']
    except Exception:
        pass
    parsing_plt.save(using=parsing_db)
    return HttpResponse(json.dumps({'success': True}), content_type="application/json")
def read_parse_experiment_preview_raw_data(channel_name, view, request, user):
    channel = Channel(channel_name)
    start = 0
    end = None
    if request['page_size']:
        start = (request['page'] - 1) * request['page_size']
        end = start + request['page_size']
    compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])
    parsing_db = init_parsing(request['compendium_id'], request['values']['experiment_id'])
    parsing_sample = ParsingSample.objects.using(parsing_db).get(id=request['values']['id'])
    order = ''
    if request['ordering'] == 'DESC':
        order = '-'
    query_response = ParsingRawData.objects.using(parsing_db). \
        filter(sample=parsing_sample). \
        filter(Q(bio_feature_reporter_name__contains=request['filter']) |
               Q(value__contains=request['filter'])
               ).order_by(order + request['ordering_value'])
    total = query_response.count()
    query_response = query_response[start:end]
    raw_data = [rd.to_dict() for rd in query_response]
    status = 'ready'
    channel.send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': request,
                'data': {
                    'raw_data': raw_data,
                    'status': status,
                    'total': total
                }
            }
        })
    })
def delete_platform(request, *args, **kwargs):
    comp_id = request.POST['compendium_id']
    channel_name = request.session['channel_name']
    view = request.POST['view']
    compendium = CompendiumDatabase.objects.get(id=comp_id)
    platform = Platform.objects.using(compendium.compendium_nick_name).get(id=request.POST['values'])
    # remove the platform from the parsing database of every experiment that uses it
    for exp_id in platform.platform.get_queryset().values_list('experiment_id', flat=True).distinct():
        parsing_db = init_parsing(compendium.id, exp_id)
        try:
            ParsingPlatform.objects.using(parsing_db).get(platform_fk=platform.id).delete()
        except Exception:
            pass
    platform.delete()
    # remove the downloaded platform files, if present
    base_dir = AdminOptions.objects.get(option_name='raw_data_directory')
    plt_dir = os.path.join(base_dir.option_value, compendium.compendium_nick_name,
                           'platforms', platform.platform_access_id)
    shutil.rmtree(plt_dir, ignore_errors=True)
    Group("compendium_" + str(comp_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
    return HttpResponse(json.dumps({'success': True}), content_type="application/json")
def on_success(self, retval, task_id, args, kwargs):
    user_id, compendium_id, experiment_id, keep_platform, channel_name, view, operation = args
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
    parsing_db = init_parsing(compendium_id, experiment_id)
    report = retval
    log = MessageLog()
    log.title = "Experiment: " + exp.experiment_access_id + " imported"
    log.message = "Status: success, Experiment: " + exp.experiment_access_id + \
        ", Task: " + task_id + ", User: " + str(user_id) + ", Results: " + report
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    # delete the staged CSV files
    try:
        for fl in glob.glob(parsing_db + '*.csv'):
            os.remove(fl)
    except Exception:
        pass
    message = Message(type='info',
                      title='Successfully imported experiment',
                      message='Successfully imported experiment ' + exp.experiment_access_id +
                              '<br>' + report)
    message.send_to(channel)
    imported_status = Status.objects.using(compendium.compendium_nick_name).get(name='experiment_raw_data_imported')
    exp.status = imported_status
    exp.save(using=compendium.compendium_nick_name)
    Group("compendium_" + str(compendium_id) + "_" + str(experiment_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
def on_success(self, retval, task_id, args, kwargs):
    user_id, compendium_id, experiment_id, channel_name, view, operation = args
    channel = Channel(channel_name)
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
    parsing_db = init_parsing(compendium_id, experiment_id)
    platforms = [plt.platform_access_id for plt in ParsingPlatform.objects.using(parsing_db).all()]
    log = MessageLog()
    log.title = "Platforms: " + ", ".join(platforms) + " imported"
    log.message = "Status: success, Platforms: " + ", ".join(platforms) + ", Experiment: " + \
        exp.experiment_access_id + ", Task: " + task_id + ", User: " + str(user_id) + \
        ", Results: " + retval
    log.source = log.SOURCE[1][0]
    log.save(using=compendium.compendium_nick_name)
    # delete the staged bio-feature CSV files
    for fl in glob.glob(parsing_db + '*bio_feature*.csv'):
        os.remove(fl)
    message = Message(type='info',
                      title='Successfully imported platforms',
                      message='Successfully imported platforms for experiment ' +
                              exp.experiment_access_id + ' (' + ','.join(platforms) + ') <br>' + retval)
    message.send_to(channel)
    ready_status = Status.objects.using(compendium.compendium_nick_name).get(name='platform_imported')
    for parsing_platform in ParsingPlatform.objects.using(parsing_db).all():
        plt = Platform.objects.using(compendium.compendium_nick_name).get(id=parsing_platform.platform_fk)
        plt.status = ready_status
        plt.save(using=compendium.compendium_nick_name)
    Group("compendium_" + str(compendium_id) + "_" + str(experiment_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
def import_experiment(self, user_id, compendium_id, experiment_id, keep_platform, channel_name, view, operation):
    compendium = CompendiumDatabase.objects.get(id=compendium_id)
    parsing_db = init_parsing(compendium_id, experiment_id)
    reporters_batch_size = 1000
    raw_data_batch_size = 1000
    data_importing_status = Status.objects.using(compendium.compendium_nick_name).get(
        name='experiment_raw_data_importing')
    # copy the experiment metadata from the parsing database and mark it as importing
    with transaction.atomic(using=compendium.compendium_nick_name):
        exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
        parsing_exp = ParsingExperiment.objects.using(parsing_db).all()[0]
        exp.experiment_access_id = parsing_exp.experiment_access_id
        exp.organism = parsing_exp.organism
        exp.experiment_name = parsing_exp.experiment_name
        exp.scientific_paper_ref = parsing_exp.scientific_paper_ref
        exp.description = parsing_exp.description
        exp.status = data_importing_status
        exp.save(using=compendium.compendium_nick_name)
    Group("compendium_" + str(compendium.id) + "_" + str(experiment_id)).send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': {'operation': 'refresh'},
                'data': None
            }
        })
    })
    report = ''
    if not keep_platform:
        report = __import_experiment_platform(compendium, parsing_db, reporters_batch_size)
    with transaction.atomic(using=compendium.compendium_nick_name):
        for parsing_sample in ParsingSample.objects.using(parsing_db).all():
            smp = Sample.objects.using(compendium.compendium_nick_name).get(id=parsing_sample.sample_fk)
            smp.sample_name = parsing_sample.sample_name
            smp.description = parsing_sample.description
            smp.reporter_platform_id = parsing_sample.reporter_platform
            smp.save(using=compendium.compendium_nick_name)
            # replace any previously imported raw data for this sample
            if parsing_sample.parsingrawdata_set.count():
                smp.rawdata_set.all().delete()
            raw_data_csv = os.path.join(
                os.path.dirname(parsing_db),
                os.path.basename(parsing_db) + '_raw_data.csv')
            reporter_counter = 0
            for start, end, total, qs in batch_qs(
                    parsing_sample.parsingrawdata_set.get_queryset().order_by('id'),
                    batch_size=raw_data_batch_size):
                # map reporter names in this batch to their imported reporter ids
                reporters = {
                    x['name']: x['id']
                    for x in smp.reporter_platform.biofeaturereporter_set.filter(
                        name__in=[r.bio_feature_reporter_name for r in qs]
                    ).values('id', 'name')
                }
                reporter_counter += len(reporters)
                # stage the batch as a TSV file, then bulk-load it with COPY
                with open(raw_data_csv, 'w') as csvfile_val:
                    writer_val = csv.writer(csvfile_val, delimiter='\t')
                    for raw_data in qs.all():
                        if raw_data.bio_feature_reporter_name in reporters:
                            writer_val.writerow([smp.id,
                                                 reporters[raw_data.bio_feature_reporter_name],
                                                 raw_data.value])
                with open(raw_data_csv) as csvfile_val, \
                        closing(connections[compendium.compendium_nick_name].cursor()) as cursor:
                    cursor.copy_from(
                        file=csvfile_val,
                        table=RawData._meta.db_table,
                        sep='\t',
                        columns=('sample_id', 'bio_feature_reporter_id', 'value')
                    )
            report += 'Sample: ' + smp.sample_name + ' ' + str(reporter_counter) + ' imported<br>'
    return report
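# `batch_qs` (used by `import_experiment` above) is not defined in this module.
# A minimal sketch of what it is assumed to do, inferred from the call site:
# it yields (start, end, total, queryset-slice) tuples so that very large
# querysets can be processed in fixed-size chunks.
def batch_qs(qs, batch_size=1000):
    """Yield (start, end, total, qs_slice) tuples over `qs` in batches.

    Assumes `qs` is ordered deterministically (the caller orders by 'id')
    so that slicing stays stable across iterations.
    """
    total = qs.count()
    for start in range(0, total, batch_size):
        end = min(start + batch_size, total)
        yield start, end, total, qs[start:end]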
def read_parse_experiment_preview_bio_feature_reporter(channel_name, view, request, user):
    channel = Channel(channel_name)
    start = 0
    end = None
    parsing_db = init_parsing(request['compendium_id'], request['values']['experiment_id'])
    if request['page_size']:
        start = (request['page'] - 1) * request['page_size']
        end = start + request['page_size']
    order = ''
    if request['ordering'] == 'DESC':
        order = '-'
    compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])
    platform = ParsingPlatform.objects.using(parsing_db).get(id=request['values']['id'])
    imported_platform = Platform.objects.using(compendium.compendium_nick_name).get(id=platform.platform_fk)
    # bio feature reporters: read from the compendium database when the platform
    # has already been imported and no freshly parsed data exists, otherwise
    # read from the parsing database
    is_imported = imported_platform.biofeaturereporter_set.count() > 0
    parsed = platform.parsingbiofeaturereporter_set.count() > 0
    if is_imported and not parsed:
        query_response = BioFeatureReporter.objects.using(compendium.compendium_nick_name). \
            filter(platform=imported_platform). \
            filter(Q(name__icontains=request['filter']) |
                   Q(description__icontains=request['filter'])
                   ).order_by(order + request['ordering_value'])
        total = query_response.count()
        query_response = query_response[start:end]
        bio_feature_reporter = []
        for bfr in query_response:
            b = bfr.to_dict()
            for field in bfr.biofeaturereportervalues_set.all():
                b[field.bio_feature_reporter_field.name] = field.value
            bio_feature_reporter.append(b)
    else:
        query_response = ParsingBioFeatureReporter.objects.using(parsing_db). \
            filter(platform=platform). \
            filter(Q(name__icontains=request['filter']) |
                   Q(description__icontains=request['filter'])
                   ).order_by(order + request['ordering_value'])
        total = query_response.count()
        query_response = query_response[start:end]
        bio_feature_reporter = []
        for bfr in query_response:
            b = bfr.to_dict()
            for field in bfr.parsingbiofeaturereportervalues_set.all():
                b[field.bio_feature_reporter_field] = field.value
            bio_feature_reporter.append(b)
    channel.send({
        'text': json.dumps({
            'stream': view,
            'payload': {
                'request': request,
                'data': {
                    'bio_feature_reporter': bio_feature_reporter,
                    'imported': is_imported and not parsed,
                    'total': total
                }
            }
        })
    })