Example No. 1
 def on_success(self, retval, task_id, args, kwargs):
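     # Celery success hook: broadcast a refresh to clients, mark the alignment filter ready, and log the outcome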
     user_id, compendium_id, plt_dir, platform_id, blast_file_name, alignment_length_1, gap_open_1, \
         mismatches_1, alignment_length_2, gap_open_2, mismatches_2, channel_name, view, operation = args
     Group("compendium_" + str(compendium_id)).send({
         'text':
         json.dumps({
             'stream': view,
             'payload': {
                 'request': {
                     'operation': 'refresh'
                 },
                 'data': None
             }
         })
     })
     compendium = CompendiumDatabase.objects.get(id=compendium_id)
     platform = Platform.objects.using(
         compendium.compendium_nick_name).get(id=platform_id)
     mapper = MicroarrayMapper(os.path.join(plt_dir, blast_file_name))
     mapper.set_filter_status(retval, 'ready')
     log = MessageLog()
     log.title = "Filtering of alignment " + blast_file_name
     log.message = "Status: success, Platform: " + platform.platform_access_id + ", Alignment: " + blast_file_name + \
                   ", Task: " + task_id + ", User: " + User.objects.get(id=user_id).username
     log.source = log.SOURCE[1][0]
     log.save(using=compendium.compendium_nick_name)
Example No. 2
    def on_failure(self, exc, task_id, args, kwargs, einfo):
        user_id, compendium_id, plt_dir, platform_id, filter_id, \
        blast_file_name, channel_name, view, operation = args
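        # the caller's private channel, used below to deliver the error message to that user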
        channel = Channel(channel_name)
        compendium = CompendiumDatabase.objects.get(id=compendium_id)

        log = MessageLog()
        plt = Platform.objects.using(
            compendium.compendium_nick_name).get(id=platform_id)
        log.title = "Platform: " + plt.platform_access_id + " importing mapping error"
        log.message = "Status: error, Platform: " + plt.platform_access_id + \
                      ", Task: " + task_id + ", User: " + User.objects.get(id=user_id).username + \
                      ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
        log.source = log.SOURCE[1][0]
        log.save(using=compendium.compendium_nick_name)

        message = Message(type='error',
                          title='Error during importing platform mapping',
                          message=str(exc))
        message.send_to(channel)
        Group("compendium_" + str(compendium_id)).send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })
Example No. 3
 def on_failure(self, exc, task_id, args, kwargs, einfo):
     user_id, compendium_id, experiment_id, keep_platform, channel_name, view, operation = args
     channel = Channel(channel_name)
     compendium = CompendiumDatabase.objects.get(id=compendium_id)
     exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
     parsing_db = init_parsing(compendium_id, experiment_id)
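     # init_parsing returns the per-experiment parsing database alias, also used as a file-path prefix below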
     log = MessageLog()
     log.title = "Experiment: " + exp.experiment_access_id + " import error"
     log.message = "Status: error, Experiment: " + exp.experiment_access_id + ", Task: " + task_id + \
                   ", User: " + User.objects.get(id=user_id).username + ", Exception: " \
                   + str(exc) + ", Stacktrace: " + einfo.traceback
     log.source = log.SOURCE[1][0]
     log.save(using=compendium.compendium_nick_name)
     # delete the intermediate CSV files produced during parsing
     for fl in glob.glob(parsing_db + '*.csv'):
         os.remove(fl)
     message = Message(type='error', title='Error on importing experiment ' + exp.experiment_access_id, message=str(exc))
     message.send_to(channel)
     data_ready_status = Status.objects.using(compendium.compendium_nick_name).get(name='experiment_data_ready')
     exp.status = data_ready_status
     exp.save(using=compendium.compendium_nick_name)
     Group("compendium_" + str(compendium_id) + "_" + str(experiment_id)).send({
         'text': json.dumps({
             'stream': view,
             'payload': {
                 'request': {'operation': 'refresh'},
                 'data': None
             }
         })
     })
Example No. 4
    def on_failure(self, exc, task_id, args, kwargs, einfo):
        user_id, compendium_id, ontology_id, filename, file_type, channel_name, view, operation = args
        channel = Channel(channel_name)
        compendium = CompendiumDatabase.objects.get(id=compendium_id)

        log = MessageLog()
        log.title = "Biological feature annotation"
        log.message = "Status: error, Task: " + task_id + ", User: " + \
                      User.objects.get(id=user_id).username + ", Error: " + str(exc)
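        # SOURCE is presumably the model's choices tuple; [1][0] picks the key of its second entry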
        log.source = log.SOURCE[1][0]
        log.save(using=compendium.compendium_nick_name)

        message = Message(type='error', title='Error', message=str(exc))
        message.send_to(channel)

        Group("compendium_" + str(compendium_id)).send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })
Example No. 5
    def on_failure(self, exc, task_id, args, kwargs, einfo):
        user_id, compendium_id, term, db_id, channel_name, view, operation = args
        channel = Channel(channel_name)
        compendium = CompendiumDatabase.objects.get(id=compendium_id)

        log = MessageLog()
        log.title = "Search experiment " + term
        log.message = "Status: error, Term: " + term + ", Task: " + task_id + ", User: " + \
                      User.objects.get(id=user_id).username + ", Exception: " + str(exc) + \
                      ", Stacktrace: " + einfo.traceback
        log.source = log.SOURCE[1][0]
        log.save(using=compendium.compendium_nick_name)

        message = Message(type='error', title='Error', message=str(exc))
        message.send_to(channel)

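        # drop the bookkeeping record of this task so the view no longer shows it as running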
        ViewTask.objects.using(compendium.compendium_nick_name). \
            get(task_id=task_id, operation=operation, view=view).delete()

        Group("compendium_" + str(compendium_id)).send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })
Example No. 6
    def on_success(self, retval, task_id, args, kwargs):
        user_id, compendium_id, term, db_id, channel_name, view, operation = args

        compendium = CompendiumDatabase.objects.get(id=compendium_id)

        log = MessageLog()
        log.title = "Search experiment " + term
        log.message = "Status: success, Term: " + term + ", Task: " + task_id + \
                      ", User: " + User.objects.get(id=user_id).username
        log.source = log.SOURCE[1][0]
        log.save(using=compendium.compendium_nick_name)

        Group("compendium_" + str(compendium_id)).send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })
Example No. 7
 def create_message_log(request, *args, **kwargs):
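     # Django view: store a user-submitted log entry in the selected compendium DB and tell the admin group to refresh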
     values = json.loads(request.POST['values'])
     comp_id = request.POST['compendium_id']
     channel_name = request.session['channel_name']
     view = request.POST['view']
     compendium = CompendiumDatabase.objects.get(id=comp_id)
     message = MessageLog()
     message.title = values['title']
     message.message = values['message']
     message.source = message.SOURCE[0][0]
     message.save(using=compendium.compendium_nick_name)
     Group('admin').send({
         'text':
         json.dumps({
             'stream': request.POST['view'],
             'payload': {
                 'request': {
                     'operation': 'refresh'
                 },
                 'data': None
             }
         })
     })
     return HttpResponse(json.dumps({'success': True}),
                         content_type="application/json")
Example No. 8
 def on_failure(self, exc, task_id, args, kwargs, einfo):
     user_id, compendium_id, experiment_id, channel_name, view, operation = args
     channel = Channel(channel_name)
     compendium = CompendiumDatabase.objects.get(id=compendium_id)
     exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
     parsing_db = init_parsing(compendium_id, experiment_id)
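     # collect the access IDs of all platforms involved in this parsing run for the log entry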
     platforms = [plt.platform_access_id for plt in ParsingPlatform.objects.using(parsing_db).all()]
     log = MessageLog()
     log.title = "Platforms: " + ", ".join(platforms) + " error"
     log.message = "Status: error, Platforms: " + ", ".join(platforms) + ", Experiment: " + \
                   exp.experiment_access_id + ", Task: " + task_id + ", User: " + \
                   User.objects.get(id=user_id).username + ", Exception: " \
                   + str(exc) + ", Stacktrace: " + einfo.traceback
     log.source = log.SOURCE[1][0]
     log.save(using=compendium.compendium_nick_name)
     # delete the intermediate bio-feature CSV files
     for fl in glob.glob(parsing_db + '*bio_feature*.csv'):
         os.remove(fl)
     message = Message(type='error', title='Error on importing platforms for experiment ' + exp.experiment_access_id, message=str(exc))
     message.send_to(channel)
     for parsing_platform in ParsingPlatform.objects.using(parsing_db).all():
         plt = Platform.objects.using(compendium.compendium_nick_name).get(id=parsing_platform.platform_fk)
         plt.status = None
         plt.save(using=compendium.compendium_nick_name)
     Group("compendium_" + str(compendium_id) + "_" + str(experiment_id)).send({
         'text': json.dumps({
             'stream': view,
             'payload': {
                 'request': {'operation': 'refresh'},
                 'data': None
             }
         })
     })
Example No. 9
    def on_success(self, retval, task_id, args, kwargs):
        user_id, compendium_id, ontology_id, filename, file_type, channel_name, view, operation = args

        channel = Channel(channel_name)
        compendium = CompendiumDatabase.objects.get(id=compendium_id)

        log = MessageLog()
        log.title = "Biological feature annotation"
        log.message = "Status: success, Task: " + task_id + \
                      ", User: " + User.objects.get(id=user_id).username
        log.source = log.SOURCE[1][0]
        log.save(using=compendium.compendium_nick_name)

        Group("compendium_" + str(compendium_id)).send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })
Example No. 10
 def on_failure(self, exc, task_id, args, kwargs, einfo):
     user_id, compendium_id, path, channel_name, view, operation = args
     channel = Channel(channel_name)
     compendium = CompendiumDatabase.objects.get(id=compendium_id)
     log = MessageLog()
     log.title = "Export raw data"
     log.message = "Status: error, Task: " + task_id + ", User: " + \
                   User.objects.get(id=user_id).username + ", Exception: " + str(exc) + \
                   ", Stacktrace: " + einfo.traceback
     log.source = log.SOURCE[1][0]
     log.save(using=compendium.compendium_nick_name)
Example No. 11
 def on_failure(self, exc, task_id, args, kwargs, einfo):
     user_id, compendium_id, file_path, bio_feature_name, file_type, channel_name, view, operation = args
     channel = Channel(channel_name)
     message = Message(type='error', title='Error', message=str(exc))
     message.send_to(channel)
     compendium = CompendiumDatabase.objects.get(id=compendium_id)
     log = MessageLog()
     log.title = "Importing " + bio_feature_name + " (biological features) from " + file_type + " file"
     log.message = "Status: error, File: " + os.path.basename(file_path) + ", Type: " + file_type + \
                   ", Task: " + task_id + ", User: " + User.objects.get(id=user_id).username + \
                   ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
     log.source = log.SOURCE[1][0]
     log.save(using=compendium.compendium_nick_name)
Example No. 12
 def on_failure(self, exc, task_id, args, kwargs, einfo):
     user_id, compendium_id, plt_dir, platform_id, blast_file_name, alignment_length_1, gap_open_1, \
         mismatches_1, alignment_length_2, gap_open_2, mismatches_2, channel_name, view, operation = args
     channel = Channel(channel_name)
     message = Message(type='error', title='Error', message=str(exc))
     message.send_to(channel)
     compendium = CompendiumDatabase.objects.get(id=compendium_id)
     platform = Platform.objects.using(
         compendium.compendium_nick_name).get(id=platform_id)
     log = MessageLog()
     log.title = "Filtering of alignment " + blast_file_name
     log.message = "Status: error, Platform: " + platform.platform_access_id + ", Alignment: " + blast_file_name + \
                   ", Task: " + task_id + ", User: " + User.objects.get(id=user_id).username + \
                   ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
     log.source = log.SOURCE[1][0]
     log.save(using=compendium.compendium_nick_name)
Example No. 13
 def on_success(self, retval, task_id, args, kwargs):
     user_id, compendium_id, path, channel_name, view, operation = args
     channel = Channel(channel_name)
     compendium = CompendiumDatabase.objects.get(id=compendium_id)
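     # retval is the path of the exported .tsv.gz archive; a matching .hdf5 file sits next to it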
     filename_tsv = os.path.basename(str(retval))
     filename_hdf5 = os.path.basename(
         str(retval).replace('.tsv.gz', '.hdf5'))
     url_tsv = '/export_data/read_file?path=' + str(retval)
     url_hdf5 = '/export_data/read_file?path=' + str(retval).replace(
         '.tsv.gz', '.hdf5')
     log = MessageLog()
     log.title = "Export raw data"
     log.message = "Status: success, <br> File TSV: <a href='" + url_tsv + "'>" + filename_tsv + "</a>, <br>" \
                   "File HDF5: <a href='" + url_hdf5 + "'>" + filename_hdf5 + "</a>, Task: " + task_id + \
                   ", User: " + User.objects.get(id=user_id).username
     log.source = log.SOURCE[1][0]
     log.save(using=compendium.compendium_nick_name)
Example No. 14
    def on_success(self, retval, task_id, args, kwargs):
        user_id, compendium_id, experiment_id, channel_name, view, operation = args
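        # refresh both this task's view and the experiments list for everyone watching the compendium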
        Group("compendium_" + str(compendium_id)).send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })
        Group("compendium_" + str(compendium_id)).send({
            'text':
            json.dumps({
                'stream': 'experiments',
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })

        compendium = CompendiumDatabase.objects.get(id=compendium_id)
        exp = ExperimentSearchResult.objects.using(
            compendium.compendium_nick_name).get(id=experiment_id)

        log = MessageLog()
        log.title = "Download experiment " + exp.experiment_access_id
        log.message = "Status: success, Experiment: " + exp.experiment_access_id + ", Task: " + task_id + \
                      ", User: " + User.objects.get(id=user_id).username + ", Notes: " + retval
        log.source = log.SOURCE[1][0]
        log.save(using=compendium.compendium_nick_name)
        if retval:
            channel = Channel(channel_name)
            message = Message(
                type='info',
                title='Download notes',
                message="Duplicated samples are not imported! <br><br>" +
                retval)
            message.send_to(channel)
Example No. 15
 def on_failure(self, exc, task_id, args, kwargs, einfo):
     user_id, compendium_id, experiment_id, channel_name, view, operation = args
     channel = Channel(channel_name)
     compendium = CompendiumDatabase.objects.get(id=compendium_id)
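     # roll the experiment back to the 'new' status, presumably so the download can be retried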
     new_status = Status.objects.using(
         compendium.compendium_nick_name).get(name='experiment_new')
     exp = ExperimentSearchResult.objects.using(
         compendium.compendium_nick_name).get(id=experiment_id)
     exp.status = new_status
     exp.save(using=compendium.compendium_nick_name)
     log = MessageLog()
     log.title = "Download experiment " + exp.experiment_access_id
     log.message = "Status: error, Experiment: " + exp.experiment_access_id + ", Task: " + task_id + \
                   ", User: " + User.objects.get(id=user_id).username + ", Exception: " + str(exc) + \
                   ", Stacktrace: " + einfo.traceback
     log.source = log.SOURCE[1][0]
     log.save(using=compendium.compendium_nick_name)
     message = Message(type='error',
                       title='Error on experiment ' +
                       exp.experiment_access_id,
                       message=str(exc))
     message.send_to(channel)
     Group("compendium_" + str(compendium_id)).send({
         'text':
         json.dumps({
             'stream': view,
             'payload': {
                 'request': {
                     'operation': 'refresh'
                 },
                 'data': None
             }
         })
     })
     Group("compendium_" + str(compendium_id)).send({
         'text':
         json.dumps({
             'stream': 'experiments',
             'payload': {
                 'request': {
                     'operation': 'refresh'
                 },
                 'data': None
             }
         })
     })
Example No. 16
    def on_failure(self, exc, task_id, args, kwargs, einfo):
        user_id, compendium_id, plt_dir, platform_id, use_short_blastn, alignment_identity, \
            channel_name, view, operation = args
        channel = Channel(channel_name)
        message = Message(type='error', title='Error', message=str(exc))
        message.send_to(channel)
        compendium = CompendiumDatabase.objects.get(id=compendium_id)
        platform = Platform.objects.using(
            compendium.compendium_nick_name).get(id=platform_id)
        log = MessageLog()
        log.title = "Alignment of platform " + platform.platform_access_id
        log.message = "Status: error, Platform: " + platform.platform_access_id + \
                      ", Task: " + task_id + ", User: " + User.objects.get(id=user_id).username + \
                      ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
        log.source = log.SOURCE[1][0]
        log.save(using=compendium.compendium_nick_name)

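        # the Celery task id doubles as the base name of the alignment artifacts in plt_dir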
        blast_file_name = os.path.join(plt_dir, task_id + '.blast')
        mapper = MicroarrayMapper(blast_file_name)
        mapper.set_alignment_status('error')

        Group("compendium_" + str(compendium_id)).send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })

        # remove the task's temporary files, keeping the .sqlite and .blast artifacts
        for file in [
                f for f in os.listdir(plt_dir)
                if os.path.isfile(os.path.join(plt_dir, f))
                and f.startswith(task_id)
        ]:
            if file.endswith('.sqlite') or file.endswith('.blast'):
                continue
            os.remove(os.path.join(plt_dir, file))
Example No. 17
 def on_success(self, retval, task_id, args, kwargs):
     user_id, compendium_id, file_path, bio_feature_name, file_type, channel_name, view, operation = args
     Group("compendium_" + str(compendium_id)).send({
         'text':
         json.dumps({
             'stream': 'bio_feature',
             'payload': {
                 'request': {
                     'operation': 'refresh'
                 },
                 'data': None
             }
         })
     })
     compendium = CompendiumDatabase.objects.get(id=compendium_id)
     log = MessageLog()
     log.title = "Importing " + bio_feature_name + " (biological features) from " + file_type + " file"
     log.message = "Status: success, File: " + os.path.basename(file_path) + ", Type: " + file_type + \
                   ", Task: " + task_id + ", User: " + User.objects.get(id=user_id).username
     log.source = log.SOURCE[1][0]
     log.save(using=compendium.compendium_nick_name)
Example No. 18
    def on_success(self, retval, task_id, args, kwargs):
        user_id, compendium_id, plt_dir, platform_id, use_short_blastn, alignment_identity, \
            channel_name, view, operation = args

        compendium = CompendiumDatabase.objects.get(id=compendium_id)
        platform = Platform.objects.using(
            compendium.compendium_nick_name).get(id=platform_id)
        log = MessageLog()
        log.title = "Alignment of platform " + platform.platform_access_id
        log.message = "Status: success, Platform: " + platform.platform_access_id + ", Task: " + task_id + \
                      ", User: " + User.objects.get(id=user_id).username + ", Report: " + retval
        log.source = log.SOURCE[1][0]
        log.save(using=compendium.compendium_nick_name)

        blast_file_name = os.path.join(plt_dir, task_id + '.blast')
        mapper = MicroarrayMapper(blast_file_name)
        mapper.set_alignment_status('ready')

        Group("compendium_" + str(compendium_id)).send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })

        # remove the task's temporary files, keeping the .sqlite and .blast artifacts
        for file in [
                f for f in os.listdir(plt_dir)
                if os.path.isfile(os.path.join(plt_dir, f))
                and f.startswith(task_id)
        ]:
            if file.endswith('.sqlite') or file.endswith('.blast'):
                continue
            os.remove(os.path.join(plt_dir, file))
Example No. 19
    def on_success(self, retval, task_id, args, kwargs):
        user_id, compendium_id, experiment_id, keep_platform, channel_name, view, operation = args
        channel = Channel(channel_name)
        compendium = CompendiumDatabase.objects.get(id=compendium_id)
        exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
        parsing_db = init_parsing(compendium_id, experiment_id)
        report = retval
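        # retval carries the human-readable import report produced by the task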
        log = MessageLog()
        log.title = "Experiment: " + exp.experiment_access_id + " imported"
        log.message = "Status: success, Experiment: " + exp.experiment_access_id + ", Task: " + task_id + \
                      ", User: " + User.objects.get(id=user_id).username + " Results: " + report
        log.source = log.SOURCE[1][0]
        log.save(using=compendium.compendium_nick_name)

        # delete CSV files
        try:
            for fl in glob.glob(parsing_db + '*.csv'):
                os.remove(fl)
        except Exception:
            # best-effort cleanup: ignore files that cannot be removed
            pass
        message = Message(type='info', title='Successfully imported experiment',
                          message='Successfully imported experiment ' + exp.experiment_access_id +
                                  '<br>' + report
                          )
        message.send_to(channel)
        imported_status = Status.objects.using(compendium.compendium_nick_name).get(name='experiment_raw_data_imported')
        exp.status = imported_status
        exp.save(using=compendium.compendium_nick_name)
        Group("compendium_" + str(compendium_id) + "_" + str(experiment_id)).send({
            'text': json.dumps({
                'stream': view,
                'payload': {
                    'request': {'operation': 'refresh'},
                    'data': None
                }
            })
        })
Example No. 20
 def on_success(self, retval, task_id, args, kwargs):
     user_id, compendium_id, experiment_id, channel_name, view, operation = args
     channel = Channel(channel_name)
     compendium = CompendiumDatabase.objects.get(id=compendium_id)
     exp = Experiment.objects.using(compendium.compendium_nick_name).get(id=experiment_id)
     parsing_db = init_parsing(compendium_id, experiment_id)
     platforms = [plt.platform_access_id for plt in ParsingPlatform.objects.using(parsing_db).all()]
     log = MessageLog()
     log.title = "Platforms: " + ", ".join(platforms) + " imported"
     log.message = "Status: success, Platforms: " + ", ".join(platforms) + ", Experiment: " + \
                   exp.experiment_access_id + ", Task: " + task_id + ", User: " + \
                   User.objects.get(id=user_id).username + " Results: " + retval
     log.source = log.SOURCE[1][0]
     log.save(using=compendium.compendium_nick_name)
     # delete the intermediate bio-feature CSV files
     for fl in glob.glob(parsing_db + '*bio_feature*.csv'):
         os.remove(fl)
     message = Message(type='info', title='Successfully imported platforms',
                       message='Successfully imported platforms for experiment ' + exp.experiment_access_id +
                       ' (' + ','.join(platforms) + ') <br>' + retval
                       )
     message.send_to(channel)
     ready_status = Status.objects.using(compendium.compendium_nick_name).get(name='platform_imported')
     for parsing_platform in ParsingPlatform.objects.using(parsing_db).all():
         plt = Platform.objects.using(compendium.compendium_nick_name).get(id=parsing_platform.platform_fk)
         plt.status = ready_status
         plt.save(using=compendium.compendium_nick_name)
     Group("compendium_" + str(compendium_id) + "_" + str(experiment_id)).send({
         'text': json.dumps({
             'stream': view,
             'payload': {
                 'request': {'operation': 'refresh'},
                 'data': None
             }
         })
     })
Example No. 21
    def on_success(self, retval, task_id, args, kwargs):
        user_id, compendium_id, exp_id, entity_type, entity_id, channel_name, view, operation = args

        channel = Channel(channel_name)
        compendium = CompendiumDatabase.objects.get(id=compendium_id)
        exp = Experiment.objects.using(
            compendium.compendium_nick_name).get(id=exp_id)
        parsed_status = Status.objects.using(
            compendium.compendium_nick_name).get(name='entity_script_parsed')
        entity_access_id = ''
        if entity_type == 'experiment':
            entity_access_id = exp.experiment_access_id
            for af in exp.assignedfile_set.all():
                log = MessageLog()
                log.title = exp.experiment_access_id + " " + af.script_name + " completed on file " + af.input_filename
                log.message = "Status: success, Experiment: " + exp.experiment_access_id + ", Order: " + str(
                    af.order) + ", Parameters: " + " ".join(
                        af.parameters
                    ) + " Task: " + task_id + ", User: " + User.objects.get(id=user_id).username
                log.source = log.SOURCE[1][0]
                log.save(using=compendium.compendium_nick_name)
                af.status = parsed_status
                af.message_log = log
                af.save(using=compendium.compendium_nick_name)
        elif entity_type == 'platform':
            platform = Platform.objects.using(
                compendium.compendium_nick_name).get(id=entity_id)
            entity_access_id = platform.platform_access_id
            for af in platform.assignedfile_set.all():
                log = MessageLog()
                log.title = exp.experiment_access_id + " " + af.script_name + " completed on file " + af.input_filename
                log.message = "Status: success, Platform: " + platform.platform_access_id + ", Order: " + str(
                    af.order) + ", Parameters: " + " ".join(
                        af.parameters
                    ) + " Task: " + task_id + ", User: " + User.objects.get(id=user_id).username
                log.source = log.SOURCE[1][0]
                log.save(using=compendium.compendium_nick_name)
                af.status = parsed_status
                af.message_log = log
                af.save(using=compendium.compendium_nick_name)
        elif entity_type == 'sample':
            sample = Sample.objects.using(compendium.compendium_nick_name).get(
                experiment=exp, id=entity_id)
            entity_access_id = sample.sample_name
            for af in sample.assignedfile_set.all():
                log = MessageLog()
                log.title = exp.experiment_access_id + " " + af.script_name + " completed on file " + af.input_filename
                log.message = "Status: success, Sample: " + sample.sample_name + ", Order: " + str(
                    af.order) + ", Parameters: " + " ".join(
                        af.parameters
                    ) + " Task: " + task_id + ", User: " + User.objects.get(id=user_id).username
                log.source = log.SOURCE[1][0]
                log.save(using=compendium.compendium_nick_name)
                af.status = parsed_status
                af.message_log = log
                af.save(using=compendium.compendium_nick_name)

        # refresh every client view that tracks parsing state for this experiment
        Group("compendium_" + str(compendium_id) + "_" + str(exp_id)).send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })

        Group("compendium_" + str(compendium_id) + "_" + str(exp_id)).send({
            'text':
            json.dumps({
                'stream': 'parse_experiment_platform',
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })

        Group("compendium_" + str(compendium_id) + "_" + str(exp_id)).send({
            'text':
            json.dumps({
                'stream': 'message_log',
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })

        Group("compendium_" + str(compendium_id) + "_" + str(exp_id)).send({
            'text':
            json.dumps({
                'stream': 'file_assignment_list',
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })

        if retval:
            message = Message(type='parsing_log',
                              title='Parsing STDOUT ' + entity_type + ' ' +
                              entity_access_id + ', experiment: ' +
                              exp.experiment_access_id,
                              message=retval.replace('\n', '<br>'))
            message.send_to(channel)
Example No. 22
    def on_failure(self, exc, task_id, args, kwargs, einfo):
        user_id, compendium_id, exp_id, entity_type, entity_id, channel_name, view, operation = args
        channel = Channel(channel_name)

        compendium = CompendiumDatabase.objects.get(id=compendium_id)
        exp = Experiment.objects.using(
            compendium.compendium_nick_name).get(id=exp_id)
        error_status = Status.objects.using(
            compendium.compendium_nick_name).get(name='entity_script_error')
        access_id = ''
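        # write an error log for every file assigned to the failing entity and flag it with the error status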
        if entity_type == 'experiment':
            for af in exp.assignedfile_set.all():
                log = MessageLog()
                log.title = exp.experiment_access_id + " " + af.script_name + " error on file " + af.input_filename
                log.message = "Status: error, Experiment: " + exp.experiment_access_id + ", Order: " + str(
                    af.order) + ", Parameters: " + " ".join(
                    af.parameters) + " Task: " + task_id + ", User: " + User.objects.get(id=user_id).username + \
                              ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
                log.source = log.SOURCE[1][0]
                log.save(using=compendium.compendium_nick_name)
                af.status = error_status
                af.message_log = log
                af.save(using=compendium.compendium_nick_name)
        elif entity_type == 'platform':
            platform = Platform.objects.using(
                compendium.compendium_nick_name).get(id=entity_id)
            for af in platform.assignedfile_set.all():
                log = MessageLog()
                log.title = exp.experiment_access_id + " " + af.script_name + " error on file " + af.input_filename
                log.message = "Status: error, Platform: " + platform.platform_access_id + ", Order: " + str(
                    af.order) + ", Parameters: " + " ".join(
                    af.parameters) + " Task: " + task_id + ", User: " + User.objects.get(id=user_id).username + \
                              ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
                log.source = log.SOURCE[1][0]
                log.save(using=compendium.compendium_nick_name)
                af.status = error_status
                af.message_log = log
                af.save(using=compendium.compendium_nick_name)
        elif entity_type == 'sample':
            sample = Sample.objects.using(compendium.compendium_nick_name).get(
                experiment=exp, id=entity_id)
            access_id = sample.sample_name
            for af in sample.assignedfile_set.all():
                log = MessageLog()
                log.title = exp.experiment_access_id + " " + af.script_name + " error on file " + af.input_filename
                log.message = "Status: error, Sample: " + sample.sample_name + ", Order: " + str(
                    af.order) + ", Parameters: " + " ".join(
                    af.parameters) + " Task: " + task_id + ", User: " + User.objects.get(id=user_id).username + \
                              ", Exception: " + str(exc) + ", Stacktrace: " + einfo.traceback
                log.source = log.SOURCE[1][0]
                log.save(using=compendium.compendium_nick_name)
                af.status = error_status
                af.message_log = log
                af.save(using=compendium.compendium_nick_name)

        Group("compendium_" + str(compendium_id) + "_" + str(exp_id)).send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })

        Group("compendium_" + str(compendium_id) + "_" + str(exp_id)).send({
            'text':
            json.dumps({
                'stream': 'message_log',
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })

        Group("compendium_" + str(compendium_id) + "_" + str(exp_id)).send({
            'text':
            json.dumps({
                'stream': 'file_assignment',
                'payload': {
                    'request': {
                        'operation': 'refresh'
                    },
                    'data': None
                }
            })
        })

        message = Message(type='error',
                          title='Error parsing ' + entity_type + ' ' +
                          access_id,
                          message=str(exc))
        message.send_to(channel)
Example No. 23
 def on_success(self, retval, task_id, args, kwargs):
     user_id, compendium_id, plt_dir, platform_id, filter_id, \
         blast_file_name, channel_name, view, operation = args
     csv_file, without_mapping_before, with_mapping_before = retval
     without_mapping_before = set(without_mapping_before)
     channel = Channel(channel_name)
     compendium = CompendiumDatabase.objects.get(id=compendium_id)
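     # re-query the reporter-to-bio-feature mapping and diff it against the snapshot taken before the import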
     without_mapping_after = set(
         BioFeatureReporter.objects.using(
             compendium.compendium_nick_name).filter(
                 platform_id=platform_id,
                 bio_feature__isnull=True).values_list(
                     'id', flat=True).distinct())
     with_mapping_after = dict(
         BioFeatureReporter.objects.using(
             compendium.compendium_nick_name).filter(
                 platform_id=platform_id,
                 bio_feature__isnull=False).values_list(
                     'id', 'bio_feature_id'))
     added = len(
         set.intersection(without_mapping_before,
                          set(with_mapping_after.keys())))
     removed = len(
         set.intersection(set(with_mapping_before.keys()),
                          without_mapping_after))
     changed = len(set.intersection(set(with_mapping_before.keys()), set(with_mapping_after.keys()))) - \
               len(set.intersection(set(with_mapping_before.items()), set(with_mapping_after.items())))
     unchanged_mapped = len(
         set.intersection(set(with_mapping_before.items()),
                          set(with_mapping_after.items())))
     unchanged_unmapped = len(
         set.intersection(without_mapping_before, without_mapping_after))
     report = 'added: {}, removed: {}, changed: {}, unchanged_mapped: {}, unchanged_unmapped: {}'.format(
         added, removed, changed, unchanged_mapped, unchanged_unmapped)
     plt = Platform.objects.using(
         compendium.compendium_nick_name).get(id=platform_id)
     log = MessageLog()
     log.title = "Platform: " + plt.platform_access_id + " mapping imported"
     log.message = "Status: success, Platform: " + plt.platform_access_id + \
                   ", Report: " + report + ", Task: " + task_id + \
                   ", User: " + User.objects.get(id=user_id).username
     log.source = log.SOURCE[1][0]
     log.save(using=compendium.compendium_nick_name)
     # reset the imported flag on every alignment before marking the selected one
     for blast_file in glob.glob(plt_dir + "/*.blast"):
         mapper = MicroarrayMapper(os.path.join(plt_dir, blast_file))
         mapper.set_imported(False)
     mapper = MicroarrayMapper(os.path.join(plt_dir, blast_file_name))
     mapper.set_imported(True, filter_id)
     mapper.set_filter_status(filter_id, 'ready')
     Group("compendium_" + str(compendium_id)).send({
         'text':
         json.dumps({
             'stream': view,
             'payload': {
                 'request': {
                     'operation': 'refresh'
                 },
                 'data': None
             }
         })
     })