Beispiel #1
0
def store_analysis_result(request):
    """Stores the reporting of a transferred analysis result file,
    checks its md5.

    Expects POST params: client_id (analysis client API key), fn_id,
    ftype (file type name), outdir, filename, analysis_id.
    First report registers a StoredFile + AnalysisResultFile; a rerun
    only triggers a fresh MD5 check instead of a duplicate record.
    """
    if request.method != 'POST':
        return HttpResponseNotAllowed(permitted_methods=['POST'])
    data = request.POST
    # Only the trusted analysis client may report results
    if ('client_id' not in data or
            data['client_id'] != settings.ANALYSISCLIENT_APIKEY):
        return HttpResponseForbidden()
    # FIXME nextflow to file this, then poll rawstatus/md5success
    # before deleting rundir etc, or report taskfail
    # Reruns lead to trying to store files multiple times, avoid that:
    anashare = ServerShare.objects.get(name=settings.ANALYSISSHARENAME)
    try:
        # Look up only the requested type instead of materializing a
        # name->id map of ALL StoredFileType rows to index one key.
        ftypeid = StoredFileType.objects.values_list('id', flat=True).get(
            name=data['ftype'])
    except StoredFileType.DoesNotExist:
        return HttpResponseForbidden('File type does not exist')
    try:
        sfile = StoredFile.objects.get(rawfile_id=data['fn_id'], filetype_id=ftypeid)
    except StoredFile.DoesNotExist:
        print('New transfer registered, fn_id {}'.format(data['fn_id']))
        sfile = StoredFile(rawfile_id=data['fn_id'], filetype_id=ftypeid,
                           servershare=anashare, path=data['outdir'],
                           filename=data['filename'], md5='', checked=False)
        sfile.save()
        AnalysisResultFile.objects.create(analysis_id=data['analysis_id'], sfile=sfile)
    else:
        print('Analysis result already registered as transfer, client asks for new '
              'MD5 check after a possible rerun. Running MD5 check.')
    create_file_job('get_md5', sfile.id)
    return HttpResponse()
Beispiel #2
0
def singlefile_qc(rawfile, storedfile):
    """This method is only run for detecting new incoming QC files"""
    add_to_qc(rawfile, storedfile)
    # Convert the raw file to mzML on the dedicated pwiz QC queue,
    # then kick off the longitudinal QC workflow on it.
    jobutil.create_file_job(
        'convert_single_mzml', storedfile.id, queue=settings.QUEUE_QCPWIZ)
    start_qc_analysis(
        rawfile, storedfile, settings.LONGQC_NXF_WF_ID, settings.LONGQC_FADB_ID)
Beispiel #3
0
def start_qc_analysis(rawfile, storedfile, wf_id, dbfn_id):
    """Create a QC Analysis record named after producer, file name and date,
    and queue the longitudinal QC workflow job for the stored file."""
    qc_name = '_'.join(
        [str(part) for part in (rawfile.producer.name, rawfile.name, rawfile.date)])
    analysis = Analysis(user_id=settings.QC_USER_ID, name=qc_name)
    analysis.save()
    jobutil.create_file_job('run_longit_qc_workflow', storedfile.id,
                            analysis.id, wf_id, dbfn_id)
Beispiel #4
0
def singlefile_qc(rawfile, storedfile):
    """This method is only run for detecting new incoming QC files"""
    # Register the file in the QC dataset first.
    add_to_qc(rawfile, storedfile)
    # Queue mzML conversion on the pwiz QC queue and start the long QC run.
    jobutil.create_file_job('convert_single_mzml', storedfile.id,
                            queue=settings.QUEUE_QCPWIZ)
    start_qc_analysis(rawfile, storedfile,
                      settings.LONGQC_NXF_WF_ID, settings.LONGQC_FADB_ID)
Beispiel #5
0
def file_transferred(request):
    """Treats POST requests with:
        - fn_id
    Starts checking file MD5 in background.

    Registers the StoredFile on first report; on a retransfer it only
    schedules a fresh MD5 check. Responds with a JSON state of
    'ok'/'error', or 403 on missing/invalid parameters.
    """
    if request.method != 'POST':
        return HttpResponseNotAllowed(permitted_methods=['POST'])
    try:
        fn_id = request.POST['fn_id']
        client_id = request.POST['client_id']
        ftype = request.POST['ftype']
        fname = request.POST['filename']
    except KeyError as error:
        print('POST request to file_transferred with missing parameter, '
              '{}'.format(error))
        return HttpResponseForbidden()
    try:
        check_producer(client_id)
    except Producer.DoesNotExist:
        return HttpResponseForbidden()
    tmpshare = ServerShare.objects.get(name=settings.TMPSHARENAME)
    try:
        RawFile.objects.get(pk=fn_id)
    except RawFile.DoesNotExist:
        print('File has not been registered yet, cannot transfer')
        return JsonResponse({
            'fn_id': request.POST['fn_id'],
            'state': 'error'
        })
    try:
        file_transferred = StoredFile.objects.get(rawfile_id=fn_id,
                                                  filetype=ftype)
    except StoredFile.DoesNotExist:
        print('New transfer registered, fn_id {}'.format(fn_id))
        file_transferred = StoredFile(rawfile_id=fn_id,
                                      filetype=ftype,
                                      servershare=tmpshare,
                                      path='',
                                      filename=fname,
                                      md5='',
                                      checked=False)
        file_transferred.save()
    else:
        print('File already registered as transfer, client asks for new '
              'MD5 check after a possible retransfer. Running MD5 check.')
    # BUG FIX: original code returned from a `finally` block, which
    # silently swallows any exception raised in the try/except/else above.
    # The MD5 job was also queued identically in both branches; do it once.
    jobutil.create_file_job('get_md5', file_transferred.id)
    return JsonResponse({
        'fn_id': request.POST['fn_id'],
        'state': 'ok'
    })
Beispiel #6
0
def file_transferred(request):
    """Treats POST requests with:
        - fn_id
    Starts checking file MD5 in background.

    Resolves the posted file-type name to its id, registers the
    StoredFile on first report, and schedules an MD5 check. Responds
    with JSON 'ok'/'error', or 403 on missing/invalid parameters.
    """
    if request.method != 'POST':
        return HttpResponseNotAllowed(permitted_methods=['POST'])
    try:
        fn_id = request.POST['fn_id']
        client_id = request.POST['client_id']
        ftype = request.POST['ftype']
        fname = request.POST['filename']
    except KeyError as error:
        print('POST request to file_transferred with missing parameter, '
              '{}'.format(error))
        return HttpResponseForbidden()
    try:
        check_producer(client_id)
    except Producer.DoesNotExist:
        return HttpResponseForbidden()
    tmpshare = ServerShare.objects.get(name=settings.TMPSHARENAME)
    try:
        RawFile.objects.get(pk=fn_id)
    except RawFile.DoesNotExist:
        print('File has not been registered yet, cannot transfer')
        return JsonResponse({'fn_id': request.POST['fn_id'],
                             'state': 'error'})
    try:
        # Query only the requested type instead of building a name->id
        # dict over every StoredFileType row.
        ftypeid = StoredFileType.objects.values_list('id', flat=True).get(
            name=ftype)
    except StoredFileType.DoesNotExist:
        return HttpResponseForbidden('File type does not exist')
    try:
        file_transferred = StoredFile.objects.get(rawfile_id=fn_id,
                                                  filetype_id=ftypeid)
    except StoredFile.DoesNotExist:
        print('New transfer registered, fn_id {}'.format(fn_id))
        file_transferred = StoredFile(rawfile_id=fn_id, filetype_id=ftypeid,
                                      servershare=tmpshare, path='',
                                      filename=fname, md5='', checked=False)
        file_transferred.save()
    else:
        print('File already registered as transfer, client asks for new '
              'MD5 check after a possible retransfer. Running MD5 check.')
    # BUG FIX: original code returned from a `finally` block, which
    # silently swallows any exception raised in the try/except/else above.
    # The MD5 job was queued identically in both branches; do it once here.
    jobutil.create_file_job('get_md5', file_transferred.id)
    return JsonResponse({'fn_id': request.POST['fn_id'],
                         'state': 'ok'})
Beispiel #7
0
def upload_userfile_token(request):
    """Accepts a user file upload authorized by a one-time token.

    Expects POST with a 'token' parameter and a 'file' multipart part.
    Returns 403 on missing/unknown/expired token or missing file part,
    otherwise moves the upload into place and queues an MD5 check.
    """
    if request.method != 'POST':
        return HttpResponseNotAllowed(permitted_methods=['POST'])
    try:
        ufile = UserFile.objects.select_related('sfile__servershare', 'upload').get(
            upload__token=request.POST['token'])
    except (KeyError, UserFileUpload.DoesNotExist) as e:
        print(e)
        return HttpResponseForbidden()
    if ufile.upload.expires < timezone.now():
        print('expired', ufile.upload.expires)
        return HttpResponseForbidden()
    try:
        upload = request.FILES['file']
    except KeyError:
        # BUG FIX: a POST without a file part used to crash with an
        # unhandled MultiValueDictKeyError (HTTP 500); reject it like
        # the other malformed requests.
        print('POST request to upload_userfile_token without file attached')
        return HttpResponseForbidden()
    move_uploaded_file(ufile, upload)
    jobutil.create_file_job('get_md5', ufile.sfile.id)
    return HttpResponse()
Beispiel #8
0
def mzrefine_file_done(request):
    """Refined mzML files must get MD5, fn, path and moved to their dataset directory from the
    analysis output dir (they result from a nextflow analysis run"""
    # FIXME need to remove the empty dir after moving all the files, how?
    # BUG FIX: every sibling view rejects non-POST requests explicitly;
    # without this a GET fell through into the handler body.
    if request.method != 'POST':
        return HttpResponseNotAllowed(permitted_methods=['POST'])
    data = request.POST
    # Only the trusted analysis client may report refined files
    if ('client_id' not in data or
            data['client_id'] != settings.ANALYSISCLIENT_APIKEY):
        return HttpResponseForbidden()
    sfile = StoredFile.objects.select_related('rawfile__datasetrawfile__dataset').get(pk=data['fn_id'])
    # Record the reported location/checksum and mark the file verified
    sfile.path = data['outdir']
    sfile.filename = data['filename']
    sfile.md5 = data['md5']
    sfile.checked = True
    sfile.save()
    # Move into the dataset's storage dir, stripping the analysis-run
    # prefix before '___' from the filename.
    create_file_job('move_single_file', sfile.id, sfile.rawfile.datasetrawfile.dataset.storage_loc,
                    newname=sfile.filename.split('___')[1])
    return HttpResponse()
Beispiel #9
0
def manyfile_qc(rawfiles, storedfiles):
    """For reanalysis or batching by hand"""
    # Make sure every raw file belongs to a dataset, then queue conversion.
    for rawfile, storedfile in zip(rawfiles, storedfiles):
        try:
            dsmodels.DatasetRawFile.objects.select_related(
                'dataset').filter(rawfile=rawfile).get().dataset
        except dsmodels.DatasetRawFile.DoesNotExist:
            qcdset = add_to_qc(rawfile, storedfile)
            print('Added QC file {} to QC dataset {}'.format(
                rawfile.id, qcdset.id))
        jobutil.create_file_job('convert_single_mzml', storedfile.id)
    # Do not rerun with the same workflow as previously
    for rawfile, storedfile in zip(rawfiles, storedfiles):
        prior_runs = dashmodels.QCData.objects.filter(
            analysis__nextflowsearch__nfworkflow=settings.LONGQC_NXF_WF_ID,
            rawfile=rawfile.id).count()
        if prior_runs:
            print('QC has already been done with this workflow (id: {}) for '
                  'rawfile id {}'.format(settings.LONGQC_NXF_WF_ID, rawfile.id))
        else:
            start_qc_analysis(rawfile, storedfile, settings.LONGQC_NXF_WF_ID,
                              settings.LONGQC_FADB_ID)
Beispiel #10
0
def do_md5_check(file_transferred):
    """Compare a StoredFile's computed MD5 to its registered source MD5.

    Returns a JsonResponse with md5_state: False (no MD5 yet), 'ok'
    (match — also marks the file checked and, for fresh non-analysis,
    non-backed-up files, queues QC and PDC archiving) or 'error'.
    """
    file_registered = file_transferred.rawfile
    if not file_transferred.md5:
        # MD5 job has not finished yet
        return JsonResponse({'fn_id': file_registered.id, 'md5_state': False})
    elif file_registered.source_md5 == file_transferred.md5:
        if not file_transferred.checked:
            file_transferred.checked = True
            file_transferred.save()
        # Only first-time raw transfers (not analysis results, not already
        # backed up) trigger QC/archiving. BUG FIX: pass the id to the
        # sfile_id filter instead of relying on implicit instance->pk
        # coercion; use exists() rather than fetching rows for a boolean.
        if (not AnalysisResultFile.objects.filter(
                sfile_id=file_transferred.id).exists() and
                not SwestoreBackedupFile.objects.filter(
                    storedfile_id=file_transferred.id).exists()):
            fn = file_transferred.filename
            # FIXME hardcoded instruments are not dynamic!
            if 'QC' in fn and 'hela' in fn.lower() and any([x in fn for x in ['QE', 'HFLu', 'HFLe', 'Velos', 'HFTo', 'HFGi']]):
                singlefile_qc(file_transferred.rawfile, file_transferred)
            jobutil.create_file_job('create_pdc_archive',
                                    file_transferred.id, file_transferred.md5)
        return JsonResponse({'fn_id': file_registered.id, 'md5_state': 'ok'})
    else:
        return JsonResponse({'fn_id': file_registered.id, 'md5_state': 'error'})
Beispiel #11
0
def manyfile_qc(rawfiles, storedfiles):
    """For reanalysis or batching by hand"""
    file_pairs = list(zip(rawfiles, storedfiles))
    # First pass: ensure dataset membership, queue mzML conversion.
    for rawfn, sfn in file_pairs:
        try:
            dsmodels.DatasetRawFile.objects.select_related('dataset').filter(
                rawfile=rawfn).get().dataset
        except dsmodels.DatasetRawFile.DoesNotExist:
            dset = add_to_qc(rawfn, sfn)
            print('Added QC file {} to QC dataset {}'.format(
                rawfn.id, dset.id))
        jobutil.create_file_job('convert_single_mzml', sfn.id)
    # Second pass: do not rerun with the same workflow as previously.
    for rawfn, sfn in file_pairs:
        already_run = dashmodels.QCData.objects.filter(
            analysis__nextflowsearch__nfworkflow=settings.LONGQC_NXF_WF_ID,
            rawfile=rawfn.id).count()
        if already_run:
            print('QC has already been done with this workflow (id: {}) for '
                  'rawfile id {}'.format(settings.LONGQC_NXF_WF_ID, rawfn.id))
        else:
            start_qc_analysis(rawfn, sfn, settings.LONGQC_NXF_WF_ID,
                              settings.LONGQC_FADB_ID)
Beispiel #12
0
def check_md5_success(request):
    """GET endpoint for a producer client to poll transfer/MD5 state.

    Expects GET params fn_id, ftype, client_id. Returns JSON with
    md5_state: False (not transferred / not checked yet), 'ok' (match —
    marks checked and queues QC + swestore backup for fresh raw files)
    or 'error' (MD5 mismatch). 403 on missing params or unknown client.
    """
    if not request.method == 'GET':
        return HttpResponseNotAllowed(permitted_methods=['GET'])
    try:
        fn_id = request.GET['fn_id']
        ftype = request.GET['ftype']
        client_id = request.GET['client_id']
    except KeyError:
        return HttpResponseForbidden()
    try:
        check_producer(client_id)
    except Producer.DoesNotExist:
        return HttpResponseForbidden()
    print('Transfer state requested for fn_id {}, type {}'.format(
        fn_id, ftype))
    try:
        file_transferred = StoredFile.objects.get(rawfile_id=fn_id,
                                                  filetype=ftype)
    except StoredFile.DoesNotExist:
        return JsonResponse({'fn_id': fn_id, 'md5_state': False})
    file_registered = file_transferred.rawfile
    if not file_transferred.md5:
        # MD5 job has not finished yet
        return JsonResponse({'fn_id': fn_id, 'md5_state': False})
    elif file_registered.source_md5 == file_transferred.md5:
        if not file_transferred.checked:
            file_transferred.checked = True
            file_transferred.save()
        # Only first-time raw transfers (not analysis results, not already
        # backed up) trigger QC/backup. BUG FIX: pass the id to the
        # sfile_id filter instead of relying on implicit instance->pk
        # coercion; use exists() rather than fetching rows for a boolean.
        if (not AnalysisResultFile.objects.filter(
                sfile_id=file_transferred.id).exists()
                and not SwestoreBackedupFile.objects.filter(
                    storedfile_id=file_transferred.id).exists()):
            fn = file_transferred.filename
            # FIXME hardcoded instruments are not dynamic!
            if 'QC' in fn and 'hela' in fn.lower() and any(
                [x in fn for x in ['QE', 'HFLu', 'HFLe', 'Velos']]):
                singlefile_qc(file_transferred.rawfile, file_transferred)
            jobutil.create_file_job('create_swestore_backup',
                                    file_transferred.id, file_transferred.md5)
        return JsonResponse({'fn_id': fn_id, 'md5_state': 'ok'})
    else:
        return JsonResponse({'fn_id': fn_id, 'md5_state': 'error'})
Beispiel #13
0
def rename_file(request):
    """Renames a single file. This checks if characters are correct, launches job
    with bare filename (no extension), since job determines if mutliple files including
    mzML have to be renamed."""
    if not request.method == 'POST':
        return HttpResponseNotAllowed(permitted_methods=['POST'])
    data = json.loads(request.body.decode('utf-8'))
    try:
        sfile = StoredFile.objects.filter(pk=data['sf_id']).select_related(
            'rawfile').get()
        newfilename = os.path.splitext(data['newname'])[0]
        #mv_mzml = data['mvmzml']  # TODO optional mzml renaming too? Now it is default
    except (StoredFile.DoesNotExist, KeyError):
        print('Stored file to rename does not exist')
        return HttpResponseForbidden()
    if request.user.id not in get_file_owners(sfile):
        print('No ownership of file to rename')
        return HttpResponseForbidden()
    # Raw string for the regex (avoids invalid-escape warning for \-);
    # mzML-derived files are renamed via their raw file, not directly.
    if re.match(r'^[a-zA-Z_0-9\-]*$', newfilename) is None or sfile.filetype_id in [settings.MZML_SFGROUP_ID, settings.REFINEDMZML_SFGROUP_ID]:
        # TODO Give proper errors to JSON if possible!
        print('Illegal characters in filename {}'.format(newfilename))
        return HttpResponseForbidden()
    jobutil.create_file_job('rename_file', sfile.id, newfilename)
    # BUG FIX: the view previously fell off the end returning None, which
    # Django rejects; acknowledge the queued rename explicitly.
    return HttpResponse()
Beispiel #14
0
def set_libraryfile(request):
    """Marks a registered file as a library file (admin only).

    Expects POST with client_id (admin API key), fn_id and desc. Files
    still on the tmp share are moved to the library path; returns JSON
    {'library': bool, 'state': 'ok'} or 403 on bad input.
    """
    # BUG FIX: non-POST requests previously fell through and the view
    # returned None; reject them explicitly like the sibling views.
    if request.method != 'POST':
        return HttpResponseNotAllowed(permitted_methods=['POST'])
    try:
        client_id = request.POST['client_id']
        fn_id = request.POST['fn_id']
    except KeyError as error:
        print('POST request to register_file with missing parameter, '
              '{}'.format(error))
        return HttpResponseForbidden()
    if client_id != settings.ADMIN_APIKEY:
        print('POST request with incorrect client id '
              '{}'.format(client_id))
        return HttpResponseForbidden()
    try:
        rawfn = RawFile.objects.get(pk=fn_id)
    except RawFile.DoesNotExist:
        print('POST request with incorrect fn id ' '{}'.format(fn_id))
        return HttpResponseForbidden()
    else:
        sfile = StoredFile.objects.select_related('servershare').get(
            rawfile_id=fn_id)
        if LibraryFile.objects.filter(sfile__rawfile_id=fn_id):
            # Already a library file; nothing to do
            response = {'library': True, 'state': 'ok'}
        elif sfile.servershare.name == settings.TMPSHARENAME:
            # Freshly transferred file: register and move to library path
            libfn = LibraryFile.objects.create(
                sfile=sfile, description=request.POST['desc'])
            jobutil.create_file_job('move_single_file',
                                    sfile.id,
                                    settings.LIBRARY_FILE_PATH,
                                    newname='libfile_{}_{}'.format(
                                        libfn.id, sfile.filename))
            response = {'library': True, 'state': 'ok'}
        else:
            # Already stored elsewhere: register only, no move
            LibraryFile.objects.create(sfile=sfile,
                                       description=request.POST['desc'])
            response = {'library': False, 'state': 'ok'}
    return JsonResponse(response)
Beispiel #15
0
def set_libraryfile(request):
    """Marks a registered file as a library file (admin only).

    POST with client_id (admin API key), fn_id and desc. A file still on
    the tmp share is also moved to the library path; responds with JSON
    {'library': bool, 'state': 'ok'} or 403 on bad input.
    """
    if request.method != 'POST':
        return HttpResponseNotAllowed(permitted_methods=['POST'])
    try:
        client_id = request.POST['client_id']
        fn_id = request.POST['fn_id']
    except KeyError as error:
        print('POST request to register_file with missing parameter, '
              '{}'.format(error))
        return HttpResponseForbidden()
    if client_id != settings.ADMIN_APIKEY:
        print('POST request with incorrect client id '
              '{}'.format(client_id))
        return HttpResponseForbidden()
    try:
        # Existence check only; the StoredFile is fetched below.
        RawFile.objects.get(pk=fn_id)
    except RawFile.DoesNotExist:
        print('POST request with incorrect fn id '
              '{}'.format(fn_id))
        return HttpResponseForbidden()
    sfile = StoredFile.objects.select_related('servershare').get(
        rawfile_id=fn_id)
    if LibraryFile.objects.filter(sfile__rawfile_id=fn_id):
        # Already registered as a library file
        response = {'library': True, 'state': 'ok'}
    elif sfile.servershare.name == settings.TMPSHARENAME:
        # Fresh transfer: register and queue the move to the library path
        libfn = LibraryFile.objects.create(
            sfile=sfile, description=request.POST['desc'])
        libname = 'libfile_{}_{}'.format(libfn.id, sfile.filename)
        jobutil.create_file_job(
            'move_single_file', sfile.id, settings.LIBRARY_FILE_PATH,
            newname=libname)
        response = {'library': True, 'state': 'ok'}
    else:
        # Stored elsewhere already: register without moving
        LibraryFile.objects.create(sfile=sfile,
                                   description=request.POST['desc'])
        response = {'library': False, 'state': 'ok'}
    return JsonResponse(response)
Beispiel #16
0
def start_qc_analysis(rawfile, storedfile, wf_id, dbfn_id):
    """Persist a QC Analysis named '<producer>_<rawfile>_<date>' and queue
    the longitudinal QC workflow job on the stored file."""
    analysis_name = '{}_{}_{}'.format(
        rawfile.producer.name, rawfile.name, rawfile.date)
    analysis = Analysis(user_id=settings.QC_USER_ID, name=analysis_name)
    analysis.save()
    jobutil.create_file_job('run_longit_qc_workflow', storedfile.id,
                            analysis.id, wf_id, dbfn_id)