Example #1
# Shared imports for the examples below; the models import path is an
# assumption, everything else is standard Django / Python 2 stdlib.
import urllib
import urllib2
import urlparse

import requests

from django.conf import settings
from django.contrib import messages
from django.core.files.base import ContentFile
from django.shortcuts import redirect

from .models import FileInformation, Project, RunWorkflow, Workflow, WorkflowData


def save_file_information(request, project, gi, file_id):
    """Import a file from Galaxy into Synbiowatch and save its metadata in the database."""

    data = gi.datasets.show_dataset(dataset_id=file_id)
    name_history = gi.histories.show_history(data['history_id'])['name']
    project_obj = Project.objects.get(name=project)

    try:
        run_wf = RunWorkflow.objects.get(name=name_history)
    except RunWorkflow.DoesNotExist:
        messages.warning(
            request,
            "Workflow data is not linked to a run workflow; some information is missing.\n"
            "Please relaunch the workflow in a new history with the Synbiowatch interface."
        )
        return redirect("galaxy_workflow")

    # TODO: test for big datasets (see the streaming sketch below)
    # Fetch the file through the Galaxy API download URL and store it in Synbiowatch.
    params = urllib.urlencode({'to_ext': "html", 'key': gi.key}, True)
    url = urlparse.urljoin(gi.base_url, data['download_url'])
    response = urllib2.urlopen('%s/?%s' % (url, params))

    file_info = FileInformation(
        name=data['name'],
        format=data['data_type'],
        type="",
        size=data['file_size'],
        author="",
        project=project_obj)

    # Store the content under <ANALYSE_FOLDER>/<project>/<history>/<file name>.
    file_info.file_path.save(
        '%s/%s/%s/%s' %
        (settings.ANALYSE_FOLDER, project, name_history, data['name']),
        ContentFile(response.read()),
        save=False)
    file_info.save()

    # Record the file as an output of this workflow run.
    workflow_data = WorkflowData(
        data=file_info,
        input=0,
        output=1,  # TODO: count the outputs
        id_run_wf=run_wf)
    workflow_data.save()

    messages.success(request, file_id)
    return redirect(request.META.get('HTTP_REFERER', 'galaxy_workflow'))
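
The TODO about big datasets points at the main weakness here: response.read() loads the whole dataset into memory before it is written out. A minimal sketch of a streaming alternative with requests, assuming the same download URL as above; the chunk size and the temporary-file handling are illustrative, not part of the original code:

import tempfile

import requests

CHUNK_SIZE = 1024 * 1024  # 1 MiB per read; illustrative value

def download_dataset_to_tempfile(url, params):
    # Stream the Galaxy download to disk instead of holding it in memory.
    response = requests.get(url, params=params, stream=True)
    response.raise_for_status()
    with tempfile.NamedTemporaryFile(delete=False) as tmp:
        for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
            tmp.write(chunk)
    return tmp.name

The returned path can then be wrapped in django.core.files.File and passed to file_info.file_path.save() in place of the in-memory ContentFile.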
Example #2
def save_file_information(request, project, gi, file_id):
    """Import a file from Galaxy into Synbiowatch and save its metadata in the database."""
    data = gi.datasets.show_dataset(dataset_id=file_id)
    name_history = gi.histories.show_history(data['history_id'])['name']
    project_obj = Project.objects.get(name=project)

    try:
        run_wf = RunWorkflow.objects.get(name=name_history)
    except RunWorkflow.DoesNotExist:
        messages.warning(request, "Workflow data is not linked to a run workflow; some information is missing.\n"
                         "Please relaunch the workflow in a new history with the Synbiowatch interface.")
        return redirect("galaxy_workflow")

    # TODO: test for big datasets
    # Build the download URL exposed by the Galaxy API.
    url = urlparse.urljoin(gi.base_url, data['download_url'])
    params = urllib.urlencode({'to_ext': "html", 'key': gi.key}, True)
    r = requests.get('%s?%s' % (url, params))

    file_info = FileInformation(name=data['name'],
                                format=data['data_type'],
                                type="",
                                size=data['file_size'],
                                author="",
                                project=project_obj)
    # Target directory, read by the model's upload handler when saving the file.
    file_info._path = '%s/%s/%s' % (settings.ANALYSE_FOLDER, project, name_history)
    file_info.file_path.save(data['name'],
                             ContentFile(r.content),
                             save=False)
    file_info.save()

    # Record the file as an output of this workflow run.
    workflow_data = WorkflowData(data=file_info,
                                 input=0,
                                 output=1,  # TODO: count the outputs
                                 id_run_wf=run_wf)
    workflow_data.save()
    messages.success(request, file_id)
    return redirect('galaxy_history_detail', data['history_id'])
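
This variant stashes the target directory on file_info._path before calling file_path.save(), which implies a custom upload handler on the model. The conventional Django way to express that is an upload_to callable; a hypothetical sketch, assuming _path is set as in the view above (the helper name and the extra fields are illustrative):

import os

from django.db import models

def file_information_upload_to(instance, filename):
    # Build the storage path from the directory the view attached to the
    # instance; fall back to the bare filename if it was not set.
    return os.path.join(getattr(instance, '_path', ''), filename)

class FileInformation(models.Model):
    name = models.CharField(max_length=255)
    # ... other metadata fields used in the views above ...
    file_path = models.FileField(upload_to=file_information_upload_to)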
def launch_workflow(request, project, gi):
    """Submit the workflow execution to Galaxy."""

    project_obj = Project.objects.get(name=project)
    dataset_map = dict()

    if request.method == 'POST':

        # Read the POST variables.
        wf_launch = request.POST.get('workflow_id')
        hist_id = request.POST.get('hist_id')
        input_submit = request.POST.getlist('inputs')

        # Retrieve the history name.
        name_history = gi.histories.show_history(hist_id)['name']

        # Retrieve the Galaxy key needed to launch the workflow.
        wf_obj = Workflow.objects.get(id=wf_launch)
        wf_key = wf_obj.wf_key

        # Check that this is the first launch of the workflow and record
        # the run in the database.
        run_wf, created = RunWorkflow.objects.get_or_create(
            name=name_history,
            workflow_id=wf_obj,
            project_id=project_obj,
        )

        if not created:
            messages.warning(
                request,
                "Workflow has already run, please create a new history")
            return redirect('galaxy_history_detail', history_id=hist_id)

        # Retrieve the workflow inputs.
        wf = gi.workflows.show_workflow(wf_key)
        i_inputs = wf['inputs'].keys()

        # Map each workflow input to the id of a file submitted by the user.
        for i, r in zip(i_inputs, input_submit):
            dataset_map[i] = {'id': r, 'src': 'hda'}

        # Ask Galaxy to run the workflow with these parameters.
        gi.workflows.run_workflow(workflow_id=wf_key,
                                  history_id=hist_id,
                                  dataset_map=dataset_map)

        # Save the inputs selected to launch the workflow.
        for compt, r in enumerate(input_submit, start=1):
            data = gi.datasets.show_dataset(dataset_id=r)

            # Create the metadata record for the file used with the workflow.
            # TODO: find a way to discriminate files other than by name.
            file_info = FileInformation(
                name=data['name'],
                format=data['data_type'],
                type="",
                size=data['file_size'],
                author="",
                project=project_obj,
            )
            file_info.save()

            # Record which workflow run the file was used with.
            workflow_data = WorkflowData(
                data=file_info,
                id_run_wf=run_wf,
                input=compt,
                output=0,
            )
            workflow_data.save()

        return redirect('galaxy_history_detail', history_id=hist_id)

    return redirect("galaxy_workflow")