Пример #1
0
    def create_call(self):
        """Create the qsub command for the helper tRNA pipeline.

        Creates the job output directory under MEDIA_ROOT, registers a
        JobStatus row, and returns the qsub invocation string.
        """
        # Species may contain spaces (e.g. "Homo sapiens"); strip them once.
        species = self.cleaned_data.get('species').replace(" ", "")
        pipeline_id = self.generate_id()
        FS = FileSystemStorage()
        FS.location = os.path.join(MEDIA_ROOT, pipeline_id)
        # os.makedirs instead of `os.system("mkdir ...")`: portable and not
        # vulnerable to shell injection through the path.
        os.makedirs(FS.location, exist_ok=True)
        out_dir = FS.location
        name = pipeline_id + '_h_trna'
        JobStatus.objects.create(
            job_name=name,
            pipeline_key=pipeline_id,
            job_status="not launched",
            start_time=datetime.datetime.now(),
            modules_files="",
            pipeline_type="helper",
        )

        return 'qsub -v pipeline="helper",mode="trna",key="{pipeline_id}",outdir="{out_dir}",species="{species}",name="{name}" -N {job_name} {sh}'.format(
            pipeline_id=pipeline_id,
            out_dir=out_dir,
            species=species,  # already stripped of spaces above
            name=name,
            job_name=name,
            sh=os.path.join(BASE_DIR +
                            '/core/bash_scripts/run_helper_trna.sh'))
Пример #2
0
    def form_valid(self, form):
        """Store the uploaded video, create its thumbnail, redirect to edit.

        On any failure the partially-stored files and the DB row are removed
        before the exception is re-raised.
        """
        upload_filename = form.cleaned_data["file"].name

        content = VideoContent(title=upload_filename,
                               description="",
                               upload_date=timezone.now(),
                               original_name=upload_filename,
                               filename="")
        content.save()

        # Initialise so cleanup works even if storage.save() itself fails
        # (previously `filename` was unbound inside `except` -> NameError).
        filename = ""
        try:
            storage = FileSystemStorage()
            storage.location = DATA_DIR + str(content.id)
            filename = storage.save(upload_filename, form.cleaned_data["file"])
            make_video_thumb(DATA_DIR + str(content.id) + "/" + filename,
                             content.thumb_frame,
                             DATA_DIR + str(content.id) + "/thumb.jpg")

        except Exception:
            # Roll back stored files and the DB row, then propagate.
            delete_video(content.id, filename)
            content.delete()
            raise

        else:
            content.filename = filename
            content.save()

            return HttpResponseRedirect(
                reverse('video:edit', args=(content.id, )))
Пример #3
0
def add_orders(request, order, order_date, order_datetime):
    '''
    Create an OrderHistoryItem snapshot for *order* and return it.

    A copy of the design image is stored under media/order_pics so the
    order history keeps its own snapshot of the artwork.
    '''
    product = order.product
    artwork = order.art

    # Persist a copy of the design image into 'media/order_pics'.
    storage = FileSystemStorage()
    storage.location = 'media/order_pics'
    photo_name = storage.get_available_name('orderdesign.png')
    storage.save(photo_name, order.design_photo.open())
    stored_path = 'order_pics/' + photo_name

    return OrderHistoryItem.objects.create(
        name=artwork.artwork_name + '   |   ' + product.product_name,
        design_photo=stored_path,
        user=request.user,
        order_date=order_date,
        order_datetime=order_datetime,
        status='NP',
        paid_price=artwork.artwork_price + product.price,
        quantity=order.quantity,
    )
Пример #4
0
    def post(self, request, *args, **kwargs):
        """Send the voucher e-mail with attachments and mark the account.

        Saves each uploaded attachment under
        MEDIA_ROOT/ipcaccounts/vouchers/email, stages the notification
        and flags the account as mailed.
        """
        msg_subject = request.POST.get('subject')
        message_body = request.POST.get('temp_message')
        sender = request.user.first_name + "|" + request.user.email
        to = get_email_list(request.POST.get('to'))
        cc = get_email_list(request.POST.get('cc'))
        bcc = get_email_list(request.POST.get('bcc'))
        bcc.append(request.user.email)

        # One storage instance suffices for all attachments (was re-created
        # per file); the unused `fs.url(...)` result was dropped.
        fs = FileSystemStorage()
        fs.location = join(settings.MEDIA_ROOT, "ipcaccounts/vouchers/email")
        _files = []
        for f in request.FILES.getlist('attachments'):
            filename = fs.save(f.name, f)
            _files.append(join(fs.location, filename))

        stage_classic_notification("IPC Vouchers", sender, to, cc,
                                    msg_subject, message_body, _files)
        # NOTE(review): stage_classic_notification returns no success signal,
        # so the error branch below is effectively unreachable — confirm.
        _dispatch_status = True

        if _dispatch_status:
            account_id = request.POST.get('account_id')
            account_master = RCOAccountsMaster.objects.get(id=account_id)
            account_master.email_sent = True
            account_master.save()
            return render(request, 'ipcaccounts/confirm.html')
        else:
            return render(request, 'ipcaccounts/error.html')
Пример #5
0
def delete_video(content_id, video_filename):
    """Remove the stored video file, its thumbnail and the content folder."""
    print('remove files at ' + str(content_id) + '/')
    storage = FileSystemStorage()
    storage.location = DATA_DIR
    prefix = str(content_id) + '/'
    # Delete the video, the thumbnail, then the now-empty directory.
    for target in (prefix + video_filename, prefix + 'thumb.jpg', prefix):
        storage.delete(target)
Пример #6
0
    def save(self, commit=True):
        """Persist the booking and e-mail the vendor the travel details.

        Returns the saved instance (standard ModelForm contract; the old
        version returned None and ignored the `commit` argument).
        """
        # Honour the caller's `commit` flag (it was hard-coded to True).
        instance = super(TravelBookingForm, self).save(commit=commit)

        vendor_email = self.cleaned_data.get('vendor_email')

        try:
            user = User.objects.get(email=vendor_email)
        except User.DoesNotExist:
            # No matching vendor account: skip the notification e-mail.
            user = None

        filename = None
        if instance.attached_template:
            fs = FileSystemStorage()
            fs.location = fs.location + '/media/'
            filename = fs.save(instance.attached_template.name,
                               instance.attached_template)

        if user:
            html_message = "Dear " + user.first_name + ", <br><br>Please have a look at attached files for travel details. <br><br>If you have any questions we are here to help – please drop in an email to [email protected]<br><br>Cheers,<br>Team XYZ<br>"
            email_message = EmailMessage('Travel Booked', html_message,
                                         settings.EMAIL_HOST_USER, [],
                                         [user.email])
            email_message.content_subtype = "html"

            if filename is not None:
                email_message.attach_file('media/' + filename)
            email_message.send()
        return instance
Пример #7
0
 def post(self, request, format='csv'):
     """Bulk-import members from an uploaded CSV file.

     Expected columns: id, first_name, last_name, phone_number,
     client_member_id, account_id; the first row is a header and is
     skipped.
     """
     file_obj = request.data['filename']
     if not file_obj.name.endswith('.csv'):
         return Response("Unsupported file type")
     storage = FileSystemStorage()
     try:
         storage.save(file_obj.name, file_obj)
         with storage.open(file_obj.name, mode='r') as opened_file:
             bulk_mgr = BulkCreateManager(chunk_size=1000)
             reader = csv.reader(opened_file)
             next(reader)  # skip the header row
             for row in reader:
                 bulk_mgr.add(
                     Member(id=row[0],
                            first_name=row[1],
                            last_name=row[2],
                            phone_number=Phonenumber.objects.create(
                                phone_number=row[3]),
                            client_member_id=ClientId.objects.create(
                                client_member_id=row[4]),
                            account_id=row[5]))
             bulk_mgr.done()
     except Exception:
         # Previously `raise e` made this error Response unreachable.
         return Response("could not open file or invalid csv file")
     # `location` is an attribute, not a callable (calling it raised
     # TypeError); 200 instead of 204 because a 204 must carry no body.
     return Response({file_obj.name: storage.location}, status=200)
Пример #8
0
    def create_call(self):
        """Create the qsub command for the RNAcentral helper pipeline.

        Builds the output directory, writes conf.txt / conf.json, records a
        JobStatus row, and returns (qsub_command, pipeline_id) when QSUB is
        enabled (implicitly None otherwise, as before).
        """
        pipeline_id = self.generate_id()
        FS = FileSystemStorage()
        FS.location = os.path.join(MEDIA_ROOT, pipeline_id)
        # Portable directory creation (replaces `os.system("mkdir ...")`).
        os.makedirs(FS.location, exist_ok=True)
        out_dir = FS.location
        species = self.cleaned_data.get('species')
        taxonomy = self.cleaned_data.get('taxonomy')
        # The old `name = pipeline_id + '_h_rnac'` was immediately
        # overwritten and has been removed.
        name = pipeline_id + '_h_central'
        config_location = os.path.join(out_dir, "conf.txt")
        configuration = {
            'pipeline_id': pipeline_id,
            'out_dir': out_dir,
            'name': name,
            'conf_input': config_location,
            'type': 'helper'
        }

        with open(config_location, "w+") as file:
            file.write(
                "input=" +
                os.path.join(CONF["db"], "dbs/rnacentral_active.fasta") + "\n")
            file.write("taxonFile=" +
                       os.path.join(CONF["db"], "dbs/taxonomy_full.txt") +
                       "\n")
            file.write("mode=RNAC \n")
            file.write("output=" + out_dir + "\n")
            # Species takes precedence over taxonomy when both are given.
            if self.cleaned_data.get('species'):
                file.write("species=" + species.replace(" ", "_") + "\n")
            elif self.cleaned_data.get('taxonomy'):
                file.write("taxon=" + taxonomy.replace(" ", "_") + "\n")

        import json
        configuration_file_path = os.path.join(out_dir, 'conf.json')
        with open(configuration_file_path, 'w') as conf_file:
            json.dump(configuration, conf_file, indent=True)

        JobStatus.objects.create(
            job_name=name,
            pipeline_key=pipeline_id,
            job_status="not launched",
            start_time=datetime.datetime.now(),
            modules_files="",
            pipeline_type="helper",
            outdir=FS.location,
        )
        if QSUB:
            return 'qsub -q ff -v c="{configuration_file_path}" -N {job_name} {sh}'.format(
                configuration_file_path=configuration_file_path,
                job_name=name,
                sh=os.path.join(
                    os.path.dirname(BASE_DIR) +
                    '/core/bash_scripts/run_qsub.sh')), pipeline_id
Пример #9
0
    def create_call(self):
        """Create the qsub command for the Ensembl helper pipeline.

        Accepts either an uploaded file or a URL to download; writes
        conf.txt and conf.json into the job directory, registers a
        JobStatus row and returns (qsub_command, pipeline_id) when QSUB
        is enabled.

        Raises Http404 when neither a file nor a URL was supplied.
        """
        pipeline_id = self.generate_id()
        FS = FileSystemStorage()
        FS.location = os.path.join(MEDIA_ROOT, pipeline_id)
        # Portable directory creation (replaces `os.system("mkdir ...")`).
        os.makedirs(FS.location, exist_ok=True)
        out_dir = FS.location
        ifile = self.cleaned_data.get("ifile")
        if ifile:
            # Store the uploaded file inside the job directory.
            ifile = FS.save(str(ifile), ifile)
        elif self.cleaned_data.get("url"):
            url = self.cleaned_data.get("url")
            dest = os.path.join(FS.location, os.path.basename(url))
            ifile, headers = urllib.request.urlretrieve(url, filename=dest)
        else:
            raise Http404

        name = pipeline_id + '_h_ens'
        config_location = os.path.join(out_dir, "conf.txt")
        configuration = {
            'pipeline_id': pipeline_id,
            'out_dir': out_dir,
            'name': name,
            'conf_input': config_location,
            'type': 'helper'
        }
        with open(config_location, "w+") as file:
            file.write("input=" + os.path.join(out_dir, ifile) + "\n")
            file.write("mode=ENS\n")
            file.write("output=" + out_dir + "\n")
        import json
        configuration_file_path = os.path.join(out_dir, 'conf.json')
        with open(configuration_file_path, 'w') as conf_file:
            json.dump(configuration, conf_file, indent=True)
        JobStatus.objects.create(
            job_name=name,
            pipeline_key=pipeline_id,
            job_status="not launched",
            start_time=datetime.datetime.now(),
            all_files=ifile,
            modules_files="",
            pipeline_type="helper",
            outdir=FS.location,
        )

        if QSUB:
            return 'qsub -q ff -v c="{configuration_file_path}" -N {job_name} {sh}'.format(
                configuration_file_path=configuration_file_path,
                job_name=name,
                sh=os.path.join(
                    os.path.dirname(BASE_DIR) +
                    '/core/bash_scripts/run_qsub.sh')), pipeline_id
Пример #10
0
    def handle_uploaded_file(self, post, f):
        """Save uploaded file *f* into MEDIA_ROOT and return its stored name.

        The previous version also opened MEDIA_ROOT/f.name with
        open(..., 'wb+') but never wrote to it, leaving an empty stray file
        that forced fs.save() to store under an alternative name.
        (`post` is kept unused for interface compatibility.)
        """
        fs = FileSystemStorage()
        fs.location = settings.MEDIA_ROOT
        return fs.save(f.name, f)
Пример #11
0
def upload_result_test(request):
    """Store the uploaded image under <BASE_DIR>/UploadedImages."""
    file = request.FILES['file_uploads']

    fs = FileSystemStorage()
    # The second argument must be relative: os.path.join discards all
    # preceding parts when it meets an absolute path, so the old
    # '/UploadedImages/' silently wrote to the filesystem root.
    fs.location = os.path.join(settings.BASE_DIR, 'UploadedImages')
    fs.save(file.name, file)

    return HttpResponse('Type:')
Пример #12
0
def saveDesignCoordinate(design, imageData, art_top, art_left, art_height,
                         art_width, frame_top, frame_left, frame_width,
                         frame_height, frame_border_radius, frame_rotation,
                         text_font, text_top, text_left, text_weight,
                         text_style, text_color, text_size, text):
    """Update a design's rendered photo plus art/frame/text coordinates.

    *imageData* is a data URL ('data:image/<ext>;base64,<payload>'); the
    previous photo is deleted unless it is the shared default image.
    """
    # Decode the base64 payload of the data URL into a Django ContentFile.
    # (`header` instead of `format` to avoid shadowing the builtin; the
    # old `imageData = imageData` no-op was removed.)
    header, imgstr = imageData.split(';base64,')
    ext = header.split('/')[-1]
    image_file = ContentFile(base64.b64decode(imgstr), name='temp.' + ext)
    fileStorage = FileSystemStorage()
    fileStorage.location = 'media/design_pics'
    if design.design_photo.url != '/media/design_pics/defaultDesign.png':
        # Remove the previous rendering, but never the shared default.
        design.design_photo.storage.delete(design.design_photo.path)
    name = fileStorage.get_available_name('design.png')
    fileStorage.save(name, image_file)
    design.design_photo = 'design_pics/' + name

    ## art placement
    design.designArtCoordinate.coordinate_top = art_top
    design.designArtCoordinate.coordinate_left = art_left
    design.designArtCoordinate.height = art_height
    design.designArtCoordinate.width = art_width
    ## art frame placement
    design.designArtFrameCoordinate.frame_coordinate_top = frame_top
    design.designArtFrameCoordinate.frame_coordinate_left = frame_left
    design.designArtFrameCoordinate.frame_width = frame_width
    design.designArtFrameCoordinate.frame_height = frame_height
    design.designArtFrameCoordinate.frame_border_radius = frame_border_radius
    design.designArtFrameCoordinate.rotation = str(frame_rotation)
    ## text placement and styling
    design.designTextCoordinate.font = text_font
    design.designTextCoordinate.font_weight = text_weight
    design.designTextCoordinate.font_style = text_style
    design.designTextCoordinate.coordinate_top = text_top
    design.designTextCoordinate.coordinate_left = text_left
    design.designTextCoordinate.font_color = text_color
    design.designTextCoordinate.text = text
    design.designTextCoordinate.font_size = text_size

    design.designArtCoordinate.save()
    design.designArtFrameCoordinate.save()
    design.designTextCoordinate.save()
    design.save()
Пример #13
0
    def create_call(self):
        """Create the qsub command for the remove-duplicates helper pipeline.

        Accepts an uploaded file or a URL to download; registers a
        JobStatus row and returns the qsub invocation string.

        Raises Http404 when neither a file nor a URL was supplied.
        """
        pipeline_id = self.generate_id()
        FS = FileSystemStorage()
        FS.location = os.path.join(MEDIA_ROOT, pipeline_id)
        # Portable directory creation (replaces `os.system("mkdir ...")`).
        os.makedirs(FS.location, exist_ok=True)
        out_dir = FS.location
        ifile = self.cleaned_data.get("ifile")
        if ifile:
            # Store the uploaded file inside the job directory.
            ifile = FS.save(str(ifile), ifile)
        elif self.cleaned_data.get("url"):
            url = self.cleaned_data.get("url")
            dest = os.path.join(FS.location, os.path.basename(url))
            ifile, headers = urllib.request.urlretrieve(url, filename=dest)
        else:
            raise Http404

        name = pipeline_id + '_h_rd'
        JobStatus.objects.create(
            job_name=name,
            pipeline_key=pipeline_id,
            job_status="not launched",
            start_time=datetime.datetime.now(),
            all_files=ifile,
            modules_files="",
            pipeline_type="helper",
        )

        return 'qsub -v pipeline="helper",mode="rd",key="{pipeline_id}",outdir="{out_dir}",inputfile="{input_file}",string="{string}",remove="true",name="{name}" -N {job_name} {sh}'.format(
            pipeline_id=pipeline_id,
            out_dir=out_dir,
            input_file=os.path.join(FS.location, ifile),
            string=self.cleaned_data.get("string"),
            name=name,
            job_name=name,
            sh=os.path.join(
                BASE_DIR +
                '/core/bash_scripts/run_helper_remove_duplicates.sh'))
Пример #14
0
def upload(request):
    """Replace an existing media file of *file_type* with uploaded data.

    Unauthenticated users are redirected back to the login page.
    """
    user = request.GET.get('user')
    token = request.GET.get('token')

    if not auth.isAuthenticatedBool(user, token):
        # NOTE(review): the original redirect literal was scrubbed
        # ('******'); reconstructed as user + token query parameters —
        # confirm against the project's login URL scheme.
        return HttpResponseRedirect('/static/login.html?user=' + user +
                                    '&token=' + token)
    file_type = request.GET.get('file_type')
    file_name = request.POST.get('file_to_replace')
    file_data = request.FILES['data']
    fs = FileSystemStorage()
    fs.location = fs.location + "/" + file_type
    # Keep the public URL in sync with the sub-directory: the old code
    # overwrote base_url with just "/" + file_type, dropping the original
    # media prefix.
    fs.base_url = fs.base_url + "/" + file_type
    target = fs.location + "/" + file_name
    # Tolerate a missing target so replacing an already-deleted file
    # does not raise FileNotFoundError.
    if os.path.exists(target):
        os.remove(target)
    fs.save(file_name, file_data)
    return HttpResponseRedirect(
        '/static/admin.html?upload_status=success&type=' + file_type +
        '&user=' + user + '&token=' + token)
Пример #15
0
def tagTitle(request, title=None):
    """List the stored HTML docs, or render the one named by *title*."""
    from django.core.files.storage import FileSystemStorage

    fs = FileSystemStorage()
    # NOTE(review): hard-coded Windows path — should come from settings.
    fs.location = 'C:\\Django_Practice_For_MH\\workspace\\myweb\\htmlapp\\templates'
    if title is None:
        # No title given: list the files stored under docs/.
        data = fs.listdir('docs')
    else:
        # Load the file named by `title`, decode, and render it; the
        # context manager closes the handle (the old code leaked it).
        with fs.open(f'docs\\{title}') as doc:
            data = str(doc.read(), encoding='utf-8')
        return render(request, 'htmlapp/view.html', {'data': data})

    context = {'data': data}
    return render(request, 'htmlapp/tag_list.html', context)
Пример #16
0
def user_settings(request):
    """Show the profile settings page; handle profile-image upload on POST."""
    current_user = request.user
    # .get() instead of ['profile_image']: the old subscript raised
    # MultiValueDictKeyError on a POST without a file instead of falling
    # through to the plain settings page.
    if request.method == 'POST' and request.FILES.get('profile_image'):
        big_dict = {}
        myfile = request.FILES['profile_image']
        fs = FileSystemStorage()
        fs.location = PROFILE_IMG_DIR
        filename = fs.save(myfile.name, myfile)
        current_user.image = filename
        current_user.save()

        big_dict["user"] = current_user
        big_dict["image_path"] = os.path.join('images/icon/profile_image/',
                                              filename)

        return render(request, 'profiles/settings.html', context=big_dict)
    else:
        big_dict = {}
        big_dict["image_path"] = os.path.join('images/icon/profile_image/',
                                              current_user.image)
        big_dict["user"] = current_user
        return render(request, 'profiles/settings.html', context=big_dict)
Пример #17
0
    def create_conf_file(self, cleaned_data, pipeline_id):
        """Build the sRNAbench configuration for *pipeline_id*.

        Uploads the input/library files, assembles an SRNABenchConfig from
        the cleaned form data, writes conf.txt / conf.json into the job
        directory and registers a JobStatus row.

        Returns (job_name, path_to_conf.json).
        """
        conf = {}
        conf['pipeline_id'] = pipeline_id
        FS = FileSystemStorage()
        FS.location = os.path.join(MEDIA_ROOT, pipeline_id)
        # Portable directory creation (replaces `os.system("mkdir ...")`).
        os.makedirs(FS.location, exist_ok=True)
        out_dir = FS.location
        conf['out_dir'] = out_dir
        ifile, libs_files = self.upload_files(cleaned_data, FS)
        lib_mode = cleaned_data.get('library_mode')
        # Boolean flags are rendered as "true"/"false" strings for the
        # config file.
        is_solid = str(cleaned_data.get('is_solid')).lower()
        guess_adapter = str(cleaned_data.get('guess_adapter')).lower()
        predict_mirna = str(cleaned_data.get('predict_mirna')).lower()
        no_libs = cleaned_data.get('no_libs')
        highconf = cleaned_data.get('highconf')
        mirDB = cleaned_data.get('mirDB')
        recursive_adapter_trimming = str(
            cleaned_data.get('adapter_recursive_trimming')).lower()
        species = [i.db_ver for i in cleaned_data['species']]
        assemblies = [i.db for i in cleaned_data['species']]
        short_names = [i.shortName for i in cleaned_data['species']]
        micrornas_species = ':'.join(short_names)
        adapter = cleaned_data['adapter_chosen'] or cleaned_data[
            'adapter_manual']
        if adapter == "EMPTY":
            # "EMPTY" is the form's sentinel for "no adapter".
            adapter = None
        nucleotides_5_removed = str(cleaned_data['nucleotides_5_removed'])
        adapter_length = str(cleaned_data['adapter_length'])
        adapter_mismatch = str(cleaned_data['adapter_mismatch'])
        seed_length = str(cleaned_data['seed_length'])
        mismatches = str(cleaned_data['mismatches'])
        aligment_type = str(cleaned_data['aligment_type'])
        min_read_count = str(cleaned_data['min_read_count'])
        min_read_length = str(cleaned_data['min_read_length'])
        max_multiple_mapping = str(cleaned_data['max_multiple_mapping'])
        homologous = cleaned_data[
            'homologous'] if cleaned_data['homologous'] != '' else None

        species_annotation_file = SpeciesAnnotationParser(
            CONF["speciesAnnotation"])
        species_annotation = species_annotation_file.parse()
        db = CONF["db"]

        new_conf = SRNABenchConfig(
            species_annotation,
            db,
            FS.location,
            ifile,
            iszip="true",
            bedGraph="true",
            writeGenomeDist="true",
            predict=predict_mirna,
            graphics="true",
            species=species,
            assembly=assemblies,
            short_names=short_names,
            adapter=adapter,
            recursiveAdapterTrimming=recursive_adapter_trimming,
            libmode=lib_mode,
            nolib=no_libs,
            microRNA=micrornas_species,
            removeBarcode=nucleotides_5_removed,
            adapterMinLength=adapter_length,
            adapterMM=adapter_mismatch,
            seed=seed_length,
            noMM=mismatches,
            alignType=aligment_type,
            minRC=min_read_count,
            solid=is_solid,
            guessAdapter=guess_adapter,
            highconf=highconf,
            mirDB=mirDB,
            homolog=homologous,
            user_files=libs_files,
            minReadLength=min_read_length,
            mBowtie=max_multiple_mapping)

        conf_file_location = os.path.join(FS.location, "conf.txt")
        new_conf.write_conf_file(conf_file_location)

        name = pipeline_id + '_bench'
        configuration = {
            'pipeline_id': pipeline_id,
            'out_dir': out_dir,
            'name': name,
            'conf_input': conf_file_location,
            'type': 'sRNAbench'
        }

        JobStatus.objects.create(
            job_name=name,
            pipeline_key=pipeline_id,
            job_status="not_launched",
            start_time=datetime.now(),
            all_files=ifile,
            modules_files="",
            pipeline_type="sRNAbench",
        )
        configuration_file_path = os.path.join(out_dir, 'conf.json')
        with open(configuration_file_path, 'w') as conf_file:
            json.dump(configuration, conf_file, indent=True)
        return name, configuration_file_path
Пример #18
0
    def create_call(self):
        """Create the launch command for the miRNA consensus-target pipeline.

        miRNA and UTR sequences may each come from an uploaded file, a
        pasted text area (stored as mirs.fa / utrs.fa), or — for UTRs —
        a preset choice.  Writes conf.json, registers a JobStatus row and
        returns (command, pipeline_id).
        """
        pipeline_id = self.generate_id()
        FS = FileSystemStorage()
        FS.location = os.path.join(MEDIA_ROOT, pipeline_id)
        # Portable directory creation (replaces `os.system("mkdir ...")`).
        os.makedirs(FS.location, exist_ok=True)
        out_dir = FS.location
        mirfile = self.cleaned_data.get("mirfile")
        utrfile = self.cleaned_data.get("utrfile")
        program_list = []
        param_list = []
        if self.cleaned_data.get('seed'):
            program_list.append("SEED")
            param_list.append(self.cleaned_data.get("seed_par"))
        if self.cleaned_data.get('targetspy'):
            program_list.append("TS")
            param_list.append(self.cleaned_data.get("target_par"))
        if self.cleaned_data.get('miranda'):
            program_list.append("MIRANDA")
            param_list.append(self.cleaned_data.get("miranda_par"))
        if self.cleaned_data.get('PITA'):
            program_list.append("PITA")
            param_list.append(self.cleaned_data.get("PITA_par"))
        program_string = ":".join(program_list)
        if mirfile:
            mirfile = FS.save(str(mirfile), mirfile)
        else:
            # Pasted text: persist it as mirs.fa.  (FileSystemStorage has
            # no `fileUpload` attribute and no zero-argument save(); the
            # old calls raised AttributeError/TypeError.)
            mirfile = FS.save(
                "mirs.fa", ContentFile(self.cleaned_data.get("mirtext")))
        if utrfile:
            utrfile = FS.save(str(utrfile), utrfile)
        elif self.cleaned_data.get("utrtext"):
            utrfile = FS.save(
                'utrs.fa', ContentFile(self.cleaned_data.get("utrtext")))
        else:
            utrfile = self.cleaned_data.get('utrchoice')

        name = pipeline_id + '_mirconstarget'

        configuration = {
            'pipeline_id': pipeline_id,
            'out_dir': out_dir,
            'name': name,
            "mirna_file": mirfile,
            "utr_file": utrfile,
            "program_string": program_string,
            # Wrapped in "'...'" so the shell passes the parameter list as
            # a single argument.
            "parameter_string": "\"'" + " : ".join(param_list) + "'\"",
            'type': 'miRNAconstarget'
        }
        configuration_file_path = os.path.join(out_dir, 'conf.json')
        import json
        with open(configuration_file_path, 'w') as conf_file:
            json.dump(configuration, conf_file, indent=True)

        JobStatus.objects.create(
            job_name=name,
            pipeline_key=pipeline_id,
            job_status="not launched",
            start_time=datetime.datetime.now(),
            all_files=[mirfile, utrfile],
            modules_files="",
            pipeline_type="mirconstarget",
        )

        if QSUB:
            return 'qsub -v c="{configuration_file_path}" -N {job_name} {sh}'.format(
                configuration_file_path=configuration_file_path,
                job_name=name,
                sh=os.path.join(
                    os.path.dirname(BASE_DIR) +
                    '/core/bash_scripts/run_qsub.sh')), pipeline_id
        else:
            return '{sh} {configuration_file_path}'.format(
                configuration_file_path=configuration_file_path,
                sh=os.path.join(
                    os.path.dirname(BASE_DIR) +
                    '/core/bash_scripts/run.sh')), pipeline_id
Пример #19
0
    def create_conf_file(self, cleaned_data, pipeline_id):
        """Write the sRNAblast conf.txt / conf.json for *pipeline_id*.

        Input reads may come from an upload, or be re-used from a previous
        job identified by job_ID.  Registers a JobStatus row and returns
        (job_name, path_to_conf.json).
        """
        conf = {}
        conf['pipeline_id'] = pipeline_id
        FS = FileSystemStorage()
        FS.location = os.path.join(MEDIA_ROOT, pipeline_id)
        # Portable directory creation (replaces `os.system("mkdir ...")`).
        os.makedirs(FS.location, exist_ok=True)
        out_dir = FS.location
        conf['out_dir'] = out_dir
        ifile, libs_files = self.upload_files(cleaned_data, FS)
        if not ifile and cleaned_data.get("job_ID"):
            # No upload: reuse the reads of a previous job.
            new_record = JobStatus.objects.get(
                pipeline_key=cleaned_data.get("job_ID"))
            ifile = os.path.join(new_record.outdir, "reads.fa")

        adapter = cleaned_data['adapter_chosen'] or cleaned_data[
            'adapter_manual']
        adapter_length = str(cleaned_data['adapter_length'])
        adapter_mismatch = str(cleaned_data['adapter_mismatch'])
        conf_dict = {}
        conf_dict["input"] = ifile
        conf_dict["output"] = out_dir
        conf_dict["maxReads"] = cleaned_data.get("maxReads")
        conf_dict["blastDB"] = cleaned_data.get("dataBase")
        conf_dict["maxEvalue"] = cleaned_data.get("maxEval")
        conf_dict["adapter"] = adapter
        if conf_dict["adapter"] == "EMPTY":
            # "EMPTY" is the form's sentinel for "no adapter".
            conf_dict["adapter"] = None
        conf_dict["adapterMinLength"] = adapter_length
        conf_dict["adapterMM"] = adapter_mismatch
        conf_file_location = os.path.join(FS.location, "conf.txt")
        with open(conf_file_location, "a") as cfile:
            for key, value in conf_dict.items():
                if value:
                    # format() also handles non-string values (the old
                    # "+"-concatenation raised TypeError on ints).
                    cfile.write("{}={}\n".format(key, value))

        name = pipeline_id + '_blast'
        configuration = {
            'pipeline_id': pipeline_id,
            'out_dir': out_dir,
            'name': name,
            'conf_input': conf_file_location,
            'type': 'sRNAblast'
        }

        JobStatus.objects.create(
            job_name=name,
            pipeline_key=pipeline_id,
            job_status="not_launched",
            start_time=datetime.now(),
            all_files=ifile,
            modules_files="",
            outdir=out_dir,
            pipeline_type="sRNAblast",
            zip_file=pipeline_id + "/" + "sRNAblast_full_Result.zip",
        )
        configuration_file_path = os.path.join(out_dir, 'conf.json')
        with open(configuration_file_path, 'w') as conf_file:
            json.dump(configuration, conf_file, indent=True)
        return name, configuration_file_path
Пример #20
0
def upload_result(request):
    """Handle a multi-file upload.

    Authenticated users' files go to S3 (when MEDIA_FILES_ON_S3) under
    user_uploads/<user_id>/Uploads; anonymous uploads land on local disk.
    Renders the upload_result page, or a plain "no files" response.
    """
    if request.method == 'POST' and len(
            request.FILES.getlist('file_uploads')) > 0:
        if request.user.is_authenticated:
            user_id = request.user.id
        else:
            user_id = "none"

        file_urls = []
        for file in request.FILES.getlist('file_uploads'):
            if request.user.is_authenticated:
                if MEDIA_FILES_ON_S3:
                    s3 = boto3.resource(
                        's3',
                        region_name="us-west-2",
                        aws_access_key_id=AWS_ACCESS_KEY_ID,
                        aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
                    album_name = 'Uploads'
                    # NOTE(review): the leading '/' produces an empty first
                    # path segment in the S3 key — confirm this is intended.
                    file_directory_within_bucket = '/user_uploads/{}/{}'.format(
                        user_id, album_name)

                    # Full key within the bucket, including the filename.
                    file_path_within_bucket = os.path.join(
                        file_directory_within_bucket, file.name)
                    custom_media_storage = CustomMediaStorage()
                    custom_media_storage.save(file_path_within_bucket, file)
                    file_url = custom_media_storage.url(
                        file_path_within_bucket)
                    file_name = file_url.split('/')[-1]
                    file_urls.append((file_name, file_url))
            else:
                fs = FileSystemStorage()
                # 'UploadedImages' must be relative: os.path.join drops
                # MEDIA_ROOT when the second argument is absolute, so the
                # old '/UploadedImages/' wrote to the filesystem root.
                fs.location = os.path.join(settings.MEDIA_ROOT,
                                           'UploadedImages')
                fs.save(file.name, file)

        return render(
            request, 'OnlineImageConverter/upload_result.html', {
                'number_of_files': len(file_urls),
                'filenames': file_urls,
                'session_id': request.session._session_key,
            })
    else:
        return HttpResponse("No Files were uploaded")