def run():
    """
    Run page: display the form used to submit a new job.

    Loads optional prefill data from the ``.inforun`` file, builds the list of
    available alignment tools, and creates a session id (server-side session in
    webserver mode, a unique upload folder name otherwise).
    """
    inforun = None
    inforun_file = os.path.join(config_reader.config_dir, ".inforun")
    if os.path.exists(inforun_file):
        try:
            with open(inforun_file, "r") as info:
                # json.load reads the stream directly (was: json.loads(info.read()))
                inforun = json.load(info)
        except json.JSONDecodeError:
            # Best effort: a malformed inforun file must not block the page
            print("Unable to parse inforun file. Ignoring it.", file=sys.stderr)
    tools = Tools().tools
    tools_names = sorted(tools.keys(), key=lambda x: (tools[x].order, tools[x].name))
    tools_ava = {}
    tools_options = {}
    for tool_name, tool in tools.items():
        # 1 if the tool supports all-vs-all comparison, else 0
        tools_ava[tool_name] = 1 if tool.all_vs_all is not None else 0
        tools_options[tool_name] = tool.options
    if MODE == "webserver":
        with Session.connect():
            s_id = Session.new()
    else:
        # Standalone mode: the session id is a unique upload folder name
        upload_folder = Functions.random_string(20)
        tmp_dir = config_reader.upload_folder
        upload_folder_path = os.path.join(tmp_dir, upload_folder)
        while os.path.exists(upload_folder_path):
            upload_folder = Functions.random_string(20)
            upload_folder_path = os.path.join(tmp_dir, upload_folder)
        s_id = upload_folder
    # Default job id: random prefix + timestamp; may be overridden by the URL
    id_job = Functions.random_string(5) + "_" + \
        datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d%H%M%S')
    if "id_job" in request.args:
        id_job = request.args["id_job"]
    email = ""
    if "email" in request.args:
        email = request.args["email"]
    return render_template("run.html", id_job=id_job, email=email, menu="run",
                           allowed_ext=ALLOWED_EXTENSIONS, s_id=s_id,
                           max_upload_file_size=config_reader.max_upload_file_size,
                           example=config_reader.example_target != "",
                           target=os.path.basename(config_reader.example_target),
                           query=os.path.basename(config_reader.example_query),
                           tools_names=tools_names, tools=tools,
                           tools_ava=tools_ava, tools_options=tools_options,
                           version=VERSION, inforun=inforun)
def new(cls, keep_active=False):
    """
    Create a new session.

    Generates a session id and an upload folder name, retrying each until it
    is unique (no existing DB row / no existing folder on disk), then stores
    the session row.

    :param keep_active: if True, mark the session to be kept active
    :type keep_active: bool
    :return: the new session id
    :rtype: str
    """
    from dgenies.lib.functions import Functions
    my_s_id = Functions.random_string(20)
    # Retry until the id does not clash with an existing session row
    while len(cls.select().where(cls.s_id == my_s_id)) > 0:
        my_s_id = Functions.random_string(20)
    upload_folder = Functions.random_string(20)
    tmp_dir = config.upload_folder
    upload_folder_path = os.path.join(tmp_dir, upload_folder)
    # Retry until the upload folder does not already exist on disk
    while os.path.exists(upload_folder_path):
        upload_folder = Functions.random_string(20)
        upload_folder_path = os.path.join(tmp_dir, upload_folder)
    # Single timestamp so date_created and last_ping are exactly equal
    # (was: two separate datetime.now() calls)
    now = datetime.now()
    cls.create(s_id=my_s_id, date_created=now, upload_folder=upload_folder,
               last_ping=now, keep_active=keep_active)
    return my_s_id
def result(id_res):
    """
    Result page

    :param id_res: job id
    :type id_res: str
    """
    job_dir = os.path.join(APP_DATA, id_res)
    return render_template("result.html",
                           id=id_res,
                           menu="result",
                           current_result=id_res,
                           is_gallery=Functions.is_in_gallery(id_res, MODE),
                           fasta_file=Functions.query_fasta_file_exists(job_dir))
def gallery():
    """
    Gallery page (only available in webserver mode, 404 otherwise)
    """
    if MODE != "webserver":
        return abort(404)
    return render_template("gallery.html",
                           items=Functions.get_gallery_items(),
                           menu="gallery")
def run():
    """
    Run page
    """
    tools = Tools().tools
    # Tools sorted by declared order, then name
    tools_names = sorted(tools, key=lambda name: (tools[name].order, tools[name].name))
    # 1 if the tool has all-vs-all support, else 0
    tools_ava = {name: (1 if tool.all_vs_all is not None else 0)
                 for name, tool in tools.items()}
    if MODE == "webserver":
        with Session.connect():
            s_id = Session.new()
    else:
        # Standalone mode: generate a fresh, non-existing upload folder name
        tmp_dir = config_reader.upload_folder
        while True:
            upload_folder = Functions.random_string(20)
            if not os.path.exists(os.path.join(tmp_dir, upload_folder)):
                break
        s_id = upload_folder
    timestamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d%H%M%S')
    id_job = request.args.get("id_job", Functions.random_string(5) + "_" + timestamp)
    email = request.args.get("email", "")
    return render_template("run.html", id_job=id_job, email=email, menu="run",
                           allowed_ext=ALLOWED_EXTENSIONS, s_id=s_id,
                           max_upload_file_size=config_reader.max_upload_file_size,
                           example=config_reader.example_target != "",
                           target=os.path.basename(config_reader.example_target),
                           query=os.path.basename(config_reader.example_query),
                           tools_names=tools_names, tools=tools,
                           tools_ava=tools_ava, version=VERSION)
def global_templates_variables():
    """
    Global variables made available to every view's template
    """
    return dict(title=app_title,
                mode=MODE,
                all_jobs=Functions.get_list_all_jobs(MODE),
                debug=DEBUG)
def documentation_run():
    """
    Documentation run page.

    Renders the ``doc_run.md`` markdown file, substituting the configured
    upload size limits (a value of -1 means the limit is disabled and is
    displayed as "no limit").
    """
    def _readable_limit(size):
        # -1 means the limit is disabled in the configuration
        return "no limit" if size == -1 else Functions.get_readable_size(size, 0)

    latest = Latest()
    version = latest.latest
    # Human-readable forms of the three configured upload limits
    max_upload_file_size = _readable_limit(config_reader.max_upload_file_size)
    max_upload_size = _readable_limit(config_reader.max_upload_size)
    max_upload_size_ava = _readable_limit(config_reader.max_upload_size_ava)
    with open(os.path.join(app_folder, "md", "doc_run.md"), "r",
              encoding='utf-8') as install_instr:
        content = install_instr.read()
    # The markdown file is a Jinja template: substitute mode/version/sizes first
    env = Environment()
    template = env.from_string(content)
    content = template.render(mode=MODE, version=version,
                              size=max_upload_file_size,
                              size_unc=max_upload_size,
                              size_ava=max_upload_size_ava)
    md = Markdown(extensions=[TocExtension(baselevel=1)])
    content = Markup(md.convert(content))
    toc = Markup(md.toc)
    return render_template("documentation.html", menu="documentation",
                           content=content, toc=toc)
def global_templates_variables():
    """
    Global variables made available to every view's template
    """
    return dict(title=app_title,
                mode=MODE,
                all_jobs=Functions.get_list_all_jobs(MODE),
                cookie_wall=config_reader.cookie_wall,
                legal_pages=config_reader.legal,
                debug=DEBUG)
def parse_data_folders(app_data, now, max_age, gallery_jobs, fake=False):
    """
    Parse the data folder and remove too old jobs.

    :param app_data: folder where jobs are stored
    :param now: current timestamp
    :type now: float
    :param max_age: max age in days per category of file ("data", "fasta_sorted", ...)
    :type max_age: dict
    :param gallery_jobs: ids of jobs which belong to the gallery (never removed)
    :type gallery_jobs: list
    :param fake: if True, only print what would be deleted, without deleting
    :type fake: bool
    """
    for file in os.listdir(app_data):
        if file not in gallery_jobs and file not in ["gallery"]:
            file = os.path.join(app_data, file)
            create_date = os.path.getctime(file)
            age = (now - create_date) / 86400  # Age in days
            if age > max_age["data"]:
                try:
                    if os.path.isdir(file):
                        print("Removing folder %s..." % file)
                        if not fake:
                            shutil.rmtree(file)
                    else:
                        print("Removing file %s..." % file)
                        if not fake:
                            os.remove(file)
                except OSError:
                    # print_exc() already writes to stderr and returns None:
                    # do not wrap it in print() (was printing "None")
                    traceback.print_exc()
            elif os.path.isdir(file):
                # Job kept: still clean up the regenerable sorted fasta files
                query_name_file = os.path.join(file, ".query")
                if os.path.exists(query_name_file):
                    with open(query_name_file) as query_file:
                        query_filename = query_file.read().strip("\n")
                    sorted_file = Functions.get_fasta_file(file, "query", True)
                    # get_fasta_file may return None: guard before endswith
                    if sorted_file is not None and not sorted_file.endswith(".sorted"):
                        sorted_file = None
                    if sorted_file is not None:
                        create_date = os.path.getctime(sorted_file)
                        age = (now - create_date) / 86400  # Age in days
                        if age > max_age["fasta_sorted"]:
                            print("Removing fasta file %s..." % sorted_file)
                            if not fake:
                                os.remove(sorted_file)
                    query_reference = os.path.join(
                        file, "as_reference_" + os.path.basename(query_filename))
                    if os.path.exists(query_reference):
                        create_date = os.path.getctime(query_reference)
                        age = (now - create_date) / 86400  # Age in days
                        if age > max_age["fasta_sorted"]:
                            print("Removing fasta file %s..." % query_reference)
                            if not fake:
                                os.remove(query_reference)
def dl_fasta(id_res, filename):
    """
    Download fasta file

    :param id_res: job id
    :type id_res: str
    :param filename: file name (not used, but can be in the URL to define
        download filename to the browser)
    :type filename: str
    """
    res_dir = os.path.join(APP_DATA, id_res)
    lock_query = os.path.join(res_dir, ".query-fasta-build")
    is_sorted = os.path.exists(os.path.join(res_dir, ".sorted"))
    # Serve the file unless a sort is currently building it (lock + sorted job)
    if not os.path.exists(lock_query) or not is_sorted:
        query_fasta = Functions.get_fasta_file(res_dir, "query", is_sorted)
        if query_fasta is not None:
            if query_fasta.endswith((".gz", ".gz.sorted")):
                return Response(get_file(query_fasta, True),
                                mimetype="application/gzip")
            return Response(get_file(query_fasta), mimetype="text/plain")
    abort(404)
def build_fasta(id_res):
    """
    Generate the fasta file of query

    :param id_res: job id
    :type id_res: str
    """
    res_dir = os.path.join(APP_DATA, id_res)
    # Lock file marking a fasta build in progress for this job
    lock_query = os.path.join(res_dir, ".query-fasta-build")
    # The job's contigs have been sorted if the ".sorted" marker exists
    is_sorted = os.path.exists(os.path.join(res_dir, ".sorted"))
    compressed = request.form["gzip"].lower() == "true"
    query_fasta = Functions.get_fasta_file(res_dir, "query", is_sorted)
    if query_fasta is not None:
        if is_sorted and not query_fasta.endswith(".sorted"):
            # Do the sort
            Path(lock_query).touch()
            if not compressed or MODE == "standalone":  # If compressed, it will took a long time, so not wait
                # ".pending" marks that a caller is synchronously waiting below
                Path(lock_query + ".pending").touch()
            index_file = os.path.join(res_dir, "query.idx.sorted")
            if MODE == "webserver":
                # Run the sort in the background; the lock file is removed by
                # sort_fasta when it finishes
                thread = threading.Timer(1, Functions.sort_fasta, kwargs={
                    "job_name": id_res,
                    "fasta_file": query_fasta,
                    "index_file": index_file,
                    "lock_file": lock_query,
                    "compress": compressed,
                    "mailer": mailer,
                    "mode": MODE
                })
                thread.start()
            else:
                # Standalone: sort synchronously, no mailer available
                Functions.sort_fasta(job_name=id_res,
                                     fasta_file=query_fasta,
                                     index_file=index_file,
                                     lock_file=lock_query,
                                     compress=compressed,
                                     mailer=None,
                                     mode=MODE)
            if not compressed or MODE == "standalone":
                if MODE == "webserver":
                    # Poll for the background sort for a bounded time
                    # (~15s: initial sleep + up to 2 retries)
                    i = 0
                    time.sleep(5)
                    while os.path.exists(lock_query) and (i < 2 or MODE == "standalone"):
                        i += 1
                        time.sleep(5)
                os.remove(lock_query + ".pending")
                if os.path.exists(lock_query):
                    # Sort still running: report "in progress" to the client
                    return jsonify({
                        "success": True,
                        "status": 1,
                        "status_message": "In progress"
                    })
                return jsonify({
                    "success": True,
                    "status": 2,
                    "status_message": "Done",
                    "gzip": compressed
                })
            else:
                # Compressed build in webserver mode: do not wait, user will be
                # notified later
                return jsonify({
                    "success": True,
                    "status": 1,
                    "status_message": "In progress"
                })
        elif is_sorted and os.path.exists(lock_query):
            # Sort is already in progress
            return jsonify({
                "success": True,
                "status": 1,
                "status_message": "In progress"
            })
        else:
            # No sort to do or sort done
            if compressed and not query_fasta.endswith(".gz.fasta"):
                # If compressed file is asked, we must compress it now if not done before...
                Path(lock_query).touch()
                # Compress in the background and mail the user when ready
                thread = threading.Timer(1, Functions.compress_and_send_mail, kwargs={
                    "job_name": id_res,
                    "fasta_file": query_fasta,
                    "index_file": os.path.join(res_dir, "query.idx.sorted"),
                    "lock_file": lock_query,
                    "mailer": mailer
                })
                thread.start()
                return jsonify({
                    "success": True,
                    "status": 1,
                    "status_message": "In progress"
                })
            return jsonify({
                "success": True,
                "status": 2,
                "status_message": "Done",
                "gzip": query_fasta.endswith(".gz") or query_fasta.endswith(".gz.sorted")
            })
    else:
        return jsonify({
            "success": False,
            "message": "Unable to get fasta file for query. Please contact us to report the bug"
        })
def upload():
    """
    Handle the upload of one file for a new job.

    Reads the session id from the form. In webserver mode the session must
    exist and be allowed to upload; otherwise the session id is used directly
    as the upload folder. The file is checked against the allowed formats
    (from the "formats" form field) and saved into the session upload folder.

    :return: JSON describing the uploaded file, or an error payload
    """
    try:
        s_id = request.form['s_id']
        if MODE == "webserver":
            try:
                with Session.connect():
                    session = Session.get(s_id=s_id)
                    if session.ask_for_upload(False):
                        folder = session.upload_folder
                    else:
                        return jsonify({
                            "files": [],
                            "success": "ERR",
                            "message": "Not allowed to upload!"
                        })
            except DoesNotExist:
                return jsonify({
                    "files": [],
                    "success": "ERR",
                    "message": "Session not initialized. Please refresh the page."
                })
        else:
            folder = s_id
        # Only one file field is expected: take the first one
        files = request.files[list(request.files.keys())[0]]
        if files:
            filename = files.filename
            folder_files = os.path.join(app.config["UPLOAD_FOLDER"], folder)
            if not os.path.exists(folder_files):
                os.makedirs(folder_files)
            filename = Functions.get_valid_uploaded_filename(filename, folder_files)
            mime_type = files.content_type
            if not Functions.allowed_file(files.filename,
                                          request.form['formats'].split(",")):
                result = UploadFile(name=filename, type_f=mime_type, size=0,
                                    not_allowed_msg="File type not allowed")
                shutil.rmtree(folder_files)
            else:
                # save file to disk
                uploaded_file_path = os.path.join(folder_files, filename)
                files.save(uploaded_file_path)
                # get file size after saving
                size = os.path.getsize(uploaded_file_path)
                # return json for js call back
                result = UploadFile(name=filename, type_f=mime_type, size=size)
            return jsonify({"files": [result.get_file()], "success": "OK"})
        return jsonify({
            "files": [],
            "success": "404",
            "message": "No file provided"
        })
    except Exception:
        # Catch all *errors* to prevent crashes, but not SystemExit /
        # KeyboardInterrupt (the original bare "except:" swallowed those too)
        traceback.print_exc()
        return jsonify({
            "files": [],
            "success": "ERR",
            "message": "An unexpected error has occurred on upload. "
                       "Please contact the support."
        })
def build_query_chr_as_reference(self):
    """
    Assemble query contigs like reference chromosomes

    :return: path of the fasta file
    """
    try:
        if not self.sorted:
            raise Exception("Contigs must be sorted to do that!")
        # The ".query" file stores the path of the query fasta for this job
        with open(os.path.join(self.data_dir, ".query")) as query_file:
            query_fasta = query_file.read().strip("\n")
        if not os.path.isfile(query_fasta):
            raise Exception("Query fasta does not exists")
        o_fasta = os.path.join(os.path.dirname(query_fasta),
                               "as_reference_" + os.path.basename(query_fasta))
        # Output is always written uncompressed, so drop a trailing ".gz"
        if o_fasta.endswith(".gz"):
            o_fasta = o_fasta[:-3]
        if not os.path.exists(o_fasta):
            uncompressed = False
            if query_fasta.endswith(".gz"):
                # SeqIO.index needs a plain file; remember to clean up after
                uncompressed = True
                query_fasta = Functions.uncompress(query_fasta)
            query_f = SeqIO.index(query_fasta, "fasta")
            # target name -> list of query contigs aligned on it
            contigs_assoc = self.get_queries_on_target_association()
            mapped_queries = set()
            with open(o_fasta, "w") as out:
                for target in self.t_order:
                    if target in contigs_assoc:
                        # Keep the contigs in their sorted (q_order) order
                        queries = sorted(contigs_assoc[target],
                                         key=lambda x: self.q_order.index(x))
                        seq = SeqRecord(Seq(""))
                        for query in queries:
                            mapped_queries.add(query)
                            new_seq = query_f[query]
                            if self.q_reversed[query]:
                                # Contig aligned on the reverse strand
                                new_seq = new_seq.reverse_complement()
                            seq += new_seq
                            # 100-N spacer between concatenated contigs
                            seq += 100 * "N"
                        # Drop the trailing spacer after the last contig
                        seq = seq[:-100]
                        seq.id = seq.name = seq.description = target
                        SeqIO.write(seq, out, "fasta")
                # Append contigs that aligned on no target, flagged in their id
                for contig in self.q_order:
                    if contig not in mapped_queries:
                        seq = query_f[contig]
                        seq.id += "_unaligned"
                        SeqIO.write(seq, out, "fasta")
            query_f.close()
            if uncompressed:
                # Remove the temporary uncompressed copy
                os.remove(query_fasta)
        status = "success"
    except Exception:
        # On any failure, report a placeholder name and a "fail" status
        # NOTE(review): the exception is swallowed without logging — consider
        # adding traceback output; left unchanged here
        o_fasta = "_._"
        status = "fail"
    if MODE == "webserver":
        # Notify the user by mail that the fasta is ready (or failed)
        parts = os.path.basename(o_fasta).rsplit(".", 1)
        Functions.send_fasta_ready(mailer=self.mailer,
                                   job_name=self.id_job,
                                   sample_name=parts[0],
                                   ext=parts[1],
                                   compressed=False,
                                   path="download",
                                   status=status)
    return o_fasta
def parse_data_folders(app_data, gallery_jobs, now, max_age, fake=False):
    """
    Parse data folder and remove too old jobs

    :param app_data: folder where jobs are stored
    :param gallery_jobs: id of jobs which are inside the gallery
    :type gallery_jobs: list
    :param now: current timestamp
    :type now: float
    :param max_age: remove all files & folders older than this age. Define it
        for each category (uploads, data, error, ...)
    :type max_age: dict
    :param fake: if True, just print files to delete, without delete them
    :type fake: bool
    :return:
    """
    for file in os.listdir(app_data):
        if file not in gallery_jobs and file not in ["gallery"]:
            file = os.path.join(app_data, file)
            create_date = os.path.getctime(file)
            age = (now - create_date) / 86400  # Age in days
            if age > max_age["data"]:
                try:
                    if os.path.isdir(file):
                        print("Removing folder %s..." % file)
                        if not fake:
                            shutil.rmtree(file)
                    else:
                        print("Removing file %s..." % file)
                        if not fake:
                            os.remove(file)
                except OSError:
                    # print_exc() already writes to stderr and returns None:
                    # do not wrap it in print() (was printing "None")
                    traceback.print_exc()
            elif os.path.isdir(file):
                # Job kept: still clean up the regenerable sorted fasta files
                query_name_file = os.path.join(file, ".query")
                if os.path.exists(query_name_file):
                    with open(query_name_file) as query_file:
                        query_filename = query_file.read().strip("\n")
                    sorted_file = Functions.get_fasta_file(file, "query", True)
                    # get_fasta_file may return None: guard before endswith
                    if sorted_file is not None and not sorted_file.endswith(".sorted"):
                        sorted_file = None
                    if sorted_file is not None:
                        create_date = os.path.getctime(sorted_file)
                        age = (now - create_date) / 86400  # Age in days
                        if age > max_age["fasta_sorted"]:
                            print("Removing fasta file %s..." % sorted_file)
                            if not fake:
                                os.remove(sorted_file)
                    query_reference = os.path.join(
                        file, "as_reference_" + os.path.basename(query_filename))
                    if os.path.exists(query_reference):
                        create_date = os.path.getctime(query_reference)
                        age = (now - create_date) / 86400  # Age in days
                        if age > max_age["fasta_sorted"]:
                            print("Removing fasta file %s..." % query_reference)
                            if not fake:
                                os.remove(query_reference)