def dnld_pfam_uniprot_seqs(ss, uniprot_acc, aa_uniprot_file, dir_cache_prj):
    """Download Pfam protein sequences from UniProt for the given accessions.

    Skips the download when the accession set is unchanged since the last
    run (cached in dir_cache_prj) and the output file already exists.
    When no accessions are given, any stale output file is removed.
    """
    if len(uniprot_acc) != 0:
        # Cache file holding the accession list from the previous run.
        _ = opj(dir_cache_prj, 'aa_uniprot_acc_cache__' + ss)
        prev_uniprot_acc = []
        if ope(_):
            with open(_, 'rb') as f:
                prev_uniprot_acc = pickle.load(f)
        # Persist the current accession list for the next run's comparison.
        with open(_, 'wb') as f:
            pickle.dump(uniprot_acc, f, protocol=PICKLE_PROTOCOL)
        # Re-download only when the accession set changed or the output
        # FASTA file is missing.
        if (set(uniprot_acc) != set(prev_uniprot_acc)) or \
                (not ope(aa_uniprot_file)):
            Log.inf('Downloading Pfam protein sequences from UniProt:', ss)
            # Note: the number of sequences downloaded from UniProt may
            # be less than the total number of accessions. This is normal
            # as Pfam may return "obsolete" accessions, which will not be
            # downloaded here.
            _ = fasta_by_accession_list(uniprot_acc)
            _ = standardize_fasta_text(_, SEQ_TYPE_AA, pfam=True)
            write_fasta(_, aa_uniprot_file)
    else:
        # No accessions: remove any leftover output from an earlier run.
        if ope(aa_uniprot_file):
            osremove(aa_uniprot_file)
def remove(self):
    '''purpose: deletes the file on the filesystem containing the sqlite db'''
    from os import remove as osremove
    db_file = self.name + ".sqlite"
    try:
        osremove(db_file)
    except OSError:
        # No "<name>.sqlite" file on disk -- the db was stored under the
        # bare name instead, so delete that.
        osremove(self.name)
def split(path, size, file_, dirpath, split_size, start_time=0, i=1, inLoop=False):
    """Split a file into Telegram-uploadable parts.

    Videos are cut with ffmpeg (stream copy) into .partNNN pieces; anything
    else is split byte-wise with the `split` command. `inLoop` marks a
    recursive retry with a reduced `split_size` after an oversized part.
    """
    parts = ceil(size/TG_SPLIT_SIZE)
    if EQUAL_SPLITS and not inLoop:
        # Spread the bytes evenly across all parts instead of filling
        # each part to the maximum.
        split_size = ceil(size/parts)
    if file_.upper().endswith(VIDEO_SUFFIXES):
        base_name, extension = ospath.splitext(file_)
        # Leave headroom (~2.5 MB) for container overhead per part.
        split_size = split_size - 2500000
        while i <= parts:
            parted_name = "{}.part{}{}".format(str(base_name), str(i).zfill(3), str(extension))
            out_path = ospath.join(dirpath, parted_name)
            # Cut from start_time, limiting output size with -fs; streams
            # are copied, not re-encoded.
            run(["ffmpeg", "-hide_banner", "-loglevel", "error", "-i", path,
                 "-ss", str(start_time), "-fs", str(split_size),
                 "-async", "1", "-strict", "-2", "-c", "copy", out_path])
            out_size = get_path_size(out_path)
            if out_size > 2097152000:
                # Part exceeded the hard 2 GB limit: shrink split_size by
                # the overshoot and restart the whole split from scratch.
                dif = out_size - 2097152000
                split_size = split_size - dif + 2500000
                osremove(out_path)
                return split(path, size, file_, dirpath, split_size, start_time, i, inLoop=True)
            lpd = get_media_info(out_path)[0]
            if lpd <= 4 or out_size < 1000000:
                # Negligible tail part (<= 4 s or < 1 MB): drop it and stop.
                osremove(out_path)
                break
            # Overlap consecutive parts by 3 seconds.
            start_time += lpd - 3
            i = i + 1
    else:
        # Non-video: plain byte split with numeric .001, .002, ... suffixes.
        out_path = ospath.join(dirpath, file_ + ".")
        run(["split", "--numeric-suffixes=1", "--suffix-length=3", f"--bytes={split_size}", path, out_path])
def deleteMap(self, item):
    """Delete the selected map file from disk, after user confirmation."""
    mapForDeletion = self.initFile['LevelsList'][item.text()]
    gamefolder = current_project.settings["gamefolder"]
    target_to_delete = os.path.join(gamefolder, fifl.LEVELS, mapForDeletion)
    openMap = os.path.basename(current_project.settings["workingFile"])
    # Refuse to delete the map that is currently open in the editor.
    if openMap == mapForDeletion:
        QtWidgets.QMessageBox.information(
            self, 'Delete Problem',
            "Can't delete a map while it's open in the map editor.",
            QtWidgets.QMessageBox.Ok)
        return
    reply = QtWidgets.QMessageBox.question(
        self, 'Delete?',
        'Do you really wish to delete:\n' + mapForDeletion,
        QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No)
    if reply == QtWidgets.QMessageBox.Yes:
        osremove(target_to_delete)
        game_init.regenerateLevelList()
        self.reloadInitFile()
def measure_all_leaves():
    """
    12/13/16
    Measures all leaves in all collections.
    COPY AND PASTED FROM MAIN2.PY
    :return: number of successes and failures
    """
    # Remove measurements from a previous run; it's fine if none exist yet.
    try:
        osremove('save-data/leaf-data.xml')
    except FileNotFoundError:
        pass
    # load the leaves from the harddrive
    leaves = load_species_structured(IMAGE_DIR)
    # measure the found leaves
    successes, failures = measure_leaves_list(leaves)
    # report successes and failures (typo "FINSIHED" fixed)
    print('\n\nFINISHED\n{0} leaves measured successfully and {1} failed attempts.'.format(successes, failures))
    return successes, failures
def write_append(self, data):
    '''Appends tasks to an existing file.
    CAUTION: This function does not add or update anything besides tasks.'''
    from os import close as osclose
    # Read the existing archive and parse its task document.
    with tarfile.open(data['filename'], 'r:gz') as tar:
        f = tar.extractfile('todo.data')
        document = ElementTree.parse(f)
        f.close()
    htd = document.getroot()
    tasklist = document.find("tasklist")
    # TODO update the rest of the saved info
    super(FileFilter, self).store_tasks(data['task_store'], tasklist)
    # write to file
    with tarfile.open(data['filename'], 'w:gz') as tar:
        # store xml in a temp file
        # FIX: mkstemp() returns an open OS-level fd which was previously
        # leaked (the path was reopened separately). Close it right away.
        (datafd, datafile_path) = mkstemp()
        osclose(datafd)
        with open(datafile_path, 'wb') as datafile:
            datafile.write(ElementTree.tostring(htd, encoding="UTF-8"))
        # store version in a temp file (same fd-leak fix as above)
        (verfd, verfile_path) = mkstemp()
        osclose(verfd)
        with open(verfile_path, 'wb') as verfile:
            verfile.write(self.file_version)
        # create tgz-formatted output file and write
        tar.add(datafile_path, arcname="todo.data")
        tar.add(verfile_path, arcname="version.data")
    osremove(datafile_path)
    osremove(verfile_path)
def check_encrypt_or_decrypt(do, path, key, erase_it):
    """Dispatch a single file to encrypt() or decrypt().

    Arguments:
        do {str} -- action selector; must contain "encrypt" or "decrypt"
        path {str} -- path of the file to process
        key -- encryption/decryption key passed through to encrypt()/decrypt()
        erase_it {bool} -- when encrypting, also delete the original file

    Exits the program when `do` is neither action.
    """
    if "encrypt" in str(do).lower():
        # Skip files that are already encrypted output.
        if ".crp0" not in path and ".crp__" not in path:
            print("> Working on: ", path)
            encrypt(path, key)
            # Delete file if erase is confirmed
            if erase_it:
                print("> Removing the original : ", path.replace(".crp0", ""))
                try:
                    osremove(path.replace(".crp0", ""))
                except OSError:
                    # FIX: was a bare `except:`; only OS-level removal
                    # failures (permissions, missing file) are expected here.
                    print(
                        "> Can't remove due to Permissions purposes or something else!"
                    )
                    print("> Skipping: ", path.replace(".crp0", ""))
        else:
            print("> Skipping: ", path)
    elif "decrypt" in str(do).lower():
        # Only files carrying the encrypted marker can be decrypted.
        if ".crp0" in path:
            print("> Working on: ", path)
            decrypt(path, key)
            osremove(path)
        else:
            print("> Skipping: ", path)
    else:
        exit("> Command Error choose either 'encrypt' or 'decrypt'")
def deleteFile(filename):
    """Delete `filename` from disk.

    Returns a (success, message) tuple: (True, "File deleted.") on success,
    (False, "ERROR: ...") when the file is missing or removal fails.
    """
    if ospath.exists(filename):
        try:
            osremove(filename)
            return True, "File deleted."
        except OSError:
            # FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit; os.remove raises OSError.
            return False, "ERROR: %s" % str(format_exc())
    else:
        # FIX: grammar in the user-facing message ("does not exists").
        return False, "ERROR: File \"%s\" does not exist." % filename
def change_query_file(file_path):
    """Mark a query file as processed by rewriting its suffix.

    Copies the first line of `<name>_new` into `<name>_done`, then removes
    the original file. (Only the first line is carried over, matching the
    original behavior.)
    """
    done_file_path = file_path.replace("_new", "") + "_done"
    # FIX: use context managers so the handles are closed even when an
    # exception occurs mid-copy (the originals were closed manually).
    with open(file_path, "r") as todo_file:
        with open(done_file_path, "w") as done_file:
            done_file.write(todo_file.readline())
    osremove(path.abspath(file_path))
def clean_unwanted(path: str):
    """Remove qBittorrent leftovers and prune directories emptied by that."""
    LOGGER.info(f"Cleaning unwanted files/folders: {path}")
    # Pass 1: delete leftover files and ".unwanted" folders, bottom-up.
    for dirpath, subdir, files in walk(path, topdown=False):
        for entry in files:
            # NOTE: `and` binds tighter than `or` -- this removes every
            # "*.!qB" file, plus hidden "*.parts" files (leading dot).
            if entry.endswith(".!qB") or entry.endswith('.parts') and entry.startswith('.'):
                osremove(ospath.join(dirpath, entry))
        for folder in subdir:
            if folder == ".unwanted":
                rmtree(ospath.join(dirpath, folder))
    # Pass 2: drop any directory that is now empty.
    for dirpath, subdir, files in walk(path, topdown=False):
        if not listdir(dirpath):
            rmdir(dirpath)
def dnld_prot_seqs(ss, prot_acc_user, aa_prot_ncbi_file, dir_cache_prj):
    """Download user-requested protein sequences from NCBI.

    Returns the (possibly updated) list of accession.version identifiers.
    If the existing FASTA already covers exactly the requested accessions,
    nothing is downloaded. With no accessions, any stale file is removed.
    """
    if len(prot_acc_user) != 0:
        acc_old = set()
        if ope(aa_prot_ncbi_file):
            # Accessions already present in the output file (first '|' field
            # of each definition line).
            _ = read_fasta(aa_prot_ncbi_file, SEQ_TYPE_AA)
            acc_old = set([x.definition.split('|')[0] for x in _])
        if acc_old == set(prot_acc_user):
            # Up to date; nothing to download.
            return prot_acc_user
        else:
            pickle_file = opj(dir_cache_prj, 'ncbi_prot_metadata_cache__' + ss)
            if ope(pickle_file):
                with open(pickle_file, 'rb') as f:
                    # NOTE(review): pa_info is loaded but never used below --
                    # possibly a leftover; confirm before removing.
                    pa_info = pickle.load(f)
            print()
            Log.inf('Downloading protein sequences from NCBI:', ss)
            _ = dnld_ncbi_seqs('protein', prot_acc_user, rettype='gb', retmode='xml')
            prot_acc_user_new = list()
            for rec in _:
                acc_ver = rec.accession_version
                defn = rec.definition
                organism = rec.organism
                prot_acc_user_new.append(acc_ver)
                # Normalize the definition line into the pipe-separated form
                # "ACC|description|organism" with underscores for spaces.
                defn_new = defn.split('[' + organism + ']')[0]
                defn_new = defn_new.lower().strip()
                defn_new = defn_new.replace(' ', '_').replace('-', '_')
                defn_new = defn_new.replace(',', '')
                defn_new = defn_new[0].upper() + defn_new[1:]
                defn_new = acc_ver + '|' + defn_new + '|' + organism
                defn_new = defn_new.replace(' ', '_').replace('-', '_')
                rec.definition = defn_new
            prot_acc_user = prot_acc_user_new
            write_fasta(_, aa_prot_ncbi_file)
    else:
        # No accessions requested: remove any leftover output file.
        if ope(aa_prot_ncbi_file):
            osremove(aa_prot_ncbi_file)
    return prot_acc_user
async def random_cat(message, client, arguments):
    # Fetch a random cat image URL from the random.cat API ("file" key).
    url = 'http://random.cat/meow'
    response = loads(rget(url).text)["file"]
    # Download it locally. response[20:] strips the URL prefix to keep just
    # the file name.
    # NOTE(review): the hard-coded 20-char offset assumes a fixed URL
    # prefix length -- confirm it matches the current API responses.
    urlretrieve(response, "./download/cats/" + response[20:])
    # Send the downloaded picture back to the channel, mentioning the author.
    await client.send_file(message.channel, "./download/cats/" + response[20:],
                           filename="./download/cats/" + response[20:],
                           content="<@" + message.author.id + "> ", tts=False)
    # Clean up the temporary download after posting.
    osremove("./download/cats/" + response[20:])
    send(1, "Wild cat received!")
    return
def _exists(filepath, filetype=None):
    """
    Tell if a file exists
    @params:
        `filepath`: The source
        `filetype`: The file type of file2
    @returns:
        `True` if exists else `False`
    """
    # Determine the type lazily when the caller did not supply one.
    filetype = filetype or SafeFs._filetype(filepath)
    if filetype == SafeFs.FILETYPE_NOENTLINK:
        # Dangling symlink: clean it up and report "does not exist".
        osremove(filepath)
        return False
    existing_types = (
        SafeFs.FILETYPE_FILE,
        SafeFs.FILETYPE_FILELINK,
        SafeFs.FILETYPE_DIR,
        SafeFs.FILETYPE_DIRLINK,
    )
    return filetype in existing_types
def clearDl(learnfile='learning.lrn', lfrepr="your dynamic learning data"):
    """Deletes file learnfile with confirmation using 'lfrepr' as representation."""
    # Seed with a nonsense value so the prompt loop always runs once.
    x = 'godacetchemal'
    confirmationquestion = "Are you sure you want to delete " + str(lfrepr) + "? (Y/N) >"
    while x.lower() not in ['yes', 'no', 'y', 'n']:  ## Makes sure the user's answer is valid.
        x = raw_input(confirmationquestion)
        if x.lower() in ['yes', 'y']:
            try:
                osremove(learnfile)  ## Deletes file.
                print "Deleted successfully."
                break
            except (IOError, WindowsError, OSError):  ## Different error for different operative systems.
                print "That file does not exist!"
                break
        elif x.lower() in ['no', 'n']:  ## Aborts deletion.
            print "Deletion aborted."
            break
        # Fall-through: the answer was not recognized; re-prompt.
        print "That is not a valid answer."
def test_file_does_not_exist(self):
    '''ReadFile on a missing file must yield data=None and lines=[].'''
    missing = self.test['file']
    # Delete the fixture file so ReadFile sees a nonexistent path.
    osremove(missing.name)
    reader = ReadFile(missing.name, self.key)
    reader.read()
    self.assertEqual(reader.data, None, msg='Incorrect data from file')
    self.assertEqual(reader.lines, [], msg='Incorrect lines from file')
def deleteMap(self, item):
    """Confirm with the user, then delete the chosen map file from disk."""
    mapForDeletion = self.initFile['LevelsList'][item.text()]
    gamefolder = current_project.settings["gamefolder"]
    target_to_delete = os.path.join(gamefolder, fifl.LEVELS, mapForDeletion)
    editedMap = os.path.basename(current_project.settings["workingFile"])
    # The currently-open map must never be deleted out from under the editor.
    if editedMap == mapForDeletion:
        QtWidgets.QMessageBox.information(
            self, 'Delete Problem',
            "Can't delete a map while it's open in the map editor.",
            QtWidgets.QMessageBox.Ok)
        return
    reply = QtWidgets.QMessageBox.question(
        self, 'Delete?',
        'Do you really wish to delete:\n' + mapForDeletion,
        QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No)
    if reply == QtWidgets.QMessageBox.Yes:
        osremove(target_to_delete)
        game_init.regenerateInit()
        self.reloadInitFile()
def write_simple(self, data):
    '''Writes todo list data to xml file.
    Data is a dictionary of data pieces to store.'''
    from os import close as osclose
    # create tgz-formatted output file and write
    with tarfile.open(data['filename'], 'w:gz') as tar:
        # store xml in a temp file
        # FIX: mkstemp() returns an open OS-level fd which was previously
        # leaked; close it immediately since only the path is needed.
        (datafd, datafile_path) = mkstemp()
        osclose(datafd)
        super(FileFilter, self).write_simple(data, datafile_path)
        # store version in a temp file (same fd-leak fix as above)
        (verfd, verfile_path) = mkstemp()
        osclose(verfd)
        with open(verfile_path, 'wb') as verfile:
            verfile.write(self.file_version)
        tar.add(datafile_path, arcname="todo.data")
        tar.add(verfile_path, arcname="version.data")
    osremove(datafile_path)
    osremove(verfile_path)
def filter_queries(ss, aa_queries_file, min_query_length, max_query_length,
                   max_query_identity, vsearch, prot_acc_user, overwrite,
                   logging=True):
    """Filter AA query sequences by length and cluster near-duplicates.

    Length-filters the queries, back-translates them to DNA, clusters with
    vsearch at max_query_identity, translates the survivors back, and
    (optionally) overwrites aa_queries_file with the reduced set.
    Returns the subset of prot_acc_user accessions that survived.
    """
    if logging is True:
        print()
        Log.inf('Filtering AA query sequences:', ss)
        Log.msg('min_query_length:', str(min_query_length))
        Log.msg('max_query_length:', str(max_query_length))
        Log.msg('max_query_identity:', str(max_query_identity))
    parsed_fasta_1 = filter_fasta_by_length(aa_queries_file, SEQ_TYPE_AA,
                                            min_query_length,
                                            max_query_length)
    # Temporary files for the DNA round-trip through vsearch.
    tmp1 = aa_queries_file + '_temp1'
    tmp2 = aa_queries_file + '_temp2'
    # Back-translate AA -> DNA (standard genetic code) for clustering.
    for rec in parsed_fasta_1:
        rec.seq.gc_code = 1
        rec.seq = rec.seq.untranslate()
    write_fasta(parsed_fasta_1, tmp1)
    run_cluster_fast(vsearch, max_query_identity, tmp1, tmp2)
    parsed_fasta_2 = read_fasta(tmp2, SEQ_TYPE_DNA, parse_def=True)
    prot_acc_user_new = list()
    # Translate cluster representatives back to AA and keep only the
    # accessions the user originally requested.
    for rec in parsed_fasta_2:
        rec.seq.gc_code = 1
        rec.seq = rec.seq.translate()
        acc = rec.accession_version
        if acc in prot_acc_user:
            prot_acc_user_new.append(acc)
    if overwrite is True:
        write_fasta(parsed_fasta_2, aa_queries_file, prepend_acc=True)
    # Clean up the intermediate files.
    osremove(tmp1)
    osremove(tmp2)
    return prot_acc_user_new
def main():
    """Bot entry point: announce (re)start, register handlers, start polling."""
    # bot.set_my_commands(botcmds)
    start_cleanup()
    if IS_VPS:
        asyrun(start_server_async(PORT))
    # Check if the bot is restarting: .restartmsg holds "chat_id\nmsg_id"
    # of the message to edit once the restart completed.
    if ospath.isfile(".restartmsg"):
        with open(".restartmsg") as f:
            chat_id, msg_id = map(int, f)
        bot.edit_message_text("Restarted successfully!", chat_id, msg_id)
        osremove(".restartmsg")
    elif OWNER_ID:
        # Fresh start: notify the owner and all authorized chats.
        try:
            text = "<b>Bot Restarted!</b>"
            bot.sendMessage(chat_id=OWNER_ID, text=text, parse_mode=ParseMode.HTML)
            if AUTHORIZED_CHATS:
                for i in AUTHORIZED_CHATS:
                    bot.sendMessage(chat_id=i, text=text, parse_mode=ParseMode.HTML)
        except Exception as e:
            # Notification failures must not prevent the bot from starting.
            LOGGER.warning(e)
    # Register the command handlers with their access filters.
    start_handler = CommandHandler(BotCommands.StartCommand, start, run_async=True)
    ping_handler = CommandHandler(BotCommands.PingCommand, ping,
                                  filters=CustomFilters.authorized_chat | CustomFilters.authorized_user,
                                  run_async=True)
    restart_handler = CommandHandler(BotCommands.RestartCommand, restart,
                                     filters=CustomFilters.owner_filter | CustomFilters.sudo_user,
                                     run_async=True)
    help_handler = CommandHandler(BotCommands.HelpCommand, bot_help,
                                  filters=CustomFilters.authorized_chat | CustomFilters.authorized_user,
                                  run_async=True)
    stats_handler = CommandHandler(BotCommands.StatsCommand, stats,
                                   filters=CustomFilters.authorized_chat | CustomFilters.authorized_user,
                                   run_async=True)
    log_handler = CommandHandler(BotCommands.LogCommand, log,
                                 filters=CustomFilters.owner_filter | CustomFilters.sudo_user,
                                 run_async=True)
    dispatcher.add_handler(start_handler)
    dispatcher.add_handler(ping_handler)
    dispatcher.add_handler(restart_handler)
    dispatcher.add_handler(help_handler)
    dispatcher.add_handler(stats_handler)
    dispatcher.add_handler(log_handler)
    updater.start_polling(drop_pending_updates=IGNORE_PENDING_REQUESTS)
    LOGGER.info("Bot Started!")
    # Graceful shutdown on Ctrl-C.
    signal.signal(signal.SIGINT, exit_clean_up)
    if rss_session is not None:
        rss_session.start()
def clean_tlds(page):
    """Delete dump files for `page` whose name ends in a blacklisted TLD.

    Prints the file count before and after the cleanup.
    """
    from lists.black import black_tlds as tlds
    page_path = '%s%s' % (root_path, page)
    files = get_dumps(page)
    print(len(files))
    # IDIOM FIX: the original re-implemented str.endswith() with manual
    # slicing (f[f.__len__()-s.__len__():] == s) and a boolean flag.
    suffixes = tuple(tlds)
    for f in files:
        if f.endswith(suffixes):
            file_path = '%s/%s' % (page_path, f)
            osremove(file_path)
    files = get_dumps(page)
    print(len(files))
def clean_subdomains(page):
    """Delete dump files for `page` whose name contains a blacklisted subdomain.

    Prints the file count before and after the cleanup.
    """
    from lists.black import subdomains
    page_path = '%s%s' % (root_path, page)
    files = get_dumps(page)
    print(len(files))
    for f in files:
        # IDIOM FIX: replaces the manual flag-and-break loop with any().
        if any(s in f for s in subdomains):
            file_path = '%s/%s' % (page_path, f)
            osremove(file_path)
    files = get_dumps(page)
    print(len(files))
def _remove(filepath, filetype=None):
    """
    Remove an entry
    @params:
        `filepath`: The path of the entry
        `filetype`: The file type
    @returns:
        `True` if succeed else `False`
    """
    try:
        filetype = filetype or SafeFs._filetype(filepath)
        # Nothing to do when the entry is already gone.
        if not SafeFs._exists(filepath, filetype):
            return False
        if filetype == SafeFs.FILETYPE_DIR:
            # Directories are removed recursively, ignoring partial failures.
            rmtree(filepath, ignore_errors=True)
            return True
        osremove(filepath)
        return True
    except OSError:
        return False
def setLeechType(update, context):
    """Callback-query handler for the leech-settings inline keyboard.

    Query data format: "<cmd> <user_id> <action>" where action is one of
    "doc", "med", "thumb" or "close". Only the user who owns the settings
    message may interact with it.
    """
    query = update.callback_query
    message = query.message
    user_id = query.from_user.id
    data = query.data
    data = data.split(" ")
    if user_id != int(data[1]):
        # Button pressed by someone other than the settings owner.
        query.answer(text="Not Yours!", show_alert=True)
    elif data[2] == "doc":
        # Switch the user to document delivery; persist when a DB is set.
        if user_id in AS_MEDIA_USERS:
            AS_MEDIA_USERS.remove(user_id)
        AS_DOC_USERS.add(user_id)
        if DB_URI is not None:
            DbManger().user_doc(user_id)
        query.answer(text="Your File Will Deliver As Document!", show_alert=True)
        editLeechType(message, query)
    elif data[2] == "med":
        # Switch the user to media delivery; persist when a DB is set.
        if user_id in AS_DOC_USERS:
            AS_DOC_USERS.remove(user_id)
        AS_MEDIA_USERS.add(user_id)
        if DB_URI is not None:
            DbManger().user_media(user_id)
        query.answer(text="Your File Will Deliver As Media!", show_alert=True)
        editLeechType(message, query)
    elif data[2] == "thumb":
        # Remove the user's stored custom thumbnail, if any.
        path = f"Thumbnails/{user_id}.jpg"
        if ospath.lexists(path):
            osremove(path)
            if DB_URI is not None:
                DbManger().user_rm_thumb(user_id, path)
            query.answer(text="Thumbnail Removed!", show_alert=True)
            editLeechType(message, query)
        else:
            query.answer(text="Old Settings", show_alert=True)
    elif data[2] == "close":
        # Delete the settings message and the command that spawned it;
        # either may already be gone, hence the blanket except.
        try:
            query.message.delete()
            query.message.reply_to_message.delete()
        except:
            pass
async def random_dog(message, client, arguments):
    """Fetch a random dog picture and post it to the message's channel."""
    # Keep fetching until the endpoint returns a static image (it may also
    # return video/gif URLs).
    loop = 1
    response = ""
    while loop == 1:
        response = str(urlopen("https://random.dog/woof").read())[2:-1]
        # BUG FIX: the original tested `response[-3:] == ("jpg" or "png")`.
        # `("jpg" or "png")` evaluates to just "jpg", so .png images were
        # never accepted; endswith() with a tuple checks both.
        if response.endswith(("jpg", "png")):
            loop = 0
    urlretrieve("https://random.dog/{}".format(quote(response)),
                "./download/dogs/" + response)
    # Sending picture
    await client.send_file(message.channel, "./download/dogs/" + response,
                           filename="./download/dogs/" + response,
                           content="<@" + message.author.id + "> ", tts=False)
    # Clean up the temporary download after posting.
    osremove("./download/dogs/" + response)
    send(1, "Top Dog received!")
    return
def setThumb(update, context):
    """Save the replied-to photo as the user's custom leech thumbnail.

    The photo is downloaded, converted to JPEG at Thumbnails/<user_id>.jpg,
    and optionally persisted to the database.
    """
    user_id = update.message.from_user.id
    reply_to = update.message.reply_to_message
    if reply_to is not None and reply_to.photo:
        path = "Thumbnails/"
        if not ospath.isdir(path):
            mkdir(path)
        # Re-fetch the replied-to message through the pyrogram client so
        # its media can be downloaded.
        photo_msg = app.get_messages(
            update.message.chat.id,
            reply_to_message_ids=update.message.message_id)
        photo_dir = app.download_media(photo_msg, file_name=path)
        des_dir = ospath.join(path, str(user_id) + ".jpg")
        # Normalize to RGB JPEG (Telegram thumbnails must be JPEG).
        Image.open(photo_dir).convert("RGB").save(des_dir, "JPEG")
        # Discard the intermediate download; only the JPEG is kept.
        osremove(photo_dir)
        if DB_URI is not None:
            DbManger().user_save_thumb(user_id, des_dir)
        msg = f"Custom thumbnail saved for <a href='tg://user?id={user_id}'>{update.message.from_user.full_name}</a>."
        sendMessage(msg, context.bot, update)
    else:
        sendMessage("Reply to a photo to save custom thumbnail.", context.bot, update)
def backfill_videos():
    """Regenerate PNG thumbnails for video rows whose thumb is still a video.

    For every image row of type 'video' whose thumbnail path points at a
    video file, create a real PNG thumbnail, update the row, and delete the
    old thumbnail file.
    """
    query = '''
        select id, path, thumb from images
        where type = 'video'
        and (
            thumb like '%.mp4' or
            thumb like '%.flv' or
            thumb like '%.wmv'
        )
    '''
    cur = db.conn.cursor()
    for imgid, image, oldthumb in cur.execute(query).fetchall():
        # Swap the old extension for .png.
        saveas = oldthumb
        saveas = '%s.png' % saveas[:saveas.rfind('.')]
        try:
            newthumb = ImageUtils.create_thumbnail(image, saveas)
        except Exception as e:
            print('ERROR: %s' % str(e))
            continue
        print('replacing %s with %s' % (oldthumb, newthumb))
        q = '''
            update images set thumb = ? where id = ?
        '''
        cur.execute(q, (newthumb, imgid))
        db.commit()
        # BUG FIX: the trailing comma after print(...) was a Python 2
        # leftover that built a (None,) tuple instead of suppressing the
        # newline; use end=' ' to keep "removing ... removed" on one line.
        print('removing %s...' % oldthumb, end=' ')
        osremove(oldthumb)
        print('removed')
    cur.close()
def backfill_videos():
    # Regenerate PNG thumbnails for video rows whose thumbnail still points
    # at a video file (.mp4/.flv/.wmv). Python 2 code.
    query = '''
        select id, path, thumb from images
        where type = 'video'
        and (
            thumb like '%.mp4' or
            thumb like '%.flv' or
            thumb like '%.wmv'
        )
    '''
    cur = db.conn.cursor()
    for imgid, image, oldthumb in cur.execute(query).fetchall():
        # Swap the old extension for .png.
        saveas = oldthumb
        saveas = '%s.png' % saveas[:saveas.rfind('.')]
        try:
            newthumb = ImageUtils.create_thumbnail(image, saveas)
        except Exception, e:
            print 'ERROR: %s' % str(e)
            continue
        print 'replacing %s with %s' % (oldthumb, newthumb)
        q = '''
            update images set thumb = ? where id = ?
        '''
        cur.execute(q, (newthumb, imgid))
        db.commit()
        # Trailing comma: Python 2 "print without newline", so "removed"
        # appears on the same line after the removal succeeds.
        print 'removing %s...' % oldthumb,
        osremove(oldthumb)
        print 'removed'
def tearDownClass(cls):
    # Test-suite teardown: delete the temporary file created during setup.
    # NOTE(review): assumes cls.content is a filesystem path -- confirm
    # against the corresponding setUpClass.
    osremove(cls.content)
def run_tblastn_on_assemblies(ss, assemblies, aa_queries_file, tblastn,
                              dir_prj_assmbl_blast_results, blast_2_evalue,
                              blast_2_max_hsps, blast_2_qcov_hsp_perc,
                              blast_2_best_hit_overhang,
                              blast_2_best_hit_score_edge,
                              blast_2_max_target_seqs, threads,
                              dir_cache_prj, dir_prj_ips):
    """Run tblastn of the AA queries against every assembly's BLAST database.

    Results are cached: an assembly is skipped when its .tsv output exists
    and the pickled settings (including the query sequences) are unchanged.
    Parsed hits are attached to each assembly dict under
    'blast_hits_aa__<ss>'. Exits the program when tblastn is missing or
    there are no assemblies.
    """
    if len(assemblies) > 0:
        print()
        Log.inf('Running BLAST on assemblies:', ss)
        if tblastn is None:
            Log.err('tblastn is not available. Cannot continue. Exiting.')
            exit(0)
    else:
        Log.wrn('There are no assemblies. Nothing to do, stopping.')
        exit(0)
    cache_file = opj(dir_cache_prj, 'blast_2_settings_cache__' + ss)
    pickled = dict()
    # Everything that invalidates cached results when it changes, the
    # query sequences included.
    settings = {'blast_2_evalue': blast_2_evalue,
                'blast_2_max_hsps': blast_2_max_hsps,
                'blast_2_qcov_hsp_perc': blast_2_qcov_hsp_perc,
                'blast_2_best_hit_overhang': blast_2_best_hit_overhang,
                'blast_2_best_hit_score_edge': blast_2_best_hit_score_edge,
                'blast_2_max_target_seqs': blast_2_max_target_seqs,
                'queries': seq_records_to_dict(
                    read_fasta(aa_queries_file, SEQ_TYPE_AA))}
    Log.msg('evalue:', str(blast_2_evalue))
    Log.msg('max_hsps:', str(blast_2_max_hsps))
    Log.msg('qcov_hsp_perc:', str(blast_2_qcov_hsp_perc))
    Log.msg('best_hit_overhang:', str(blast_2_best_hit_overhang))
    Log.msg('best_hit_score_edge:', str(blast_2_best_hit_score_edge))
    Log.msg('max_target_seqs:', str(blast_2_max_target_seqs))
    print()
    for a in assemblies:
        assmbl_src = a['src']
        assmbl_name = a['name']
        # Non-user assemblies carry a '__<ss>' suffix; skip assemblies
        # that belong to a different search strategy.
        if assmbl_src != 'user_fasta':
            if assmbl_name.endswith('__' + ss):
                assmbl_name = assmbl_name.replace('__' + ss, '')
            else:
                continue
        assmbl_blast_db_path = a['blast_db_path']
        assmbl_genetic_code = a['gc_id']
        ips_json_dump_path = opj(dir_prj_ips, assmbl_name + '_ann_ips__' +
                                 ss + '.json')
        # Output .tsv path for this assembly's BLAST results.
        _ = opj(dir_prj_assmbl_blast_results, assmbl_name + '__' + ss +
                '.tsv')
        if ope(_) and ope(cache_file):
            with open(cache_file, 'rb') as f:
                pickled = pickle.load(f)
        if ope(_) and pickled == settings:
            # Log.msg('The provided BLAST settings and query sequences did '
            #         'not change since the previous run.')
            Log.msg('BLAST results already exist:', assmbl_name)
        else:
            Log.msg('Running tblastn on: ' + assmbl_name, ss)
            # Stale InterProScan annotations are invalidated by a re-run.
            if ope(ips_json_dump_path):
                osremove(ips_json_dump_path)
            run_blast(exec_file=tblastn,
                      task='tblastn',
                      threads=threads,
                      db_path=assmbl_blast_db_path,
                      queries_file=aa_queries_file,
                      out_file=_,
                      evalue=blast_2_evalue,
                      max_hsps=blast_2_max_hsps,
                      qcov_hsp_perc=blast_2_qcov_hsp_perc,
                      best_hit_overhang=blast_2_best_hit_overhang,
                      best_hit_score_edge=blast_2_best_hit_score_edge,
                      max_target_seqs=blast_2_max_target_seqs,
                      db_genetic_code=assmbl_genetic_code,
                      out_cols=BLST_RES_COLS_2)
        a['blast_hits_aa__' + ss] = parse_blast_results_file(_, BLST_RES_COLS_2)
    # Persist the settings used for this run so the next run can compare.
    with open(cache_file, 'wb') as f:
        pickle.dump(settings, f, protocol=PICKLE_PROTOCOL)
def remove_file(filename):
    """Delete `filename`, silently succeeding when it does not exist."""
    try:
        osremove(filename)
    except FileNotFoundError:
        # Idempotent removal: a missing file is not an error.
        pass
def run_spades(se_fastq_files, pe_fastq_files, dir_spades_assemblies, spades,
               dir_temp, ss, threads, ram):
    """Assemble filtered reads with SPAdes (rnaSPAdes mode).

    Runs SPAdes for each single-end and paired-end sample, skipping samples
    whose assembly directory already exists. The resulting transcripts.fasta
    path (or None) is stored in each sample dict under
    'spades_assembly__<ss>'. Exits when SPAdes is required but unavailable.
    """
    if len(se_fastq_files) > 0 or len(pe_fastq_files) > 0:
        if spades is None:
            Log.err('SPAdes is not available. Cannot continue. Exiting.')
            exit(0)
    for se in se_fastq_files:
        dir_results = opj(dir_spades_assemblies, se + '__' + ss)
        fq_path = se_fastq_files[se]['vsearch_results_path' + '__' + ss]
        se_fastq_files[se]['spades_assembly' + '__' + ss] = None
        if ope(dir_results):
            # An existing results directory means this sample was assembled
            # in a previous run.
            Log.msg('SPAdes assembly already exists:', se)
        else:
            make_dirs(dir_results)
            Log.msg('Running SPAdes on:', se)
            run_spades_se(spades,
                          out_dir=dir_results,
                          input_file=fq_path,
                          threads=threads,
                          memory=ram,
                          rna=True)
        assmbl_path = opj(dir_results, 'transcripts.fasta')
        if ope(assmbl_path):
            count = len(read_fasta(assmbl_path, SEQ_TYPE_NT))
            tr_str = ' transcripts.'
            if count == 1:
                tr_str = ' transcript.'
            Log.msg('SPAdes produced ' + str(count) + tr_str, False)
            se_fastq_files[se]['spades_assembly' + '__' + ss] = assmbl_path
        else:
            Log.wrn('SPAdes produced no transcripts.', False)
    for pe in pe_fastq_files:
        dir_results = opj(dir_spades_assemblies, pe + '__' + ss)
        fq_paths = pe_fastq_files[pe]['vsearch_results_path' + '__' + ss]
        pe_fastq_files[pe]['spades_assembly' + '__' + ss] = None
        if ope(dir_results):
            Log.msg('SPAdes assembly already exists:', pe)
        else:
            make_dirs(dir_results)
            Log.msg('Running SPAdes on: ' + pe)
            if osstat(fq_paths[0]).st_size > 0 and \
                    osstat(fq_paths[1]).st_size > 0:
                run_spades_pe(spades,
                              out_dir=dir_results,
                              input_files=fq_paths,
                              threads=threads,
                              memory=ram,
                              rna=True)
            else:
                # One mate file is empty: fall back to a single-end run on
                # the concatenation of both files.
                _ = opj(dir_temp, 'temp.fasta')
                combine_text_files(fq_paths, _)
                run_spades_se(spades,
                              out_dir=dir_results,
                              input_file=_,
                              threads=threads,
                              memory=ram,
                              rna=True)
                osremove(_)
        assmbl_path = opj(dir_results, 'transcripts.fasta')
        if ope(assmbl_path):
            count = len(read_fasta(assmbl_path, SEQ_TYPE_NT))
            tr_str = ' transcripts.'
            if count == 1:
                tr_str = ' transcript.'
            Log.msg('SPAdes produced ' + str(count) + tr_str, False)
            pe_fastq_files[pe]['spades_assembly' + '__' + ss] = assmbl_path
        else:
            Log.wrn('SPAdes produced no transcripts.', False)
def add_qb_torrent(link, path, listener, select):
    """Add a torrent (file path or magnet link) to qBittorrent.

    Registers a QbDownloadStatus for the listener, spawns the qb listener
    thread, and -- when file selection is enabled -- pauses the torrent and
    posts an inline keyboard so the user can pick files via the web UI.
    """
    client = get_client()
    pincode = ""
    try:
        # A local .torrent file vs. a magnet link determines how the hash
        # is derived and how the torrent is added.
        if ospath.exists(link):
            is_file = True
            ext_hash = _get_hash_file(link)
        else:
            is_file = False
            ext_hash = _get_hash_magnet(link)
        tor_info = client.torrents_info(torrent_hashes=ext_hash)
        if len(tor_info) > 0:
            sendMessage("This Torrent is already in list.", listener.bot, listener.update)
            client.auth_log_out()
            return
        if is_file:
            op = client.torrents_add(torrent_files=[link], save_path=path)
            # The .torrent file is no longer needed once submitted.
            osremove(link)
        else:
            op = client.torrents_add(link, save_path=path)
        sleep(0.3)
        if op.lower() == "ok.":
            meta_time = time()
            tor_info = client.torrents_info(torrent_hashes=ext_hash)
            if len(tor_info) == 0:
                # Wait up to 30 s for the torrent to appear in the client.
                while True:
                    if time() - meta_time >= 30:
                        ermsg = "The Torrent was not added. Report when you see this error"
                        sendMessage(ermsg, listener.bot, listener.update)
                        client.torrents_delete(torrent_hashes=ext_hash, delete_files=True)
                        client.auth_log_out()
                        return
                    tor_info = client.torrents_info(torrent_hashes=ext_hash)
                    if len(tor_info) > 0:
                        break
        else:
            sendMessage("This is an unsupported/invalid link.", listener.bot, listener.update)
            client.torrents_delete(torrent_hashes=ext_hash, delete_files=True)
            client.auth_log_out()
            return
        tor_info = tor_info[0]
        ext_hash = tor_info.hash
        # Random 14-char id used to track this download in the bot.
        gid = ''.join(random.SystemRandom().choices(string.ascii_letters + string.digits, k=14))
        with download_dict_lock:
            download_dict[listener.uid] = QbDownloadStatus(
                listener, client, gid, ext_hash, select)
        LOGGER.info(
            f"QbitDownload started: {tor_info.name} - Hash: {ext_hash}")
        Thread(target=_qb_listener, args=(listener, client, gid, ext_hash, select, meta_time, path)).start()
        if BASE_URL is not None and select:
            if not is_file:
                # Magnet links need their metadata before files can be
                # listed; block here until the metadata phase completes.
                metamsg = "Downloading Metadata, wait then you can select files or mirror torrent file"
                meta = sendMessage(metamsg, listener.bot, listener.update)
                while True:
                    tor_info = client.torrents_info(torrent_hashes=ext_hash)
                    if len(tor_info) == 0:
                        deleteMessage(listener.bot, meta)
                        return
                    try:
                        tor_info = tor_info[0]
                        if tor_info.state in ["metaDL", "checkingResumeData"]:
                            sleep(1)
                        else:
                            deleteMessage(listener.bot, meta)
                            break
                    except:
                        deleteMessage(listener.bot, meta)
                        return
            sleep(0.5)
            client.torrents_pause(torrent_hashes=ext_hash)
            # Derive a 4-digit pincode from the digits of the hash.
            for n in str(ext_hash):
                if n.isdigit():
                    pincode += str(n)
                    if len(pincode) == 4:
                        break
            buttons = button_build.ButtonMaker()
            if WEB_PINCODE:
                buttons.buildbutton("Select Files", f"{BASE_URL}/app/files/{ext_hash}")
                buttons.sbutton("Pincode", f"pin {gid} {pincode}")
            else:
                buttons.buildbutton(
                    "Select Files", f"{BASE_URL}/app/files/{ext_hash}?pin_code={pincode}")
            buttons.sbutton("Done Selecting", f"done {gid} {ext_hash}")
            QBBUTTONS = InlineKeyboardMarkup(buttons.build_menu(2))
            msg = "Your download paused. Choose files then press Done Selecting button to start downloading."
            sendMarkup(msg, listener.bot, listener.update, QBBUTTONS)
        else:
            sendStatusMessage(listener.update, listener.bot)
    except Exception as e:
        sendMessage(str(e), listener.bot, listener.update)
        client.auth_log_out()
def remove(self):
    '''purpose: deletes the file on the filesystem containing the sqlite db'''
    import os
    os.remove(self.name + ".sqlite")
def onDownloadComplete(self):
    """Post-download pipeline: optionally zip/extract, split for Telegram,
    then hand off to the Telegram or Google Drive uploader."""
    with download_dict_lock:
        LOGGER.info(f"Download completed: {download_dict[self.uid].name()}")
        download = download_dict[self.uid]
        name = str(download.name()).replace('/', '')
        gid = download.gid()
        size = download.size_raw()
    if name == "None" or self.isQbit:
        # qBittorrent downloads (or unnamed ones): use the newest entry in
        # the download directory as the name.
        name = listdir(f'{DOWNLOAD_DIR}{self.uid}')[-1]
    m_path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    if self.isZip:
        # --- Archive branch: 7z the download (volume-split when leeching
        # over the Telegram size limit, password-protected when requested).
        try:
            with download_dict_lock:
                download_dict[self.uid] = ZipStatus(name, m_path, size)
            pswd = self.pswd
            path = m_path + ".zip"
            LOGGER.info(f'Zip: orig_path: {m_path}, zip_path: {path}')
            if pswd is not None:
                if self.isLeech and int(size) > TG_SPLIT_SIZE:
                    path = m_path + ".zip"
                    srun(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", f"-p{pswd}", path, m_path])
                else:
                    srun(["7z", "a", "-mx=0", f"-p{pswd}", path, m_path])
            elif self.isLeech and int(size) > TG_SPLIT_SIZE:
                path = m_path + ".zip"
                srun(["7z", f"-v{TG_SPLIT_SIZE}b", "a", "-mx=0", path, m_path])
            else:
                srun(["7z", "a", "-mx=0", path, m_path])
        except FileNotFoundError:
            LOGGER.info('File to archive not found!')
            self.onUploadError('Internal error occurred!!')
            return
        # Remove the unarchived original (directory or single file).
        try:
            rmtree(m_path)
        except:
            osremove(m_path)
    elif self.extract:
        # --- Extraction branch: unpack archives (recursively for
        # directories), then delete the archive parts.
        try:
            if ospath.isfile(m_path):
                path = get_base_name(m_path)
            LOGGER.info(f"Extracting: {name}")
            with download_dict_lock:
                download_dict[self.uid] = ExtractStatus(name, m_path, size)
            pswd = self.pswd
            if ospath.isdir(m_path):
                for dirpath, subdir, files in walk(m_path, topdown=False):
                    for file_ in files:
                        # First archive volume / standalone archive:
                        # extract it in place with 7z.
                        if search(r'\.part0*1.rar$', file_) or search(r'\.7z.0*1$', file_) \
                                or (file_.endswith(".rar") and not search(r'\.part\d+.rar$', file_)) \
                                or file_.endswith(".zip") or search(r'\.zip.0*1$', file_):
                            m_path = ospath.join(dirpath, file_)
                            if pswd is not None:
                                result = srun(["7z", "x", f"-p{pswd}", m_path, f"-o{dirpath}", "-aot"])
                            else:
                                result = srun(["7z", "x", m_path, f"-o{dirpath}", "-aot"])
                            if result.returncode != 0:
                                LOGGER.error('Unable to extract archive!')
                    for file_ in files:
                        # Second pass: delete all archive volumes/parts.
                        if file_.endswith(".rar") or search(r'\.r\d+$', file_) \
                                or search(r'\.7z.\d+$', file_) or search(r'\.z\d+$', file_) \
                                or search(r'\.zip.\d+$', file_) or file_.endswith(".zip"):
                            del_path = ospath.join(dirpath, file_)
                            osremove(del_path)
                path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
            else:
                if pswd is not None:
                    result = srun(["bash", "pextract", m_path, pswd])
                else:
                    result = srun(["bash", "extract", m_path])
                if result.returncode == 0:
                    LOGGER.info(f"Extract Path: {path}")
                    osremove(m_path)
                    LOGGER.info(f"Deleting archive: {m_path}")
                else:
                    LOGGER.error('Unable to extract archive! Uploading anyway')
                    path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
        except NotSupportedExtractionArchive:
            LOGGER.info("Not any valid archive, uploading file as it is.")
            path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    else:
        path = f'{DOWNLOAD_DIR}{self.uid}/{name}'
    up_name = PurePath(path).name
    up_path = f'{DOWNLOAD_DIR}{self.uid}/{up_name}'
    size = get_path_size(f'{DOWNLOAD_DIR}{self.uid}')
    if self.isLeech and not self.isZip:
        # Telegram uploads without zipping: split any file that exceeds the
        # Telegram size limit into parts.
        checked = False
        for dirpath, subdir, files in walk(f'{DOWNLOAD_DIR}{self.uid}', topdown=False):
            for file_ in files:
                f_path = ospath.join(dirpath, file_)
                f_size = ospath.getsize(f_path)
                if int(f_size) > TG_SPLIT_SIZE:
                    if not checked:
                        checked = True
                        with download_dict_lock:
                            download_dict[self.uid] = SplitStatus(up_name, up_path, size)
                        LOGGER.info(f"Splitting: {up_name}")
                    fssplit(f_path, f_size, file_, dirpath, TG_SPLIT_SIZE)
                    osremove(f_path)
    if self.isLeech:
        LOGGER.info(f"Leech Name: {up_name}")
        tg = TgUploader(up_name, self)
        tg_upload_status = TgUploadStatus(tg, size, gid, self)
        with download_dict_lock:
            download_dict[self.uid] = tg_upload_status
        update_all_messages()
        tg.upload()
    else:
        LOGGER.info(f"Upload Name: {up_name}")
        drive = GoogleDriveHelper(up_name, self)
        upload_status = UploadStatus(drive, size, gid, self)
        with download_dict_lock:
            download_dict[self.uid] = upload_status
        update_all_messages()
        drive.upload(up_name)
def test_save(self):
    """A white graphic saved to PNG must match the white reference array."""
    out_file = 'tmp.png'
    self.graphic.color = 'white'
    self.graphic.save(out_file)
    self.assertImageMatchesArray(Image.open(out_file), self.ref_white)
    # Remove the temporary image so repeated runs start clean.
    osremove(out_file)
def __upload_file(self, up_path, file_, dirpath):
    """Upload one file to Telegram as video, audio, photo or document.

    up_path  -- full path of the file on disk
    file_    -- its file name (may be rewritten below)
    dirpath  -- directory containing the file

    Side effects: may rename the file on disk (custom-name prefix, or a
    forced .mp4 extension for non-MKV/MP4 videos); updates self.__sent_msg
    so each upload replies to the previous one; increments self.__corrupted
    on RPC or other errors; removes an auto-generated thumbnail at the end;
    and deletes the uploaded file unless the upload was cancelled.
    """
    if CUSTOM_FILENAME is not None:
        # Prefix both the caption and the on-disk file name with the custom
        # string, and rename the actual file to match.
        cap_mono = f"{CUSTOM_FILENAME} <code>{file_}</code>"
        file_ = f"{CUSTOM_FILENAME} {file_}"
        new_path = ospath.join(dirpath, file_)
        osrename(up_path, new_path)
        up_path = new_path
    else:
        cap_mono = f"<code>{file_}</code>"
    notMedia = False
    # Work on a local copy of the user-supplied thumbnail; comparing against
    # self.__thumb later tells auto-generated thumbnails apart (only those
    # are deleted in the cleanup at the bottom).
    thumb = self.__thumb
    try:
        if not self.__as_doc:
            duration = 0
            if file_.upper().endswith(VIDEO_SUFFIXES):
                duration = get_media_info(up_path)[0]
                if thumb is None:
                    # No user thumbnail: generate a screenshot from the video.
                    thumb = take_ss(up_path)
                    if self.__is_cancelled:
                        # Cancelled while taking the screenshot: drop the
                        # generated thumbnail and abort this upload.
                        if self.__thumb is None and thumb is not None and ospath.lexists(thumb):
                            osremove(thumb)
                        return
                if thumb is not None:
                    img = Image.open(thumb)
                    width, height = img.size
                else:
                    width, height = get_video_resolution(up_path)
                if not file_.upper().endswith(("MKV", "MP4")):
                    # Force an .mp4 extension on other video containers —
                    # presumably for Telegram player compatibility (TODO
                    # confirm); the file is renamed on disk accordingly.
                    file_ = ospath.splitext(file_)[0] + '.mp4'
                    new_path = ospath.join(dirpath, file_)
                    osrename(up_path, new_path)
                    up_path = new_path
                self.__sent_msg = self.__sent_msg.reply_video(video=up_path,
                                                              quote=True,
                                                              caption=cap_mono,
                                                              parse_mode="html",
                                                              duration=duration,
                                                              width=width,
                                                              height=height,
                                                              thumb=thumb,
                                                              supports_streaming=True,
                                                              disable_notification=True,
                                                              progress=self.__upload_progress)
            elif file_.upper().endswith(AUDIO_SUFFIXES):
                duration, artist, title = get_media_info(up_path)
                self.__sent_msg = self.__sent_msg.reply_audio(audio=up_path,
                                                              quote=True,
                                                              caption=cap_mono,
                                                              parse_mode="html",
                                                              duration=duration,
                                                              performer=artist,
                                                              title=title,
                                                              thumb=thumb,
                                                              disable_notification=True,
                                                              progress=self.__upload_progress)
            elif file_.upper().endswith(IMAGE_SUFFIXES):
                self.__sent_msg = self.__sent_msg.reply_photo(photo=up_path,
                                                              quote=True,
                                                              caption=cap_mono,
                                                              parse_mode="html",
                                                              disable_notification=True,
                                                              progress=self.__upload_progress)
            else:
                # Not a recognized media suffix: fall through to the
                # document branch below.
                notMedia = True
        if self.__as_doc or notMedia:
            # Documents still get a video thumbnail when one is available.
            if file_.upper().endswith(VIDEO_SUFFIXES) and thumb is None:
                thumb = take_ss(up_path)
                if self.__is_cancelled:
                    if self.__thumb is None and thumb is not None and ospath.lexists(thumb):
                        osremove(thumb)
                    return
            self.__sent_msg = self.__sent_msg.reply_document(document=up_path,
                                                             quote=True,
                                                             thumb=thumb,
                                                             caption=cap_mono,
                                                             parse_mode="html",
                                                             disable_notification=True,
                                                             progress=self.__upload_progress)
    except FloodWait as f:
        # Telegram flood control: wait out the enforced delay (f.x seconds
        # as carried by the exception) before continuing.
        LOGGER.warning(str(f))
        sleep(f.x)
    except RPCError as e:
        LOGGER.error(f"RPCError: {e} File: {up_path}")
        self.__corrupted += 1
    except Exception as err:
        LOGGER.error(f"{err} File: {up_path}")
        self.__corrupted += 1
    # Delete only thumbnails generated here (self.__thumb is None means the
    # user supplied none), never the user's own thumbnail.
    if self.__thumb is None and thumb is not None and ospath.lexists(thumb):
        osremove(thumb)
    if not self.__is_cancelled:
        osremove(up_path)
output(show_output, True, 'Power on') proc.stdin.write('<command>type_via_keybuf \\r\\r</command>' + endline) # Disk ROM ask for date, enter twice to skip output(show_output, True, 'Press return twice') proc.stdin.write('<command>type_via_keybuf load"' + crop_load + '\\r</command>' + endline) output(show_output, True, 'type load"' + crop_load) proc.stdin.write('<command>type_via_keybuf save"' + crop_save + save_argument + '\\r</command>' + endline) output(show_output, True, 'type save"' + crop_save + save_argument) proc.stdin.write('<command>type_via_keybuf poke-2,0\\r</command>' + endline) output(show_output, True, 'Quit') proc.wait() file_save_full = disk_path + '/' + crop_save if delete_original: if os.path.isfile(file_save_full): show_log('', 'Deleting source', 3) show_log('', ' '.join(['delete_file:', file_load_full]), 4) osremove(file_load_full) else: show_log('', ' '.join(['source_not_deleted', file_load_full]), 2) show_log('', ' '.join(['converted_not_found', crop_save]), 2) show_log('', '', 3, bullet=0)
def delete_file(file):
    """Remove *file* from disk, treating an already-missing file as success."""
    try:
        osremove(file)
    except FileNotFoundError:
        # Already gone — nothing to clean up.
        return
def onUploadComplete(self, link: str, size, files, folders, typ, name: str):
    """React to a finished upload: report the result to the chat and clean up.

    Leech uploads: `files` maps file name -> Telegram message id and `typ`
    holds the corrupted-file count; an indexed list of t.me links is sent
    in chunks.  Drive uploads: `typ` is displayed as the mirror type and a
    button markup (Drive link, optional Index/View links, operator-defined
    extra buttons) is sent.  Afterwards the download directory and the
    download_dict entry are removed — except for a qBittorrent task that
    keeps seeding (QB_SEED and not extract), where only a zipped copy is
    deleted and the entry is kept alive.
    """
    msg = f'<b>Name: </b><code>{escape(name)}</code>\n\n<b>Size: </b>{size}'
    if self.isLeech:
        count = len(files)
        msg += f'\n<b>Total Files: </b>{count}'
        if typ != 0:
            msg += f'\n<b>Corrupted Files: </b>{typ}'
        msg += f'\n<b>cc: </b>{self.tag}\n\n'
        if self.message.chat.type == 'private':
            sendMessage(msg, self.bot, self.update)
        else:
            # t.me/c/<id>/<msg> links need the chat id without its leading
            # "-100" marker (the first 4 characters of the str form).
            chat_id = str(self.message.chat.id)[4:]
            fmsg = ''
            for index, item in enumerate(list(files), start=1):
                msg_id = files[item]
                link = f"https://t.me/c/{chat_id}/{msg_id}"
                fmsg += f"{index}. <a href='{link}'>{item}</a>\n"
                if len(fmsg.encode('utf-8') + msg.encode('utf-8')) > 4000:
                    # Flush before exceeding Telegram's message length limit;
                    # short pause between chunks to avoid flood limits.
                    sendMessage(msg + fmsg, self.bot, self.update)
                    sleep(1.5)
                    fmsg = ''
            if fmsg != '':
                sendMessage(msg + fmsg, self.bot, self.update)
        try:
            clean_download(f'{DOWNLOAD_DIR}{self.uid}')
        except FileNotFoundError:
            pass
        with download_dict_lock:
            del download_dict[self.uid]
            dcount = len(download_dict)
        if dcount == 0:
            # No tasks left: full cleanup instead of a status refresh.
            self.clean()
        else:
            update_all_messages()
    else:
        msg += f'\n\n<b>Type: </b>{typ}'
        if ospath.isdir(f'{DOWNLOAD_DIR}{self.uid}/{name}'):
            msg += f'\n<b>SubFolders: </b>{folders}'
            msg += f'\n<b>Files: </b>{files}'
        msg += f'\n\n<b>cc: </b>{self.tag}'
        buttons = ButtonMaker()
        link = short_url(link)
        buttons.buildbutton("☁️ Drive Link", link)
        LOGGER.info(f'Done Uploading {name}')
        if INDEX_URL is not None:
            url_path = requests.utils.quote(f'{name}')
            share_url = f'{INDEX_URL}/{url_path}'
            # NOTE(review): this isdir() uses f'{DOWNLOAD_DIR}/{self.uid}/...'
            # with an extra slash, unlike f'{DOWNLOAD_DIR}{self.uid}/...'
            # elsewhere in this method — confirm DOWNLOAD_DIR's
            # trailing-slash convention.
            if ospath.isdir(f'{DOWNLOAD_DIR}/{self.uid}/{name}'):
                # Directories get a trailing slash on the index link.
                share_url += '/'
                share_url = short_url(share_url)
                buttons.buildbutton("⚡ Index Link", share_url)
            else:
                share_url = short_url(share_url)
                buttons.buildbutton("⚡ Index Link", share_url)
                if VIEW_LINK:
                    # Browser-view variant of the index link (files only).
                    share_urls = f'{INDEX_URL}/{url_path}?a=view'
                    share_urls = short_url(share_urls)
                    buttons.buildbutton("🌐 View Link", share_urls)
        # Optional operator-configured extra buttons.
        if BUTTON_FOUR_NAME is not None and BUTTON_FOUR_URL is not None:
            buttons.buildbutton(f"{BUTTON_FOUR_NAME}", f"{BUTTON_FOUR_URL}")
        if BUTTON_FIVE_NAME is not None and BUTTON_FIVE_URL is not None:
            buttons.buildbutton(f"{BUTTON_FIVE_NAME}", f"{BUTTON_FIVE_URL}")
        if BUTTON_SIX_NAME is not None and BUTTON_SIX_URL is not None:
            buttons.buildbutton(f"{BUTTON_SIX_NAME}", f"{BUTTON_SIX_URL}")
        if self.isQbit and QB_SEED and not self.extract:
            # Torrent continues seeding: keep the download dir and the
            # download_dict entry; only a zipped artifact is safe to remove.
            if self.isZip:
                try:
                    osremove(f'{DOWNLOAD_DIR}{self.uid}/{name}')
                except:
                    pass
            return sendMarkup(msg, self.bot, self.update, InlineKeyboardMarkup(buttons.build_menu(2)))
        else:
            try:
                clean_download(f'{DOWNLOAD_DIR}{self.uid}')
            except FileNotFoundError:
                pass
            with download_dict_lock:
                del download_dict[self.uid]
                count = len(download_dict)
            sendMarkup(msg, self.bot, self.update, InlineKeyboardMarkup(buttons.build_menu(2)))
            if count == 0:
                self.clean()
            else:
                update_all_messages()