def setUp(self):
    """Reset the upgrade-test database copies to pristine state before each test."""
    self.longMessage = True  # include custom messages alongside default assert output
    # Remove any leftover working copies from a previous run.
    for a_db_file in db_test_upgrade_files:
        # fixed: dropped the non-idiomatic `is True` comparison
        if path.exists(db_folder + a_db_file['file']):
            rm(db_folder + a_db_file['file'])
    # Recreate each working copy from its pristine original.
    for a_db_file in db_test_upgrade_files:
        shutil.copyfile(db_folder + a_db_file['original_file'],
                        db_folder + a_db_file['file'])
def dir_delete(path):
    """Delete *path* if it exists: rmdir for directories, remove for files.

    NOTE: os.rmdir only succeeds on empty directories; non-empty
    directories raise OSError, matching the original behavior.
    """
    if not os.path.exists(path):
        return
    if os.path.isdir(path):
        os.rmdir(path)
    else:
        os.remove(path)  # fixed: os.rm does not exist (AttributeError)
def tearDownClass(cls):
    """Best-effort removal of the test database files unless KEEP_TEST_FILE is set."""
    if KEEP_TEST_FILE is not True:
        try:
            rm(db_test_file)
            rm(db_file)
        except OSError:  # fixed: narrowed from bare except; only file errors are expected
            pass
def rmall(*args, purge_tree=True):
    """Remove every file or directory named in *args*.

    Directories are handled recursively: contained files are always
    deleted, and when *purge_tree* is True the directory skeleton is
    removed as well (otherwise the tree is only emptied of files).

    :Attributes:
      :param: args: File or directory names to remove.
      :type: args: str.
      :param: purge_tree: Default True. If false the the tree will just be
        cleaned, else all will be removed.
      :type: purge_tree: bool.
    :raises: TypeError
    """
    for target in args:
        if isfile(target):
            rm(target)
            continue
        if not isdir(target):
            raise TypeError(
                str(target) + " in *args is not a file or even a directory.")
        # First pass: delete every regular file under the tree.
        for entry in glob(join(target, '**'), recursive=True):
            if isfile(entry):
                rm(entry)
        if purge_tree:
            # Second pass, deepest-first, so children go before parents.
            for entry in glob(join(target, '**'), recursive=True)[::-1]:
                if isdir(entry):
                    rmdir(entry)
def test_arithmagic(entries, err):
    # Feed `entries` to arithmagic() through redirected stdin and grade how
    # the student handles bad input.
    # NOTE(review): uses `self` but takes no self parameter — presumably a
    # closure nested inside a grader method capturing `self`; confirm.
    standard_in = sys.stdin
    standard_out = sys.stdout
    try:
        # Write one entry per line for the student code to consume.
        with open("__IN__.txt", 'w') as f:
            f.writelines([str(i) + '\n' for i in entries])
        sys.stdin = open("__IN__.txt", 'r')  # Redirect std in.
        sys.stdout = open("__OUT__.txt", 'w')  # Redirect std out.
        message = "\narithmagic() failed to raise a ValueError "
        try:
            s.arithmagic()
        except ValueError as e:
            # Expected outcome: show both error messages for manual grading.
            sys.stdin = standard_in  # Reset std in.
            sys.stdout = standard_out  # Reset std out.
            print("\nCorrect Error message: {}".format(err))
            print("Student Error message: {}".format(e))
            return self._grade(1, "Poor error message")
        except Exception as e:
            # Wrong exception type: record it in the feedback, score 0.
            self.feedback += message
            self.feedback += "(got {} instead)".format(self._errType(e))
            self.feedback += "\n\tError message: {}".format(e)
        else:
            # No exception at all: also 0 points.
            self.feedback += message
        return 0
    finally:
        # Always restore the real streams and delete the scratch files.
        sys.stdin = standard_in  # Reset std in.
        sys.stdout = standard_out  # Reset std out.
        rm("__IN__.txt")
        rm("__OUT__.txt")
async def DBdump(self, ctx):
    """Dump the whole database to a SQL file and DM it to the command author."""
    await ctx.author.send("Création du fichier...")
    dump_path = mdb.dumpAllDB()
    attachment = discord.File(dump_path, "dump_file.sql")
    await ctx.author.send(file=attachment)
    rm(dump_path)  # the dump is transient; delete once sent
    await ctx.channel.send("Fichier envoyé")
def newTable(self, justDeltas=False):
    """Patch the test rows, write them to a temp CSV, and rebuild a table.

    When *justDeltas* is true, skip table creation and return the
    recorded change list (self.change) instead.
    """
    if not self.bin:
        # Keep only rows whose second-to-last cell is non-zero.
        oldRows = [r for r in self.test._rows if abs(r.cells[-2]) > 0]
    else:
        oldRows = self.test._rows
    newRows = [self.patchIt(t) for t in oldRows]
    if self.write:
        self.deltasCSVWriter()
    header = [h.name for h in self.test.headers[:-1]]
    # fixed: removed unused local `name = str(randint(0, 1e6))`
    with open('tmp0.csv', 'w') as csvfile:
        writer = csv.writer(csvfile, delimiter=',')
        writer.writerow(header)
        for el in newRows:
            writer.writerow(el + [0])  # trailing 0 fills the dropped last column
    if not justDeltas:  # fixed: `== False` comparison anti-idiom
        try:
            new = createTbl(['tmp0.csv'])
            rm('tmp0.csv')
            return new
        except Exception:  # fixed: narrowed bare except; drop to debugger on failure
            set_trace()
    else:
        return self.change
def image(updater, context):
    """ takes an image and returns what is drawn in the image
    from IBM Watson API """
    bot = Bot(token=TOKEN)
    # Highest-resolution version of the received photo.
    file_id = updater.message.photo[-1]
    newFile = bot.get_file(file_id)
    newFile.download("./images/img.jpg")
    # Set up IBM Watson VisualRecognition API
    authenticator = IAMAuthenticator(IBM_TOKEN)
    visual_recognition = VisualRecognitionV3(version=IBM_VERSION,
                                             authenticator=authenticator)
    visual_recognition.set_service_url(IBM_URL)
    # Send a request to IBM Watson and receive the result
    with open("./images/img.jpg", "rb") as image_file:
        classes = visual_recognition.classify(images_file=image_file,
                                              threshold="0.6").get_result()
    # Top class label and its confidence from the first classifier result.
    img_recognition = classes["images"][0]["classifiers"][0]["classes"][0][
        "class"]
    recognition_score = classes["images"][0]["classifiers"][0]["classes"][0][
        "score"]
    updater.message.reply_text(TEMPLATES["image_recognition"].format(
        name=img_recognition, percent=recognition_score))
    rm("./images/img.jpg")  # clean up the downloaded photo
def create(self):
    """Enter CREATE mode: load the current image, reset masks and flood
    buffers, and open an OpenCV window wired to the draw callback."""
    self.mode = self.CREATE
    if self.image_path == "":
        QMessageBox.information(self, "Information", u"请先打开一张图片!")
        return
    # Drop any stale annotation for this image before starting over.
    if os.path.exists(ustr(self.image_path) + '.xml'):
        os.remove(ustr(self.image_path) + '.xml')  # fixed: os.rm does not exist
    self.points = []
    self.pre_pos = None
    # imdecode flag — 1: BGR color, 0: grayscale
    self.image = cv2.imdecode(np.fromfile(ustr(self.image_path), dtype=np.uint8), 1)
    #self.image = cv2.imdecode(self.label_Image.pixmap, 1)
    self.image_gray = cv2.imdecode(np.fromfile(ustr(self.image_path), dtype=np.uint8), 0)
    h, w = self.image.shape[:2]
    # Mask is 2px larger per OpenCV floodFill convention — presumably
    # consumed by self.draw; confirm.
    self.mask = np.zeros((h+2, w+2), np.uint8)
    self.flooded = copy.deepcopy(self.image_gray)
    self.flooded[:] = 0
    self.flooded2 = copy.deepcopy(self.image_gray)
    self.flooded2[:] = 0
    self.flooded2_show = copy.deepcopy(self.image)
    self.flooded2_show[:] = 0
    self.mask[:] = 0
    cv2.namedWindow('image', cv2.WINDOW_KEEPRATIO | cv2.WINDOW_NORMAL)
    cv2.imshow('image', self.image)
    cv2.setMouseCallback('image', self.draw)
def upload_to_s3(bucket, file_path, prefix, timestamp):
    """Uploads a file to S3 adding the corresponding prefixes and timestamp
    suffix if necessary. The local file is always deleted afterwards.

    Args:
        bucket (S3.Bucket): Bucket to which the file will be uploaded
        file_path (str): Path in which the file to upload is found
        prefix (str): Prefix to add to the uploaded file name
        timestamp (Optional[str]): Timestamp mark to add to the file name

    Returns:
        bool: True when the upload succeeded, False otherwise.
    """
    upload_name = f'{prefix}_{timestamp or ""}{basename(file_path)}'
    try:
        bucket.upload_file(file_path, upload_name)
        syslog.syslog(syslog.LOG_INFO,
                      f'Uploaded {file_path} to S3 Bucket - {bucket.name}')
        return True
    except S3UploadFailedError as s3ex:
        syslog.syslog(
            syslog.LOG_ERR,
            # fixed: `bucket_name` was undefined here (NameError); use bucket.name
            f'Failed to upload {file_path} to S3 Bucket - {bucket.name} - {s3ex}'
        )
        return False
    finally:
        rm(file_path)  # local copy is removed whether or not the upload worked
def generate_color_theme(colors):
    """Generate new color theme with rules for `colors` inside.
    Then set up it as current theme.

    Arguments:
    colors: [Color]
    """
    theme_path = theme.abspath
    theme_cache = theme_path + '.cache'
    colorized_theme_path = theme.colorized_path
    theme_plist = read_plist(theme_path)
    # Append one generated rule per distinct color.
    for rule in (template(color) for color in set(colors)):
        theme_plist['settings'].append(rule)
    write_plist(theme_plist, colorized_theme_path)
    theme.set(colorized_theme_path)
    # Drop the previous colorized theme (and its cache) so they don't pile up.
    if basename(theme_path).startswith('Colorized-'):
        for stale in (theme_path, theme_cache):
            if exists(stale):
                rm(stale)
def zipdir(basedir, archivename):
    """Zip the contents of *basedir* (recursively) into *archivename*.

    Returns True on success, False when *basedir* does not exist.
    Empty directories are not stored in the archive.
    """
    print(archivename)
    try:
        os.makedirs(archivename)  # make the directories if needed
        os.rmdir(archivename)  # remove the filename
    except OSError:  # fixed: narrowed bare except
        pass
    try:
        os.remove(archivename)  # fixed: os.rm does not exist
    except OSError:  # fixed: narrowed bare except
        pass
    if not os.path.isdir(basedir):
        print('unable to upload directory: %s as it does not exist!' % basedir)
        return False
    with closing(ZipFile(archivename, "w", ZIP_DEFLATED)) as z:
        for root, dirs, files in os.walk(basedir):
            # NOTE: ignore empty directories
            for fn in files:
                absfn = os.path.join(root, fn)
                zfn = absfn[len(basedir) + len(os.sep):]  # XXX: relative path
                print(zfn)
                z.write(absfn, zfn)
    return True
def _download_mv(self, song):
    """Download the MV for *song*, emitting GObject-style signals.

    Emits 'chunk-received' with a percentage while downloading, 'can-play'
    once enough data is buffered (or with an error status), and
    'downloaded' with the path on completion (None on failure).
    """
    def _wrap(req):
        # Stream the response into mv_path chunk by chunk.
        received_size = 0
        can_play_emited = False
        content_length = int(req.headers.get('Content-Length'))
        print('size of file: ', round(content_length / 2**20, 2), 'M')
        fh = open(mv_path, 'wb')
        while True:
            if self.force_quit:
                # Abort requested: discard the partial file.
                del req
                fh.close()
                os.remove(mv_path)
                return
            chunk = req.read(CHUNK)
            received_size += len(chunk)
            percent = int(received_size/content_length * 100)
            self.emit('chunk-received', percent)
            #print('percentage:', percent)
            # Enough buffered to start playback — emit once.
            if (received_size > CHUNK_MV_TO_PLAY or percent > 20) \
                    and not can_play_emited:
                can_play_emited = True
                print('mv can play now')
                self.emit('can-play', mv_path, 'OK')
            if not chunk:
                break
            fh.write(chunk)
        fh.close()
        print('mv downloaded')
        self.emit('downloaded', mv_path)

    mv_link, mv_path = get_song_link(song, self.app.conf, True)
    if mv_link is False:
        # No usable link at all.
        self.emit('can-play', mv_path, 'URLError')
        self.emit('downloaded', None)
        return None
    if mv_link is True:
        # Already cached locally.
        print('local song exists, signals will be emited:', mv_path)
        self.emit('can-play', mv_path, 'OK')
        self.emit('downloaded', mv_path)
        return
    retried = 0
    print('Net.AsyncSong, mv will be downloaded:', mv_path)
    while retried < MAXTIMES:
        try:
            req = request.urlopen(mv_link)
            self.req = req
            _wrap(req)
            return mv_path
        except Exception as e:
            print('AsyncMV.getmv()', e, 'with mv_link:', mv_link)
            retried += 1
            if retried == MAXTIMES:
                print('mv failed to download, please check link', mv_link)
                if os.path.exists(mv_path):
                    os.remove(mv_path)  # fixed: os.rm does not exist
                    self.emit('can-play', mv_path, 'URLError')
                else:
                    self.emit('can-play', mv_path, 'FileNotFoundError')
                self.emit('downloaded', None)
                return None
def downloadYtMP4():
    """Flask view: download a YouTube video via youtube-dl and serve it."""
    if not programExists("youtube-dl"):
        return eflash('youtube-dl is not installed or is not on your PATH.', request.url)
    if request.method == 'POST':
        url = request.form['url']
        if url != '':
            if os.path.exists("download.mp4"):
                os.remove("download.mp4")  # fixed: os.rm does not exist
            # fixed: os.system with a concatenated string allowed shell
            # injection via the user-supplied URL; use an argument list.
            import subprocess
            subprocess.run(["python3", "-m", "youtube_dl", "-f", "best",
                            "-o", "download0.mp4", url])
            return send_from_directory(".", "download0.mp4", as_attachment=True)
        else:
            return render_template('download.html', error='You must specify a URL!',
                                   background=database.Database("config.json").get('background'))
    else:
        return render_template("download.html",
                               background=database.Database("config.json").get('background'))
def message(updater, context):
    """ takes the text and returns audio file where robot
    reads the text (str -> voice_message.ogg) """
    bot = Bot(token=TOKEN)
    text = updater.message.text
    # macOS Russian TTS voice identifier.
    ru_voice_id = "com.apple.speech.synthesis.voice.yuri"
    engine = pyttsx3.init()
    engine.setProperty("voice", ru_voice_id)
    engine.save_to_file(text, "./audio/voice_message.wav")
    engine.runAndWait()  # blocks until the wav is fully written
    # Formatting to the required codec
    # (Telegram voice notes expect OGG/Opus).
    subprocess.run([
        "ffmpeg",
        "-i",
        "./audio/voice_message.wav",
        "-acodec",
        "libopus",
        "./audio/voice_message.ogg",
        "-y",
    ])
    bot.send_voice(chat_id=updater.message.chat_id,
                   voice=open("./audio/voice_message.ogg", "rb"))
    # Clean up both intermediate audio files.
    rm("./audio/voice_message.ogg")
    rm("./audio/voice_message.wav")
def test_arithmagic(entries, err):
    # Feed `entries` to arithmagic() through redirected stdin and grade how
    # the student handles bad input.
    # NOTE(review): uses `self` but takes no self parameter — presumably a
    # closure nested inside a grader method capturing `self`; confirm.
    standard_in = sys.stdin
    standard_out = sys.stdout
    try:
        # Write one entry per line for the student code to consume.
        with open("__IN__.txt", 'w') as f:
            f.writelines([str(i) + '\n' for i in entries])
        sys.stdin = open("__IN__.txt", 'r')  # Redirect std in.
        sys.stdout = open("__OUT__.txt", 'w')  # Redirect std out.
        message = "\narithmagic() failed to raise a ValueError "
        try:
            s.arithmagic()
        except ValueError as e:
            # Expected outcome: show both error messages for manual grading.
            sys.stdin = standard_in  # Reset std in.
            sys.stdout = standard_out  # Reset std out.
            print("\nCorrect Error message: {}".format(err))
            print("Student Error message: {}".format(e))
            return self._grade(1, "Poor error message")
        except Exception as e:
            # Wrong exception type: record it in the feedback, score 0.
            self.feedback += message
            self.feedback += "(got {} instead)".format(
                self._errType(e))
            self.feedback += "\n\tError message: {}".format(e)
        else:
            # No exception at all: also 0 points.
            self.feedback += message
        return 0
    finally:
        # Always restore the real streams and delete the scratch files.
        sys.stdin = standard_in  # Reset std in.
        sys.stdout = standard_out  # Reset std out.
        rm("__IN__.txt")
        rm("__OUT__.txt")
def delete_database(path):
    """Remove the database file at *path*; raise DatabaseError when absent."""
    if not exists(path):
        message = "Database '%s' does not exist" %path
        raise DatabaseError(message)
    rm(path)
    print ("Database '%s' deleted." %path)
def get_auto(self):
    """Take one capture and derive auto-exposure settings from its EXIF.

    Returns a dict with 'iso' and 'ss' (shutter speed), parsed from the
    camera's maker notes; hard-coded defaults are kept when a field is
    absent.
    """
    uuid = str(uuid4())
    # Shoot one frame to a uniquely-named file in the storage directory.
    file_name = self.storage + uuid + '.jpg'
    command = self.command_prefix + ' -o ' + file_name
    cmd(command)
    im = Image.open(file_name)
    exif_data = im._getexif()
    auto_histogram = im.histogram()  # NOTE(review): computed but never used
    iso_value = 800  # fallback when no 'ag' maker note is present
    ss_value = 200000  # fallback when no 'exp' maker note is present
    maker_notes = exif_data[37500]  # EXIF tag 37500 = MakerNote
    maker_notes_list = maker_notes.split(' ')
    for note in maker_notes_list:
        data = note.split('=')
        key = data[0]
        if key == 'exp':
            # NOTE(review): divide by 1e6 then multiply by 1000000 is a
            # round trip — confirm the intended unit conversion.
            ss_value = int((float(data[1]) / 1e6) * 1000000)
        elif data[0] == 'ag':
            # Analog gain to ISO — presumably 2.56 gain units per ISO; verify.
            iso_value = int(float(data[1]) / 2.56)
    rm(file_name)  # remove the temporary capture
    return {
        'iso': iso_value,
        'ss' : ss_value,
    }
def mainDos():
    # Main flow #2: connect to the fiscal printer, create the U0Z and S1
    # report files, and copy them to the shared folder. Retries the printer
    # connection up to `reIntento` times. Returns True once delivered.
    libM()
    global today,tiempoInit,tiempoErr,archivoU0Z,archivoS1,Rs1,Ru0z
    slp(tiempoInit)  # initial settle delay before touching the printer
    try:
        enviado = False  # becomes True once the files reach the shared folder
        if (activarIntTFHKA()):
            logger.info('impresora conectada satfactoriamente ')
            if (cwU0Z() and cwS1()):
                logger.info('Archivo U0Z y S1 creado satifactoriamente')
                if (copiaCCS1() and copiaCCU0Z()):
                    logger.info('Archivo U0Z y S1 copiado a la carpeta compartida satifactoriamente')
                    enviado = True
                    # Local copies are no longer needed once delivered.
                    rm(Rs1)
                    rm(Ru0z)
                else:
                    enviado = False
                    logger.warning("Error al escribir U0Z y S1")
            else:
                logger.warning("Error al escribir U0Z y S1")
        else:
            # Printer not reachable: retry recursively up to reIntento times.
            # NOTE(review): a successful recursive call does not propagate
            # its result into this frame's `enviado` — confirm intended.
            for i in range(reIntento):
                if activarIntTFHKA():
                    mainDos()
                else:
                    logger.warning("Error de impresora verifique conecion inento " + str(i))
                    slp(tiempoErr)
    except Exception as e:
        global infoERR
        if infoERR == True:
            logger.warning(formErro)
            logger.warning(str(e))
    return enviado
def test_write_file():
    """Round-trip: write a DataMatrix back out and byte-compare with the source CSV."""
    inputfile = "sample_data.csv"
    stamp = datetime.now().strftime("%y%m%d-%H%M%S")
    testfile = "/tmp/test_write_file_" + stamp + inputfile
    matrix = DataMatrix(inputfile)
    matrix.write_file(testfile)
    assert(filecmp.cmp(inputfile, testfile, shallow=False))
    rm(testfile)
def copy_lock(src, dst, copyfun=shutil.copy2, lock_ending='lock'):
    """Copy *src* to *dst* while holding a sibling lock file.

    A `<dst>.<lock_ending>` marker file is created before the copy and
    removed afterwards, so concurrent readers can detect an in-progress
    copy. When *dst* is a directory, the lock sits next to the target
    file inside it.
    """
    if os.path.isdir(dst):
        target = os.path.join(dst, src.rsplit(os.sep, 1)[-1])
    else:
        target = dst
    # fixed: str.join takes one iterable; the original passed two positional
    # arguments, which raised TypeError on every call.
    lock_file = '.'.join((target, lock_ending))
    fd = open(lock_file, 'w')
    fd.close()
    copyfun(src, dst)
    os.remove(lock_file)  # fixed: os.rm does not exist
def main(fname):
    # Convert a CSV of references into a BibTeX file next to it.
    # Column 0 of each row selects the entry type; the remaining columns
    # fill that type's template, and empty template fields are stripped.
    # Returns 0 on success, 1 on any error (partial output is deleted).
    try:
        with open(fname, 'r') as fin, \
                open(fname.replace('.csv', '.bib'), 'w') as fout:
            reader = csv.reader(fin, delimiter=',', quotechar='"')
            templates = make_templates(entry_types)
            # Matches a template field that stayed empty so it can be removed.
            regex = f',\n .{{,{longest}}} = {{}}'
            for row in reader:
                if row[0] in entry_types.keys():
                    template = templates[row[0]]
                    # Strip quotes and TeX-escape ampersands.
                    clean_row = [
                        s.replace('"', '').replace('&', '\&') for s in row
                    ]
                    formatted = template.format(*clean_row)
                    formatted = re.sub(regex, '', formatted)
                    fout.write(formatted)
                else:
                    raise ValueError(f'unknown entry type <{row[0]}>')
    # IndexError is thrown when there are more fields than expected.
    # ValueError is thrown when an unknown reference type is read.
    except (IndexError, ValueError) as err:
        sys.stderr.write(f'error: {str(err)}\n')
        rm(fout.name)  # discard the partially-written .bib file
        return 1
    except FileNotFoundError:
        sys.stderr.write('error: no raw refs file\n')
        return 1
    return 0
def rename(self, old, new):
    """rename

    This one is tricky. If either path points to a node, raise EINVAL.

    If `old` points only and not more than one directory deep beyond a
    node, all of that node's tags are removed from `old`.

    If `new` points to a path not more than one directory deep beyond a
    node, all of that node's tags are added to `old`.

    Whichever combination of the above, `old` is then renamed to `new`
    via built-in os module.
    """
    logger.debug("rename(%r, %r)", old, new)
    onode, opath = self._getnode(old)
    nnode, npath = self._getnode(new)
    if opath is None or npath is None:
        raise FuseOSError(EINVAL)
    ofpath = _getpath(onode, opath)
    nfpath = _getpath(nnode, npath)
    if len(opath) > 1:
        pass
    else:
        # `old` is directly under a node: detach it from the node's tags,
        # working on a temp hard link so the data survives untagging.
        ofpath = _tmplink(ofpath)
        for tag in list(onode.tags):
            self.root.untag(ofpath, tag)
    if len(npath) > 1:
        os.rename(ofpath, nfpath)
    else:
        # `new` is directly under a node: apply the node's tags instead of
        # a physical rename, then drop the temporary link.
        for tag in list(nnode.tags):
            self.root.tag(ofpath, tag)
        os.remove(ofpath)  # fixed: os.rm does not exist
def import_zip(link, out_dir=None):
    """Download a zip from *link* and import its members into *out_dir*.

    Each member is stored under its MD5 hash (deduplicated through
    dbase.check_md5) and registered in the database. Returns 0 on
    success, otherwise the failing HTTP status code.
    """
    if not out_dir:
        out_dir = os.path.expanduser(f'{link.split("/")[-1][:-4]}'[:30] + '/')
    resp = r.get(link, stream=True)
    if resp.status_code == 200:
        # Stream the archive to a temp file named after the link.
        with open(f'.tmp{link.split("/")[-1][:-4]}'[:30], 'wb') as f:
            for i in resp.iter_content(5120):
                f.write(i)
        with zipfile.ZipFile(f'.tmp{link.split("/")[-1][:-4]}'[:30]) as zipf:
            pathlib.Path(out_dir).mkdir(parents=True, exist_ok=True)
            files = zipf.infolist()
            for i in files:
                uzipedfile = zipf.read(i)
                hash = hashlib.md5(uzipedfile).hexdigest()
                file_ext = i.filename.split('.')[-1]
                # Only store content we haven't seen before.
                if dbase.check_md5(hash):
                    with open(out_dir + hash + '.' + file_ext, 'wb') as f:
                        f.write(uzipedfile)
                    dbase.insert_content(out_dir + hash + '.' + file_ext,
                                         file_ext, hash)
        log.info(f'{link} imported')
        os.remove(f'.tmp{link.split("/")[-1][:-4]}'[:30])  # fixed: os.rm does not exist
        return 0
    else:
        return resp.status_code
def test_01_setrevisionfile(self):
    """Save revision number in a file."""
    # Make sure the revision file doesn't exist.
    revisionfile = os.path.join(self.repopath, 'revision-flag.txt')
    if os.path.isfile(revisionfile):
        os.remove(revisionfile)  # fixed: os.rm does not exist
    # Define the hook configuration.
    self.writeConf(testconf, '''\
<?xml version="1.0"?>
<Actions>
  <SetRevisionFile>{0}</SetRevisionFile>
</Actions>
'''.format(revisionfile))
    # Call the script that uses the configuration.
    p = self.callHook(testhook, self.repopath, self.revision)
    p.wait()
    # Check for the default exit code.
    self.assertEqual(
        p.returncode, 0,
        'Exit code not correct: {0}'.format(p.returncode))
    # Verify that the revision file now exists.
    self.assertTrue(os.path.isfile(revisionfile),
                    'Revision file not found: ' + revisionfile)
    # Check the revision file contents.
    contents = open(revisionfile).read().rstrip()
    self.assertEqual(
        contents, self.revision,
        'Revision file contents not correct: {0}'.format(contents))
def sed(pattern, replacement, path, g=True):
    """In-place regex replace over *path*, like `sed s/pattern/replacement/g`.

    NOTE(review): the *g* flag is accepted but ignored — re.sub already
    replaces every occurrence per line; confirm whether single-replace
    semantics were ever intended.
    """
    # fixed: NamedTemporaryFile defaults to binary mode, so writing the str
    # from re.sub raised TypeError; open the temp file in text mode.
    tmp_file = NamedTemporaryFile(mode='w', delete=False)
    with open(path, 'r') as fd:
        for line in fd:
            tmp_file.write(sub(pattern, replacement, line))
    tmp_file.close()
    rm(path)
    move(tmp_file.name, path)
def reset(cls):
    """Reset the class counter and delete the log file if one exists."""
    cls.count = 0
    try:
        from os import remove as rm
        rm('log.txt')
        print('log.txt deleted.')
    except OSError:  # fixed: narrowed bare except; only a missing/locked file is expected
        print('No log file found to delete.')
def test_write_file_numpy_rowmajor():
    """Round-trip a row-major numpy load through DataMatrix.write_file."""
    inputfile = "sample_data.csv"
    stamp = datetime.now().strftime("%y%m%d-%H%M%S")
    testfile = "/tmp/test_write_file_numpy_" + stamp + inputfile
    matrix = DataMatrix(nh.load_row_major_numpy("sample_data.csv"))
    matrix.write_file(testfile)
    assert(filecmp.cmp(inputfile, testfile, shallow=False))
    rm(testfile)
def tearDown(self):
    """Delete every tracked temp file that still exists; skip missing ones."""
    for temp_path in self._temp_files:
        try:
            # Opening first probes existence/readability; a missing file
            # raises IOError and is skipped.
            with open(temp_path, 'rb'):
                pass
            rm(temp_path)
        except IOError:
            continue
def problem6(self, s):
    """Test the SentenceGenerator class. 20 points."""
    # Write four throwaway training corpora of increasing complexity.
    with open("__test1__.txt", 'w') as f:
        f.write("a b c d e f g h i j k l m n o p q r s t u v w x y z")
    with open("__test2__.txt", 'w') as f:
        f.write("I am Sam Sam I am.\n"
                "Do you like green eggs and ham?\n"
                "I do not like them, Sam I am.\n"
                "I do not like green eggs and ham.")
    with open("__test3__.txt", 'w') as f:
        f.write("Love is patient Love is kind\n"
                "It does not envy It does not boast\n"
                "It is not proud It is not rude\n"
                "It is not self-seeking It is not easily angered\n"
                "It keeps no record of wrongs\n"
                "Love does not delight in evil\n"
                "but rejoices with the truth\n"
                "It always protects always trusts\n"
                "always hopes always perseveres\n"
                "Love never fails")
    with open("__test4__.txt", 'w') as f:
        f.write("the quick brown fox jumped over the lazy dog\n"
                "the slow brown snake slithered over the brown patch\n"
                "the slow green snake slithered under the green leaves\n"
                "the quick green fox doesnt exist since foxes are orange\n"
                "the quick orange fox out foxed the fox in disguise\n"
                "the sneaky orange duck quacked at the brown fox\n"
                "what does the fox say\n"
                "what doesnt the fox say")

    def test_sentences(filename, num_sentences):
        # Print the corpus, one reference sentence from the instructor's
        # SentenceGenerator, then the student's sentences; the human grader
        # awards up to 5 points via self._grade.
        filename = "__{}__.txt".format(filename)
        print("\n{}\nSource file:".format('-'*80))
        with open(filename, 'r') as training_set:
            print(training_set.read().strip())
        print("\nCorrect example sentence:")
        correct = SentenceGenerator(filename)
        print(correct.babble())
        print("\nStudent sentences:")
        student = s.SentenceGenerator(filename)
        for _ in xrange(num_sentences):
            print(student.babble())
        return self._grade(5)

    points = test_sentences("test1", 2)
    points += test_sentences("test2", 3)
    points += test_sentences("test3", 5)
    points += test_sentences("test4", 5)
    # Remove the temporary corpora (tests 1 through 4).
    for i in xrange(1,5):
        rm("__test{}__.txt".format(i))
    return points
def run(self):
    """Sync local files to the cloud container.

    First marks DB rows whose paths already exist in the container as
    non-local, then uploads every remaining local file/directory and
    flips its `local` flag (optionally deleting the local copy).
    """
    from flask import current_app as app
    from utils.cloud import cloudfiles
    from ext.sys.database import db
    from explorer import File
    session = db.session
    self.root_dir = app.config['ROOT_LOCATION']
    container = cloudfiles.container
    CLOUDFILES = []
    # get online file list from container, those matched from cloud should
    # be marked as not local in db
    marker = ""
    while True:
        batch = container.list_objects(marker=marker)
        if len(batch) > 0:
            CLOUDFILES += batch
            marker = CLOUDFILES[-1]
        else:
            break
    print('Total Files in Cloud: %s' % str(len(CLOUDFILES)))
    c_files = ["/%s" % f for f in CLOUDFILES]
    res = session.query(File).filter(File.path.in_(c_files)) \
        .filter(File.local == True) \
        .update({'local': False}, synchronize_session=False)
    print('%s Files Were set as Non Local' % str(res))
    print('-')
    files = session.query(File) \
        .filter(File.local == True) \
        .filter(File.path != '/').all()
    for file in files:
        cloud_file = file.path[1:len(file.path)]
        local_path = self.root_dir + file.path
        try:
            obj = container.create_object(cloud_file)
            if file.dir:
                # Directories become zero-byte marker objects.
                obj.content_type = 'application/directory'
                obj.sync_metadata()
                obj.write()
            else:
                obj.load_from_filename(local_path)
                if self.delete_local:
                    os.remove(local_path)  # fixed: os.rm does not exist
                    print('Removed local file: %s' % local_path)
            file.local = False
            session.add(file)
            session.commit()
            print('Uploaded: %s' % local_path)
        except Exception:  # fixed: narrowed bare except
            print('Failed To Upload: %s' % local_path)
def problem6(self, s):
    """Test the SentenceGenerator class. 20 points."""
    # Write four throwaway training corpora of increasing complexity.
    with open("__test1__.txt", 'w') as f:
        f.write("a b c d e f g h i j k l m n o p q r s t u v w x y z")
    with open("__test2__.txt", 'w') as f:
        f.write("I am Sam Sam I am.\n"
                "Do you like green eggs and ham?\n"
                "I do not like them, Sam I am.\n"
                "I do not like green eggs and ham.")
    with open("__test3__.txt", 'w') as f:
        f.write("Love is patient Love is kind\n"
                "It does not envy It does not boast\n"
                "It is not proud It is not rude\n"
                "It is not self-seeking It is not easily angered\n"
                "It keeps no record of wrongs\n"
                "Love does not delight in evil\n"
                "but rejoices with the truth\n"
                "It always protects always trusts\n"
                "always hopes always perseveres\n"
                "Love never fails")
    with open("__test4__.txt", 'w') as f:
        f.write("the quick brown fox jumped over the lazy dog\n"
                "the slow brown snake slithered over the brown patch\n"
                "the slow green snake slithered under the green leaves\n"
                "the quick green fox doesnt exist since foxes are orange\n"
                "the quick orange fox out foxed the fox in disguise\n"
                "the sneaky orange duck quacked at the brown fox\n"
                "what does the fox say\n"
                "what doesnt the fox say")

    def test_sentences(filename, num_sentences):
        # Print the corpus, one reference sentence from the instructor's
        # SentenceGenerator, then the student's sentences; the human grader
        # awards up to 5 points via self._grade.
        filename = "__{}__.txt".format(filename)
        print("\n{}\nSource file:".format('-' * 80))
        with open(filename, 'r') as training_set:
            print(training_set.read().strip())
        print("\nCorrect example sentence:")
        correct = SentenceGenerator(filename)
        print(correct.babble())
        print("\nStudent sentences:")
        student = s.SentenceGenerator(filename)
        for _ in xrange(num_sentences):
            print(student.babble())
        return self._grade(5)

    points = test_sentences("test1", 2)
    points += test_sentences("test2", 3)
    points += test_sentences("test3", 5)
    points += test_sentences("test4", 5)
    # Remove the temporary corpora (tests 1 through 4).
    for i in xrange(1, 5):
        rm("__test{}__.txt".format(i))
    return points
def tearDown(self):
    """Remove tracked temp files, silently ignoring ones already gone (errno 2)."""
    for temp_path in self._temp_files:
        try:
            rm(temp_path)
        except OSError as err:
            if err.errno != 2:  # ENOENT is expected; report anything else
                print(str(err), file=sys.stderr)
def clean_themes_folder():
    """Delete stale 'Colorized-' themes, keeping whichever one is active."""
    theme_folder = theme.dirname
    current_theme = theme.name
    for candidate in glob(theme_folder + '/*'):
        if not basename(candidate).startswith('Colorized-'):
            continue
        if basename(candidate) == current_theme:
            continue  # never delete the theme currently in use
        if exists(candidate):
            rm(candidate)
def kaggleCompetitionDownload(dataSetName, pathJson='./kaggle/kaggle.json'): import os import os.path from os import path import shutil from kaggle.api.kaggle_api_extended import KaggleApi print("kaggleCompetitionDownload - Downloading Kaggle Dataset: ", dataSetName) !pip install -q kaggle if not os.path.isdir("./iLUtil"): print("\tErr: iLUtil not exist") print("\tErr: Pls clone from github lin3372/iLUtil.git") print("\t\t!git clone https://github.com/lin3372/iLUtil.git") # sanity check -- iLUtil (Ivan's utility functions) print("kaggleDataSetDownload: ", "Current Directory = ", os.getcwd()) if not os.path.isdir("./iLUtil"): print("\tErr: iLUtil not exist") print("\tErr: Pls clone from github lin3372/iLUtil.git") print("\t\t!git clone https://github.com/lin3372/iLUtil.git") # sanity check -- remove old /root/.kaggle if it still exist, and copy the new kaggle.json if path.exists("/root/.kaggle/"): if os.path.isfile("/root/.kaggle"): print("\tremoving file /root/.kaggle/") os.rm("/root/.kaggle") elif os.path.isdir("/root/.kaggle"): print("\tremoving dir /root/.kaggle/") shutil.rmtree("/root/.kaggle", ignore_errors=True) print("\tcreating /root/.kaggle/") !mkdir /root/.kaggle # sanity check -- remove old ./dataSetKaggle if it still exist if path.exists("./dataSetKaggle"): if os.path.isfile("./dataSetKaggle"): print("\tremoving file ./dataSetKaggle/") os.rm("./dataSetKaggle") elif os.path.isdir("./dataSetKaggle/"): print("\tremoving dir ./dataSetKaggle/") shutil.rmtree("./dataSetKaggle/", ignore_errors=True) #os.rmdir("./dataSetKaggle/") print("\tcreating ./dataSetKaggle/") !mkdir ./dataSetKaggle print("\tcopying kaggle.json from ./iLUtil/kaggle/kaggle.json to /root/.kaggle/") !cp ../iLUtil/kaggle/kaggle.json /root/.kaggle/ !chmod 600 /root/.kaggle/kaggle.json api = KaggleApi() api.authenticate() api.competition_download_files(dataSetName) !ls -l ./dataSetKaggle/ print("\tFinished Downloading Kaggle DataSet - ", dataSetName)
def _install_predis(stdout=None):
    """Callback to install Predis for the Redis module"""
    http.dl(PREDIS_URI, 'predis.zip')
    output = unzip('predis.zip')
    if output and stdout:
        stdout.write(str(output).strip() + '\n')
    os.rename('nrk-predis-d02e2e1', 'predis')
    rm('predis.zip')  # archive no longer needed once extracted
def setUp(self):
    """Restore the working file from its snapshot and clear stale lock/txt files."""
    cp(self.snapshot, self.file)
    for stale in (self.lockfile, self.txtfile):
        try:
            rm(stale)
        except:
            pass
def _install_fancybox(stdout=None):
    """Callback to install necessary library for the Fancybox module"""
    http.dl(FANCYBOX_URI, 'fancybox.zip')
    output = unzip('fancybox.zip')
    if output and stdout:
        stdout.write(str(output).strip() + '\n')
    os.rename('fancyapps-fancyBox-18d1712', 'fancybox')
    rm('fancybox.zip')  # archive no longer needed once extracted
def setUp(self):
    """Clear any stale device record for the fake MAC, then start the server."""
    # Setup our variables
    self.mac_address = "DE:AD:BE:EF:CA:DE"  # Our fake mac address
    deviceFile = config.working_dir + '/devices/' + self.mac_address
    if(os.path.exists(deviceFile)):
        os.remove(deviceFile)  # fixed: os.rm does not exist
    self.thrd = main.fork_main()  # Start the server
def File_Selection(self,filename,hopper_date,hopper_time):
    # Pipeline: fixed-width Hopper file -> CSV -> filtered rows -> Hopper.csv
    # with date/time columns appended. Intermediate files are deleted.
    #Converts the Hopper.txt fixed width file to out.csv delimited file
    IN_FILE = filename
    OUT_FILE = filename + ".csv"
    # (start, width) pairs describing each fixed-width column.
    RANGES = ((0, 10), (18, 19), (45, 18), (66, 19), (88, 19), (114, 19),
              (136, 19), (157, 19), (179, 19), (203, 19), (223, 10))
    try:
        rfp = open(IN_FILE, 'r', encoding="UTF-8")
    except IOError:
        print("Could not read from", IN_FILE)
        raise SystemExit
    try:
        wfp = open(OUT_FILE, 'w', encoding="UTF-8")
    except IOError:
        print("Could not write to", OUT_FILE)
        raise SystemExit
    # Slice each line into its fixed-width fields and emit CSV.
    for line in rfp:
        parts = []
        for rng in RANGES:
            parts.append(line[rng[0]:rng[0] + rng[1]].strip())
        wfp.write(",".join(parts) + "\n")
    rfp.close()
    wfp.close()
    rm(IN_FILE)  # removes the primary file
    # Removes the unnecessary data from the file
    # (keep only lines carrying the 'M1GMG' record marker).
    file_path = OUT_FILE
    with open(file_path, "r", encoding="UTF-8") as file:
        lines = file.readlines()
    with open(file_path, "w", encoding="UTF-8") as file:
        check = "M1GMG"
        for line in lines:
            if check in line:
                file.write(line)
    # Date and time column are added to the hopper file
    ReadFile = OUT_FILE
    DateStamp = [hopper_date]
    TimeStamp = [hopper_time]
    destination_folder = path.dirname(ReadFile)
    destination_file = destination_folder + "/Hopper.csv"
    with open(ReadFile, "r+", encoding="UTF-8") as f_in, open(destination_file, 'w', encoding="UTF-8") as f_out:
        reader = csv.reader(f_in, delimiter=',')
        FileWriter = csv.writer(f_out)
        for line in reader:
            FileWriter.writerow(line + DateStamp + TimeStamp)
    rm(OUT_FILE) #removes the intermediate file
def install(self, spec, prefix):
    # Spack install: build FACE with FoBiS.py for the selected compiler
    # (GCC or Intel) and requested variants (+shared and/or +static).
    fobis = which('FoBiS.py')
    command = ["build", "-ch", "-dbld", "."]
    compiler = ["-fc", "${FC}", "-compiler"]
    cflags = "-c"
    lflags = ""
    cflags_shared = " "
    lflags_shared = " "
    target = ["-t", "face.f90"]
    mkdirp(prefix.lib, prefix.include)
    # Drop the repo's own build recipe; all flags are passed explicitly.
    rm("fobos")
    if spec.satisfies('%gcc'):
        compiler.append("gnu")
        cflags += " -Ofast -frealloc-lhs -std=f2008 -fall-intrinsics"
        lflags += " -Ofast"
        cflags_shared += " -fPIC"
        lflags_shared += " -shared "
    elif spec.satisfies('%intel'):
        compiler.append("intel")
        cflags += " -fast -assume realloc_lhs -standard-semantics -std08"
        lflags += " -fast"
        cflags_shared += " -fpic"
        lflags_shared += " -shared"
    else:
        raise InstallError("Unsported compiler.")
    if spec.satisfies('+shared'):
        # Versioned soname chain: libface.so -> libface.so.X -> libface.so.X.Y.Z
        libname = 'libface.so'
        soname = libname + '.' + spec.version.up_to(1)
        filename = libname + '.' + spec.version.dotted
        fobis_options = command + compiler + \
            ["-cflags", cflags + cflags_shared] + \
            ["-lflags", lflags + lflags_shared + '-Wl,-soname,' + soname] + \
            ["-mklib", "shared"] + target + \
            ["-o", filename]
        fobis(*fobis_options)
        install(filename, prefix.lib)
        with working_dir(prefix.lib):
            ln = which('ln')
            ln('-s', filename, soname)
            ln('-s', soname, libname)
    if spec.satisfies('+static'):
        fobis_options = command + compiler + ["-cflags", cflags] + \
            ["-lflags", lflags] + ["-mklib", "static"] + \
            target + ["-o", "libface.a"]
        fobis(*fobis_options)
        install('libface.a', prefix.lib)
    # The Fortran module is needed by consumers of either library flavor.
    if spec.satisfies('+shared') or spec.satisfies('+static'):
        install(join_path('mod', 'face.mod'), prefix.include)
def testProgram(self, programName):
    """Evaluate one program: compile it, run its tests, compare against the
    expected solutions, and return the fraction of tests passed.

    Arguments:
        programName: name of the program to evaluate. It must exist both in
            the test file and in the solutions file.
    """
    # Tell the user which program is being tested.
    print("Evaluando: ", programName.split('%')[0])
    # Compile the program; bail out if compilation failed.
    success = self.compileSource(programName)
    if not success:
        return
    # Commands for each test of this program, and their expected outputs.
    cmds = self.getTestsByProgram(programName)
    solutions = self.getSolutionsByProgram(programName)
    possiblePoints = len(cmds)
    pointsObtained = 0
    # Run each test command and compare with its expected solution.
    for cmd, solution in zip(cmds, solutions):
        try:
            raw_output = Tester.runCommand(cmd)
        except UnicodeDecodeError:
            # The command's output could not be decoded; report and give 0.
            print("\n", 'Salida no reconocible')
            return 0.0
        if Tester.assertOutput(raw_output, solution):
            # Output matched: show it and score a point.
            print(cmd, ": \n", raw_output, " Correcto", "\n")
            pointsObtained += 1
        else:
            # Output mismatched: show actual vs expected.
            # fixed: this string literal was broken across a raw newline in
            # the source; reconstructed with an escaped \n.
            print("\n", cmd, ": \n", raw_output,
                  " \n\nIncorrecto. \nEl resultado esperado era: \n",
                  solution, "\n")
            # Optionally show a detailed diff.
            if self.showDiff:
                Tester.showDifferences(raw_output, solution)
    # Report how many tests passed for this program.
    print("\nCorrectos en", programName.split('%')[0],
          ": ", pointsObtained, '/', possiblePoints)
    # Program separator.
    print("********************************************************")
    # Keep the directory clean: delete the generated executable.
    try:
        rm(self.testDir + programName.split('%')[0] + '.x')
    except FileNotFoundError:
        pass
    # TODO: deliver this grade somewhere.
    return (pointsObtained * 1.0) / (possiblePoints * 1.0)
def run(self, edit):
    """Revert to the uncolorized theme, delete colorized artifacts, and
    disable dynamic highlighting."""
    if not theme.is_colorized:
        return
    theme_path = theme.abspath
    theme_cache = theme_path + '.cache'
    theme.set(theme.uncolorized_path)
    for stale in (theme_path, theme_cache):
        if exists(stale):
            rm(stale)
    clear_css_regions(self.view)
    user_settings.dynamic_highlight = False
def cleanup():
    """Shut down the job manager, drain the postbox, and remove the UDS socket."""
    job_manager.queue.put(Message(MessageType.DIE))
    jm_thread.join()  # wait for the job manager thread to finish
    while not postbox.empty():
        post_message(postbox.get(), ruds)
    ruds.close()
    rm(common.DMS_UDS_PATH)
    log("Server: Cleaned up")
def editor_prompt(prompt_text, suffix=".txt"):
    """Open the user's editor on a temp file seeded with *prompt_text* and
    return whatever the user typed below the prompt.

    :param prompt_text: text written at the top of the temp file.
    :param suffix: temp-file suffix (affects editor syntax highlighting).
    :returns: file contents after the prompt lines.
    """
    fd, name = tempfile.mkstemp(suffix=suffix)
    try:
        write(fd, (prompt_text + "\n").encode("utf-8"))
        close(fd)
        # NOTE(review): shell string built from config["editor"] — assumed to
        # be trusted local configuration; confirm it is never user-supplied.
        system(config["editor"] + " " + name)
        with open(name, "r") as f:
            # Skip the echoed prompt lines so only the user's input remains.
            for _ in range(prompt_text.count("\n") + 1):
                f.readline()
            # BUG FIX: was named `input`, shadowing the builtin.
            content = f.read()
    finally:
        # BUG FIX: the temp file leaked if the editor or read raised.
        rm(name)
    return content
def mainUno():
    """Create the U0Z/S1 files from the fiscal printer and publish them over
    FTP, retrying recursively on printer or FTP failure.

    Returns True when the files were published (and the local copies removed),
    False otherwise. NOTE(review): if an exception is raised the logged-and-
    swallowed path still returns the last value of ``enviado``.
    """
    libM()
    # Module-level state: timings, file names/paths and retry count.
    global today, tiempoInit, tiempoErr, archivoU0Z, archivoS1, Rs1, Ru0z, reIntento
    slp(tiempoInit)
    try:
        enviado = False
        if (activarIntTFHKA()):
            # Printer reachable.
            logger.info('impresora conectada satfactoriamente ')
            if (cwU0Z() and cwS1()):
                # Both report files were written locally.
                logger.info('Archivo U0Z y S1 creado satifactoriamente')
                if conexionFTP()['estatusCftp']:
                    # FTP server reachable.
                    logger.info('conexiona ftp con el servidor en sincronia ')
                    if (pubU0Z() and pubS1()):
                        # Both files uploaded; close the session and delete
                        # the local copies.
                        # conexionFTP()['ftp'].delete(archivoU0Z)
                        # conexionFTP()['ftp'].delete(archivoS1)
                        # conexionFTP()['ftp'].retrlines('LIST')
                        conexionFTP()['ftp'].quit()
                        logger.info(
                            'archivo publicado al servidor ftp satfactoriamente '
                        )
                        rm(Rs1)
                        rm(Ru0z)
                        enviado = True
                    else:
                        enviado = False
                else:
                    # FTP not in sync: retry up to reIntento times.
                    # NOTE(review): each retry recurses into mainUno(); deep
                    # failures nest calls rather than looping — confirm this
                    # is intended.
                    for i in range(reIntento):
                        if conexionFTP()['ftp']:
                            mainUno()
                        else:
                            logger.warning(
                                "error con el servidor ftp intento " + str(i))
                            slp(tiempoErr)
            # else: logger.warning("Error al escribir U0Z y S1")
        else:
            # Printer not reachable: retry up to reIntento times (recursive).
            for i in range(reIntento):
                if activarIntTFHKA():
                    mainUno()
                else:
                    logger.warning(
                        "Error de impresora verifique conecion inento " + str(i))
    except Exception as e:
        # Best-effort error reporting controlled by the infoERR flag.
        global infoERR
        if infoERR == True:
            logger.warning(formErro)
            logger.warning(str(e))
    return enviado
def _install_jquery_colorpicker(stdout=None):
    """Callback to install necessary library for the module.

    :param stdout: optional stream; when given, unzip output is echoed to it.
    """
    # BUG FIX: exist_ok=True so a re-install does not raise FileExistsError.
    os.makedirs('./colorpicker', exist_ok=True)
    http.dl(JQ_COLOR_PICKER_URI, './colorpicker/colorpicker.zip')
    with pushd('./colorpicker'):
        output = unzip('colorpicker.zip')
        if stdout and output:
            stdout.write(str(output).strip() + '\n')
        # Remove the archive once extracted.
        rm('colorpicker.zip')
def delete_slideshow(mapper, connection, target):
    """ORM delete hook: remove the slideshow file and its resource folder."""
    try:
        slides_file = slideshows.path(str(target.id))
        # Sibling "<id>_files" directory holds the slideshow's resources.
        resources = join(dirname(slides_file), '%d_files' % target.id)
        if exists(resources):
            rmtree(resources)
        rm(slides_file)
    except OSError:
        # Best effort — missing files are not an error during cleanup.
        pass
def pacc_off(self, bdelete=False):
    """Disable polar accumulation and drop xfoil's internal polar.

    The polar file saved on disk is kept unless ``bdelete`` is True.
    """
    if not self.bpacc:
        self.error('PACC is already off')
    self.force_menu('oper')
    issue = self.send
    issue('pacc')    # toggles accumulation off
    self.bpacc = False
    issue('pdel 1')  # delete xfoil's internal polar
    if bdelete:
        rm(self.polar_savefile)
def replace_jp2_with_geotiff(jp2_path, remove_old=False):
    """Will replace a jp2 with a geotiff of the same name. Can remove old jp2.

    :param jp2_path: path to the source .jp2 (extension assumed 4 chars).
    :param remove_old: when True, delete the jp2 after conversion.
    """
    log.info("Preparing to convert {}".format(jp2_path))
    # Swap the 4-character extension for ".tif".
    tif_path = jp2_path[:-4] + ".tif"
    metadata_string = extract_metadata(jp2_path)
    metadata = get_geotransform_from_metadata(metadata_string)
    convert_jp2_to_tiff(jp2_path, tif_path)
    add_projection_and_extent_to_tiff(tif_path, metadata)
    if remove_old:
        # BUG FIX: os.rm does not exist; os.remove is the deletion function.
        os.remove(jp2_path)
def genTable(tbl, rows):
    """Write *rows* (minus their last column) to a temporary CSV with *tbl*'s
    headers, build a new table from it, and remove the CSV.

    :param tbl: source table whose headers (except the last) are reused.
    :param rows: iterable of rows; each row's last element is dropped.
    :returns: the table created from the temporary CSV.
    """
    name = str(randint(0, 1000))
    header = [h.name for h in tbl.headers[:-1]]
    # BUG FIX: newline='' as the csv docs require; without it, Windows
    # output gains a blank line after every row.
    with open(name + '.csv', 'w', newline='') as csvfile:
        writer = csv.writer(csvfile, delimiter=',')
        writer.writerow(header)
        for el in rows:
            writer.writerow(el[:-1])
    new = createTbl([name + '.csv'])
    rm(name + '.csv')
    return new
def test_download_file(ae):
    """Download sample_input.txt via the `ae` client and verify its contents."""
    # first, remove file locally in case it is there from a prior run:
    local_path = os.path.join(HERE, "downloads/sample_input.txt")
    try:
        # BUG FIX: os.rm does not exist — the AttributeError was silently
        # swallowed, so the stale file was never actually removed.
        os.remove(local_path)
    except OSError:
        pass
    rsp = ae.download_file(local_path=local_path,
                           remote_path="sample_input.txt")
    assert rsp.get("status") == "success"
    with open(local_path, "rb") as f:
        # BUG FIX: file is opened in binary mode, so readline() returns
        # bytes; comparing against str literals was always False.
        assert f.readline() == b"1\n"
        assert f.readline() == b"2\n"
        assert f.readline() == b"3"
def test_Protein(self):
    """Protein writes cif/pdb structures and rejects unknown formats."""
    # Type check
    self.assertIsInstance(self.mol, molecule.Protein)
    # Both supported output formats must succeed.
    for out_name, fmt in (('test.cif', 'cif'), ('test.pdb', 'pdb')):
        self.assertTrue(self.mol.write_structure(out_name, ftype=fmt))
    # An unsupported format must raise with a helpful message.
    with self.assertRaises(Exception) as file_format_problem:
        self.assertTrue(self.mol.write_structure('test', ftype='random'))
    self.assertIn('Please provide appropriate "ftype" argument. (cif/pdb).',
                  str(file_format_problem.exception))
    rm('test.cif')
    rm('test.pdb')
    # Iterating the protein must yield at least one item.
    self.assertNotEqual(len(list(self.mol)), 0)
async def backup():
    """Dump all DBs, encrypt the dump, upload it to anonfiles, and post the
    download link plus key/nonce files to the Discord backup channel."""
    date = dt.datetime.now()
    DATE = "{}-{}-{}".format(str(date.day), str(date.month), str(date.year))
    channel = client.get_channel(728749737968271500)  # ID of the "backup" channel
    await channel.send("Backup du {}".format(DATE))
    fp = mdb.dumpAllDB()
    res = cryptBackup(fp)
    backupCryptedFile = open(res[0], "rb")
    keyFile = open(res[1], "rb")
    nonceFile = open(res[2], "rb")
    try:
        # NOTE(review): consider subprocess.run([...], shell=False) instead of
        # building a shell string — the path comes from cryptBackup().
        output = subprocess.getoutput("curl -F \"file=@{}\" https://api.anonfiles.com/upload".format(backupCryptedFile.name))
        # The JSON payload is the last line of curl's output.
        parseN = output.count("\n")
        data = output.split("\n")
        dataF = json.loads(data[parseN])
        await channel.send("** You can download todays backup through this link and the key file to decrypt it! **")
        await channel.send(dataF['data']['file']['url']['full'])
        fileD = discord.File(keyFile, "backupKeyFile{}.k".format(DATE))
        fileN = discord.File(nonceFile, "nonceFile-{}.txt".format(DATE))
        await channel.send(file=fileD)
        await channel.send(file=fileN)
        await channel.send("**=============**")
    finally:
        # BUG FIX: the three handles were never closed (leak; rm() on an open
        # file also fails on Windows). Close them before deleting.
        backupCryptedFile.close()
        keyFile.close()
        nonceFile.close()
    rm(fp)
    rm(nonceFile.name)
    rm(backupCryptedFile.name)
    rm(keyFile.name)
def _install_ckeditor(stdout=None):
    """Callback to install necessary library for the IMCE module.

    :param stdout: optional stream; when given, tar runs verbosely and its
        output is echoed to the stream.
    """
    arg = 'xf'
    if stdout:
        arg += 'v'  # verbose extraction when a stream is supplied
    http.dl(CKEDITOR_URI, 'ckeditor.tar.gz')
    # BUG FIX: the computed flags were ignored — 'xf' was hard-coded, so the
    # verbose 'v' flag never took effect.
    output = tar(arg, 'ckeditor.tar.gz')
    if stdout and output:
        stdout.write(str(output).strip() + '\n')
    rm('ckeditor.tar.gz')
def fetch_file_from_url(url, target_file):
    """Download *url* to *target_file* with wget (3 retries).

    :raises RuntimeError: if the downloaded file is empty.
    """
    try:
        # BUG FIX: os.rm does not exist; os.remove is the deletion function.
        # Narrowed from a bare except so real errors are not swallowed.
        os.remove(target_file)
    except OSError:
        pass
    args = ['wget', '-nv', '-t', '3', '-O', str(target_file), str(url)]
    log.debug('Command to execute: %s', ' '.join(args))
    proc = Popen(args, stdout=PIPE, stderr=PIPE)
    # BUG FIX: communicate() before reading the return code — wait() with
    # PIPE buffers can deadlock once wget's output fills the pipe.
    pipe = proc.communicate()
    retcode = proc.returncode
    log.debug("Return code: %s, stdout: %s", retcode, pipe)
    if os.path.getsize(target_file) == 0:
        raise RuntimeError("Got empty file, something bad happened")