def do_backup(self, args):
    """Back the current directory up to every available destination.

    Copies os.curdir to the local backup directory (created on demand)
    and, when it exists, to the Dropbox directory.  Progress is tracked
    through module globals: TOTAL (estimated byte count), done (bytes
    seen so far) and spinner (the active progress Spinner).
    """
    # Patterns excluded from both the size estimate and the copy itself.
    IGNORE_PATTERNS = ["*.pyc", "tmp", ".git", "contrib", ".xfer", ".backup", ".archive", ".gitignore"]
    global TOTAL
    TOTAL = size_of_dir(os.curdir, ignore=IGNORE_PATTERNS)
    global done
    done = 0  # bytes accounted for so far (updated by progress_update)
    global spinner
    spinner = None

    def progress_update(path, names):
        # Passed to shutil.copytree as its `ignore` callable; abused here
        # to log/track progress.  Must return the patterns to ignore.
        global done, TOTAL
        if self.SETTINGS["verbose"]:
            print("Backing up {}.".format(path))
        for f in names:
            ignore = False
            for s in IGNORE_PATTERNS:
                if fnmatch.fnmatch(f, s):
                    ignore = True
            if not ignore:
                path2add = os.path.join(path, f)
                if not os.path.isdir(path2add):
                    done += os.path.getsize(path2add)
        spinner.next()  # one tick per directory copytree visits
        return IGNORE_PATTERNS

    dest = list()
    if not os.path.exists(BACKUP_DIR_PATH):
        os.makedirs(BACKUP_DIR_PATH)
    dest.append(BACKUP_DIR_PATH)
    if os.path.exists(os.path.join(DROPBOX)):
        dest.append(DROPBOX)
    pprint(dest)
    try:
        for d in dest:
            spinner = Spinner("Backing files up to {} ".format(d))
            # copytree refuses to overwrite, so drop any previous backup.
            if os.path.exists(d):
                shutil.rmtree(d)
            shutil.copytree(os.curdir, d, symlinks=True,
                            ignore=progress_update  # `ignore` can be used to log progress.
                            )
            spinner.finish()
            print(NEWLINE)
    except shutil.Error as e:
        print("Something went wrong!")
        # NOTE(review): exceptions are not iterable in Python 3 — this
        # likely needs `for x in e.args[0]`; confirm before relying on it.
        for x in e:
            print ("Error copying {} to {}: {}".format(*x))
def extract_cdr_data(f):
    """Yield CDR records parsed from the ';'-separated file `f`.

    Each valid line becomes a dict of named fields with `dateTime`
    parsed from fields 3 and 4.  Lines whose timestamp is missing or
    malformed are skipped.  The generator simply returns at EOF:
    `raise StopIteration` inside a generator is a RuntimeError since
    PEP 479 — that was a latent crash and has been fixed.
    """
    spinner = Spinner('Loading %s:' % f)
    with open(f) as fd:
        while 1:
            l = fd.readline()
            spinner.next()
            if not l:
                # EOF.  (The old `if l == '': continue` after this check
                # was unreachable — readline() only returns '' at EOF —
                # and has been removed.)
                return
            t = l.split(';')
            try:
                d = datetime.datetime.strptime(':'.join([t[3], t[4]]), '%Y%m%d:%H%M%S')
            except (IndexError, ValueError):
                # Short or malformed line — skip (was a bare `except`,
                # which also swallowed KeyboardInterrupt).
                continue
            else:
                yield {
                    'cdrType': t[0],
                    'callingPartyNumberKey': t[1],
                    'callingSubscriberImsi': t[2],
                    'dateTime': d,
                    'chargeableDuration': int(t[5]),
                    'exchangeIdentity': t[6],
                    'outgoingRoute': t[7],
                    'incomingRoute': t[8],
                    'cellId1stCellCalling': t[9],
                    'gsmTeleServiceCode': t[10],
                    'cellIdLastCellCalling': t[11],
                    'disconnectingParty': t[12],
                    'callingSubscriberImei': t[13],
                    'tac': t[14],
                    'residentCustomerFlag': t[15],
                    'paymentType': t[16],
                    'contractStatus': t[17],
                    'contractStartingDate': t[18],
                    'country': t[19],
                    'cityPostalCode': t[20],
                    'city': t[21]
                }
def __init__(self, number_users, delta, users_resources, num_periods=None):
    """Store the simulation parameters and choose a progress reporter.

    num_periods: None -> a silent dummy generator; 0 -> an open-ended
    Spinner; otherwise a Bar bounded at num_periods with an ETA.
    """
    self._reputation = None
    self.number_users = number_users
    self.delta = delta
    self.users_resources = users_resources
    if num_periods is None:
        # No period count supplied: use a no-op generator so callers can
        # still drive `self.progress` uniformly.
        def dummy_progress():
            while True:
                yield
        self.progress = dummy_progress()
    elif num_periods == 0:
        self.progress = Spinner('Processing ')
    else:
        self.progress = Bar('Processing %(eta_td)s', max=num_periods)
def __init__(self, *args, outfile="commands.log", label="Processing", spinner=True, final='done'):
    """Prepare a command runner.

    *args are joined into the command line; `spinner` selects between a
    real progress Spinner and a silent BlankSpinner stand-in.
    """
    self.show_spinner = spinner
    self.outfile = outfile
    self.args = " ".join(args)
    self.final = final
    self.label = label
    self.proc = None
    # Real spinner when requested, otherwise the no-op placeholder.
    self.spinner = Spinner(f' - {self.label} ... ') if spinner else BlankSpinner()
def test_bar():
    """Visually exercise Spinner with clock-face phases for ~2 seconds."""
    from progress.spinner import Spinner
    # Swap the default ASCII phases for clock emoji.
    Spinner.phases = ['🕐', '🕑', '🕒', '🕓', '🕔', '🕕', '🕖', '🕗', '🕘', '🕙', '🕚', '🕛']
    _status_bar = Spinner("Downloading.. ", end="www")
    for _tick in range(100):
        time.sleep(0.02)
        _status_bar.next()
    _status_bar.message  # bare attribute read — no visible effect
    _status_bar.finish()
def spin_that(q):
    """Animate a stderr spinner (up to 40 ticks / ~20 s) while waiting
    for a truthy value on queue `q`, then print ' done'.

    q: a queue.Queue polled with a 0.5 s timeout per tick; a truthy item
    stops the spinner early.
    """
    import queue
    b = Spinner('Decrypting data with the device ', file=sys.stderr)
    for i in range(40):
        b.next()
        try:
            if q.get(block=True, timeout=0.5):
                break
        except queue.Empty:
            # Fix: was a bare `except`, which also swallowed
            # KeyboardInterrupt/SystemExit; only a poll timeout should
            # keep the spinner going.
            pass
    b.finish()
    sys.stderr.write(' done\n')
def fetch_articles(self, output_progress=False):
    """Page through all unread Pocket articles since the last fetch,
    sort the index, persist it, and record the new fetch timestamp.

    output_progress: when True, animate a Spinner while paging.
    Raises the mapped exception from _check_exception on API errors.
    """
    spinner = None
    if output_progress:
        spinner = Spinner('Loading articles ')
    articles_index = []
    last_fetch = self._configs.get('last_fetch')
    offset = 0
    count = 20
    while (True):
        try:
            articles = self._pocket.retrieve(state='unread', count=count, offset=offset, since=last_fetch)
        except PocketException as e:
            # Fix: spinner is None unless output_progress was set, so the
            # old unconditional spinner.finish() raised AttributeError
            # here, masking the real API error.
            if spinner:
                spinner.finish()
            raise_from(self._check_exception(e), e)
        if not articles['list']:
            break
        articles_index.extend(self._get_articles_index(articles))
        offset += count
        if spinner:
            spinner.next()
    if spinner:
        spinner.finish()
    sort_field = self._configs.get('sort_field')
    if not sort_field:
        sort_field = 'reading_time'
    articles_index = sorted(articles_index, key=itemgetter(sort_field))
    self._storage.write(articles_index)
    self._configs.set('last_fetch', self._get_timestamp(datetime.now()))
    self._configs.write()
def crop_grains(input, output, x, y):
    """Select the grain containing pixel (x, y) of image `input` and
    write the selection as a PNG (black treated as transparent) to
    `output`.

    Shares state with find_boundary_initial / convert_array through the
    module globals image_rgb, height, width, coords and spinner.
    """
    print("[", end='')
    printy("🔥[oB]MineralView@", end='')
    print("]", end='')
    printy(" [g]selecting grain:@ [wB]" + input + "@")
    global spinner
    spinner = Spinner('Selecting from [' + str(x) + ", " + str(y) + "]")
    # find_boundary_initial appears to recurse per pixel (flood fill);
    # raise the limit so large grains don't overflow the default depth.
    sys.setrecursionlimit(10**6)
    image = Image.open(input)
    global image_rgb
    image_rgb = image.convert("RGB")
    global height
    height = image.height
    global width
    width = image.width
    global coords
    # (width+2) x (height+2) grid, padded one cell per side; presumably a
    # visited/mask grid for the fill — confirm against find_boundary_initial.
    coords = [None] * (width + 2)
    for i in range(width + 2):
        coords[i] = [True] * (height + 2)
    find_boundary_initial(x, y)
    # export to png output
    p = convert_array(width, height)
    #print(p.__str__())
    f = open(output, 'wb')
    w = png.Writer(width, height, greyscale=False, transparent=(0, 0, 0))
    w.write(f, p)
    print('', end="\r")
    print("[", end='')
    printy("🔥[oB]MineralView@", end='')
    print("]", end='')
    printy(" [g]outputted grain:@ [wB]" + output + "@")
def unzip_files(path, folders=None, files=None):
    """Extract members from the zip archive at `path`.

    folders: directory prefixes — every member under any of them is
             extracted (defaults to none).
    files:   explicit member names to extract (defaults to none).

    Fix: the defaults were mutable list literals (shared across calls);
    they are now None sentinels with identical semantics.
    """
    if folders is None:
        folders = []
    if files is None:
        files = []
    contents = zipfile.ZipFile(path)
    # Unzip files from jar
    spin = Spinner("Unzipping files...")
    for f in contents.namelist():
        spin.next()
        for dir in folders:
            if f.startswith(dir):
                contents.extract(f)
    for f in files:
        contents.extract(f)
    spin.finish()
def fetch_articles(self, output_progress=False):
    """Page through all unread Pocket articles since the last fetch,
    sort the index, persist it, and record the new fetch timestamp.

    output_progress: when True, animate a Spinner while paging.
    Raises the mapped exception from _check_exception on API errors.
    """
    spinner = None
    if output_progress:
        spinner = Spinner('Loading articles ')
    articles_index = []
    last_fetch = self._configs.get('last_fetch')
    offset = 0
    count = 20
    while(True):
        try:
            articles = self._pocket.retrieve(
                state='unread',
                count=count,
                offset=offset,
                since=last_fetch
            )
        except PocketException as e:
            # Fix: spinner is None unless output_progress was set, so the
            # old unconditional spinner.finish() raised AttributeError
            # here, masking the real API error.
            if spinner:
                spinner.finish()
            raise_from(self._check_exception(e), e)
        if not articles['list']:
            break
        articles_index.extend(self._get_articles_index(articles))
        offset += count
        if spinner:
            spinner.next()
    if spinner:
        spinner.finish()
    sort_field = self._configs.get('sort_field')
    if not sort_field:
        sort_field = 'reading_time'
    articles_index = sorted(articles_index, key=itemgetter(sort_field))
    self._storage.write(articles_index)
    self._configs.set('last_fetch', self._get_timestamp(datetime.now()))
    self._configs.write()
def get_stars_forks_data(org, repo):
    """Collect the full star and fork history of org/repo via the GitHub
    GraphQL API, paging both connections (100 items each) in lockstep.

    Returns (stars, forks): two lists of {'owner','repo','createdAt'}
    dicts, one entry per star / per fork.
    """
    stars = []
    stars_has_next = True
    forks = []
    forks_has_next = True
    stars_cursor = None
    forks_cursor = None
    spinner = Spinner('Fetching Stars and Forks')
    # Keep requesting while either connection still reports another page.
    while stars_has_next or forks_has_next:
        spinner.next()
        variables = {
            "org": org,
            "repoName": repo,
            "size": 100,
            "starsCursor": stars_cursor,
            "forksCursor": forks_cursor,
        }
        query = get_data_query('graphql/stars_forks_data.gql')
        # Respect the API quota before each request.
        rate_limit = get_rate_limit(client)
        handle_rate_limit(rate_limit)
        results = json.loads(client.execute(query, variables))
        if results['data'] and results['data']['repository']['stargazers']['edges']:
            for edge in results['data']['repository']['stargazers']['edges']:
                stars.append({
                    'owner': org,
                    'repo': repo,
                    'createdAt': edge['starredAt'],
                })
            # NOTE(review): the hasNextPage/endCursor updates sit inside
            # the edges-non-empty check, so an empty page would never
            # clear the flag — potential infinite loop; confirm.
            stars_has_next = results['data']['repository']['stargazers']['pageInfo']['hasNextPage']
            stars_cursor = results['data']['repository']['stargazers']['pageInfo']['endCursor']
        if results['data'] and results['data']['repository']['forks']['edges']:
            nodes = [edge['node'] for edge in results['data']['repository']['forks']['edges']]
            for node in nodes:
                forks.append({
                    'owner': org,
                    'repo': repo,
                    'createdAt': node['createdAt']
                })
            forks_has_next = results['data']['repository']['forks']['pageInfo']['hasNextPage']
            forks_cursor = results['data']['repository']['forks']['pageInfo']['endCursor']
    spinner.finish()
    return stars, forks
def create_backup(remote_path, local_path, client, sftps):
    """Mirror `remote_path` into a timestamped backup folder under
    `local_path`, copying files in parallel over the supplied SFTP
    channels (one worker thread per channel).

    client: SSH client wrapper used for the directory-walking channel.
    sftps:  pre-opened SFTP sessions; their count sets the thread count.
    """
    sftp = client.client.open_sftp()
    print("Collecting paths to copy")
    spinner = Spinner('Collecting...')
    remote_paths = sftp_walk(sftp, remote_path, spinner)
    print()
    backup_path = os.path.join(local_path, 'backup-' + str(datetime.datetime.now()))
    os.mkdir(backup_path)
    # Length of the remote prefix; used to re-root each path locally.
    index = len(remote_path)
    print("Creating local directories for file transfer")
    ## Create all local directories necessary
    local_paths = []
    for path in remote_paths:
        local = backup_path + path[index:]
        create_necessary_subfolders(local)
        local_paths.append(local)
        #sftp.get(path, local)
    print('Copying files over')
    ## Create worker threads
    # first check how many channels are allowed
    THREADS = len(sftps)
    print('Opening {0} channels'.format(THREADS))
    # set up the workers: each gets one contiguous slice of both lists
    # (np.array_split keeps remote/local slices aligned).
    chunked_remote_paths = np.array_split(remote_paths, THREADS)
    chunked_local_paths = np.array_split(local_paths, THREADS)
    workers = []
    for _remote_paths, _local_paths, sftp in zip(chunked_remote_paths, chunked_local_paths, sftps):
        workers.append(
            Thread(target=copy_worker, args=(client, sftp, _remote_paths, _local_paths)))
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
def getFilesFromFolder(folder, types=[".mp3"]):
    """Walk `folder`, feeding every file to filter_duplicates with its
    extension, path and size; returns the accumulated fileList.

    types: extensions of interest, forwarded to filter_duplicates.
    (The mutable default is kept for interface compatibility; it is
    never mutated here.)
    """
    fileList = []
    iterations = 0
    spinner = Spinner()
    if os.path.isdir(folder):
        for root, _, files in os.walk(folder):
            if len(files) == 0:
                # Fix: this was `break`, which aborted the entire walk at
                # the first file-less directory and silently skipped the
                # rest of the tree; only skip that directory.
                continue
            for file in files:
                iterations += 1
                spinner.message = "Analysing folder: '{}' - Files: {} - Duplicates: {} - ".format(
                    folder, iterations, DUPLICATES.total)
                spinner.next()
                path = os.path.join(root, file)
                size = os.path.getsize(path)
                extension = os.path.splitext(file)[-1]
                filter_duplicates(extension, types, path, fileList, size)
        spinner.clearln()
    return fileList
def get(self, url, fname=False):
    '''Download file from url using requests library.

    Streams the response to `fname` (defaults to the URL's last path
    segment); shows a Bar when Content-Length is known, otherwise a
    Spinner.  Returns the file name written.
    '''
    r = requests.get(url, stream=True, verify=False)
    # Fix: the Content-Length header may be absent (chunked responses);
    # subscripting raised KeyError before.  .get() returns None instead,
    # which the `if size` branch below already handles.
    size = r.headers.get('content-length')
    if not fname:
        fname = url.split('/')[-1]
    if size:
        p = Bar(fname, max=int(size))
    else:
        p = Spinner(fname)
    with open(fname, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024 * 50):
            if chunk:  # filter out keep-alive new chunks
                p.next(len(chunk))
                f.write(chunk)
    p.finish()
    return fname
def clean(self, password : str):
    """Run `apt-get autoremove` to clean up unused packages, animating a
    cosmetic spinner around the subprocess call.

    password: the user's sudo password, piped to the subprocess stdin.
    """
    try:
        install_progress = Spinner(message='Cleaning Up Packages ')
        # Cosmetic warm-up ticks before launching the command.
        for _ in range(1, 75):
            time.sleep(0.007)
            install_progress.next()
        # NOTE(review): sudo is invoked without -S here, so the password
        # written to stdin may never be consumed — works only when sudo
        # credentials are already cached; confirm.
        proc = Popen('sudo apt-get -y autoremove'.split(), stdin=PIPE, stdout=PIPE, stderr=PIPE)
        proc.communicate(password.encode())
        # Cosmetic wind-down ticks while the result settles.
        for _ in range(1, 26):
            time.sleep(0.007)
            install_progress.next()
        click.echo('\n')
        click.echo(click.style('🎉 Successfully Cleaned Turbocharge! 🎉', fg='green'))
    except subprocess.CalledProcessError as e:
        click.echo(e.output)
        click.echo('An Error Occurred During Installation...', err = True)
def die():
    """Tear down the UI: clear the `alive` flag, animate a short exit
    spinner, clear the screen, reset the terminal on Linux, and exit 0.
    """
    global alive
    alive = False
    print("\n[+] Cleaning up...")
    time.sleep(1)
    spinner = Spinner("Exiting...")
    for _ in range(10):
        spinner.next()
        time.sleep(0.1)
    spinner.finish()
    clear()
    if linux:
        os.system("reset")
    # Fix: the trailing print("[+] Exited Successfully") and os._exit(1)
    # after this call were unreachable dead code and have been removed.
    sys.exit(0)
def program(self):
    """Run the simulation `self.maximum` times: announce the chain's
    starting state, then step it (via switching()) until it reaches the
    configured destination, spinning while it runs.
    """
    if markov.info_state == "rational":
        conversation = " He must be really well-informed :-o"
    else:
        conversation = " I hope he makes a good decision..."
    for i in range(0, self.maximum):
        time.sleep(5)
        print("The starting state is " + markov.info_state + ", Master!" + conversation)
        program_spinner = Spinner("Simulating, Master!")
        while True:
            switching()  # advance the chain one transition
            program_spinner.next()
            if choice.current == build_up.destination:
                break
        program_spinner.finish()
def gen_samples_with_prob(self, samples):
    """Draw `samples` worlds with the generator, dump them to CSV, then
    reload each row as [world-values, row-dict, sampling-probability].
    """
    self.generator.drawSamples(samples)
    csv_file = self.path + 'samples.csv'
    self.generator.toCSV(csv_file)
    loaded = []
    # Re-read the CSV the generator just wrote and annotate each sample
    # with its probability.
    with open(csv_file, 'r') as handle:
        reader = DictReader(handle)
        progress = Spinner("Loading samples...")
        for record in reader:
            row_dict = dict(record)
            world = [int(v) for v in row_dict.values()]
            loaded.append([world, row_dict, self.get_sampling_prob(row_dict)])
            progress.next()
        progress.finish()
    return loaded
def banner():
    """Show the intro screen: spin a loader for ~5 s, clear the
    terminal, then print the TarantulaV2 banner, credits and links.
    """
    init()  # initialize terminal colors (presumably colorama — confirm)
    i = 0
    load = Spinner()
    # Five one-second ticks before revealing the banner.
    while (i < 5):
        i = i + 1
        time.sleep(1)
        load.next()
    load.finish()
    os.system("clear")
    print(
        colored(
            """ ---------------------------------------------------------- ████████╗░█████╗░██████╗░░█████╗░███╗░░██╗████████╗██╗░░░██╗██╗░░░░░░█████╗░ ╚══██╔══╝██╔══██╗██╔══██╗██╔══██╗████╗░██║╚══██╔══╝██║░░░██║██║░░░░░██╔══██╗ ░░░██║░░░███████║██████╔╝███████║██╔██╗██║░░░██║░░░██║░░░██║██║░░░░░███████║ ░░░██║░░░██╔══██║██╔══██╗██╔══██║██║╚████║░░░██║░░░██║░░░██║██║░░░░░██╔══██║ ░░░██║░░░██║░░██║██║░░██║██║░░██║██║░╚███║░░░██║░░░╚██████╔╝███████╗██║░░██║ ░░░╚═╝░░░╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░╚══╝░░░╚═╝░░░░╚═════╝░╚══════╝╚═╝░░╚═╝ ██╗░░░██╗ ██████╗░░░░░░███╗░░ ██║░░░██║ ╚════██╗░░░░████║░░ ╚██╗░██╔╝ ░░███╔═╝░░░██╔██║░░ ░╚████╔╝░ ██╔══╝░░░░░╚═╝██║░░ ░░╚██╔╝░░ ███████╗██╗███████╗ ░░░╚═╝░░░ ╚══════╝╚═╝╚══════╝ ---------------------------------------------------------- Version:2.1 Dipnot=Test amaçlı yazılmış olup tüm sorumluluk kullanıcıya aittir. ---------------------------------------------------------- """,
            "cyan"))
    print(
        colored(
            """ ######################### # Created by Rei-ken # #########################""",
            "blue"))
    print(
        colored(
            """ Github : https://github.com/Rei-ken/ Youtube: https://youtube.com/channel/UC0huPBEXz8EW8SekJWVE_JQ Discord: https://discord.gg/fMWyY5b Web : https://reiken.online/ """,
            "magenta"))
def download(url, dst):
    """Stream `url` into the file `dst`, showing a Bar when the size is
    known and a Spinner otherwise.

    The request is retried until a Content-Length header is obtained
    (note: this loops forever against servers that never send one —
    preserved behavior).
    """
    r = requests.get(url, stream=True)
    size = None
    while size is None:
        try:
            size = r.headers['content-length']
        except KeyError:
            # Fix: was a bare `except`, which also swallowed
            # KeyboardInterrupt; only a missing header should retry.
            r = requests.get(url, stream=True)
    if size:
        p = Bar('Downloading...', max=int(size))
    else:
        p = Spinner('Downloading...')
    with open(dst, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024 * 50):
            if chunk:  # filter out keep-alive new chunks
                p.next(len(chunk))
                f.write(chunk)
    p.finish()
def getEntropy(self):
    """Return the Shannon entropy (base 2) of the network's joint
    distribution, enumerated over all 2^n binary node assignments.

    Exponential in the node count — practical only for small networks.
    """
    cNodes = len(self.structure[0])
    samples = list(itertools.product([1, 0], repeat=cNodes))
    # Fix: the accumulator was named `sum`, shadowing the builtin.
    total = 0.00
    print(len(samples))
    spinner = Spinner("Calculating entropy...")
    for sample in samples:
        evidence = {i: sample[i] for i in range(0, len(sample))}
        prSample = self.get_sampling_prob(evidence)
        if prSample != 0:
            term = prSample * math.log2(prSample)
        else:
            term = 0  # lim p->0 of p*log2(p) is 0
        total += term
        spinner.next()
    spinner.finish()
    return -total
def uninstall(self, script : str, password : str, package_name : str):
    """Run `script` (an apt/snap removal command line) to uninstall
    `package_name`, piping `password` to sudo and animating a spinner.

    script:       full command line, split on whitespace for Popen.
    password:     sudo password, written to the child's stdin.
    package_name: human-readable name used in the progress/success text.
    """
    try:
        installer_progress = Spinner(message=f'Uninstalling {package_name}...', max=100)
        # sudo requires the flag '-S' in order to take input from stdin
        # Cosmetic ticks before the command launches.
        for _ in range(1, 75):
            time.sleep(0.007)
            installer_progress.next()
        proc = Popen(script.split(), stdin=PIPE, stdout=PIPE, stderr=PIPE)
        # Popen only accepts byte-arrays so you must encode the string
        proc.communicate(password.encode())
        # stdoutput = (output)[0].decode('utf-8')
        for _ in range(1, 26):
            time.sleep(0.01)
            installer_progress.next()
        click.echo(click.style(f'\n\n 🎉 Successfully Uninstalled {package_name}! 🎉 \n', fg='green'))
    except subprocess.CalledProcessError as e:
        click.echo(e.output)
        click.echo('An Error Occurred During Installation...', err = True)
def get_expression_data(df, GTEX_DB, GTEX_COLUMNS):
    """Fetch median GTEx expression values (53 tissues) for every gene
    in df.gene_symbol.

    GTEX_DB:      DataFrame with a 'Description' gene-name column and
                  tissue values from column 2 onward.
    GTEX_COLUMNS: column template used both to shape the result and as
                  the fallback row for genes not found exactly once.
    Returns a DataFrame with one row per gene.
    """
    new_df = pd.DataFrame().assign(**GTEX_COLUMNS)
    spinner = Spinner('Adding fetures ')
    for gene_name in df.gene_symbol:
        try:
            spinner.next()
            values = GTEX_DB.query(f'Description == "{gene_name}"').iloc[:, 2:]
            if len(values) != 1:
                # Gene missing or ambiguous in GTEx: use the template so
                # row counts still line up with `df`.
                values = pd.Series(GTEX_COLUMNS)
            # NOTE: DataFrame.append was removed in pandas >= 2.0; switch
            # to pd.concat when upgrading the pandas pin.
            new_df = new_df.append(values, ignore_index=True)
        except Exception:
            # Fix: was a bare `except`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            continue
    spinner.finish()
    print(new_df)
    return new_df
def start_broadcast(self, broadcast_id):
    """Start Instagram live broadcast `broadcast_id`, then block in a
    spinner loop until Ctrl+C, finally ending the broadcast.
    """
    data = json.dumps({'_uuid': self.ig.uuid,
                       '_uid': self.ig.username_id,
                       'should_send_notifications': int(self.sendNotification),
                       '_csrftoken': self.ig.token})
    if self.ig.SendRequest(endpoint='live/' + str(broadcast_id) + '/start/', post=self.ig.generateSignature(data)):
        print('CTRL+C to quit.')
        spinner = Spinner(" - ")
        try:
            # Spin until the user interrupts; the broadcast stays live.
            while True:
                spinner.next()
        except KeyboardInterrupt:
            spinner.finish()
            pass
        except Exception as error:
            print(error)
        self.end_broadcast(broadcast_id)
def Pb10():
    """Demo: drive a progress spinner through 100 ticks (~10 seconds)."""
    from progress.spinner import Spinner
    # Alternative spinner classes are available in progress.spinner:
    # MoonSpinner, PieSpinner, PixelSpinner, LineSpinner.
    import time
    bar = Spinner('进度条10', max=100)  # max value of 100, adjustable
    for _step in range(100):  # iteration count should match max
        bar.next()
        time.sleep(0.1)  # per-tick delay; 0.1–1 s works best
    bar.finish()
def get_aws_images(self, os_version):
    """Return the AMIs whose name matches *os_version*, as a dict
    {image_id: {'name': ..., 'date': ...}} ordered by creation date
    (oldest first).
    """
    ec2 = boto3.client('ec2', self._region)
    # Fix: this local was named `os`, shadowing the os module.
    name_pattern = "*" + os_version + '*'
    filters = [{'Name': 'name', 'Values': [name_pattern]}]
    image_info = {}
    spinner = Spinner(OKGREEN + "Getting images from AWS" + ' ' + ENDC, end=" ")
    aws_images = ec2.describe_images(Filters=filters)
    for image in aws_images['Images']:
        image_info[image['ImageId']] = {
            'name': image['Name'],
            'date': image['CreationDate'],
        }
        spinner.next()
    # Sort images by creation date.  Plain dicts preserve insertion
    # order on Python 3.7+, so the former OrderedDict wrapper was
    # redundant and has been dropped.
    sorted_images = dict(sorted(image_info.items(), key=lambda t: t[1]['date']))
    spinner.finish()
    return sorted_images
def create_csv(result, csv_filename):
    """Write a tab-separated CSV of Vision API results: one row per file
    with its dominant RGB color and up to MAX_RESULTS labels.

    result: iterable of {'file': ..., 'response': <Vision API JSON>}.
    """
    spinner = Spinner(('create CSV file to %s ' % csv_filename))
    with open(csv_filename, 'w') as f:
        writer = csv.writer(f, delimiter='\t')
        head = []
        head.append('FILE')
        head.append('DOMINANT_COLOR_RGB')
        for x in range(MAX_RESULTS):
            head.append('LABEL_' + str(x))
        writer.writerow(head)
        for response in result:
            #print(response)
            # NOTE(review): colors[1] picks the *second* entry of the
            # dominant-colors list; confirm this is intended over [0].
            row = [
                response['file'],
                "(%s, %s, %s)" % (
                    response['response']['responses'][0]
                    ['imagePropertiesAnnotation']['dominantColors']['colors']
                    [1]['color']['red'],
                    response['response']['responses'][0]
                    ['imagePropertiesAnnotation']['dominantColors']['colors']
                    [1]['color']['green'],
                    response['response']['responses'][0]
                    ['imagePropertiesAnnotation']['dominantColors']['colors']
                    [1]['color']['blue'],
                ),
            ]
            for x in range(MAX_RESULTS):
                try:
                    label = response['response']['responses'][0][
                        'labelAnnotations'][x]['description']
                except IndexError:
                    label = 'NULL'  # fewer labels returned than MAX_RESULTS
                row.append(label)
            #print(row)
            writer.writerow(row)
            spinner.next()
    spinner.finish()
def program(self):
    """Main program operation: announce the starting state and route,
    then step the Markov chain (switching()) until `choice` reaches its
    destination, spinning while it runs; finally record points.
    """
    if markov.info_state == "rational":
        conversation = " He must be really well-informed :-o"
    else:
        conversation = " I hope he makes a good decision..."
    for i in range(0, self.maximum):
        time.sleep(3)
        print("The starting state is %s, Master! %s" % (markov.info_state, conversation))
        #print("Going from %s to %s!" % (build_up.source, build_up.destination))
        print("Going from %s to %s!" % (choice.current, choice.destination))
        time.sleep(3)
        program_spinner = Spinner("Simulating, Master!")
        while True:
            switching()  # advance the chain one transition
            print("Running!")
            program_spinner.next()
            print(choice.choice_list)
            if choice.current == choice.destination:
                break
        program_spinner.finish()
    build_up.set_points()
#!/usr/bin/python
# -*- coding: utf-8 -*-
# test.py
#
# Watchdog loop: every 5 seconds ask settings.restartExited() to restart
# any stopped hosts, ticking a spinner forever until the process is killed.
from progress.bar import Bar, IncrementalBar
from progress.spinner import Spinner
import time
# Fix: this was `import settings as set`, which shadowed the builtin
# `set` type for the whole module.
import settings

spinner = Spinner('Restarting stopped hosts ')
while True:
    time.sleep(5)
    settings.restartExited()
    spinner.next()
def entry(wikisite, wikipath, user, password, dry, verbose, logfile, protocol, sleep, start):
    """Walk every page in the wiki's 'Glossary' category and refresh the
    Glossary template's |Link= param: when the EBI OLS API reports the
    linked term as replaced, rewrite the link to the replacement IRI.

    dry:     log what would change without editing pages.
    verbose: print progress instead of logging silently.
    start:   page title to resume from (earlier pages are skipped).
    sleep:   optional per-page delay in seconds.
    """
    global isSilent, logfileName
    startMatched = False
    processed = 0
    updated = 0
    if verbose:
        isSilent = False
    else:
        if logfile:
            logfileName = logfile
    site = mwclient.Site(wikisite, wikipath, scheme=protocol)
    site.requests['timeout'] = 300
    site.login(user, password)
    if dry:
        echo("Doing a dry-run! No writes will be performed")
    if isSilent:
        spinner = Spinner('Loading ')
    for page in site.Categories['Glossary']:
        # Resume support: skip until the `start` title is seen.
        if start and not startMatched:
            if page.page_title != start:
                continue
            else:
                startMatched = True
        processed = processed + 1
        if page.namespace != 0:
            echo("\tNot a regular page!")
            continue
        echo(page.page_title)
        text = page.text()
        if '|Link=' not in text:
            echo("\tLink param not found!")
            continue
        wikicode = mwparserfromhell.parse(text)
        templates = wikicode.filter_templates(matches='Glossary')
        if not len(templates):
            echo("\tTemplate not found!")
            continue
        template = templates[0]
        if not template:
            echo("\tTemplate not found!")
            continue
        if template.has_param('Link'):
            link = template.get('Link')
            linkValue = link.value.rstrip()
            echo("\tLink: %s" % linkValue)
            if not validators.url(linkValue):
                echo("\t! The param value (%s) is not an URL!" % linkValue)
                continue
            # OLS indexes terms by http:// IRIs, hence the scheme rewrite.
            r = requests.get('https://www.ebi.ac.uk/ols/api/terms?iri=%s' % linkValue.replace('https://', 'http://'),
                             timeout=300)
            json = r.json()
            if '_embedded' not in json:
                echo("\t! The API response does not contain a _embedded list:")
                echo("\t\t%s" % json)
                continue
            if 'terms' not in json['_embedded']:
                echo("\t! The API response does not contain a terms list:")
                echo("\t\t%s" % json)
                continue
            terms = json['_embedded']['terms']
            if not len(terms):
                echo('\tTerms not found!')
                continue
            for term in terms:
                label = term['label']
                echo("\t\t%s" % label)
                is_obsolete = term['is_obsolete']
                replaced = term['term_replaced_by']
                if replaced and not validators.url(replaced):
                    # The replacement is a bare term ID, not an IRI:
                    # resolve it through a second OLS lookup.
                    echo("\t! The replacement is not an URL but a term ID: %s" % replaced)
                    r2 = requests.get(
                        'https://www.ebi.ac.uk/ols/api/terms?id=%s' % replaced,
                        timeout=300)
                    json2 = r2.json()
                    if not json2:
                        echo("Unable to fetch URI for term replcement %s" % replaced)
                        continue
                    if '_embedded' not in json2:
                        echo("Unable to fetch _embedded for term replcement %s" % replaced)
                        continue
                    if 'terms' not in json2['_embedded']:
                        echo("Unable to fetch terms for term replcement %s" % replaced)
                        continue
                    if not len(json2['_embedded']['terms']):
                        echo("Unable to fetch terms for term replcement %s" % replaced)
                        continue
                    v = json2['_embedded']['terms'][0]['iri']
                    if not v:
                        echo("Unable to fetch URI for term replcement %s" % replaced)
                        continue
                    replaced = v
                if replaced:
                    echo("\tTerm '%s' (Link: %s) is replaced by: %s" % (label, linkValue, replaced))
                    link.value = '%s\n' % replaced
                    if not dry:
                        page.edit(text=str(wikicode),
                                  summary='Updating EFO links: %s -> %s' % (linkValue, replaced))
                    updated = updated + 1
                    echo("----------------")
                    echo(str(wikicode))
                    echo("----------------")
                # all is done for the term, go next
                continue
        else:
            echo("\t! Link param not found")
            continue
        if isSilent:
            spinner.next()
        if sleep:
            time.sleep(sleep)
    if isSilent:
        spinner.finish()
    echo("Done!")
    echo("\nProcessed %s pages, Updated %s pages" % (processed, updated))
def analyse(self):
    """Read the messages queue and generate:

    1. Counter for the From field
    2. Counter for the Time field (by hour)

    Example message: {'id': '16f39fe119ee8427', 'labels': ['UNREAD',
    'CATEGORY_UPDATES', 'INBOX'], 'fields': {'from': 'Coursera <...>',
    'date': 'Tue, 24 Dec 2019 22:13:09 +0000'}}

    Each stage runs on the executor while a spinner animates until the
    stage signals completion via its Event (or the future stops running).
    """
    with concurrent.futures.ThreadPoolExecutor() as executor:
        # The four stages were copy-pasted stanzas; factored into
        # _run_stage so the spinner/Event plumbing lives in one place.
        self._run_stage(executor, "Loading messages", self._load_table)
        self._run_stage(executor, "Analysing count", self._analyze_count)
        self._run_stage(executor, "Analysing senders", self._analyze_senders)
        self._run_stage(executor, "Analysing dates", self._analyze_date)

def _run_stage(self, executor, label, task):
    """Submit `task(event)` to `executor` and spin until it finishes.

    task must accept a threading.Event and set it when done.
    """
    progress = Spinner(f"{helpers.loader_icn} {label} ")
    event = Event()
    future = executor.submit(task, event)
    # is_set() replaces the deprecated camelCase isSet() alias.
    while not event.is_set() and future.running():
        progress.next()
        time.sleep(0.1)
    progress.finish()
date = str( calendar.timegm( (datetime.now() + timedelta(-10)) .utctimetuple() ) ) data = {"token": _token, "ts_to": date} response = requests.post(files_list_url, data=data) response = response.json() if("files" in response.keys()): fileCount = len(response["files"]) else: print("The following error message was received from SLACK API: " + str(response['error'])) continue spinner = Spinner('Looking for files...\n') if(fileCount > 0): print("Parsing " + str(fileCount) + " files...\n", end='', file=sys.stdout) spinner.next() else: print("No files older than 10 days found.", file=sys.stdout) spinning = True while spinning: if len(response["files"]) == 0: spinner.finish() spinning = False break elif(whileCount >= fileCount and whileCount > 1):
def remove(package_list):
    ''' Uninstall Applications And Packages.

    package_list: comma-separated package keys; known dev packages and
    snap applications are delegated to Uninstaller, while anaconda /
    miniconda get bespoke shell cleanup.
    '''
    uninstaller = Uninstaller()
    # TODO(review): the next two lines were redacted ('******') in the
    # original dump; reconstructed to the obvious intent — confirm.
    password = getpass('Enter your password: ')
    packages = package_list.split(',')
    for package in packages:
        if package in devpackages:
            uninstaller.uninstall(f'sudo -S apt-get remove -y {package}', password, package_name=devpackages[package])
        if package in applications:
            uninstaller.uninstall(f'sudo snap remove {package}', password, package_name=applications[package])
        if package == 'anaconda':
            try:
                installer_progress = Spinner(
                    message=f'Uninstalling Anaconda...', max=100)
                # sudo requires the flag '-S' in order to take input from stdin
                for _ in range(1, 75):
                    time.sleep(0.007)
                    installer_progress.next()
                os.system('rm -rf ~/anaconda3 ~/.continuum ~/.conda')
                os.system('rm ~/anaconda.sh')
                with open('.bashrc', 'r') as file:
                    lines = file.read()
                with open('.bashrc', 'w') as file:
                    # NOTE(review): `lines` is one string, so this loop
                    # iterates CHARACTERS: the 'anaconda' test can never
                    # match and the early `return` would abort the whole
                    # command.  Filtering needs readlines() + `continue`.
                    for line in lines:
                        if 'anaconda' in line or 'miniconda' in line:
                            return
                        else:
                            file.write(line)
                # stdoutput = (output)[0].decode('utf-8')
                for _ in range(75, 101):
                    time.sleep(0.01)
                    installer_progress.next()
                click.echo(click.style(
                    f'\n\n 🎉 Successfully Uninstalled Anaconda! 🎉 \n', fg='green'))
            except subprocess.CalledProcessError as e:
                click.echo(e.output)
                click.echo('An Error Occurred During Uninstallation...', err=True)
        if package == 'miniconda':
            try:
                installer_progress = Spinner(
                    message=f'Uninstalling Miniconda...', max=100)
                # sudo requires the flag '-S' in order to take input from stdin
                for _ in range(1, 75):
                    time.sleep(0.007)
                    installer_progress.next()
                os.system('rm -rf ~/miniconda ~/.continuum ~/.conda ~/.condarc')
                os.system('rm ~/miniconda.sh')
                with open('.bashrc', 'r') as file:
                    lines = file.read()
                with open('.bashrc', 'w') as file:
                    # Same character-iteration issue as the anaconda branch.
                    for line in lines:
                        if 'anaconda' in line or 'miniconda' in line:
                            return
                        else:
                            file.write(line)
                # stdoutput = (output)[0].decode('utf-8')
                for _ in range(1, 101):
                    time.sleep(0.01)
                    installer_progress.next()
                click.echo(click.style(
                    f'\n\n 🎉 Successfully Uninstalled Miniconda! 🎉 \n', fg='green'))
            except subprocess.CalledProcessError as e:
                click.echo(e.output)
                click.echo('An Error Occurred During Uninstallation...', err=True)
def install(package_list):
    ''' Install A Specified Package(s).

    package_list: comma-separated package keys.  Known dev packages and
    snap applications are delegated to Installer.install_task; chrome,
    anaconda and miniconda have bespoke flows; anything else reports
    "Package Not Found".  Linux only.
    '''
    # TODO(review): the next two lines were redacted ('******') in the
    # original dump; reconstructed to the obvious intent — confirm.
    password = getpass('Enter your password: ')
    packages = package_list.split(',')
    turbocharge = Installer()
    click.echo('\n')
    os_bar = IncrementalBar('Getting Operating System...', max = 1)
    os_bar.next()
    for package_name in packages:
        package_name = package_name.strip(' ')
        if platform == 'linux':
            click.echo('\n')
            finding_bar = IncrementalBar('Finding Requested Packages...', max = 1)
            if package_name in devpackages:
                show_progress(finding_bar)
                turbocharge.install_task(devpackages[package_name], f'sudo -S apt-get install -y {package_name}', password, f'{package_name} --version', [f'{devpackages[package_name]} Version'])
            if package_name in applications:
                show_progress(finding_bar)
                turbocharge.install_task(applications[package_name], f'sudo -S snap install --classic {package_name}', password, '', [])
            if package_name == 'chrome':
                show_progress(finding_bar)
                try:
                    click.echo('\n')
                    password = getpass("Enter your password: ")
                    # TODO(review): redacted in dump — reconstructed as a
                    # wget download of the .deb before installing; confirm.
                    proc = Popen("wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb".split(), stdin=PIPE, stdout=PIPE, stderr=PIPE)
                    proc.wait()
                    second = Popen("sudo -S apt-get install -y ./google-chrome-stable_current_amd64.deb".split(), stdin=PIPE, stdout=PIPE, stderr=PIPE)
                    # Popen only accepts byte-arrays so you must encode the string
                    second.communicate(password.encode())
                    # stdoutput = (output)[0].decode('utf-8')
                    click.echo(click.style('\n\n 🎉 Successfully Installed Chrome! 🎉 \n'))
                    # Testing the successful installation of the package
                    testing_bar = IncrementalBar('Testing package...', max = 100)
                    for _ in range(1, 21):
                        time.sleep(0.045)
                        testing_bar.next()
                    os.system('cd --')
                    for _ in range(21, 60):
                        time.sleep(0.045)
                        testing_bar.next()
                    for _ in range(60, 101):
                        time.sleep(0.03)
                        testing_bar.next()
                    click.echo('\n')
                    click.echo(click.style('Test Passed: Chrome Launch ✅\n', fg='green'))
                except subprocess.CalledProcessError as e:
                    click.echo(e.output)
                    click.echo('An Error Occurred During Installation...', err = True)
            if package_name == 'anaconda':
                show_progress(finding_bar)
                username = getuser()
                try:
                    installer_progress = Spinner(message=f'Installing {package_name}...', max=100)
                    # sudo requires the flag '-S' in order to take input from stdin
                    for _ in range(1, 35):
                        time.sleep(0.01)
                        installer_progress.next()
                    os.system("wget https://repo.anaconda.com/archive/Anaconda3-2020.07-Linux-x86_64.sh -O ~/anaconda.sh")
                    for _ in range(35, 61):
                        time.sleep(0.01)
                        installer_progress.next()
                    os.system('bash ~/anaconda.sh -b -p $HOME/anaconda3')
                    for _ in range(61, 91):
                        time.sleep(0.01)
                        installer_progress.next()
                    os.system(f'echo "export PATH="/home/{username}/anaconda3/bin:$PATH"" >> ~/.bashrc')
                    for _ in range(90, 101):
                        time.sleep(0.01)
                        installer_progress.next()
                    # stdoutput = (output)[0].decode('utf-8')
                    click.echo(click.style(f'\n\n 🎉 Successfully Installed {package_name}! 🎉 \n'))
                except subprocess.CalledProcessError as e:
                    click.echo(e.output)
                    click.echo('An Error Occurred During Installation...', err = True)
            if package_name == 'miniconda':
                show_progress(finding_bar)
                username = getuser()
                try:
                    installer_progress = Spinner(message=f'Installing {package_name}...', max=100)
                    # sudo requires the flag '-S' in order to take input from stdin
                    for _ in range(1, 35):
                        time.sleep(0.01)
                        installer_progress.next()
                    # NOTE(review): this branch downloads the *Anaconda*
                    # installer into ~/miniconda.sh and then runs
                    # ~/anaconda.sh — both the URL and the script path
                    # look wrong for a miniconda install; confirm.
                    os.system("wget https://repo.anaconda.com/archive/Anaconda3-2020.07-Linux-x86_64.sh -O ~/miniconda.sh")
                    for _ in range(35, 61):
                        time.sleep(0.01)
                        installer_progress.next()
                    os.system('bash ~/anaconda.sh -b -p $HOME/anaconda3')
                    for _ in range(61, 91):
                        time.sleep(0.01)
                        installer_progress.next()
                    os.system(f'echo "export PATH="/home/{username}/anaconda3/bin:$PATH"" >> ~/.bashrc')
                    for _ in range(90, 101):
                        time.sleep(0.01)
                        installer_progress.next()
                    # stdoutput = (output)[0].decode('utf-8')
                    click.echo(click.style(f'\n\n 🎉 Successfully Installed {package_name}! 🎉 \n'))
                except subprocess.CalledProcessError as e:
                    click.echo(e.output)
                    click.echo('An Error Occurred During Installation...', err = True)
            elif package_name not in devpackages and package_name not in applications and package_name != 'chrome' and package_name != 'anaconda' and package_name != 'miniconda':
                click.echo('\n')
                click.echo(click.style(':( Package Not Found! :(', fg='red'))
# for data_center in data_centers:
#     print("DataCenter Name: %s Status: %s" % (data_center.name, data_center.get_status().get_state()))
for cluster in clusters:
    print("Cluster Name: %s " % (cluster.name))

def spm_status(host):
    """Return 1 when `host` is the Storage Pool Manager, else 0."""
    if host.storage_manager.valueOf_ == "true":
        return 1
    else:
        return 0

# Poll the API until every data center reports state "up", then disconnect.
spinner = Spinner("Waiting ")
terminate = 0
# Fix: the loop condition compared the int flag against the string "1",
# so it could never become false; exit relied solely on the inner break.
while terminate != 1:
    data_centers = api.datacenters.list()
    count = 0
    for data_center in data_centers:
        if data_center.get_status().get_state() == "up":
            count += 1
    if count == len(data_centers):
        terminate = 1
    if terminate == 1:
        break
    spinner.next()
    sys.stdout.flush()
api.disconnect()