def get_uploaded_tracks(self, user):
    """Fetch every track uploaded by *user* and hand them off for download."""
    progress = Halo(text="Fetching uploads")
    progress.start()
    uploads = self.client.get("/tracks", id=user.id)
    progress.stop()
    print("Found {} uploads".format(len(uploads)))
    self.get_multiple_tracks(uploads)
def get_liked_tracks(self):
    """Resolve the profile's likes page and download every liked track."""
    progress = Halo(text="Fetching liked tracks")
    progress.start()
    likes = self.client.get("/resolve", url=self.url + "/likes")
    progress.stop()
    print("{} liked track(s) found".format(len(likes)))
    self.get_multiple_tracks(likes)
def get_recommended_tracks(self, track, no_of_tracks=10):
    """Fetch up to *no_of_tracks* tracks related to *track* and download them."""
    query = {
        "client_id": secret,
        "limit": no_of_tracks,
        "offset": 0
    }
    progress = Halo(text="Fetching tracks similar to {}".format(track.title))
    progress.start()
    related_url = "{}/tracks/{}/related".format(self.API_V2, track.id)
    response = self.session.get(related_url, params=query)
    progress.stop()
    # The v2 "related" endpoint wraps results in a "collection" array.
    related = json.loads(response.text)["collection"]
    self.get_multiple_tracks(related)
def require(package):
    """
    Specifies a package that should be installed.

    By default, this method will use the default package manager of the OS
    (brew for Mac, apt-get for Ubuntu, etc.), but you can override the
    default package by specifying it in the `package_aliases.py` file.

    Args:
        package (str): User-friendly name of package. Most often, this will
            be the same as the actual name of the package in the package
            manager, but some packages will have different names on
            different systems. Use your best judgement to determine what
            name to use.
    """
    manager_cls = default_package_managers[get_platform()]
    command = manager_cls(package)

    # A platform-specific alias (or its 'default') overrides the stock command.
    aliases = package_aliases.get(package)
    if aliases is not None:
        fallback = aliases.get('default', command)
        command = aliases.get(get_platform(), fallback)

    # An alias explicitly mapped to None means "skip on this platform".
    if command is None:
        return

    spinner = Halo(
        text="Installing {}".format(package),
        spinner="dots",
        placement="right"
    )
    spinner.start()
    if command.execute():
        spinner.succeed()
    else:
        spinner.fail()
def main(ARGS):
    """Stream microphone audio through VAD into a DeepSpeech model and print
    each recognized utterance. Runs until interrupted (ctrl-C)."""
    # Load DeepSpeech model
    # When a directory is given, rewrite the ARGS paths to point at the
    # conventional file names inside it.
    if os.path.isdir(ARGS.model):
        model_dir = ARGS.model
        ARGS.model = os.path.join(model_dir, 'output_graph.pb')
        ARGS.alphabet = os.path.join(model_dir, ARGS.alphabet if ARGS.alphabet else 'alphabet.txt')
        ARGS.lm = os.path.join(model_dir, ARGS.lm)
        ARGS.trie = os.path.join(model_dir, ARGS.trie)
    print('Initializing model...')
    logging.info("ARGS.model: %s", ARGS.model)
    logging.info("ARGS.alphabet: %s", ARGS.alphabet)
    model = deepspeech.Model(ARGS.model, ARGS.n_features, ARGS.n_context, ARGS.alphabet, ARGS.beam_width)
    # Language model decoding is optional: only enabled when both the LM and
    # its trie are supplied.
    if ARGS.lm and ARGS.trie:
        logging.info("ARGS.lm: %s", ARGS.lm)
        logging.info("ARGS.trie: %s", ARGS.trie)
        model.enableDecoderWithLM(ARGS.alphabet, ARGS.lm, ARGS.trie, ARGS.lm_alpha, ARGS.lm_beta)

    # Start audio with VAD
    vad_audio = VADAudio(aggressiveness=ARGS.vad_aggressiveness,
                         device=ARGS.device,
                         input_rate=ARGS.rate)
    print("Listening (ctrl-C to exit)...")
    # vad_collector yields audio frames during speech and None at the end of
    # each utterance — presumably; confirm against the VADAudio implementation.
    frames = vad_audio.vad_collector()

    # Stream from microphone to DeepSpeech using VAD
    spinner = None
    if not ARGS.nospinner:
        spinner = Halo(spinner='line')
    stream_context = model.setupStream()
    wav_data = bytearray()
    for frame in frames:
        if frame is not None:
            # Speech frame: feed it into the open DeepSpeech stream.
            if spinner: spinner.start()
            logging.debug("streaming frame")
            model.feedAudioContent(stream_context, np.frombuffer(frame, np.int16))
            if ARGS.savewav: wav_data.extend(frame)
        else:
            # End of utterance: finalize the stream, emit text, start a new one.
            if spinner: spinner.stop()
            logging.debug("end utterence")
            if ARGS.savewav:
                # Dump the utterance audio to a timestamped WAV file.
                vad_audio.write_wav(os.path.join(ARGS.savewav, datetime.now().strftime("savewav_%Y-%m-%d_%H-%M-%S_%f.wav")), wav_data)
                wav_data = bytearray()
            text = model.finishStream(stream_context)
            print("Recognized: %s" % text)
            stream_context = model.setupStream()
def get_charted_tracks(self, kind, no_of_tracks=10):
    """Fetch up to *no_of_tracks* charted tracks of the given *kind* (e.g.
    "top" or "trending") for the configured genre, then download them.

    Args:
        kind (str): Chart kind passed to the /charts endpoint.
        no_of_tracks (int): Maximum number of tracks to collect.
    """
    url_params = {
        "limit": no_of_tracks,
        "genre": "soundcloud:genres:" + self.args.genre,
        "kind": kind,
        "client_id": secret
    }
    url = "{}/charts".format(self.API_V2)
    tracks = []
    spinner = Halo(text="Fetching {} {} tracks".format(no_of_tracks, kind))
    spinner.start()
    while len(tracks) < no_of_tracks:
        response = self.session.get(url, params=url_params)
        json_payload = json.loads(response.text)
        tracks += json_payload["collection"]
        # Bug fix: the API stops providing "next_href" once results are
        # exhausted; the original raised KeyError (or requested None) when
        # fewer than no_of_tracks exist. Stop paginating in that case.
        url = json_payload.get("next_href")
        if not url:
            break
    spinner.stop()
    # Each chart entry wraps the track object under the "track" key.
    tracks = map(lambda x: x["track"], tracks[:no_of_tracks])
    self.get_multiple_tracks(tracks)
def link(src, dest, backup_dir):
    """
    Creates a symbolic link at the specified location.

    Args:
        src (str): Path to the file that we want to link to.
        dest (str): Path of the link that will be created.
        backup_dir (str): Path to directory to backup existing files.
    """
    spinner = Halo(
        text="linking %s" % os.path.basename(src),
        spinner="dots",
        placement="right"
    )
    spinner.start()
    # Stale symlinks are simply removed; real files are backed up first.
    if os.path.islink(dest):
        os.unlink(dest)
    if os.path.exists(dest):
        _backup(dest, backup_dir)
    os.symlink(src, dest)
    spinner.succeed()
def main(self):
    """Entry point: resolve the configured URL and dispatch to the right
    downloader depending on whether it is a user, track, playlist, or a
    list of playlists/tracks."""
    os.chdir(self.dirname)
    # Chart downloads are independent of the resolved URL.
    if self.args.top:
        self.get_charted_tracks("top")
    if self.args.new:
        self.get_charted_tracks("trending")
    spinner = Halo(text="Resolving URL")
    spinner.start()
    # No URL (e.g. charts-only invocation) leaves data as None.
    data = self.client.get("/resolve", url=self.url) if self.url else None
    spinner.stop()
    if isinstance(data, resource.Resource):
        if data.kind == "user":
            print("User profile found")
            # Save the user's tracks into a folder named after them.
            folder = self.validate_name(data.username)
            if not os.path.isdir(folder):
                os.mkdir(folder)
            os.chdir(os.path.join(os.getcwd(), folder))
            print("Saving in: " + os.getcwd())
            if self.args.all or self.args.likes:
                self.get_liked_tracks()
            # --likes alone skips uploads; otherwise fetch them too.
            if not self.args.likes:
                self.get_uploaded_tracks(data)
        elif data.kind == "track":
            print("Single track found")
            print("Saving in: " + os.getcwd())
            if self.args.similar:
                self.get_recommended_tracks(data)
            self.get_single_track(data)
        elif data.kind == "playlist":
            print("Single playlist found.")
            # Folder is named after the playlist owner, not the playlist.
            folder = self.validate_name(data.user["username"])
            if not os.path.isdir(folder):
                os.mkdir(folder)
            os.chdir(os.path.join(os.getcwd(), str(folder)))
            self.get_playlist(data)
    elif isinstance(data, resource.ResourceList):
        # A list resolves to either multiple playlists or multiple tracks;
        # assumes the list is homogeneous — TODO confirm.
        if data[0].kind == "playlist":
            print("%d playlists found" % (len(data)))
            for playlist in data:
                self.get_playlist(playlist)
        elif data[0].kind == "track":
            self.get_multiple_tracks(data)
def curl(src, dest):
    """
    Installs `src` to path `dest` by downloading it with curl. Skips the
    download when `dest` already exists.
    """
    spinner = Halo(
        text="curl {}".format(dest),
        spinner="dots",
        placement="right"
    )
    spinner.start()
    if os.path.exists(dest):
        spinner.info("{} already exists".format(dest))
        return
    try:
        sh.curl("-fLo", dest, src)
    except sh.ErrorReturnCode as exc:
        indented = "\n\t" + exc.stderr.replace("\n", "\n\t")
        logging.error(
            "Error downloading file `%s`: %s", src, indented
        )
        spinner.fail()
    else:
        spinner.succeed()
async def install(self):
    """Install every configured addon (plus Peggle, when enabled)
    concurrently, then report per-addon success/failure."""
    tasks = []
    for addon in self.addons:
        tasks.append(self._install_addon(addon))
    if self.peggle is True:
        tasks.append(self._install_peggle())
        self.addons.append('Peggle')
    self.loader = Halo(f'Installing addons... (0/{len(tasks)})')
    self.loader.start()
    # All installers share one HTTP session for the duration of the gather.
    async with ClientSession() as self.session:
        await asyncio.gather(*tasks)
    self.loader.stop()
    for addon, error in self._failed:
        print(f"Failed to install: '{addon}' ({error})")
    for addon in self._done:
        print(f"Successfully installed: '{addon}'")
def default_shell(name):
    """
    Sets default shell for the current user.
    """
    spinner = Halo(
        text="Default shell `{}`".format(name),
        spinner="dots",
        placement="right"
    )
    spinner.start()
    try:
        shell_path = sh.which(name).strip()
        current_user = sh.whoami().strip()
        # chsh needs elevated privileges to change another user's shell.
        with Authentication():
            sh.chsh("-s", shell_path, current_user)
    except sh.ErrorReturnCode as exc:
        details = "\n\t" + exc.stderr.replace("\n", "\n\t")
        logging.error(
            "Error changing default shell to %s: %s", name, details
        )
        spinner.fail()
    else:
        spinner.succeed()
def configure(namespace, key, *values):
    """
    Sets configuration on mac using `defaults`
    """
    spinner = Halo(
        text="Setting {}".format(key),
        spinner="dots",
        placement="right"
    )
    spinner.start()
    try:
        # An empty/None namespace writes to the global domain.
        write_args = ("write", namespace, key) if namespace else ("write", key)
        sh.defaults(*write_args, *values)
    except sh.ErrorReturnCode as exc:
        details = "\n\t" + exc.stderr.replace("\n", "\n\t")
        logging.error(
            "Error with `defaults write -g %s %s: %s",
            key, values, details
        )
        spinner.fail()
    else:
        spinner.succeed()
def test_text_stripping(self):
    """Test the text being stripped before output.
    """
    spinner = Halo(text='foo\n', spinner='dots', stream=self._stream)
    spinner.start()
    time.sleep(1)
    spinner.succeed('foo\n')
    output = self._get_test_output()['text']

    # The trailing newline must be stripped from every rendered frame.
    for idx in range(3):
        self.assertEqual(output[idx], '{} foo'.format(frames[idx]))

    pattern = re.compile(r'(✔|v) foo', re.UNICODE)
    self.assertRegexpMatches(output[-1], pattern)
def search_torrents(query):
    """Query all torrent providers for *query* and return the combined result
    rows, framed by the Exit / Search Again menu entries."""
    spinner = Halo(text=f'Searching for {query}', spinner='dots')
    spinner.start()
    exit_row = [['------Exit application------', None, None, None]]
    again_row = [['--------Search Again--------', None, None, None]]
    # Providers are queried in a fixed order: YTS, Zooqle, LimeTorrents.
    combined = exit_row + yts_search(query) + zooqle_search(query) \
        + lime_search(query) + again_row
    list_format(combined)
    spinner.stop()
    return combined
def main(ARGS):
    """Stream audio (microphone or file) through VAD into a DeepSpeech model
    using the streaming API, printing each recognized utterance."""
    # Load DeepSpeech model
    # A directory argument is expanded to the conventional file names inside.
    if os.path.isdir(ARGS.model):
        model_dir = ARGS.model
        ARGS.model = os.path.join(model_dir, 'output_graph.pb')
        ARGS.scorer = os.path.join(model_dir, ARGS.scorer)

    print('Initializing model...')
    logging.info("ARGS.model: %s", ARGS.model)
    model = deepspeech.Model(ARGS.model)
    # External scorer (language model) is optional.
    if ARGS.scorer:
        logging.info("ARGS.scorer: %s", ARGS.scorer)
        model.enableExternalScorer(ARGS.scorer)

    # Start audio with VAD
    vad_audio = VADAudio(aggressiveness=ARGS.vad_aggressiveness,
                         device=ARGS.device,
                         input_rate=ARGS.rate,
                         file=ARGS.file)
    print(vad_audio.device)
    print("Listening (ctrl-C to exit)...")
    # Yields speech frames and None markers at utterance boundaries —
    # presumably; confirm against the VADAudio implementation.
    frames = vad_audio.vad_collector()

    # Stream from microphone to DeepSpeech using VAD
    spinner = None
    if not ARGS.nospinner:
        spinner = Halo(spinner='line')
    stream_context = model.createStream()
    wav_data = bytearray()
    for frame in frames:
        if frame is not None:
            # Speech frame: feed it into the active stream.
            if spinner: spinner.start()
            logging.debug("streaming frame")
            stream_context.feedAudioContent(np.frombuffer(frame, np.int16))
            if ARGS.savewav: wav_data.extend(frame)
        else:
            # End of utterance: finalize, print, and open a fresh stream.
            if spinner: spinner.stop()
            logging.debug("end utterence")
            if ARGS.savewav:
                # Dump the utterance audio to a timestamped WAV file.
                vad_audio.write_wav(os.path.join(ARGS.savewav, datetime.now().strftime("savewav_%Y-%m-%d_%H-%M-%S_%f.wav")), wav_data)
                wav_data = bytearray()
            text = stream_context.finishStream()
            #processInput(text)
            print(text)
            # pyautogui.write(text)
            stream_context = model.createStream()
def save_file(self, data):
    """Write the scraped DataFrame to ./output/<slug>/<slug>_<date>.csv,
    creating the directory as needed, then restart with a success message."""
    spinner = Halo(text='Saving', spinner='simpleDotsScrolling')
    spinner.start()
    slug = self.__slugify(self.title)
    directory = "./output/{}".format(slug)
    if not os.path.exists(directory):
        os.makedirs(directory)
    out_path = "{}/{}_{}.csv".format(directory, slug, self.date)
    data.to_csv(out_path, index=False, encoding=self.encoding)
    spinner.stop()
    self.restart(colored('Successfully extracted data!', "green"))
def test_initial_title_spinner(self):
    """Test Halo with initial title.
    """
    stdout_backup = sys.stdout
    sys.stdout = self._stream

    spinner = Halo('bar')
    spinner.start()
    time.sleep(1)
    spinner.stop()
    sys.stdout = stdout_backup

    output = self._get_test_output()
    # The positional title must appear next to each spinner frame.
    for idx in range(3):
        self.assertEqual(output[idx], '{0} bar'.format(frames[idx]))
def users(sid):
    """
    List users in a Station.
    """
    spinner = Halo("Retrieving station users", spinner="dot").start()
    matches = galileo.stations.list_stations(stationids=[sid])
    if not len(matches):
        spinner.stop()
        click.echo("No station matches that query.")
        return
    # Flatten user objects into dicts so pandas can normalize them.
    station_users = [user.__dict__ for user in matches.stations[0].users]
    users_df = pandas.json_normalize(station_users)
    spinner.stop()
    click.echo(users_df)
def remap_key(src, dest):
    """
    Remaps src key to dest key.

    An example of remapping the caps lock key to act like the left control
    key would be to call `remap_key('capslock', 'left-ctrl')

    Args:
        src (str): Key name in keyboard dict. This is the key that will
            change functionality.
        dest (str): Key name in keyboard dict. The key defined in `src`
            should act like this key.
    """
    # TODO (phillip): Right now, these changes do not survive a reboot. I am
    # going to just change this manually in the keyboard settings, but I might
    # be able to figure out how to do it with `defaults`.
    # https://apple.stackexchange.com/questions/141069/updating-modifier-keys-from-the-command-line-takes-no-effect
    spinner = Halo(
        text="Remapping {} to {}".format(src, dest),
        spinner="dots",
        placement="right"
    )
    spinner.start()

    mapping = {
        'UserKeyMapping': [
            {
                'HIDKeyboardModifierMappingSrc': keyboard[src],
                'HIDKeyboardModifierMappingDst': keyboard[dest]
            }
        ]
    }
    try:
        # hidutil expects JSON, so swap Python's single quotes for double.
        sh.hidutil("property", "--set", str(mapping).replace("'", '"'))
    except sh.ErrorReturnCode as exc:
        details = "\n\t" + exc.stderr.replace("\n", "\n\t")
        logging.error(
            "Error with `hidutil property --set %s : %s",
            str(mapping), details
        )
        spinner.fail()
    else:
        spinner.succeed()
def extract(src, dest):
    """
    Extracts the source file in dest
    """
    spinner = Halo(
        text="extract {}".format(src),
        spinner="dots",
        placement="right"
    )
    spinner.start()
    try:
        # TODO (phillip): This should choose the correct decompression based
        # on the filename where possible.
        with tarfile.open(src, "r:gz") as archive:
            archive.extractall(dest)
        # Remove the archive once its contents are extracted.
        sh.rm(src)
    except sh.ErrorReturnCode as exc:
        details = "\n\t" + exc.stderr.replace("\n", "\n\t")
        logging.error(
            "Error extracting file `%s`: %s", src, details
        )
        spinner.fail()
    else:
        spinner.succeed()
def _make_json_skeleton(limit, reddit, redditor):
    """
    Create a skeleton for JSON export. Include scrape details at the top.

    Parameters
    ----------
    limit: str
        String denoting n_results returned
    reddit: PRAW Reddit object
    redditor: PRAW Redditor object

    Returns
    -------
    redditor: PRAW Redditor object
    skeleton: dict
        Dictionary containing all Redditor data
    """
    plurality = "result" if int(limit) <= 1 else "results"
    Halo().info("Processing %s %s from u/%s's profile." % (limit, plurality, redditor))

    skeleton = {
        "scrape_settings": {
            "redditor": redditor,
            "n_results": limit
        },
        "data": {
            "information": None,
            "interactions": {}
        }
    }
    # Swap the name string for the actual PRAW Redditor object.
    return reddit.redditor(redditor), skeleton
def main():
    """Interactive CLI entry point: (re)initialize the database and loop on a
    numbered menu until the process is killed."""
    global connection
    global cursor
    # connect to the database
    connect()
    cursor.execute('SELECT version()')
    version = str(cursor.fetchone())
    # initialize the database (drop, create, seed)
    with Halo(text='Inicializando banco...', spinner='dots'):
        run_sql('drop.sql')
        run_sql('initialize.sql')
        run_sql('insert.sql')
    # start the command-line interface loop
    while True:
        os.system('clear')
        print(title)
        # Show connection status plus the server version when online.
        if (test_connection()):
            status = colored('[ONLINE]', 'green')
            print(status, end=" ")
            # version looks like "('PostgreSQL 13...',)"; keep the name part.
            print(version.split(',')[0][2:])
        else:
            status = colored('[OFFLINE]', 'red')
            print(status)
        for index in menu.keys():
            print("[" + str(index) + "] " + menu[index])
        user_input = prompt(">> ")
        # Non-numeric input is rejected; numeric input is dispatched.
        try:
            int(user_input)
        except Exception as e:
            print(error_tag + "Opção inválida")
            prompt("Pressione ENTER para continuar. ")
        else:
            parse_menu(int(user_input))
def _write(args, cat_i, data, each_sub, sub):
    """
    Write submissions to file.

    Calls methods from external modules:

        NameFile().r_fname()
        Export.export()

    Parameters
    ----------
    args: Namespace
        Namespace object containing all arguments that were defined in the CLI
    cat_i: str
        String denoting n_results returned or keywords searched for
    data: dict
        Dictionary containing scraped Subreddit submission data
    each_sub: list
        List of Subreddit scraping settings
    sub: str
        String denoting the Subreddit name

    Returns
    -------
    None
    """
    f_name = NameFile().r_fname(args, cat_i, each_sub, sub)
    export_option = "csv" if args.csv else "json"
    Export.export(data, f_name, export_option, "subreddits")

    confirmation = Style.BRIGHT + Fore.GREEN + \
        "%s file for r/%s created." % (export_option.upper(), sub)
    print()
    Halo(color="green", text=confirmation).succeed()
    print()
def revoke(email: str = typer.Argument(
        ..., help="The account email that would be lose admin access")):
    """Revokes admin access.

    More info at https://argorithm.github.io/toolkit/cli#revoke
    """
    payload = {"email": email}
    header = authmanager.get_header()
    endpoint = app_settings.get_endpoint() + "/admin/revoke"
    try:
        with Halo(text='Connecting', spinner='dots'):
            response = requests.post(endpoint, json=payload, headers=header)
    except requests.RequestException as rqe:
        msg.fail("Connection failed", str(rqe))
        raise typer.Abort()
    # Map the server's status code onto a user-facing message.
    if response.status_code == 200:
        msg.good("revoked admin priveleges")
    elif response.status_code == 401:
        msg.warn("Not authorized", "You need admin priveleges")
    elif response.status_code == 404:
        msg.warn("Not found", "No such email registered")
    else:
        msg.fail("Application Error")
def elb(instance_id): loop = asyncio.get_event_loop() # print spinner spinner = Halo(text=f"Running query for {instance_id}, sit tight", spinner='dots') spinner.start() all_info = loop.run_until_complete( run_elb_task(loop=loop, instance_id=instance_id)) spinner.stop() # print all info click.echo( f"{colored(f'======== Instance Details: {instance_id} =========', color='magenta'):^100}" ) click.echo(f"{colored('ELB Info:', attrs=['bold'])}") for i in all_info[1]: click.echo( f"{colored('LB Name:', color='cyan', attrs=['bold']):<40} {colored(i.get('LoadBalancerName', ''))}" ) click.echo( f"{colored('Target Group Name:', color='cyan', attrs=['bold']):<40} {colored(i.get('TargetGroupName', ''))}" ) click.echo(f"") ec2_details = all_info[0] click.echo(f"{colored('EC2 Info:', attrs=['bold'])}") click.echo( f"{colored('Instance Type:', color='cyan', attrs=['bold']):<40} {colored(ec2_details.get('InstanceType', ''))}" ) click.echo( f"{colored('Launch Time:', color='cyan', attrs=['bold']):<40} {colored( ec2_details.get('LaunchTime').isoformat() if ec2_details.get('LaunchTime') else '')}" ) click.echo( f"{colored('IP Addresses:', color='cyan', attrs=['bold']):<40} Private - {colored(ec2_details.get('PrivateIpAddress', ''))}" f" Public - {colored(ec2_details.get('PublicIpAddress', ''))}") click.echo( f"{colored('Status:', color='cyan', attrs=['bold']):<40} {colored(ec2_details.get('State', '').get('Name', ''))}" ) click.echo( f"{colored('Tags:', color='cyan', attrs=['bold']):<40}\n{colored(print_dict_list(ec2_details.get('Tags', [])[:5]))}" ) click.echo( f"{colored('VPC Id:', color='cyan', attrs=['bold']):<40} {colored(ec2_details.get('VpcId', ''))}" ) click.echo( f"{colored('Subnet Id:', color='cyan', attrs=['bold']):<40} {colored(ec2_details.get('SubnetId', ''))}" )
def reboot_instance(instance_id, flags):
    """Reboot the given EC2 instance and print a confirmation.

    Args:
        instance_id: The EC2 instance ID to reboot.
        flags: CLI flags forwarded to the error handler.

    Returns:
        True on success; falls through (None) when the API call fails.
    """
    # Bug fix: the spinner previously said "Stopping Instance..." even though
    # this function reboots the instance.
    with Halo(text='Rebooting Instance...', spinner='dots',
              text_color='white', color='green') as spinner:
        try:
            ec2.reboot_instances(InstanceIds=[instance_id], DryRun=False)
            spinner.stop()
            unicode_chars = '\n\u2714 '
            print(
                f'{colored(unicode_chars, "green")}'
                f'AWS EC2 instance {instance_id} '\
                f'is rebooting.\n'
            )
            return True
        except ClientError as e:
            spinner.stop()
            handle_ec2_errors(e, flags)
async def additional_setup(self):
    """Configure and restart etcd on the publisher host, then poll until the
    endpoint reports healthy."""
    with Halo("[infrastructure] starting etcd") as spinner:
        # Render the etcd config template with the publisher's hostname and
        # install it as /etc/default/etcd (tee needs sudo; output discarded).
        await self.publisher.ssh.run(
            f"HOSTNAME={self.publisher.hostname} "
            "envsubst '$HOSTNAME' "
            ' < "$HOME/config/etcd.template" '
            " | sudo tee /etc/default/etcd "
            " > /dev/null",
            check=True,
        )
        await self.publisher.ssh.run("sudo systemctl restart etcd", check=True)
        # Make sure etcd is healthy
        # Retry the health check every 2s, up to 20 attempts (~40s).
        async for attempt in AsyncRetrying(wait=wait_fixed(2), stop=stop_after_attempt(20)):
            with attempt:
                await self.publisher.ssh.run(
                    (
                        "ETCDCTL_API=3 etcdctl "
                        f"--endpoints {self.publisher.hostname}:2379 "
                        "endpoint health"
                    ),
                    check=True,
                )
        spinner.succeed("[infrastructure] etcd healthy")
def shell():
    """Open an interactive shell in the remote environment"""
    # The AWS Session Manager plugin is a hard prerequisite.
    if not which("session-manager-plugin"):
        cprint("Session Manager Plugin is not installed", "red")
        install_url = colored(
            "https://docs.aws.amazon.com/systems-manager/latest/userguide/session-manager-working-with-install-plugin.html",
            "white",
        )
        print("Installation instructions:", install_url)
        exit(1)

    task = run_task_until_disconnect(app.cluster, app.settings["shell"]["task_family"])
    if task is None:
        exit(1)

    task_arn = task["taskArn"]
    Halo(text=f"starting task {task_arn}").info()
    wait_for_task(app.cluster, task_arn, "running container", status="tasks_running")
    shell_to_task(task, app.cluster, app.settings["shell"]["command"])
def encrypt_data():
    """Encrypt a message or file into `<filename>.bin` using a random AES
    session key wrapped with the RSA public key in `public.pem`.

    Output layout: RSA-encrypted session key, AES nonce, tag, ciphertext.
    Returns to the main menu when done or on error.
    """
    print("[i] Zip files are preferred for compatibility purposes.")
    data_type = input("Message or File (m/f) > ")
    if data_type == "f":
        filename = input("Source Filename > ")
        try:
            # Bug fix: use a context manager so the handle is always closed.
            with open(filename, "rb") as source:
                data = source.read()
        except FileNotFoundError:
            print("[!] CRITICAL: File", filename, "not found!")
            input("Press [Enter] to continue.")
            main()
            # Bug fix: previously fell through after main() returned and
            # crashed on the undefined `data` variable.
            return
    elif data_type == "m":
        filename = input("New Filename > ")
        data = input("Message > ").encode("utf-8")
    else:
        print("\n[!] CRITICAL: Invalid option!")
        input("Press [Enter] to continue.")
        main()
        # Bug fix: same fall-through crash on undefined `filename`/`data`.
        return
    spinner = Halo(text='Encrypting...', spinner='dots')
    spinner.start()
    with open("public.pem") as key_file:
        recipient_key = RSA.import_key(key_file.read())
    session_key = get_random_bytes(16)
    # Wrap the AES session key with the recipient's RSA public key.
    cipher_rsa = PKCS1_OAEP.new(recipient_key)
    enc_session_key = cipher_rsa.encrypt(session_key)
    # EAX mode provides authenticated encryption (nonce + tag).
    cipher_aes = AES.new(session_key, AES.MODE_EAX)
    ciphertext, tag = cipher_aes.encrypt_and_digest(data)
    with open(filename + ".bin", "wb") as file_out:
        for chunk in (enc_session_key, cipher_aes.nonce, tag, ciphertext):
            file_out.write(chunk)
    spinner.stop()
    print("[i] Encryption Sucessful!")
    input("Press [Enter] to continue.")
    main()
def execute(self):
    """Collect matching prod/qa Jenkins job pairs for this repo and print a
    deployment summary block per environment."""
    spinner = Halo(text='Gathering jobs...', spinner='dots', color='cyan')
    spinner.start()
    self.configure()
    jobs = self.jenkins.get_jobs(self.repo.name)
    retval = ""
    for env_name, env in jobs.items():
        # Pick the first job whose name mentions "prod" / "qa"; None if absent.
        try:
            prod_job = next(obj for obj in env['jobs'] if 'prod' in obj['name'].lower())
        except StopIteration:
            prod_job = None
        try:
            qa_job = next(obj for obj in env['jobs'] if 'qa' in obj['name'].lower())
        except StopIteration:
            qa_job = None
        # Environments without both a prod and a qa job are skipped.
        if not prod_job or not qa_job:
            continue
        task = DeploymentTask(self.repo, prod_job, qa_job, self.jenkins, self.version)
        retval += f"""
Task : Deploy {self.repo.name} {self.version} to PROD
TW Task : {self.tw_task}
Release : {self.release_notes_url}
Diff : {self.get_diff_url(task.prod_hash)}
ENV : {env_name}
QA Hash : {task.qa_hash}
Deploy : {self.repo.get_tag_hash(self.version)}
PROD : {task.prod_details}
JOB URL : {task.prod_job['url']}
"""
    spinner.stop()
    print(retval)
def run(self):
    """Starts all Skyzes Servcies then processes messages"""
    # Bring all SkyZe services up first.
    self.__start_up_skyze_services()

    # Test batches of messages (disabled by default).
    if False:
        self.__send_market_message()
    if False:
        msg = MessageSchedulerTest()
        self.__messaging_service.publishMessage(msg)

    # Start SkyZe by messaging the scheduler to invoke its schedule.
    self.__messaging_service.publishMessage(MessageSchedulerRun())

    # Process messages — the messaging service loops forever routing them.
    spinner = Halo(
        text='SkyZe is alive ... Processing Messages', spinner='dots')
    # spinner.start()
    print("\n\n")
    self.__messaging_service.process_messages()
def train(self, epochs: int) -> float:
    """
    Trains the model and returns the accuracy on the test_data

    NOTE(review): despite the annotation/docstring, this function currently
    prints the confusion matrix and accuracy but does not return a value.

    Parameters:
    ----------
    epochs : int
        Number of epochs for the training

    Returns:
    -------
    float
        Accuracy of the model on the test_data
    """
    trainloader = DataLoader(self.train_data, batch_size=100, shuffle=True, num_workers=2)
    testloader = DataLoader(self.test_data, batch_size=100, shuffle=True, num_workers=2)
    halo = Halo(text='Loading', spinner='dots')
    halo.start()
    for epoch in range(epochs):
        for i, data in enumerate(trainloader, 0):
            features, targets = data
            # Legacy (pre-0.4) PyTorch autograd wrapper API.
            features = Variable(features, requires_grad=False)
            targets = Variable(targets, requires_grad=False)
            self.optimizer.zero_grad()
            outputs = self.model(features)
            loss = self.criterion(outputs, targets)
            loss.backward()
            self.optimizer.step()
            # Progress readout; the /40*100 step scaling presumably assumes
            # 40 batches per epoch — TODO confirm against the dataset size.
            halo.text = f"Epoch:{epoch}, Step:{(i+1)/40*100}, Loss:{loss.data[0]}"
    halo.stop()
    # Evaluate on the full test set in one forward pass.
    features = self.test_data.data_tensor
    targets = self.test_data.target_tensor
    features = Variable(features, requires_grad=False)
    _, output = self.model(features).max(dim=1)
    print(confusion_matrix(targets.numpy(), output.data.numpy()))
    print("accuracy", accuracy_score(targets.numpy(), output.data.numpy()))
def get(app_name, output=None, extract=False, repo="hbb1.oscwii.org"):
    """Download *app_name*'s zip from the given Open Shop Channel repo,
    optionally extracting it into ExtractedApps."""
    if output is None:
        output = app_name + ".zip"
    # e.g. https://hbb1.oscwii.org/hbb/fceugx/fceugx.zip
    # print("Obtaining " + app_name + " from " + repo + "..")
    with Halo(text="Obtaining " + app_name + " from " + repo + "..",
              color="yellow", text_color="yellow"):
        download_url = "https://" + repo + "/hbb/" + app_name + "/" + app_name + ".zip"
        response = requests.get(download_url)
        with open(output, "wb") as f:
            f.write(response.content)
    print(GREEN + "Download success! Output: " + output)
    if extract is True:
        with ZipFile(output, 'r') as archive:
            print("Extracting..")
            archive.extractall("ExtractedApps")
            print("Extracted to ExtractedApps!")
def destroy_cluster(
    cluster_id: str,
    enable_spinner: bool,
    aws_region: str,
) -> None:
    """
    Destroy a cluster.

    Args:
        cluster_id: The ID of the cluster.
        enable_spinner: Whether to enable the spinner animation.
        aws_region: The region the cluster is in.
    """
    with Halo(enabled=enable_spinner):
        # Validate the ID against the live cluster list before destroying.
        check_cluster_id_exists(
            new_cluster_id=cluster_id,
            existing_cluster_ids=existing_cluster_ids(aws_region=aws_region),
        )
        ClusterInstances(
            cluster_id=cluster_id,
            aws_region=aws_region,
        ).destroy()
def main(ARGS):
    """Stream microphone audio through VAD into a DeepSpeech (v1 API) model
    and print each recognized utterance until interrupted."""
    # Load DeepSpeech model
    # When a directory is given, rewrite the ARGS paths to point at the
    # conventional file names inside it.
    if os.path.isdir(ARGS.model):
        model_dir = ARGS.model
        ARGS.model = os.path.join(model_dir, 'output_graph.pb')
        ARGS.alphabet = os.path.join(model_dir, ARGS.alphabet if ARGS.alphabet else 'alphabet.txt')
        ARGS.lm = os.path.join(model_dir, ARGS.lm)
        ARGS.trie = os.path.join(model_dir, ARGS.trie)
    print('Initializing model...')
    logging.info("ARGS.model: %s", ARGS.model)
    logging.info("ARGS.alphabet: %s", ARGS.alphabet)
    model = deepspeech.Model(ARGS.model, ARGS.n_features, ARGS.n_context, ARGS.alphabet, ARGS.beam_width)
    # Language-model decoding is optional: needs both the LM and its trie.
    if ARGS.lm and ARGS.trie:
        logging.info("ARGS.lm: %s", ARGS.lm)
        logging.info("ARGS.trie: %s", ARGS.trie)
        model.enableDecoderWithLM(ARGS.alphabet, ARGS.lm, ARGS.trie, ARGS.lm_alpha, ARGS.lm_beta)

    # Start audio with VAD
    vad_audio = VADAudio(aggressiveness=ARGS.vad_aggressiveness,
                         device=ARGS.device,
                         input_rate=ARGS.rate)
    print("Listening (ctrl-C to exit)...")
    # Yields speech frames plus None markers at utterance boundaries —
    # presumably; confirm against the VADAudio implementation.
    frames = vad_audio.vad_collector()

    # Stream from microphone to DeepSpeech using VAD
    spinner = None
    if not ARGS.nospinner:
        spinner = Halo(spinner='line')
    stream_context = model.setupStream()
    wav_data = bytearray()
    for frame in frames:
        if frame is not None:
            # Speech frame: feed it into the open DeepSpeech stream.
            if spinner: spinner.start()
            logging.debug("streaming frame")
            model.feedAudioContent(stream_context, np.frombuffer(frame, np.int16))
            if ARGS.savewav: wav_data.extend(frame)
        else:
            # End of utterance: finalize the stream, emit text, start anew.
            if spinner: spinner.stop()
            logging.debug("end utterence")
            if ARGS.savewav:
                # Dump the utterance audio to a timestamped WAV file.
                vad_audio.write_wav(os.path.join(ARGS.savewav, datetime.now().strftime("savewav_%Y-%m-%d_%H-%M-%S_%f.wav")), wav_data)
                wav_data = bytearray()
            text = model.finishStream(stream_context)
            print("Recognized: %s" % text)
            stream_context = model.setupStream()
def packer_and_tf(
    environment: Environment,
    system: System,
    force_rebuilt: Optional[Set[Any]],
    build_args: BuildArgs,
) -> Iterator[Dict[Any, Any]]:
    """Yield Terraform output data for the environment, building the AMI with
    Packer up front when *force_rebuilt* is given, or lazily on a
    NoImageError raised by Terraform."""
    packer_config = system.packer_config.from_args(build_args, environment)
    # force_rebuilt not None means: ensure the AMI build before Terraform runs.
    if force_rebuilt is not None:
        build = packer.ensure_ami_build(packer_config, force_rebuilt, system.root_dir)
    else:
        build = None
    tf_vars = environment.make_tf_vars(build, build_args)
    try:
        with cloud.terraform(tf_vars, system.root_dir) as data:
            yield data
    except cloud.NoImageError:
        # Terraform found no usable image: build one now and retry once.
        Halo("[infrastructure] no image found; forcing build").info()
        build = packer.ensure_ami_build(packer_config, set(), system.root_dir)
        tf_vars = environment.make_tf_vars(build, build_args)
        with cloud.terraform(tf_vars, system.root_dir) as data:
            yield data
def _destroy_cluster(
    cluster_id: str,
    transport: Transport,
    enable_spinner: bool,
) -> None:
    """
    Destroy a cluster.

    Args:
        cluster_id: The ID of the cluster.
        transport: The transport to use for any communication with the
            cluster.
        enable_spinner: Whether to enable the spinner animation.
    """
    with Halo(enabled=enable_spinner):
        # Validate the ID against known clusters before tearing anything down.
        check_cluster_id_exists(
            new_cluster_id=cluster_id,
            existing_cluster_ids=existing_cluster_ids(),
        )
        ClusterContainers(
            cluster_id=cluster_id,
            transport=transport,
        ).destroy()
def __init__(self, args):
    """Set up credentials and download state from CLI *args*, authenticate,
    and chdir into the per-title output folder."""
    self.auth = dict(name=args.user, password=args.passwd)
    # Pre-parse the target URL with the class-level regex.
    self.url = self.__urlParse.search(args.url)
    self.worker = args.worker         # number of download workers
    self.scheme = args.scheme
    self.override = args.override
    self.dl_file = args.no_download   # flag from --no-download
    self.starTime = time.time()       # used for elapsed-time reporting — TODO confirm
    self.__setEnVar()
    # Login Handle
    with Halo(text='\n Authenticating') as sp:
        # Abort the whole program when authentication fails.
        if not self.__login():
            sp.fail(' Login Failed :(')
            sys.exit()
        sp.succeed(' Login Success')
    self.__manageVersion()
    # Create Folder
    self.path = os.path.join(os.getcwd(), args.path, self.title)
    if not os.path.exists(self.path):
        os.makedirs(self.path)
    # All subsequent downloads write relative to this folder.
    os.chdir(self.path)
def cli(host, port):
    """Connect to a NiFi server, build the completion tree, then run an
    interactive prompt loop until the user types 'exit'."""
    history = InMemoryHistory()
    with Halo(text='Trying to connect to {}:{}'.format(host, port), spinner='earth') as spinner:
        spinner.text = 'Fetching API'
        try:
            completer = NifiCompleter(NifiCompletion(host, port).create_tree())
            spinner.succeed('Connected')
        except urllib.error.URLError:
            print("\nConnexion refused, make sure that the Nifi server is running")
            exit()

    command = ""
    while command != 'exit':
        try:
            command = prompt("> ", completer=completer, history=history)
            print('You entered:', command)
        except KeyboardInterrupt:
            # Control-C pressed: discard the line and re-prompt.
            continue
        except EOFError:
            # Control-D pressed: leave the loop.
            break
    print('Bye!')
def main():
    """Load settings from conf.ini, print the banner, run once immediately,
    then re-run on the configured interval forever."""
    config = configparser.ConfigParser()
    config.read('conf.ini')
    test_mode = config['SETTINGS'].getboolean('test_mode')
    verbose_mode = config['SETTINGS'].getboolean('verbose_mode')
    sleep_time = int(config['SETTINGS']['sleep_time'])
    spinner = Halo(text='', spinner='dots')
    tm = ''
    if test_mode:
        # Banner suffix flagging test mode (red text, then back to yellow).
        tm = f'{C.R}TEST MODE{C.Y}'
    print(f"""{C.Y}
 ╦ ╦╔═╗╔═╗╦ ╔═╗╦╦═╗
 ║ ║╠═╝╠╣ ║ ╠═╣║╠╦╝ {tm}
 ╚═╝╩ ╚ ╩═╝╩ ╩╩╩╚═ {C.C}v1.0 {C.G}impshum{C.W}
""")
    # Run once now, then schedule recurring runs every sleep_time seconds.
    runner(config, spinner)
    schedule.every(sleep_time).seconds.do(runner, config, spinner)
    while True:
        schedule.run_pending()
        sleep(1)
def diff_all_table_data(self):
    """Diff the data of every table in the public schema, reporting each
    result on a spinner line.

    Returns:
        int: 1 if any table comparison failed, 0 otherwise.
    """
    failures = 0
    print(bold(red('Starting table analysis.')))
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", category=sa_exc.SAWarning)
        tables = sorted(
            self.firstinspector.get_table_names(schema="public"))
        total = len(tables)
        # Bug fix: the original used tables.index(table) inside the loop,
        # which is an O(n) scan per iteration and reports the wrong position
        # when table names repeat; enumerate gives the position directly.
        for position, table in enumerate(tables, start=1):
            with Halo(text=f"Analysing table {table}. "
                           f"[{position}/{total}]",
                      spinner='dots') as spinner:
                result, message = self.diff_table_data(table)
                # True = identical, None = warning/skip, False = mismatch.
                if result is True:
                    spinner.succeed(f"{table} - {message}")
                elif result is None:
                    spinner.warn(f"{table} - {message}")
                else:
                    failures += 1
                    spinner.fail(f"{table} - {message}")
    print(bold(green('Table analysis complete.')))
    if failures > 0:
        return 1
    return 0
def print_build(build: dict) -> None:
    """Print a one-line build summary (status icon via Halo), its timing, and
    the commit artifact when available."""
    header_parts = [
        colored("===", attrs=["dark"]),
        colored(str(build["buildNumber"]), "white"),
    ]
    if build["buildStatus"] == "IN_PROGRESS":
        header_parts.append("in progress")
    header_parts.append(colored(build["sourceVersion"], "blue"))

    # STATUS_MAP picks the Halo method (succeed/fail/info/...) for the status.
    halo = Halo(text=" ".join(header_parts), placement="right")
    status_method = getattr(halo, STATUS_MAP[build["buildStatus"]])
    status_method()

    if "endTime" in build:
        print(indent(formatted_time_ago(build["endTime"]), 4 * " "))
    else:
        print(
            indent("started " + formatted_time_ago(build["startTime"]),
                   4 * " "))

    s3 = boto3.client("s3")
    try:
        cprint(indent(get_artifact(build, "commit.txt"), 4 * " "))
    except s3.exceptions.NoSuchKey:
        print("")
def wrapper_add_spinner(*args, **kwargs):
    """Run the wrapped function under a Halo spinner, interpret its result as
    (status, output), and finish the spinner accordingly. Returns status."""
    human_func_name = humanize(func.__name__)
    human_func_result = humanize(func.__name__, isResult=True)
    flag_skip = False
    with Halo(text=human_func_name, spinner=name) as spinner:
        # The wrapped function receives the spinner as its first argument.
        result = func(spinner, *args, **kwargs)
        # Accepted result shapes: (status, output) tuple, [status, output]
        # list, or a bare bool status.
        if isinstance(result, tuple):
            status, output = result
        elif isinstance(result, list):
            status = result[0]
            output = result[1]
        elif isinstance(result, bool):
            status = result
            output = None
        else:
            # Unrecognized shape: report failure and skip normal finishing.
            status = False
            flag_skip = True
            spinner.fail(
                '{} - Function return unexpected result: {}'.format(
                    human_func_name, str(result)))
        if not flag_skip:
            text = human_func_result
            if output is not None and len(output) > 0:
                text += ': {}'.format(output)
            # Bool status maps to succeed/fail; string status to info/warn.
            if isinstance(status, bool) and status:
                spinner.succeed(text)
            elif isinstance(status, bool) and not status:
                spinner.fail(text)
            else:
                if status == 'info':
                    spinner.info(text)
                else:
                    spinner.warn(text)
    return status
def run_cmd(cmd, data=None):
    """Run *cmd* in a subprocess, capturing combined stdout/stderr.

    Shows a Halo spinner while the command runs unless debug mode is on
    or we are in CI (then the message is logged instead). When
    ``data["debug"]`` is truthy each output line is also logged.

    Args:
        cmd: Command to run; a string is run through the shell, a list
            is executed directly.
        data: Optional dict; only ``debug`` and whatever ``merge_env``
            consumes are read here.

    Returns:
        str: The decoded combined output of the command.

    Raises:
        RuntimeError: If the command exits with a non-zero status.
    """
    data = data or {}
    env = merge_env(data)
    logger.trace(f"merged environment: {env}")
    logger.debug(f"running command: {cmd}")
    debug = data.get("debug", False)
    chunks = []
    with ExitStack() as stack:
        message = f"Running {cmd}"
        if debug or is_ci():
            # Spinners garble CI logs / interleave badly with debug output.
            logger.info(message)
        else:
            stack.enter_context(Halo(text=message, spinner='dots'))
        with subprocess.Popen(cmd,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT,
                              shell=isinstance(cmd, str),
                              env=env) as proc:
            while True:
                raw = proc.stdout.readline()
                # Drain all currently-available lines before re-polling.
                while raw:
                    decoded = raw.decode("UTF-8")
                    if debug:
                        logger.log("OUTPUT", decoded.strip())
                    chunks.append(decoded)
                    raw = proc.stdout.readline()
                if proc.poll() is not None:
                    break
            rc = proc.poll()
    output = "".join(chunks)
    logger.debug(f"result: {rc}")
    if rc != 0:
        logger.error(output)
        raise RuntimeError(f"Command failed with non-zero exit status:{rc} - {cmd}")
    return output
def font(name):
    """
    Installs fonts using curl.

    Args:
        name (str): The name of the font as defined in the
            `font_library` dictionary.
    """
    spinner = Halo(
        text="Font {}".format(name), spinner="dots", placement="right"
    )
    spinner.start()
    fonts_dir = os.path.join(HOME, "Library/Fonts")
    target = os.path.join(fonts_dir, name)
    try:
        # -f: fail on HTTP errors, -L: follow redirects, -o: output path.
        sh.curl("-fLo", target, font_library[name])
    except sh.ErrorReturnCode as err:
        # NOTE(review): this assumes err.stderr is str; sh can expose it
        # as bytes depending on configuration — confirm before relying
        # on the .replace call here.
        err_message = "\n\t" + err.stderr.replace("\n", "\n\t")
        logging.error(
            "Error installing font `%s`: %s", name, err_message
        )
        spinner.fail()
    else:
        spinner.succeed()
def file(path, template_file, load_vars=lambda: {}):
    """
    Installs a template file using symlinks.

    If a file already exists at the specified path and it is not a
    symbolic link, then this function will print an error and return. If
    the file is a symlink to the `build` directory of your dotfiles repo,
    then this will check to see if the template has been modified since
    the file was last built.

    Args:
        path (str): Filesystem path where we should install the filled
            out template file.
        template_file (str): The filename of the template to install. The
            file should be located in the $ROOT/templates directory of
            this repository.
        load_vars (func): A function that will be run when the file is
            built to fill in template information. This is passed in as a
            function so that user input is only asked for when the file
            is built.
    """
    spinner = Halo(text=path, spinner="dots", placement="right")
    spinner.start()
    if os.path.exists(path) and not os.path.islink(path):
        print("Error: {} exists and is not a soft link".format(path))
        spinner.fail()
        return
    try:
        # Load template as a Jinja2 Template. The handle is named
        # `template_fp` so it does not shadow the `template_file`
        # parameter (the original re-bound the parameter to a closed
        # file object here).
        template_path = os.path.join(
            ROOT, os.path.join("templates", template_file)
        )
        template_mtime = os.path.getmtime(template_path)
        with open(template_path, "r") as template_fp:
            template = Template(template_fp.read())
        build_path = os.path.join(
            ROOT, os.path.join("build", os.path.basename(path))
        )
        # A missing build artifact compares as "older than any template".
        if not os.path.exists(build_path):
            build_mtime = 0
        else:
            build_mtime = os.path.getmtime(build_path)
        # Build the template if the template has been modified since last build
        if template_mtime > build_mtime:
            # TODO (plemons): I should only do this if I actually need
            # user input. Theoretically, the load_vars function could
            # just read from a config file making this unnecessary
            spinner.info("Asking for user input for {}".format(path))
            # exist_ok avoids the check-then-create race of the original
            # exists()/makedirs() pair.
            os.makedirs(os.path.dirname(build_path), exist_ok=True)
            with open(build_path, 'w') as outfile:
                outfile.write(template.render(**load_vars()))
        path = os.path.expanduser(path)
        os.makedirs(os.path.dirname(path), exist_ok=True)
        # Replace any stale symlink before linking to the fresh build.
        if os.path.islink(path):
            os.unlink(path)
        os.symlink(build_path, path)
        spinner.succeed()
    except OSError as err:
        print(err)
        spinner.fail()
import time

from halo import Halo

# Display a "Loading" spinner for two seconds. Halo is a context
# manager: it starts the spinner on entry and stops it on exit.
with Halo(text="Loading"):
    time.sleep(2)
def install_homebrew():
    """
    Installs or upgrades homebrew on mac.

    If homebrew is not installed, this command will install it, otherwise
    it will update homebrew to the latest version. Additionally, it will
    offer to upgrade all homebrew packages. Upgrading all packages can
    take a long time, so the user is given the choice to skip the
    upgrade.
    """
    print("Checking homebrew install")
    if sh.which("brew"):
        spinner = Halo(
            text="Updating homebrew", spinner="dots", placement="right"
        )
        spinner.start()
        sh.brew("update")
        spinner.succeed()
        print(
            "Before using homebrew to install packages, we can upgrade "
            "any outdated packages."
        )
        response = user_input("Run brew upgrade? [y|N] ")
        # Guard against empty input: the original `response[0]` raised
        # IndexError when the user just pressed enter. Anything not
        # starting with y/Y counts as "no", matching the [y|N] default.
        if response.strip().lower().startswith("y"):
            spinner = Halo(
                text="Upgrade brew packages", spinner="dots",
                placement="right"
            )
            spinner.start()
            sh.brew("upgrade")
            spinner.succeed()
        else:
            print("Skipped brew package upgrades")
    else:
        # TODO (phillip): Currently, this homebrew installation does not work on a fresh
        # computer. It works from the command line, but not when run from the script. I
        # need to figure out what is going on. It could be because user input is needed.
        spinner = Halo(
            text="Installing homebrew", spinner="dots", placement="right"
        )
        spinner.start()
        try:
            # Fetch the official install script and hand it to ruby.
            script = sh.curl("-fsSL", BREW_GITHUB).stdout
            sh.ruby("-e", script)
            spinner.succeed()
        except sh.ErrorReturnCode:
            logging.error("Unable to install homebrew. Aborting...")
            spinner.fail()
            exit(1)
from halo import Halo  # pip install halo first
import time

# Halo's first positional parameter is `text` (Halo(text='', color=...,
# spinner=...)); the original passed a settings dict positionally, which
# Halo treats as the text to display — the spinner would literally show
# the dict. Pass the settings as keyword arguments instead.
spinner = Halo(text='Calculating...', color='red', spinner='dots8')
spinner.start()
counter = 0
while counter < 51:
    counter += 1
    time.sleep(.1)  # Run time consuming work here
# You can also change properties for spinner as and when you want
spinner.succeed()
class Installer:
    """Concurrent World of Warcraft addon installer.

    Reads an addons list and target path from a JSON config file, then
    downloads and unzips each addon from CurseForge, falling back to the
    classic CurseForge site when the project endpoint 404s. Progress is
    shown with a Halo spinner whose text is updated as addons finish.
    """

    # Primary (project-site) endpoint and the classic-site fallback.
    CURSE_URL = 'https://wow.curseforge.com'
    ALT_CURSE_URL = 'https://www.curseforge.com'
    # Extracts the relative download path from the classic site's HTML.
    ALT_REGEX = re.compile(r'class="download__link" href="(?P<path>.+)"')

    def __init__(self, conf='conf.json', peggle=False):
        """Load configuration.

        Args:
            conf (str): Path to a JSON file with keys ``addons_path``
                (directory to extract into) and ``addons`` (list of
                addon slugs).
            peggle (bool): When True, also install the 'Peggle' addon
                from its GitHub archive.
        """
        with open(conf, 'r') as f:
            config = json.loads(f.read())
        self.addons_path = Path(config['addons_path'])
        self.addons = config['addons']
        self.peggle = peggle
        self.session = None  # aiohttp ClientSession, bound in install()

        # Runtime
        self.loader = None   # Halo spinner, created in install()
        self._done = []      # successfully installed addon names
        self._failed = []    # (addon, error-message) pairs

    def done(self, addon, error=None):
        """Record one finished addon and refresh the spinner text.

        Args:
            addon (str): Addon name that finished.
            error (str | None): Failure reason, or None on success.
        """
        if error is not None:
            self._failed.append((addon, error))
        else:
            self._done.append(addon)
        errors = f', {len(self._failed)} errors' if self._failed else ''
        self.loader.text = f'Installing addons... ({len(self._done) + len(self._failed)}/{len(self.addons)}{errors})'

    async def _alt_install_addon(self, addon):
        """
        Retry on standard Curse website.
        """
        # Scrape the classic site's download page for the real file URL.
        url = f'{self.ALT_CURSE_URL}/wow/addons/{addon}/download'
        async with self.session.get(url) as response:
            if response.status != 200:
                self.done(addon, 'not found')
                return
            match = self.ALT_REGEX.search(await response.text())
        if not match:
            # Page layout changed or addon page is unusual.
            self.done(addon, 'regex error /!\\')
            return
        url = f"{self.ALT_CURSE_URL}{match.group('path')}"
        async with self.session.get(url) as response:
            if response.status != 200:
                self.done(addon, 'not found')
                return
            zip_data = await response.read()
        # Addon archives contain top-level folders; extract in place.
        z = zipfile.ZipFile(BytesIO(zip_data))
        z.extractall(self.addons_path)
        self.done(addon)

    async def _install_addon(self, addon):
        """
        Install from new Curse project website.
        """
        url = f'{self.CURSE_URL}/projects/{addon}/files/latest'
        async with self.session.get(url) as response:
            if response.status != 200:
                # Not on the project site; fall back to the classic site.
                await self._alt_install_addon(addon)
                return
            zip_data = await response.read()
        z = zipfile.ZipFile(BytesIO(zip_data))
        z.extractall(self.addons_path)
        self.done(addon)

    async def _install_peggle(self):
        """
        Custom installation of the addon 'Peggle'.
        See https://github.com/adamz01h/wow_peggle
        """
        url = 'https://github.com/adamz01h/wow_peggle/archive/master.zip'
        async with self.session.get(url) as response:
            if response.status != 200:
                self.done('Peggle', 'could not retrieve archive from github')
                return
            zip_data = await response.read()
        # The GitHub archive nests the addon one level deep; extract to a
        # temp dir and move only the Peggle folder into place.
        tmp_path = Path('/tmp/peggle')
        z = zipfile.ZipFile(BytesIO(zip_data))
        z.extractall(tmp_path)
        shutil.move(
            tmp_path / 'wow_peggle-master/Peggle',
            self.addons_path / 'Peggle',
        )
        self.done('Peggle')

    async def install(self):
        """Install all configured addons concurrently, then print a report."""
        tasks = [self._install_addon(addon) for addon in self.addons]
        if self.peggle is True:
            tasks.append(self._install_peggle())
            # Keep self.addons in sync so done() reports the right total.
            self.addons.append('Peggle')
        self.loader = Halo(f'Installing addons... (0/{len(tasks)})')
        self.loader.start()
        async with ClientSession() as self.session:
            await asyncio.gather(*tasks)
        self.loader.stop()
        for addon, error in self._failed:
            print(f"Failed to install: '{addon}' ({error})")
        for addon in self._done:
            print(f"Successfully installed: '{addon}'")