def get_uploaded_tracks(self, user):
    """Fetch every track uploaded by *user* and hand them off for download."""
    progress = Halo(text="Fetching uploads")
    progress.start()
    uploads = self.client.get("/tracks", id=user.id)
    progress.stop()
    print("Found {} uploads".format(len(uploads)))
    self.get_multiple_tracks(uploads)
def get_liked_tracks(self):
    """Resolve the profile's /likes endpoint and download all liked tracks."""
    progress = Halo(text="Fetching liked tracks")
    progress.start()
    likes = self.client.get("/resolve", url=self.url + "/likes")
    progress.stop()
    print("{} liked track(s) found".format(len(likes)))
    self.get_multiple_tracks(likes)
def get_recommended_tracks(self, track, no_of_tracks=10):
    """Fetch up to *no_of_tracks* tracks related to *track* (v2 API) and download them."""
    query = {
        "client_id": secret,
        "limit": no_of_tracks,
        "offset": 0
    }
    progress = Halo(text="Fetching tracks similar to {}".format(track.title))
    progress.start()
    related_url = "{}/tracks/{}/related".format(self.API_V2, track.id)
    response = self.session.get(related_url, params=query)
    progress.stop()
    similar = json.loads(response.text)["collection"]
    self.get_multiple_tracks(similar)
def main(ARGS):
    """Stream microphone audio through VAD segmentation into DeepSpeech and
    print a recognized transcript at the end of each utterance.

    Runs until interrupted (ctrl-C); optionally saves each utterance as a
    WAV file when ARGS.savewav is set.
    """
    # Load DeepSpeech model
    if os.path.isdir(ARGS.model):
        # A directory was given: resolve the standard file names inside it.
        model_dir = ARGS.model
        ARGS.model = os.path.join(model_dir, 'output_graph.pb')
        ARGS.alphabet = os.path.join(model_dir, ARGS.alphabet if ARGS.alphabet else 'alphabet.txt')
        ARGS.lm = os.path.join(model_dir, ARGS.lm)
        ARGS.trie = os.path.join(model_dir, ARGS.trie)
    print('Initializing model...')
    logging.info("ARGS.model: %s", ARGS.model)
    logging.info("ARGS.alphabet: %s", ARGS.alphabet)
    model = deepspeech.Model(ARGS.model, ARGS.n_features, ARGS.n_context, ARGS.alphabet, ARGS.beam_width)
    # Language model decoding is optional: only enabled when both files given.
    if ARGS.lm and ARGS.trie:
        logging.info("ARGS.lm: %s", ARGS.lm)
        logging.info("ARGS.trie: %s", ARGS.trie)
        model.enableDecoderWithLM(ARGS.alphabet, ARGS.lm, ARGS.trie, ARGS.lm_alpha, ARGS.lm_beta)
    # Start audio with VAD
    vad_audio = VADAudio(aggressiveness=ARGS.vad_aggressiveness,
                         device=ARGS.device,
                         input_rate=ARGS.rate)
    print("Listening (ctrl-C to exit)...")
    frames = vad_audio.vad_collector()
    # Stream from microphone to DeepSpeech using VAD
    spinner = None
    if not ARGS.nospinner:
        spinner = Halo(spinner='line')
    stream_context = model.setupStream()
    wav_data = bytearray()
    for frame in frames:
        if frame is not None:
            # Voiced frame inside an utterance: feed it to the decoder.
            if spinner: spinner.start()
            logging.debug("streaming frame")
            model.feedAudioContent(stream_context, np.frombuffer(frame, np.int16))
            if ARGS.savewav: wav_data.extend(frame)
        else:
            # None marks end of utterance: finish the stream, emit text,
            # and open a fresh stream context for the next utterance.
            if spinner: spinner.stop()
            logging.debug("end utterence")
            if ARGS.savewav:
                vad_audio.write_wav(os.path.join(ARGS.savewav, datetime.now().strftime("savewav_%Y-%m-%d_%H-%M-%S_%f.wav")), wav_data)
                wav_data = bytearray()
            text = model.finishStream(stream_context)
            print("Recognized: %s" % text)
            stream_context = model.setupStream()
def get_charted_tracks(self, kind, no_of_tracks=10):
    """Fetch up to *no_of_tracks* charted tracks of the given *kind*
    ("top"/"trending") for the configured genre and download them.

    Pages through the charts endpoint until enough tracks are collected
    or the API stops providing a next page.
    """
    url_params = {
        "limit": no_of_tracks,
        "genre": "soundcloud:genres:" + self.args.genre,
        "kind": kind,
        "client_id": secret
    }
    url = "{}/charts".format(self.API_V2)
    tracks = []
    spinner = Halo(text="Fetching {} {} tracks".format(no_of_tracks, kind))
    spinner.start()
    # Bug fix: the last page may have no "next_href" (missing or None);
    # previously this raised KeyError or issued a GET on None.  Stop
    # paging when the API provides no further page.
    while url and len(tracks) < no_of_tracks:
        response = self.session.get(url, params=url_params)
        json_payload = json.loads(response.text)
        tracks += json_payload["collection"]
        url = json_payload.get("next_href")
    spinner.stop()
    # Charts wrap each track in a {"track": ...} envelope; unwrap and cap.
    tracks = map(lambda x: x["track"], tracks[:no_of_tracks])
    self.get_multiple_tracks(tracks)
def main(self):
    """Entry point: resolve the CLI URL and dispatch to the matching
    downloader (user profile, single track, playlist, or a list of either).
    """
    os.chdir(self.dirname)
    # Chart downloads are flag-driven and need no URL.
    if self.args.top:
        self.get_charted_tracks("top")
    if self.args.new:
        self.get_charted_tracks("trending")
    spinner = Halo(text="Resolving URL")
    spinner.start()
    data = self.client.get("/resolve", url=self.url) if self.url else None
    spinner.stop()
    if isinstance(data, resource.Resource):
        if data.kind == "user":
            print("User profile found")
            # Save the user's tracks into a folder named after them.
            folder = self.validate_name(data.username)
            if not os.path.isdir(folder):
                os.mkdir(folder)
            os.chdir(os.path.join(os.getcwd(), folder))
            print("Saving in: " + os.getcwd())
            if self.args.all or self.args.likes:
                self.get_liked_tracks()
            # --likes alone means likes only; otherwise also grab uploads.
            if not self.args.likes:
                self.get_uploaded_tracks(data)
        elif data.kind == "track":
            print("Single track found")
            print("Saving in: " + os.getcwd())
            if self.args.similar:
                self.get_recommended_tracks(data)
            self.get_single_track(data)
        elif data.kind == "playlist":
            print("Single playlist found.")
            # Playlist folder is named after the playlist's owner.
            folder = self.validate_name(data.user["username"])
            if not os.path.isdir(folder):
                os.mkdir(folder)
            os.chdir(os.path.join(os.getcwd(), str(folder)))
            self.get_playlist(data)
    elif isinstance(data, resource.ResourceList):
        # A list resolves homogeneously; inspect the first element's kind.
        if data[0].kind == "playlist":
            print("%d playlists found" % (len(data)))
            for playlist in data:
                self.get_playlist(playlist)
        elif data[0].kind == "track":
            self.get_multiple_tracks(data)
def get_liked_tracks(self, user):
    """Fetch *user*'s liked tracks (up to args.limit, default 9999),
    skipping playlist likes and undownloadable tracks, then download them.
    """
    no_of_tracks = self.args.limit if self.args.limit else 9999
    params = {
        "client_id": client_id,
        "limit": no_of_tracks,
        "offset": 0
    }
    tracks = []
    spinner = Halo(text="Fetching uploads")
    spinner.start()
    url = "{}/users/{}/likes".format(self.API_V2, user.id)
    while url:
        json_payload = self.session.get(url, params=params).json()
        # Keep only plain track likes (drop playlist likes), then keep the
        # downloadable ones.  Perf fix: filter only the NEW page instead of
        # re-filtering the whole accumulated list every iteration, which
        # re-ran can_download_track O(n^2) times across pages.
        page_items = [x for x in json_payload["collection"] if 'playlist' not in x]
        tracks += [item for item in page_items
                   if self.can_download_track(item['track'])]
        if len(tracks) >= no_of_tracks:
            break
        url = json_payload.get("next_href", None)
    spinner.stop()
    tracks = [track['track'] for track in tracks]
    print("Found {} likes".format(len(tracks)))
    self.get_multiple_tracks(tracks)
def create(name, public):
    """ Create a new Mission in your account. """
    if not name:
        print("Please specify a name with the -n or --name flag.")
        return
    spinner = Halo("Uploading files.", spinner="dot").start()
    try:
        mission = galileo.missions.create_mission(name, public=public)
    except Exception as e:
        print("Error:", e)
        return
    finally:
        # Stop the spinner on both the success and failure paths.
        spinner.stop()
    print("Created Mission:", mission.name)
    print("Mission ID: ", mission.mission_id)
    print("Public: ", str(bool(public)))
def handle(self, *args, **options):
    """Create gamelogs for every NBA game played yesterday."""
    print("Updating gamelogs...")
    cutoff = date.today() - timedelta(days=1)
    for game in NBAGame.objects.filter(date=cutoff):
        progress = Halo(text=str(game), spinner='dots')
        progress.start()
        try:
            game.create_gamelogs()
            progress.succeed()
        except Exception as e:
            progress.fail()
            print(e)
        progress.stop()
        # Throttle between games to avoid hammering the data source.
        time.sleep(1)
    print("Done.")
def _delete(stack_name):
    """Delete the CloudFormation stack *stack_name* and poll until it is gone.

    Success is signalled by describe_stacks raising a ClientError containing
    'does not exist'; any other terminal status raises BadStatusException.
    """
    spinner = Halo(text='deleting', spinner='dots')
    spinner.start()
    client_cf = boto3.client('cloudformation')
    status = None
    try:
        # Bug fix: the stack name was hard-coded to 'sprite', ignoring the
        # stack_name argument.  Also issue the delete request once, then
        # poll the status instead of re-deleting on every iteration.
        client_cf.delete_stack(StackName=stack_name)
        while True:
            aws_response = client_cf.describe_stacks(StackName=stack_name)
            spinner.text = 'checking delete status'
            status = aws_response['Stacks'][0]['StackStatus']
            if status != 'DELETE_IN_PROGRESS':
                raise BadStatusException(status)
    except BadStatusException:
        print(f"received bad status: {status}")
    except ClientError as e:
        # 'does not exist' means the delete completed.
        if 'does not exist' in str(e):
            spinner.succeed('complete')
        else:
            raise e
    finally:
        spinner.stop()
def ls(): """ List all Galileo Universes your account belongs to. """ spinner = Halo("Checking your Universe membership", spinner="dot").start() universes_ls = galileo.universes.list_universes() spinner.stop() universes_ls = [universe.__dict__ for universe in universes_ls] universes_df = pandas.json_normalize(universes_ls) universes_df['creation_timestamp'] = pandas.to_datetime( universes_df.creation_timestamp) universes_df = universes_df.sort_values(by="creation_timestamp", ascending=False) universes_df = universes_df[[ "universe_id", "name", "creation_timestamp" ]] spinner.stop() click.echo(universes_df)
def download_file(session, filename, url, params=None, silent=False):
    """Stream *url* into *filename* in 1 KB chunks with a tqdm progress bar.

    Skips the download (returning the filename) when a matching file already
    exists.  With silent=True, suppresses all console output.
    """
    # Bug fix: `params={}` was a mutable default argument shared across
    # calls; use None as the sentinel and build a fresh dict per call.
    if params is None:
        params = {}
    if not silent:
        spinner = Halo(text='Connecting to stream...')
        spinner.start()
    response = session.get(url, stream=True, params=params)
    if not silent:
        spinner.stop()
    # Servers may omit content-length; fall back to 0 (unknown size).
    file_size = float(response.headers['content-length']
                      ) if 'content-length' in response.headers else 0
    if does_file_exist(filename, file_size):
        if not silent:
            print("{} already exists, skipping\n".format(filename))
        return filename
    if not silent:
        print("Saving as: {}".format(filename))
        print("File Size: {0:.2f}".format(file_size / (1000**2)))
    with open(filename, 'wb') as file:
        for chunk in tqdm(response.iter_content(chunk_size=1024),
                          total=file_size / 1024 + 1,
                          unit='KB',
                          unit_scale=True,
                          disable=silent):
            if chunk:
                file.write(chunk)
    if not silent:
        print("Download complete\n")
def search_subtitles(search_term, torrent_name):
    """Search OpenSubtitles and Titlovi for subtitles matching the query.

    Returns a list of [label, ...] rows bracketed by 'No subtitle' and
    'Custom Search' sentinel entries.
    """
    spinner = Halo(text=f'Searching for {search_term} subtitles', spinner='dots')
    spinner.start()
    header = [['No subtitle', None, None]]
    footer = [['Custom Search', None, None]]
    srch, s, e, g, movie = get_search_term(search_term, torrent_name)
    open_results = opensubs.opensub_search(search_term)
    if movie:
        titlovi_results = titlovi_search(srch, g=g, movie=True)
    else:
        titlovi_results = titlovi_search(srch, s=s, e=e)
    combined = header + open_results + titlovi_results + footer
    print(combined)
    list_format(combined)
    spinner.stop()
    return combined
def read_screen():
    """Takes a screenshot and processes the image. Then feeds the image to Google Tesseract OCR

    Returns:
        String -- The string(s) found in the image
    """
    spinner = Halo(text='Reading screen', spinner='bouncingBar')
    spinner.start()
    shot_path = os.path.join('Screens', 'to_ocr.png')
    raw_image = screen_grab(save=False, location=shot_path)
    processed = preprocess_img(raw_image)
    # Run the preprocessed frame through Tesseract.
    recognized = pytesseract.image_to_string(Image.fromarray(processed))
    spinner.succeed()
    spinner.stop()
    return recognized
def ls(index, id, short, name, user_id, page, items, head):
    """ List the Missions in your Galileo profile. """
    spinner = Halo("Retrieving information", spinner="dot").start()
    # Current user's profile — their own missions are always included below.
    self = galileo.profiles.self()
    spinner.stop()
    spinner = Halo("Retrieving your Mission", spinner="dot").start()
    user_id += (self.user_id, )
    missions = galileo.missions.list_missions(
        mission_ids=list(id),
        names=list(name),
        user_ids=list(user_id),
        page=page,
        items=items,
    )
    if len(missions) == 0:
        spinner.stop()
        click.echo("No mission matches that query.")
        return
    if isinstance(index, int):
        # NOTE(review): missions[index] looks like a single Mission, yet it is
        # iterated in the comprehension below — confirm list_missions' return
        # type or whether this should be missions[index:index + 1].
        missions_ls = missions[index]
    else:
        missions_ls = missions
    missions_ls = [mission.__dict__ for mission in missions_ls]
    missions_df = pandas.json_normalize(missions_ls)
    missions_df['creation_timestamp'] = pandas.to_datetime(
        missions_df.creation_timestamp)
    missions_df = missions_df.sort_values(by="creation_timestamp",
                                          ascending=False)
    # --short trims the table to the three identifying columns.
    if short:
        missions_df = missions_df[[
            "name",
            "mission_id",
            "public",
        ]]
    else:
        missions_df = missions_df[[
            "name",
            "mission_id",
            "source_storage_id",
            "source_path",
            "destination_storage_id",
            "destination_path",
            "description",
            "public",
            "creation_timestamp",
        ]]
    spinner.stop()
    if head:
        click.echo(missions_df.head(head))
    else:
        click.echo("(Displaying only first 30 items)\n")
        click.echo(missions_df.head(30))
def train(self, epochs: int) -> float:
    """
    Trains the model and returns the accuracy on the test_data

    Parameters:
    ----------
    epochs : int
        Number of epochs for the training

    Returns:
    -------
    float
        Accuracy of the model on the test_data
    """
    # NOTE(review): the docstring promises a float return, but nothing is
    # returned below (accuracy is only printed) — confirm intent.
    trainloader = DataLoader(self.train_data, batch_size=100, shuffle=True, num_workers=2)
    # NOTE(review): testloader is built but never used; evaluation below uses
    # the raw data/target tensors instead — confirm whether batching was meant.
    testloader = DataLoader(self.test_data, batch_size=100, shuffle=True, num_workers=2)
    halo = Halo(text='Loading', spinner='dots')
    halo.start()
    for epoch in range(epochs):
        for i, data in enumerate(trainloader, 0):
            features, targets = data
            # Variable() is legacy (pre-0.4) PyTorch; a no-op wrapper on
            # modern versions.
            features = Variable(features, requires_grad=False)
            targets = Variable(targets, requires_grad=False)
            self.optimizer.zero_grad()
            outputs = self.model(features)
            loss = self.criterion(outputs, targets)
            loss.backward()
            self.optimizer.step()
            # NOTE(review): loss.data[0] only works on PyTorch < 0.5 (raises
            # on newer versions; loss.item() is the modern spelling).  The
            # hard-coded /40 presumably reflects the number of batches —
            # TODO confirm.
            halo.text = f"Epoch:{epoch}, Step:{(i+1)/40*100}, Loss:{loss.data[0]}"
    halo.stop()
    # Evaluate on the full test set in one forward pass.
    features = self.test_data.data_tensor
    targets = self.test_data.target_tensor
    features = Variable(features, requires_grad=False)
    _, output = self.model(features).max(dim=1)
    print(confusion_matrix(targets.numpy(), output.data.numpy()))
    print("accuracy", accuracy_score(targets.numpy(), output.data.numpy()))
def get_file(self):
    """Download the instrument file over HTTP or FTP and return it as a
    file-like object (io.StringIO for HTTP; FTP path delegates to
    __get_file_ftp).
    """
    spinner = Halo(text='Getting File', spinner='simpleDotsScrolling')
    spinner.start()
    try:
        # (username, password) tuple, or None for unauthenticated access.
        authentication = (
            (self.authentication["username"], self.authentication["password"])
            if self.authentication else None)
    except KeyError as e:
        spinner.stop()
        # presumably restart() does not return (exits or re-prompts) —
        # TODO confirm; otherwise execution falls through with
        # `authentication` unbound.
        self.restart(
            colored("Invalid authentication details: {}".format(e), "red"))
    try:
        if self.protocol == "http":
            data_file = requests.get(
                self.url.format(date=self.date), auth=authentication
            ).content
            data = io.StringIO(data_file.decode(self.encoding))
            # data_file = open("./dummy_data/TradedInstrument.txt",
            #                  "r", encoding=self.encoding)
            # data = data_file
        if self.protocol == "ftp":
            ftp = FTP(self.host)
            if authentication:
                ftp.login(authentication[0], authentication[1])
            data = self.__get_file_ftp(ftp, self.filename)
    except Exception as e:
        spinner.stop()
        self.restart(colored("Could not get file: {}".format(e), "red"))
    spinner.stop()
    # NOTE(review): if self.protocol is neither "http" nor "ftp", `data` is
    # unbound here and this raises NameError — confirm the allowed protocols.
    return data
def ls(lz_ids, userid, page=0, items=10, everything=False):
    """ List all Landing Zones in your Galileo account. """
    spinner = Halo("Retrieving lzs", spinner="dot")
    spinner.start()
    profile = galileo.profiles.self()
    # Unless --everything is set, restrict the listing to the current user.
    owner_ids = [] if everything else [profile.userid]
    lzs = galileo.lz.list_lz(
        lz_ids=list(lz_ids),
        userids=list(owner_ids),
        page=page,
        items=items,
    )
    if len(lzs) == 0:
        spinner.stop()
        click.echo("No Landing Zones found.")
        return
    rows = [lz.__dict__ for lz in lzs]
    lzs_df = pandas.json_normalize(rows)
    lzs_df = lzs_df[[
        "name",
        "arch",
        "status",
        "userid",
        "cpu_count",
        "gpu_count",
        "memory_amount",
    ]]
    spinner.stop()
    click.echo(lzs_df.head(items))
def generate_password(self):
    """Generates a complex password

    Raises:
        UserExits: user types "exit" in length
        EmptyField: user leaves length field empty
        PasswordNotLongEnough: raised when user enters a length below 8

    Returns:
        str -- complex password
    """
    length = input("Enter Length for Password (At least 8): ")
    if length.lower().strip() == "exit":
        raise UserExits
    elif length.strip() == "":
        raise EmptyField
    elif int(length) < 8:
        raise PasswordNotLongEnough
    else:
        # Security fix: random.choice is not cryptographically secure;
        # password material must come from the `secrets` module.
        import secrets
        spinner = Halo(text=colored("Generating Password", "green"),
                       spinner=self.dots_, color="green")
        spinner.start()
        # Each character is drawn from a randomly chosen character class.
        char_classes = [
            string.ascii_lowercase,
            string.ascii_uppercase,
            string.digits,
            self.specialChar_
        ]
        password = [secrets.choice(secrets.choice(char_classes))
                    for _ in range(int(length))]
        finalPass = "".join(password)
        spinner.stop()
        return finalPass
def __refresh_token(url, username, password, config_dir, headless=True, spinner=True):
    """Log in through the SSO browser driver and return a fresh token,
    prompting for an MFA code when the IdP requires one.

    The spinner flag toggles progress display; the browser is always closed.
    """
    spinner = Halo(enabled=spinner)
    # Bug fix: if SSODriver(...) itself raised, the outer `finally` used to
    # hit a NameError on `driver`; initialize it and guard the close.
    driver = None
    try:
        spinner.start(SPINNER_MSGS['token_refresh'])
        driver = SSODriver(url, username, headless=headless, cookie_dir=config_dir)
        try:
            return driver.refresh_token(username, password)
        except MFACodeNeeded as e:
            # Pause the spinner so the MFA prompt is readable.
            spinner.stop()
            mfacode = inquirer.text(message='MFA Code')
            spinner.start(SPINNER_MSGS['mfa_send'])
            driver.send_mfa(e.mfa_form, mfacode)
            spinner.start(SPINNER_MSGS['token_refresh'])
            return driver.get_token()
        except AlertMessage as e:
            sys.exit(e)
        finally:
            spinner.stop()
    except KeyboardInterrupt as e:
        spinner.stop()
        raise e
    finally:
        if driver is not None:
            driver.close()
def add(module_names: List[str]):
    """Install the named twin modules into the current configuration."""
    # NOTE: a module name can be:
    # local file, PyPI name, git repo/GitHub?
    twin = load_config()
    installed = []
    builtin_modules_map = {
        "owntwin.base": "owntwin.builtin_modules.base",
    }
    spinner = Halo(text="", spinner="bouncingBar")
    for requested in module_names:
        # Map shorthand names for bundled modules to their real import path.
        resolved = builtin_modules_map.get(requested, requested)
        try:
            module = import_module(resolved)
            spinner.start(
                "Installing {}".format(
                    typer.style(module.id, fg=typer.colors.GREEN, bold=True)
                )
            )
            installed.append((module.id, module.module))
            package = Package(".")
            module.add(
                twin["bbox"],
                package,
                CACHE_DIR,
            )
            spinner.succeed()
        except Exception as err:
            logger.error(err)
            spinner.fail()
    spinner.stop()
    # Persist the configuration only when something was installed.
    if installed:
        for (module_id, module_def) in installed:
            twin["modules"][module_id] = module_def
        save_config(twin, FILENAME)
def __save_password(self, filename, data, nonce, website):
    """Saves password to DB

    Arguments:
        filename {str} -- DB to save to
        data {str} -- password that will be saved
        nonce {hexadecimal} -- converted from byte type to hexadecimal as byte type is not supported in JSON
        website {str} -- name of the website for the given password
    """
    spinner = Halo(text=colored("Saving", "green"),
                   spinner=self.dots_, color="green")
    spinner.start()
    if os.path.isfile(filename):
        with open(filename, 'r') as jsondata:
            jfile = json.load(jsondata)
        # Idiom fix: the original duplicated the whole read/write sequence
        # in a try/except KeyError just to create the website entry;
        # setdefault covers both cases in one line.
        jfile.setdefault(website, {})
    else:
        # Initialize the file in case it doesn't exist off the start.
        jfile = {website: {}}
    jfile[website]["nonce"] = nonce
    jfile[website]["password"] = data
    with open(filename, 'w') as jsondata:
        json.dump(jfile, jsondata, sort_keys=True, indent=4)
    spinner.stop()
    print(
        colored(f"{self.checkmark_} Saved successfully. Thank you!", "green"))
def main():
    """Prompt for Instagram credentials and run the follow-request accept loop."""
    s = Style.RESET_ALL
    c = Fore.LIGHTRED_EX
    os.system('cls')
    os.system('title Instagram Auto Acceptor V3.1 ^| Menu')
    input_username = input(f"Enter user > {c}>{s} :")
    time.sleep(1)
    input_password = input(f" [$] Enter pass > {c}>{s} :")
    print(' ')
    time.sleep(1)
    try:
        input_delay = int(input(f"[!] Click Enter to start {c}>{s} "))
        print('____________________________')
    except ValueError:
        # Pressing Enter (empty/non-numeric input) falls back to 5 seconds.
        input_delay = 5
    post = {
        'username': input_username,
        'enc_password': '******' + input_password
    }
    spinner = Halo(text='Loading', spinner='dots', color='red')
    spinner.start()
    i = InstagramAccept(post)
    spinner.stop()
    os.system('cls')
    slow(" LOGIN ...")
    print(" ")
    # Idiom fix: test truthiness rather than comparing `== True` (PEP 8).
    if i.login():
        while True:
            i.loop()
            spinner.start()
            time.sleep(input_delay)
            spinner.stop()
    else:
        print('>> Login ERROR !')
        input()
        main()
def main():
    """Prompt for Instagram credentials (Arabic UI) and run the accept loop."""
    s = Style.RESET_ALL
    c = Fore.LIGHTRED_EX
    os.system('cls')
    os.system('title Instagram Auto Acceptor V3.1 ^| Menu')
    input_username = input(f"ضع يوزر حسابك > {c}>{s} :")
    time.sleep(1)
    input_password = input(f" [$] ضع كلمة السر > {c}>{s} :")
    print(' ')
    #vv1ck
    time.sleep(1)
    try:
        input_delay = int(input(f"[!] اضغط انتر للبدء {c}>{s} "))
        print('____________________________')
    except ValueError:
        # Pressing Enter (empty/non-numeric input) falls back to 5 seconds.
        input_delay = 5
    post = {
        'username': input_username,
        'enc_password': '******' + input_password
    }
    spinner = Halo(text='Loading', spinner='dots', color='red')
    spinner.start()
    i = InstagramAccept(post)
    spinner.stop()
    os.system('cls')
    slow(" LOGIN ...")
    print(" ")
    # Idiom fix: test truthiness rather than comparing `== True` (PEP 8).
    if i.login():
        while True:
            i.loop()
            spinner.start()
            time.sleep(input_delay)
            spinner.stop()
    else:
        print('>> خطأ في تسجيل الدخول !')
        input()
        main()
def ls(head): """ List all Cargo Bays attached to your Galileo Account. """ spinner = Halo("Getting the list of your Cargo Bays", spinner="dot").start() try: cargobays_ls = galileo.cargobays.list_cargobays() spinner.stop() except Exception as e: spinner.stop() print("Problem retrieving Cargo Bay list.", e) return cargobays_ls = [cargobay.__dict__ for cargobay in cargobays_ls] cargobays_df = pandas.json_normalize(cargobays_ls) cargobays_df['creation_timestamp'] = pandas.to_datetime( cargobays_df.creation_timestamp) cargobays_df = cargobays_df.sort_values(by="creation_timestamp", ascending=False) cargobays_df = cargobays_df[[ "name", "storage_id", "storage_type", "creation_timestamp" ]] spinner.stop() if head: click.echo(cargobays_df.head(head)) else: print("Displaying first 10 Cargo Bays.") click.echo(cargobays_df.head(10))
def test_spinner_getters_setters(self):
    """Test spinner getters and setters.
    """
    spinner = Halo()
    # Defaults on a freshly constructed Halo instance.
    self.assertEqual(spinner.text, '')
    self.assertIsNone(spinner.text_color, None)
    self.assertEqual(spinner.color, 'cyan')
    self.assertIsNone(spinner.spinner_id)
    spinner.spinner = 'dots12'
    spinner.text = 'bar'
    spinner.text_color = 'red'
    spinner.color = 'red'
    self.assertEqual(spinner.text, 'bar')
    self.assertEqual(spinner.text_color, 'red')
    self.assertEqual(spinner.color, 'red')
    # Named spinners resolve only on supported terminals; otherwise Halo
    # falls back to the default spinner.
    if is_supported():
        self.assertEqual(spinner.spinner, Spinners['dots12'].value)
    else:
        self.assertEqual(spinner.spinner, default_spinner)
    spinner.spinner = 'dots11'
    if is_supported():
        self.assertEqual(spinner.spinner, Spinners['dots11'].value)
    else:
        self.assertEqual(spinner.spinner, default_spinner)
    # Unknown spinner names also fall back to the default spinner.
    spinner.spinner = 'foo_bar'
    self.assertEqual(spinner.spinner, default_spinner)
    # Color is None
    spinner.text_color = None
    spinner.color = None
    spinner.start()
    spinner.stop()
    self.assertIsNone(spinner.text_color)
    self.assertIsNone(spinner.color)
def isSpinning(spinnerState):
    """Drive a Halo spinner according to the shared spinner state machine.

    NOTE(review): presumably runs on its own thread while another thread
    mutates the state, but both `spinnerState` and `spinnerState_inside`
    are never reassigned inside this function, so the loop body's branches
    can never fire as written — confirm how state updates reach this loop.
    """
    spinnerState_inside = None
    _spinner = Halo(text='Waiting for Git to come online', spinner='dots')
    if spinnerState == State.SPINNER_DEAD:
        _spinner.start()
    while spinnerState == State.SPINNER_ALIVE:
        if spinnerState_inside == State.SPINNER_STOP:
            _spinner.stop()
        if spinnerState_inside == State.SPINNER_SUCCEED:
            _spinner.succeed(text=SPINNER_PERIST_DATA_SUCCEED)
        if spinnerState_inside == State.SPINNER_WARNING:
            _spinner.warn(text=SPINNER_PERIST_DATA_WARNING)
        if spinnerState_inside == State.SPINNER_FAIL:
            _spinner.fail(text=SPINNER_PERIST_DATA_FAIL)
            exit(1)
        if spinnerState_inside == State.SPINNER_SUCCEED_SOCK:
            _spinner.succeed(text='is this getting here')
        if spinnerState_inside == State.SPINNER_FAIL_SOCK:
            _spinner.fail(text=SPINNER_PERIST_DATA_FAIL_SOCK)
            exit(1)
            # NOTE(review): unreachable after exit(1) — confirm intent.
            continue
        time.sleep(.5)
def audio_consumer(vad_audio, websocket):
    """blocks

    Forward VAD-segmented audio blocks to the websocket; a None block marks
    the end of an utterance, triggering an optional WAV dump and an 'EOS'
    text frame.  Only runs while the module-level `ready` flag is set and
    the websocket is active.
    """
    spinner = None
    if not ARGS.nospinner:
        spinner = Halo(
            spinner='line')  # circleHalves point arc boxBounce2 bounce line
    length_ms = 0
    wav_data = bytearray()
    for block in vad_audio.vad_collector():
        if ready and websocket.is_active:
            if block is not None:
                # Voiced audio: stream it out and track utterance length.
                if not length_ms:
                    logging.debug("begin utterence")
                if spinner: spinner.start()
                logging.log(5, "sending block")
                websocket.send_binary(block)
                if ARGS.savewav: wav_data.extend(block)
                length_ms += vad_audio.block_duration_ms
            else:
                # End of utterance: flush, optionally save, and signal EOS.
                if spinner: spinner.stop()
                if not length_ms:
                    raise RuntimeError("ended utterence without beginning")
                logging.debug("end utterence")
                if ARGS.savewav:
                    vad_audio.write_wav(
                        os.path.join(
                            ARGS.savewav,
                            datetime.now().strftime(
                                "savewav_%Y-%m-%d_%H-%M-%S_%f.wav")),
                        wav_data)
                    wav_data = bytearray()
                logging.info("sent audio length_ms: %d" % length_ms)
                logging.log(5, "sending EOS")
                websocket.send_text('EOS')
                length_ms = 0
def get_answer(question, optionA, optionB, optionC):
    """Google the question, count how often each option appears in the
    results page, and print the three options with the likely answer
    highlighted in green alongside each option's percentage score.
    """
    spinner = Halo(text='Finding Answer for you ..', spinner='bouncingBar')
    spinner.start()
    r = requests.get("http://google.com/search?q=" + question)
    # Cleanup: removed unused locals (a BeautifulSoup parse whose result was
    # never read, and a stray `s=1`) — scoring only uses the raw HTML text.
    res = str(r.text)
    countoption1 = res.count(optionA)
    countoption2 = res.count(optionB)
    countoption3 = res.count(optionC)
    maxcount = max(countoption1, countoption2, countoption3)
    # +0.1 guards against division by zero when no option matches.
    sumcount = countoption1 + countoption2 + countoption3
    sumcount = sumcount + 0.1
    probA = round(((countoption1 / sumcount) * 100), 2)
    probB = round(((countoption2 / sumcount) * 100), 2)
    probC = round(((countoption3 / sumcount) * 100), 2)
    print("\n" + question + "\n")
    if countoption1 == maxcount:
        print(bcolors.OKGREEN + "A{" + optionA + " }" + bcolors.ENDC + " -: " + str(probA))
        print(bcolors.BOLD + "B{" + optionB + " }" + bcolors.ENDC + " -: " + str(probB))
        print(bcolors.BOLD + "C{" + optionC + " }" + bcolors.ENDC + " -: " + str(probC) + "\n")
    elif countoption2 == maxcount:
        print(bcolors.BOLD + "A{" + optionA + " }" + bcolors.ENDC + " -: " + str(probA))
        print(bcolors.OKGREEN + "B{" + optionB + " }" + bcolors.ENDC + " -: " + str(probB))
        print(bcolors.BOLD + "C{" + optionC + " }" + bcolors.ENDC + " -: " + str(probC) + "\n")
    else:
        print(bcolors.BOLD + "A{" + optionA + " }" + bcolors.ENDC + " -: " + str(probA))
        print(bcolors.BOLD + "B{" + optionB + " }" + bcolors.ENDC + " -: " + str(probB))
        print(bcolors.OKGREEN + "C{" + optionC + " }" + bcolors.ENDC + " -: " + str(probC) + "\n")
    spinner.succeed()
    spinner.stop()
def kick(station_id, user_ids):
    """ Remove users from a Station. """
    spinner = Halo("Removing users from station", spinner="dot").start()
    for user in user_ids:
        try:
            spinner.stop()
            galileo.stations.remove_member_from_station(station_id, user)
            username = galileo.profiles.list_users(
                user_ids=[user])[0].username
            station_name = galileo.stations.list_stations(
                station_ids=[station_id])[0].name
            click.echo(
                "Removed {name} from station {station_name} ({station_id})"
                .format(name=username,
                        station_name=station_name,
                        station_id=station_id))
        except Exception as e:
            spinner.stop()
            # Bug fix: click.echo("Error", e) passed the exception object as
            # click.echo's `file` argument; format it into the message.
            click.echo("Error {}".format(e))
def check_for_small_leak():
    """Run the quick (small-sample) IPv4/IPv6/torrent/DNS leak checks
    concurrently and print the collected results."""
    os.system('clear')
    spinner = Halo(text='Gathering IP-Adresses', spinner='dots')
    spinner.start()
    ipv4leak = threading.Thread(target=update_ipv4, args=(20, 50))
    ipv6leak = threading.Thread(target=update_ipv6, args=(20, 50))
    ip_torrent_leak = threading.Thread(target=update_torrent_ip, args=[30])
    dns_server_leak = threading.Thread(target=update_dns, args=(30, 1000))
    # Kick off all probes in parallel, then reap them in a fixed order,
    # updating the spinner as each finishes.
    for worker in (ipv4leak, ipv6leak, ip_torrent_leak, dns_server_leak):
        worker.start()
    ipv4leak.join()
    update_spinner(ipv4, spinner, "ipv4")
    ipv6leak.join()
    update_spinner(ipv6, spinner, "ipv6")
    ip_torrent_leak.join()
    update_spinner(torrent_ips, spinner, "torrent ip")
    dns_server_leak.join()
    update_spinner(dns, spinner, "dns server")
    spinner.stop()
    print("\n\nRESULTS")
    print_ipv4_results()
    if len(ipv6) > 0:
        print_ipv6_results()
    print_dns_results()
    if len(torrent_ips) > 0:
        print_torrent_results()
    print("\n\n")
def combine_scraped_data_for_game(self, combine_game_id):
    """Combine all scraped data sources for one game id, recording any
    PitchFX reconciliation problems on the instance for later reporting.

    Returns:
        Result.Ok() on success, Result.Fail(error) when combining failed.
    """
    subprocess.run(["clear"])
    spinner = Halo(color=get_random_cli_color(), spinner=get_random_dots_spinner())
    spinner.text = f"Combining scraped data for {combine_game_id}..."
    spinner.start()
    result = self.combine_data.execute(combine_game_id)
    # All three phases must succeed; otherwise surface the error and bail.
    if not (result["gather_scraped_data_success"] and result["combined_data_success"]
            and result["update_pitch_apps_success"]):
        spinner.fail(f"Failed to combine data for {combine_game_id}!")
        print_message(result["error"], wrap=False, fg="bright_red", bold=True)
        return Result.Fail(result["error"])
    spinner.stop()
    # Stash per-game PitchFX error details keyed by game id.
    pfx_errors = result["results"]["pfx_errors"]
    if pfx_errors.get("pitchfx_error", []):
        self.pfx_errors[combine_game_id] = pfx_errors["pitchfx_error"]
    if pfx_errors.get("invalid_pitchfx", []):
        self.invalid_pfx[combine_game_id] = pfx_errors["invalid_pitchfx"]
    if self.total_pitch_apps_any_pitchfx_error > 0:
        # Pluralize the user-facing summary counts.
        pitch_apps_plural = ("pitch appearances"
                             if self.total_pitch_apps_any_pitchfx_error > 1
                             else "pitch appearance")
        at_bats_plural = "at bats" if self.total_at_bats_any_pitchfx_error > 1 else "at bat"
        message = (
            f"PitchFX data could not be reconciled for game: {combine_game_id}\n"
            f"{self.total_pitch_apps_any_pitchfx_error} {pitch_apps_plural} with data errors "
            f"({self.total_at_bats_any_pitchfx_error} total {at_bats_plural})\n"
        )
        print_message(message, fg="bright_yellow", bold=True)
    else:
        message = f"All scraped data for {combine_game_id} was successfully combined!"
        print_message(message, fg="bright_cyan", bold=True)
    pause(message="Press any key to continue...")
    return Result.Ok()
def google_wiki(sim_ques, options, neg):
    """Score each option by googling '<option> wiki' and counting occurrences
    of the question's words on the top result page.

    Returns (points, best_option); with neg=True scores are negated so the
    least-matching option wins.
    """
    spinner = Halo(text='Googling. and searching Wikipedia', spinner='dots2')
    spinner.start()
    num_pages = 1  # number of google result pages
    points = list()
    content = ""
    maxo = ""
    maxp = -sys.maxsize
    words = split_string(sim_ques)
    for candidate in options:
        original = candidate.lower()
        # Get google search results for option + 'wiki'.
        query = original + ' wiki'
        search_wiki = google.search(query, num_pages)
        link = search_wiki[0].link
        content = get_page(link)
        soup = BeautifulSoup(content, "lxml")
        page = soup.get_text().lower()
        score = 0
        for word in words:
            score = score + page.count(word)
        score += smart_answer(page, words)
        if neg:
            score *= -1
        points.append(score)
        if score > maxp:
            maxp = score
            maxo = original
    spinner.succeed()
    spinner.stop()
    return points, maxo
async def process(data: datetime):
    """
    Main processor coordinator
    Setup the producer -> consumer
    Get avaliable currencies from BACEN and request quotes for each
    After fetching quotes, wait for task update_min and output the
    consolidated quote if any or print X
    """
    min_q = asyncio.Queue()
    dolar_q = asyncio.Queue()
    # Running minimum; renamed from `min` to avoid shadowing the builtin.
    lowest = {"symbol": "", "quote": sys.float_info.max}
    strdata = data.strftime("%m-%d-%Y")
    async with aiohttp.ClientSession(trust_env=True) as session:
        currencies = await get_currencies(session)
        spinner = Halo(text="Fetching Quotes", spinner="dots")
        spinner.start()
        quotes_tasks = [
            asyncio.ensure_future(
                get_quote(session, currency["simbolo"], strdata, min_q, dolar_q))
            for currency in currencies
        ]
        compute_task = update_min(lowest, min_q)
        await asyncio.gather(*quotes_tasks)
        # None is the sentinel telling update_min the producers are done.
        await min_q.put(None)
        await compute_task
        if lowest["symbol"]:
            lookup_description(lowest["symbol"], currencies, lowest)
        spinner.stop()
        if lowest["symbol"]:
            dolar = await dolar_q.get()
            print(f"{lowest['symbol']},{lowest['nomeFormatado']},{lowest['quote']/dolar}")
        else:
            print("x")
def search(self):
    """Run the torrent search for the configured media type and return
    the resulting torrent list."""
    # A spinner is shown only in CLI mode.
    spinner = Halo(text='Searching...', spinner='dots') if self._cli_mode else None
    if spinner:
        spinner.start()
    if self._media_type == 'tv':
        self._torrents = eztv(
            self._query.replace(' ', '-').lower(),
            page=self._page,
            limit=self._limit,
            quality=self._quality,
            debug=self._debug,
        )
    elif self._media_type == 'movie':
        self._torrents = yts(
            q=self._query,
            limit=self._limit,
            sort_by=self._sort_by,
            sort_order=self._sort_order,
            quality=self._quality,
            minimum_rating=self._minimum_rating,
            page=self._page,
            debug=self._debug,
        )
    if spinner:
        spinner.stop()
    return self._torrents
class LogProgress():
    """Context manager yielding a spinner-backed logger (SpinLogger) while
    spinning, or a plain BasicLogger when spinning is disabled."""

    def __init__(self, message, spinner=None, is_spinning=None,
                 alternative_logger=None, concommitant_logger=None):
        self.message = message
        self.spinner = Halo(text=message, spinner=spinner)
        self.alternative_logger = alternative_logger
        self.concommitant_logger = concommitant_logger
        # Spin by default; only an explicit falsy is_spinning disables it.
        self.spinning = is_spinning is None or is_spinning

    def __enter__(self):
        if not self.spinning:
            return BasicLogger(self.message, self.alternative_logger)
        self.spinner.start()
        return SpinLogger(self.spinner, self.concommitant_logger)

    def __exit__(self, type, value, traceback):
        if self.spinning:
            self.spinner.stop()
# Minimal Halo demo: display a spinner for two seconds, then stop it.
import time

from halo import Halo

spinner = Halo(text="Loading")
spinner.start()
time.sleep(2)
spinner.stop()
class Installer:
    """Concurrent WoW addon installer: downloads addon zips from Curse
    (with a fallback to the classic Curse website) and extracts them into
    the configured addons directory, tracking successes and failures.
    """
    CURSE_URL = 'https://wow.curseforge.com'
    ALT_CURSE_URL = 'https://www.curseforge.com'
    # Extracts the download path from the classic Curse download page.
    ALT_REGEX = re.compile(r'class="download__link" href="(?P<path>.+)"')

    def __init__(self, conf='conf.json', peggle=False):
        # conf is a JSON file providing 'addons_path' and the 'addons' list.
        with open(conf, 'r') as f:
            config = json.loads(f.read())
        self.addons_path = Path(config['addons_path'])
        self.addons = config['addons']
        self.peggle = peggle
        self.session = None
        # Runtime
        self.loader = None
        self._done = []
        self._failed = []

    def done(self, addon, error=None):
        """Record one finished addon (failure when *error* is given) and
        refresh the progress text on the spinner."""
        if error is not None:
            self._failed.append((addon, error))
        else:
            self._done.append(addon)
        errors = f', {len(self._failed)} errors' if self._failed else ''
        self.loader.text = f'Installing addons... ({len(self._done) + len(self._failed)}/{len(self.addons)}{errors})'

    async def _alt_install_addon(self, addon):
        """ Retry on standard Curse website. """
        url = f'{self.ALT_CURSE_URL}/wow/addons/{addon}/download'
        async with self.session.get(url) as response:
            if response.status != 200:
                self.done(addon, 'not found')
                return
            # Scrape the real zip path out of the download page.
            match = self.ALT_REGEX.search(await response.text())
            if not match:
                self.done(addon, 'regex error /!\\')
                return
        url = f"{self.ALT_CURSE_URL}{match.group('path')}"
        async with self.session.get(url) as response:
            if response.status != 200:
                self.done(addon, 'not found')
                return
            zip_data = await response.read()
        z = zipfile.ZipFile(BytesIO(zip_data))
        z.extractall(self.addons_path)
        self.done(addon)

    async def _install_addon(self, addon):
        """ Install from new Curse project website. """
        url = f'{self.CURSE_URL}/projects/{addon}/files/latest'
        async with self.session.get(url) as response:
            if response.status != 200:
                # Fall back to the classic website before giving up.
                await self._alt_install_addon(addon)
                return
            zip_data = await response.read()
        z = zipfile.ZipFile(BytesIO(zip_data))
        z.extractall(self.addons_path)
        self.done(addon)

    async def _install_peggle(self):
        """ Custom installation of the addon 'Peggle'.

        See https://github.com/adamz01h/wow_peggle
        """
        url = 'https://github.com/adamz01h/wow_peggle/archive/master.zip'
        async with self.session.get(url) as response:
            if response.status != 200:
                self.done('Peggle', 'could not retrieve archive from github')
                return
            zip_data = await response.read()
        # The archive nests the addon one directory deep; extract to a temp
        # location and move the addon folder into place.
        tmp_path = Path('/tmp/peggle')
        z = zipfile.ZipFile(BytesIO(zip_data))
        z.extractall(tmp_path)
        shutil.move(
            tmp_path / 'wow_peggle-master/Peggle',
            self.addons_path / 'Peggle',
        )
        self.done('Peggle')

    async def install(self):
        """Install every configured addon (plus Peggle when enabled)
        concurrently, then print a success/failure summary."""
        tasks = [self._install_addon(addon) for addon in self.addons]
        if self.peggle is True:
            tasks.append(self._install_peggle())
            self.addons.append('Peggle')
        self.loader = Halo(f'Installing addons... (0/{len(tasks)})')
        self.loader.start()
        async with ClientSession() as self.session:
            await asyncio.gather(*tasks)
        self.loader.stop()
        for addon, error in self._failed:
            print(f"Failed to install: '{addon}' ({error})")
        for addon in self._done:
            print(f"Successfully installed: '{addon}'")